Commit | Line | Data |
---|---|---|
c638d827 CR |
"""Implements a backend for SQLite.

See https://pysqlite.sourceforge.net/ for pysqlite info.

NOTE: we use the rdbms_common table creation methods which define datatypes
for the columns, but sqlite IGNORES these specifications -- sqlite columns
are dynamically typed.
"""
__docformat__ = 'restructuredtext'

import os, base64, marshal, shutil, time, logging

from roundup import hyperdb, date, password
from roundup.backends import rdbms_common

# Probe for an available sqlite binding, preferring the stdlib sqlite3
# module, then the pysqlite2 package, then the ancient pysqlite 1 module.
# ``sqlite_version`` records which binding was found (3, 2 or 1) and is
# consulted throughout this module to paper over API differences.
sqlite_version = None
try:
    import sqlite3 as sqlite
    sqlite_version = 3
except ImportError:
    try:
        from pysqlite2 import dbapi2 as sqlite
        # older pysqlite2 releases lack the API this backend relies on
        if sqlite.version_info < (2,1,0):
            raise ValueError('pysqlite2 minimum version is 2.1.0+ '
                '- %s found'%sqlite.version)
        sqlite_version = 2
    except ImportError:
        import sqlite
        sqlite_version = 1
29 | ||
def db_exists(config):
    """Return true if the sqlite database file already exists."""
    db_file = os.path.join(config.DATABASE, 'db')
    return os.path.exists(db_file)
32 | ||
def db_nuke(config):
    """Destroy the database by removing its directory tree entirely."""
    db_dir = config.DATABASE
    shutil.rmtree(db_dir)
35 | ||
36 | class Database(rdbms_common.Database): | |
37 | # char to use for positional arguments | |
38 | if sqlite_version in (2,3): | |
39 | arg = '?' | |
40 | else: | |
41 | arg = '%s' | |
42 | ||
43 | # used by some code to switch styles of query | |
44 | implements_intersect = 1 | |
45 | ||
46 | hyperdb_to_sql_datatypes = { | |
47 | hyperdb.String : 'VARCHAR(255)', | |
48 | hyperdb.Date : 'VARCHAR(30)', | |
49 | hyperdb.Link : 'INTEGER', | |
50 | hyperdb.Interval : 'VARCHAR(255)', | |
51 | hyperdb.Password : 'VARCHAR(255)', | |
52 | hyperdb.Boolean : 'BOOLEAN', | |
53 | hyperdb.Number : 'REAL', | |
54 | } | |
55 | hyperdb_to_sql_value = { | |
56 | hyperdb.String : str, | |
57 | hyperdb.Date : lambda x: x.serialise(), | |
58 | hyperdb.Link : int, | |
59 | hyperdb.Interval : str, | |
60 | hyperdb.Password : str, | |
61 | hyperdb.Boolean : int, | |
62 | hyperdb.Number : lambda x: x, | |
63 | hyperdb.Multilink : lambda x: x, # used in journal marshalling | |
64 | } | |
65 | sql_to_hyperdb_value = { | |
66 | hyperdb.String : lambda x: isinstance(x, unicode) and x.encode('utf8') or str(x), | |
67 | hyperdb.Date : lambda x: date.Date(str(x)), | |
68 | hyperdb.Link : str, # XXX numeric ids | |
69 | hyperdb.Interval : date.Interval, | |
70 | hyperdb.Password : lambda x: password.Password(encrypted=x), | |
71 | hyperdb.Boolean : int, | |
72 | hyperdb.Number : rdbms_common._num_cvt, | |
73 | hyperdb.Multilink : lambda x: x, # used in journal marshalling | |
74 | } | |
75 | ||
76 | def sqlite_busy_handler(self, data, table, count): | |
77 | """invoked whenever SQLite tries to access a database that is locked""" | |
78 | if count == 1: | |
79 | # use a 30 second timeout (extraordinarily generous) | |
80 | # for handling locked database | |
81 | self._busy_handler_endtime = time.time() + 30 | |
82 | elif time.time() > self._busy_handler_endtime: | |
83 | # timeout expired - no more retries | |
84 | return 0 | |
85 | # sleep adaptively as retry count grows, | |
86 | # starting from about half a second | |
87 | time_to_sleep = 0.01 * (2 << min(5, count)) | |
88 | time.sleep(time_to_sleep) | |
89 | return 1 | |
90 | ||
91 | def sql_open_connection(self): | |
92 | """Open a standard, non-autocommitting connection. | |
93 | ||
94 | pysqlite will automatically BEGIN TRANSACTION for us. | |
95 | """ | |
96 | # make sure the database directory exists | |
97 | # database itself will be created by sqlite if needed | |
98 | if not os.path.isdir(self.config.DATABASE): | |
99 | os.makedirs(self.config.DATABASE) | |
100 | ||
101 | db = os.path.join(self.config.DATABASE, 'db') | |
102 | logging.getLogger('hyperdb').info('open database %r'%db) | |
103 | # set a 30 second timeout (extraordinarily generous) for handling | |
104 | # locked database | |
105 | if sqlite_version == 1: | |
106 | conn = sqlite.connect(db=db) | |
107 | conn.db.sqlite_busy_handler(self.sqlite_busy_handler) | |
108 | else: | |
109 | conn = sqlite.connect(db, timeout=30) | |
110 | conn.row_factory = sqlite.Row | |
111 | ||
112 | # pysqlite2 / sqlite3 want us to store Unicode in the db but | |
113 | # that's not what's been done historically and it's definitely | |
114 | # not what the other backends do, so we'll stick with UTF-8 | |
115 | if sqlite_version in (2, 3): | |
116 | conn.text_factory = str | |
117 | ||
118 | cursor = conn.cursor() | |
119 | return (conn, cursor) | |
120 | ||
121 | def open_connection(self): | |
122 | # ensure files are group readable and writable | |
123 | os.umask(self.config.UMASK) | |
124 | ||
125 | (self.conn, self.cursor) = self.sql_open_connection() | |
126 | ||
127 | try: | |
128 | self.load_dbschema() | |
129 | except sqlite.DatabaseError, error: | |
130 | if str(error) != 'no such table: schema': | |
131 | raise | |
132 | self.init_dbschema() | |
133 | self.sql('create table schema (schema varchar)') | |
134 | self.sql('create table ids (name varchar, num integer)') | |
135 | self.sql('create index ids_name_idx on ids(name)') | |
136 | self.create_version_2_tables() | |
137 | ||
138 | def create_version_2_tables(self): | |
139 | self.sql('create table otks (otk_key varchar, ' | |
140 | 'otk_value varchar, otk_time integer)') | |
141 | self.sql('create index otks_key_idx on otks(otk_key)') | |
142 | self.sql('create table sessions (session_key varchar, ' | |
143 | 'session_time integer, session_value varchar)') | |
144 | self.sql('create index sessions_key_idx on ' | |
145 | 'sessions(session_key)') | |
146 | ||
147 | # full-text indexing store | |
148 | self.sql('CREATE TABLE __textids (_class varchar, ' | |
149 | '_itemid varchar, _prop varchar, _textid integer primary key) ') | |
150 | self.sql('CREATE TABLE __words (_word varchar, ' | |
151 | '_textid integer)') | |
152 | self.sql('CREATE INDEX words_word_ids ON __words(_word)') | |
153 | self.sql('CREATE INDEX words_by_id ON __words (_textid)') | |
154 | self.sql('CREATE UNIQUE INDEX __textids_by_props ON ' | |
155 | '__textids (_class, _itemid, _prop)') | |
156 | sql = 'insert into ids (name, num) values (%s,%s)'%(self.arg, self.arg) | |
157 | self.sql(sql, ('__textids', 1)) | |
158 | ||
159 | def add_new_columns_v2(self): | |
160 | # update existing tables to have the new actor column | |
161 | tables = self.database_schema['tables'] | |
162 | for classname, spec in self.classes.items(): | |
163 | if classname in tables: | |
164 | dbspec = tables[classname] | |
165 | self.update_class(spec, dbspec, force=1, adding_v2=1) | |
166 | # we've updated - don't try again | |
167 | tables[classname] = spec.schema() | |
168 | ||
169 | def fix_version_3_tables(self): | |
170 | # NOOP - no restriction on column length here | |
171 | pass | |
172 | ||
173 | def update_class(self, spec, old_spec, force=0, adding_v2=0): | |
174 | """ Determine the differences between the current spec and the | |
175 | database version of the spec, and update where necessary. | |
176 | ||
177 | If 'force' is true, update the database anyway. | |
178 | ||
179 | SQLite doesn't have ALTER TABLE, so we have to copy and | |
180 | regenerate the tables with the new schema. | |
181 | """ | |
182 | new_spec = spec.schema() | |
183 | new_spec[1].sort() | |
184 | old_spec[1].sort() | |
185 | if not force and new_spec == old_spec: | |
186 | # no changes | |
187 | return 0 | |
188 | ||
189 | logging.getLogger('hyperdb').info('update_class %s'%spec.classname) | |
190 | ||
191 | # detect multilinks that have been removed, and drop their table | |
192 | old_has = {} | |
193 | for name, prop in old_spec[1]: | |
194 | old_has[name] = 1 | |
195 | if name in spec.properties or not isinstance(prop, hyperdb.Multilink): | |
196 | continue | |
197 | # it's a multilink, and it's been removed - drop the old | |
198 | # table. First drop indexes. | |
199 | self.drop_multilink_table_indexes(spec.classname, name) | |
200 | sql = 'drop table %s_%s'%(spec.classname, prop) | |
201 | self.sql(sql) | |
202 | ||
203 | # now figure how we populate the new table | |
204 | if adding_v2: | |
205 | fetch = ['_activity', '_creation', '_creator'] | |
206 | else: | |
207 | fetch = ['_actor', '_activity', '_creation', '_creator'] | |
208 | properties = spec.getprops() | |
209 | for propname,x in new_spec[1]: | |
210 | prop = properties[propname] | |
211 | if isinstance(prop, hyperdb.Multilink): | |
212 | if propname not in old_has: | |
213 | # we need to create the new table | |
214 | self.create_multilink_table(spec, propname) | |
215 | elif force: | |
216 | tn = '%s_%s'%(spec.classname, propname) | |
217 | # grabe the current values | |
218 | sql = 'select linkid, nodeid from %s'%tn | |
219 | self.sql(sql) | |
220 | rows = self.cursor.fetchall() | |
221 | ||
222 | # drop the old table | |
223 | self.drop_multilink_table_indexes(spec.classname, propname) | |
224 | sql = 'drop table %s'%tn | |
225 | self.sql(sql) | |
226 | ||
227 | # re-create and populate the new table | |
228 | self.create_multilink_table(spec, propname) | |
229 | sql = """insert into %s (linkid, nodeid) values | |
230 | (%s, %s)"""%(tn, self.arg, self.arg) | |
231 | for linkid, nodeid in rows: | |
232 | self.sql(sql, (int(linkid), int(nodeid))) | |
233 | elif propname in old_has: | |
234 | # we copy this col over from the old table | |
235 | fetch.append('_'+propname) | |
236 | ||
237 | # select the data out of the old table | |
238 | fetch.append('id') | |
239 | fetch.append('__retired__') | |
240 | fetchcols = ','.join(fetch) | |
241 | cn = spec.classname | |
242 | sql = 'select %s from _%s'%(fetchcols, cn) | |
243 | self.sql(sql) | |
244 | olddata = self.cursor.fetchall() | |
245 | ||
246 | # TODO: update all the other index dropping code | |
247 | self.drop_class_table_indexes(cn, old_spec[0]) | |
248 | ||
249 | # drop the old table | |
250 | self.sql('drop table _%s'%cn) | |
251 | ||
252 | # create the new table | |
253 | self.create_class_table(spec) | |
254 | ||
255 | if olddata: | |
256 | inscols = ['id', '_actor', '_activity', '_creation', '_creator'] | |
257 | for propname,x in new_spec[1]: | |
258 | prop = properties[propname] | |
259 | if isinstance(prop, hyperdb.Multilink): | |
260 | continue | |
261 | elif isinstance(prop, hyperdb.Interval): | |
262 | inscols.append('_'+propname) | |
263 | inscols.append('__'+propname+'_int__') | |
264 | elif propname in old_has: | |
265 | # we copy this col over from the old table | |
266 | inscols.append('_'+propname) | |
267 | ||
268 | # do the insert of the old data - the new columns will have | |
269 | # NULL values | |
270 | args = ','.join([self.arg for x in inscols]) | |
271 | cols = ','.join(inscols) | |
272 | sql = 'insert into _%s (%s) values (%s)'%(cn, cols, args) | |
273 | for entry in olddata: | |
274 | d = [] | |
275 | for name in inscols: | |
276 | # generate the new value for the Interval int column | |
277 | if name.endswith('_int__'): | |
278 | name = name[2:-6] | |
279 | if sqlite_version in (2,3): | |
280 | try: | |
281 | v = hyperdb.Interval(entry[name]).as_seconds() | |
282 | except IndexError: | |
283 | v = None | |
284 | elif name in entry: | |
285 | v = hyperdb.Interval(entry[name]).as_seconds() | |
286 | else: | |
287 | v = None | |
288 | elif sqlite_version in (2,3): | |
289 | try: | |
290 | v = entry[name] | |
291 | except IndexError: | |
292 | v = None | |
293 | elif (sqlite_version == 1 and name in entry): | |
294 | v = entry[name] | |
295 | else: | |
296 | v = None | |
297 | d.append(v) | |
298 | self.sql(sql, tuple(d)) | |
299 | ||
300 | return 1 | |
301 | ||
302 | def sql_close(self): | |
303 | """ Squash any error caused by us already having closed the | |
304 | connection. | |
305 | """ | |
306 | try: | |
307 | self.conn.close() | |
308 | except sqlite.ProgrammingError, value: | |
309 | if str(value) != 'close failed - Connection is closed.': | |
310 | raise | |
311 | ||
312 | def sql_rollback(self): | |
313 | """ Squash any error caused by us having closed the connection (and | |
314 | therefore not having anything to roll back) | |
315 | """ | |
316 | try: | |
317 | self.conn.rollback() | |
318 | except sqlite.ProgrammingError, value: | |
319 | if str(value) != 'rollback failed - Connection is closed.': | |
320 | raise | |
321 | ||
322 | def __repr__(self): | |
323 | return '<roundlite 0x%x>'%id(self) | |
324 | ||
325 | def sql_commit(self, fail_ok=False): | |
326 | """ Actually commit to the database. | |
327 | ||
328 | Ignore errors if there's nothing to commit. | |
329 | """ | |
330 | try: | |
331 | self.conn.commit() | |
332 | except sqlite.DatabaseError, error: | |
333 | if str(error) != 'cannot commit - no transaction is active': | |
334 | raise | |
335 | # open a new cursor for subsequent work | |
336 | self.cursor = self.conn.cursor() | |
337 | ||
338 | def sql_index_exists(self, table_name, index_name): | |
339 | self.sql('pragma index_list(%s)'%table_name) | |
340 | for entry in self.cursor.fetchall(): | |
341 | if entry[1] == index_name: | |
342 | return 1 | |
343 | return 0 | |
344 | ||
345 | # old-skool id generation | |
346 | def newid(self, classname): | |
347 | """ Generate a new id for the given class | |
348 | """ | |
349 | # get the next ID | |
350 | sql = 'select num from ids where name=%s'%self.arg | |
351 | self.sql(sql, (classname, )) | |
352 | newid = int(self.cursor.fetchone()[0]) | |
353 | ||
354 | # update the counter | |
355 | sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) | |
356 | vals = (int(newid)+1, classname) | |
357 | self.sql(sql, vals) | |
358 | ||
359 | # return as string | |
360 | return str(newid) | |
361 | ||
362 | def setid(self, classname, setid): | |
363 | """ Set the id counter: used during import of database | |
364 | ||
365 | We add one to make it behave like the sequences in postgres. | |
366 | """ | |
367 | sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) | |
368 | vals = (int(setid)+1, classname) | |
369 | self.sql(sql, vals) | |
370 | ||
371 | def clear(self): | |
372 | rdbms_common.Database.clear(self) | |
373 | # set the id counters to 0 (setid adds one) so we start at 1 | |
374 | for cn in self.classes.keys(): | |
375 | self.setid(cn, 0) | |
376 | ||
377 | def create_class(self, spec): | |
378 | rdbms_common.Database.create_class(self, spec) | |
379 | sql = 'insert into ids (name, num) values (%s, %s)'%(self.arg, self.arg) | |
380 | vals = (spec.classname, 1) | |
381 | self.sql(sql, vals) | |
382 | ||
383 | if sqlite_version in (2,3): | |
384 | def load_journal(self, classname, cols, nodeid): | |
385 | """We need to turn the sqlite3.Row into a tuple so it can be | |
386 | unpacked""" | |
387 | l = rdbms_common.Database.load_journal(self, | |
388 | classname, cols, nodeid) | |
389 | cols = range(5) | |
390 | return [[row[col] for col in cols] for row in l] | |
391 | ||
class sqliteClass:
    """Mix-in applying sqlite-specific behaviour to the item classes."""

    def filter(self, search_matches, filterspec, sort=(None,None),
            group=(None,None)):
        """ If there's NO matches to a fetch, sqlite returns NULL
        instead of nothing
        """
        hits = rdbms_common.Class.filter(self, search_matches,
            filterspec, sort=sort, group=group)
        # weed out the NULL (false) entries sqlite hands back
        return [entry for entry in hits if entry]
400 | ||
# plain item class, with the sqlite filter() NULL-weeding applied
class Class(sqliteClass, rdbms_common.Class):
    pass
403 | ||
# issue class, with the sqlite filter() NULL-weeding applied
class IssueClass(sqliteClass, rdbms_common.IssueClass):
    pass
406 | ||
# file class, with the sqlite filter() NULL-weeding applied
class FileClass(sqliteClass, rdbms_common.FileClass):
    pass
409 | ||
410 | # vim: set et sts=4 sw=4 : |