1 """Implements a backend for SQLite.
3 See https://pysqlite.sourceforge.net/ for pysqlite info
6 NOTE: we use the rdbms_common table creation methods which define datatypes
7 for the columns, but sqlite IGNORES these specifications.
9 __docformat__
= 'restructuredtext'
11 import os
, base64
, marshal
, shutil
, time
, logging
13 from roundup
import hyperdb
, date
, password
14 from roundup
.backends
import rdbms_common
17 import sqlite3
as sqlite
21 from pysqlite2
import dbapi2
as sqlite
22 if sqlite
.version_info
< (2,1,0):
23 raise ValueError('pysqlite2 minimum version is 2.1.0+ '
24 '- %s found'%sqlite
.version
)
def db_exists(config):
    """Return true if the tracker database file already exists.

    The sqlite database is the single file 'db' inside the tracker's
    configured DATABASE directory.
    """
    return os.path.exists(os.path.join(config.DATABASE, 'db'))
def db_nuke(config):
    """Remove the tracker database entirely.

    NOTE(review): the 'def' line was lost in the mangled source; the body
    (shutil.rmtree of the DATABASE directory) matches db_nuke's standard
    signature in the rdbms backends — confirm against the other backends.
    """
    shutil.rmtree(config.DATABASE)
class Database(rdbms_common.Database):
    """SQLite tracker database; shared logic lives in rdbms_common."""

    # char to use for positional arguments: pysqlite2/sqlite3 use qmark
    # style, the old pysqlite used format style
    if sqlite_version in (2, 3):
        arg = '?'
    else:
        arg = '%s'

    # used by some code to switch styles of query
    implements_intersect = 1

    # column types used in CREATE TABLE statements -- note that sqlite
    # IGNORES these declarations (manifest typing); kept for consistency
    # with the other rdbms backends
    hyperdb_to_sql_datatypes = {
        hyperdb.String    : 'VARCHAR(255)',
        hyperdb.Date      : 'VARCHAR(30)',
        hyperdb.Link      : 'INTEGER',
        hyperdb.Interval  : 'VARCHAR(255)',
        hyperdb.Password  : 'VARCHAR(255)',
        hyperdb.Boolean   : 'BOOLEAN',
        hyperdb.Number    : 'REAL',
    }

    # converters applied to hyperdb values before they are written to
    # the database
    hyperdb_to_sql_value = {
        hyperdb.String    : str,
        hyperdb.Date      : lambda x: x.serialise(),
        hyperdb.Link      : int,
        hyperdb.Interval  : str,
        hyperdb.Password  : str,
        hyperdb.Boolean   : int,
        hyperdb.Number    : lambda x: x,
        hyperdb.Multilink : lambda x: x,    # used in journal marshalling
    }

    # converters applied to values read back from the database
    sql_to_hyperdb_value = {
        hyperdb.String    : lambda x: isinstance(x, unicode) and x.encode('utf8') or str(x),
        hyperdb.Date      : lambda x: date.Date(str(x)),
        hyperdb.Link      : str,    # XXX numeric ids
        hyperdb.Interval  : date.Interval,
        hyperdb.Password  : lambda x: password.Password(encrypted=x),
        hyperdb.Boolean   : int,
        hyperdb.Number    : rdbms_common._num_cvt,
        hyperdb.Multilink : lambda x: x,    # used in journal marshalling
    }
76 def sqlite_busy_handler(self
, data
, table
, count
):
77 """invoked whenever SQLite tries to access a database that is locked"""
79 # use a 30 second timeout (extraordinarily generous)
80 # for handling locked database
81 self
._busy_handler_endtime
= time
.time() + 30
82 elif time
.time() > self
._busy_handler_endtime
:
83 # timeout expired - no more retries
85 # sleep adaptively as retry count grows,
86 # starting from about half a second
87 time_to_sleep
= 0.01 * (2 << min(5, count
))
88 time
.sleep(time_to_sleep
)
91 def sql_open_connection(self
):
92 """Open a standard, non-autocommitting connection.
94 pysqlite will automatically BEGIN TRANSACTION for us.
96 # make sure the database directory exists
97 # database itself will be created by sqlite if needed
98 if not os
.path
.isdir(self
.config
.DATABASE
):
99 os
.makedirs(self
.config
.DATABASE
)
101 db
= os
.path
.join(self
.config
.DATABASE
, 'db')
102 logging
.getLogger('hyperdb').info('open database %r'%db
)
103 # set a 30 second timeout (extraordinarily generous) for handling
105 if sqlite_version
== 1:
106 conn
= sqlite
.connect(db
=db
)
107 conn
.db
.sqlite_busy_handler(self
.sqlite_busy_handler
)
109 conn
= sqlite
.connect(db
, timeout
=30)
110 conn
.row_factory
= sqlite
.Row
112 # pysqlite2 / sqlite3 want us to store Unicode in the db but
113 # that's not what's been done historically and it's definitely
114 # not what the other backends do, so we'll stick with UTF-8
115 if sqlite_version
in (2, 3):
116 conn
.text_factory
= str
118 cursor
= conn
.cursor()
119 return (conn
, cursor
)
121 def open_connection(self
):
122 # ensure files are group readable and writable
123 os
.umask(self
.config
.UMASK
)
125 (self
.conn
, self
.cursor
) = self
.sql_open_connection()
129 except sqlite
.DatabaseError
, error
:
130 if str(error
) != 'no such table: schema':
133 self
.sql('create table schema (schema varchar)')
134 self
.sql('create table ids (name varchar, num integer)')
135 self
.sql('create index ids_name_idx on ids(name)')
136 self
.create_version_2_tables()
138 def create_version_2_tables(self
):
139 self
.sql('create table otks (otk_key varchar, '
140 'otk_value varchar, otk_time integer)')
141 self
.sql('create index otks_key_idx on otks(otk_key)')
142 self
.sql('create table sessions (session_key varchar, '
143 'session_time integer, session_value varchar)')
144 self
.sql('create index sessions_key_idx on '
145 'sessions(session_key)')
147 # full-text indexing store
148 self
.sql('CREATE TABLE __textids (_class varchar, '
149 '_itemid varchar, _prop varchar, _textid integer primary key) ')
150 self
.sql('CREATE TABLE __words (_word varchar, '
152 self
.sql('CREATE INDEX words_word_ids ON __words(_word)')
153 self
.sql('CREATE INDEX words_by_id ON __words (_textid)')
154 self
.sql('CREATE UNIQUE INDEX __textids_by_props ON '
155 '__textids (_class, _itemid, _prop)')
156 sql
= 'insert into ids (name, num) values (%s,%s)'%(self
.arg
, self
.arg
)
157 self
.sql(sql
, ('__textids', 1))
159 def add_new_columns_v2(self
):
160 # update existing tables to have the new actor column
161 tables
= self
.database_schema
['tables']
162 for classname
, spec
in self
.classes
.items():
163 if classname
in tables
:
164 dbspec
= tables
[classname
]
165 self
.update_class(spec
, dbspec
, force
=1, adding_v2
=1)
166 # we've updated - don't try again
167 tables
[classname
] = spec
.schema()
169 def fix_version_3_tables(self
):
170 # NOOP - no restriction on column length here
173 def update_class(self
, spec
, old_spec
, force
=0, adding_v2
=0):
174 """ Determine the differences between the current spec and the
175 database version of the spec, and update where necessary.
177 If 'force' is true, update the database anyway.
179 SQLite doesn't have ALTER TABLE, so we have to copy and
180 regenerate the tables with the new schema.
182 new_spec
= spec
.schema()
185 if not force
and new_spec
== old_spec
:
189 logging
.getLogger('hyperdb').info('update_class %s'%spec
.classname
)
191 # detect multilinks that have been removed, and drop their table
193 for name
, prop
in old_spec
[1]:
195 if name
in spec
.properties
or not isinstance(prop
, hyperdb
.Multilink
):
197 # it's a multilink, and it's been removed - drop the old
198 # table. First drop indexes.
199 self
.drop_multilink_table_indexes(spec
.classname
, name
)
200 sql
= 'drop table %s_%s'%(spec
.classname
, prop
)
203 # now figure how we populate the new table
205 fetch
= ['_activity', '_creation', '_creator']
207 fetch
= ['_actor', '_activity', '_creation', '_creator']
208 properties
= spec
.getprops()
209 for propname
,x
in new_spec
[1]:
210 prop
= properties
[propname
]
211 if isinstance(prop
, hyperdb
.Multilink
):
212 if propname
not in old_has
:
213 # we need to create the new table
214 self
.create_multilink_table(spec
, propname
)
216 tn
= '%s_%s'%(spec
.classname
, propname
)
217 # grabe the current values
218 sql
= 'select linkid, nodeid from %s'%tn
220 rows
= self
.cursor
.fetchall()
223 self
.drop_multilink_table_indexes(spec
.classname
, propname
)
224 sql
= 'drop table %s'%tn
227 # re-create and populate the new table
228 self
.create_multilink_table(spec
, propname
)
229 sql
= """insert into %s (linkid, nodeid) values
230 (%s, %s)"""%(tn
, self
.arg
, self
.arg
)
231 for linkid
, nodeid
in rows
:
232 self
.sql(sql
, (int(linkid
), int(nodeid
)))
233 elif propname
in old_has
:
234 # we copy this col over from the old table
235 fetch
.append('_'+propname
)
237 # select the data out of the old table
239 fetch
.append('__retired__')
240 fetchcols
= ','.join(fetch
)
242 sql
= 'select %s from _%s'%(fetchcols
, cn
)
244 olddata
= self
.cursor
.fetchall()
246 # TODO: update all the other index dropping code
247 self
.drop_class_table_indexes(cn
, old_spec
[0])
250 self
.sql('drop table _%s'%cn
)
252 # create the new table
253 self
.create_class_table(spec
)
256 inscols
= ['id', '_actor', '_activity', '_creation', '_creator']
257 for propname
,x
in new_spec
[1]:
258 prop
= properties
[propname
]
259 if isinstance(prop
, hyperdb
.Multilink
):
261 elif isinstance(prop
, hyperdb
.Interval
):
262 inscols
.append('_'+propname
)
263 inscols
.append('__'+propname
+'_int__')
264 elif propname
in old_has
:
265 # we copy this col over from the old table
266 inscols
.append('_'+propname
)
268 # do the insert of the old data - the new columns will have
270 args
= ','.join([self
.arg
for x
in inscols
])
271 cols
= ','.join(inscols
)
272 sql
= 'insert into _%s (%s) values (%s)'%(cn
, cols
, args
)
273 for entry
in olddata
:
276 # generate the new value for the Interval int column
277 if name
.endswith('_int__'):
279 if sqlite_version
in (2,3):
281 v
= hyperdb
.Interval(entry
[name
]).as_seconds()
285 v
= hyperdb
.Interval(entry
[name
]).as_seconds()
288 elif sqlite_version
in (2,3):
293 elif (sqlite_version
== 1 and name
in entry
):
298 self
.sql(sql
, tuple(d
))
303 """ Squash any error caused by us already having closed the
308 except sqlite
.ProgrammingError
, value
:
309 if str(value
) != 'close failed - Connection is closed.':
312 def sql_rollback(self
):
313 """ Squash any error caused by us having closed the connection (and
314 therefore not having anything to roll back)
318 except sqlite
.ProgrammingError
, value
:
319 if str(value
) != 'rollback failed - Connection is closed.':
323 return '<roundlite 0x%x>'%id(self
)
325 def sql_commit(self
, fail_ok
=False):
326 """ Actually commit to the database.
328 Ignore errors if there's nothing to commit.
332 except sqlite
.DatabaseError
, error
:
333 if str(error
) != 'cannot commit - no transaction is active':
335 # open a new cursor for subsequent work
336 self
.cursor
= self
.conn
.cursor()
338 def sql_index_exists(self
, table_name
, index_name
):
339 self
.sql('pragma index_list(%s)'%table_name
)
340 for entry
in self
.cursor
.fetchall():
341 if entry
[1] == index_name
:
345 # old-skool id generation
346 def newid(self
, classname
):
347 """ Generate a new id for the given class
350 sql
= 'select num from ids where name=%s'%self
.arg
351 self
.sql(sql
, (classname
, ))
352 newid
= int(self
.cursor
.fetchone()[0])
355 sql
= 'update ids set num=%s where name=%s'%(self
.arg
, self
.arg
)
356 vals
= (int(newid
)+1, classname
)
362 def setid(self
, classname
, setid
):
363 """ Set the id counter: used during import of database
365 We add one to make it behave like the sequences in postgres.
367 sql
= 'update ids set num=%s where name=%s'%(self
.arg
, self
.arg
)
368 vals
= (int(setid
)+1, classname
)
372 rdbms_common
.Database
.clear(self
)
373 # set the id counters to 0 (setid adds one) so we start at 1
374 for cn
in self
.classes
.keys():
377 def create_class(self
, spec
):
378 rdbms_common
.Database
.create_class(self
, spec
)
379 sql
= 'insert into ids (name, num) values (%s, %s)'%(self
.arg
, self
.arg
)
380 vals
= (spec
.classname
, 1)
383 if sqlite_version
in (2,3):
384 def load_journal(self
, classname
, cols
, nodeid
):
385 """We need to turn the sqlite3.Row into a tuple so it can be
387 l
= rdbms_common
.Database
.load_journal(self
,
388 classname
, cols
, nodeid
)
390 return [[row
[col
] for col
in cols
] for row
in l
]
class sqliteClass:
    """Mix-in with the sqlite-specific filter() tweak; the class header
    line was lost in the mangled source but the name is confirmed by the
    Class/IssueClass/FileClass bases below."""

    def filter(self, search_matches, filterspec, sort=(None, None),
            group=(None, None)):
        """ If there's NO matches to a fetch, sqlite returns NULL
            instead of nothing -- strip the false values out
        """
        return [f for f in rdbms_common.Class.filter(self, search_matches,
            filterspec, sort=sort, group=group) if f]
class Class(sqliteClass, rdbms_common.Class):
    """Standard sqlite class with the NULL-filtering filter() mix-in."""
    pass
class IssueClass(sqliteClass, rdbms_common.IssueClass):
    """Issue class with the NULL-filtering filter() mix-in."""
    pass
class FileClass(sqliteClass, rdbms_common.FileClass):
    """File class with the NULL-filtering filter() mix-in."""
    pass
410 # vim: set et sts=4 sw=4 :