Commit | Line | Data |
---|---|---|
c638d827 CR |
1 | # |
2 | # Copyright (c) 2001 Bizar Software Pty Ltd (http://www.bizarsoftware.com.au/) | |
3 | # This module is free software, and you may redistribute it and/or modify | |
4 | # under the same terms as Python, so long as this copyright message and | |
5 | # disclaimer are retained in their original form. | |
6 | # | |
7 | # IN NO EVENT SHALL BIZAR SOFTWARE PTY LTD BE LIABLE TO ANY PARTY FOR | |
8 | # DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING | |
9 | # OUT OF THE USE OF THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE | |
10 | # POSSIBILITY OF SUCH DAMAGE. | |
11 | # | |
12 | # BIZAR SOFTWARE PTY LTD SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, | |
13 | # BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS | |
14 | # FOR A PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" | |
15 | # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, | |
16 | # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. | |
17 | # | |
18 | """ Relational database (SQL) backend common code. | |
19 | ||
20 | Basics: | |
21 | ||
22 | - map roundup classes to relational tables | |
23 | - automatically detect schema changes and modify the table schemas | |
24 | appropriately (we store the "database version" of the schema in the | |
25 | database itself as the only row of the "schema" table) | |
26 | - multilinks (which represent a many-to-many relationship) are handled through | |
27 | intermediate tables | |
28 | - journals are stored adjunct to the per-class tables | |
29 | - table names and columns have "_" prepended so the names can't clash with | |
30 | restricted names (like "order") | |
31 | - retirement is determined by the __retired__ column being > 0 | |
32 | ||
33 | Database-specific changes may generally be pushed out to the overridable | |
34 | sql_* methods, since everything else should be fairly generic. There's | |
35 | probably a bit of work to be done if a database is used that actually | |
36 | honors column typing, since the initial databases don't (sqlite stores | |
37 | everything as a string.) | |
38 | ||
39 | The schema of the hyperdb being mapped to the database is stored in the | |
40 | database itself as a repr()'ed dictionary of information about each Class | |
41 | that maps to a table. If that information differs from the hyperdb schema, | |
42 | then we update it. We also store in the schema dict a version which | |
43 | allows us to upgrade the database schema when necessary. See upgrade_db(). | |
44 | ||
To force a uniqueness constraint on the key properties we put the item
id into the __retired__ column during retirement (so it's 0 for "active"
items) and place a uniqueness constraint on key + __retired__. This is
48 | particularly important for the users class where multiple users may | |
49 | try to have the same username, with potentially many retired users with | |
50 | the same name. | |
51 | """ | |
52 | __docformat__ = 'restructuredtext' | |
53 | ||
54 | # standard python modules | |
55 | import sys, os, time, re, errno, weakref, copy, logging | |
56 | ||
57 | # roundup modules | |
58 | from roundup import hyperdb, date, password, roundupdb, security, support | |
59 | from roundup.hyperdb import String, Password, Date, Interval, Link, \ | |
60 | Multilink, DatabaseError, Boolean, Number, Node | |
61 | from roundup.backends import locking | |
62 | from roundup.support import reversed | |
63 | from roundup.i18n import _ | |
64 | ||
65 | # support | |
66 | from roundup.backends.blobfiles import FileStorage | |
67 | try: | |
68 | from roundup.backends.indexer_xapian import Indexer | |
69 | except ImportError: | |
70 | from roundup.backends.indexer_rdbms import Indexer | |
71 | from roundup.backends.sessions_rdbms import Sessions, OneTimeKeys | |
72 | from roundup.date import Range | |
73 | ||
74 | # dummy value meaning "argument not passed" | |
75 | _marker = [] | |
76 | ||
77 | def _num_cvt(num): | |
78 | num = str(num) | |
79 | try: | |
80 | return int(num) | |
81 | except: | |
82 | return float(num) | |
83 | ||
84 | def _bool_cvt(value): | |
85 | if value in ('TRUE', 'FALSE'): | |
86 | return {'TRUE': 1, 'FALSE': 0}[value] | |
87 | # assume it's a number returned from the db API | |
88 | return int(value) | |
89 | ||
def connection_dict(config, dbnamestr=None):
    """Used by Postgresql and MySQL to determine the keyword args for
    opening the database connection.

    'dbnamestr', when given, is the keyword under which the configured
    database name is passed (the two backends use different names).
    Only settings that are actually configured (not None) are included.
    """
    kwargs = {}
    if dbnamestr:
        kwargs[dbnamestr] = config.RDBMS_NAME
    settings = ('host', 'port', 'password', 'user', 'read_default_group',
        'read_default_file')
    for name in settings:
        value = config['RDBMS_' + name.upper()]
        if value is not None:
            kwargs[name] = value
    return kwargs
102 | ||
103 | class Database(FileStorage, hyperdb.Database, roundupdb.Database): | |
104 | """ Wrapper around an SQL database that presents a hyperdb interface. | |
105 | ||
106 | - some functionality is specific to the actual SQL database, hence | |
107 | the sql_* methods that are NotImplemented | |
108 | - we keep a cache of the latest N row fetches (where N is configurable). | |
109 | """ | |
    def __init__(self, config, journaltag=None):
        """ Open the database and load the schema from it.

            'config' supplies DATABASE, UMASK, RDBMS_* settings;
            'journaltag' identifies the user making changes (used when
            writing journal entries).
        """
        FileStorage.__init__(self, config.UMASK)
        self.config, self.journaltag = config, journaltag
        self.dir = config.DATABASE
        # classname -> Class instance; populated by addclass()
        self.classes = {}
        self.indexer = Indexer(self)
        self.security = security.Security(self)

        # additional transaction support for external files and the like
        self.transactions = []

        # keep a cache of the N most recently retrieved rows of any kind
        # (classname, nodeid) = row
        self.cache_size = config.RDBMS_CACHE_SIZE
        self.cache = {}
        self.cache_lru = []
        # simple counters for runtime performance monitoring
        self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
            'filtering': 0}

        # database lock
        self.lockfile = None

        # open a connection to the database, creating the "conn" attribute
        # (implemented by the backend-specific subclass)
        self.open_connection()
136 | ||
137 | def clearCache(self): | |
138 | self.cache = {} | |
139 | self.cache_lru = [] | |
140 | ||
    def getSessionManager(self):
        """Return the web-session store backed by this database."""
        return Sessions(self)
143 | ||
    def getOTKManager(self):
        """Return the one-time-key store backed by this database."""
        return OneTimeKeys(self)
146 | ||
147 | def open_connection(self): | |
148 | """ Open a connection to the database, creating it if necessary. | |
149 | ||
150 | Must call self.load_dbschema() | |
151 | """ | |
152 | raise NotImplemented | |
153 | ||
    def sql(self, sql, args=None):
        """ Execute the sql with the optional args.

            'args' are passed to the DB-API cursor for parameter
            substitution; passing an empty/None args calls execute()
            without a parameter tuple (some drivers object otherwise).
        """
        self.log_debug('SQL %r %r'%(sql, args))
        if args:
            self.cursor.execute(sql, args)
        else:
            self.cursor.execute(sql)
162 | ||
    def sql_fetchone(self):
        """ Fetch a single row. If there's nothing to fetch, return None.
        """
        return self.cursor.fetchone()
167 | ||
    def sql_fetchall(self):
        """ Fetch all rows. If there's nothing to fetch, return [].
        """
        return self.cursor.fetchall()
172 | ||
173 | def sql_stringquote(self, value): | |
174 | """ Quote the string so it's safe to put in the 'sql quotes' | |
175 | """ | |
176 | return re.sub("'", "''", str(value)) | |
177 | ||
    def init_dbschema(self):
        """Initialise an empty stored-schema dict for a fresh database.

        'version' lets upgrade_db() migrate older databases; 'tables'
        maps classname -> stored class schema.
        """
        self.database_schema = {
            'version': self.current_db_version,
            'tables': {}
        }
183 | ||
    def load_dbschema(self):
        """ Load the schema definition that the database currently implements

        The schema is stored as a repr()'ed dict in the single row of the
        "schema" table; an empty table yields an empty schema dict.
        """
        self.cursor.execute('select schema from schema')
        schema = self.cursor.fetchone()
        if schema:
            # NOTE(review): eval() of database content - acceptable only
            # because the schema row is written by save_dbschema() from
            # this same trusted process; never store untrusted data here
            self.database_schema = eval(schema[0])
        else:
            self.database_schema = {}
193 | ||
    def save_dbschema(self):
        """ Save the schema definition that the database currently implements

        The "schema" table holds exactly one row: the repr() of the
        stored-schema dict (see load_dbschema).
        """
        s = repr(self.database_schema)
        self.sql('delete from schema')
        self.sql('insert into schema values (%s)'%self.arg, (s,))
200 | ||
    def post_init(self):
        """ Called once the schema initialisation has finished.

            We should now confirm that the schema defined by our "classes"
            attribute actually matches the schema in the database.

            Creates, alters or drops tables as needed, runs pending
            upgrade_db() migrations, persists the stored schema when it
            changed, reindexes if the indexer requests it, then commits.
        """
        # set to 1 whenever the stored schema needs re-saving
        save = 0

        # handle changes in the schema
        tables = self.database_schema['tables']
        for classname, spec in self.classes.iteritems():
            if classname in tables:
                dbspec = tables[classname]
                if self.update_class(spec, dbspec):
                    tables[classname] = spec.schema()
                    save = 1
            else:
                # brand-new class: create its tables
                self.create_class(spec)
                tables[classname] = spec.schema()
                save = 1

        # classes removed from the hyperdb schema: drop their tables
        # (list() because we delete from 'tables' while iterating)
        for classname, spec in list(tables.items()):
            if classname not in self.classes:
                self.drop_class(classname, tables[classname])
                del tables[classname]
                save = 1

        # now upgrade the database for column type changes, new internal
        # tables, etc.
        save = save | self.upgrade_db()

        # update the database version of the schema
        if save:
            self.save_dbschema()

        # reindex the db if necessary
        if self.indexer.should_reindex():
            self.reindex()

        # commit
        self.sql_commit()
242 | ||
    # update this number when we need to make changes to the SQL structure
    # of the backend database
    current_db_version = 5
    # set to True by upgrade_db() when a migration was actually performed
    db_version_updated = False

    def upgrade_db(self):
        """ Update the SQL database to reflect changes in the backend code.

            Return boolean whether we need to save the schema.

            Migrations are cumulative: a version-1 database runs every
            step below in order.  Raises DatabaseError when the database
            is *newer* than this code.
        """
        version = self.database_schema.get('version', 1)
        if version > self.current_db_version:
            raise DatabaseError('attempting to run rev %d DATABASE with rev '
                '%d CODE!'%(version, self.current_db_version))
        if version == self.current_db_version:
            # nothing to do
            return 0

        if version < 2:
            self.log_info('upgrade to version 2')
            # change the schema structure
            self.database_schema = {'tables': self.database_schema}

            # version 1 didn't have the actor column (note that in
            # MySQL this will also transition the tables to typed columns)
            self.add_new_columns_v2()

            # version 1 doesn't have the OTK, session and indexing in the
            # database
            self.create_version_2_tables()

        if version < 3:
            self.log_info('upgrade to version 3')
            self.fix_version_2_tables()

        if version < 4:
            self.fix_version_3_tables()

        if version < 5:
            self.fix_version_4_tables()

        self.database_schema['version'] = self.current_db_version
        self.db_version_updated = True
        return 1
286 | ||
    def fix_version_3_tables(self):
        """Migration step: widen the OTK/session value columns.

        Existing rows are discarded (sessions/OTKs are transient data).
        """
        # drop the shorter VARCHAR OTK column and add a new TEXT one
        for name in ('otk', 'session'):
            self.sql('DELETE FROM %ss'%name)
            self.sql('ALTER TABLE %ss DROP %s_value'%(name, name))
            self.sql('ALTER TABLE %ss ADD %s_value TEXT'%(name, name))
293 | ||
    def fix_version_2_tables(self):
        """Migration step hook; backends override where column types
        changed between versions 2 and 3."""
        # Default (used by sqlite): NOOP
        pass
297 | ||
    def fix_version_4_tables(self):
        """Migration step: change __retired__ from a flag to the item id.

        Storing the id (instead of 1) in __retired__ for retired items
        allows a unique constraint on (key, __retired__) - see the module
        docstring.  Then add that constraint for keyed classes.
        """
        # note this is an explicit call now
        c = self.cursor
        for cn, klass in self.classes.iteritems():
            c.execute('select id from _%s where __retired__<>0'%(cn,))
            for (id,) in c.fetchall():
                c.execute('update _%s set __retired__=%s where id=%s'%(cn,
                    self.arg, self.arg), (id, id))

            if klass.key:
                self.add_class_key_required_unique_constraint(cn, klass.key)
309 | ||
    def _convert_journal_tables(self):
        """Get current journal table contents, drop the table and re-create

        Used by backend migrations that need to change journal column
        types: the rows themselves are reinserted unchanged.
        """
        c = self.cursor
        cols = ','.join('nodeid date tag action params'.split())
        for klass in self.classes.itervalues():
            # slurp and drop
            sql = 'select %s from %s__journal order by date'%(cols,
                klass.classname)
            c.execute(sql)
            contents = c.fetchall()
            # indexes must go before the table can be dropped
            self.drop_journal_table_indexes(klass.classname)
            c.execute('drop table %s__journal'%klass.classname)

            # re-create and re-populate
            self.create_journal_table(klass)
            a = self.arg
            sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%(
                klass.classname, cols, a, a, a, a, a)
            for row in contents:
                # no data conversion needed
                self.cursor.execute(sql, row)
331 | ||
    def _convert_string_properties(self):
        """Get current Class tables that contain String properties, and
        convert the VARCHAR columns to TEXT

        Works by slurping each class table, dropping and re-creating it
        with the new column types, then reinserting the rows.
        """
        c = self.cursor
        for klass in self.classes.itervalues():
            # slurp and drop
            cols, mls = self.determine_columns(list(klass.properties.iteritems()))
            scols = ','.join([i[0] for i in cols])
            sql = 'select id,%s from _%s'%(scols, klass.classname)
            c.execute(sql)
            contents = c.fetchall()
            self.drop_class_table_indexes(klass.classname, klass.getkey())
            c.execute('drop table _%s'%klass.classname)

            # re-create and re-populate
            # NOTE(review): create_sequence is only accepted by backend
            # subclasses' create_class_table overrides - confirm against
            # the backend in use
            self.create_class_table(klass, create_sequence=0)
            a = ','.join([self.arg for i in range(len(cols)+1)])
            sql = 'insert into _%s (id,%s) values (%s)'%(klass.classname,
                scols, a)
            for row in contents:
                l = []
                for entry in row:
                    # mysql will already be a string - psql needs "help"
                    if entry is not None and not isinstance(entry, type('')):
                        entry = str(entry)
                    l.append(entry)
                self.cursor.execute(sql, l)
359 | ||
    def refresh_database(self):
        """Re-run the schema synchronisation (see post_init)."""
        self.post_init()
362 | ||
363 | ||
364 | def reindex(self, classname=None, show_progress=False): | |
365 | if classname: | |
366 | classes = [self.getclass(classname)] | |
367 | else: | |
368 | classes = list(self.classes.itervalues()) | |
369 | for klass in classes: | |
370 | if show_progress: | |
371 | for nodeid in support.Progress('Reindex %s'%klass.classname, | |
372 | klass.list()): | |
373 | klass.index(nodeid) | |
374 | else: | |
375 | for nodeid in klass.list(): | |
376 | klass.index(nodeid) | |
377 | self.indexer.save_index() | |
378 | ||
    # hyperdb property class -> SQL column type used when creating tables;
    # consulted (with a subclass fallback) by hyperdb_to_sql_datatype()
    hyperdb_to_sql_datatypes = {
        hyperdb.String : 'TEXT',
        hyperdb.Date   : 'TIMESTAMP',
        hyperdb.Link   : 'INTEGER',
        hyperdb.Interval  : 'VARCHAR(255)',
        hyperdb.Password  : 'VARCHAR(255)',
        hyperdb.Boolean   : 'BOOLEAN',
        hyperdb.Number    : 'REAL',
    }
388 | ||
389 | def hyperdb_to_sql_datatype(self, propclass): | |
390 | ||
391 | datatype = self.hyperdb_to_sql_datatypes.get(propclass) | |
392 | if datatype: | |
393 | return datatype | |
394 | ||
395 | for k, v in self.hyperdb_to_sql_datatypes.iteritems(): | |
396 | if issubclass(propclass, k): | |
397 | return v | |
398 | ||
399 | raise ValueError('%r is not a hyperdb property class' % propclass) | |
400 | ||
    def determine_columns(self, properties):
        """ Figure the column names and multilink properties from the spec

            "properties" is a list of (name, prop) where prop may be an
            instance of a hyperdb "type" _or_ a string repr of that type.

            Returns (cols, mls): 'cols' is a sorted list of
            (column_name, sql_type) pairs (property columns get a '_'
            prefix), 'mls' the names of Multilink properties, which are
            stored in separate join tables rather than columns.
        """
        # every class table gets these audit columns
        cols = [
            ('_actor', self.hyperdb_to_sql_datatype(hyperdb.Link)),
            ('_activity', self.hyperdb_to_sql_datatype(hyperdb.Date)),
            ('_creator', self.hyperdb_to_sql_datatype(hyperdb.Link)),
            ('_creation', self.hyperdb_to_sql_datatype(hyperdb.Date)),
        ]
        mls = []
        # add the multilinks separately
        for col, prop in properties:
            if isinstance(prop, Multilink):
                mls.append(col)
                continue

            if isinstance(prop, type('')):
                raise ValueError("string property spec!")
                #and prop.find('Multilink') != -1:
                #mls.append(col)

            datatype = self.hyperdb_to_sql_datatype(prop.__class__)
            cols.append(('_'+col, datatype))

            # Intervals stored as two columns: the second holds the
            # total seconds so SQL can sort/compare intervals
            if isinstance(prop, Interval):
                cols.append(('__'+col+'_int__', 'BIGINT'))

        cols.sort()
        return cols, mls
434 | ||
    def update_class(self, spec, old_spec, force=0):
        """ Determine the differences between the current spec and the
            database version of the spec, and update where necessary.

            If 'force' is true, update the database anyway.

            'spec' is the hyperdb Class; 'old_spec' the stored schema
            tuple (key, [(propname, prop-repr)]).  Returns 1 when the
            stored schema must be re-saved, 0 otherwise.
        """
        new_spec = spec.schema()
        # sort both property lists so comparison is order-insensitive
        new_spec[1].sort()
        old_spec[1].sort()
        if not force and new_spec == old_spec:
            # no changes
            return 0

        logger = logging.getLogger('hyperdb')
        logger.info('update_class %s'%spec.classname)

        logger.debug('old_spec %r'%(old_spec,))
        logger.debug('new_spec %r'%(new_spec,))

        # detect key prop change for potential index change
        keyprop_changes = {}
        if new_spec[0] != old_spec[0]:
            if old_spec[0]:
                keyprop_changes['remove'] = old_spec[0]
            if new_spec[0]:
                keyprop_changes['add'] = new_spec[0]

        # detect multilinks that have been removed, and drop their table
        old_has = {}
        for name, prop in old_spec[1]:
            old_has[name] = 1
            if name in spec.properties:
                continue

            # property was removed; note 'prop' here is the stored
            # string repr, hence the find() test
            if prop.find('Multilink to') != -1:
                # first drop indexes.
                self.drop_multilink_table_indexes(spec.classname, name)

                # now the multilink table itself
                sql = 'drop table %s_%s'%(spec.classname, name)
            else:
                # if this is the key prop, drop the index first
                if old_spec[0] == prop:
                    self.drop_class_table_key_index(spec.classname, name)
                    # already handled; don't drop the index again below
                    del keyprop_changes['remove']

                # drop the column
                sql = 'alter table _%s drop column _%s'%(spec.classname, name)

            self.sql(sql)

        # if we didn't remove the key prop just then, but the key prop has
        # changed, we still need to remove the old index
        if 'remove' in keyprop_changes:
            self.drop_class_table_key_index(spec.classname,
                keyprop_changes['remove'])

        # add new columns
        for propname, prop in new_spec[1]:
            if propname in old_has:
                continue
            # use the live property instance, not the stored repr
            prop = spec.properties[propname]
            if isinstance(prop, Multilink):
                self.create_multilink_table(spec, propname)
            else:
                # add the column
                coltype = self.hyperdb_to_sql_datatype(prop.__class__)
                sql = 'alter table _%s add column _%s %s'%(
                    spec.classname, propname, coltype)
                self.sql(sql)

                # extra Interval column (seconds value, for SQL sorting)
                if isinstance(prop, Interval):
                    sql = 'alter table _%s add column __%s_int__ BIGINT'%(
                        spec.classname, propname)
                    self.sql(sql)

                # if the new column is a key prop, we need an index!
                if new_spec[0] == propname:
                    self.create_class_table_key_index(spec.classname, propname)
                    del keyprop_changes['add']

        # if we didn't add the key prop just then, but the key prop has
        # changed, we still need to add the new index
        if 'add' in keyprop_changes:
            self.create_class_table_key_index(spec.classname,
                keyprop_changes['add'])

        return 1
524 | ||
525 | def determine_all_columns(self, spec): | |
526 | """Figure out the columns from the spec and also add internal columns | |
527 | ||
528 | """ | |
529 | cols, mls = self.determine_columns(list(spec.properties.iteritems())) | |
530 | ||
531 | # add on our special columns | |
532 | cols.append(('id', 'INTEGER PRIMARY KEY')) | |
533 | cols.append(('__retired__', 'INTEGER DEFAULT 0')) | |
534 | return cols, mls | |
535 | ||
    def create_class_table(self, spec):
        """Create the class table for the given Class "spec". Creates the
        indexes too.

        Returns the (cols, mls) pair from determine_all_columns() so the
        caller can go on to create multilink tables.

        NOTE(review): _convert_string_properties() calls this with a
        create_sequence=0 keyword - presumably backend subclasses
        override this method with that parameter; confirm before calling
        the base implementation from migration code.
        """
        cols, mls = self.determine_all_columns(spec)

        # create the base table
        scols = ','.join(['%s %s'%x for x in cols])
        sql = 'create table _%s (%s)'%(spec.classname, scols)
        self.sql(sql)

        self.create_class_table_indexes(spec)

        return cols, mls
549 | ||
    def create_class_table_indexes(self, spec):
        """ Create the standard indexes on the class table for 'spec':
            the __retired__ index, and (for keyed classes) the key-prop
            index plus the (key, __retired__) unique constraint.
        """
        # create __retired__ index
        index_sql2 = 'create index _%s_retired_idx on _%s(__retired__)'%(
            spec.classname, spec.classname)
        self.sql(index_sql2)

        # create index for key property
        if spec.key:
            index_sql3 = 'create index _%s_%s_idx on _%s(_%s)'%(
                spec.classname, spec.key,
                spec.classname, spec.key)
            self.sql(index_sql3)

            # and the unique index for key / retired(id)
            self.add_class_key_required_unique_constraint(spec.classname,
                spec.key)

        # TODO: create indexes on (selected?) Link property columns, as
        # they're more likely to be used for lookup
571 | ||
    def add_class_key_required_unique_constraint(self, cn, key):
        """Enforce key uniqueness among active items of class 'cn'.

        Retired items store their own id in __retired__ (active items
        store 0), so a unique index over (__retired__, key) permits many
        retired items with the same key but only one active one.
        """
        sql = '''create unique index _%s_key_retired_idx
            on _%s(__retired__, _%s)'''%(cn, cn, key)
        self.sql(sql)
576 | ||
577 | def drop_class_table_indexes(self, cn, key): | |
578 | # drop the old table indexes first | |
579 | l = ['_%s_id_idx'%cn, '_%s_retired_idx'%cn] | |
580 | if key: | |
581 | l.append('_%s_%s_idx'%(cn, key)) | |
582 | ||
583 | table_name = '_%s'%cn | |
584 | for index_name in l: | |
585 | if not self.sql_index_exists(table_name, index_name): | |
586 | continue | |
587 | index_sql = 'drop index '+index_name | |
588 | self.sql(index_sql) | |
589 | ||
    def create_class_table_key_index(self, cn, key):
        """ Create the lookup index on the 'key' property column of
            class table 'cn'.
        """
        sql = 'create index _%s_%s_idx on _%s(_%s)'%(cn, key, cn, key)
        self.sql(sql)
595 | ||
596 | def drop_class_table_key_index(self, cn, key): | |
597 | table_name = '_%s'%cn | |
598 | index_name = '_%s_%s_idx'%(cn, key) | |
599 | if self.sql_index_exists(table_name, index_name): | |
600 | sql = 'drop index '+index_name | |
601 | self.sql(sql) | |
602 | ||
603 | # and now the retired unique index too | |
604 | index_name = '_%s_key_retired_idx'%cn | |
605 | if self.sql_index_exists(table_name, index_name): | |
606 | sql = 'drop index '+index_name | |
607 | self.sql(sql) | |
608 | ||
609 | def create_journal_table(self, spec): | |
610 | """ create the journal table for a class given the spec and | |
611 | already-determined cols | |
612 | """ | |
613 | # journal table | |
614 | cols = ','.join(['%s varchar'%x | |
615 | for x in 'nodeid date tag action params'.split()]) | |
616 | sql = """create table %s__journal ( | |
617 | nodeid integer, date %s, tag varchar(255), | |
618 | action varchar(255), params text)""" % (spec.classname, | |
619 | self.hyperdb_to_sql_datatype(hyperdb.Date)) | |
620 | self.sql(sql) | |
621 | self.create_journal_table_indexes(spec) | |
622 | ||
623 | def create_journal_table_indexes(self, spec): | |
624 | # index on nodeid | |
625 | sql = 'create index %s_journ_idx on %s__journal(nodeid)'%( | |
626 | spec.classname, spec.classname) | |
627 | self.sql(sql) | |
628 | ||
629 | def drop_journal_table_indexes(self, classname): | |
630 | index_name = '%s_journ_idx'%classname | |
631 | if not self.sql_index_exists('%s__journal'%classname, index_name): | |
632 | return | |
633 | index_sql = 'drop index '+index_name | |
634 | self.sql(index_sql) | |
635 | ||
    def create_multilink_table(self, spec, ml):
        """ Create a multilink table for the "ml" property of the class
            given by the spec

            The join table holds (linkid, nodeid) pairs: 'nodeid' is the
            item owning the multilink, 'linkid' the linked-to item.
        """
        # create the table
        sql = 'create table %s_%s (linkid INTEGER, nodeid INTEGER)'%(
            spec.classname, ml)
        self.sql(sql)
        self.create_multilink_table_indexes(spec, ml)
645 | ||
646 | def create_multilink_table_indexes(self, spec, ml): | |
647 | # create index on linkid | |
648 | index_sql = 'create index %s_%s_l_idx on %s_%s(linkid)'%( | |
649 | spec.classname, ml, spec.classname, ml) | |
650 | self.sql(index_sql) | |
651 | ||
652 | # create index on nodeid | |
653 | index_sql = 'create index %s_%s_n_idx on %s_%s(nodeid)'%( | |
654 | spec.classname, ml, spec.classname, ml) | |
655 | self.sql(index_sql) | |
656 | ||
657 | def drop_multilink_table_indexes(self, classname, ml): | |
658 | l = [ | |
659 | '%s_%s_l_idx'%(classname, ml), | |
660 | '%s_%s_n_idx'%(classname, ml) | |
661 | ] | |
662 | table_name = '%s_%s'%(classname, ml) | |
663 | for index_name in l: | |
664 | if not self.sql_index_exists(table_name, index_name): | |
665 | continue | |
666 | index_sql = 'drop index %s'%index_name | |
667 | self.sql(index_sql) | |
668 | ||
    def create_class(self, spec):
        """ Create a database table according to the given spec.

            Also creates the journal table and one join table per
            Multilink property.
        """
        cols, mls = self.create_class_table(spec)
        self.create_journal_table(spec)

        # now create the multilink tables
        for ml in mls:
            self.create_multilink_table(spec, ml)
678 | ||
679 | def drop_class(self, cn, spec): | |
680 | """ Drop the given table from the database. | |
681 | ||
682 | Drop the journal and multilink tables too. | |
683 | """ | |
684 | properties = spec[1] | |
685 | # figure the multilinks | |
686 | mls = [] | |
687 | for propname, prop in properties: | |
688 | if isinstance(prop, Multilink): | |
689 | mls.append(propname) | |
690 | ||
691 | # drop class table and indexes | |
692 | self.drop_class_table_indexes(cn, spec[0]) | |
693 | ||
694 | self.drop_class_table(cn) | |
695 | ||
696 | # drop journal table and indexes | |
697 | self.drop_journal_table_indexes(cn) | |
698 | sql = 'drop table %s__journal'%cn | |
699 | self.sql(sql) | |
700 | ||
701 | for ml in mls: | |
702 | # drop multilink table and indexes | |
703 | self.drop_multilink_table_indexes(cn, ml) | |
704 | sql = 'drop table %s_%s'%(spec.classname, ml) | |
705 | self.sql(sql) | |
706 | ||
707 | def drop_class_table(self, cn): | |
708 | sql = 'drop table _%s'%cn | |
709 | self.sql(sql) | |
710 | ||
711 | # | |
712 | # Classes | |
713 | # | |
714 | def __getattr__(self, classname): | |
715 | """ A convenient way of calling self.getclass(classname). | |
716 | """ | |
717 | if classname in self.classes: | |
718 | return self.classes[classname] | |
719 | raise AttributeError(classname) | |
720 | ||
    def addclass(self, cl):
        """ Add a Class to the hyperdatabase.

            Raises ValueError if a class of the same name is already
            registered.  Also registers the standard Create/Edit/View
            permissions for the class.
        """
        cn = cl.classname
        if cn in self.classes:
            raise ValueError(cn)
        self.classes[cn] = cl

        # add default Edit and View permissions
        self.security.addPermission(name="Create", klass=cn,
            description="User is allowed to create "+cn)
        self.security.addPermission(name="Edit", klass=cn,
            description="User is allowed to edit "+cn)
        self.security.addPermission(name="View", klass=cn,
            description="User is allowed to access "+cn)
736 | ||
737 | def getclasses(self): | |
738 | """ Return a list of the names of all existing classes. | |
739 | """ | |
740 | return sorted(self.classes) | |
741 | ||
742 | def getclass(self, classname): | |
743 | """Get the Class object representing a particular class. | |
744 | ||
745 | If 'classname' is not a valid class name, a KeyError is raised. | |
746 | """ | |
747 | try: | |
748 | return self.classes[classname] | |
749 | except KeyError: | |
750 | raise KeyError('There is no class called "%s"'%classname) | |
751 | ||
    def clear(self):
        """Delete all database contents.

        Note: I don't commit here, which is different behaviour to the
        "nuke from orbit" behaviour in the dbs.
        """
        logging.getLogger('hyperdb').info('clear')
        # only the class data tables are cleared; journal and multilink
        # tables are left to the backend's commit/rollback semantics
        for cn in self.classes:
            sql = 'delete from _%s'%cn
            self.sql(sql)
762 | ||
763 | # | |
764 | # Nodes | |
765 | # | |
766 | ||
    # hyperdb property class -> callable converting a hyperdb value to
    # its SQL representation; consulted (with a subclass fallback) by
    # to_sql_value()
    hyperdb_to_sql_value = {
        hyperdb.String : str,
        # fractional seconds by default
        hyperdb.Date   : lambda x: x.formal(sep=' ', sec='%06.3f'),
        hyperdb.Link   : int,
        hyperdb.Interval  : str,
        hyperdb.Password  : str,
        hyperdb.Boolean   : lambda x: x and 'TRUE' or 'FALSE',
        hyperdb.Number    : lambda x: x,
        hyperdb.Multilink : lambda x: x,    # used in journal marshalling
    }
778 | ||
779 | def to_sql_value(self, propklass): | |
780 | ||
781 | fn = self.hyperdb_to_sql_value.get(propklass) | |
782 | if fn: | |
783 | return fn | |
784 | ||
785 | for k, v in self.hyperdb_to_sql_value.iteritems(): | |
786 | if issubclass(propklass, k): | |
787 | return v | |
788 | ||
789 | raise ValueError('%r is not a hyperdb property class' % propklass) | |
790 | ||
    def addnode(self, classname, nodeid, node):
        """ Add the specified node to its class's db.

            'node' maps property names to values.  Missing properties
            are defaulted (None, or [] for multilinks); creation audit
            properties are filled in unless supplied (as on import).
        """
        self.log_debug('addnode %s%s %r'%(classname,
            nodeid, node))

        # determine the column definitions and multilink tables
        cl = self.classes[classname]
        cols, mls = self.determine_columns(list(cl.properties.iteritems()))

        # we'll be supplied these props if we're doing an import
        values = node.copy()
        if 'creator' not in values:
            # add in the "calculated" properties (dupe so we don't affect
            # calling code's node assumptions)
            values['creation'] = values['activity'] = date.Date()
            values['actor'] = values['creator'] = self.getuid()

        cl = self.classes[classname]
        props = cl.getprops(protected=1)
        del props['id']

        # default the non-multilink columns
        for col, prop in props.iteritems():
            if col not in values:
                if isinstance(prop, Multilink):
                    values[col] = []
                else:
                    values[col] = None

        # clear this node out of the cache if it's in there
        key = (classname, nodeid)
        if key in self.cache:
            del self.cache[key]
            self.cache_lru.remove(key)

        # figure the values to insert
        vals = []
        for col,dt in cols:
            # this is somewhat dodgy....
            if col.endswith('_int__'):
                # XXX eugh, this test suxxors
                # strip the '__' prefix and '_int__' suffix to recover
                # the property name
                value = values[col[2:-6]]
                # this is an Interval special "int" column
                if value is not None:
                    vals.append(value.as_seconds())
                else:
                    vals.append(value)
                continue

            # ordinary property column: strip the '_' prefix
            prop = props[col[1:]]
            value = values[col[1:]]
            if value is not None:
                value = self.to_sql_value(prop.__class__)(value)
            vals.append(value)
        vals.append(nodeid)
        vals = tuple(vals)

        # make sure the ordering is correct for column name -> column value
        s = ','.join([self.arg for x in cols]) + ',%s'%self.arg
        cols = ','.join([col for col,dt in cols]) + ',id'

        # perform the inserts
        sql = 'insert into _%s (%s) values (%s)'%(classname, cols, s)
        self.sql(sql, vals)

        # insert the multilink rows
        for col in mls:
            t = '%s_%s'%(classname, col)
            for entry in node[col]:
                sql = 'insert into %s (linkid, nodeid) values (%s,%s)'%(t,
                    self.arg, self.arg)
                self.sql(sql, (entry, nodeid))
864 | ||
865 | def setnode(self, classname, nodeid, values, multilink_changes={}): | |
866 | """ Change the specified node. | |
867 | """ | |
868 | self.log_debug('setnode %s%s %r' | |
869 | % (classname, nodeid, values)) | |
870 | ||
871 | # clear this node out of the cache if it's in there | |
872 | key = (classname, nodeid) | |
873 | if key in self.cache: | |
874 | del self.cache[key] | |
875 | self.cache_lru.remove(key) | |
876 | ||
877 | cl = self.classes[classname] | |
878 | props = cl.getprops() | |
879 | ||
880 | cols = [] | |
881 | mls = [] | |
882 | # add the multilinks separately | |
883 | for col in values: | |
884 | prop = props[col] | |
885 | if isinstance(prop, Multilink): | |
886 | mls.append(col) | |
887 | elif isinstance(prop, Interval): | |
888 | # Intervals store the seconds value too | |
889 | cols.append(col) | |
890 | # extra leading '_' added by code below | |
891 | cols.append('_' +col + '_int__') | |
892 | else: | |
893 | cols.append(col) | |
894 | cols.sort() | |
895 | ||
896 | # figure the values to insert | |
897 | vals = [] | |
898 | for col in cols: | |
899 | if col.endswith('_int__'): | |
900 | # XXX eugh, this test suxxors | |
901 | # Intervals store the seconds value too | |
902 | col = col[1:-6] | |
903 | prop = props[col] | |
904 | value = values[col] | |
905 | if value is None: | |
906 | vals.append(None) | |
907 | else: | |
908 | vals.append(value.as_seconds()) | |
909 | else: | |
910 | prop = props[col] | |
911 | value = values[col] | |
912 | if value is None: | |
913 | e = None | |
914 | else: | |
915 | e = self.to_sql_value(prop.__class__)(value) | |
916 | vals.append(e) | |
917 | ||
918 | vals.append(int(nodeid)) | |
919 | vals = tuple(vals) | |
920 | ||
921 | # if there's any updates to regular columns, do them | |
922 | if cols: | |
923 | # make sure the ordering is correct for column name -> column value | |
924 | s = ','.join(['_%s=%s'%(x, self.arg) for x in cols]) | |
925 | cols = ','.join(cols) | |
926 | ||
927 | # perform the update | |
928 | sql = 'update _%s set %s where id=%s'%(classname, s, self.arg) | |
929 | self.sql(sql, vals) | |
930 | ||
931 | # we're probably coming from an import, not a change | |
932 | if not multilink_changes: | |
933 | for name in mls: | |
934 | prop = props[name] | |
935 | value = values[name] | |
936 | ||
937 | t = '%s_%s'%(classname, name) | |
938 | ||
939 | # clear out previous values for this node | |
940 | # XXX numeric ids | |
941 | self.sql('delete from %s where nodeid=%s'%(t, self.arg), | |
942 | (nodeid,)) | |
943 | ||
944 | # insert the values for this node | |
945 | for entry in values[name]: | |
946 | sql = 'insert into %s (linkid, nodeid) values (%s,%s)'%(t, | |
947 | self.arg, self.arg) | |
948 | # XXX numeric ids | |
949 | self.sql(sql, (entry, nodeid)) | |
950 | ||
951 | # we have multilink changes to apply | |
952 | for col, (add, remove) in multilink_changes.iteritems(): | |
953 | tn = '%s_%s'%(classname, col) | |
954 | if add: | |
955 | sql = 'insert into %s (nodeid, linkid) values (%s,%s)'%(tn, | |
956 | self.arg, self.arg) | |
957 | for addid in add: | |
958 | # XXX numeric ids | |
959 | self.sql(sql, (int(nodeid), int(addid))) | |
960 | if remove: | |
961 | s = ','.join([self.arg]*len(remove)) | |
962 | sql = 'delete from %s where nodeid=%s and linkid in (%s)'%(tn, | |
963 | self.arg, s) | |
964 | # XXX numeric ids | |
965 | self.sql(sql, [int(nodeid)] + remove) | |
966 | ||
    # Map hyperdb property classes to converters turning the raw SQL
    # value back into the hyperdb (Python-side) value.  Looked up (with
    # subclass fallback) by to_hyperdb_value() below; the inverse of the
    # to_sql_value() mapping.
    sql_to_hyperdb_value = {
        hyperdb.String : str,
        # SQL timestamps come back with a space separator; hyperdb Dates
        # expect '.' between date and time
        hyperdb.Date : lambda x:date.Date(str(x).replace(' ', '.')),
#        hyperdb.Link : int,      # XXX numeric ids
        hyperdb.Link : str,
        hyperdb.Interval : date.Interval,
        hyperdb.Password : lambda x: password.Password(encrypted=x),
        hyperdb.Boolean : _bool_cvt,
        hyperdb.Number : _num_cvt,
        hyperdb.Multilink : lambda x: x,    # used in journal marshalling
    }
978 | ||
979 | def to_hyperdb_value(self, propklass): | |
980 | ||
981 | fn = self.sql_to_hyperdb_value.get(propklass) | |
982 | if fn: | |
983 | return fn | |
984 | ||
985 | for k, v in self.sql_to_hyperdb_value.iteritems(): | |
986 | if issubclass(propklass, k): | |
987 | return v | |
988 | ||
989 | raise ValueError('%r is not a hyperdb property class' % propklass) | |
990 | ||
991 | def getnode(self, classname, nodeid): | |
992 | """ Get a node from the database. | |
993 | """ | |
994 | # see if we have this node cached | |
995 | key = (classname, nodeid) | |
996 | if key in self.cache: | |
997 | # push us back to the top of the LRU | |
998 | self.cache_lru.remove(key) | |
999 | self.cache_lru.insert(0, key) | |
1000 | if __debug__: | |
1001 | self.stats['cache_hits'] += 1 | |
1002 | # return the cached information | |
1003 | return self.cache[key] | |
1004 | ||
1005 | if __debug__: | |
1006 | self.stats['cache_misses'] += 1 | |
1007 | start_t = time.time() | |
1008 | ||
1009 | # figure the columns we're fetching | |
1010 | cl = self.classes[classname] | |
1011 | cols, mls = self.determine_columns(list(cl.properties.iteritems())) | |
1012 | scols = ','.join([col for col,dt in cols]) | |
1013 | ||
1014 | # perform the basic property fetch | |
1015 | sql = 'select %s from _%s where id=%s'%(scols, classname, self.arg) | |
1016 | self.sql(sql, (nodeid,)) | |
1017 | ||
1018 | values = self.sql_fetchone() | |
1019 | if values is None: | |
1020 | raise IndexError('no such %s node %s'%(classname, nodeid)) | |
1021 | ||
1022 | # make up the node | |
1023 | node = {} | |
1024 | props = cl.getprops(protected=1) | |
1025 | for col in range(len(cols)): | |
1026 | name = cols[col][0][1:] | |
1027 | if name.endswith('_int__'): | |
1028 | # XXX eugh, this test suxxors | |
1029 | # ignore the special Interval-as-seconds column | |
1030 | continue | |
1031 | value = values[col] | |
1032 | if value is not None: | |
1033 | value = self.to_hyperdb_value(props[name].__class__)(value) | |
1034 | node[name] = value | |
1035 | ||
1036 | ||
1037 | # now the multilinks | |
1038 | for col in mls: | |
1039 | # get the link ids | |
1040 | sql = 'select linkid from %s_%s where nodeid=%s'%(classname, col, | |
1041 | self.arg) | |
1042 | self.sql(sql, (nodeid,)) | |
1043 | # extract the first column from the result | |
1044 | # XXX numeric ids | |
1045 | items = [int(x[0]) for x in self.cursor.fetchall()] | |
1046 | items.sort () | |
1047 | node[col] = [str(x) for x in items] | |
1048 | ||
1049 | # save off in the cache | |
1050 | key = (classname, nodeid) | |
1051 | self.cache[key] = node | |
1052 | # update the LRU | |
1053 | self.cache_lru.insert(0, key) | |
1054 | if len(self.cache_lru) > self.cache_size: | |
1055 | del self.cache[self.cache_lru.pop()] | |
1056 | ||
1057 | if __debug__: | |
1058 | self.stats['get_items'] += (time.time() - start_t) | |
1059 | ||
1060 | return node | |
1061 | ||
1062 | def destroynode(self, classname, nodeid): | |
1063 | """Remove a node from the database. Called exclusively by the | |
1064 | destroy() method on Class. | |
1065 | """ | |
1066 | logging.getLogger('hyperdb').info('destroynode %s%s'%(classname, nodeid)) | |
1067 | ||
1068 | # make sure the node exists | |
1069 | if not self.hasnode(classname, nodeid): | |
1070 | raise IndexError('%s has no node %s'%(classname, nodeid)) | |
1071 | ||
1072 | # see if we have this node cached | |
1073 | if (classname, nodeid) in self.cache: | |
1074 | del self.cache[(classname, nodeid)] | |
1075 | ||
1076 | # see if there's any obvious commit actions that we should get rid of | |
1077 | for entry in self.transactions[:]: | |
1078 | if entry[1][:2] == (classname, nodeid): | |
1079 | self.transactions.remove(entry) | |
1080 | ||
1081 | # now do the SQL | |
1082 | sql = 'delete from _%s where id=%s'%(classname, self.arg) | |
1083 | self.sql(sql, (nodeid,)) | |
1084 | ||
1085 | # remove from multilnks | |
1086 | cl = self.getclass(classname) | |
1087 | x, mls = self.determine_columns(list(cl.properties.iteritems())) | |
1088 | for col in mls: | |
1089 | # get the link ids | |
1090 | sql = 'delete from %s_%s where nodeid=%s'%(classname, col, self.arg) | |
1091 | self.sql(sql, (nodeid,)) | |
1092 | ||
1093 | # remove journal entries | |
1094 | sql = 'delete from %s__journal where nodeid=%s'%(classname, self.arg) | |
1095 | self.sql(sql, (nodeid,)) | |
1096 | ||
1097 | # cleanup any blob filestorage when we commit | |
1098 | self.transactions.append((FileStorage.destroy, (self, classname, nodeid))) | |
1099 | ||
1100 | def hasnode(self, classname, nodeid): | |
1101 | """ Determine if the database has a given node. | |
1102 | """ | |
1103 | # If this node is in the cache, then we do not need to go to | |
1104 | # the database. (We don't consider this an LRU hit, though.) | |
1105 | if (classname, nodeid) in self.cache: | |
1106 | # Return 1, not True, to match the type of the result of | |
1107 | # the SQL operation below. | |
1108 | return 1 | |
1109 | sql = 'select count(*) from _%s where id=%s'%(classname, self.arg) | |
1110 | self.sql(sql, (nodeid,)) | |
1111 | return int(self.cursor.fetchone()[0]) | |
1112 | ||
1113 | def countnodes(self, classname): | |
1114 | """ Count the number of nodes that exist for a particular Class. | |
1115 | """ | |
1116 | sql = 'select count(*) from _%s'%classname | |
1117 | self.sql(sql) | |
1118 | return self.cursor.fetchone()[0] | |
1119 | ||
1120 | def addjournal(self, classname, nodeid, action, params, creator=None, | |
1121 | creation=None): | |
1122 | """ Journal the Action | |
1123 | 'action' may be: | |
1124 | ||
1125 | 'create' or 'set' -- 'params' is a dictionary of property values | |
1126 | 'link' or 'unlink' -- 'params' is (classname, nodeid, propname) | |
1127 | 'retire' -- 'params' is None | |
1128 | """ | |
1129 | # handle supply of the special journalling parameters (usually | |
1130 | # supplied on importing an existing database) | |
1131 | if creator: | |
1132 | journaltag = creator | |
1133 | else: | |
1134 | journaltag = self.getuid() | |
1135 | if creation: | |
1136 | journaldate = creation | |
1137 | else: | |
1138 | journaldate = date.Date() | |
1139 | ||
1140 | # create the journal entry | |
1141 | cols = 'nodeid,date,tag,action,params' | |
1142 | ||
1143 | self.log_debug('addjournal %s%s %r %s %s %r'%(classname, | |
1144 | nodeid, journaldate, journaltag, action, params)) | |
1145 | ||
1146 | # make the journalled data marshallable | |
1147 | if isinstance(params, type({})): | |
1148 | self._journal_marshal(params, classname) | |
1149 | ||
1150 | params = repr(params) | |
1151 | ||
1152 | dc = self.to_sql_value(hyperdb.Date) | |
1153 | journaldate = dc(journaldate) | |
1154 | ||
1155 | self.save_journal(classname, cols, nodeid, journaldate, | |
1156 | journaltag, action, params) | |
1157 | ||
1158 | def setjournal(self, classname, nodeid, journal): | |
1159 | """Set the journal to the "journal" list.""" | |
1160 | # clear out any existing entries | |
1161 | self.sql('delete from %s__journal where nodeid=%s'%(classname, | |
1162 | self.arg), (nodeid,)) | |
1163 | ||
1164 | # create the journal entry | |
1165 | cols = 'nodeid,date,tag,action,params' | |
1166 | ||
1167 | dc = self.to_sql_value(hyperdb.Date) | |
1168 | for nodeid, journaldate, journaltag, action, params in journal: | |
1169 | self.log_debug('addjournal %s%s %r %s %s %r'%( | |
1170 | classname, nodeid, journaldate, journaltag, action, | |
1171 | params)) | |
1172 | ||
1173 | # make the journalled data marshallable | |
1174 | if isinstance(params, type({})): | |
1175 | self._journal_marshal(params, classname) | |
1176 | params = repr(params) | |
1177 | ||
1178 | self.save_journal(classname, cols, nodeid, dc(journaldate), | |
1179 | journaltag, action, params) | |
1180 | ||
1181 | def _journal_marshal(self, params, classname): | |
1182 | """Convert the journal params values into safely repr'able and | |
1183 | eval'able values.""" | |
1184 | properties = self.getclass(classname).getprops() | |
1185 | for param, value in params.iteritems(): | |
1186 | if not value: | |
1187 | continue | |
1188 | property = properties[param] | |
1189 | cvt = self.to_sql_value(property.__class__) | |
1190 | if isinstance(property, Password): | |
1191 | params[param] = cvt(value) | |
1192 | elif isinstance(property, Date): | |
1193 | params[param] = cvt(value) | |
1194 | elif isinstance(property, Interval): | |
1195 | params[param] = cvt(value) | |
1196 | elif isinstance(property, Boolean): | |
1197 | params[param] = cvt(value) | |
1198 | ||
1199 | def getjournal(self, classname, nodeid): | |
1200 | """ get the journal for id | |
1201 | """ | |
1202 | # make sure the node exists | |
1203 | if not self.hasnode(classname, nodeid): | |
1204 | raise IndexError('%s has no node %s'%(classname, nodeid)) | |
1205 | ||
1206 | cols = ','.join('nodeid date tag action params'.split()) | |
1207 | journal = self.load_journal(classname, cols, nodeid) | |
1208 | ||
1209 | # now unmarshal the data | |
1210 | dc = self.to_hyperdb_value(hyperdb.Date) | |
1211 | res = [] | |
1212 | properties = self.getclass(classname).getprops() | |
1213 | for nodeid, date_stamp, user, action, params in journal: | |
1214 | params = eval(params) | |
1215 | if isinstance(params, type({})): | |
1216 | for param, value in params.iteritems(): | |
1217 | if not value: | |
1218 | continue | |
1219 | property = properties.get(param, None) | |
1220 | if property is None: | |
1221 | # deleted property | |
1222 | continue | |
1223 | cvt = self.to_hyperdb_value(property.__class__) | |
1224 | if isinstance(property, Password): | |
1225 | params[param] = cvt(value) | |
1226 | elif isinstance(property, Date): | |
1227 | params[param] = cvt(value) | |
1228 | elif isinstance(property, Interval): | |
1229 | params[param] = cvt(value) | |
1230 | elif isinstance(property, Boolean): | |
1231 | params[param] = cvt(value) | |
1232 | # XXX numeric ids | |
1233 | res.append((str(nodeid), dc(date_stamp), user, action, params)) | |
1234 | return res | |
1235 | ||
1236 | def save_journal(self, classname, cols, nodeid, journaldate, | |
1237 | journaltag, action, params): | |
1238 | """ Save the journal entry to the database | |
1239 | """ | |
1240 | entry = (nodeid, journaldate, journaltag, action, params) | |
1241 | ||
1242 | # do the insert | |
1243 | a = self.arg | |
1244 | sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%( | |
1245 | classname, cols, a, a, a, a, a) | |
1246 | self.sql(sql, entry) | |
1247 | ||
1248 | def load_journal(self, classname, cols, nodeid): | |
1249 | """ Load the journal from the database | |
1250 | """ | |
1251 | # now get the journal entries | |
1252 | sql = 'select %s from %s__journal where nodeid=%s order by date'%( | |
1253 | cols, classname, self.arg) | |
1254 | self.sql(sql, (nodeid,)) | |
1255 | return self.cursor.fetchall() | |
1256 | ||
1257 | def pack(self, pack_before): | |
1258 | """ Delete all journal entries except "create" before 'pack_before'. | |
1259 | """ | |
1260 | date_stamp = self.to_sql_value(Date)(pack_before) | |
1261 | ||
1262 | # do the delete | |
1263 | for classname in self.classes: | |
1264 | sql = "delete from %s__journal where date<%s and "\ | |
1265 | "action<>'create'"%(classname, self.arg) | |
1266 | self.sql(sql, (date_stamp,)) | |
1267 | ||
1268 | def sql_commit(self, fail_ok=False): | |
1269 | """ Actually commit to the database. | |
1270 | """ | |
1271 | logging.getLogger('hyperdb').info('commit') | |
1272 | ||
1273 | self.conn.commit() | |
1274 | ||
1275 | # open a new cursor for subsequent work | |
1276 | self.cursor = self.conn.cursor() | |
1277 | ||
1278 | def commit(self, fail_ok=False): | |
1279 | """ Commit the current transactions. | |
1280 | ||
1281 | Save all data changed since the database was opened or since the | |
1282 | last commit() or rollback(). | |
1283 | ||
1284 | fail_ok indicates that the commit is allowed to fail. This is used | |
1285 | in the web interface when committing cleaning of the session | |
1286 | database. We don't care if there's a concurrency issue there. | |
1287 | ||
1288 | The only backend this seems to affect is postgres. | |
1289 | """ | |
1290 | # commit the database | |
1291 | self.sql_commit(fail_ok) | |
1292 | ||
1293 | # now, do all the other transaction stuff | |
1294 | for method, args in self.transactions: | |
1295 | method(*args) | |
1296 | ||
1297 | # save the indexer | |
1298 | self.indexer.save_index() | |
1299 | ||
1300 | # clear out the transactions | |
1301 | self.transactions = [] | |
1302 | ||
    def sql_rollback(self):
        """ Roll back the current SQL transaction on the connection.

            Overridable hook; backends with different rollback needs
            replace this.
        """
        self.conn.rollback()
1305 | ||
1306 | def rollback(self): | |
1307 | """ Reverse all actions from the current transaction. | |
1308 | ||
1309 | Undo all the changes made since the database was opened or the last | |
1310 | commit() or rollback() was performed. | |
1311 | """ | |
1312 | logging.getLogger('hyperdb').info('rollback') | |
1313 | ||
1314 | self.sql_rollback() | |
1315 | ||
1316 | # roll back "other" transaction stuff | |
1317 | for method, args in self.transactions: | |
1318 | # delete temporary files | |
1319 | if method == self.doStoreFile: | |
1320 | self.rollbackStoreFile(*args) | |
1321 | self.transactions = [] | |
1322 | ||
1323 | # clear the cache | |
1324 | self.clearCache() | |
1325 | ||
    def sql_close(self):
        """ Close the SQL connection.

            Overridable hook; see close() for the full shutdown
            sequence.
        """
        logging.getLogger('hyperdb').info('close')
        self.conn.close()
1329 | ||
    def close(self):
        """ Close off the connection.

            Shuts down the indexer first, then the SQL connection via
            the sql_close() hook.
        """
        self.indexer.close()
        self.sql_close()
1335 | ||
1336 | # | |
1337 | # The base Class class | |
1338 | # | |
1339 | class Class(hyperdb.Class): | |
1340 | """ The handle to a particular class of nodes in a hyperdatabase. | |
1341 | ||
1342 | All methods except __repr__ and getnode must be implemented by a | |
1343 | concrete backend Class. | |
1344 | """ | |
1345 | ||
1346 | def schema(self): | |
1347 | """ A dumpable version of the schema that we can store in the | |
1348 | database | |
1349 | """ | |
1350 | return (self.key, [(x, repr(y)) for x,y in self.properties.iteritems()]) | |
1351 | ||
    def enableJournalling(self):
        """Turn journalling on for this class
        """
        # flag consulted (as self.do_journal) before journal writes,
        # e.g. in create_inner()
        self.do_journal = 1
1356 | ||
    def disableJournalling(self):
        """Turn journalling off for this class
        """
        # flag consulted (as self.do_journal) before journal writes,
        # e.g. in create_inner()
        self.do_journal = 0
1361 | ||
1362 | # Editing nodes: | |
1363 | def create(self, **propvalues): | |
1364 | """ Create a new node of this class and return its id. | |
1365 | ||
1366 | The keyword arguments in 'propvalues' map property names to values. | |
1367 | ||
1368 | The values of arguments must be acceptable for the types of their | |
1369 | corresponding properties or a TypeError is raised. | |
1370 | ||
1371 | If this class has a key property, it must be present and its value | |
1372 | must not collide with other key strings or a ValueError is raised. | |
1373 | ||
1374 | Any other properties on this class that are missing from the | |
1375 | 'propvalues' dictionary are set to None. | |
1376 | ||
1377 | If an id in a link or multilink property does not refer to a valid | |
1378 | node, an IndexError is raised. | |
1379 | """ | |
1380 | self.fireAuditors('create', None, propvalues) | |
1381 | newid = self.create_inner(**propvalues) | |
1382 | self.fireReactors('create', newid, None) | |
1383 | return newid | |
1384 | ||
1385 | def create_inner(self, **propvalues): | |
1386 | """ Called by create, in-between the audit and react calls. | |
1387 | """ | |
1388 | if 'id' in propvalues: | |
1389 | raise KeyError('"id" is reserved') | |
1390 | ||
1391 | if self.db.journaltag is None: | |
1392 | raise DatabaseError(_('Database open read-only')) | |
1393 | ||
1394 | if ('creator' in propvalues or 'actor' in propvalues or | |
1395 | 'creation' in propvalues or 'activity' in propvalues): | |
1396 | raise KeyError('"creator", "actor", "creation" and ' | |
1397 | '"activity" are reserved') | |
1398 | ||
1399 | # new node's id | |
1400 | newid = self.db.newid(self.classname) | |
1401 | ||
1402 | # validate propvalues | |
1403 | num_re = re.compile('^\d+$') | |
1404 | for key, value in propvalues.iteritems(): | |
1405 | if key == self.key: | |
1406 | try: | |
1407 | self.lookup(value) | |
1408 | except KeyError: | |
1409 | pass | |
1410 | else: | |
1411 | raise ValueError('node with key "%s" exists'%value) | |
1412 | ||
1413 | # try to handle this property | |
1414 | try: | |
1415 | prop = self.properties[key] | |
1416 | except KeyError: | |
1417 | raise KeyError('"%s" has no property "%s"'%(self.classname, | |
1418 | key)) | |
1419 | ||
1420 | if value is not None and isinstance(prop, Link): | |
1421 | if type(value) != type(''): | |
1422 | raise ValueError('link value must be String') | |
1423 | link_class = self.properties[key].classname | |
1424 | # if it isn't a number, it's a key | |
1425 | if not num_re.match(value): | |
1426 | try: | |
1427 | value = self.db.classes[link_class].lookup(value) | |
1428 | except (TypeError, KeyError): | |
1429 | raise IndexError('new property "%s": %s not a %s'%( | |
1430 | key, value, link_class)) | |
1431 | elif not self.db.getclass(link_class).hasnode(value): | |
1432 | raise IndexError('%s has no node %s'%(link_class, | |
1433 | value)) | |
1434 | ||
1435 | # save off the value | |
1436 | propvalues[key] = value | |
1437 | ||
1438 | # register the link with the newly linked node | |
1439 | if self.do_journal and self.properties[key].do_journal: | |
1440 | self.db.addjournal(link_class, value, 'link', | |
1441 | (self.classname, newid, key)) | |
1442 | ||
1443 | elif isinstance(prop, Multilink): | |
1444 | if value is None: | |
1445 | value = [] | |
1446 | if not hasattr(value, '__iter__'): | |
1447 | raise TypeError('new property "%s" not an iterable of ids'%key) | |
1448 | # clean up and validate the list of links | |
1449 | link_class = self.properties[key].classname | |
1450 | l = [] | |
1451 | for entry in value: | |
1452 | if type(entry) != type(''): | |
1453 | raise ValueError('"%s" multilink value (%r) ' | |
1454 | 'must contain Strings'%(key, value)) | |
1455 | # if it isn't a number, it's a key | |
1456 | if not num_re.match(entry): | |
1457 | try: | |
1458 | entry = self.db.classes[link_class].lookup(entry) | |
1459 | except (TypeError, KeyError): | |
1460 | raise IndexError('new property "%s": %s not a %s'%( | |
1461 | key, entry, self.properties[key].classname)) | |
1462 | l.append(entry) | |
1463 | value = l | |
1464 | propvalues[key] = value | |
1465 | ||
1466 | # handle additions | |
1467 | for nodeid in value: | |
1468 | if not self.db.getclass(link_class).hasnode(nodeid): | |
1469 | raise IndexError('%s has no node %s'%(link_class, | |
1470 | nodeid)) | |
1471 | # register the link with the newly linked node | |
1472 | if self.do_journal and self.properties[key].do_journal: | |
1473 | self.db.addjournal(link_class, nodeid, 'link', | |
1474 | (self.classname, newid, key)) | |
1475 | ||
1476 | elif isinstance(prop, String): | |
1477 | if type(value) != type('') and type(value) != type(u''): | |
1478 | raise TypeError('new property "%s" not a string'%key) | |
1479 | if prop.indexme: | |
1480 | self.db.indexer.add_text((self.classname, newid, key), | |
1481 | value) | |
1482 | ||
1483 | elif isinstance(prop, Password): | |
1484 | if not isinstance(value, password.Password): | |
1485 | raise TypeError('new property "%s" not a Password'%key) | |
1486 | ||
1487 | elif isinstance(prop, Date): | |
1488 | if value is not None and not isinstance(value, date.Date): | |
1489 | raise TypeError('new property "%s" not a Date'%key) | |
1490 | ||
1491 | elif isinstance(prop, Interval): | |
1492 | if value is not None and not isinstance(value, date.Interval): | |
1493 | raise TypeError('new property "%s" not an Interval'%key) | |
1494 | ||
1495 | elif value is not None and isinstance(prop, Number): | |
1496 | try: | |
1497 | float(value) | |
1498 | except ValueError: | |
1499 | raise TypeError('new property "%s" not numeric'%key) | |
1500 | ||
1501 | elif value is not None and isinstance(prop, Boolean): | |
1502 | try: | |
1503 | int(value) | |
1504 | except ValueError: | |
1505 | raise TypeError('new property "%s" not boolean'%key) | |
1506 | ||
1507 | # make sure there's data where there needs to be | |
1508 | for key, prop in self.properties.iteritems(): | |
1509 | if key in propvalues: | |
1510 | continue | |
1511 | if key == self.key: | |
1512 | raise ValueError('key property "%s" is required'%key) | |
1513 | if isinstance(prop, Multilink): | |
1514 | propvalues[key] = [] | |
1515 | else: | |
1516 | propvalues[key] = None | |
1517 | ||
1518 | # done | |
1519 | self.db.addnode(self.classname, newid, propvalues) | |
1520 | if self.do_journal: | |
1521 | self.db.addjournal(self.classname, newid, ''"create", {}) | |
1522 | ||
1523 | # XXX numeric ids | |
1524 | return str(newid) | |
1525 | ||
1526 | def get(self, nodeid, propname, default=_marker, cache=1): | |
1527 | """Get the value of a property on an existing node of this class. | |
1528 | ||
1529 | 'nodeid' must be the id of an existing node of this class or an | |
1530 | IndexError is raised. 'propname' must be the name of a property | |
1531 | of this class or a KeyError is raised. | |
1532 | ||
1533 | 'cache' exists for backwards compatibility, and is not used. | |
1534 | """ | |
1535 | if propname == 'id': | |
1536 | return nodeid | |
1537 | ||
1538 | # get the node's dict | |
1539 | d = self.db.getnode(self.classname, nodeid) | |
1540 | ||
1541 | if propname == 'creation': | |
1542 | if 'creation' in d: | |
1543 | return d['creation'] | |
1544 | else: | |
1545 | return date.Date() | |
1546 | if propname == 'activity': | |
1547 | if 'activity' in d: | |
1548 | return d['activity'] | |
1549 | else: | |
1550 | return date.Date() | |
1551 | if propname == 'creator': | |
1552 | if 'creator' in d: | |
1553 | return d['creator'] | |
1554 | else: | |
1555 | return self.db.getuid() | |
1556 | if propname == 'actor': | |
1557 | if 'actor' in d: | |
1558 | return d['actor'] | |
1559 | else: | |
1560 | return self.db.getuid() | |
1561 | ||
1562 | # get the property (raises KeyErorr if invalid) | |
1563 | prop = self.properties[propname] | |
1564 | ||
1565 | # handle there being no value in the table for the property | |
1566 | if propname not in d or d[propname] is None: | |
1567 | if default is _marker: | |
1568 | if isinstance(prop, Multilink): | |
1569 | return [] | |
1570 | else: | |
1571 | return None | |
1572 | else: | |
1573 | return default | |
1574 | ||
1575 | # don't pass our list to other code | |
1576 | if isinstance(prop, Multilink): | |
1577 | return d[propname][:] | |
1578 | ||
1579 | return d[propname] | |
1580 | ||
1581 | def set(self, nodeid, **propvalues): | |
1582 | """Modify a property on an existing node of this class. | |
1583 | ||
1584 | 'nodeid' must be the id of an existing node of this class or an | |
1585 | IndexError is raised. | |
1586 | ||
1587 | Each key in 'propvalues' must be the name of a property of this | |
1588 | class or a KeyError is raised. | |
1589 | ||
1590 | All values in 'propvalues' must be acceptable types for their | |
1591 | corresponding properties or a TypeError is raised. | |
1592 | ||
1593 | If the value of the key property is set, it must not collide with | |
1594 | other key strings or a ValueError is raised. | |
1595 | ||
1596 | If the value of a Link or Multilink property contains an invalid | |
1597 | node id, a ValueError is raised. | |
1598 | """ | |
1599 | self.fireAuditors('set', nodeid, propvalues) | |
1600 | oldvalues = copy.deepcopy(self.db.getnode(self.classname, nodeid)) | |
1601 | propvalues = self.set_inner(nodeid, **propvalues) | |
1602 | self.fireReactors('set', nodeid, oldvalues) | |
1603 | return propvalues | |
1604 | ||
1605 | def set_inner(self, nodeid, **propvalues): | |
1606 | """ Called by set, in-between the audit and react calls. | |
1607 | """ | |
1608 | if not propvalues: | |
1609 | return propvalues | |
1610 | ||
1611 | if ('creator' in propvalues or 'actor' in propvalues or | |
1612 | 'creation' in propvalues or 'activity' in propvalues): | |
1613 | raise KeyError('"creator", "actor", "creation" and ' | |
1614 | '"activity" are reserved') | |
1615 | ||
1616 | if 'id' in propvalues: | |
1617 | raise KeyError('"id" is reserved') | |
1618 | ||
1619 | if self.db.journaltag is None: | |
1620 | raise DatabaseError(_('Database open read-only')) | |
1621 | ||
1622 | node = self.db.getnode(self.classname, nodeid) | |
1623 | if self.is_retired(nodeid): | |
1624 | raise IndexError('Requested item is retired') | |
1625 | num_re = re.compile('^\d+$') | |
1626 | ||
1627 | # make a copy of the values dictionary - we'll modify the contents | |
1628 | propvalues = propvalues.copy() | |
1629 | ||
1630 | # if the journal value is to be different, store it in here | |
1631 | journalvalues = {} | |
1632 | ||
1633 | # remember the add/remove stuff for multilinks, making it easier | |
1634 | # for the Database layer to do its stuff | |
1635 | multilink_changes = {} | |
1636 | ||
1637 | for propname, value in list(propvalues.items()): | |
1638 | # check to make sure we're not duplicating an existing key | |
1639 | if propname == self.key and node[propname] != value: | |
1640 | try: | |
1641 | self.lookup(value) | |
1642 | except KeyError: | |
1643 | pass | |
1644 | else: | |
1645 | raise ValueError('node with key "%s" exists'%value) | |
1646 | ||
1647 | # this will raise the KeyError if the property isn't valid | |
1648 | # ... we don't use getprops() here because we only care about | |
1649 | # the writeable properties. | |
1650 | try: | |
1651 | prop = self.properties[propname] | |
1652 | except KeyError: | |
1653 | raise KeyError('"%s" has no property named "%s"'%( | |
1654 | self.classname, propname)) | |
1655 | ||
1656 | # if the value's the same as the existing value, no sense in | |
1657 | # doing anything | |
1658 | current = node.get(propname, None) | |
1659 | if value == current: | |
1660 | del propvalues[propname] | |
1661 | continue | |
1662 | journalvalues[propname] = current | |
1663 | ||
1664 | # do stuff based on the prop type | |
1665 | if isinstance(prop, Link): | |
1666 | link_class = prop.classname | |
1667 | # if it isn't a number, it's a key | |
1668 | if value is not None and not isinstance(value, type('')): | |
1669 | raise ValueError('property "%s" link value be a string'%( | |
1670 | propname)) | |
1671 | if isinstance(value, type('')) and not num_re.match(value): | |
1672 | try: | |
1673 | value = self.db.classes[link_class].lookup(value) | |
1674 | except (TypeError, KeyError): | |
1675 | raise IndexError('new property "%s": %s not a %s'%( | |
1676 | propname, value, prop.classname)) | |
1677 | ||
1678 | if (value is not None and | |
1679 | not self.db.getclass(link_class).hasnode(value)): | |
1680 | raise IndexError('%s has no node %s'%(link_class, | |
1681 | value)) | |
1682 | ||
1683 | if self.do_journal and prop.do_journal: | |
1684 | # register the unlink with the old linked node | |
1685 | if node[propname] is not None: | |
1686 | self.db.addjournal(link_class, node[propname], | |
1687 | ''"unlink", (self.classname, nodeid, propname)) | |
1688 | ||
1689 | # register the link with the newly linked node | |
1690 | if value is not None: | |
1691 | self.db.addjournal(link_class, value, ''"link", | |
1692 | (self.classname, nodeid, propname)) | |
1693 | ||
1694 | elif isinstance(prop, Multilink): | |
1695 | if value is None: | |
1696 | value = [] | |
1697 | if not hasattr(value, '__iter__'): | |
1698 | raise TypeError('new property "%s" not an iterable of' | |
1699 | ' ids'%propname) | |
1700 | link_class = self.properties[propname].classname | |
1701 | l = [] | |
1702 | for entry in value: | |
1703 | # if it isn't a number, it's a key | |
1704 | if type(entry) != type(''): | |
1705 | raise ValueError('new property "%s" link value ' | |
1706 | 'must be a string'%propname) | |
1707 | if not num_re.match(entry): | |
1708 | try: | |
1709 | entry = self.db.classes[link_class].lookup(entry) | |
1710 | except (TypeError, KeyError): | |
1711 | raise IndexError('new property "%s": %s not a %s'%( | |
1712 | propname, entry, | |
1713 | self.properties[propname].classname)) | |
1714 | l.append(entry) | |
1715 | value = l | |
1716 | propvalues[propname] = value | |
1717 | ||
1718 | # figure the journal entry for this property | |
1719 | add = [] | |
1720 | remove = [] | |
1721 | ||
1722 | # handle removals | |
1723 | if propname in node: | |
1724 | l = node[propname] | |
1725 | else: | |
1726 | l = [] | |
1727 | for id in l[:]: | |
1728 | if id in value: | |
1729 | continue | |
1730 | # register the unlink with the old linked node | |
1731 | if self.do_journal and self.properties[propname].do_journal: | |
1732 | self.db.addjournal(link_class, id, 'unlink', | |
1733 | (self.classname, nodeid, propname)) | |
1734 | l.remove(id) | |
1735 | remove.append(id) | |
1736 | ||
1737 | # handle additions | |
1738 | for id in value: | |
1739 | if id in l: | |
1740 | continue | |
1741 | # We can safely check this condition after | |
1742 | # checking that this is an addition to the | |
1743 | # multilink since the condition was checked for | |
1744 | # existing entries at the point they were added to | |
1745 | # the multilink. Since the hasnode call will | |
1746 | # result in a SQL query, it is more efficient to | |
1747 | # avoid the check if possible. | |
1748 | if not self.db.getclass(link_class).hasnode(id): | |
1749 | raise IndexError('%s has no node %s'%(link_class, | |
1750 | id)) | |
1751 | # register the link with the newly linked node | |
1752 | if self.do_journal and self.properties[propname].do_journal: | |
1753 | self.db.addjournal(link_class, id, 'link', | |
1754 | (self.classname, nodeid, propname)) | |
1755 | l.append(id) | |
1756 | add.append(id) | |
1757 | ||
1758 | # figure the journal entry | |
1759 | l = [] | |
1760 | if add: | |
1761 | l.append(('+', add)) | |
1762 | if remove: | |
1763 | l.append(('-', remove)) | |
1764 | multilink_changes[propname] = (add, remove) | |
1765 | if l: | |
1766 | journalvalues[propname] = tuple(l) | |
1767 | ||
1768 | elif isinstance(prop, String): | |
1769 | if value is not None and type(value) != type('') and type(value) != type(u''): | |
1770 | raise TypeError('new property "%s" not a string'%propname) | |
1771 | if prop.indexme: | |
1772 | if value is None: value = '' | |
1773 | self.db.indexer.add_text((self.classname, nodeid, propname), | |
1774 | value) | |
1775 | ||
1776 | elif isinstance(prop, Password): | |
1777 | if not isinstance(value, password.Password): | |
1778 | raise TypeError('new property "%s" not a Password'%propname) | |
1779 | propvalues[propname] = value | |
1780 | ||
1781 | elif value is not None and isinstance(prop, Date): | |
1782 | if not isinstance(value, date.Date): | |
1783 | raise TypeError('new property "%s" not a Date'% propname) | |
1784 | propvalues[propname] = value | |
1785 | ||
1786 | elif value is not None and isinstance(prop, Interval): | |
1787 | if not isinstance(value, date.Interval): | |
1788 | raise TypeError('new property "%s" not an ' | |
1789 | 'Interval'%propname) | |
1790 | propvalues[propname] = value | |
1791 | ||
1792 | elif value is not None and isinstance(prop, Number): | |
1793 | try: | |
1794 | float(value) | |
1795 | except ValueError: | |
1796 | raise TypeError('new property "%s" not numeric'%propname) | |
1797 | ||
1798 | elif value is not None and isinstance(prop, Boolean): | |
1799 | try: | |
1800 | int(value) | |
1801 | except ValueError: | |
1802 | raise TypeError('new property "%s" not boolean'%propname) | |
1803 | ||
1804 | # nothing to do? | |
1805 | if not propvalues: | |
1806 | return propvalues | |
1807 | ||
1808 | # update the activity time | |
1809 | propvalues['activity'] = date.Date() | |
1810 | propvalues['actor'] = self.db.getuid() | |
1811 | ||
1812 | # do the set | |
1813 | self.db.setnode(self.classname, nodeid, propvalues, multilink_changes) | |
1814 | ||
1815 | # remove the activity props now they're handled | |
1816 | del propvalues['activity'] | |
1817 | del propvalues['actor'] | |
1818 | ||
1819 | # journal the set | |
1820 | if self.do_journal: | |
1821 | self.db.addjournal(self.classname, nodeid, ''"set", journalvalues) | |
1822 | ||
1823 | return propvalues | |
1824 | ||
1825 | def retire(self, nodeid): | |
1826 | """Retire a node. | |
1827 | ||
1828 | The properties on the node remain available from the get() method, | |
1829 | and the node's id is never reused. | |
1830 | ||
1831 | Retired nodes are not returned by the find(), list(), or lookup() | |
1832 | methods, and other nodes may reuse the values of their key properties. | |
1833 | """ | |
1834 | if self.db.journaltag is None: | |
1835 | raise DatabaseError(_('Database open read-only')) | |
1836 | ||
1837 | self.fireAuditors('retire', nodeid, None) | |
1838 | ||
1839 | # use the arg for __retired__ to cope with any odd database type | |
1840 | # conversion (hello, sqlite) | |
1841 | sql = 'update _%s set __retired__=%s where id=%s'%(self.classname, | |
1842 | self.db.arg, self.db.arg) | |
1843 | self.db.sql(sql, (nodeid, nodeid)) | |
1844 | if self.do_journal: | |
1845 | self.db.addjournal(self.classname, nodeid, ''"retired", None) | |
1846 | ||
1847 | self.fireReactors('retire', nodeid, None) | |
1848 | ||
1849 | def restore(self, nodeid): | |
1850 | """Restore a retired node. | |
1851 | ||
1852 | Make node available for all operations like it was before retirement. | |
1853 | """ | |
1854 | if self.db.journaltag is None: | |
1855 | raise DatabaseError(_('Database open read-only')) | |
1856 | ||
1857 | node = self.db.getnode(self.classname, nodeid) | |
1858 | # check if key property was overrided | |
1859 | key = self.getkey() | |
1860 | try: | |
1861 | id = self.lookup(node[key]) | |
1862 | except KeyError: | |
1863 | pass | |
1864 | else: | |
1865 | raise KeyError("Key property (%s) of retired node clashes " | |
1866 | "with existing one (%s)" % (key, node[key])) | |
1867 | ||
1868 | self.fireAuditors('restore', nodeid, None) | |
1869 | # use the arg for __retired__ to cope with any odd database type | |
1870 | # conversion (hello, sqlite) | |
1871 | sql = 'update _%s set __retired__=%s where id=%s'%(self.classname, | |
1872 | self.db.arg, self.db.arg) | |
1873 | self.db.sql(sql, (0, nodeid)) | |
1874 | if self.do_journal: | |
1875 | self.db.addjournal(self.classname, nodeid, ''"restored", None) | |
1876 | ||
1877 | self.fireReactors('restore', nodeid, None) | |
1878 | ||
    def is_retired(self, nodeid):
        """Return true if the node is retired (__retired__ column > 0).
        """
        sql = 'select __retired__ from _%s where id=%s'%(self.classname,
            self.db.arg)
        self.db.sql(sql, (nodeid,))
        # int() copes with backends returning the column as a string/decimal
        return int(self.db.sql_fetchone()[0]) > 0
1886 | ||
    def destroy(self, nodeid):
        """Destroy a node.

        WARNING: this method should never be used except in extremely rare
                 situations where there could never be links to the node being
                 deleted

        WARNING: use retire() instead

        WARNING: the properties of this node will not be available ever again

        WARNING: really, use retire() instead

        Well, I think that's enough warnings. This method exists mostly to
        support the session storage of the cgi interface.

        The node is completely removed from the hyperdb, including all journal
        entries. It will no longer be available, and will generally break code
        if there are any references to the node.
        """
        if self.db.journaltag is None:
            raise DatabaseError(_('Database open read-only'))
        # delegate row, multilink and journal removal to the database layer
        self.db.destroynode(self.classname, nodeid)
1910 | ||
1911 | def history(self, nodeid): | |
1912 | """Retrieve the journal of edits on a particular node. | |
1913 | ||
1914 | 'nodeid' must be the id of an existing node of this class or an | |
1915 | IndexError is raised. | |
1916 | ||
1917 | The returned list contains tuples of the form | |
1918 | ||
1919 | (nodeid, date, tag, action, params) | |
1920 | ||
1921 | 'date' is a Timestamp object specifying the time of the change and | |
1922 | 'tag' is the journaltag specified when the database was opened. | |
1923 | """ | |
1924 | if not self.do_journal: | |
1925 | raise ValueError('Journalling is disabled for this class') | |
1926 | return self.db.getjournal(self.classname, nodeid) | |
1927 | ||
1928 | # Locating nodes: | |
    def hasnode(self, nodeid):
        """Determine if the given nodeid actually exists
        """
        # delegated entirely to the database layer
        return self.db.hasnode(self.classname, nodeid)
1933 | ||
1934 | def setkey(self, propname): | |
1935 | """Select a String property of this class to be the key property. | |
1936 | ||
1937 | 'propname' must be the name of a String property of this class or | |
1938 | None, or a TypeError is raised. The values of the key property on | |
1939 | all existing nodes must be unique or a ValueError is raised. | |
1940 | """ | |
1941 | prop = self.getprops()[propname] | |
1942 | if not isinstance(prop, String): | |
1943 | raise TypeError('key properties must be String') | |
1944 | self.key = propname | |
1945 | ||
    def getkey(self):
        """Return the name of the key property for this class or None."""
        # self.key is established by setkey()
        return self.key
1949 | ||
1950 | def lookup(self, keyvalue): | |
1951 | """Locate a particular node by its key property and return its id. | |
1952 | ||
1953 | If this class has no key property, a TypeError is raised. If the | |
1954 | 'keyvalue' matches one of the values for the key property among | |
1955 | the nodes in this class, the matching node's id is returned; | |
1956 | otherwise a KeyError is raised. | |
1957 | """ | |
1958 | if not self.key: | |
1959 | raise TypeError('No key property set for class %s'%self.classname) | |
1960 | ||
1961 | # use the arg to handle any odd database type conversion (hello, | |
1962 | # sqlite) | |
1963 | sql = "select id from _%s where _%s=%s and __retired__=%s"%( | |
1964 | self.classname, self.key, self.db.arg, self.db.arg) | |
1965 | self.db.sql(sql, (str(keyvalue), 0)) | |
1966 | ||
1967 | # see if there was a result that's not retired | |
1968 | row = self.db.sql_fetchone() | |
1969 | if not row: | |
1970 | raise KeyError('No key (%s) value "%s" for "%s"'%(self.key, | |
1971 | keyvalue, self.classname)) | |
1972 | ||
1973 | # return the id | |
1974 | # XXX numeric ids | |
1975 | return str(row[0]) | |
1976 | ||
    def find(self, **propspec):
        """Get the ids of nodes in this class which link to the given nodes.

        'propspec' consists of keyword args propname=nodeid or
                   propname={nodeid:1, }
        'propname' must be the name of a property in this class, or a
                   KeyError is raised.  That property must be a Link or
                   Multilink property, or a TypeError is raised.

        Any node in this class whose 'propname' property links to any of
        the nodeids will be returned. Examples::

            db.issue.find(messages='1')
            db.issue.find(messages={'1':1,'3':1}, files={'7':1})
        """
        # shortcut
        if not propspec:
            return []

        # validate the args
        props = self.getprops()
        for propname, nodeids in propspec.iteritems():
            # check the prop is OK
            prop = props[propname]
            if not isinstance(prop, Link) and not isinstance(prop, Multilink):
                raise TypeError("'%s' not a Link/Multilink property"%propname)

        # first, links: build one select over the class table covering all
        # Link props in the spec
        a = self.db.arg
        allvalues = ()
        sql = []
        where = []
        for prop, values in propspec.iteritems():
            if not isinstance(props[prop], hyperdb.Link):
                continue
            # a single-entry dict is treated like a single id value
            if type(values) is type({}) and len(values) == 1:
                values = list(values)[0]
            if type(values) is type(''):
                allvalues += (values,)
                where.append('_%s = %s'%(prop, a))
            elif values is None:
                where.append('_%s is NULL'%prop)
            else:
                # a collection of ids; None in it means "or unset"
                values = list(values)
                s = ''
                if None in values:
                    values.remove(None)
                    s = '_%s is NULL or '%prop
                allvalues += tuple(values)
                s += '_%s in (%s)'%(prop, ','.join([a]*len(values)))
                where.append('(' + s +')')
        if where:
            # the leading 0 binds the __retired__=%s placeholder
            allvalues = (0, ) + allvalues
            sql.append("""select id from _%s where  __retired__=%s
                and %s"""%(self.classname, a, ' and '.join(where)))

        # now multilinks: one select per Multilink prop, joining through the
        # intermediate table.  Empty values ({} or []) match nothing and are
        # skipped.
        for prop, values in propspec.iteritems():
            if not isinstance(props[prop], hyperdb.Multilink):
                continue
            if not values:
                continue
            allvalues += (0, )
            if type(values) is type(''):
                allvalues += (values,)
                s = a
            else:
                allvalues += tuple(values)
                s = ','.join([a]*len(values))
            tn = '%s_%s'%(self.classname, prop)
            sql.append("""select id from _%s, %s where  __retired__=%s
                and id = %s.nodeid and %s.linkid in (%s)"""%(self.classname,
                tn, a, tn, tn, s))

        if not sql:
            return []
        # the individual selects are combined with UNION, so argument order
        # must match the order the selects were appended above
        sql = ' union '.join(sql)
        self.db.sql(sql, allvalues)
        # XXX numeric ids
        l = [str(x[0]) for x in self.db.sql_fetchall()]
        return l
2058 | ||
2059 | def stringFind(self, **requirements): | |
2060 | """Locate a particular node by matching a set of its String | |
2061 | properties in a caseless search. | |
2062 | ||
2063 | If the property is not a String property, a TypeError is raised. | |
2064 | ||
2065 | The return is a list of the id of all nodes that match. | |
2066 | """ | |
2067 | where = [] | |
2068 | args = [] | |
2069 | for propname in requirements: | |
2070 | prop = self.properties[propname] | |
2071 | if not isinstance(prop, String): | |
2072 | raise TypeError("'%s' not a String property"%propname) | |
2073 | where.append(propname) | |
2074 | args.append(requirements[propname].lower()) | |
2075 | ||
2076 | # generate the where clause | |
2077 | s = ' and '.join(['lower(_%s)=%s'%(col, self.db.arg) for col in where]) | |
2078 | sql = 'select id from _%s where %s and __retired__=%s'%( | |
2079 | self.classname, s, self.db.arg) | |
2080 | args.append(0) | |
2081 | self.db.sql(sql, tuple(args)) | |
2082 | # XXX numeric ids | |
2083 | l = [str(x[0]) for x in self.db.sql_fetchall()] | |
2084 | return l | |
2085 | ||
    def list(self):
        """ Return a list of the ids of the active nodes in this class.
        """
        # "active" means not retired (__retired__ == 0)
        return self.getnodeids(retired=0)
2090 | ||
2091 | def getnodeids(self, retired=None): | |
2092 | """ Retrieve all the ids of the nodes for a particular Class. | |
2093 | ||
2094 | Set retired=None to get all nodes. Otherwise it'll get all the | |
2095 | retired or non-retired nodes, depending on the flag. | |
2096 | """ | |
2097 | # flip the sense of the 'retired' flag if we don't want all of them | |
2098 | if retired is not None: | |
2099 | args = (0, ) | |
2100 | if retired: | |
2101 | compare = '>' | |
2102 | else: | |
2103 | compare = '=' | |
2104 | sql = 'select id from _%s where __retired__%s%s'%(self.classname, | |
2105 | compare, self.db.arg) | |
2106 | else: | |
2107 | args = () | |
2108 | sql = 'select id from _%s'%self.classname | |
2109 | self.db.sql(sql, args) | |
2110 | # XXX numeric ids | |
2111 | ids = [str(x[0]) for x in self.db.cursor.fetchall()] | |
2112 | return ids | |
2113 | ||
2114 | def _subselect(self, classname, multilink_table): | |
2115 | """Create a subselect. This is factored out because some | |
2116 | databases (hmm only one, so far) doesn't support subselects | |
2117 | look for "I can't believe it's not a toy RDBMS" in the mysql | |
2118 | backend. | |
2119 | """ | |
2120 | return '_%s.id not in (select nodeid from %s)'%(classname, | |
2121 | multilink_table) | |
2122 | ||
    # Some DBs order NULL values last. Set this variable in the backend
    # for prepending an order by clause for each attribute that causes
    # correct sort order for NULLs. Examples:
    # order_by_null_values = '(%s is not NULL)'
    # order_by_null_values = 'notnull(%s)'
    # The format parameter is replaced with the attribute.
    # (consulted by filter() when it emits ORDER BY clauses)
    order_by_null_values = None
2130 | ||
    def filter(self, search_matches, filterspec, sort=[], group=[]):
        """Return a list of the ids of the active nodes in this class that
        match the 'filter' spec, sorted by the group spec and then the
        sort spec

        "filterspec" is {propname: value(s)}

        "sort" and "group" are [(dir, prop), ...] where dir is '+', '-'
        or None and prop is a prop name or None. Note that for
        backward-compatibility reasons a single (dir, prop) tuple is
        also allowed.

        "search_matches" is a container type or None

        The filter must match all properties specified. If the property
        value to match is a list:

        1. String properties must match all elements in the list, and
        2. Other properties must match any of the elements in the list.

        NOTE(review): sort=[] and group=[] are mutable default arguments
        shared across calls; this is only safe as long as neither this
        method nor _sortattr ever mutates them -- confirm before changing.
        """
        # we can't match anything if search_matches is empty
        if not search_matches and search_matches is not None:
            return []

        if __debug__:
            start_t = time.time()

        icn = self.classname

        # vars to hold the components of the SQL statement
        frum = []       # FROM clauses
        loj = []        # LEFT OUTER JOIN clauses
        where = []      # WHERE clauses
        args = []       # *any* positional arguments
        a = self.db.arg

        # figure the WHERE clause from the filterspec
        mlfilt = 0      # are we joining with Multilink tables?
        sortattr = self._sortattr (group = group, sort = sort)
        proptree = self._proptree(filterspec, sortattr)
        mlseen = 0
        # Walk each sort attribute up to the root: once a Multilink appears
        # anywhere on the path, SQL ordering can't be used for it -- mark
        # the whole path so the ids are sorted in Python (proptree.sort)
        # instead.  Presumably sort_type encodes 0=filter-only, 1=both,
        # 2=sort-only -- confirm against _proptree.
        for pt in reversed(proptree.sortattr):
            p = pt
            while p.parent:
                if isinstance (p.propclass, Multilink):
                    mlseen = True
                if mlseen:
                    p.sort_ids_needed = True
                    p.tree_sort_done = False
                p = p.parent
            if not mlseen:
                pt.attr_sort_done = pt.tree_sort_done = True
        proptree.compute_sort_done()

        ordercols = []      # columns appended to the SELECT for ordering
        auxcols = {}        # order column -> proptree node, for python sort
        mlsort = []
        rhsnum = 0
        for p in proptree:
            oc = None       # order column for this node, if any
            cn = p.classname
            ln = p.uniqname
            pln = p.parent.uniqname
            pcn = p.parent.classname
            k = p.name
            v = p.val
            propclass = p.propclass
            if p.sort_type > 0:
                oc = ac = '_%s._%s'%(pln, k)
            if isinstance(propclass, Multilink):
                if p.sort_type < 2:
                    mlfilt = 1
                    tn = '%s_%s'%(pcn, k)
                    if v in ('-1', ['-1'], []):
                        # only match rows that have count(linkid)=0 in the
                        # corresponding multilink table)
                        where.append(self._subselect(pcn, tn))
                    else:
                        frum.append(tn)
                        where.append('_%s.id=%s.nodeid'%(pln,tn))
                        if p.children:
                            frum.append('_%s as _%s' % (cn, ln))
                            where.append('%s.linkid=_%s.id'%(tn, ln))
                        if p.has_values:
                            if isinstance(v, type([])):
                                s = ','.join([a for x in v])
                                where.append('%s.linkid in (%s)'%(tn, s))
                                args = args + v
                            else:
                                where.append('%s.linkid=%s'%(tn, a))
                                args.append(v)
                if p.sort_type > 0:
                    assert not p.attr_sort_done and not p.sort_ids_needed
            elif k == 'id':
                if p.sort_type < 2:
                    if isinstance(v, type([])):
                        s = ','.join([a for x in v])
                        where.append('_%s.%s in (%s)'%(pln, k, s))
                        args = args + v
                    else:
                        where.append('_%s.%s=%s'%(pln, k, a))
                        args.append(v)
                if p.sort_type > 0:
                    oc = ac = '_%s.id'%pln
            elif isinstance(propclass, String):
                if p.sort_type < 2:
                    if not isinstance(v, type([])):
                        v = [v]

                    # Quote the bits in the string that need it and then embed
                    # in a "substring" search. Note - need to quote the '%' so
                    # they make it through the python layer happily
                    v = ['%%'+self.db.sql_stringquote(s)+'%%' for s in v]

                    # now add to the where clause
                    where.append('('
                        +' and '.join(["_%s._%s LIKE '%s'"%(pln, k, s) for s in v])
                        +')')
                    # note: args are embedded in the query string now
                if p.sort_type > 0:
                    oc = ac = 'lower(_%s._%s)'%(pln, k)
            elif isinstance(propclass, Link):
                if p.sort_type < 2:
                    if p.children:
                        if p.sort_type == 0:
                            frum.append('_%s as _%s' % (cn, ln))
                        where.append('_%s._%s=_%s.id'%(pln, k, ln))
                    if p.has_values:
                        if isinstance(v, type([])):
                            # de-dup the values; '-1' is an alias for "unset"
                            d = {}
                            for entry in v:
                                if entry == '-1':
                                    entry = None
                                d[entry] = entry
                            l = []
                            if None in d or not d:
                                if None in d: del d[None]
                                l.append('_%s._%s is NULL'%(pln, k))
                            if d:
                                v = list(d)
                                s = ','.join([a for x in v])
                                l.append('(_%s._%s in (%s))'%(pln, k, s))
                                args = args + v
                            if l:
                                where.append('(' + ' or '.join(l) +')')
                        else:
                            if v in ('-1', None):
                                v = None
                                where.append('_%s._%s is NULL'%(pln, k))
                            else:
                                where.append('_%s._%s=%s'%(pln, k, a))
                                args.append(v)
                if p.sort_type > 0:
                    # sort Links by their label property; join the linked
                    # table in when the label isn't just the id
                    lp = p.cls.labelprop()
                    oc = ac = '_%s._%s'%(pln, k)
                    if lp != 'id':
                        if p.tree_sort_done and p.sort_type > 0:
                            loj.append(
                                'LEFT OUTER JOIN _%s as _%s on _%s._%s=_%s.id'%(
                                cn, ln, pln, k, ln))
                        oc = '_%s._%s'%(ln, lp)
            elif isinstance(propclass, Date) and p.sort_type < 2:
                dc = self.db.to_sql_value(hyperdb.Date)
                if isinstance(v, type([])):
                    s = ','.join([a for x in v])
                    where.append('_%s._%s in (%s)'%(pln, k, s))
                    args = args + [dc(date.Date(x)) for x in v]
                else:
                    try:
                        # Try to filter on range of dates
                        date_rng = propclass.range_from_raw(v, self.db)
                        if date_rng.from_value:
                            where.append('_%s._%s >= %s'%(pln, k, a))
                            args.append(dc(date_rng.from_value))
                        if date_rng.to_value:
                            where.append('_%s._%s <= %s'%(pln, k, a))
                            args.append(dc(date_rng.to_value))
                    except ValueError:
                        # If range creation fails - ignore that search parameter
                        pass
            elif isinstance(propclass, Interval):
                # filter/sort using the __<prop>_int__ column
                if p.sort_type < 2:
                    if isinstance(v, type([])):
                        s = ','.join([a for x in v])
                        where.append('_%s.__%s_int__ in (%s)'%(pln, k, s))
                        args = args + [date.Interval(x).as_seconds() for x in v]
                    else:
                        try:
                            # Try to filter on range of intervals
                            date_rng = Range(v, date.Interval)
                            if date_rng.from_value:
                                where.append('_%s.__%s_int__ >= %s'%(pln, k, a))
                                args.append(date_rng.from_value.as_seconds())
                            if date_rng.to_value:
                                where.append('_%s.__%s_int__ <= %s'%(pln, k, a))
                                args.append(date_rng.to_value.as_seconds())
                        except ValueError:
                            # If range creation fails - ignore search parameter
                            pass
                if p.sort_type > 0:
                    oc = ac = '_%s.__%s_int__'%(pln,k)
            elif isinstance(propclass, Boolean) and p.sort_type < 2:
                # accept 'yes,no'-style raw strings as well as bools
                if type(v) == type(""):
                    v = v.split(',')
                if type(v) != type([]):
                    v = [v]
                bv = []
                for val in v:
                    if type(val) is type(''):
                        bv.append(propclass.from_raw (val))
                    else:
                        bv.append(bool(val))
                if len(bv) == 1:
                    where.append('_%s._%s=%s'%(pln, k, a))
                    args = args + bv
                else:
                    s = ','.join([a for x in v])
                    where.append('_%s._%s in (%s)'%(pln, k, s))
                    args = args + bv
            elif p.sort_type < 2:
                # fallback for all other property types: direct comparison
                if isinstance(v, type([])):
                    s = ','.join([a for x in v])
                    where.append('_%s._%s in (%s)'%(pln, k, s))
                    args = args + v
                else:
                    where.append('_%s._%s=%s'%(pln, k, a))
                    args.append(v)
            if oc:
                if p.sort_ids_needed:
                    auxcols[ac] = p
                if p.tree_sort_done and p.sort_direction:
                    # Don't select top-level id twice
                    if p.name != 'id' or p.parent != proptree:
                        ordercols.append(oc)
                    desc = ['', ' desc'][p.sort_direction == '-']
                    # Some SQL dbs sort NULL values last -- we want them first.
                    if (self.order_by_null_values and p.name != 'id'):
                        nv = self.order_by_null_values % oc
                        ordercols.append(nv)
                        p.orderby.append(nv + desc)
                    p.orderby.append(oc + desc)

        props = self.getprops()

        # don't match retired nodes
        where.append('_%s.__retired__=0'%icn)

        # add results of full text search
        if search_matches is not None:
            s = ','.join([a for x in search_matches])
            where.append('_%s.id in (%s)'%(icn, s))
            args = args + [x for x in search_matches]

        # construct the SQL
        frum.append('_'+icn)
        frum = ','.join(frum)
        if where:
            where = ' where ' + (' and '.join(where))
        else:
            where = ''
        if mlfilt:
            # we're joining tables on the id, so we will get dupes if we
            # don't distinct()
            cols = ['distinct(_%s.id)'%icn]
        else:
            cols = ['_%s.id'%icn]
        if ordercols:
            cols = cols + ordercols
        order = []
        # keep correct sequence of order attributes.
        for sa in proptree.sortattr:
            if not sa.attr_sort_done:
                continue
            order.extend(sa.orderby)
        if order:
            order = ' order by %s'%(','.join(order))
        else:
            order = ''
        # auxiliary columns are fetched so proptree.sort can order in python
        for o, p in auxcols.iteritems ():
            cols.append (o)
            p.auxcol = len (cols) - 1

        cols = ','.join(cols)
        loj = ' '.join(loj)
        sql = 'select %s from %s %s %s%s'%(cols, frum, loj, where, order)
        args = tuple(args)
        __traceback_info__ = (sql, args)
        self.db.sql(sql, args)
        l = self.db.sql_fetchall()

        # Compute values needed for sorting in proptree.sort
        for p in auxcols.itervalues():
            p.sort_ids = p.sort_result = [row[p.auxcol] for row in l]
        # return the IDs (the first column)
        # XXX numeric ids
        l = [str(row[0]) for row in l]
        l = proptree.sort (l)

        if __debug__:
            self.db.stats['filtering'] += (time.time() - start_t)
        return l
2433 | ||
2434 | def filter_sql(self, sql): | |
2435 | """Return a list of the ids of the items in this class that match | |
2436 | the SQL provided. The SQL is a complete "select" statement. | |
2437 | ||
2438 | The SQL select must include the item id as the first column. | |
2439 | ||
2440 | This function DOES NOT filter out retired items, add on a where | |
2441 | clause "__retired__=0" if you don't want retired nodes. | |
2442 | """ | |
2443 | if __debug__: | |
2444 | start_t = time.time() | |
2445 | ||
2446 | self.db.sql(sql) | |
2447 | l = self.db.sql_fetchall() | |
2448 | ||
2449 | if __debug__: | |
2450 | self.db.stats['filtering'] += (time.time() - start_t) | |
2451 | return l | |
2452 | ||
    def count(self):
        """Get the number of nodes in this class.

        If the returned integer is 'numnodes', the ids of all the nodes
        in this class run from 1 to numnodes, and numnodes+1 will be the
        id of the next node to be created in this class.
        """
        # delegated to the database layer
        return self.db.countnodes(self.classname)
2461 | ||
2462 | # Manipulating properties: | |
2463 | def getprops(self, protected=1): | |
2464 | """Return a dictionary mapping property names to property objects. | |
2465 | If the "protected" flag is true, we include protected properties - | |
2466 | those which may not be modified. | |
2467 | """ | |
2468 | d = self.properties.copy() | |
2469 | if protected: | |
2470 | d['id'] = String() | |
2471 | d['creation'] = hyperdb.Date() | |
2472 | d['activity'] = hyperdb.Date() | |
2473 | d['creator'] = hyperdb.Link('user') | |
2474 | d['actor'] = hyperdb.Link('user') | |
2475 | return d | |
2476 | ||
2477 | def addprop(self, **properties): | |
2478 | """Add properties to this class. | |
2479 | ||
2480 | The keyword arguments in 'properties' must map names to property | |
2481 | objects, or a TypeError is raised. None of the keys in 'properties' | |
2482 | may collide with the names of existing properties, or a ValueError | |
2483 | is raised before any properties have been added. | |
2484 | """ | |
2485 | for key in properties: | |
2486 | if key in self.properties: | |
2487 | raise ValueError(key) | |
2488 | self.properties.update(properties) | |
2489 | ||
2490 | def index(self, nodeid): | |
2491 | """Add (or refresh) the node to search indexes | |
2492 | """ | |
2493 | # find all the String properties that have indexme | |
2494 | for prop, propclass in self.getprops().iteritems(): | |
2495 | if isinstance(propclass, String) and propclass.indexme: | |
2496 | self.db.indexer.add_text((self.classname, nodeid, prop), | |
2497 | str(self.get(nodeid, prop))) | |
2498 | ||
2499 | # | |
2500 | # import / export support | |
2501 | # | |
2502 | def export_list(self, propnames, nodeid): | |
2503 | """ Export a node - generate a list of CSV-able data in the order | |
2504 | specified by propnames for the given node. | |
2505 | """ | |
2506 | properties = self.getprops() | |
2507 | l = [] | |
2508 | for prop in propnames: | |
2509 | proptype = properties[prop] | |
2510 | value = self.get(nodeid, prop) | |
2511 | # "marshal" data where needed | |
2512 | if value is None: | |
2513 | pass | |
2514 | elif isinstance(proptype, hyperdb.Date): | |
2515 | value = value.get_tuple() | |
2516 | elif isinstance(proptype, hyperdb.Interval): | |
2517 | value = value.get_tuple() | |
2518 | elif isinstance(proptype, hyperdb.Password): | |
2519 | value = str(value) | |
2520 | l.append(repr(value)) | |
2521 | l.append(repr(self.is_retired(nodeid))) | |
2522 | return l | |
2523 | ||
2524 | def import_list(self, propnames, proplist): | |
2525 | """ Import a node - all information including "id" is present and | |
2526 | should not be sanity checked. Triggers are not triggered. The | |
2527 | journal should be initialised using the "creator" and "created" | |
2528 | information. | |
2529 | ||
2530 | Return the nodeid of the node imported. | |
2531 | """ | |
2532 | if self.db.journaltag is None: | |
2533 | raise DatabaseError(_('Database open read-only')) | |
2534 | properties = self.getprops() | |
2535 | ||
2536 | # make the new node's property map | |
2537 | d = {} | |
2538 | retire = 0 | |
2539 | if not "id" in propnames: | |
2540 | newid = self.db.newid(self.classname) | |
2541 | else: | |
2542 | newid = eval(proplist[propnames.index("id")]) | |
2543 | for i in range(len(propnames)): | |
2544 | # Use eval to reverse the repr() used to output the CSV | |
2545 | value = eval(proplist[i]) | |
2546 | ||
2547 | # Figure the property for this column | |
2548 | propname = propnames[i] | |
2549 | ||
2550 | # "unmarshal" where necessary | |
2551 | if propname == 'id': | |
2552 | continue | |
2553 | elif propname == 'is retired': | |
2554 | # is the item retired? | |
2555 | if int(value): | |
2556 | retire = 1 | |
2557 | continue | |
2558 | elif value is None: | |
2559 | d[propname] = None | |
2560 | continue | |
2561 | ||
2562 | prop = properties[propname] | |
2563 | if value is None: | |
2564 | # don't set Nones | |
2565 | continue | |
2566 | elif isinstance(prop, hyperdb.Date): | |
2567 | value = date.Date(value) | |
2568 | elif isinstance(prop, hyperdb.Interval): | |
2569 | value = date.Interval(value) | |
2570 | elif isinstance(prop, hyperdb.Password): | |
2571 | pwd = password.Password() | |
2572 | pwd.unpack(value) | |
2573 | value = pwd | |
2574 | elif isinstance(prop, String): | |
2575 | if isinstance(value, unicode): | |
2576 | value = value.encode('utf8') | |
2577 | if not isinstance(value, str): | |
2578 | raise TypeError('new property "%(propname)s" not a ' | |
2579 | 'string: %(value)r'%locals()) | |
2580 | if prop.indexme: | |
2581 | self.db.indexer.add_text((self.classname, newid, propname), | |
2582 | value) | |
2583 | d[propname] = value | |
2584 | ||
2585 | # get a new id if necessary | |
2586 | if newid is None: | |
2587 | newid = self.db.newid(self.classname) | |
2588 | ||
2589 | # insert new node or update existing? | |
2590 | if not self.hasnode(newid): | |
2591 | self.db.addnode(self.classname, newid, d) # insert | |
2592 | else: | |
2593 | self.db.setnode(self.classname, newid, d) # update | |
2594 | ||
2595 | # retire? | |
2596 | if retire: | |
2597 | # use the arg for __retired__ to cope with any odd database type | |
2598 | # conversion (hello, sqlite) | |
2599 | sql = 'update _%s set __retired__=%s where id=%s'%(self.classname, | |
2600 | self.db.arg, self.db.arg) | |
2601 | self.db.sql(sql, (newid, newid)) | |
2602 | return newid | |
2603 | ||
2604 | def export_journals(self): | |
2605 | """Export a class's journal - generate a list of lists of | |
2606 | CSV-able data: | |
2607 | ||
2608 | nodeid, date, user, action, params | |
2609 | ||
2610 | No heading here - the columns are fixed. | |
2611 | """ | |
2612 | properties = self.getprops() | |
2613 | r = [] | |
2614 | for nodeid in self.getnodeids(): | |
2615 | for nodeid, date, user, action, params in self.history(nodeid): | |
2616 | date = date.get_tuple() | |
2617 | if action == 'set': | |
2618 | export_data = {} | |
2619 | for propname, value in params.iteritems(): | |
2620 | if propname not in properties: | |
2621 | # property no longer in the schema | |
2622 | continue | |
2623 | ||
2624 | prop = properties[propname] | |
2625 | # make sure the params are eval()'able | |
2626 | if value is None: | |
2627 | pass | |
2628 | elif isinstance(prop, Date): | |
2629 | value = value.get_tuple() | |
2630 | elif isinstance(prop, Interval): | |
2631 | value = value.get_tuple() | |
2632 | elif isinstance(prop, Password): | |
2633 | value = str(value) | |
2634 | export_data[propname] = value | |
2635 | params = export_data | |
2636 | elif action == 'create' and params: | |
2637 | # old tracker with data stored in the create! | |
2638 | params = {} | |
2639 | l = [nodeid, date, user, action, params] | |
2640 | r.append(list(map(repr, l))) | |
2641 | return r | |
2642 | ||
2643 | def import_journals(self, entries): | |
2644 | """Import a class's journal. | |
2645 | ||
2646 | Uses setjournal() to set the journal for each item.""" | |
2647 | properties = self.getprops() | |
2648 | d = {} | |
2649 | for l in entries: | |
2650 | nodeid, jdate, user, action, params = map(eval, l) | |
2651 | r = d.setdefault(nodeid, []) | |
2652 | if action == 'set': | |
2653 | for propname, value in params.iteritems(): | |
2654 | prop = properties[propname] | |
2655 | if value is None: | |
2656 | pass | |
2657 | elif isinstance(prop, Date): | |
2658 | value = date.Date(value) | |
2659 | elif isinstance(prop, Interval): | |
2660 | value = date.Interval(value) | |
2661 | elif isinstance(prop, Password): | |
2662 | pwd = password.Password() | |
2663 | pwd.unpack(value) | |
2664 | value = pwd | |
2665 | params[propname] = value | |
2666 | elif action == 'create' and params: | |
2667 | # old tracker with data stored in the create! | |
2668 | params = {} | |
2669 | r.append((nodeid, date.Date(jdate), user, action, params)) | |
2670 | ||
2671 | for nodeid, l in d.iteritems(): | |
2672 | self.db.setjournal(self.classname, nodeid, l) | |
2673 | ||
class FileClass(hyperdb.FileClass, Class):
    """This class defines a large chunk of data. To support this, it has a
    mandatory String property "content" which is typically saved off
    externally to the hyperdb.

    The default MIME type of this data is defined by the
    "default_mime_type" class attribute, which may be overridden by each
    node if the class defines a "type" String property.
    """
    def __init__(self, db, classname, **properties):
        """The newly-created class automatically includes the "content"
        and "type" properties.
        """
        # only add the defaults when the schema doesn't define them itself
        if 'content' not in properties:
            properties['content'] = hyperdb.String(indexme='yes')
        if 'type' not in properties:
            properties['type'] = hyperdb.String()
        Class.__init__(self, db, classname, **properties)

    def create(self, **propvalues):
        """ snaffle the file propvalue and store in a file

        The "content" value is stored via db.storefile() rather than in
        the database row; returns the id of the newly-created node.
        """
        # we need to fire the auditors now, or the content property won't
        # be in propvalues for the auditors to play with
        self.fireAuditors('create', None, propvalues)

        # now remove the content property so it's not stored in the db
        content = propvalues['content']
        del propvalues['content']

        # do the database create
        newid = self.create_inner(**propvalues)

        # figure the mime type
        mime_type = propvalues.get('type', self.default_mime_type)

        # and index!
        if self.properties['content'].indexme:
            self.db.indexer.add_text((self.classname, newid, 'content'),
                content, mime_type)

        # store off the content as a file
        self.db.storefile(self.classname, newid, None, content)

        # fire reactors
        self.fireReactors('create', newid, None)

        return newid

    def get(self, nodeid, propname, default=_marker, cache=1):
        """ Trap the content propname and get it from the file

        'cache' exists for backwards compatibility, and is not used.
        All other properties are delegated to Class.get().
        """
        poss_msg = 'Possibly a access right configuration problem.'
        if propname == 'content':
            try:
                return self.db.getfile(self.classname, nodeid, None)
            except IOError, (strerror):
                # BUG: by catching this we donot see an error in the log.
                # instead, the error text is returned as the "content"
                return 'ERROR reading file: %s%s\n%s\n%s'%(
                        self.classname, nodeid, poss_msg, strerror)
        if default is not _marker:
            return Class.get(self, nodeid, propname, default)
        else:
            return Class.get(self, nodeid, propname)

    def set(self, itemid, **propvalues):
        """ Snarf the "content" propvalue and update it in a file
        """
        self.fireAuditors('set', itemid, propvalues)
        # deep-copy the old node so reactors see the pre-change values
        oldvalues = copy.deepcopy(self.db.getnode(self.classname, itemid))

        # now remove the content property so it's not stored in the db
        content = None
        if 'content' in propvalues:
            content = propvalues['content']
            del propvalues['content']

        # do the database create
        propvalues = self.set_inner(itemid, **propvalues)

        # do content?
        # NOTE(review): empty-string content is skipped by this truth
        # test, not just None - presumably deliberate, but confirm
        if content:
            # store and possibly index
            self.db.storefile(self.classname, itemid, None, content)
            if self.properties['content'].indexme:
                mime_type = self.get(itemid, 'type', self.default_mime_type)
                self.db.indexer.add_text((self.classname, itemid, 'content'),
                    content, mime_type)
            propvalues['content'] = content

        # fire reactors
        self.fireReactors('set', itemid, oldvalues)
        return propvalues

    def index(self, nodeid):
        """ Add (or refresh) the node to search indexes.

        Use the content-type property for the content property.
        """
        # find all the String properties that have indexme
        for prop, propclass in self.getprops().iteritems():
            if prop == 'content' and propclass.indexme:
                # content is indexed with its MIME type so the indexer
                # can extract text appropriately
                mime_type = self.get(nodeid, 'type', self.default_mime_type)
                self.db.indexer.add_text((self.classname, nodeid, 'content'),
                    str(self.get(nodeid, 'content')), mime_type)
            elif isinstance(propclass, hyperdb.String) and propclass.indexme:
                # index them under (classname, nodeid, property)
                try:
                    value = str(self.get(nodeid, prop))
                except IndexError:
                    # node has been destroyed
                    continue
                self.db.indexer.add_text((self.classname, nodeid, prop), value)
2789 | ||
2790 | # XXX deviation from spec - was called ItemClass | |
class IssueClass(Class, roundupdb.IssueClass):
    # Overridden methods:
    def __init__(self, db, classname, **properties):
        """The newly-created class automatically includes the "messages",
        "files", "nosy", and "superseder" properties. If the 'properties'
        dictionary attempts to specify any of these properties or a
        "creation", "creator", "activity" or "actor" property, a ValueError
        is raised.
        """
        # default properties, added only when the schema doesn't supply
        # its own definition; factories defer construction until needed
        defaults = (
            ('title', lambda: hyperdb.String(indexme='yes')),
            ('messages', lambda: hyperdb.Multilink("msg")),
            ('files', lambda: hyperdb.Multilink("file")),
            # note: journalling is turned off as it really just wastes
            # space. this behaviour may be overridden in an instance
            ('nosy', lambda: hyperdb.Multilink("user", do_journal="no")),
            ('superseder', lambda: hyperdb.Multilink(classname)),
        )
        for name, factory in defaults:
            if name not in properties:
                properties[name] = factory()
        Class.__init__(self, db, classname, **properties)
2813 | ||
2814 | # vim: set et sts=4 sw=4 : |