1 | # |
2 | # Copyright (c) 2001 Bizar Software Pty Ltd (http://www.bizarsoftware.com.au/) | |
3 | # This module is free software, and you may redistribute it and/or modify | |
4 | # under the same terms as Python, so long as this copyright message and | |
5 | # disclaimer are retained in their original form. | |
6 | # | |
7 | # IN NO EVENT SHALL BIZAR SOFTWARE PTY LTD BE LIABLE TO ANY PARTY FOR | |
8 | # DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING | |
9 | # OUT OF THE USE OF THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE | |
10 | # POSSIBILITY OF SUCH DAMAGE. | |
11 | # | |
12 | # BIZAR SOFTWARE PTY LTD SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, | |
13 | # BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS | |
14 | # FOR A PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" | |
15 | # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, | |
16 | # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. | |
17 | # | |
18 | """This module defines a backend that saves the hyperdatabase in a | |
19 | database chosen by anydbm. anydbm is guaranteed to be available in Python | |
20 | versions newer than 2.1.1 (the dumbdbm fallback in 2.1.1 and earlier has | |
21 | several serious bugs, so those versions are not supported). | |
22 | """ | |
23 | __docformat__ = 'restructuredtext' | |
24 | ||
25 | import os, marshal, re, weakref, string, copy, time, shutil, logging | |
26 | ||
27 | from roundup.anypy.dbm_ import anydbm, whichdb | |
28 | ||
29 | from roundup import hyperdb, date, password, roundupdb, security, support | |
30 | from roundup.support import reversed | |
31 | from roundup.backends import locking | |
32 | from roundup.i18n import _ | |
33 | ||
34 | from roundup.backends.blobfiles import FileStorage | |
35 | from roundup.backends.sessions_dbm import Sessions, OneTimeKeys | |
36 | ||
37 | try: | |
38 | from roundup.backends.indexer_xapian import Indexer | |
39 | except ImportError: | |
40 | from roundup.backends.indexer_dbm import Indexer | |
41 | ||
42 | def db_exists(config): | |
43 | # check for the user db | |
44 | for db in 'nodes.user nodes.user.db'.split(): | |
45 | if os.path.exists(os.path.join(config.DATABASE, db)): | |
46 | return 1 | |
47 | return 0 | |
48 | ||
49 | def db_nuke(config): | |
50 | shutil.rmtree(config.DATABASE) | |
51 | ||
52 | # | |
53 | # Now the database | |
54 | # | |
55 | class Database(FileStorage, hyperdb.Database, roundupdb.Database): | |
56 | """A database for storing records containing flexible data types. | |
57 | ||
58 | Transaction stuff TODO: | |
59 | ||
60 | - check the timestamp of the class file and nuke the cache if it's | |
61 | modified. Do some sort of conflict checking on the dirty stuff. | |
62 | - perhaps detect write collisions (related to above)? | |
63 | """ | |
64 | def __init__(self, config, journaltag=None): | |
65 | """Open a hyperdatabase given a specifier to some storage. | |
66 | ||
67 | The 'storagelocator' is obtained from config.DATABASE. | |
68 | The meaning of 'storagelocator' depends on the particular | |
69 | implementation of the hyperdatabase. It could be a file name, | |
70 | a directory path, a socket descriptor for a connection to a | |
71 | database over the network, etc. | |
72 | ||
73 | The 'journaltag' is a token that will be attached to the journal | |
74 | entries for any edits done on the database. If 'journaltag' is | |
75 | None, the database is opened in read-only mode: the Class.create(), | |
76 | Class.set(), Class.retire(), and Class.restore() methods are | |
77 | disabled. | |
78 | """ | |
79 | FileStorage.__init__(self, config.UMASK) | |
80 | self.config, self.journaltag = config, journaltag | |
81 | self.dir = config.DATABASE | |
82 | self.classes = {} | |
83 | self.cache = {} # cache of nodes loaded or created | |
84 | self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0, | |
85 | 'filtering': 0} | |
86 | self.dirtynodes = {} # keep track of the dirty nodes by class | |
87 | self.newnodes = {} # keep track of the new nodes by class | |
88 | self.destroyednodes = {}# keep track of the destroyed nodes by class | |
89 | self.transactions = [] | |
90 | self.indexer = Indexer(self) | |
91 | self.security = security.Security(self) | |
92 | os.umask(config.UMASK) | |
93 | ||
94 | # lock it | |
95 | lockfilenm = os.path.join(self.dir, 'lock') | |
96 | self.lockfile = locking.acquire_lock(lockfilenm) | |
97 | self.lockfile.write(str(os.getpid())) | |
98 | self.lockfile.flush() | |
99 | ||
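# A minimal opening sketch (not part of this module; it assumes a loaded
# tracker 'config' object, and 'admin' is just an illustrative journaltag).
# Passing no journaltag opens the database read-only.
#
#   db = Database(config, journaltag='admin')
#   db.post_init()     # reindexes if the indexer says it is needed
#   ...
#   db.close()         # releases the 'lock' file acquired above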
100 | def post_init(self): | |
101 | """Called once the schema initialisation has finished. | |
102 | """ | |
103 | # reindex the db if necessary | |
104 | if self.indexer.should_reindex(): | |
105 | self.reindex() | |
106 | ||
107 | def refresh_database(self): | |
108 | """Rebuild the database | |
109 | """ | |
110 | self.reindex() | |
111 | ||
112 | def getSessionManager(self): | |
113 | return Sessions(self) | |
114 | ||
115 | def getOTKManager(self): | |
116 | return OneTimeKeys(self) | |
117 | ||
118 | def reindex(self, classname=None, show_progress=False): | |
119 | if classname: | |
120 | classes = [self.getclass(classname)] | |
121 | else: | |
122 | classes = self.classes.values() | |
123 | for klass in classes: | |
124 | if show_progress: | |
125 | for nodeid in support.Progress('Reindex %s'%klass.classname, | |
126 | klass.list()): | |
127 | klass.index(nodeid) | |
128 | else: | |
129 | for nodeid in klass.list(): | |
130 | klass.index(nodeid) | |
131 | self.indexer.save_index() | |
132 | ||
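# Usage sketch for reindex() ('issue' is a hypothetical class name):
#
#   db.reindex()                               # rebuild the index for every class
#   db.reindex('issue', show_progress=True)    # one class, with a progress display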
133 | def __repr__(self): | |
134 | return '<back_anydbm instance at %x>'%id(self) | |
135 | ||
136 | # | |
137 | # Classes | |
138 | # | |
139 | def __getattr__(self, classname): | |
140 | """A convenient way of calling self.getclass(classname).""" | |
141 | if classname in self.classes: | |
142 | return self.classes[classname] | |
143 | raise AttributeError(classname) | |
144 | ||
145 | def addclass(self, cl): | |
146 | cn = cl.classname | |
147 | if cn in self.classes: | |
148 | raise ValueError(cn) | |
149 | self.classes[cn] = cl | |
150 | ||
151 | # add default Create, Edit and View permissions | |
152 | self.security.addPermission(name="Create", klass=cn, | |
153 | description="User is allowed to create "+cn) | |
154 | self.security.addPermission(name="Edit", klass=cn, | |
155 | description="User is allowed to edit "+cn) | |
156 | self.security.addPermission(name="View", klass=cn, | |
157 | description="User is allowed to access "+cn) | |
158 | ||
159 | def getclasses(self): | |
160 | """Return a list of the names of all existing classes.""" | |
161 | return sorted(self.classes) | |
162 | ||
163 | def getclass(self, classname): | |
164 | """Get the Class object representing a particular class. | |
165 | ||
166 | If 'classname' is not a valid class name, a KeyError is raised. | |
167 | """ | |
168 | try: | |
169 | return self.classes[classname] | |
170 | except KeyError: | |
171 | raise KeyError('There is no class called "%s"'%classname) | |
172 | ||
173 | # | |
174 | # Class DBs | |
175 | # | |
176 | def clear(self): | |
177 | """Delete all database contents | |
178 | """ | |
179 | logging.getLogger('hyperdb').info('clear') | |
180 | for cn in self.classes: | |
181 | for prefix in 'nodes', 'journals': | |
182 | path = os.path.join(self.dir, '%s.%s'%(prefix, cn)) | |
183 | if os.path.exists(path): | |
184 | os.remove(path) | |
185 | elif os.path.exists(path+'.db'): # dbm appends .db | |
186 | os.remove(path+'.db') | |
187 | # reset id sequences | |
188 | path = os.path.join(os.getcwd(), self.dir, '_ids') | |
189 | if os.path.exists(path): | |
190 | os.remove(path) | |
191 | elif os.path.exists(path+'.db'): # dbm appends .db | |
192 | os.remove(path+'.db') | |
193 | ||
194 | def getclassdb(self, classname, mode='r'): | |
195 | """ grab a connection to the class db that will be used for | |
196 | multiple actions | |
197 | """ | |
198 | return self.opendb('nodes.%s'%classname, mode) | |
199 | ||
200 | def determine_db_type(self, path): | |
201 | """ determine which DB wrote the class file | |
202 | """ | |
203 | db_type = '' | |
204 | if os.path.exists(path): | |
205 | db_type = whichdb(path) | |
206 | if not db_type: | |
207 | raise hyperdb.DatabaseError(_("Couldn't identify database type")) | |
208 | elif os.path.exists(path+'.db'): | |
209 | # if the path ends in '.db', it's a dbm database, whether | |
210 | # anydbm says it's dbhash or not! | |
211 | db_type = 'dbm' | |
212 | return db_type | |
213 | ||
214 | def opendb(self, name, mode): | |
215 | """Low-level database opener that gets around anydbm/dbm | |
216 | eccentricities. | |
217 | """ | |
218 | # figure the class db type | |
219 | path = os.path.join(os.getcwd(), self.dir, name) | |
220 | db_type = self.determine_db_type(path) | |
221 | ||
222 | # new database? let anydbm pick the best dbm | |
223 | # in Python 3+ the "dbm" ("anydbm" to us) module already uses the | |
224 | # whichdb() function to do this | |
225 | if not db_type or hasattr(anydbm, 'whichdb'): | |
226 | if __debug__: | |
227 | logging.getLogger('hyperdb').debug( | |
228 | "opendb anydbm.open(%r, 'c')"%path) | |
229 | return anydbm.open(path, 'c') | |
230 | ||
231 | # in Python <3, anydbm was a little dumb, so manually open the | |
232 | # database with the correct module | |
233 | try: | |
234 | dbm = __import__(db_type) | |
235 | except ImportError: | |
236 | raise hyperdb.DatabaseError(_("Couldn't open database - the " | |
237 | "required module '%s' is not available")%db_type) | |
238 | if __debug__: | |
239 | logging.getLogger('hyperdb').debug( | |
240 | "opendb %r.open(%r, %r)"%(db_type, path, mode)) | |
241 | return dbm.open(path, mode) | |
242 | ||
243 | # | |
244 | # Node IDs | |
245 | # | |
246 | def newid(self, classname): | |
247 | """ Generate a new id for the given class | |
248 | """ | |
249 | # open the ids DB - create it if it doesn't exist | |
250 | db = self.opendb('_ids', 'c') | |
251 | if classname in db: | |
252 | newid = db[classname] = str(int(db[classname]) + 1) | |
253 | else: | |
254 | # the count() bit is transitional - older dbs won't start at 1 | |
255 | newid = str(self.getclass(classname).count()+1) | |
256 | db[classname] = newid | |
257 | db.close() | |
258 | return newid | |
259 | ||
260 | def setid(self, classname, setid): | |
261 | """ Set the id counter: used during import of database | |
262 | """ | |
263 | # open the ids DB - create it if it doesn't exist | |
264 | db = self.opendb('_ids', 'c') | |
265 | db[classname] = str(setid) | |
266 | db.close() | |
267 | ||
268 | # | |
269 | # Nodes | |
270 | # | |
271 | def addnode(self, classname, nodeid, node): | |
272 | """ add the specified node to its class's db | |
273 | """ | |
274 | # we'll be supplied these props if we're doing an import | |
275 | if 'creator' not in node: | |
276 | # add in the "calculated" properties (dupe so we don't affect | |
277 | # calling code's node assumptions) | |
278 | node = node.copy() | |
279 | node['creator'] = self.getuid() | |
280 | node['actor'] = self.getuid() | |
281 | node['creation'] = node['activity'] = date.Date() | |
282 | ||
283 | self.newnodes.setdefault(classname, {})[nodeid] = 1 | |
284 | self.cache.setdefault(classname, {})[nodeid] = node | |
285 | self.savenode(classname, nodeid, node) | |
286 | ||
287 | def setnode(self, classname, nodeid, node): | |
288 | """ change the specified node | |
289 | """ | |
290 | self.dirtynodes.setdefault(classname, {})[nodeid] = 1 | |
291 | ||
292 | # can't set without having already loaded the node | |
293 | self.cache[classname][nodeid] = node | |
294 | self.savenode(classname, nodeid, node) | |
295 | ||
296 | def savenode(self, classname, nodeid, node): | |
297 | """ perform the saving of data specified by the set/addnode | |
298 | """ | |
299 | if __debug__: | |
300 | logging.getLogger('hyperdb').debug('save %s%s %r'%(classname, nodeid, node)) | |
301 | self.transactions.append((self.doSaveNode, (classname, nodeid, node))) | |
302 | ||
303 | def getnode(self, classname, nodeid, db=None, cache=1): | |
304 | """ get a node from the database | |
305 | ||
306 | Note the "cache" parameter only controls whether the retrieved | |
307 | node is stored in the in-memory cache; it is on by default. | |
308 | """ | |
309 | # try the cache | |
310 | cache_dict = self.cache.setdefault(classname, {}) | |
311 | if nodeid in cache_dict: | |
312 | if __debug__: | |
313 | logging.getLogger('hyperdb').debug('get %s%s cached'%(classname, nodeid)) | |
314 | self.stats['cache_hits'] += 1 | |
315 | return cache_dict[nodeid] | |
316 | ||
317 | if __debug__: | |
318 | self.stats['cache_misses'] += 1 | |
319 | start_t = time.time() | |
320 | logging.getLogger('hyperdb').debug('get %s%s'%(classname, nodeid)) | |
321 | ||
322 | # get from the database and save in the cache | |
323 | if db is None: | |
324 | db = self.getclassdb(classname) | |
325 | if nodeid not in db: | |
326 | raise IndexError("no such %s %s"%(classname, nodeid)) | |
327 | ||
328 | # check the uncommitted, destroyed nodes | |
329 | if (classname in self.destroyednodes and | |
330 | nodeid in self.destroyednodes[classname]): | |
331 | raise IndexError("no such %s %s"%(classname, nodeid)) | |
332 | ||
333 | # decode | |
334 | res = marshal.loads(db[nodeid]) | |
335 | ||
336 | # reverse the serialisation | |
337 | res = self.unserialise(classname, res) | |
338 | ||
339 | # store off in the cache dict | |
340 | if cache: | |
341 | cache_dict[nodeid] = res | |
342 | ||
343 | if __debug__: | |
344 | self.stats['get_items'] += (time.time() - start_t) | |
345 | ||
346 | return res | |
347 | ||
348 | def destroynode(self, classname, nodeid): | |
349 | """Remove a node from the database. Called exclusively by the | |
350 | destroy() method on Class. | |
351 | """ | |
352 | logging.getLogger('hyperdb').info('destroy %s%s'%(classname, nodeid)) | |
353 | ||
354 | # remove from cache and newnodes if it's there | |
355 | if (classname in self.cache and nodeid in self.cache[classname]): | |
356 | del self.cache[classname][nodeid] | |
357 | if (classname in self.newnodes and nodeid in self.newnodes[classname]): | |
358 | del self.newnodes[classname][nodeid] | |
359 | ||
360 | # see if there's any obvious commit actions that we should get rid of | |
361 | for entry in self.transactions[:]: | |
362 | if entry[1][:2] == (classname, nodeid): | |
363 | self.transactions.remove(entry) | |
364 | ||
365 | # add to the destroyednodes map | |
366 | self.destroyednodes.setdefault(classname, {})[nodeid] = 1 | |
367 | ||
368 | # add the destroy commit action | |
369 | self.transactions.append((self.doDestroyNode, (classname, nodeid))) | |
370 | self.transactions.append((FileStorage.destroy, (self, classname, nodeid))) | |
371 | ||
372 | def serialise(self, classname, node): | |
373 | """Copy the node contents, converting non-marshallable data into | |
374 | marshallable data. | |
375 | """ | |
376 | properties = self.getclass(classname).getprops() | |
377 | d = {} | |
378 | for k, v in node.iteritems(): | |
379 | if k == self.RETIRED_FLAG: | |
380 | d[k] = v | |
381 | continue | |
382 | ||
383 | # if the property doesn't exist then we really don't care | |
384 | if k not in properties: | |
385 | continue | |
386 | ||
387 | # get the property spec | |
388 | prop = properties[k] | |
389 | ||
390 | if isinstance(prop, hyperdb.Password) and v is not None: | |
391 | d[k] = str(v) | |
392 | elif isinstance(prop, hyperdb.Date) and v is not None: | |
393 | d[k] = v.serialise() | |
394 | elif isinstance(prop, hyperdb.Interval) and v is not None: | |
395 | d[k] = v.serialise() | |
396 | else: | |
397 | d[k] = v | |
398 | return d | |
399 | ||
400 | def unserialise(self, classname, node): | |
401 | """Decode the marshalled node data | |
402 | """ | |
403 | properties = self.getclass(classname).getprops() | |
404 | d = {} | |
405 | for k, v in node.iteritems(): | |
406 | # if the property doesn't exist, or is the "retired" flag then | |
407 | # it won't be in the properties dict | |
408 | if k not in properties: | |
409 | d[k] = v | |
410 | continue | |
411 | ||
412 | # get the property spec | |
413 | prop = properties[k] | |
414 | ||
415 | if isinstance(prop, hyperdb.Date) and v is not None: | |
416 | d[k] = date.Date(v) | |
417 | elif isinstance(prop, hyperdb.Interval) and v is not None: | |
418 | d[k] = date.Interval(v) | |
419 | elif isinstance(prop, hyperdb.Password) and v is not None: | |
420 | p = password.Password() | |
421 | p.unpack(v) | |
422 | d[k] = p | |
423 | else: | |
424 | d[k] = v | |
425 | return d | |
426 | ||
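# Round-trip sketch (hypothetical 'issue' class with a Date property
# 'deadline'): serialise() reduces Date/Interval/Password values to plain
# strings so marshal can handle them, and unserialise() rebuilds the objects.
#
#   raw = db.serialise('issue', {'title': 'x', 'deadline': date.Date()})
#   node = db.unserialise('issue', marshal.loads(marshal.dumps(raw)))
#   # node['deadline'] is a date.Date instance again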
427 | def hasnode(self, classname, nodeid, db=None): | |
428 | """ determine if the database has a given node | |
429 | """ | |
430 | # try the cache | |
431 | cache = self.cache.setdefault(classname, {}) | |
432 | if nodeid in cache: | |
433 | return 1 | |
434 | ||
435 | # not in the cache - check the database | |
436 | if db is None: | |
437 | db = self.getclassdb(classname) | |
438 | return nodeid in db | |
439 | ||
440 | def countnodes(self, classname, db=None): | |
441 | count = 0 | |
442 | ||
443 | # include the uncommitted nodes | |
444 | if classname in self.newnodes: | |
445 | count += len(self.newnodes[classname]) | |
446 | if classname in self.destroyednodes: | |
447 | count -= len(self.destroyednodes[classname]) | |
448 | ||
449 | # and count those in the DB | |
450 | if db is None: | |
451 | db = self.getclassdb(classname) | |
452 | return count + len(db) | |
453 | ||
454 | ||
455 | # | |
456 | # Files - special node properties | |
457 | # inherited from FileStorage | |
458 | ||
459 | # | |
460 | # Journal | |
461 | # | |
462 | def addjournal(self, classname, nodeid, action, params, creator=None, | |
463 | creation=None): | |
464 | """ Journal the Action | |
465 | 'action' may be: | |
466 | ||
467 | 'create' or 'set' -- 'params' is a dictionary of property values | |
468 | 'link' or 'unlink' -- 'params' is (classname, nodeid, propname) | |
469 | 'retired' or 'restored' -- 'params' is None | |
470 | ||
471 | 'creator' -- the user performing the action (defaults to the | |
472 | current user); 'creation' -- the action time (defaults to now). | |
473 | """ | |
474 | if __debug__: | |
475 | logging.getLogger('hyperdb').debug('addjournal %s%s %s %r %s %r'%(classname, | |
476 | nodeid, action, params, creator, creation)) | |
477 | if creator is None: | |
478 | creator = self.getuid() | |
479 | self.transactions.append((self.doSaveJournal, (classname, nodeid, | |
480 | action, params, creator, creation))) | |
481 | ||
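# Hedged examples of the call shapes (hypothetical 'issue' and 'msg' items);
# each call only queues a doSaveJournal transaction until commit():
#
#   db.addjournal('issue', '5', 'set', {'status': '2'})
#   db.addjournal('msg', '12', 'link', ('issue', '5', 'messages'))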
482 | def setjournal(self, classname, nodeid, journal): | |
483 | """Set the journal to the "journal" list.""" | |
484 | if __debug__: | |
485 | logging.getLogger('hyperdb').debug('setjournal %s%s %r'%(classname, | |
486 | nodeid, journal)) | |
487 | self.transactions.append((self.doSetJournal, (classname, nodeid, | |
488 | journal))) | |
489 | ||
490 | def getjournal(self, classname, nodeid): | |
491 | """ get the journal for id | |
492 | ||
493 | Raise IndexError if the node doesn't exist (as per history()'s | |
494 | API) | |
495 | """ | |
496 | # our journal result | |
497 | res = [] | |
498 | ||
499 | # add any journal entries for transactions not committed to the | |
500 | # database | |
501 | for method, args in self.transactions: | |
502 | if method != self.doSaveJournal: | |
503 | continue | |
504 | (cache_classname, cache_nodeid, cache_action, cache_params, | |
505 | cache_creator, cache_creation) = args | |
506 | if cache_classname == classname and cache_nodeid == nodeid: | |
507 | if not cache_creator: | |
508 | cache_creator = self.getuid() | |
509 | if not cache_creation: | |
510 | cache_creation = date.Date() | |
511 | res.append((cache_nodeid, cache_creation, cache_creator, | |
512 | cache_action, cache_params)) | |
513 | ||
514 | # attempt to open the journal - in some rare cases, the journal may | |
515 | # not exist | |
516 | try: | |
517 | db = self.opendb('journals.%s'%classname, 'r') | |
518 | except anydbm.error, error: | |
519 | if str(error) == "need 'c' or 'n' flag to open new db": | |
520 | raise IndexError('no such %s %s'%(classname, nodeid)) | |
521 | elif error.args[0] != 2: | |
522 | # this isn't a "not found" error, be alarmed! | |
523 | raise | |
524 | if res: | |
525 | # we have unsaved journal entries, return them | |
526 | return res | |
527 | raise IndexError('no such %s %s'%(classname, nodeid)) | |
528 | try: | |
529 | journal = marshal.loads(db[nodeid]) | |
530 | except KeyError: | |
531 | db.close() | |
532 | if res: | |
533 | # we have some unsaved journal entries, be happy! | |
534 | return res | |
535 | raise IndexError('no such %s %s'%(classname, nodeid)) | |
536 | db.close() | |
537 | ||
538 | # add all the saved journal entries for this node | |
539 | for nodeid, date_stamp, user, action, params in journal: | |
540 | res.append((nodeid, date.Date(date_stamp), user, action, params)) | |
541 | return res | |
542 | ||
543 | def pack(self, pack_before): | |
544 | """ Delete all journal entries except "create" before 'pack_before'. | |
545 | """ | |
546 | pack_before = pack_before.serialise() | |
547 | for classname in self.getclasses(): | |
548 | packed = 0 | |
549 | # get the journal db | |
550 | db_name = 'journals.%s'%classname | |
551 | path = os.path.join(os.getcwd(), self.dir, db_name) | |
552 | db_type = self.determine_db_type(path) | |
553 | db = self.opendb(db_name, 'w') | |
554 | ||
555 | for key in db: | |
556 | # get the journal for this db entry | |
557 | journal = marshal.loads(db[key]) | |
558 | l = [] | |
559 | last_set_entry = None | |
560 | for entry in journal: | |
561 | # unpack the entry | |
562 | (nodeid, date_stamp, user, action, | |
563 | params) = entry | |
564 | # if the entry is after the pack date, _or_ the initial | |
565 | # create entry, then it stays | |
566 | if date_stamp > pack_before or action == 'create': | |
567 | l.append(entry) | |
568 | else: | |
569 | packed += 1 | |
570 | db[key] = marshal.dumps(l) | |
571 | ||
572 | logging.getLogger('hyperdb').info('packed %d %s items'%(packed, | |
573 | classname)) | |
574 | ||
575 | if db_type == 'gdbm': | |
576 | db.reorganize() | |
577 | db.close() | |
578 | ||
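# Usage sketch: drop journal entries older than 30 days, keeping each item's
# 'create' entry (the date arithmetic is illustrative):
#
#   db.pack(date.Date() - date.Interval('30d'))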
579 | ||
580 | # | |
581 | # Basic transaction support | |
582 | # | |
583 | def commit(self, fail_ok=False): | |
584 | """ Commit the current transactions. | |
585 | ||
586 | Save all data changed since the database was opened or since the | |
587 | last commit() or rollback(). | |
588 | ||
589 | fail_ok indicates that the commit is allowed to fail. This is used | |
590 | in the web interface when committing cleaning of the session | |
591 | database. We don't care if there's a concurrency issue there. | |
592 | ||
593 | The only backend this seems to affect is postgres. | |
594 | """ | |
595 | logging.getLogger('hyperdb').info('commit %s transactions'%( | |
596 | len(self.transactions))) | |
597 | ||
598 | # keep a handle to all the database files opened | |
599 | self.databases = {} | |
600 | ||
601 | try: | |
602 | # now, do all the transactions | |
603 | reindex = {} | |
604 | for method, args in self.transactions: | |
605 | reindex[method(*args)] = 1 | |
606 | finally: | |
607 | # make sure we close all the database files | |
608 | for db in self.databases.itervalues(): | |
609 | db.close() | |
610 | del self.databases | |
611 | ||
612 | # clear the transactions list now so the blobfile implementation | |
613 | # doesn't think there's still pending file commits when it tries | |
614 | # to access the file data | |
615 | self.transactions = [] | |
616 | ||
617 | # reindex the nodes that request it | |
618 | for classname, nodeid in [k for k in reindex if k]: | |
619 | self.getclass(classname).index(nodeid) | |
620 | ||
621 | # save the indexer state | |
622 | self.indexer.save_index() | |
623 | ||
624 | self.clearCache() | |
625 | ||
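# Node and journal writes are queued as (method, args) pairs on
# self.transactions and only reach the dbm files at commit(); rollback()
# discards them. A hedged sketch (hypothetical 'issue' class):
#
#   db.issue.create(title='first')    # queued, not yet written
#   db.rollback()                     # forget the queued writes and the cache
#   db.issue.create(title='first')
#   db.commit()                       # write nodes, journals and the index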
626 | def clearCache(self): | |
627 | # all transactions committed, back to normal | |
628 | self.cache = {} | |
629 | self.dirtynodes = {} | |
630 | self.newnodes = {} | |
631 | self.destroyednodes = {} | |
632 | self.transactions = [] | |
633 | ||
634 | def getCachedClassDB(self, classname): | |
635 | """ get the class db, looking in our cache of databases for commit | |
636 | """ | |
637 | # get the database handle | |
638 | db_name = 'nodes.%s'%classname | |
639 | if db_name not in self.databases: | |
640 | self.databases[db_name] = self.getclassdb(classname, 'c') | |
641 | return self.databases[db_name] | |
642 | ||
643 | def doSaveNode(self, classname, nodeid, node): | |
644 | db = self.getCachedClassDB(classname) | |
645 | ||
646 | # now save the marshalled data | |
647 | db[nodeid] = marshal.dumps(self.serialise(classname, node)) | |
648 | ||
649 | # return the classname, nodeid so we reindex this content | |
650 | return (classname, nodeid) | |
651 | ||
652 | def getCachedJournalDB(self, classname): | |
653 | """ get the journal db, looking in our cache of databases for commit | |
654 | """ | |
655 | # get the database handle | |
656 | db_name = 'journals.%s'%classname | |
657 | if db_name not in self.databases: | |
658 | self.databases[db_name] = self.opendb(db_name, 'c') | |
659 | return self.databases[db_name] | |
660 | ||
661 | def doSaveJournal(self, classname, nodeid, action, params, creator, | |
662 | creation): | |
663 | # serialise the parameters now if necessary | |
664 | if isinstance(params, type({})): | |
665 | if action in ('set', 'create'): | |
666 | params = self.serialise(classname, params) | |
667 | ||
668 | # handle supply of the special journalling parameters (usually | |
669 | # supplied on importing an existing database) | |
670 | journaltag = creator | |
671 | if creation: | |
672 | journaldate = creation.serialise() | |
673 | else: | |
674 | journaldate = date.Date().serialise() | |
675 | ||
676 | # create the journal entry | |
677 | entry = (nodeid, journaldate, journaltag, action, params) | |
678 | ||
679 | db = self.getCachedJournalDB(classname) | |
680 | ||
681 | # now insert the journal entry | |
682 | if nodeid in db: | |
683 | # append to existing | |
684 | s = db[nodeid] | |
685 | l = marshal.loads(s) | |
686 | l.append(entry) | |
687 | else: | |
688 | l = [entry] | |
689 | ||
690 | db[nodeid] = marshal.dumps(l) | |
691 | ||
692 | def doSetJournal(self, classname, nodeid, journal): | |
693 | l = [] | |
694 | for nodeid, journaldate, journaltag, action, params in journal: | |
695 | # serialise the parameters now if necessary | |
696 | if isinstance(params, type({})): | |
697 | if action in ('set', 'create'): | |
698 | params = self.serialise(classname, params) | |
699 | journaldate = journaldate.serialise() | |
700 | l.append((nodeid, journaldate, journaltag, action, params)) | |
701 | db = self.getCachedJournalDB(classname) | |
702 | db[nodeid] = marshal.dumps(l) | |
703 | ||
704 | def doDestroyNode(self, classname, nodeid): | |
705 | # delete from the class database | |
706 | db = self.getCachedClassDB(classname) | |
707 | if nodeid in db: | |
708 | del db[nodeid] | |
709 | ||
710 | # delete from the database | |
711 | db = self.getCachedJournalDB(classname) | |
712 | if nodeid in db: | |
713 | del db[nodeid] | |
714 | ||
715 | def rollback(self): | |
716 | """ Reverse all actions from the current transaction. | |
717 | """ | |
718 | logging.getLogger('hyperdb').info('rollback %s transactions'%( | |
719 | len(self.transactions))) | |
720 | ||
721 | for method, args in self.transactions: | |
722 | # delete temporary files | |
723 | if method == self.doStoreFile: | |
724 | self.rollbackStoreFile(*args) | |
725 | self.cache = {} | |
726 | self.dirtynodes = {} | |
727 | self.newnodes = {} | |
728 | self.destroyednodes = {} | |
729 | self.transactions = [] | |
730 | ||
731 | def close(self): | |
732 | """ Nothing to do | |
733 | """ | |
734 | if self.lockfile is not None: | |
735 | locking.release_lock(self.lockfile) | |
736 | self.lockfile.close() | |
737 | self.lockfile = None | |
738 | ||
739 | _marker = [] | |
740 | class Class(hyperdb.Class): | |
741 | """The handle to a particular class of nodes in a hyperdatabase.""" | |
742 | ||
743 | def enableJournalling(self): | |
744 | """Turn journalling on for this class | |
745 | """ | |
746 | self.do_journal = 1 | |
747 | ||
748 | def disableJournalling(self): | |
749 | """Turn journalling off for this class | |
750 | """ | |
751 | self.do_journal = 0 | |
752 | ||
753 | # Editing nodes: | |
754 | ||
755 | def create(self, **propvalues): | |
756 | """Create a new node of this class and return its id. | |
757 | ||
758 | The keyword arguments in 'propvalues' map property names to values. | |
759 | ||
760 | The values of arguments must be acceptable for the types of their | |
761 | corresponding properties or a TypeError is raised. | |
762 | ||
763 | If this class has a key property, it must be present and its value | |
764 | must not collide with other key strings or a ValueError is raised. | |
765 | ||
766 | Any other properties on this class that are missing from the | |
767 | 'propvalues' dictionary are set to None. | |
768 | ||
769 | If an id in a link or multilink property does not refer to a valid | |
770 | node, an IndexError is raised. | |
771 | ||
772 | These operations trigger detectors and can be vetoed. Attempts | |
773 | to modify the "creation" or "activity" properties cause a KeyError. | |
774 | """ | |
775 | if self.db.journaltag is None: | |
776 | raise hyperdb.DatabaseError(_('Database open read-only')) | |
777 | self.fireAuditors('create', None, propvalues) | |
778 | newid = self.create_inner(**propvalues) | |
779 | self.fireReactors('create', newid, None) | |
780 | return newid | |
781 | ||
782 | def create_inner(self, **propvalues): | |
783 | """ Called by create, in-between the audit and react calls. | |
784 | """ | |
785 | if 'id' in propvalues: | |
786 | raise KeyError('"id" is reserved') | |
787 | ||
788 | if self.db.journaltag is None: | |
789 | raise hyperdb.DatabaseError(_('Database open read-only')) | |
790 | ||
791 | if 'creation' in propvalues or 'activity' in propvalues: | |
792 | raise KeyError('"creation" and "activity" are reserved') | |
793 | # new node's id | |
794 | newid = self.db.newid(self.classname) | |
795 | ||
796 | # validate propvalues | |
797 | num_re = re.compile('^\d+$') | |
798 | for key, value in propvalues.iteritems(): | |
799 | if key == self.key: | |
800 | try: | |
801 | self.lookup(value) | |
802 | except KeyError: | |
803 | pass | |
804 | else: | |
805 | raise ValueError('node with key "%s" exists'%value) | |
806 | ||
807 | # try to handle this property | |
808 | try: | |
809 | prop = self.properties[key] | |
810 | except KeyError: | |
811 | raise KeyError('"%s" has no property "%s"'%(self.classname, | |
812 | key)) | |
813 | ||
814 | if value is not None and isinstance(prop, hyperdb.Link): | |
815 | if type(value) != type(''): | |
816 | raise ValueError('link value must be String') | |
817 | link_class = self.properties[key].classname | |
818 | # if it isn't a number, it's a key | |
819 | if not num_re.match(value): | |
820 | try: | |
821 | value = self.db.classes[link_class].lookup(value) | |
822 | except (TypeError, KeyError): | |
823 | raise IndexError('new property "%s": %s not a %s'%( | |
824 | key, value, link_class)) | |
825 | elif not self.db.getclass(link_class).hasnode(value): | |
826 | raise IndexError('%s has no node %s'%(link_class, | |
827 | value)) | |
828 | ||
829 | # save off the value | |
830 | propvalues[key] = value | |
831 | ||
832 | # register the link with the newly linked node | |
833 | if self.do_journal and self.properties[key].do_journal: | |
834 | self.db.addjournal(link_class, value, 'link', | |
835 | (self.classname, newid, key)) | |
836 | ||
837 | elif isinstance(prop, hyperdb.Multilink): | |
838 | if value is None: | |
839 | value = [] | |
840 | if not hasattr(value, '__iter__'): | |
841 | raise TypeError('new property "%s" not an iterable of ids'%key) | |
842 | ||
843 | # clean up and validate the list of links | |
844 | link_class = self.properties[key].classname | |
845 | l = [] | |
846 | for entry in value: | |
847 | if type(entry) != type(''): | |
848 | raise ValueError('"%s" multilink value (%r) '\ | |
849 | 'must contain Strings'%(key, value)) | |
850 | # if it isn't a number, it's a key | |
851 | if not num_re.match(entry): | |
852 | try: | |
853 | entry = self.db.classes[link_class].lookup(entry) | |
854 | except (TypeError, KeyError): | |
855 | raise IndexError('new property "%s": %s not a %s'%( | |
856 | key, entry, self.properties[key].classname)) | |
857 | l.append(entry) | |
858 | value = l | |
859 | propvalues[key] = value | |
860 | ||
861 | # handle additions | |
862 | for nodeid in value: | |
863 | if not self.db.getclass(link_class).hasnode(nodeid): | |
864 | raise IndexError('%s has no node %s'%(link_class, | |
865 | nodeid)) | |
866 | # register the link with the newly linked node | |
867 | if self.do_journal and self.properties[key].do_journal: | |
868 | self.db.addjournal(link_class, nodeid, 'link', | |
869 | (self.classname, newid, key)) | |
870 | ||
871 | elif isinstance(prop, hyperdb.String): | |
872 | if type(value) != type('') and type(value) != type(u''): | |
873 | raise TypeError('new property "%s" not a string'%key) | |
874 | if prop.indexme: | |
875 | self.db.indexer.add_text((self.classname, newid, key), | |
876 | value) | |
877 | ||
878 | elif isinstance(prop, hyperdb.Password): | |
879 | if not isinstance(value, password.Password): | |
880 | raise TypeError('new property "%s" not a Password'%key) | |
881 | ||
882 | elif isinstance(prop, hyperdb.Date): | |
883 | if value is not None and not isinstance(value, date.Date): | |
884 | raise TypeError('new property "%s" not a Date'%key) | |
885 | ||
886 | elif isinstance(prop, hyperdb.Interval): | |
887 | if value is not None and not isinstance(value, date.Interval): | |
888 | raise TypeError('new property "%s" not an Interval'%key) | |
889 | ||
890 | elif value is not None and isinstance(prop, hyperdb.Number): | |
891 | try: | |
892 | float(value) | |
893 | except ValueError: | |
894 | raise TypeError('new property "%s" not numeric'%key) | |
895 | ||
896 | elif value is not None and isinstance(prop, hyperdb.Boolean): | |
897 | try: | |
898 | int(value) | |
899 | except ValueError: | |
900 | raise TypeError('new property "%s" not boolean'%key) | |
901 | ||
902 | # make sure there's data where there needs to be | |
903 | for key, prop in self.properties.iteritems(): | |
904 | if key in propvalues: | |
905 | continue | |
906 | if key == self.key: | |
907 | raise ValueError('key property "%s" is required'%key) | |
908 | if isinstance(prop, hyperdb.Multilink): | |
909 | propvalues[key] = [] | |
910 | ||
911 | # done | |
912 | self.db.addnode(self.classname, newid, propvalues) | |
913 | if self.do_journal: | |
914 | self.db.addjournal(self.classname, newid, 'create', {}) | |
915 | ||
916 | return newid | |
917 | ||
918 | def get(self, nodeid, propname, default=_marker, cache=1): | |
919 | """Get the value of a property on an existing node of this class. | |
920 | ||
921 | 'nodeid' must be the id of an existing node of this class or an | |
922 | IndexError is raised. 'propname' must be the name of a property | |
923 | of this class or a KeyError is raised. | |
924 | ||
925 | 'cache' exists for backward compatibility, and is not used. | |
926 | ||
927 | Attempts to get the "creation" or "activity" properties should | |
928 | do the right thing. | |
929 | """ | |
930 | if propname == 'id': | |
931 | return nodeid | |
932 | ||
933 | # get the node's dict | |
934 | d = self.db.getnode(self.classname, nodeid) | |
935 | ||
936 | # check for one of the special props | |
937 | if propname == 'creation': | |
938 | if 'creation' in d: | |
939 | return d['creation'] | |
940 | if not self.do_journal: | |
941 | raise ValueError('Journalling is disabled for this class') | |
942 | journal = self.db.getjournal(self.classname, nodeid) | |
943 | if journal: | |
944 | return journal[0][1] | |
945 | else: | |
946 | # on the strange chance that there's no journal | |
947 | return date.Date() | |
948 | if propname == 'activity': | |
949 | if 'activity' in d: | |
950 | return d['activity'] | |
951 | if not self.do_journal: | |
952 | raise ValueError('Journalling is disabled for this class') | |
953 | journal = self.db.getjournal(self.classname, nodeid) | |
954 | if journal: | |
955 | return self.db.getjournal(self.classname, nodeid)[-1][1] | |
956 | else: | |
957 | # on the strange chance that there's no journal | |
958 | return date.Date() | |
959 | if propname == 'creator': | |
960 | if 'creator' in d: | |
961 | return d['creator'] | |
962 | if not self.do_journal: | |
963 | raise ValueError('Journalling is disabled for this class') | |
964 | journal = self.db.getjournal(self.classname, nodeid) | |
965 | if journal: | |
966 | num_re = re.compile('^\d+$') | |
967 | value = journal[0][2] | |
968 | if num_re.match(value): | |
969 | return value | |
970 | else: | |
971 | # old-style "username" journal tag | |
972 | try: | |
973 | return self.db.user.lookup(value) | |
974 | except KeyError: | |
975 | # user's been retired, return admin | |
976 | return '1' | |
977 | else: | |
978 | return self.db.getuid() | |
979 | if propname == 'actor': | |
980 | if 'actor' in d: | |
981 | return d['actor'] | |
982 | if not self.do_journal: | |
983 | raise ValueError('Journalling is disabled for this class') | |
984 | journal = self.db.getjournal(self.classname, nodeid) | |
985 | if journal: | |
986 | num_re = re.compile('^\d+$') | |
987 | value = journal[-1][2] | |
988 | if num_re.match(value): | |
989 | return value | |
990 | else: | |
991 | # old-style "username" journal tag | |
992 | try: | |
993 | return self.db.user.lookup(value) | |
994 | except KeyError: | |
995 | # user's been retired, return admin | |
996 | return '1' | |
997 | else: | |
998 | return self.db.getuid() | |
999 | ||
1000 | # get the property (raises KeyError if invalid) | |
1001 | prop = self.properties[propname] | |
1002 | ||
1003 | if propname not in d: | |
1004 | if default is _marker: | |
1005 | if isinstance(prop, hyperdb.Multilink): | |
1006 | return [] | |
1007 | else: | |
1008 | return None | |
1009 | else: | |
1010 | return default | |
1011 | ||
1012 | # return a dupe of the list so code doesn't get confused | |
1013 | if isinstance(prop, hyperdb.Multilink): | |
1014 | return d[propname][:] | |
1015 | ||
1016 | return d[propname] | |
1017 | ||
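# Usage sketch (hypothetical 'issue' class and properties):
#
#   db.issue.get('5', 'title')      # stored property value
#   db.issue.get('5', 'creator')    # derived from the journal when not stored
#   db.issue.get('5', 'nosy')       # Multilink: returns a copy of the id list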
1018 | def set(self, nodeid, **propvalues): | |
1019 | """Modify a property on an existing node of this class. | |
1020 | ||
1021 | 'nodeid' must be the id of an existing node of this class or an | |
1022 | IndexError is raised. | |
1023 | ||
1024 | Each key in 'propvalues' must be the name of a property of this | |
1025 | class or a KeyError is raised. | |
1026 | ||
1027 | All values in 'propvalues' must be acceptable types for their | |
1028 | corresponding properties or a TypeError is raised. | |
1029 | ||
1030 | If the value of the key property is set, it must not collide with | |
1031 | other key strings or a ValueError is raised. | |
1032 | ||
1033 | If the value of a Link or Multilink property contains an invalid | |
1034 | node id, a ValueError is raised. | |
1035 | ||
1036 | These operations trigger detectors and can be vetoed. Attempts | |
1037 | to modify the "creation" or "activity" properties cause a KeyError. | |
1038 | """ | |
1039 | if self.db.journaltag is None: | |
1040 | raise hyperdb.DatabaseError(_('Database open read-only')) | |
1041 | ||
1042 | self.fireAuditors('set', nodeid, propvalues) | |
1043 | oldvalues = copy.deepcopy(self.db.getnode(self.classname, nodeid)) | |
1044 | for name, prop in self.getprops(protected=0).iteritems(): | |
1045 | if name in oldvalues: | |
1046 | continue | |
1047 | if isinstance(prop, hyperdb.Multilink): | |
1048 | oldvalues[name] = [] | |
1049 | else: | |
1050 | oldvalues[name] = None | |
1051 | propvalues = self.set_inner(nodeid, **propvalues) | |
1052 | self.fireReactors('set', nodeid, oldvalues) | |
1053 | return propvalues | |
1054 | ||
1055 | def set_inner(self, nodeid, **propvalues): | |
1056 | """ Called by set, in-between the audit and react calls. | |
1057 | """ | |
1058 | if not propvalues: | |
1059 | return propvalues | |
1060 | ||
1061 | if 'creation' in propvalues or 'activity' in propvalues: | |
1062 | raise KeyError('"creation" and "activity" are reserved') | |
1063 | ||
1064 | if 'id' in propvalues: | |
1065 | raise KeyError('"id" is reserved') | |
1066 | ||
1067 | if self.db.journaltag is None: | |
1068 | raise hyperdb.DatabaseError(_('Database open read-only')) | |
1069 | ||
1070 | node = self.db.getnode(self.classname, nodeid) | |
1071 | if self.db.RETIRED_FLAG in node: | |
1072 | raise IndexError('%s %s is retired'%(self.classname, nodeid)) | |
1073 | num_re = re.compile('^\d+$') | |
1074 | ||
1075 | # if the journal value is to be different, store it in here | |
1076 | journalvalues = {} | |
1077 | ||
1078 | # list() propvalues 'cos it might be modified by the loop | |
1079 | for propname, value in list(propvalues.items()): | |
1080 | # check to make sure we're not duplicating an existing key | |
1081 | if propname == self.key and node[propname] != value: | |
1082 | try: | |
1083 | self.lookup(value) | |
1084 | except KeyError: | |
1085 | pass | |
1086 | else: | |
1087 | raise ValueError('node with key "%s" exists'%value) | |
1088 | ||
1089 | # this will raise the KeyError if the property isn't valid | |
1090 | # ... we don't use getprops() here because we only care about | |
1091 | # the writeable properties. | |
1092 | try: | |
1093 | prop = self.properties[propname] | |
1094 | except KeyError: | |
1095 | raise KeyError('"%s" has no property named "%s"'%( | |
1096 | self.classname, propname)) | |
1097 | ||
1098 | # if the value's the same as the existing value, no sense in | |
1099 | # doing anything | |
1100 | current = node.get(propname, None) | |
1101 | if value == current: | |
1102 | del propvalues[propname] | |
1103 | continue | |
1104 | journalvalues[propname] = current | |
1105 | ||
1106 | # do stuff based on the prop type | |
1107 | if isinstance(prop, hyperdb.Link): | |
1108 | link_class = prop.classname | |
1109 | # if it isn't a number, it's a key | |
1110 | if value is not None and not isinstance(value, type('')): | |
1111 | raise ValueError('property "%s" link value must be a string'%( | |
1112 | propname)) | |
1113 | if isinstance(value, type('')) and not num_re.match(value): | |
1114 | try: | |
1115 | value = self.db.classes[link_class].lookup(value) | |
1116 | except (TypeError, KeyError): | |
1117 | raise IndexError('new property "%s": %s not a %s'%( | |
1118 | propname, value, prop.classname)) | |
1119 | ||
1120 | if (value is not None and | |
1121 | not self.db.getclass(link_class).hasnode(value)): | |
1122 | raise IndexError('%s has no node %s'%(link_class, | |
1123 | value)) | |
1124 | ||
1125 | if self.do_journal and prop.do_journal: | |
1126 | # register the unlink with the old linked node | |
1127 | if propname in node and node[propname] is not None: | |
1128 | self.db.addjournal(link_class, node[propname], 'unlink', | |
1129 | (self.classname, nodeid, propname)) | |
1130 | ||
1131 | # register the link with the newly linked node | |
1132 | if value is not None: | |
1133 | self.db.addjournal(link_class, value, 'link', | |
1134 | (self.classname, nodeid, propname)) | |
1135 | ||
1136 | elif isinstance(prop, hyperdb.Multilink): | |
1137 | if value is None: | |
1138 | value = [] | |
1139 | if not hasattr(value, '__iter__'): | |
1140 | raise TypeError('new property "%s" not an iterable of' | |
1141 | ' ids'%propname) | |
1142 | link_class = self.properties[propname].classname | |
1143 | l = [] | |
1144 | for entry in value: | |
1145 | # if it isn't a number, it's a key | |
1146 | if type(entry) != type(''): | |
1147 | raise ValueError('new property "%s" link value ' | |
1148 | 'must be a string'%propname) | |
1149 | if not num_re.match(entry): | |
1150 | try: | |
1151 | entry = self.db.classes[link_class].lookup(entry) | |
1152 | except (TypeError, KeyError): | |
1153 | raise IndexError('new property "%s": %s not a %s'%( | |
1154 | propname, entry, | |
1155 | self.properties[propname].classname)) | |
1156 | l.append(entry) | |
1157 | value = l | |
1158 | propvalues[propname] = value | |
1159 | ||
1160 | # figure the journal entry for this property | |
1161 | add = [] | |
1162 | remove = [] | |
1163 | ||
1164 | # handle removals | |
1165 | if propname in node: | |
1166 | l = node[propname] | |
1167 | else: | |
1168 | l = [] | |
1169 | for id in l[:]: | |
1170 | if id in value: | |
1171 | continue | |
1172 | # register the unlink with the old linked node | |
1173 | if self.do_journal and self.properties[propname].do_journal: | |
1174 | self.db.addjournal(link_class, id, 'unlink', | |
1175 | (self.classname, nodeid, propname)) | |
1176 | l.remove(id) | |
1177 | remove.append(id) | |
1178 | ||
1179 | # handle additions | |
1180 | for id in value: | |
1181 | if not self.db.getclass(link_class).hasnode(id): | |
1182 | raise IndexError('%s has no node %s'%(link_class, | |
1183 | id)) | |
1184 | if id in l: | |
1185 | continue | |
1186 | # register the link with the newly linked node | |
1187 | if self.do_journal and self.properties[propname].do_journal: | |
1188 | self.db.addjournal(link_class, id, 'link', | |
1189 | (self.classname, nodeid, propname)) | |
1190 | l.append(id) | |
1191 | add.append(id) | |
1192 | ||
1193 | # figure the journal entry | |
1194 | l = [] | |
1195 | if add: | |
1196 | l.append(('+', add)) | |
1197 | if remove: | |
1198 | l.append(('-', remove)) | |
1199 | if l: | |
1200 | journalvalues[propname] = tuple(l) | |
1201 | ||
1202 | elif isinstance(prop, hyperdb.String): | |
1203 | if value is not None and type(value) != type('') and type(value) != type(u''): | |
1204 | raise TypeError('new property "%s" not a ' | |
1205 | 'string'%propname) | |
1206 | if prop.indexme: | |
1207 | self.db.indexer.add_text((self.classname, nodeid, propname), | |
1208 | value) | |
1209 | ||
1210 | elif isinstance(prop, hyperdb.Password): | |
1211 | if not isinstance(value, password.Password): | |
1212 | raise TypeError('new property "%s" not a ' | |
1213 | 'Password'%propname) | |
1214 | propvalues[propname] = value | |
1215 | ||
1216 | elif value is not None and isinstance(prop, hyperdb.Date): | |
1217 | if not isinstance(value, date.Date): | |
1218 | raise TypeError('new property "%s" not a ' | |
1219 | 'Date'%propname) | |
1220 | propvalues[propname] = value | |
1221 | ||
1222 | elif value is not None and isinstance(prop, hyperdb.Interval): | |
1223 | if not isinstance(value, date.Interval): | |
1224 | raise TypeError('new property "%s" not an ' | |
1225 | 'Interval'%propname) | |
1226 | propvalues[propname] = value | |
1227 | ||
1228 | elif value is not None and isinstance(prop, hyperdb.Number): | |
1229 | try: | |
1230 | float(value) | |
1231 | except ValueError: | |
1232 | raise TypeError('new property "%s" not ' | |
1233 | 'numeric'%propname) | |
1234 | ||
1235 | elif value is not None and isinstance(prop, hyperdb.Boolean): | |
1236 | try: | |
1237 | int(value) | |
1238 | except ValueError: | |
1239 | raise TypeError('new property "%s" not ' | |
1240 | 'boolean'%propname) | |
1241 | ||
1242 | node[propname] = value | |
1243 | ||
1244 | # nothing to do? | |
1245 | if not propvalues: | |
1246 | return propvalues | |
1247 | ||
1248 | # update the activity time | |
1249 | node['activity'] = date.Date() | |
1250 | node['actor'] = self.db.getuid() | |
1251 | ||
1252 | # do the set, and journal it | |
1253 | self.db.setnode(self.classname, nodeid, node) | |
1254 | ||
1255 | if self.do_journal: | |
1256 | self.db.addjournal(self.classname, nodeid, 'set', journalvalues) | |
1257 | ||
1258 | return propvalues | |
1259 | ||
1260 | def retire(self, nodeid): | |
1261 | """Retire a node. | |
1262 | ||
1263 | The properties on the node remain available from the get() method, | |
1264 | and the node's id is never reused. | |
1265 | ||
1266 | Retired nodes are not returned by the find(), list(), or lookup() | |
1267 | methods, and other nodes may reuse the values of their key properties. | |
1268 | ||
1269 | These operations trigger detectors and can be vetoed. Attempts | |
1270 | to modify the "creation" or "activity" properties cause a KeyError. | |
1271 | """ | |
1272 | if self.db.journaltag is None: | |
1273 | raise hyperdb.DatabaseError(_('Database open read-only')) | |
1274 | ||
1275 | self.fireAuditors('retire', nodeid, None) | |
1276 | ||
1277 | node = self.db.getnode(self.classname, nodeid) | |
1278 | node[self.db.RETIRED_FLAG] = 1 | |
1279 | self.db.setnode(self.classname, nodeid, node) | |
1280 | if self.do_journal: | |
1281 | self.db.addjournal(self.classname, nodeid, 'retired', None) | |
1282 | ||
1283 | self.fireReactors('retire', nodeid, None) | |
1284 | ||
1285 | def restore(self, nodeid): | |
1286 | """Restpre a retired node. | |
1287 | ||
1288 | Make node available for all operations like it was before retirement. | |
1289 | """ | |
1290 | if self.db.journaltag is None: | |
1291 | raise hyperdb.DatabaseError(_('Database open read-only')) | |
1292 | ||
1293 | node = self.db.getnode(self.classname, nodeid) | |
1294 | # check if the key property value now clashes with an existing node | |
1295 | key = self.getkey() | |
1296 | try: | |
1297 | id = self.lookup(node[key]) | |
1298 | except KeyError: | |
1299 | pass | |
1300 | else: | |
1301 | raise KeyError("Key property (%s) of retired node clashes " | |
1302 | "with existing one (%s)" % (key, node[key])) | |
1303 | # Now we can safely restore node | |
1304 | self.fireAuditors('restore', nodeid, None) | |
1305 | del node[self.db.RETIRED_FLAG] | |
1306 | self.db.setnode(self.classname, nodeid, node) | |
1307 | if self.do_journal: | |
1308 | self.db.addjournal(self.classname, nodeid, 'restored', None) | |
1309 | ||
1310 | self.fireReactors('restore', nodeid, None) | |
1311 | ||
1312 | def is_retired(self, nodeid, cldb=None): | |
1313 | """Return true if the node is retired. | |
1314 | """ | |
1315 | node = self.db.getnode(self.classname, nodeid, cldb) | |
1316 | if self.db.RETIRED_FLAG in node: | |
1317 | return 1 | |
1318 | return 0 | |
1319 | ||
1320 | def destroy(self, nodeid): | |
1321 | """Destroy a node. | |
1322 | ||
1323 | WARNING: this method should never be used except in extremely rare | |
1324 | situations where there could never be links to the node being | |
1325 | deleted | |
1326 | ||
1327 | WARNING: use retire() instead | |
1328 | ||
1329 | WARNING: the properties of this node will not be available ever again | |
1330 | ||
1331 | WARNING: really, use retire() instead | |
1332 | ||
1333 | Well, I think that's enough warnings. This method exists mostly to | |
1334 | support the session storage of the cgi interface. | |
1335 | """ | |
1336 | if self.db.journaltag is None: | |
1337 | raise hyperdb.DatabaseError(_('Database open read-only')) | |
1338 | self.db.destroynode(self.classname, nodeid) | |
1339 | ||
1340 | def history(self, nodeid): | |
1341 | """Retrieve the journal of edits on a particular node. | |
1342 | ||
1343 | 'nodeid' must be the id of an existing node of this class or an | |
1344 | IndexError is raised. | |
1345 | ||
1346 | The returned list contains tuples of the form | |
1347 | ||
1348 | (nodeid, date, tag, action, params) | |
1349 | ||
1350 | 'date' is a Timestamp object specifying the time of the change and | |
1351 | 'tag' is the journaltag specified when the database was opened. | |
1352 | """ | |
1353 | if not self.do_journal: | |
1354 | raise ValueError('Journalling is disabled for this class') | |
1355 | return self.db.getjournal(self.classname, nodeid) | |
1356 | ||
1357 | # Locating nodes: | |
1358 | def hasnode(self, nodeid): | |
1359 | """Determine if the given nodeid actually exists | |
1360 | """ | |
1361 | return self.db.hasnode(self.classname, nodeid) | |
1362 | ||
1363 | def setkey(self, propname): | |
1364 | """Select a String property of this class to be the key property. | |
1365 | ||
1366 | 'propname' must be the name of a String property of this class or | |
1367 | None, or a TypeError is raised. The values of the key property on | |
1368 | all existing nodes must be unique or a ValueError is raised. If the | |
1369 | property doesn't exist, KeyError is raised. | |
1370 | """ | |
1371 | prop = self.getprops()[propname] | |
1372 | if not isinstance(prop, hyperdb.String): | |
1373 | raise TypeError('key properties must be String') | |
1374 | self.key = propname | |
1375 | ||
1376 | def getkey(self): | |
1377 | """Return the name of the key property for this class or None.""" | |
1378 | return self.key | |
1379 | ||
1380 | # TODO: set up a separate index db file for this? profile? | |
1381 | def lookup(self, keyvalue): | |
1382 | """Locate a particular node by its key property and return its id. | |
1383 | ||
1384 | If this class has no key property, a TypeError is raised. If the | |
1385 | 'keyvalue' matches one of the values for the key property among | |
1386 | the nodes in this class, the matching node's id is returned; | |
1387 | otherwise a KeyError is raised. | |
1388 | """ | |
1389 | if not self.key: | |
1390 | raise TypeError('No key property set for ' | |
1391 | 'class %s'%self.classname) | |
1392 | cldb = self.db.getclassdb(self.classname) | |
1393 | try: | |
1394 | for nodeid in self.getnodeids(cldb): | |
1395 | node = self.db.getnode(self.classname, nodeid, cldb) | |
1396 | if self.db.RETIRED_FLAG in node: | |
1397 | continue | |
1398 | if self.key not in node: | |
1399 | continue | |
1400 | if node[self.key] == keyvalue: | |
1401 | return nodeid | |
1402 | finally: | |
1403 | cldb.close() | |
1404 | raise KeyError('No key (%s) value "%s" for "%s"'%(self.key, | |
1405 | keyvalue, self.classname)) | |
1406 | ||
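# Usage sketch: a 'user' class keyed on 'username' is the usual convention,
# so (assuming such a class exists in the schema):
#
#   uid = db.user.lookup('admin')       # -> the matching id, e.g. '1'
#   db.user.lookup('no-such-user')      # raises KeyError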
1407 | # change from spec - allows multiple props to match | |
1408 | def find(self, **propspec): | |
1409 | """Get the ids of nodes in this class which link to the given nodes. | |
1410 | ||
1411 | 'propspec' consists of keyword args propname=nodeid or | |
1412 | propname={nodeid:1, } | |
1413 | 'propname' must be the name of a property in this class, or a | |
1414 | KeyError is raised. That property must be a Link or | |
1415 | Multilink property, or a TypeError is raised. | |
1416 | ||
1417 | Any node in this class whose 'propname' property links to any of | |
1418 | the nodeids will be returned. Examples:: | |
1419 | ||
1420 | db.issue.find(messages='1') | |
1421 | db.issue.find(messages={'1':1,'3':1}, files={'7':1}) | |
1422 | """ | |
1423 | for propname, itemids in propspec.iteritems(): | |
1424 | # check the prop is OK | |
1425 | prop = self.properties[propname] | |
1426 | if not isinstance(prop, hyperdb.Link) and not isinstance(prop, hyperdb.Multilink): | |
1427 | raise TypeError("'%s' not a Link/Multilink " | |
1428 | "property"%propname) | |
1429 | ||
1430 | # ok, now do the find | |
1431 | cldb = self.db.getclassdb(self.classname) | |
1432 | l = [] | |
1433 | try: | |
1434 | for id in self.getnodeids(db=cldb): | |
1435 | item = self.db.getnode(self.classname, id, db=cldb) | |
1436 | if self.db.RETIRED_FLAG in item: | |
1437 | continue | |
1438 | for propname, itemids in propspec.iteritems(): | |
1439 | if type(itemids) is not type({}): | |
1440 | itemids = {itemids:1} | |
1441 | ||
1442 | # special case if the item doesn't have this property | |
1443 | if propname not in item: | |
1444 | if None in itemids: | |
1445 | l.append(id) | |
1446 | break | |
1447 | continue | |
1448 | ||
1449 | # grab the property definition and its value on this item | |
1450 | prop = self.properties[propname] | |
1451 | value = item[propname] | |
1452 | if isinstance(prop, hyperdb.Link) and value in itemids: | |
1453 | l.append(id) | |
1454 | break | |
1455 | elif isinstance(prop, hyperdb.Multilink): | |
1456 | hit = 0 | |
1457 | for v in value: | |
1458 | if v in itemids: | |
1459 | l.append(id) | |
1460 | hit = 1 | |
1461 | break | |
1462 | if hit: | |
1463 | break | |
1464 | finally: | |
1465 | cldb.close() | |
1466 | return l | |
1467 | ||
1468 | def stringFind(self, **requirements): | |
1469 | """Locate a particular node by matching a set of its String | |
1470 | properties in a caseless search. | |
1471 | ||
1472 | If the property is not a String property, a TypeError is raised. | |
1473 | ||
1474 | The return is a list of the id of all nodes that match. | |
1475 | """ | |
1476 | for propname in requirements: | |
1477 | prop = self.properties[propname] | |
1478 | if not isinstance(prop, hyperdb.String): | |
1479 | raise TypeError("'%s' not a String property"%propname) | |
1480 | requirements[propname] = requirements[propname].lower() | |
1481 | l = [] | |
1482 | cldb = self.db.getclassdb(self.classname) | |
1483 | try: | |
1484 | for nodeid in self.getnodeids(cldb): | |
1485 | node = self.db.getnode(self.classname, nodeid, cldb) | |
1486 | if self.db.RETIRED_FLAG in node: | |
1487 | continue | |
1488 | for key, value in requirements.iteritems(): | |
1489 | if key not in node: | |
1490 | break | |
1491 | if node[key] is None or node[key].lower() != value: | |
1492 | break | |
1493 | else: | |
1494 | l.append(nodeid) | |
1495 | finally: | |
1496 | cldb.close() | |
1497 | return l | |
1498 | ||
1499 | def list(self): | |
1500 | """ Return a list of the ids of the active nodes in this class. | |
1501 | """ | |
1502 | l = [] | |
1503 | cn = self.classname | |
1504 | cldb = self.db.getclassdb(cn) | |
1505 | try: | |
1506 | for nodeid in self.getnodeids(cldb): | |
1507 | node = self.db.getnode(cn, nodeid, cldb) | |
1508 | if self.db.RETIRED_FLAG in node: | |
1509 | continue | |
1510 | l.append(nodeid) | |
1511 | finally: | |
1512 | cldb.close() | |
1513 | l.sort() | |
1514 | return l | |
1515 | ||
1516 | def getnodeids(self, db=None, retired=None): | |
1517 | """ Return a list of ALL nodeids | |
1518 | ||
1519 | Set retired=None to get all nodes. Otherwise it'll get all the | |
1520 | retired or non-retired nodes, depending on the flag. | |
1521 | """ | |
1522 | res = [] | |
1523 | ||
1524 | # start off with the new nodes | |
1525 | if self.classname in self.db.newnodes: | |
1526 | res.extend(self.db.newnodes[self.classname]) | |
1527 | ||
1528 | must_close = False | |
1529 | if db is None: | |
1530 | db = self.db.getclassdb(self.classname) | |
1531 | must_close = True | |
1532 | try: | |
1533 | res.extend(db) | |
1534 | ||
1535 | # remove the uncommitted, destroyed nodes | |
1536 | if self.classname in self.db.destroyednodes: | |
1537 | for nodeid in self.db.destroyednodes[self.classname]: | |
1538 | if nodeid in db: | |
1539 | res.remove(nodeid) | |
1540 | ||
1541 | # check retired flag | |
1542 | if retired is False or retired is True: | |
1543 | l = [] | |
1544 | for nodeid in res: | |
1545 | node = self.db.getnode(self.classname, nodeid, db) | |
1546 | is_ret = self.db.RETIRED_FLAG in node | |
1547 | if retired == is_ret: | |
1548 | l.append(nodeid) | |
1549 | res = l | |
1550 | finally: | |
1551 | if must_close: | |
1552 | db.close() | |
1553 | return res | |
1554 | ||
1555 | def _filter(self, search_matches, filterspec, proptree, | |
1556 | num_re = re.compile(r'^\d+$')): | |
1557 | """Return a list of the ids of the active nodes in this class that | |
1558 | match the 'filter' spec, sorted by the group spec and then the | |
1559 | sort spec. | |
1560 | ||
1561 | "filterspec" is {propname: value(s)} | |
1562 | ||
1563 | "sort" and "group" are (dir, prop) where dir is '+', '-' or None | |
1564 | and prop is a prop name or None | |
1565 | ||
1566 | "search_matches" is a sequence type or None | |
1567 | ||
1568 | The filter must match all properties specified. If the property | |
1569 | value to match is a list: | |
1570 | ||
1571 | 1. String properties must match all elements in the list, and | |
1572 | 2. Other properties must match any of the elements in the list. | |
1573 | """ | |
1574 | if __debug__: | |
1575 | start_t = time.time() | |
1576 | ||
1577 | cn = self.classname | |
1578 | ||
1579 | # optimise filterspec | |
1580 | l = [] | |
1581 | props = self.getprops() | |
1582 | LINK = 'spec:link' | |
1583 | MULTILINK = 'spec:multilink' | |
1584 | STRING = 'spec:string' | |
1585 | DATE = 'spec:date' | |
1586 | INTERVAL = 'spec:interval' | |
1587 | OTHER = 'spec:other' | |
1588 | ||
1589 | for k, v in filterspec.iteritems(): | |
1590 | propclass = props[k] | |
1591 | if isinstance(propclass, hyperdb.Link): | |
1592 | if type(v) is not type([]): | |
1593 | v = [v] | |
1594 | u = [] | |
1595 | for entry in v: | |
1596 | # the value -1 is a special "not set" sentinel | |
1597 | if entry == '-1': | |
1598 | entry = None | |
1599 | u.append(entry) | |
1600 | l.append((LINK, k, u)) | |
1601 | elif isinstance(propclass, hyperdb.Multilink): | |
1602 | # the value -1 is a special "not set" sentinel | |
1603 | if v in ('-1', ['-1']): | |
1604 | v = [] | |
1605 | elif type(v) is not type([]): | |
1606 | v = [v] | |
1607 | l.append((MULTILINK, k, v)) | |
1608 | elif isinstance(propclass, hyperdb.String) and k != 'id': | |
1609 | if type(v) is not type([]): | |
1610 | v = [v] | |
1611 | for val in v: | |
1612 | # simple glob searching ('?' and '*' wildcards) | |
1613 | val = re.sub(r'([\|\{\}\\\.\+\[\]\(\)])', r'\\\1', val) | |
1614 | val = val.replace('?', '.') | |
1615 | val = val.replace('*', '.*?') | |
1616 | l.append((STRING, k, re.compile(val, re.I))) | |
1617 | elif isinstance(propclass, hyperdb.Date): | |
1618 | try: | |
1619 | date_rng = propclass.range_from_raw(v, self.db) | |
1620 | l.append((DATE, k, date_rng)) | |
1621 | except ValueError: | |
1622 | # If range creation fails - ignore that search parameter | |
1623 | pass | |
1624 | elif isinstance(propclass, hyperdb.Interval): | |
1625 | try: | |
1626 | intv_rng = date.Range(v, date.Interval) | |
1627 | l.append((INTERVAL, k, intv_rng)) | |
1628 | except ValueError: | |
1629 | # If range creation fails - ignore that search parameter | |
1630 | pass | |
1631 | ||
1632 | elif isinstance(propclass, hyperdb.Boolean): | |
1633 | if type(v) == type(""): | |
1634 | v = v.split(',') | |
1635 | if type(v) != type([]): | |
1636 | v = [v] | |
1637 | bv = [] | |
1638 | for val in v: | |
1639 | if type(val) is type(''): | |
1640 | bv.append(propclass.from_raw (val)) | |
1641 | else: | |
1642 | bv.append(val) | |
1643 | l.append((OTHER, k, bv)) | |
1644 | ||
1645 | elif k == 'id': | |
1646 | if type(v) != type([]): | |
1647 | v = v.split(',') | |
1648 | l.append((OTHER, k, [str(int(val)) for val in v])) | |
1649 | ||
1650 | elif isinstance(propclass, hyperdb.Number): | |
1651 | if type(v) != type([]): | |
1652 | try : | |
1653 | v = v.split(',') | |
1654 | except AttributeError : | |
1655 | v = [v] | |
1656 | l.append((OTHER, k, [float(val) for val in v])) | |
1657 | ||
1658 | filterspec = l | |
1659 | ||
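| # filterspec is now a list of (TYPE, propname, value) tuples, e.g. | |
| # [('spec:link', 'status', ['1', '2'])] (illustrative values), ready | |
| # for the per-node scan below. | |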
1660 | # now, find all the nodes that are active and pass filtering | |
1661 | matches = [] | |
1662 | cldb = self.db.getclassdb(cn) | |
1663 | t = 0 | |
1664 | try: | |
1665 | # TODO: only full-scan once (use items()) | |
1666 | for nodeid in self.getnodeids(cldb): | |
1667 | node = self.db.getnode(cn, nodeid, cldb) | |
1668 | if self.db.RETIRED_FLAG in node: | |
1669 | continue | |
1670 | # apply filter | |
1671 | for t, k, v in filterspec: | |
1672 | # handle the id prop | |
1673 | if k == 'id': | |
1674 | if nodeid not in v: | |
1675 | break | |
1676 | continue | |
1677 | ||
1678 | # get the node value | |
1679 | nv = node.get(k, None) | |
1680 | ||
1681 | match = 0 | |
1682 | ||
1683 | # now apply the property filter | |
1684 | if t == LINK: | |
1685 | # link - if this node's property doesn't appear in the | |
1686 | # filterspec's nodeid list, skip it | |
1687 | match = nv in v | |
1688 | elif t == MULTILINK: | |
1689 | # multilink - this node matches if its property contains | |
1690 | # any of the nodeids in the filterspec (or, for an empty | |
1691 | # filterspec, if the property itself is empty) | |
1692 | nv = node.get(k, []) | |
1693 | ||
1694 | # check for matching the absence of multilink values | |
1695 | if not v: | |
1696 | match = not nv | |
1697 | else: | |
1698 | # otherwise, match if this node has any of the | |
1699 | # required values | |
1700 | for want in v: | |
1701 | if want in nv: | |
1702 | match = 1 | |
1703 | break | |
1704 | elif t == STRING: | |
1705 | if nv is None: | |
1706 | nv = '' | |
1707 | # RE search | |
1708 | match = v.search(nv) | |
1709 | elif t == DATE or t == INTERVAL: | |
1710 | if nv is None: | |
1711 | match = v is None | |
1712 | else: | |
1713 | if v.to_value: | |
1714 | if v.from_value <= nv and v.to_value >= nv: | |
1715 | match = 1 | |
1716 | else: | |
1717 | if v.from_value <= nv: | |
1718 | match = 1 | |
1719 | elif t == OTHER: | |
1720 | # straight value comparison for the other types | |
1721 | match = nv in v | |
1722 | if not match: | |
1723 | break | |
1724 | else: | |
1725 | matches.append([nodeid, node]) | |
1726 | ||
1727 | # filter based on full text search | |
1728 | if search_matches is not None: | |
1729 | k = [] | |
1730 | for v in matches: | |
1731 | if v[0] in search_matches: | |
1732 | k.append(v) | |
1733 | matches = k | |
1734 | ||
1735 | # add sorting information to the proptree | |
1736 | JPROPS = {'actor':1, 'activity':1, 'creator':1, 'creation':1} | |
1737 | children = [] | |
1738 | if proptree: | |
1739 | children = proptree.sortable_children() | |
1740 | for pt in children: | |
1741 | dir = pt.sort_direction | |
1742 | prop = pt.name | |
1743 | assert (dir and prop) | |
1744 | propclass = props[prop] | |
1745 | pt.sort_ids = [] | |
1746 | is_pointer = isinstance(propclass,(hyperdb.Link, | |
1747 | hyperdb.Multilink)) | |
1748 | if not is_pointer: | |
1749 | pt.sort_result = [] | |
1750 | try: | |
1751 | # cache the opened link class db, if needed. | |
1752 | lcldb = None | |
1753 | # cache the linked class items too | |
1754 | lcache = {} | |
1755 | ||
1756 | for entry in matches: | |
1757 | itemid = entry[-2] | |
1758 | item = entry[-1] | |
1759 | # handle the properties that might be "faked" | |
1760 | # also, handle possible missing properties | |
1761 | try: | |
1762 | v = item[prop] | |
1763 | except KeyError: | |
1764 | if prop in JPROPS: | |
1765 | # force lookup of the special journal prop | |
1766 | v = self.get(itemid, prop) | |
1767 | else: | |
1768 | # the node doesn't have a value for this | |
1769 | # property | |
1770 | v = None | |
1771 | if isinstance(propclass, hyperdb.Multilink): | |
1772 | v = [] | |
1773 | if prop == 'id': | |
1774 | v = int (itemid) | |
1775 | pt.sort_ids.append(v) | |
1776 | if not is_pointer: | |
1777 | pt.sort_result.append(v) | |
1778 | continue | |
1779 | ||
1780 | # missing (None) values are always sorted first | |
1781 | if v is None: | |
1782 | pt.sort_ids.append(v) | |
1783 | if not is_pointer: | |
1784 | pt.sort_result.append(v) | |
1785 | continue | |
1786 | ||
1787 | if isinstance(propclass, hyperdb.Link): | |
1788 | lcn = propclass.classname | |
1789 | link = self.db.classes[lcn] | |
1790 | key = link.orderprop() | |
1791 | child = pt.propdict[key] | |
1792 | if key!='id': | |
1793 | if v not in lcache: | |
1794 | # open the link class db if it's not already | |
1795 | if lcldb is None: | |
1796 | lcldb = self.db.getclassdb(lcn) | |
1797 | lcache[v] = self.db.getnode(lcn, v, lcldb) | |
1798 | r = lcache[v][key] | |
1799 | child.propdict[key].sort_ids.append(r) | |
1800 | else: | |
1801 | child.propdict[key].sort_ids.append(v) | |
1802 | pt.sort_ids.append(v) | |
1803 | if not is_pointer: | |
1804 | r = propclass.sort_repr(pt.parent.cls, v, pt.name) | |
1805 | pt.sort_result.append(r) | |
1806 | finally: | |
1807 | # if we opened the link class db, close it now | |
1808 | if lcldb is not None: | |
1809 | lcldb.close() | |
1810 | del lcache | |
1811 | finally: | |
1812 | cldb.close() | |
1813 | ||
1814 | # pull the id out of the individual entries | |
1815 | matches = [entry[-2] for entry in matches] | |
1816 | if __debug__: | |
1817 | self.db.stats['filtering'] += (time.time() - start_t) | |
1818 | return matches | |
1819 | ||
1820 | def count(self): | |
1821 | """Get the number of nodes in this class. | |
1822 | ||
1823 | If the returned integer is 'numnodes', the ids of all the nodes | |
1824 | in this class run from 1 to numnodes, and numnodes+1 will be the | |
1825 | id of the next node to be created in this class. | |
1826 | """ | |
1827 | return self.db.countnodes(self.classname) | |
1828 | ||
1829 | # Manipulating properties: | |
1830 | ||
1831 | def getprops(self, protected=1): | |
1832 | """Return a dictionary mapping property names to property objects. | |
1833 | If the "protected" flag is true, we include protected properties - | |
1834 | those which may not be modified. | |
1835 | ||
1836 | In addition to the actual properties on the node, this method | |
1837 | provides the "id", "creation", "activity", "creator" and "actor" | |
1838 | properties. | |
1840 | """ | |
1841 | d = self.properties.copy() | |
1842 | if protected: | |
1843 | d['id'] = hyperdb.String() | |
1844 | d['creation'] = hyperdb.Date() | |
1845 | d['activity'] = hyperdb.Date() | |
1846 | d['creator'] = hyperdb.Link('user') | |
1847 | d['actor'] = hyperdb.Link('user') | |
1848 | return d | |
1849 | ||
1850 | def addprop(self, **properties): | |
1851 | """Add properties to this class. | |
1852 | ||
1853 | The keyword arguments in 'properties' must map names to property | |
1854 | objects, or a TypeError is raised. None of the keys in 'properties' | |
1855 | may collide with the names of existing properties, or a ValueError | |
1856 | is raised before any properties have been added. | |
1857 | """ | |
1858 | for key in properties: | |
1859 | if key in self.properties: | |
1860 | raise ValueError(key) | |
1861 | self.properties.update(properties) | |
1862 | ||
1863 | def index(self, nodeid): | |
1864 | """ Add (or refresh) the node to search indexes """ | |
1865 | # find all the String properties that have indexme | |
1866 | for prop, propclass in self.getprops().iteritems(): | |
1867 | if isinstance(propclass, hyperdb.String) and propclass.indexme: | |
1868 | # index them under (classname, nodeid, property) | |
1869 | try: | |
1870 | value = str(self.get(nodeid, prop)) | |
1871 | except IndexError: | |
1872 | # node has been destroyed | |
1873 | continue | |
1874 | self.db.indexer.add_text((self.classname, nodeid, prop), value) | |
1875 | ||
1876 | # | |
1877 | # import / export support | |
1878 | # | |
1879 | def export_list(self, propnames, nodeid): | |
1880 | """ Export a node - generate a list of CSV-able data in the order | |
1881 | specified by propnames for the given node. | |
1882 | """ | |
1883 | properties = self.getprops() | |
1884 | l = [] | |
1885 | for prop in propnames: | |
1886 | proptype = properties[prop] | |
1887 | value = self.get(nodeid, prop) | |
1888 | # "marshal" data where needed | |
1889 | if value is None: | |
1890 | pass | |
1891 | elif isinstance(proptype, hyperdb.Date): | |
1892 | value = value.get_tuple() | |
1893 | elif isinstance(proptype, hyperdb.Interval): | |
1894 | value = value.get_tuple() | |
1895 | elif isinstance(proptype, hyperdb.Password): | |
1896 | value = str(value) | |
1897 | l.append(repr(value)) | |
1898 | ||
1899 | # append retired flag | |
1900 | l.append(repr(self.is_retired(nodeid))) | |
1901 | ||
1902 | return l | |
1903 | ||
1904 | def import_list(self, propnames, proplist): | |
1905 | """ Import a node - all information including "id" is present and | |
1906 | should not be sanity checked. Triggers are not triggered. The | |
1907 | journal should be initialised using the "creator" and "created" | |
1908 | information. | |
1909 | ||
1910 | Return the nodeid of the node imported. | |
1911 | """ | |
1912 | if self.db.journaltag is None: | |
1913 | raise hyperdb.DatabaseError(_('Database open read-only')) | |
1914 | properties = self.getprops() | |
1915 | ||
1916 | # make the new node's property map | |
1917 | d = {} | |
1918 | newid = None | |
1919 | for i in range(len(propnames)): | |
1920 | # Figure the property for this column | |
1921 | propname = propnames[i] | |
1922 | ||
1923 | # Use eval to reverse the repr() used to output the CSV | |
1924 | value = eval(proplist[i]) | |
1925 | ||
1926 | # "unmarshal" where necessary | |
1927 | if propname == 'id': | |
1928 | newid = value | |
1929 | continue | |
1930 | elif propname == 'is retired': | |
1931 | # is the item retired? | |
1932 | if int(value): | |
1933 | d[self.db.RETIRED_FLAG] = 1 | |
1934 | continue | |
1935 | elif value is None: | |
1936 | d[propname] = None | |
1937 | continue | |
1938 | ||
1939 | prop = properties[propname] | |
1940 | if isinstance(prop, hyperdb.Date): | |
1941 | value = date.Date(value) | |
1942 | elif isinstance(prop, hyperdb.Interval): | |
1943 | value = date.Interval(value) | |
1944 | elif isinstance(prop, hyperdb.Password): | |
1945 | pwd = password.Password() | |
1946 | pwd.unpack(value) | |
1947 | value = pwd | |
1948 | d[propname] = value | |
1949 | ||
1950 | # get a new id if necessary | |
1951 | if newid is None: | |
1952 | newid = self.db.newid(self.classname) | |
1953 | ||
1954 | # add the node and journal | |
1955 | self.db.addnode(self.classname, newid, d) | |
1956 | return newid | |
1957 | ||
1958 | def export_journals(self): | |
1959 | """Export a class's journal - generate a list of lists of | |
1960 | CSV-able data: | |
1961 | ||
1962 | nodeid, date, user, action, params | |
1963 | ||
1964 | No heading here - the columns are fixed. | |
1965 | """ | |
1966 | properties = self.getprops() | |
1967 | r = [] | |
1968 | for nodeid in self.getnodeids(): | |
1969 | for nodeid, date, user, action, params in self.history(nodeid): | |
1970 | date = date.get_tuple() | |
1971 | if action == 'set': | |
1972 | export_data = {} | |
1973 | for propname, value in params.iteritems(): | |
1974 | if propname not in properties: | |
1975 | # property no longer in the schema | |
1976 | continue | |
1977 | ||
1978 | prop = properties[propname] | |
1979 | # make sure the params are eval()'able | |
1980 | if value is None: | |
1981 | pass | |
1982 | elif isinstance(prop, hyperdb.Date): | |
1983 | # this is a hack - some dates are stored as strings | |
1984 | if not isinstance(value, type('')): | |
1985 | value = value.get_tuple() | |
1986 | elif isinstance(prop, hyperdb.Interval): | |
1987 | # hack too - some intervals are stored as strings | |
1988 | if not isinstance(value, type('')): | |
1989 | value = value.get_tuple() | |
1990 | elif isinstance(prop, hyperdb.Password): | |
1991 | value = str(value) | |
1992 | export_data[propname] = value | |
1993 | params = export_data | |
1994 | r.append([repr(nodeid), repr(date), repr(user), | |
1995 | repr(action), repr(params)]) | |
1996 | return r | |
1997 | ||
1998 | def import_journals(self, entries): | |
1999 | """Import a class's journal. | |
2000 | ||
2001 | Uses setjournal() to set the journal for each item.""" | |
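| # Usage sketch: rows produced by export_journals() can be fed back | |
| # unchanged, e.g. db.issue.import_journals(rows); each field is | |
| # eval()'d, dates/intervals/passwords are reconstructed, and the | |
| # journal is stored with setjournal(). | |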
2002 | properties = self.getprops() | |
2003 | d = {} | |
2004 | for l in entries: | |
2005 | nodeid, jdate, user, action, params = tuple(map(eval, l)) | |
2006 | r = d.setdefault(nodeid, []) | |
2007 | if action == 'set': | |
2008 | for propname, value in params.iteritems(): | |
2009 | prop = properties[propname] | |
2010 | if value is None: | |
2011 | pass | |
2012 | elif isinstance(prop, hyperdb.Date): | |
2013 | value = date.Date(value) | |
2014 | elif isinstance(prop, hyperdb.Interval): | |
2015 | value = date.Interval(value) | |
2016 | elif isinstance(prop, hyperdb.Password): | |
2017 | pwd = password.Password() | |
2018 | pwd.unpack(value) | |
2019 | value = pwd | |
2020 | params[propname] = value | |
2021 | r.append((nodeid, date.Date(jdate), user, action, params)) | |
2022 | ||
2023 | for nodeid, l in d.iteritems(): | |
2024 | self.db.setjournal(self.classname, nodeid, l) | |
2025 | ||
2026 | class FileClass(hyperdb.FileClass, Class): | |
2027 | """This class defines a large chunk of data. To support this, it has a | |
2028 | mandatory String property "content" which is typically saved off | |
2029 | externally to the hyperdb. | |
2030 | ||
2031 | The default MIME type of this data is defined by the | |
2032 | "default_mime_type" class attribute, which may be overridden by each | |
2033 | node if the class defines a "type" String property. | |
2034 | """ | |
2035 | def __init__(self, db, classname, **properties): | |
2036 | """The newly-created class automatically includes the "content" | |
2037 | and "type" properties. | |
2038 | """ | |
2039 | if 'content' not in properties: | |
2040 | properties['content'] = hyperdb.String(indexme='yes') | |
2041 | if 'type' not in properties: | |
2042 | properties['type'] = hyperdb.String() | |
2043 | Class.__init__(self, db, classname, **properties) | |
2044 | ||
2045 | def create(self, **propvalues): | |
2046 | """ Snarf the "content" propvalue and store in a file | |
2047 | """ | |
2048 | # we need to fire the auditors now, or the content property won't | |
2049 | # be in propvalues for the auditors to play with | |
2050 | self.fireAuditors('create', None, propvalues) | |
2051 | ||
2052 | # now remove the content property so it's not stored in the db | |
2053 | content = propvalues['content'] | |
2054 | del propvalues['content'] | |
2055 | ||
2056 | # make sure we have a MIME type | |
2057 | mime_type = propvalues.get('type', self.default_mime_type) | |
2058 | ||
2059 | # do the database create | |
2060 | newid = self.create_inner(**propvalues) | |
2061 | ||
2062 | # store off the content as a file | |
2063 | self.db.storefile(self.classname, newid, None, content) | |
2064 | ||
2065 | # fire reactors | |
2066 | self.fireReactors('create', newid, None) | |
2067 | ||
2068 | return newid | |
2069 | ||
2070 | def get(self, nodeid, propname, default=_marker, cache=1): | |
2071 | """ Trap the content propname and get it from the file | |
2072 | ||
2073 | 'cache' exists for backwards compatibility, and is not used. | |
2074 | """ | |
2075 | poss_msg = 'Possibly an access right configuration problem.' | |
2076 | if propname == 'content': | |
2077 | try: | |
2078 | return self.db.getfile(self.classname, nodeid, None) | |
2079 | except IOError, strerror: | |
2080 | # XXX by catching this we don't see an error in the log. | |
2081 | return 'ERROR reading file: %s%s\n%s\n%s'%( | |
2082 | self.classname, nodeid, poss_msg, strerror) | |
2083 | if default is not _marker: | |
2084 | return Class.get(self, nodeid, propname, default) | |
2085 | else: | |
2086 | return Class.get(self, nodeid, propname) | |
2087 | ||
2088 | def set(self, itemid, **propvalues): | |
2089 | """ Snarf the "content" propvalue and update it in a file | |
2090 | """ | |
2091 | self.fireAuditors('set', itemid, propvalues) | |
2092 | ||
2093 | # create the oldvalues dict - fill in any missing values | |
2094 | oldvalues = copy.deepcopy(self.db.getnode(self.classname, itemid)) | |
2095 | for name, prop in self.getprops(protected=0).iteritems(): | |
2096 | if name in oldvalues: | |
2097 | continue | |
2098 | if isinstance(prop, hyperdb.Multilink): | |
2099 | oldvalues[name] = [] | |
2100 | else: | |
2101 | oldvalues[name] = None | |
2102 | ||
2103 | # now remove the content property so it's not stored in the db | |
2104 | content = None | |
2105 | if 'content' in propvalues: | |
2106 | content = propvalues['content'] | |
2107 | del propvalues['content'] | |
2108 | ||
2109 | # do the database update | |
2110 | propvalues = self.set_inner(itemid, **propvalues) | |
2111 | ||
2112 | # do content? | |
2113 | if content: | |
2114 | # store and possibly index | |
2115 | self.db.storefile(self.classname, itemid, None, content) | |
2116 | if self.properties['content'].indexme: | |
2117 | mime_type = self.get(itemid, 'type', self.default_mime_type) | |
2118 | self.db.indexer.add_text((self.classname, itemid, 'content'), | |
2119 | content, mime_type) | |
2120 | propvalues['content'] = content | |
2121 | ||
2122 | # fire reactors | |
2123 | self.fireReactors('set', itemid, oldvalues) | |
2124 | return propvalues | |
2125 | ||
2126 | def index(self, nodeid): | |
2127 | """ Add (or refresh) the node to search indexes. | |
2128 | ||
2129 | Use the content-type property for the content property. | |
2130 | """ | |
2131 | # find all the String properties that have indexme | |
2132 | for prop, propclass in self.getprops().iteritems(): | |
2133 | if prop == 'content' and propclass.indexme: | |
2134 | mime_type = self.get(nodeid, 'type', self.default_mime_type) | |
2135 | self.db.indexer.add_text((self.classname, nodeid, 'content'), | |
2136 | str(self.get(nodeid, 'content')), mime_type) | |
2137 | elif isinstance(propclass, hyperdb.String) and propclass.indexme: | |
2138 | # index them under (classname, nodeid, property) | |
2139 | try: | |
2140 | value = str(self.get(nodeid, prop)) | |
2141 | except IndexError: | |
2142 | # node has been destroyed | |
2143 | continue | |
2144 | self.db.indexer.add_text((self.classname, nodeid, prop), value) | |
2145 | ||
2146 | # deviation from spec - was called ItemClass | |
2147 | class IssueClass(Class, roundupdb.IssueClass): | |
2148 | # Overridden methods: | |
2149 | def __init__(self, db, classname, **properties): | |
2150 | """The newly-created class automatically includes the "messages", | |
2151 | "files", "nosy", and "superseder" properties. If the 'properties' | |
2152 | dictionary attempts to specify any of these properties or a | |
2153 | "creation" or "activity" property, a ValueError is raised. | |
2154 | """ | |
2155 | if 'title' not in properties: | |
2156 | properties['title'] = hyperdb.String(indexme='yes') | |
2157 | if 'messages' not in properties: | |
2158 | properties['messages'] = hyperdb.Multilink("msg") | |
2159 | if 'files' not in properties: | |
2160 | properties['files'] = hyperdb.Multilink("file") | |
2161 | if 'nosy' not in properties: | |
2162 | # note: journalling is turned off as it really just wastes | |
2163 | # space. this behaviour may be overridden in an instance | |
2164 | properties['nosy'] = hyperdb.Multilink("user", do_journal="no") | |
2165 | if 'superseder' not in properties: | |
2166 | properties['superseder'] = hyperdb.Multilink(classname) | |
2167 | Class.__init__(self, db, classname, **properties) | |
2168 | ||
2169 | # vim: set et sts=4 sw=4 : |