Package gen :: Package db :: Module dbdir
[frames] | no frames]

Source Code for Module gen.db.dbdir

   1  # 
   2  # Gramps - a GTK+/GNOME based genealogy program 
   3  # 
   4  # Copyright (C) 2000-2008  Donald N. Allingham 
   5  # 
   6  # This program is free software; you can redistribute it and/or modify 
   7  # it under the terms of the GNU General Public License as published by 
   8  # the Free Software Foundation; either version 2 of the License, or 
   9  # (at your option) any later version. 
  10  # 
  11  # This program is distributed in the hope that it will be useful, 
  12  # but WITHOUT ANY WARRANTY; without even the implied warranty of 
  13  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the 
  14  # GNU General Public License for more details. 
  15  # 
  16  # You should have received a copy of the GNU General Public License 
  17  # along with this program; if not, write to the Free Software 
  18  # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA 
  19  # 
  20   
  21  # $Id: dbdir.py 10236 2008-03-09 18:10:13Z bmcage $ 
  22   
  23  """ 
  24  Provide the Berkeley DB (DBDir) database backend for GRAMPS. 
  25  This is used since GRAMPS version 3.0 
  26  """ 
  27   
  28  #------------------------------------------------------------------------- 
  29  # 
  30  # Standard python modules 
  31  # 
  32  #------------------------------------------------------------------------- 
  33  import cPickle as pickle 
  34  import os 
  35  import time 
  36  from types import InstanceType 
  37   
  38  from gettext import gettext as _ 
  39  from bsddb import dbshelve, db 
  40  import logging 
  41   
  42  log = logging.getLogger(".GrampsDb") 
  43   
  44  #------------------------------------------------------------------------- 
  45  # 
  46  # Gramps modules 
  47  # 
  48  #------------------------------------------------------------------------- 
  49  from gen.lib import (GenderStats, Person, Family, Event, Place, Source,  
  50                       MediaObject, Repository, Note) 
  51  from gen.db import (GrampsDbBase, KEY_TO_CLASS_MAP, CLASS_TO_KEY_MAP,  
  52                      REFERENCE_KEY, Transaction) 
  53  from gen.db.exceptions import FileVersionError 
  54  from BasicUtils import UpdateCallback 
  55  from gen.db.cursor import GrampsCursor 
  56  import Errors 
  57   
  58  _MINVERSION = 9 
  59  _DBVERSION = 13 
  60   
  61  IDTRANS     = "person_id" 
  62  FIDTRANS    = "family_id" 
  63  PIDTRANS    = "place_id" 
  64  OIDTRANS    = "media_id" 
  65  EIDTRANS    = "event_id" 
  66  RIDTRANS    = "repo_id" 
  67  NIDTRANS    = "note_id" 
  68  SIDTRANS    = "source_id" 
  69  SURNAMES    = "surnames" 
  70  NAME_GROUP  = "name_group" 
  71  META        = "meta_data" 
  72   
  73  FAMILY_TBL  = "family" 
  74  PLACES_TBL  = "place" 
  75  SOURCES_TBL = "source" 
  76  MEDIA_TBL   = "media" 
  77  EVENTS_TBL  = "event" 
  78  PERSON_TBL  = "person" 
  79  REPO_TBL    = "repo" 
  80  NOTE_TBL    = "note" 
  81   
  82  REF_MAP     = "reference_map" 
  83  REF_PRI     = "primary_map" 
  84  REF_REF     = "referenced_map" 
  85   
  86  DBERRS      = (db.DBRunRecoveryError, db.DBAccessError,  
  87                 db.DBPageNotFoundError, db.DBInvalidArgError) 
  88   
  89   
def find_surname(key, data):
    """Secondary-index callback: return the surname field of a serialized
    person record as a str (bsddb secondary keys must be plain strings)."""
    # data[3] is the serialized primary-name structure; per this index's
    # purpose, field 5 of it holds the surname -- confirm against gen.lib.
    primary_name = data[3]
    return str(primary_name[5])
def find_idmap(key, data):
    """Secondary-index callback: return the gramps ID of a serialized
    primary-object record as a str."""
    gramps_id = data[1]
    return str(gramps_id)

# Secondary database key lookups for reference_map table
# reference_map data values are of the form:
#   ((primary_object_class_name, primary_object_handle),
#    (referenced_object_class_name, referenced_object_handle))
def find_primary_handle(key, data):
    """Secondary-index callback: return the primary object's handle from a
    reference_map row (see the row layout comment above)."""
    primary_ref = data[0]
    return str(primary_ref[1])
def find_referenced_handle(key, data):
    """Secondary-index callback: return the referenced object's handle from
    a reference_map row (see the row layout comment above)."""
    referenced_ref = data[1]
    return str(referenced_ref[1])
class GrampsDBDirCursor(GrampsCursor):
    """Cursor over a DBShelf table that unpickles values on the fly."""

    def __init__(self, source, txn=None):
        # DBShelf keeps the underlying bsddb DB in its .db attribute.
        self.cursor = source.db.cursor(txn)
        self.source = source

    def first(self):
        """Position at the first record; return (key, data) or None."""
        pair = self.cursor.first()
        if not pair:
            return None
        return (pair[0], pickle.loads(pair[1]))

    def next(self):
        """Advance to the next record; return (key, data) or None."""
        pair = self.cursor.next()
        if not pair:
            return None
        return (pair[0], pickle.loads(pair[1]))

    def close(self):
        """Release the underlying bsddb cursor."""
        self.cursor.close()

    def delete(self):
        """Delete the record at the current cursor position."""
        self.cursor.delete()

    def get_length(self):
        """Return the number of data items in the underlying table."""
        return self.source.stat()['ndata']
class GrampsDBDirAssocCursor(GrampsCursor):
    """Cursor over a plain bsddb DB (associated/secondary table),
    unpickling values on the fly."""

    def __init__(self, source, txn=None):
        # Unlike GrampsDBDirCursor, source is a raw DB, not a DBShelf.
        self.cursor = source.cursor(txn)
        self.source = source

    def first(self):
        """Position at the first record; return (key, data) or None."""
        pair = self.cursor.first()
        if not pair:
            return None
        return (pair[0], pickle.loads(pair[1]))

    def next(self):
        """Advance to the next record; return (key, data) or None."""
        pair = self.cursor.next()
        if not pair:
            return None
        return (pair[0], pickle.loads(pair[1]))

    def close(self):
        """Release the underlying bsddb cursor."""
        self.cursor.close()

    def delete(self):
        """Delete the record at the current cursor position."""
        self.cursor.delete()

    def get_length(self):
        """Return the number of data items in the underlying table."""
        return self.source.stat()['ndata']
class GrampsDBDirDupCursor(GrampsDBDirAssocCursor):
    """Cursor that includes handling for duplicate keys."""

    def set(self, key):
        # Position at the first record matching key; keys must be plain
        # strings for bsddb, hence str().
        return self.cursor.set(str(key))

    def next_dup(self):
        # Advance to the next record sharing the current key, or None.
        return self.cursor.next_dup()

#-------------------------------------------------------------------------
#
# GrampsDBDir
#
#-------------------------------------------------------------------------
175 -class GrampsDBDir(GrampsDbBase, UpdateCallback):
176 """ 177 GRAMPS database object. 178 179 This object is a base class for other objects. 180 """ 181
    def __init__(self):
        """Create a new GrampsDB."""

        GrampsDbBase.__init__(self)
        # Currently active BSDDB transaction; None when none is running.
        self.txn = None
        # Set to True once the secondary index tables have been connected.
        self.secondary_connected = False
    def __open_flags(self):
        """Return the BSDDB flags used to open writable, txn-aware tables."""
        return db.DB_CREATE | db.DB_AUTO_COMMIT
    def __open_table(self, file_name, table_name, dbtype=db.DB_HASH):
        """Open (creating if needed) table *table_name* as a file inside
        the database directory *file_name*; return the DBShelf.

        A DBShelf pickles/unpickles stored values transparently.
        """
        dbmap = dbshelve.DBShelf(self.env)
        dbmap.db.set_pagesize(16384)

        fname = os.path.join(file_name, table_name + ".db")

        if self.readonly:
            dbmap.open(fname, table_name, dbtype, db.DB_RDONLY)
        else:
            # 0666: world read/write before umask (Python 2 octal literal)
            dbmap.open(fname, table_name, dbtype, self.__open_flags(), 0666)
        return dbmap
    def all_handles(self, table):
        """Return a list of every handle (key) present in *table*."""
        return table.keys(self.txn)
    def __log_error(self):
        """Flag the database as damaged after a BSDDB failure.

        Creates an empty ``need_recover`` marker file in the database
        directory and best-effort removes the lock file.
        """
        mypath = os.path.join(self.get_save_path(),"need_recover")
        ofile = open(mypath, "w")
        ofile.close()
        try:
            # NOTE(review): clear_lock_file is not defined in this chunk --
            # presumably a module-level helper defined elsewhere in the file.
            clear_lock_file(self.get_save_path())
        except:
            # Deliberate best-effort: failing to clear the lock must not
            # mask the original database error being handled by the caller.
            pass
    def __get_cursor(self, table):
        """Return a GrampsDBDirCursor over *table*, converting BSDDB
        errors to Errors.DbError after flagging the DB for recovery."""
        try:
            return GrampsDBDirCursor(table, self.txn)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def get_person_cursor(self):
        """Return a cursor over the Person table."""
        return self.__get_cursor(self.person_map)
    def get_family_cursor(self):
        """Return a cursor over the Family table."""
        return self.__get_cursor(self.family_map)
    def get_event_cursor(self):
        """Return a cursor over the Event table."""
        return self.__get_cursor(self.event_map)
    def get_place_cursor(self):
        """Return a cursor over the Place table."""
        return self.__get_cursor(self.place_map)
    def get_source_cursor(self):
        """Return a cursor over the Source table."""
        return self.__get_cursor(self.source_map)
    def get_media_cursor(self):
        """Return a cursor over the MediaObject table."""
        return self.__get_cursor(self.media_map)
    def get_repository_cursor(self):
        """Return a cursor over the Repository table."""
        return self.__get_cursor(self.repository_map)
    def get_note_cursor(self):
        """Return a cursor over the Note table."""
        return self.__get_cursor(self.note_map)
247 - def __has_handle(self, table, handle):
248 try: 249 return table.get(str(handle), txn=self.txn) != None 250 except DBERRS, msg: 251 self.__log_error() 252 raise Errors.DbError(msg)
253
    def has_person_handle(self, handle):
        """
        Return True if the handle exists in the current Person database.
        """
        return self.__has_handle(self.person_map, handle)
    def has_family_handle(self, handle):
        """
        Return True if the handle exists in the current Family database.
        """
        return self.__has_handle(self.family_map, handle)
    def has_object_handle(self, handle):
        """
        Return True if the handle exists in the current MediaObject database.
        """
        return self.__has_handle(self.media_map, handle)
    def has_repository_handle(self, handle):
        """
        Return True if the handle exists in the current Repository database.
        """
        return self.__has_handle(self.repository_map, handle)
    def has_note_handle(self, handle):
        """
        Return True if the handle exists in the current Note database.
        """
        return self.__has_handle(self.note_map, handle)
    def has_event_handle(self, handle):
        """
        Return True if the handle exists in the current Event database.
        """
        return self.__has_handle(self.event_map, handle)
    def has_place_handle(self, handle):
        """
        Return True if the handle exists in the current Place database.
        """
        return self.__has_handle(self.place_map, handle)
    def has_source_handle(self, handle):
        """
        Return True if the handle exists in the current Source database.
        """
        return self.__has_handle(self.source_map, handle)
    def __get_raw_data(self, table, handle):
        """Return the stored record for *handle* from *table* (None if
        absent), converting BSDDB errors to Errors.DbError."""
        try:
            return table.get(str(handle), txn=self.txn)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def get_raw_person_data(self, handle):
        """Return the raw (unprocessed) Person record for *handle*."""
        return self.__get_raw_data(self.person_map, handle)
    def get_raw_family_data(self, handle):
        """Return the raw (unprocessed) Family record for *handle*."""
        return self.__get_raw_data(self.family_map, handle)
    def get_raw_object_data(self, handle):
        """Return the raw (unprocessed) MediaObject record for *handle*."""
        return self.__get_raw_data(self.media_map, handle)
    def get_raw_place_data(self, handle):
        """Return the raw (unprocessed) Place record for *handle*."""
        return self.__get_raw_data(self.place_map, handle)
    def get_raw_event_data(self, handle):
        """Return the raw (unprocessed) Event record for *handle*."""
        return self.__get_raw_data(self.event_map, handle)
    def get_raw_source_data(self, handle):
        """Return the raw (unprocessed) Source record for *handle*."""
        return self.__get_raw_data(self.source_map, handle)
    def get_raw_repository_data(self, handle):
        """Return the raw (unprocessed) Repository record for *handle*."""
        return self.__get_raw_data(self.repository_map, handle)
    def get_raw_note_data(self, handle):
        """Return the raw (unprocessed) Note record for *handle*."""
        return self.__get_raw_data(self.note_map, handle)

    # cursors for lookups in the reference_map for back reference
    # lookups. The reference_map has three indexes:
    # the main index: a tuple of (primary_handle, referenced_handle)
    # the primary_handle index: the primary_handle
    # the referenced_handle index: the referenced_handle
    # the main index is unique, the others allow duplicate entries.
    def get_reference_map_cursor(self):
        """Return a cursor over the main reference_map table."""
        try:
            return GrampsDBDirAssocCursor(self.reference_map, self.txn)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def get_reference_map_primary_cursor(self):
        """Return a duplicate-key cursor over the primary-handle index of
        the reference_map."""
        try:
            return GrampsDBDirDupCursor(self.reference_map_primary_map,
                                        self.txn)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def get_reference_map_referenced_cursor(self):
        """Return a duplicate-key cursor over the referenced-handle index
        of the reference_map."""
        try:
            return GrampsDBDirDupCursor(self.reference_map_referenced_map,
                                        self.txn)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)

    # These are overriding the GrampsDbBase's methods of saving metadata
    # because we now have txn-capable metadata table
    def set_default_person_handle(self, handle):
        """Set the default Person to the passed instance, converting
        BSDDB errors to Errors.DbError."""
        try:
            return self.__set_default_person_handle(handle)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def __set_default_person_handle(self, handle):
        """Set the default Person to the passed instance."""
        if not self.readonly:
            # Start transaction
            the_txn = self.env.txn_begin()
            self.metadata.put('default', str(handle), txn=the_txn)
            the_txn.commit()
    def get_default_person(self):
        """Return the default Person of the database, converting BSDDB
        errors to Errors.DbError."""
        try:
            return self.__get_default_person()
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def __get_default_person(self):
        """Return the default Person of the database."""
        person = self.get_person_from_handle(self.get_default_handle())
        if person:
            return person
        elif (self.metadata) and (not self.readonly):
            # Stale default handle: clear it so we stop resolving it.
            # Start transaction
            the_txn = self.env.txn_begin()
            self.metadata.put('default', None, txn=the_txn)
            the_txn.commit()
        return None
    def set_mediapath(self, path):
        """Set the default media path for database, path should be utf-8."""
        if self.metadata and not self.readonly:
            # Start transaction
            the_txn = self.env.txn_begin()
            self.metadata.put('mediapath', path, txn=the_txn)
            the_txn.commit()
    def set_column_order(self, col_list, name):
        """Store the view column order *col_list* under metadata key *name*."""
        if self.metadata and not self.readonly:
            # Start transaction
            the_txn = self.env.txn_begin()
            self.metadata.put(name, col_list, txn=the_txn)
            the_txn.commit()
    def version_supported(self):
        """Return True if the on-disk schema version is within the range
        this code can work with (_MINVERSION .. _DBVERSION)."""
        try:
            # Missing 'version' key is treated as 0, i.e. unsupported.
            dbversion = self.metadata.get('version', default=0)
            return ((dbversion <= _DBVERSION) and (dbversion >= _MINVERSION))
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def need_upgrade(self):
        """Return True if the (writable) database uses an older schema
        version than the current _DBVERSION."""
        try:
            dbversion = self.metadata.get('version', default=0)
            return not self.readonly and dbversion < _DBVERSION
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def load(self, name, callback, mode="w"):
        """Open the database stored in directory *name*; return 1 on success.

        Falls back to read-only mode when any table file exists but is not
        writable.  BSDDB errors are converted to Errors.DbError.
        """
        try:
            if self.__check_readonly(name):
                mode = "r"
            # NOTE(review): write_lock_file is not defined in this chunk --
            # presumably a module-level helper defined elsewhere in the file.
            write_lock_file(name)
            return self.__load(name, callback, mode)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
440 - def __check_readonly(self, name):
441 for base in [FAMILY_TBL, PLACES_TBL, SOURCES_TBL, MEDIA_TBL, 442 EVENTS_TBL, PERSON_TBL, REPO_TBL, NOTE_TBL, REF_MAP, META]: 443 path = os.path.join(name, base + ".db") 444 if os.path.isfile(path) and not os.access(path, os.W_OK): 445 return True 446 return False
447
448 - def __load(self, name, callback, mode="w"):
449 450 if self.db_is_open: 451 self.close() 452 453 self.readonly = mode == "r" 454 455 if callback: 456 callback(12) 457 458 self.full_name = os.path.abspath(name) 459 self.path = self.full_name 460 self.brief_name = os.path.basename(name) 461 462 self.env = db.DBEnv() 463 self.env.set_cachesize(0, 0x4000000) # 32MB 464 465 # These env settings are only needed for Txn environment 466 self.env.set_lk_max_locks(25000) 467 self.env.set_lk_max_objects(25000) 468 self.env.set_flags(db.DB_LOG_AUTOREMOVE, 1) # clean up unused logs 469 470 # The DB_PRIVATE flag must go if we ever move to multi-user setup 471 env_flags = db.DB_CREATE | db.DB_PRIVATE |\ 472 db.DB_INIT_MPOOL | db.DB_INIT_LOCK |\ 473 db.DB_INIT_LOG | db.DB_INIT_TXN | db.DB_THREAD 474 475 # As opposed to before, we always try recovery on databases 476 # in _GrampsBSDDB.py we only do that on existing filenames 477 env_flags = env_flags | db.DB_RECOVER 478 479 # Environment name is now based on the filename 480 env_name = name 481 482 self.env.open(env_name, env_flags) 483 self.env.txn_checkpoint() 484 485 if callback: 486 callback(25) 487 self.metadata = self.__open_table(self.full_name, META) 488 489 # If we cannot work with this DB version, 490 # it makes no sense to go further 491 if not self.version_supported: 492 self.__close_early() 493 494 self.family_map = self.__open_table(self.full_name, FAMILY_TBL) 495 self.place_map = self.__open_table(self.full_name, PLACES_TBL) 496 self.source_map = self.__open_table(self.full_name, SOURCES_TBL) 497 self.media_map = self.__open_table(self.full_name, MEDIA_TBL) 498 self.event_map = self.__open_table(self.full_name, EVENTS_TBL) 499 self.person_map = self.__open_table(self.full_name, PERSON_TBL) 500 self.repository_map = self.__open_table(self.full_name, REPO_TBL) 501 self.note_map = self.__open_table(self.full_name, NOTE_TBL) 502 self.reference_map = self.__open_table(self.full_name, REF_MAP, 503 dbtype=db.DB_BTREE) 504 if callback: 505 callback(37) 506 507 
self.name_group = db.DB(self.env) 508 self.name_group.set_flags(db.DB_DUP) 509 if self.readonly: 510 self.name_group.open(_mkname(self.full_name, NAME_GROUP), 511 NAME_GROUP, db.DB_HASH, flags=db.DB_RDONLY) 512 else: 513 self.name_group.open(_mkname(self.full_name, NAME_GROUP), 514 NAME_GROUP, db.DB_HASH, 515 flags=self.__open_flags()) 516 self.__load_metadata() 517 518 gstats = self.metadata.get('gender_stats', default=None) 519 520 if not self.readonly: 521 # Start transaction 522 the_txn = self.env.txn_begin() 523 524 if gstats == None: 525 # New database. Set up the current version. 526 self.metadata.put('version', _DBVERSION, txn=the_txn) 527 elif not self.metadata.has_key('version'): 528 # Not new database, but the version is missing. 529 # Use 0, but it is likely to fail anyway. 530 self.metadata.put('version', 0, txn=the_txn) 531 532 the_txn.commit() 533 534 self.genderStats = GenderStats(gstats) 535 536 # Here we take care of any changes in the tables related to new code. 537 # If secondary indices change, then they should removed 538 # or rebuilt by upgrade as well. In any case, the 539 # self.secondary_connected flag should be set accordingly. 540 541 if self.need_upgrade(): 542 self.gramps_upgrade(callback) 543 544 if callback: 545 callback(50) 546 547 if not self.secondary_connected: 548 self.__connect_secondary() 549 550 if callback: 551 callback(75) 552 553 self.open_undodb() 554 self.db_is_open = True 555 556 if callback: 557 callback(87) 558 559 # Re-set the undo history to a fresh session start 560 self.undoindex = -1 561 self.translist = [None] * len(self.translist) 562 self.abort_possible = True 563 self.undo_history_timestamp = time.time() 564 565 return 1
566
    def open_undodb(self):
        """
        Override method from GrampsDbBase because in DIR setup we want the
        undo database to be inside the dir.
        """
        if not self.readonly:
            self.undolog = os.path.join(self.full_name, "undo.db")
            self.undodb = db.DB()
            self.undodb.open(self.undolog, db.DB_RECNO, db.DB_CREATE)
    def load_from(self, other_database, filename, callback):
        """Create this database at *filename* and populate it with the
        contents of *other_database*; return 1 on success."""
        try:
            self.load(filename, callback)
            # Imported lazily to avoid a circular import at module level.
            from gen.utils import db_copy
            db_copy(other_database, self, callback)
            return 1
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
587 - def __load_metadata(self):
588 # name display formats 589 self.name_formats = self.metadata.get('name_formats', default=[]) 590 # upgrade formats if they were saved in the old way 591 for format_ix in range(len(self.name_formats)): 592 format = self.name_formats[format_ix] 593 if len(format) == 3: 594 format = format + (True,) 595 self.name_formats[format_ix] = format 596 597 # database owner 598 try: 599 owner_data = self.metadata.get('researcher') 600 if owner_data: 601 self.owner.unserialize(owner_data) 602 except ImportError: #handle problems with pre-alpha 3.0 603 pass 604 605 # bookmarks 606 self.bookmarks.set(self.metadata.get('bookmarks', default=[])) 607 self.family_bookmarks.set(self.metadata.get('family_bookmarks', 608 default=[])) 609 self.event_bookmarks.set(self.metadata.get('event_bookmarks', 610 default=[])) 611 self.source_bookmarks.set(self.metadata.get('source_bookmarks', 612 default=[])) 613 self.repo_bookmarks.set(self.metadata.get('repo_bookmarks', 614 default=[])) 615 self.media_bookmarks.set(self.metadata.get('media_bookmarks', 616 default=[])) 617 self.place_bookmarks.set(self.metadata.get('place_bookmarks', 618 default=[])) 619 self.note_bookmarks.set(self.metadata.get('note_bookmarks', 620 default=[])) 621 622 # Custom type values 623 self.family_event_names = set(self.metadata.get('fevent_names', 624 default=[])) 625 self.individual_event_names = set(self.metadata.get('pevent_names', 626 default=[])) 627 self.family_attributes = set(self.metadata.get('fattr_names', 628 default=[])) 629 self.individual_attributes = set(self.metadata.get('pattr_names', 630 default=[])) 631 self.marker_names = set(self.metadata.get('marker_names', default=[])) 632 self.child_ref_types = set(self.metadata.get('child_refs', 633 default=[])) 634 self.family_rel_types = set(self.metadata.get('family_rels', 635 default=[])) 636 self.event_role_names = set(self.metadata.get('event_roles', 637 default=[])) 638 self.name_types = set(self.metadata.get('name_types', default=[])) 639 
self.repository_types = set(self.metadata.get('repo_types', 640 default=[])) 641 self.note_types = set(self.metadata.get('note_types', 642 default=[])) 643 self.source_media_types = set(self.metadata.get('sm_types', 644 default=[])) 645 self.url_types = set(self.metadata.get('url_types', default=[])) 646 self.media_attributes = set(self.metadata.get('mattr_names', 647 default=[])) 648 649 # surname list 650 self.surname_list = self.metadata.get('surname_list', default=[])
651
652 - def __connect_secondary(self):
653 """ 654 Connect or creates secondary index tables. 655 656 It assumes that the tables either exist and are in the right 657 format or do not exist (in which case they get created). 658 659 It is the responsibility of upgrade code to either create 660 or remove invalid secondary index tables. 661 """ 662 663 # index tables used just for speeding up searches 664 if self.readonly: 665 table_flags = db.DB_RDONLY 666 else: 667 table_flags = self.__open_flags() 668 669 self.surnames = db.DB(self.env) 670 self.surnames.set_flags(db.DB_DUP | db.DB_DUPSORT) 671 self.surnames.open(_mkname(self.full_name, SURNAMES), SURNAMES, 672 db.DB_BTREE, flags=table_flags) 673 674 self.id_trans = db.DB(self.env) 675 self.id_trans.set_flags(db.DB_DUP) 676 self.id_trans.open(_mkname(self.full_name, IDTRANS), IDTRANS, 677 db.DB_HASH, flags=table_flags) 678 679 self.fid_trans = db.DB(self.env) 680 self.fid_trans.set_flags(db.DB_DUP) 681 self.fid_trans.open(_mkname(self.full_name, FIDTRANS), FIDTRANS, 682 db.DB_HASH, flags=table_flags) 683 684 self.eid_trans = db.DB(self.env) 685 self.eid_trans.set_flags(db.DB_DUP) 686 self.eid_trans.open(_mkname(self.full_name, EIDTRANS), EIDTRANS, 687 db.DB_HASH, flags=table_flags) 688 689 self.pid_trans = db.DB(self.env) 690 self.pid_trans.set_flags(db.DB_DUP) 691 self.pid_trans.open(_mkname(self.full_name, PIDTRANS), PIDTRANS, 692 db.DB_HASH, flags=table_flags) 693 694 self.sid_trans = db.DB(self.env) 695 self.sid_trans.set_flags(db.DB_DUP) 696 self.sid_trans.open(_mkname(self.full_name, SIDTRANS), SIDTRANS, 697 db.DB_HASH, flags=table_flags) 698 699 self.oid_trans = db.DB(self.env) 700 self.oid_trans.set_flags(db.DB_DUP) 701 self.oid_trans.open(_mkname(self.full_name, OIDTRANS), OIDTRANS, 702 db.DB_HASH, flags=table_flags) 703 704 self.rid_trans = db.DB(self.env) 705 self.rid_trans.set_flags(db.DB_DUP) 706 self.rid_trans.open(_mkname(self.full_name, RIDTRANS), RIDTRANS, 707 db.DB_HASH, flags=table_flags) 708 709 self.nid_trans = db.DB(self.env) 710 
self.nid_trans.set_flags(db.DB_DUP) 711 self.nid_trans.open(_mkname(self.full_name, NIDTRANS), NIDTRANS, 712 db.DB_HASH, flags=table_flags) 713 714 self.reference_map_primary_map = db.DB(self.env) 715 self.reference_map_primary_map.set_flags(db.DB_DUP) 716 self.reference_map_primary_map.open( 717 _mkname(self.full_name, REF_PRI), 718 REF_PRI, db.DB_BTREE, flags=table_flags) 719 720 self.reference_map_referenced_map = db.DB(self.env) 721 self.reference_map_referenced_map.set_flags(db.DB_DUP|db.DB_DUPSORT) 722 self.reference_map_referenced_map.open( 723 _mkname(self.full_name, REF_REF), 724 REF_REF, db.DB_BTREE, flags=table_flags) 725 726 if not self.readonly: 727 self.person_map.associate(self.surnames, find_surname, table_flags) 728 self.person_map.associate(self.id_trans, find_idmap, table_flags) 729 self.family_map.associate(self.fid_trans, find_idmap, table_flags) 730 self.event_map.associate(self.eid_trans, find_idmap, table_flags) 731 self.repository_map.associate(self.rid_trans, find_idmap, 732 table_flags) 733 self.note_map.associate(self.nid_trans, find_idmap, table_flags) 734 self.place_map.associate(self.pid_trans, find_idmap, table_flags) 735 self.media_map.associate(self.oid_trans, find_idmap, table_flags) 736 self.source_map.associate(self.sid_trans, find_idmap, table_flags) 737 self.reference_map.associate(self.reference_map_primary_map, 738 find_primary_handle, 739 table_flags) 740 self.reference_map.associate(self.reference_map_referenced_map, 741 find_referenced_handle, 742 table_flags) 743 self.secondary_connected = True 744 745 self.smap_index = len(self.source_map) 746 self.emap_index = len(self.event_map) 747 self.pmap_index = len(self.person_map) 748 self.fmap_index = len(self.family_map) 749 self.lmap_index = len(self.place_map) 750 self.omap_index = len(self.media_map) 751 self.rmap_index = len(self.repository_map) 752 self.nmap_index = len(self.note_map)
753
    def rebuild_secondary(self, callback=None):
        """Rebuild all secondary index tables, converting BSDDB errors to
        Errors.DbError."""
        try:
            self.__rebuild_secondary(callback)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
761 - def __rebuild_secondary(self, callback=None):
762 if self.readonly: 763 return 764 765 table_flags = self.__open_flags() 766 767 # remove existing secondary indices 768 769 index = 1 770 771 items = [ 772 ( self.id_trans, IDTRANS ), 773 ( self.surnames, SURNAMES ), 774 ( self.fid_trans, FIDTRANS ), 775 ( self.pid_trans, PIDTRANS ), 776 ( self.oid_trans, OIDTRANS ), 777 ( self.eid_trans, EIDTRANS ), 778 ( self.rid_trans, RIDTRANS ), 779 ( self.nid_trans, NIDTRANS ), 780 ( self.reference_map_primary_map, REF_PRI), 781 ( self.reference_map_referenced_map, REF_REF), 782 ] 783 784 for (database, name) in items: 785 database.close() 786 env = db.DB(self.env) 787 env.remove(_mkname(self.full_name, name), name) 788 if callback: 789 callback(index) 790 index += 1 791 792 if callback: 793 callback(11) 794 795 # Set flag saying that we have removed secondary indices 796 # and then call the creating routine 797 self.secondary_connected = False 798 self.__connect_secondary() 799 if callback: 800 callback(12)
801 808 859
    def delete_primary_from_reference_map(self, handle, transaction, txn=None):
        """
        Remove all references to the primary object from the reference_map.
        """

        primary_cur = self.get_reference_map_primary_cursor()

        try:
            ret = primary_cur.set(handle)
        except:
            # Best-effort: a failed set() means no entries for this handle.
            ret = None

        remove_list = set()
        while (ret is not None):
            (key, data) = ret

            # data values are of the form:
            # ((primary_object_class_name, primary_object_handle),
            #  (referenced_object_class_name, referenced_object_handle))

            # so we need the second tuple give us a reference that we can
            # combine with the primary_handle to get the main key.

            main_key = (handle, pickle.loads(data)[1][1])

            # The trick is not to remove while inside the cursor,
            # but collect them all and remove after the cursor is closed
            remove_list.add(main_key)

            ret = primary_cur.next_dup()

        primary_cur.close()

        # Now that the cursor is closed, we can remove things
        for main_key in remove_list:
            self.__remove_reference(main_key, transaction, txn)
    def update_reference_map(self, obj, transaction, txn=None):
        """
        Synchronize the reference_map rows for primary object *obj* with
        the references it currently holds.

        If txn is given, then changes are written right away using txn.
        """

        # Add references to the reference_map for all primary object referenced
        # from the primary object 'obj' or any of its secondary objects.

        handle = obj.handle
        update = self.reference_map_primary_map.has_key(str(handle))

        if update:
            # First thing to do is get hold of all rows in the reference_map
            # table that hold a reference from this primary obj. This means
            # finding all the rows that have this handle somewhere in the
            # list of (class_name, handle) pairs.
            # The primary_map sec index allows us to look this up quickly.

            existing_references = set()

            primary_cur = self.get_reference_map_primary_cursor()

            try:
                ret = primary_cur.set(handle)
            except:
                # Best-effort: treat a failed set() as "no existing rows".
                ret = None

            while (ret is not None):
                (key, data) = ret

                # data values are of the form:
                # ((primary_object_class_name, primary_object_handle),
                #  (referenced_object_class_name, referenced_object_handle))
                # so we need the second tuple give us a reference that we can
                # compare with what is returned from
                # get_referenced_handles_recursively

                # secondary DBs are not DBShelf's, so we need to do pickling
                # and unpicking ourselves here
                existing_reference = pickle.loads(data)[1]
                existing_references.add(
                    (KEY_TO_CLASS_MAP[existing_reference[0]],
                     existing_reference[1]))
                ret = primary_cur.next_dup()

            primary_cur.close()

            # Once we have the list of rows that already have a reference
            # we need to compare it with the list of objects that are
            # still references from the primary object.

            current_references = set(obj.get_referenced_handles_recursively())

            no_longer_required_references = existing_references.difference(
                current_references)

            new_references = current_references.difference(existing_references)

        else:
            # No existing refs are found:
            #    all we have is new, nothing to remove
            no_longer_required_references = set()
            new_references = set(obj.get_referenced_handles_recursively())

        # handle addition of new references
        for (ref_class_name, ref_handle) in new_references:
            data = ((CLASS_TO_KEY_MAP[obj.__class__.__name__], handle),
                    (CLASS_TO_KEY_MAP[ref_class_name], ref_handle),)
            self.__add_reference((handle, ref_handle), data, transaction, txn)

        # handle deletion of old references
        for (ref_class_name, ref_handle) in no_longer_required_references:
            try:
                self.__remove_reference((handle, ref_handle), transaction, txn)
            except:
                # ignore missing old reference
                pass
    def __remove_reference(self, key, transaction, txn=None):
        """
        Remove the reference specified by the key, preserving the change in
        the passed transaction.
        """
        if not self.readonly:
            if transaction.batch:
                # Batch mode: delete immediately under the given txn.
                self.reference_map.delete(str(key), txn=txn)
            else:
                # Normal mode: record the deletion in the transaction so it
                # can be undone; actual removal happens at commit time.
                old_data = self.reference_map.get(str(key), txn=self.txn)
                transaction.add(REFERENCE_KEY, str(key), old_data, None)
                transaction.reference_del.append(str(key))
    def __add_reference(self, key, data, transaction, txn=None):
        """
        Add the reference specified by the key and the data, preserving the
        change in the passed transaction.
        """

        if self.readonly or not key:
            return

        if transaction.batch:
            # Batch mode: write immediately under the given txn.
            self.reference_map.put(str(key), data, txn=txn)
        else:
            # Normal mode: record the addition in the transaction so it
            # can be undone; actual write happens at commit time.
            transaction.add(REFERENCE_KEY, str(key), None, data)
            transaction.reference_add.append((str(key), data))
    def reindex_reference_map(self, callback):
        """Rebuild the whole reference_map from the primary tables,
        converting BSDDB errors to Errors.DbError."""
        try:
            self.__reindex_reference_map(callback)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
    def __reindex_reference_map(self, callback):
        """
        Reindex all primary records in the database.

        This will be a slow process for large databases.
        """

        # First, remove the reference map and related tables
        self.reference_map_referenced_map.close()
        junk = db.DB(self.env)
        junk.remove(_mkname(self.full_name, REF_REF), REF_REF)
        callback(1)

        self.reference_map_primary_map.close()
        junk = db.DB(self.env)
        junk.remove(_mkname(self.full_name, REF_PRI), REF_PRI)
        callback(2)

        self.reference_map.close()
        junk = db.DB(self.env)
        junk.remove(_mkname(self.full_name, REF_MAP), REF_MAP)
        callback(3)

        # Open reference_map and primary map
        self.reference_map = self.__open_table(self.full_name, REF_MAP,
                                               dbtype=db.DB_BTREE)

        open_flags = self.__open_flags()
        self.reference_map_primary_map = db.DB(self.env)
        self.reference_map_primary_map.set_flags(db.DB_DUP)
        self.reference_map_primary_map.open(
            _mkname(self.full_name, REF_PRI), REF_PRI, db.DB_BTREE,
            flags=open_flags)

        self.reference_map.associate(self.reference_map_primary_map,
                                     find_primary_handle, open_flags)

        # Make a dictionary of the functions and classes that we need for
        # each of the primary object tables.
        primary_tables = {
            'Person': {'cursor_func': self.get_person_cursor,
                       'class_func': Person},
            'Family': {'cursor_func': self.get_family_cursor,
                       'class_func': Family},
            'Event': {'cursor_func': self.get_event_cursor,
                      'class_func': Event},
            'Place': {'cursor_func': self.get_place_cursor,
                      'class_func': Place},
            'Source': {'cursor_func': self.get_source_cursor,
                       'class_func': Source},
            'MediaObject': {'cursor_func': self.get_media_cursor,
                            'class_func': MediaObject},
            'Repository': {'cursor_func': self.get_repository_cursor,
                           'class_func': Repository},
            'Note': {'cursor_func': self.get_note_cursor,
                     'class_func': Note},
            }

        transaction = self.transaction_begin(batch=True, no_magic=True)
        callback(4)

        # Now we use the functions and classes defined above
        # to loop through each of the primary object tables.
        for primary_table_name in primary_tables.keys():

            cursor = primary_tables[primary_table_name]['cursor_func']()
            data = cursor.first()

            # Grab the real object class here so that the lookup does
            # not happen inside the cursor loop.
            class_func = primary_tables[primary_table_name]['class_func']
            while data:
                found_handle, val = data
                # InstanceType makes an empty old-style instance without
                # running __init__; unserialize then fills in the state.
                obj = InstanceType(class_func)
                obj.unserialize(val)

                # One short-lived DB txn per object keeps lock footprints
                # small during this long-running rebuild.
                the_txn = self.env.txn_begin()
                self.update_reference_map(obj, transaction, the_txn)
                if the_txn:
                    the_txn.commit()

                data = cursor.next()

            cursor.close()
        callback(5)
        self.transaction_commit(transaction, _("Rebuild reference map"))

        # Re-create the referenced-handle secondary index removed above.
        self.reference_map_referenced_map = db.DB(self.env)
        self.reference_map_referenced_map.set_flags(db.DB_DUP|db.DB_DUPSORT)
        self.reference_map_referenced_map.open(
            _mkname(self.full_name, REF_REF),
            REF_REF, db.DB_BTREE,flags=open_flags)
        self.reference_map.associate(self.reference_map_referenced_map,
                                     find_referenced_handle, open_flags)
        callback(6)
1105
    def __close_metadata(self):
        """
        Flush all in-memory metadata (name formats, owner, bookmarks,
        gender stats, custom type names, surname list) to the metadata
        table in one DB transaction, then close the table.

        Read-only databases skip the write and only close.
        """
        if not self.readonly:
            # Start transaction
            the_txn = self.env.txn_begin()

            # name display formats
            self.metadata.put('name_formats', self.name_formats, txn=the_txn)

            # database owner
            owner_data = self.owner.serialize()
            self.metadata.put('researcher', owner_data, txn=the_txn)

            # bookmarks
            self.metadata.put('bookmarks', self.bookmarks.get(), txn=the_txn)
            self.metadata.put('family_bookmarks', self.family_bookmarks.get(),
                              txn=the_txn)
            self.metadata.put('event_bookmarks', self.event_bookmarks.get(),
                              txn=the_txn)
            self.metadata.put('source_bookmarks', self.source_bookmarks.get(),
                              txn=the_txn)
            self.metadata.put('place_bookmarks', self.place_bookmarks.get(),
                              txn=the_txn)
            self.metadata.put('repo_bookmarks', self.repo_bookmarks.get(),
                              txn=the_txn)
            self.metadata.put('media_bookmarks', self.media_bookmarks.get(),
                              txn=the_txn)
            self.metadata.put('note_bookmarks', self.note_bookmarks.get(),
                              txn=the_txn)

            # gender stats
            self.metadata.put('gender_stats', self.genderStats.save_stats(),
                              txn=the_txn)
            # Custom type values
            self.metadata.put('fevent_names', list(self.family_event_names),
                              txn=the_txn)
            self.metadata.put('pevent_names', list(self.individual_event_names),
                              txn=the_txn)
            self.metadata.put('fattr_names', list(self.family_attributes),
                              txn=the_txn)
            self.metadata.put('pattr_names', list(self.individual_attributes),
                              txn=the_txn)
            self.metadata.put('marker_names', list(self.marker_names),
                              txn=the_txn)
            self.metadata.put('child_refs', list(self.child_ref_types),
                              txn=the_txn)
            self.metadata.put('family_rels', list(self.family_rel_types),
                              txn=the_txn)
            self.metadata.put('event_roles', list(self.event_role_names),
                              txn=the_txn)
            self.metadata.put('name_types', list(self.name_types),
                              txn=the_txn)
            self.metadata.put('repo_types', list(self.repository_types),
                              txn=the_txn)
            self.metadata.put('note_types', list(self.note_types),
                              txn=the_txn)
            self.metadata.put('sm_types', list(self.source_media_types),
                              txn=the_txn)
            self.metadata.put('url_types', list(self.url_types),
                              txn=the_txn)
            self.metadata.put('mattr_names', list(self.media_attributes),
                              txn=the_txn)
            # cached surname list
            self.metadata.put('surname_list', self.surname_list, txn=the_txn)

            the_txn.commit()

        self.metadata.close()
1173
1174 - def __close_early(self):
1175 """ 1176 Bail out if the incompatible version is discovered: 1177 * close cleanly to not damage data/env 1178 * raise exception 1179 """ 1180 self.metadata.close() 1181 self.env.close() 1182 self.metadata = None 1183 self.env = None 1184 self.db_is_open = False 1185 raise FileVersionError( 1186 "The database version is not supported by this " 1187 "version of GRAMPS.\nPlease upgrade to the " 1188 "corresponding version or use XML for porting" 1189 "data between different database versions.")
1190
    def close(self):
        """
        Close the database and remove the lock file.

        Berkeley DB errors are wrapped in DbError; an IOError while
        clearing the lock file is deliberately ignored (best-effort).
        """
        try:
            self.__close()
            clear_lock_file(self.get_save_path())
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
        except IOError:
            pass
1200
    def __close(self):
        """
        Checkpoint, flush metadata, close every index and table, the
        environment and the undo database, then null out all handles and
        mark the database closed.  No-op when already closed.
        """
        if not self.db_is_open:
            return

        self.env.txn_checkpoint()

        self.__close_metadata()
        self.name_group.close()
        self.surnames.close()
        self.id_trans.close()
        self.fid_trans.close()
        self.eid_trans.close()
        self.rid_trans.close()
        self.nid_trans.close()
        self.oid_trans.close()
        self.sid_trans.close()
        self.pid_trans.close()
        self.reference_map_primary_map.close()
        self.reference_map_referenced_map.close()
        self.reference_map.close()

        # primary databases must be closed after secondary indexes, or
        # we run into problems with any active cursors.
        self.person_map.close()
        self.family_map.close()
        self.repository_map.close()
        self.note_map.close()
        self.place_map.close()
        self.source_map.close()
        self.media_map.close()
        self.event_map.close()
        self.env.close()

        try:
            self.close_undodb()
        except db.DBNoSuchFileError:
            # Undo database never hit disk; nothing to close.
            pass

        self.person_map = None
        self.family_map = None
        self.repository_map = None
        self.note_map = None
        self.place_map = None
        self.source_map = None
        self.media_map = None
        self.event_map = None
        self.surnames = None
        self.name_group = None
        self.env = None
        self.metadata = None
        self.db_is_open = False
1252
    def do_remove_object(self, handle, transaction, data_map, key, del_list):
        """
        Remove the primary object keyed by handle from data_map.

        Batch mode deletes immediately (with reference-map cleanup) under
        its own DB transaction; otherwise the old data is recorded on the
        Gramps transaction for undo and the handle queued on del_list.
        No-op for read-only databases or an empty handle.
        """
        if self.readonly or not handle:
            return

        handle = str(handle)
        if transaction.batch:
            the_txn = self.env.txn_begin()
            self.delete_primary_from_reference_map(handle, transaction,
                                                   txn=the_txn)
            data_map.delete(handle, txn=the_txn)
            if the_txn:
                the_txn.commit()
        else:
            self.delete_primary_from_reference_map(handle, transaction)
            old_data = data_map.get(handle, txn=self.txn)
            transaction.add(key, handle, old_data, None)
            del_list.append(handle)
1270
    def del_person(self, handle):
        """Delete the person record keyed by handle under the current txn."""
        self.person_map.delete(str(handle), txn=self.txn)
1273
    def del_source(self, handle):
        """Delete the source record keyed by handle under the current txn."""
        self.source_map.delete(str(handle), txn=self.txn)
1276
    def del_repository(self, handle):
        """Delete the repository record keyed by handle under the current txn."""
        self.repository_map.delete(str(handle), txn=self.txn)
1279
    def del_note(self, handle):
        """Delete the note record keyed by handle under the current txn."""
        self.note_map.delete(str(handle), txn=self.txn)
1282
    def del_place(self, handle):
        """Delete the place record keyed by handle under the current txn."""
        self.place_map.delete(str(handle), txn=self.txn)
1285
    def del_media(self, handle):
        """Delete the media record keyed by handle under the current txn."""
        self.media_map.delete(str(handle), txn=self.txn)
1288
    def del_family(self, handle):
        """Delete the family record keyed by handle under the current txn."""
        self.family_map.delete(str(handle), txn=self.txn)
1291
    def del_event(self, handle):
        """Delete the event record keyed by handle under the current txn."""
        self.event_map.delete(str(handle), txn=self.txn)
1294
    def set_name_group_mapping(self, name, group):
        """
        Make name group under the value of group.

        If group is None, the old grouping is deleted.
        Berkeley DB errors are wrapped in DbError.
        """
        try:
            self.__set_name_group_mapping(name, group)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
1306
1307 - def __set_name_group_mapping(self, name, group):
1308 if not self.readonly: 1309 # Start transaction 1310 the_txn = self.env.txn_begin() 1311 1312 name = str(name) 1313 data = self.name_group.get(name, txn=the_txn) 1314 if data is not None: 1315 self.name_group.delete(name, txn=the_txn) 1316 if group is not None: 1317 self.name_group.put(name, group, txn=the_txn) 1318 the_txn.commit() 1319 self.emit('person-rebuild')
1320
    def build_surname_list(self):
        """
        Rebuild the cached list of distinct surnames from the surnames
        secondary index, then sort it.
        """
        try:
            self.surname_list = list(set(self.surnames.keys()))
            self.sort_surname_list()
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
1328
    def remove_from_surname_list(self, person):
        """
        Check whether there are persons with the same surname left in
        the database.

        If not then we need to remove the name from the list.
        The function must be overridden in the derived class.
        """
        name = str(person.get_primary_name().get_surname())
        try:
            # NOTE(review): keys() materializes every surname key, so this
            # count is O(people) per delete -- acceptable interactively,
            # but confirm before using in bulk deletes.
            if self.surnames.keys().count(name) == 1:
                self.surname_list.remove(unicode(name))
        except ValueError:
            # Surname was not in the cached list; nothing to remove.
            pass
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
1346
    def __get_obj_from_gramps_id(self, val, tbl, class_init, prim_tbl):
        """
        Look up a primary object by its gramps ID via the secondary index
        tbl, instantiate it with class_init and return it, or None when
        the ID is absent.  prim_tbl is the primary table, needed for the
        read-only workaround below.
        """
        try:
            if tbl.has_key(str(val)):
                data = tbl.get(str(val), txn=self.txn)
                obj = class_init()
                ### FIXME: this is a dirty hack that works without no
                ### sensible explanation. For some reason, for a readonly
                ### database, secondary index returns a primary table key
                ### corresponding to the data, not the data.
                if self.readonly:
                    tuple_data = prim_tbl.get(data, txn=self.txn)
                else:
                    tuple_data = pickle.loads(data)
                obj.unserialize(tuple_data)
                return obj
            else:
                return None
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
1367
    def get_person_from_gramps_id(self, val):
        """
        Find a Person in the database from the passed gramps' ID.

        If no such Person exists, None is returned.
        """
        return self.__get_obj_from_gramps_id(val, self.id_trans, Person,
                                             self.person_map)
1376
    def get_family_from_gramps_id(self, val):
        """
        Find a Family in the database from the passed gramps' ID.

        If no such Family exists, None is returned.
        """
        return self.__get_obj_from_gramps_id(val, self.fid_trans, Family,
                                             self.family_map)
1385
    def get_event_from_gramps_id(self, val):
        """
        Find an Event in the database from the passed gramps' ID.

        If no such Event exists, None is returned.
        """
        return self.__get_obj_from_gramps_id(val, self.eid_trans, Event,
                                             self.event_map)
1394
    def get_place_from_gramps_id(self, val):
        """
        Find a Place in the database from the passed gramps' ID.

        If no such Place exists, None is returned.
        """
        return self.__get_obj_from_gramps_id(val, self.pid_trans, Place,
                                             self.place_map)
1403
    def get_source_from_gramps_id(self, val):
        """
        Find a Source in the database from the passed gramps' ID.

        If no such Source exists, None is returned.
        """
        return self.__get_obj_from_gramps_id(val, self.sid_trans, Source,
                                             self.source_map)
1412
    def get_object_from_gramps_id(self, val):
        """
        Find a MediaObject in the database from the passed gramps' ID.

        If no such MediaObject exists, None is returned.
        """
        return self.__get_obj_from_gramps_id(val, self.oid_trans, MediaObject,
                                             self.media_map)
1421
    def get_repository_from_gramps_id(self, val):
        """
        Find a Repository in the database from the passed gramps' ID.

        If no such Repository exists, None is returned.
        """
        return self.__get_obj_from_gramps_id(val, self.rid_trans, Repository,
                                             self.repository_map)
1430
    def get_note_from_gramps_id(self, val):
        """
        Find a Note in the database from the passed gramps' ID.

        If no such Note exists, None is returned.
        """
        return self.__get_obj_from_gramps_id(val, self.nid_trans, Note,
                                             self.note_map)
1439
    def commit_base(self, obj, data_map, key, update_list, add_list,
                    transaction, change_time):
        """
        Commit the specified object to the database, storing the changes as
        part of the transaction.

        Sets obj.change to change_time (or now), writes the serialized
        object -- immediately under its own DB txn in batch mode, queued on
        the Gramps transaction otherwise -- and appends the handle to
        update_list or add_list depending on whether it already existed.

        Returns the previous serialized data (None for new objects and for
        batch commits); returns None without writing for read-only
        databases or objects lacking a handle.
        """
        if self.readonly or not obj or not obj.handle:
            return

        if change_time:
            obj.change = int(change_time)
        else:
            obj.change = int(time.time())
        handle = str(obj.handle)

        if transaction.batch:
            the_txn = self.env.txn_begin()
            self.update_reference_map(obj, transaction, txn=the_txn)
            data_map.put(handle, obj.serialize(), txn=the_txn)
            if the_txn:
                the_txn.commit()
            old_data = None
        else:
            self.update_reference_map(obj, transaction)
            old_data = data_map.get(handle, txn=self.txn)
            new_data = obj.serialize()
            transaction.add(key, handle, old_data, new_data)
            if old_data:
                update_list.append((handle, new_data))
            else:
                add_list.append((handle, new_data))
        return old_data
1472
1473 - def do_commit(self, add_list, db_map):
1474 retlist = [] 1475 for (handle, data) in add_list: 1476 db_map.put(handle, data, self.txn) 1477 retlist.append(str(handle)) 1478 return retlist
1479
    def get_from_handle(self, handle, class_type, data_map):
        """
        Fetch the record for handle from data_map and return it wrapped in
        a new class_type instance, or None when not found.
        """
        try:
            data = data_map.get(str(handle), txn=self.txn)
        except:
            data = None
            # under certain circumstances during a database reload,
            # data_map can be none. If so, then don't report an error
            if data_map:
                log.error("Failed to get from handle", exc_info=True)
        if data:
            # Empty old-style instance without __init__; unserialize
            # fills in the state from the stored tuple.
            newobj = InstanceType(class_type)
            newobj.unserialize(data)
            return newobj
        return None
1494
    def find_from_handle(self, handle, transaction, class_type, dmap, add_func):
        """
        Find a object of class_type in the database from the passed handle.

        If no object exists, a new object is added to the database.

        @return: Returns a tuple, first the object, second a bool which is True
            if the object is new
        @rtype: tuple
        """
        obj = class_type()
        handle = str(handle)
        new = True
        if dmap.has_key(handle):
            data = dmap.get(handle, txn=self.txn)
            obj.unserialize(data)
            #references create object with id None before object is really made
            if obj.gramps_id is not None:
                new = False
        else:
            obj.set_handle(handle)
            add_func(obj, transaction)
        return obj, new
1518
    def transaction_begin(self, msg="", batch=False, no_magic=False):
        """
        Begin a new transaction, wrapping Berkeley DB errors in DbError.
        """
        try:
            return self.__transaction_begin(msg, batch, no_magic)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
1525
    def __transaction_begin(self, msg="", batch=False, no_magic=False):
        """
        Create a new Transaction tied to the current UNDO database.

        The transaction has no effect until it is committed using the
        transaction_commit function of this database object.
        """

        if batch:
            # A batch transaction does not store the commits
            # Aborting the session completely will become impossible.
            self.abort_possible = False
            # Undo is also impossible after batch transaction
            self.undoindex = -1
            self.translist = [None] * len(self.translist)
        transaction = BdbTransaction(msg, self.undodb, batch, no_magic)
        if transaction.batch:
            self.env.txn_checkpoint()
            self.env.set_flags(db.DB_TXN_NOSYNC, 1) # async txn

        if self.secondary_connected and not transaction.no_magic:
            # Disconnect unneeded secondary indices; they are rebuilt in
            # __transaction_commit once the bulk work is done.
            self.surnames.close()
            junk = db.DB(self.env)
            junk.remove(_mkname(self.full_name, SURNAMES), SURNAMES)

            self.reference_map_referenced_map.close()
            junk = db.DB(self.env)
            junk.remove(_mkname(self.full_name, REF_REF), REF_REF)

        return transaction
1557
    def transaction_commit(self, transaction, msg):
        """
        Commit the transaction, wrapping Berkeley DB errors in DbError.
        """
        try:
            self.__transaction_commit(transaction, msg)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
1564
    def __transaction_commit(self, transaction, msg):
        """
        Commit the Gramps transaction under a single BSD DB transaction,
        apply its queued reference-map adds/deletes, and -- unless the
        transaction was opened no_magic -- re-create the secondary indices
        removed in __transaction_begin.
        """

        # Start BSD DB transaction -- DBTxn
        self.txn = self.env.txn_begin()

        GrampsDbBase.transaction_commit(self, transaction, msg)

        for (key, data) in transaction.reference_add:
            self.reference_map.put(str(key), data, txn=self.txn)

        for key in transaction.reference_del:
            self.reference_map.delete(str(key), txn=self.txn)

        # Commit BSD DB transaction -- DBTxn
        self.txn.commit()
        if transaction.batch:
            self.env.txn_checkpoint()
            self.env.set_flags(db.DB_TXN_NOSYNC, 0) # sync txn

        if not transaction.no_magic:
            # create new secondary indices to replace the ones removed
            open_flags = self.__open_flags()
            dupe_flags = db.DB_DUP|db.DB_DUPSORT

            self.surnames = db.DB(self.env)
            self.surnames.set_flags(dupe_flags)
            self.surnames.open(
                _mkname(self.full_name, "surnames"),
                'surnames', db.DB_BTREE,flags=open_flags)
            self.person_map.associate(self.surnames, find_surname,
                                      open_flags)

            self.reference_map_referenced_map = db.DB(self.env)
            self.reference_map_referenced_map.set_flags(dupe_flags)
            self.reference_map_referenced_map.open(
                _mkname(self.full_name, REF_REF),
                REF_REF, db.DB_BTREE,flags=open_flags)
            self.reference_map.associate(self.reference_map_referenced_map,
                                         find_referenced_handle, open_flags)

            # Only build surname list after surname index is surely back
            self.build_surname_list()

        self.txn = None
1609
    def undo(self, update_history=True):
        """
        Undo the last transaction under a BSD DB transaction: commit on
        success, abort on failure.  Returns the base-class status.
        """
        try:
            self.txn = self.env.txn_begin()
            status = GrampsDbBase.undo(self, update_history)
            if status:
                self.txn.commit()
            else:
                self.txn.abort()
            self.txn = None
            return status
        except DBERRS, msg:
            # NOTE(review): on a DB error self.txn is neither aborted nor
            # cleared before re-raising -- confirm whether that is intended.
            self.__log_error()
            raise Errors.DbError(msg)
1623
    def redo(self, update_history=True):
        """
        Redo the last undone transaction under a BSD DB transaction:
        commit on success, abort on failure.  Returns the base-class status.
        """
        try:
            self.txn = self.env.txn_begin()
            status = GrampsDbBase.redo(self, update_history)
            if status:
                self.txn.commit()
            else:
                self.txn.abort()
            self.txn = None
            return status
        except DBERRS, msg:
            # NOTE(review): on a DB error self.txn is neither aborted nor
            # cleared before re-raising -- confirm whether that is intended.
            self.__log_error()
            raise Errors.DbError(msg)
1637
1638 - def undo_reference(self, data, handle):
1639 try: 1640 if data == None: 1641 self.reference_map.delete(handle, txn=self.txn) 1642 else: 1643 self.reference_map.put(handle, data, txn=self.txn) 1644 except DBERRS, msg: 1645 self.__log_error() 1646 raise Errors.DbError(msg)
1647
1648 - def undo_data(self, data, handle, db_map, signal_root):
1649 try: 1650 if data == None: 1651 self.emit(signal_root + '-delete', ([handle],)) 1652 db_map.delete(handle, txn=self.txn) 1653 else: 1654 ex_data = db_map.get(handle, txn=self.txn) 1655 if ex_data: 1656 signal = signal_root + '-update' 1657 else: 1658 signal = signal_root + '-add' 1659 db_map.put(handle, data, txn=self.txn) 1660 self.emit(signal, ([handle],)) 1661 except DBERRS, msg: 1662 self.__log_error() 1663 raise Errors.DbError(msg)
1664
    def gramps_upgrade(self, callback=None):
        """
        Upgrade the database schema to the current version.

        The version-13 upgrade path is currently disabled (commented out),
        so only the elapsed time is reported.
        """
        UpdateCallback.__init__(self, callback)

        # version = self.metadata.get('version', default=_MINVERSION)

        t = time.time()
        # if version < 13:
        #     self.gramps_upgrade_13()
        print "Upgrade time:", int(time.time()-t), "seconds"
1674 1675
class BdbTransaction(Transaction):
    """
    Transaction subclass that additionally tracks reference-map changes
    so __transaction_commit can apply them under the same DB transaction.
    """
    def __init__(self, msg, db, batch=False, no_magic=False):
        Transaction.__init__(self, msg, db, batch, no_magic)
        # Keys queued for deletion from the reference map.
        self.reference_del = []
        # (key, data) pairs queued for insertion into the reference map.
        self.reference_add = []
1681
1682 -def _mkname(path, name):
1683 return os.path.join(path, name + ".db")
1684
def clear_lock_file(name):
    """Remove the "lock" file in directory *name*; a missing file is fine."""
    lock_path = os.path.join(name, "lock")
    try:
        os.unlink(lock_path)
    except OSError:
        # Already gone (or never created) -- best-effort cleanup.
        pass
1690
def write_lock_file(name):
    """
    Create (or overwrite) the "lock" file in directory *name*, recording
    the current user (user@host on POSIX, USERNAME on Windows) as owner.

    Fixes: the file handle is now closed even if the write raises
    (try/finally), and the bare except around os.getlogin() no longer
    swallows SystemExit/KeyboardInterrupt.
    """
    if not os.path.isdir(name):
        os.mkdir(name)
    if os.name == 'nt':
        text = os.environ['USERNAME']
    else:
        host = os.uname()[1]
        # An ugly workaround for os.getlogin() issue with Konsole:
        # it can fail when there is no controlling terminal, so fall
        # back to the USER environment variable.
        try:
            user = os.getlogin()
        except Exception:
            user = os.environ.get('USER')
        text = "%s@%s" % (user, host)
    f = open(os.path.join(name, "lock"), "w")
    try:
        f.write(_("Locked by %s") % text)
    finally:
        f.close()
if __name__ == "__main__":

    import sys

    # Smoke test: open the database directory named on the command line,
    # print every person's primary name, then dump the surname index keys.
    d = GrampsDBDir()
    d.load(sys.argv[1], lambda x: x)

    c = d.get_person_cursor()
    data = c.first()
    while data:
        person = Person(data[1])
        print data[0], person.get_primary_name().get_name(),
        data = c.next()
    c.close()

    print d.surnames.keys()