1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23 """
24 Provide the Berkeley DB (DBDir) database backend for GRAMPS.
25 This is used since GRAMPS version 3.0
26 """
27
28
29
30
31
32
33 import cPickle as pickle
34 import os
35 import time
36 from types import InstanceType
37
38 from gettext import gettext as _
39 from bsddb import dbshelve, db
40 import logging
41
42 log = logging.getLogger(".GrampsDb")
43
44
45
46
47
48
49 from gen.lib import (GenderStats, Person, Family, Event, Place, Source,
50 MediaObject, Repository, Note)
51 from gen.db import (GrampsDbBase, KEY_TO_CLASS_MAP, CLASS_TO_KEY_MAP,
52 REFERENCE_KEY, Transaction)
53 from gen.db.exceptions import FileVersionError
54 from BasicUtils import UpdateCallback
55 from gen.db.cursor import GrampsCursor
56 import Errors
57
58 _MINVERSION = 9
59 _DBVERSION = 13
60
61 IDTRANS = "person_id"
62 FIDTRANS = "family_id"
63 PIDTRANS = "place_id"
64 OIDTRANS = "media_id"
65 EIDTRANS = "event_id"
66 RIDTRANS = "repo_id"
67 NIDTRANS = "note_id"
68 SIDTRANS = "source_id"
69 SURNAMES = "surnames"
70 NAME_GROUP = "name_group"
71 META = "meta_data"
72
73 FAMILY_TBL = "family"
74 PLACES_TBL = "place"
75 SOURCES_TBL = "source"
76 MEDIA_TBL = "media"
77 EVENTS_TBL = "event"
78 PERSON_TBL = "person"
79 REPO_TBL = "repo"
80 NOTE_TBL = "note"
81
82 REF_MAP = "reference_map"
83 REF_PRI = "primary_map"
84 REF_REF = "referenced_map"
85
86 DBERRS = (db.DBRunRecoveryError, db.DBAccessError,
87 db.DBPageNotFoundError, db.DBInvalidArgError)
88
89
91 return str(data[3][5])
92
95
96
97
98
99
100
102 return str((data)[0][1])
103
105 return str((data)[1][1])
106
108
112
114 d = self.cursor.first()
115 if d:
116 return (d[0], pickle.loads(d[1]))
117 return None
118
120 d = self.cursor.next()
121 if d:
122 return (d[0], pickle.loads(d[1]))
123 return None
124
127
130
132 return self.source.stat()['ndata']
133
135
137 self.cursor = source.cursor(txn)
138 self.source = source
139
141 d = self.cursor.first()
142 if d:
143 return (d[0], pickle.loads(d[1]))
144 return None
145
147 d = self.cursor.next()
148 if d:
149 return (d[0], pickle.loads(d[1]))
150 return None
151
154
157
159 return self.source.stat()['ndata']
160
162 """Cursor that includes handling for duplicate keys."""
163
164 - def set(self, key):
166
169
170
171
172
173
174
176 """
177 GRAMPS database object.
178
179 This object is a base class for other objects.
180 """
181
183 """Create a new GrampsDB."""
184
185 GrampsDbBase.__init__(self)
186 self.txn = None
187 self.secondary_connected = False
188
190 return db.DB_CREATE | db.DB_AUTO_COMMIT
191
193 dbmap = dbshelve.DBShelf(self.env)
194 dbmap.db.set_pagesize(16384)
195
196 fname = os.path.join(file_name, table_name + ".db")
197
198 if self.readonly:
199 dbmap.open(fname, table_name, dbtype, db.DB_RDONLY)
200 else:
201 dbmap.open(fname, table_name, dbtype, self.__open_flags(), 0666)
202 return dbmap
203
205 return table.keys(self.txn)
206
215
217 try:
218 return GrampsDBDirCursor(table, self.txn)
219 except DBERRS, msg:
220 self.__log_error()
221 raise Errors.DbError(msg)
222
224 return self.__get_cursor(self.person_map)
225
227 return self.__get_cursor(self.family_map)
228
230 return self.__get_cursor(self.event_map)
231
233 return self.__get_cursor(self.place_map)
234
236 return self.__get_cursor(self.source_map)
237
240
242 return self.__get_cursor(self.repository_map)
243
245 return self.__get_cursor(self.note_map)
246
248 try:
249 return table.get(str(handle), txn=self.txn) != None
250 except DBERRS, msg:
251 self.__log_error()
252 raise Errors.DbError(msg)
253
255 """
256 Return True if the handle exists in the current Person database.
257 """
258 return self.__has_handle(self.person_map, handle)
259
261 """
262 Return True if the handle exists in the current Family database.
263 """
264 return self.__has_handle(self.family_map, handle)
265
267 """
268 Return True if the handle exists in the current MediaObjectdatabase.
269 """
270 return self.__has_handle(self.media_map, handle)
271
273 """
274 Return True if the handle exists in the current Repository database.
275 """
276 return self.__has_handle(self.repository_map, handle)
277
279 """
280 Return True if the handle exists in the current Note database.
281 """
282 return self.__has_handle(self.note_map, handle)
283
285 """
286 Return True if the handle exists in the current Event database.
287 """
288 return self.__has_handle(self.event_map, handle)
289
291 """
292 Return True if the handle exists in the current Place database.
293 """
294 return self.__has_handle(self.place_map, handle)
295
297 """
298 Return True if the handle exists in the current Source database.
299 """
300 return self.__has_handle(self.source_map, handle)
301
303 try:
304 return table.get(str(handle), txn=self.txn)
305 except DBERRS, msg:
306 self.__log_error()
307 raise Errors.DbError(msg)
308
310 return self.__get_raw_data(self.person_map, handle)
311
313 return self.__get_raw_data(self.family_map, handle)
314
316 return self.__get_raw_data(self.media_map, handle)
317
319 return self.__get_raw_data(self.place_map, handle)
320
322 return self.__get_raw_data(self.event_map, handle)
323
325 return self.__get_raw_data(self.source_map, handle)
326
328 return self.__get_raw_data(self.repository_map, handle)
329
331 return self.__get_raw_data(self.note_map, handle)
332
333
334
335
336
337
338
339
341 try:
342 return GrampsDBDirAssocCursor(self.reference_map, self.txn)
343 except DBERRS, msg:
344 self.__log_error()
345 raise Errors.DbError(msg)
346
348 try:
349 return GrampsDBDirDupCursor(self.reference_map_primary_map,
350 self.txn)
351 except DBERRS, msg:
352 self.__log_error()
353 raise Errors.DbError(msg)
354
356 try:
357 return GrampsDBDirDupCursor(self.reference_map_referenced_map,
358 self.txn)
359 except DBERRS, msg:
360 self.__log_error()
361 raise Errors.DbError(msg)
362
363
364
366 try:
367 return self.__set_default_person_handle(handle)
368 except DBERRS, msg:
369 self.__log_error()
370 raise Errors.DbError(msg)
371
373 """Set the default Person to the passed instance."""
374 if not self.readonly:
375
376 the_txn = self.env.txn_begin()
377 self.metadata.put('default', str(handle), txn=the_txn)
378 the_txn.commit()
379
381 try:
382 return self.__get_default_person()
383 except DBERRS, msg:
384 self.__log_error()
385 raise Errors.DbError(msg)
386
388 """Return the default Person of the database."""
389 person = self.get_person_from_handle(self.get_default_handle())
390 if person:
391 return person
392 elif (self.metadata) and (not self.readonly):
393
394 the_txn = self.env.txn_begin()
395 self.metadata.put('default', None, txn=the_txn)
396 the_txn.commit()
397 return None
398
406
408 if self.metadata and not self.readonly:
409
410 the_txn = self.env.txn_begin()
411 self.metadata.put(name, col_list, txn=the_txn)
412 the_txn.commit()
413
415 try:
416 dbversion = self.metadata.get('version', default=0)
417 return ((dbversion <= _DBVERSION) and (dbversion >= _MINVERSION))
418 except DBERRS, msg:
419 self.__log_error()
420 raise Errors.DbError(msg)
421
423 try:
424 dbversion = self.metadata.get('version', default=0)
425 return not self.readonly and dbversion < _DBVERSION
426 except DBERRS, msg:
427 self.__log_error()
428 raise Errors.DbError(msg)
429
430 - def load(self, name, callback, mode="w"):
431 try:
432 if self.__check_readonly(name):
433 mode = "r"
434 write_lock_file(name)
435 return self.__load(name, callback, mode)
436 except DBERRS, msg:
437 self.__log_error()
438 raise Errors.DbError(msg)
439
441 for base in [FAMILY_TBL, PLACES_TBL, SOURCES_TBL, MEDIA_TBL,
442 EVENTS_TBL, PERSON_TBL, REPO_TBL, NOTE_TBL, REF_MAP, META]:
443 path = os.path.join(name, base + ".db")
444 if os.path.isfile(path) and not os.access(path, os.W_OK):
445 return True
446 return False
447
448 - def __load(self, name, callback, mode="w"):
449
450 if self.db_is_open:
451 self.close()
452
453 self.readonly = mode == "r"
454
455 if callback:
456 callback(12)
457
458 self.full_name = os.path.abspath(name)
459 self.path = self.full_name
460 self.brief_name = os.path.basename(name)
461
462 self.env = db.DBEnv()
463 self.env.set_cachesize(0, 0x4000000)
464
465
466 self.env.set_lk_max_locks(25000)
467 self.env.set_lk_max_objects(25000)
468 self.env.set_flags(db.DB_LOG_AUTOREMOVE, 1)
469
470
471 env_flags = db.DB_CREATE | db.DB_PRIVATE |\
472 db.DB_INIT_MPOOL | db.DB_INIT_LOCK |\
473 db.DB_INIT_LOG | db.DB_INIT_TXN | db.DB_THREAD
474
475
476
477 env_flags = env_flags | db.DB_RECOVER
478
479
480 env_name = name
481
482 self.env.open(env_name, env_flags)
483 self.env.txn_checkpoint()
484
485 if callback:
486 callback(25)
487 self.metadata = self.__open_table(self.full_name, META)
488
489
490
491 if not self.version_supported:
492 self.__close_early()
493
494 self.family_map = self.__open_table(self.full_name, FAMILY_TBL)
495 self.place_map = self.__open_table(self.full_name, PLACES_TBL)
496 self.source_map = self.__open_table(self.full_name, SOURCES_TBL)
497 self.media_map = self.__open_table(self.full_name, MEDIA_TBL)
498 self.event_map = self.__open_table(self.full_name, EVENTS_TBL)
499 self.person_map = self.__open_table(self.full_name, PERSON_TBL)
500 self.repository_map = self.__open_table(self.full_name, REPO_TBL)
501 self.note_map = self.__open_table(self.full_name, NOTE_TBL)
502 self.reference_map = self.__open_table(self.full_name, REF_MAP,
503 dbtype=db.DB_BTREE)
504 if callback:
505 callback(37)
506
507 self.name_group = db.DB(self.env)
508 self.name_group.set_flags(db.DB_DUP)
509 if self.readonly:
510 self.name_group.open(_mkname(self.full_name, NAME_GROUP),
511 NAME_GROUP, db.DB_HASH, flags=db.DB_RDONLY)
512 else:
513 self.name_group.open(_mkname(self.full_name, NAME_GROUP),
514 NAME_GROUP, db.DB_HASH,
515 flags=self.__open_flags())
516 self.__load_metadata()
517
518 gstats = self.metadata.get('gender_stats', default=None)
519
520 if not self.readonly:
521
522 the_txn = self.env.txn_begin()
523
524 if gstats == None:
525
526 self.metadata.put('version', _DBVERSION, txn=the_txn)
527 elif not self.metadata.has_key('version'):
528
529
530 self.metadata.put('version', 0, txn=the_txn)
531
532 the_txn.commit()
533
534 self.genderStats = GenderStats(gstats)
535
536
537
538
539
540
541 if self.need_upgrade():
542 self.gramps_upgrade(callback)
543
544 if callback:
545 callback(50)
546
547 if not self.secondary_connected:
548 self.__connect_secondary()
549
550 if callback:
551 callback(75)
552
553 self.open_undodb()
554 self.db_is_open = True
555
556 if callback:
557 callback(87)
558
559
560 self.undoindex = -1
561 self.translist = [None] * len(self.translist)
562 self.abort_possible = True
563 self.undo_history_timestamp = time.time()
564
565 return 1
566
568 """
569 Override method from GrampsDbBase because in DIR setup we want the
570 undo database to be inside the dir.
571 """
572 if not self.readonly:
573 self.undolog = os.path.join(self.full_name, "undo.db")
574 self.undodb = db.DB()
575 self.undodb.open(self.undolog, db.DB_RECNO, db.DB_CREATE)
576
577 - def load_from(self, other_database, filename, callback):
586
651
653 """
654 Connect or creates secondary index tables.
655
656 It assumes that the tables either exist and are in the right
657 format or do not exist (in which case they get created).
658
659 It is the responsibility of upgrade code to either create
660 or remove invalid secondary index tables.
661 """
662
663
664 if self.readonly:
665 table_flags = db.DB_RDONLY
666 else:
667 table_flags = self.__open_flags()
668
669 self.surnames = db.DB(self.env)
670 self.surnames.set_flags(db.DB_DUP | db.DB_DUPSORT)
671 self.surnames.open(_mkname(self.full_name, SURNAMES), SURNAMES,
672 db.DB_BTREE, flags=table_flags)
673
674 self.id_trans = db.DB(self.env)
675 self.id_trans.set_flags(db.DB_DUP)
676 self.id_trans.open(_mkname(self.full_name, IDTRANS), IDTRANS,
677 db.DB_HASH, flags=table_flags)
678
679 self.fid_trans = db.DB(self.env)
680 self.fid_trans.set_flags(db.DB_DUP)
681 self.fid_trans.open(_mkname(self.full_name, FIDTRANS), FIDTRANS,
682 db.DB_HASH, flags=table_flags)
683
684 self.eid_trans = db.DB(self.env)
685 self.eid_trans.set_flags(db.DB_DUP)
686 self.eid_trans.open(_mkname(self.full_name, EIDTRANS), EIDTRANS,
687 db.DB_HASH, flags=table_flags)
688
689 self.pid_trans = db.DB(self.env)
690 self.pid_trans.set_flags(db.DB_DUP)
691 self.pid_trans.open(_mkname(self.full_name, PIDTRANS), PIDTRANS,
692 db.DB_HASH, flags=table_flags)
693
694 self.sid_trans = db.DB(self.env)
695 self.sid_trans.set_flags(db.DB_DUP)
696 self.sid_trans.open(_mkname(self.full_name, SIDTRANS), SIDTRANS,
697 db.DB_HASH, flags=table_flags)
698
699 self.oid_trans = db.DB(self.env)
700 self.oid_trans.set_flags(db.DB_DUP)
701 self.oid_trans.open(_mkname(self.full_name, OIDTRANS), OIDTRANS,
702 db.DB_HASH, flags=table_flags)
703
704 self.rid_trans = db.DB(self.env)
705 self.rid_trans.set_flags(db.DB_DUP)
706 self.rid_trans.open(_mkname(self.full_name, RIDTRANS), RIDTRANS,
707 db.DB_HASH, flags=table_flags)
708
709 self.nid_trans = db.DB(self.env)
710 self.nid_trans.set_flags(db.DB_DUP)
711 self.nid_trans.open(_mkname(self.full_name, NIDTRANS), NIDTRANS,
712 db.DB_HASH, flags=table_flags)
713
714 self.reference_map_primary_map = db.DB(self.env)
715 self.reference_map_primary_map.set_flags(db.DB_DUP)
716 self.reference_map_primary_map.open(
717 _mkname(self.full_name, REF_PRI),
718 REF_PRI, db.DB_BTREE, flags=table_flags)
719
720 self.reference_map_referenced_map = db.DB(self.env)
721 self.reference_map_referenced_map.set_flags(db.DB_DUP|db.DB_DUPSORT)
722 self.reference_map_referenced_map.open(
723 _mkname(self.full_name, REF_REF),
724 REF_REF, db.DB_BTREE, flags=table_flags)
725
726 if not self.readonly:
727 self.person_map.associate(self.surnames, find_surname, table_flags)
728 self.person_map.associate(self.id_trans, find_idmap, table_flags)
729 self.family_map.associate(self.fid_trans, find_idmap, table_flags)
730 self.event_map.associate(self.eid_trans, find_idmap, table_flags)
731 self.repository_map.associate(self.rid_trans, find_idmap,
732 table_flags)
733 self.note_map.associate(self.nid_trans, find_idmap, table_flags)
734 self.place_map.associate(self.pid_trans, find_idmap, table_flags)
735 self.media_map.associate(self.oid_trans, find_idmap, table_flags)
736 self.source_map.associate(self.sid_trans, find_idmap, table_flags)
737 self.reference_map.associate(self.reference_map_primary_map,
738 find_primary_handle,
739 table_flags)
740 self.reference_map.associate(self.reference_map_referenced_map,
741 find_referenced_handle,
742 table_flags)
743 self.secondary_connected = True
744
745 self.smap_index = len(self.source_map)
746 self.emap_index = len(self.event_map)
747 self.pmap_index = len(self.person_map)
748 self.fmap_index = len(self.family_map)
749 self.lmap_index = len(self.place_map)
750 self.omap_index = len(self.media_map)
751 self.rmap_index = len(self.repository_map)
752 self.nmap_index = len(self.note_map)
753
755 try:
756 self.__rebuild_secondary(callback)
757 except DBERRS, msg:
758 self.__log_error()
759 raise Errors.DbError(msg)
760
762 if self.readonly:
763 return
764
765 table_flags = self.__open_flags()
766
767
768
769 index = 1
770
771 items = [
772 ( self.id_trans, IDTRANS ),
773 ( self.surnames, SURNAMES ),
774 ( self.fid_trans, FIDTRANS ),
775 ( self.pid_trans, PIDTRANS ),
776 ( self.oid_trans, OIDTRANS ),
777 ( self.eid_trans, EIDTRANS ),
778 ( self.rid_trans, RIDTRANS ),
779 ( self.nid_trans, NIDTRANS ),
780 ( self.reference_map_primary_map, REF_PRI),
781 ( self.reference_map_referenced_map, REF_REF),
782 ]
783
784 for (database, name) in items:
785 database.close()
786 env = db.DB(self.env)
787 env.remove(_mkname(self.full_name, name), name)
788 if callback:
789 callback(index)
790 index += 1
791
792 if callback:
793 callback(11)
794
795
796
797 self.secondary_connected = False
798 self.__connect_secondary()
799 if callback:
800 callback(12)
801
803 try:
804 return self.__find_backlink_handles(handle, include_classes)
805 except DBERRS, msg:
806 self.__log_error()
807 raise Errors.DbError(msg)
808
810 """
811 Find all objects that hold a reference to the object handle.
812
813 Returns an interator over a list of (class_name, handle) tuples.
814
815 @param handle: handle of the object to search for.
816 @type handle: database handle
817 @param include_classes: list of class names to include in the results.
818 Default: None means include all classes.
819 @type include_classes: list of class names
820
821 Note that this is a generator function, it returns a iterator for
822 use in loops. If you want a list of the results use:
823
824 > result_list = [i for i in find_backlink_handles(handle)]
825 """
826
827
828
829 referenced_cur = self.get_reference_map_referenced_cursor()
830
831 try:
832 ret = referenced_cur.set(handle)
833 except:
834 ret = None
835
836 while (ret is not None):
837 (key, data) = ret
838
839
840
841
842
843
844
845
846
847
848 if self.readonly:
849 data = self.reference_map.get(data)
850 else:
851 data = pickle.loads(data)
852 if include_classes == None or \
853 KEY_TO_CLASS_MAP[data[0][0]] in include_classes:
854 yield (KEY_TO_CLASS_MAP[data[0][0]], data[0][1])
855
856 ret = referenced_cur.next_dup()
857
858 referenced_cur.close()
859
861 """
862 Remove all references to the primary object from the reference_map.
863 """
864
865 primary_cur = self.get_reference_map_primary_cursor()
866
867 try:
868 ret = primary_cur.set(handle)
869 except:
870 ret = None
871
872 remove_list = set()
873 while (ret is not None):
874 (key, data) = ret
875
876
877
878
879
880
881
882
883 main_key = (handle, pickle.loads(data)[1][1])
884
885
886
887 remove_list.add(main_key)
888
889 ret = primary_cur.next_dup()
890
891 primary_cur.close()
892
893
894 for main_key in remove_list:
895 self.__remove_reference(main_key, transaction, txn)
896
898 """
899 If txn is given, then changes are written right away using txn.
900 """
901
902
903
904
905 handle = obj.handle
906 update = self.reference_map_primary_map.has_key(str(handle))
907
908 if update:
909
910
911
912
913
914
915 existing_references = set()
916
917 primary_cur = self.get_reference_map_primary_cursor()
918
919 try:
920 ret = primary_cur.set(handle)
921 except:
922 ret = None
923
924 while (ret is not None):
925 (key, data) = ret
926
927
928
929
930
931
932
933
934
935
936 existing_reference = pickle.loads(data)[1]
937 existing_references.add(
938 (KEY_TO_CLASS_MAP[existing_reference[0]],
939 existing_reference[1]))
940 ret = primary_cur.next_dup()
941
942 primary_cur.close()
943
944
945
946
947
948 current_references = set(obj.get_referenced_handles_recursively())
949
950 no_longer_required_references = existing_references.difference(
951 current_references)
952
953 new_references = current_references.difference(existing_references)
954
955 else:
956
957
958 no_longer_required_references = set()
959 new_references = set(obj.get_referenced_handles_recursively())
960
961
962 for (ref_class_name, ref_handle) in new_references:
963 data = ((CLASS_TO_KEY_MAP[obj.__class__.__name__], handle),
964 (CLASS_TO_KEY_MAP[ref_class_name], ref_handle),)
965 self.__add_reference((handle, ref_handle), data, transaction, txn)
966
967
968 for (ref_class_name, ref_handle) in no_longer_required_references:
969 try:
970 self.__remove_reference((handle, ref_handle), transaction, txn)
971 except:
972
973 pass
974
976 """
977 Remove the reference specified by the key, preserving the change in
978 the passed transaction.
979 """
980 if not self.readonly:
981 if transaction.batch:
982 self.reference_map.delete(str(key), txn=txn)
983 else:
984 old_data = self.reference_map.get(str(key), txn=self.txn)
985 transaction.add(REFERENCE_KEY, str(key), old_data, None)
986 transaction.reference_del.append(str(key))
987
989 """
990 Add the reference specified by the key and the data, preserving the
991 change in the passed transaction.
992 """
993
994 if self.readonly or not key:
995 return
996
997 if transaction.batch:
998 self.reference_map.put(str(key), data, txn=txn)
999 else:
1000 transaction.add(REFERENCE_KEY, str(key), None, data)
1001 transaction.reference_add.append((str(key), data))
1002
1004 try:
1005 self.__reindex_reference_map(callback)
1006 except DBERRS, msg:
1007 self.__log_error()
1008 raise Errors.DbError(msg)
1009
1011 """
1012 Reindex all primary records in the database.
1013
1014 This will be a slow process for large databases.
1015 """
1016
1017
1018 self.reference_map_referenced_map.close()
1019 junk = db.DB(self.env)
1020 junk.remove(_mkname(self.full_name, REF_REF), REF_REF)
1021 callback(1)
1022
1023 self.reference_map_primary_map.close()
1024 junk = db.DB(self.env)
1025 junk.remove(_mkname(self.full_name, REF_PRI), REF_PRI)
1026 callback(2)
1027
1028 self.reference_map.close()
1029 junk = db.DB(self.env)
1030 junk.remove(_mkname(self.full_name, REF_MAP), REF_MAP)
1031 callback(3)
1032
1033
1034 self.reference_map = self.__open_table(self.full_name, REF_MAP,
1035 dbtype=db.DB_BTREE)
1036
1037 open_flags = self.__open_flags()
1038 self.reference_map_primary_map = db.DB(self.env)
1039 self.reference_map_primary_map.set_flags(db.DB_DUP)
1040 self.reference_map_primary_map.open(
1041 _mkname(self.full_name, REF_PRI), REF_PRI, db.DB_BTREE,
1042 flags=open_flags)
1043
1044 self.reference_map.associate(self.reference_map_primary_map,
1045 find_primary_handle, open_flags)
1046
1047
1048
1049 primary_tables = {
1050 'Person': {'cursor_func': self.get_person_cursor,
1051 'class_func': Person},
1052 'Family': {'cursor_func': self.get_family_cursor,
1053 'class_func': Family},
1054 'Event': {'cursor_func': self.get_event_cursor,
1055 'class_func': Event},
1056 'Place': {'cursor_func': self.get_place_cursor,
1057 'class_func': Place},
1058 'Source': {'cursor_func': self.get_source_cursor,
1059 'class_func': Source},
1060 'MediaObject': {'cursor_func': self.get_media_cursor,
1061 'class_func': MediaObject},
1062 'Repository': {'cursor_func': self.get_repository_cursor,
1063 'class_func': Repository},
1064 'Note': {'cursor_func': self.get_note_cursor,
1065 'class_func': Note},
1066 }
1067
1068 transaction = self.transaction_begin(batch=True, no_magic=True)
1069 callback(4)
1070
1071
1072
1073 for primary_table_name in primary_tables.keys():
1074
1075 cursor = primary_tables[primary_table_name]['cursor_func']()
1076 data = cursor.first()
1077
1078
1079
1080 class_func = primary_tables[primary_table_name]['class_func']
1081 while data:
1082 found_handle, val = data
1083 obj = InstanceType(class_func)
1084 obj.unserialize(val)
1085
1086 the_txn = self.env.txn_begin()
1087 self.update_reference_map(obj, transaction, the_txn)
1088 if the_txn:
1089 the_txn.commit()
1090
1091 data = cursor.next()
1092
1093 cursor.close()
1094 callback(5)
1095 self.transaction_commit(transaction, _("Rebuild reference map"))
1096
1097 self.reference_map_referenced_map = db.DB(self.env)
1098 self.reference_map_referenced_map.set_flags(db.DB_DUP|db.DB_DUPSORT)
1099 self.reference_map_referenced_map.open(
1100 _mkname(self.full_name, REF_REF),
1101 REF_REF, db.DB_BTREE,flags=open_flags)
1102 self.reference_map.associate(self.reference_map_referenced_map,
1103 find_referenced_handle, open_flags)
1104 callback(6)
1105
1173
1175 """
1176 Bail out if the incompatible version is discovered:
1177 * close cleanly to not damage data/env
1178 * raise exception
1179 """
1180 self.metadata.close()
1181 self.env.close()
1182 self.metadata = None
1183 self.env = None
1184 self.db_is_open = False
1185 raise FileVersionError(
1186 "The database version is not supported by this "
1187 "version of GRAMPS.\nPlease upgrade to the "
1188 "corresponding version or use XML for porting"
1189 "data between different database versions.")
1190
1192 try:
1193 self.__close()
1194 clear_lock_file(self.get_save_path())
1195 except DBERRS, msg:
1196 self.__log_error()
1197 raise Errors.DbError(msg)
1198 except IOError:
1199 pass
1200
1202 if not self.db_is_open:
1203 return
1204
1205 self.env.txn_checkpoint()
1206
1207 self.__close_metadata()
1208 self.name_group.close()
1209 self.surnames.close()
1210 self.id_trans.close()
1211 self.fid_trans.close()
1212 self.eid_trans.close()
1213 self.rid_trans.close()
1214 self.nid_trans.close()
1215 self.oid_trans.close()
1216 self.sid_trans.close()
1217 self.pid_trans.close()
1218 self.reference_map_primary_map.close()
1219 self.reference_map_referenced_map.close()
1220 self.reference_map.close()
1221
1222
1223
1224 self.person_map.close()
1225 self.family_map.close()
1226 self.repository_map.close()
1227 self.note_map.close()
1228 self.place_map.close()
1229 self.source_map.close()
1230 self.media_map.close()
1231 self.event_map.close()
1232 self.env.close()
1233
1234 try:
1235 self.close_undodb()
1236 except db.DBNoSuchFileError:
1237 pass
1238
1239 self.person_map = None
1240 self.family_map = None
1241 self.repository_map = None
1242 self.note_map = None
1243 self.place_map = None
1244 self.source_map = None
1245 self.media_map = None
1246 self.event_map = None
1247 self.surnames = None
1248 self.name_group = None
1249 self.env = None
1250 self.metadata = None
1251 self.db_is_open = False
1252
1254 if self.readonly or not handle:
1255 return
1256
1257 handle = str(handle)
1258 if transaction.batch:
1259 the_txn = self.env.txn_begin()
1260 self.delete_primary_from_reference_map(handle, transaction,
1261 txn=the_txn)
1262 data_map.delete(handle, txn=the_txn)
1263 if the_txn:
1264 the_txn.commit()
1265 else:
1266 self.delete_primary_from_reference_map(handle, transaction)
1267 old_data = data_map.get(handle, txn=self.txn)
1268 transaction.add(key, handle, old_data, None)
1269 del_list.append(handle)
1270
1272 self.person_map.delete(str(handle), txn=self.txn)
1273
1275 self.source_map.delete(str(handle), txn=self.txn)
1276
1278 self.repository_map.delete(str(handle), txn=self.txn)
1279
1281 self.note_map.delete(str(handle), txn=self.txn)
1282
1284 self.place_map.delete(str(handle), txn=self.txn)
1285
1288
1290 self.family_map.delete(str(handle), txn=self.txn)
1291
1293 self.event_map.delete(str(handle), txn=self.txn)
1294
1296 """
1297 Make name group under the value of group.
1298
1299 If group =None, the old grouping is deleted.
1300 """
1301 try:
1302 self.__set_name_group_mapping(name, group)
1303 except DBERRS, msg:
1304 self.__log_error()
1305 raise Errors.DbError(msg)
1306
1308 if not self.readonly:
1309
1310 the_txn = self.env.txn_begin()
1311
1312 name = str(name)
1313 data = self.name_group.get(name, txn=the_txn)
1314 if data is not None:
1315 self.name_group.delete(name, txn=the_txn)
1316 if group is not None:
1317 self.name_group.put(name, group, txn=the_txn)
1318 the_txn.commit()
1319 self.emit('person-rebuild')
1320
1322 try:
1323 self.surname_list = list(set(self.surnames.keys()))
1324 self.sort_surname_list()
1325 except DBERRS, msg:
1326 self.__log_error()
1327 raise Errors.DbError(msg)
1328
1330 """
1331 Check whether there are persons with the same surname left in
1332 the database.
1333
1334 If not then we need to remove the name from the list.
1335 The function must be overridden in the derived class.
1336 """
1337 name = str(person.get_primary_name().get_surname())
1338 try:
1339 if self.surnames.keys().count(name) == 1:
1340 self.surname_list.remove(unicode(name))
1341 except ValueError:
1342 pass
1343 except DBERRS, msg:
1344 self.__log_error()
1345 raise Errors.DbError(msg)
1346
1348 try:
1349 if tbl.has_key(str(val)):
1350 data = tbl.get(str(val), txn=self.txn)
1351 obj = class_init()
1352
1353
1354
1355
1356 if self.readonly:
1357 tuple_data = prim_tbl.get(data, txn=self.txn)
1358 else:
1359 tuple_data = pickle.loads(data)
1360 obj.unserialize(tuple_data)
1361 return obj
1362 else:
1363 return None
1364 except DBERRS, msg:
1365 self.__log_error()
1366 raise Errors.DbError(msg)
1367
1369 """
1370 Find a Person in the database from the passed gramps' ID.
1371
1372 If no such Person exists, None is returned.
1373 """
1374 return self.__get_obj_from_gramps_id(val, self.id_trans, Person,
1375 self.person_map)
1376
1378 """
1379 Find a Family in the database from the passed gramps' ID.
1380
1381 If no such Family exists, None is return.
1382 """
1383 return self.__get_obj_from_gramps_id(val, self.fid_trans, Family,
1384 self.family_map)
1385
1387 """
1388 Find an Event in the database from the passed gramps' ID.
1389
1390 If no such Family exists, None is returned.
1391 """
1392 return self.__get_obj_from_gramps_id(val, self.eid_trans, Event,
1393 self.event_map)
1394
1396 """
1397 Find a Place in the database from the passed gramps' ID.
1398
1399 If no such Place exists, None is returned.
1400 """
1401 return self.__get_obj_from_gramps_id(val, self.pid_trans, Place,
1402 self.place_map)
1403
1405 """
1406 Find a Source in the database from the passed gramps' ID.
1407
1408 If no such Source exists, None is returned.
1409 """
1410 return self.__get_obj_from_gramps_id(val, self.sid_trans, Source,
1411 self.source_map)
1412
1414 """
1415 Find a MediaObject in the database from the passed gramps' ID.
1416
1417 If no such MediaObject exists, None is returned.
1418 """
1419 return self.__get_obj_from_gramps_id(val, self.oid_trans, MediaObject,
1420 self.media_map)
1421
1423 """
1424 Find a Repository in the database from the passed gramps' ID.
1425
1426 If no such Repository exists, None is returned.
1427 """
1428 return self.__get_obj_from_gramps_id(val, self.rid_trans, Repository,
1429 self.repository_map)
1430
1432 """
1433 Find a Note in the database from the passed gramps' ID.
1434
1435 If no such Note exists, None is returned.
1436 """
1437 return self.__get_obj_from_gramps_id(val, self.nid_trans, Note,
1438 self.note_map)
1439
1440 - def commit_base(self, obj, data_map, key, update_list, add_list,
1441 transaction, change_time):
1442 """
1443 Commit the specified object to the database, storing the changes as
1444 part of the transaction.
1445 """
1446 if self.readonly or not obj or not obj.handle:
1447 return
1448
1449 if change_time:
1450 obj.change = int(change_time)
1451 else:
1452 obj.change = int(time.time())
1453 handle = str(obj.handle)
1454
1455 if transaction.batch:
1456 the_txn = self.env.txn_begin()
1457 self.update_reference_map(obj, transaction, txn=the_txn)
1458 data_map.put(handle, obj.serialize(), txn=the_txn)
1459 if the_txn:
1460 the_txn.commit()
1461 old_data = None
1462 else:
1463 self.update_reference_map(obj, transaction)
1464 old_data = data_map.get(handle, txn=self.txn)
1465 new_data = obj.serialize()
1466 transaction.add(key, handle, old_data, new_data)
1467 if old_data:
1468 update_list.append((handle, new_data))
1469 else:
1470 add_list.append((handle, new_data))
1471 return old_data
1472
1474 retlist = []
1475 for (handle, data) in add_list:
1476 db_map.put(handle, data, self.txn)
1477 retlist.append(str(handle))
1478 return retlist
1479
1481 try:
1482 data = data_map.get(str(handle), txn=self.txn)
1483 except:
1484 data = None
1485
1486
1487 if data_map:
1488 log.error("Failed to get from handle", exc_info=True)
1489 if data:
1490 newobj = InstanceType(class_type)
1491 newobj.unserialize(data)
1492 return newobj
1493 return None
1494
1496 """
1497 Find a object of class_type in the database from the passed handle.
1498
1499 If no object exists, a new object is added to the database.
1500
1501 @return: Returns a tuple, first the object, second a bool which is True
1502 if the object is new
1503 @rtype: tuple
1504 """
1505 obj = class_type()
1506 handle = str(handle)
1507 new = True
1508 if dmap.has_key(handle):
1509 data = dmap.get(handle, txn=self.txn)
1510 obj.unserialize(data)
1511
1512 if obj.gramps_id is not None:
1513 new = False
1514 else:
1515 obj.set_handle(handle)
1516 add_func(obj, transaction)
1517 return obj, new
1518
1520 try:
1521 return self.__transaction_begin(msg, batch, no_magic)
1522 except DBERRS, msg:
1523 self.__log_error()
1524 raise Errors.DbError(msg)
1525
        """
        Create a new Transaction tied to the current UNDO database.

        The transaction has no effect until it is committed using the
        transaction_commit function of the this database object.
        """

        if batch:
            # A batch transaction cannot be undone: forget any recorded
            # undo history so the UI does not offer an impossible undo.
            self.abort_possible = False

            self.undoindex = -1
            self.translist = [None] * len(self.translist)
        transaction = BdbTransaction(msg, self.undodb, batch, no_magic)
        if transaction.batch:
            # Flush pending log records, then disable synchronous flushing
            # for the duration of the batch (restored on commit).
            self.env.txn_checkpoint()
            self.env.set_flags(db.DB_TXN_NOSYNC, 1)

            if self.secondary_connected and not transaction.no_magic:
                # Drop the secondary indices while the batch runs; keeping
                # them associated would slow every put.  They are rebuilt
                # when the batch transaction is committed.
                self.surnames.close()
                junk = db.DB(self.env)
                junk.remove(_mkname(self.full_name, SURNAMES), SURNAMES)

                self.reference_map_referenced_map.close()
                junk = db.DB(self.env)
                junk.remove(_mkname(self.full_name, REF_REF), REF_REF)

        return transaction
1557
        # Public entry point: delegate to __transaction_commit and convert
        # low-level Berkeley DB failures into the application's DbError.
        try:
            self.__transaction_commit(transaction, msg)
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)
1564
1566
1567
1568 self.txn = self.env.txn_begin()
1569
1570 GrampsDbBase.transaction_commit(self, transaction, msg)
1571
1572 for (key, data) in transaction.reference_add:
1573 self.reference_map.put(str(key), data, txn=self.txn)
1574
1575 for key in transaction.reference_del:
1576 self.reference_map.delete(str(key), txn=self.txn)
1577
1578
1579 self.txn.commit()
1580 if transaction.batch:
1581 self.env.txn_checkpoint()
1582 self.env.set_flags(db.DB_TXN_NOSYNC, 0)
1583
1584 if not transaction.no_magic:
1585
1586 open_flags = self.__open_flags()
1587 dupe_flags = db.DB_DUP|db.DB_DUPSORT
1588
1589 self.surnames = db.DB(self.env)
1590 self.surnames.set_flags(dupe_flags)
1591 self.surnames.open(
1592 _mkname(self.full_name, "surnames"),
1593 'surnames', db.DB_BTREE,flags=open_flags)
1594 self.person_map.associate(self.surnames, find_surname,
1595 open_flags)
1596
1597 self.reference_map_referenced_map = db.DB(self.env)
1598 self.reference_map_referenced_map.set_flags(dupe_flags)
1599 self.reference_map_referenced_map.open(
1600 _mkname(self.full_name, REF_REF),
1601 REF_REF, db.DB_BTREE,flags=open_flags)
1602 self.reference_map.associate(self.reference_map_referenced_map,
1603 find_referenced_handle, open_flags)
1604
1605
1606 self.build_surname_list()
1607
1608 self.txn = None
1609
1610 - def undo(self, update_history=True):
1611 try:
1612 self.txn = self.env.txn_begin()
1613 status = GrampsDbBase.undo(self, update_history)
1614 if status:
1615 self.txn.commit()
1616 else:
1617 self.txn.abort()
1618 self.txn = None
1619 return status
1620 except DBERRS, msg:
1621 self.__log_error()
1622 raise Errors.DbError(msg)
1623
1624 - def redo(self, update_history=True):
1625 try:
1626 self.txn = self.env.txn_begin()
1627 status = GrampsDbBase.redo(self, update_history)
1628 if status:
1629 self.txn.commit()
1630 else:
1631 self.txn.abort()
1632 self.txn = None
1633 return status
1634 except DBERRS, msg:
1635 self.__log_error()
1636 raise Errors.DbError(msg)
1637
1639 try:
1640 if data == None:
1641 self.reference_map.delete(handle, txn=self.txn)
1642 else:
1643 self.reference_map.put(handle, data, txn=self.txn)
1644 except DBERRS, msg:
1645 self.__log_error()
1646 raise Errors.DbError(msg)
1647
1648 - def undo_data(self, data, handle, db_map, signal_root):
1649 try:
1650 if data == None:
1651 self.emit(signal_root + '-delete', ([handle],))
1652 db_map.delete(handle, txn=self.txn)
1653 else:
1654 ex_data = db_map.get(handle, txn=self.txn)
1655 if ex_data:
1656 signal = signal_root + '-update'
1657 else:
1658 signal = signal_root + '-add'
1659 db_map.put(handle, data, txn=self.txn)
1660 self.emit(signal, ([handle],))
1661 except DBERRS, msg:
1662 self.__log_error()
1663 raise Errors.DbError(msg)
1664
        # Wire up progress reporting for the upgrade, then time it.
        UpdateCallback.__init__(self, callback)

        t = time.time()

        # NOTE(review): the per-version upgrade steps that belong between
        # the timer start and the report below are not visible in this
        # view -- confirm against the full file before relying on this.
        print "Upgrade time:", int(time.time()-t), "seconds"
1674
1675
1677 - def __init__(self, msg, db, batch=False, no_magic=False):
1678 Transaction.__init__(self, msg, db, batch, no_magic)
1679 self.reference_del = []
1680 self.reference_add = []
1681
    # Full path of one named table file inside the database directory,
    # e.g. <path>/person.db.
    return os.path.join(path, name + ".db")
1684
    # Remove the advisory lock file; if it is already gone (or was never
    # created) there is nothing to do.
    try:
        os.unlink(os.path.join(name, "lock"))
    except OSError:
        return
1690
1692 if not os.path.isdir(name):
1693 os.mkdir(name)
1694 f = open(os.path.join(name, "lock"), "w")
1695 if os.name == 'nt':
1696 text = os.environ['USERNAME']
1697 else:
1698 host = os.uname()[1]
1699
1700 try:
1701 user = os.getlogin()
1702 except:
1703 user = os.environ.get('USER')
1704 text = "%s@%s" % (user, host)
1705 f.write(_("Locked by %s") % text)
1706 f.close()
1707
1708 if __name__ == "__main__":
1709
1710 import sys
1711
1712 d = GrampsDBDir()
1713 d.load(sys.argv[1], lambda x: x)
1714
1715 c = d.get_person_cursor()
1716 data = c.first()
1717 while data:
1718 person = Person(data[1])
1719 print data[0], person.get_primary_name().get_name(),
1720 data = c.next()
1721 c.close()
1722
1723 print d.surnames.keys()
1724