def instance_from_struct(cls, struct):
    """
    Given a struct with metadata, create a Gramps object.

    cls is the class when called as a classmethod.
    """
    from gramps.gen.lib import (Person, Family, Event, Source, Place,
                                Citation, Repository, Media, Note, Tag, Date)
    if isinstance(struct, dict):
        if "_class" in struct.keys():
            if struct["_class"] == "Person":
                return Person.create(Person.from_struct(struct))
            elif struct["_class"] == "Family":
                return Family.create(Family.from_struct(struct))
            elif struct["_class"] == "Event":
                return Event.create(Event.from_struct(struct))
            elif struct["_class"] == "Source":
                return Source.create(Source.from_struct(struct))
            elif struct["_class"] == "Place":
                return Place.create(Place.from_struct(struct))
            elif struct["_class"] == "Citation":
                return Citation.create(Citation.from_struct(struct))
            elif struct["_class"] == "Repository":
                return Repository.create(Repository.from_struct(struct))
            elif struct["_class"] == "Media":
                return Media.create(Media.from_struct(struct))
            elif struct["_class"] == "Note":
                return Note.create(Note.from_struct(struct))
            elif struct["_class"] == "Tag":
                return Tag.create(Tag.from_struct(struct))
            elif struct["_class"] == "Date":
                return Date().unserialize(Date.from_struct(struct, full=True))
    raise AttributeError("invalid struct: %s" % struct)
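# Illustrative usage sketch, not part of the original module:
# instance_from_struct() dispatches on the "_class" key of a struct dict and
# rebuilds the matching Gramps object.  The sketch assumes that to_struct()
# is available on primary objects in this version of gramps.gen.lib; the
# demo function name is hypothetical.
def _demo_instance_from_struct():
    from gramps.gen.lib import Note
    struct = Note().to_struct()                 # carries "_class": "Note"
    obj = instance_from_struct(None, struct)    # cls is unused by the body
    assert isinstance(obj, Note)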
def sort_date(self, data):
    obj = Media()
    obj.unserialize(data)
    d = obj.get_date_object()
    if d:
        return "%09d" % d.get_sort_value()
    else:
        return ''
def drag_data_received(self, widget, context, x, y, sel_data, info, time):
    """
    Handle the standard gtk interface for drag_data_received.

    If the selection data is defined, extract the value from
    sel_data.data, and decide if this is a move or a reorder.
    The only data we accept on mediaview is dropped files, so URI_LIST;
    we assume this is what we obtain.
    """
    if not sel_data:
        return
    files = sel_data.get_uris()
    for file in files:
        protocol, site, mfile, j, k, l = urlparse(file)
        if protocol == "file":
            name = url2pathname(mfile)
            mime = get_type(name)
            if not is_valid_type(mime):
                return
            photo = Media()
            self.uistate.set_busy_cursor(True)
            photo.set_checksum(create_checksum(name))
            self.uistate.set_busy_cursor(False)
            base_dir = str(media_path(self.dbstate.db))
            if os.path.exists(base_dir):
                name = relative_path(name, base_dir)
            photo.set_path(name)
            photo.set_mime_type(mime)
            basename = os.path.basename(name)
            (root, ext) = os.path.splitext(basename)
            photo.set_description(root)
            with DbTxn(_("Drag Media Object"), self.dbstate.db) as trans:
                self.dbstate.db.add_media(photo, trans)
    widget.emit_stop_by_name('drag_data_received')
def _prepare(self):
    self.set_total(self.db.get_number_of_media())
    with self.db.get_media_cursor() as cursor:
        for handle, data in cursor:
            obj = Media()
            obj.unserialize(data)
            if os.path.isabs(obj.path):
                self.handle_list.append(handle)
                self.path_list.append(obj.path)
            self.update()
    self.reset()
def _prepare(self):
    from_text = str(self.from_entry.get_text())
    self.set_total(self.db.get_number_of_media())
    with self.db.get_media_cursor() as cursor:
        for handle, data in cursor:
            obj = Media()
            obj.unserialize(data)
            if obj.get_path().find(from_text) != -1:
                self.handle_list.append(handle)
                self.path_list.append(obj.path)
            self.update()
    self.reset()
    self.prepared = True
def importData(db, filename, user):
    """Function called by Gramps to import data in JSON format."""
    db.disable_signals()
    try:
        with DbTxn(_("JSON import"), db, batch=True) as trans:
            with OpenFileOrStdin(filename, encoding="utf-8") as fp:
                line = fp.readline()
                while line:
                    data = json.loads(line)
                    if data["_class"] == "Person":
                        obj = Person.create(Person.from_struct(data))
                        db.add_person(obj, trans)
                    elif data["_class"] == "Family":
                        obj = Family.create(Family.from_struct(data))
                        db.add_family(obj, trans)
                    elif data["_class"] == "Event":
                        obj = Event.create(Event.from_struct(data))
                        db.add_event(obj, trans)
                    elif data["_class"] == "Media":
                        obj = Media.create(Media.from_struct(data))
                        db.add_media(obj, trans)
                    elif data["_class"] == "Repository":
                        obj = Repository.create(Repository.from_struct(data))
                        db.add_repository(obj, trans)
                    elif data["_class"] == "Tag":
                        obj = Tag.create(Tag.from_struct(data))
                        db.add_tag(obj, trans)
                    elif data["_class"] == "Source":
                        obj = Source.create(Source.from_struct(data))
                        db.add_source(obj, trans)
                    elif data["_class"] == "Citation":
                        obj = Citation.create(Citation.from_struct(data))
                        db.add_citation(obj, trans)
                    elif data["_class"] == "Note":
                        obj = Note.create(Note.from_struct(data))
                        db.add_note(obj, trans)
                    elif data["_class"] == "Place":
                        obj = Place.create(Place.from_struct(data))
                        db.add_place(obj, trans)
                    else:
                        # data is a dict; use lazy %-style logging rather
                        # than string concatenation
                        LOG.warning("ignored: %s", data)
                    line = fp.readline()
    except EnvironmentError as err:
        user.notify_error(_("%s could not be opened\n") % filename, str(err))
    db.enable_signals()
    db.request_rebuild()
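# Illustrative sketch, not part of the original module: importData() reads one
# JSON object per line, each carrying a "_class" key that names the Gramps
# type.  The helper below writes such a file from empty objects purely to show
# the expected layout; it assumes to_struct() is available on primary objects,
# and the function name and path argument are hypothetical.
def _write_demo_import_file(path):
    import json
    from gramps.gen.lib import Person, Note
    with open(path, "w", encoding="utf-8") as fp:
        for obj in (Person(), Note()):
            fp.write(json.dumps(obj.to_struct()))
            fp.write("\n")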
def _run(self):
    """
    Go through directories that are mentioned in the database via media
    files, and include all images that are not already included.
    """
    if not self.prepared:
        self.prepare()
    self.set_total(len(self.dir_list))
    for directory in self.dir_list:
        for (dirpath, dirnames, filenames) in os.walk(directory):
            if ".git" in dirnames:
                dirnames.remove('.git')  # don't visit .git directories
            for filename in filenames:
                media_full_path = os.path.join(dirpath, filename)
                if media_full_path not in self.path_list:
                    self.path_list.append(media_full_path)
                    mime_type = get_type(media_full_path)
                    if is_image_type(mime_type):
                        obj = Media()
                        obj.set_path(media_full_path)
                        obj.set_mime_type(mime_type)
                        (root, ext) = os.path.splitext(filename)
                        obj.set_description(root)
                        self.db.add_media(obj, self.trans)
        self.update()
    return True
def _prepare(self):
    """
    Get all of the fullpaths, and the directories of media
    objects in the database.
    """
    self.dir_list = set()
    self.set_total(self.db.get_number_of_media())
    with self.db.get_media_cursor() as cursor:
        for handle, data in cursor:
            obj = Media()
            obj.unserialize(data)
            self.handle_list.append(handle)
            full_path = media_path_full(self.db, obj.path)
            self.path_list.append(full_path)
            directory, filename = os.path.split(full_path)
            if directory not in self.dir_list:
                self.dir_list.add(directory)
            self.update()
    self.reset()
def drag_data_received(self, widget, context, x, y, sel_data, info, time):
    """
    Handle the standard gtk interface for drag_data_received.

    If the selection data is defined, extract the value from
    sel_data.data, and decide if this is a move or a reorder.
    """
    if sel_data and sel_data.get_data():
        try:
            (mytype, selfid, obj, row_from) = pickle.loads(sel_data.get_data())

            # make sure this is the correct DND type for this object
            if mytype == self._DND_TYPE.drag_type:

                # determine the destination row
                data = self.iconlist.get_dest_item_at_pos(x, y)
                if data:
                    (path, pos) = data
                    row = path.get_indices()[0]
                    if pos == Gtk.IconViewDropPosition.DROP_LEFT:
                        row = max(row, 0)
                    elif pos == Gtk.IconViewDropPosition.DROP_RIGHT:
                        row = min(row, len(self.get_data()))
                    elif pos == Gtk.IconViewDropPosition.DROP_INTO:
                        row = min(row + 1, len(self.get_data()))
                else:
                    row = len(self.get_data())

                # if this is the same object, we have a move, otherwise,
                # it is a standard drag-n-drop
                if id(self) == selfid:
                    self._move(row_from, row, obj)
                else:
                    self._handle_drag(row, obj)
                self.rebuild()
            elif mytype == DdTargets.MEDIAOBJ.drag_type:
                oref = MediaRef()
                oref.set_reference_handle(obj)
                self.get_data().append(oref)
                self.changed = True
                self.rebuild()
            elif self._DND_EXTRA and mytype == self._DND_EXTRA.drag_type:
                self.handle_extra_type(mytype, obj)
        except pickle.UnpicklingError:
            files = sel_data.get_uris()
            for file in files:
                protocol, site, mfile, j, k, l = urlparse(file)
                if protocol == "file":
                    name = url2pathname(mfile)
                    mime = get_type(name)
                    if not is_valid_type(mime):
                        return
                    photo = Media()
                    self.uistate.set_busy_cursor(True)
                    photo.set_checksum(create_checksum(name))
                    self.uistate.set_busy_cursor(False)
                    base_dir = str(media_path(self.dbstate.db))
                    if os.path.exists(base_dir):
                        name = relative_path(name, base_dir)
                    photo.set_path(name)
                    photo.set_mime_type(mime)
                    basename = os.path.basename(name)
                    (root, ext) = os.path.splitext(basename)
                    photo.set_description(root)
                    with DbTxn(_("Drag Media Object"),
                               self.dbstate.db) as trans:
                        self.dbstate.db.add_media(photo, trans)
                        oref = MediaRef()
                        oref.set_reference_handle(photo.get_handle())
                        self.get_data().append(oref)
                        self.changed = True
                    self.rebuild()
def add(self, obj):
    """Add a new media object to the media list."""
    try:
        EditMedia(self.dbstate, self.uistate, [], Media())
    except WindowActiveError:
        pass
def drag_data_received(self, widget, context, x, y, sel_data, info, time):
    """
    Handle the standard gtk interface for drag_data_received.

    If the selection data is defined, extract the value from
    sel_data.data, and decide if this is a move or a reorder.
    """
    if sel_data and sel_data.get_data():
        sel_list = []
        try:
            rets = pickle.loads(sel_data.get_data())
            # single dnd item
            if isinstance(rets, tuple):
                sel_list.append(rets)
            # multiple dnd items
            elif isinstance(rets, list):
                for ret in rets:
                    sel_list.append(pickle.loads(ret))
            for sel in sel_list:
                (mytype, selfid, obj, row_from) = sel

                # make sure this is the correct DND type for this object
                if mytype == self._DND_TYPE.drag_type:

                    # determine the destination row
                    data = self.iconlist.get_dest_item_at_pos(x, y)
                    if data:
                        (path, pos) = data
                        row = path.get_indices()[0]
                        if pos == Gtk.IconViewDropPosition.DROP_LEFT:
                            row = max(row, 0)
                        elif pos == Gtk.IconViewDropPosition.DROP_RIGHT:
                            row = min(row, len(self.get_data()))
                        elif pos == Gtk.IconViewDropPosition.DROP_INTO:
                            row = min(row + 1, len(self.get_data()))
                    else:
                        row = len(self.get_data())

                    # if this is the same object, we have a move, otherwise,
                    # it is a standard drag-n-drop
                    if id(self) == selfid:
                        self._move(row_from, row, obj)
                    else:
                        self._handle_drag(row, obj)
                    self.rebuild()
                elif mytype == DdTargets.MEDIAOBJ.drag_type:
                    oref = MediaRef()
                    oref.set_reference_handle(obj)
                    self.get_data().append(oref)
                    self.changed = True
                    self.rebuild()
                elif self._DND_EXTRA and mytype == self._DND_EXTRA.drag_type:
                    self.handle_extra_type(mytype, obj)
        except pickle.UnpicklingError:
            files = sel_data.get_uris()
            for file in files:
                protocol, site, mfile, j, k, l = urlparse(file)
                if protocol == "file":
                    name = url2pathname(mfile)
                    mime = get_type(name)
                    if not is_valid_type(mime):
                        return
                    photo = Media()
                    self.uistate.set_busy_cursor(True)
                    photo.set_checksum(create_checksum(name))
                    self.uistate.set_busy_cursor(False)
                    base_dir = str(media_path(self.dbstate.db))
                    if os.path.exists(base_dir):
                        name = relative_path(name, base_dir)
                    photo.set_path(name)
                    photo.set_mime_type(mime)
                    basename = os.path.basename(name)
                    (root, ext) = os.path.splitext(basename)
                    photo.set_description(root)
                    with DbTxn(_("Drag Media Object"),
                               self.dbstate.db) as trans:
                        self.dbstate.db.add_media(photo, trans)
                        oref = MediaRef()
                        oref.set_reference_handle(photo.get_handle())
                        self.get_data().append(oref)
                        self.changed = True
                    self.rebuild()
def empty_object(self):
    """Return an empty Media object."""
    return Media()
def exportData(database, filename,
               error_dialog=None, option_box=None, callback=None):
    if not callable(callback):
        callback = lambda percent: None  # dummy

    with OpenFileOrStdout(filename, encoding="utf-8") as fp:

        total = (len(database.note_map) +
                 len(database.person_map) +
                 len(database.event_map) +
                 len(database.family_map) +
                 len(database.repository_map) +
                 len(database.place_map) +
                 len(database.media_map) +
                 len(database.citation_map) +
                 len(database.source_map) +
                 len(database.tag_map))
        count = 0.0

        # ---------------------------------
        # Notes
        # ---------------------------------
        for handle in database.note_map.keys():
            serial = database.note_map[handle]
            write_line(fp, Note.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Event
        # ---------------------------------
        for handle in database.event_map.keys():
            serial = database.event_map[handle]
            write_line(fp, Event.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Person
        # ---------------------------------
        for handle in database.person_map.keys():
            serial = database.person_map[handle]
            write_line(fp, Person.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Family
        # ---------------------------------
        for handle in database.family_map.keys():
            serial = database.family_map[handle]
            write_line(fp, Family.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Repository
        # ---------------------------------
        for handle in database.repository_map.keys():
            serial = database.repository_map[handle]
            write_line(fp, Repository.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Place
        # ---------------------------------
        for handle in database.place_map.keys():
            serial = database.place_map[handle]
            write_line(fp, Place.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Source
        # ---------------------------------
        for handle in database.source_map.keys():
            serial = database.source_map[handle]
            write_line(fp, Source.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Citation
        # ---------------------------------
        for handle in database.citation_map.keys():
            serial = database.citation_map[handle]
            write_line(fp, Citation.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Media
        # ---------------------------------
        for handle in database.media_map.keys():
            serial = database.media_map[handle]
            write_line(fp, Media.create(serial))
            count += 1
            callback(100 * count / total)

        # ---------------------------------
        # Tag
        # ---------------------------------
        for handle in database.tag_map.keys():
            serial = database.tag_map[handle]
            write_line(fp, Tag.create(serial))
            count += 1
            callback(100 * count / total)

    return True
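# A minimal sketch of the write_line() helper used above, which is not shown in
# this section: it presumably serializes each object to its struct form and
# writes one JSON object per line, matching what importData() expects.  The
# to_struct() call and the exact output layout are assumptions.
def write_line(fp, obj):
    import json
    fp.write(json.dumps(obj.to_struct()))
    fp.write("\n")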
def empty_media(self):
    """Return an empty Media object."""
    return Media()