def add(self, cls, label, dtype="DString"):
    # cls = DClass or str
    # label = DLabel or other type (will be converted according to dtype)
    # dtype = DString / DDateTime / DResource / DGeometry / DNone
    # returns the set / created DDescriptor

    if cls.__class__.__name__ != "DClass":
        cls = self.store.classes.add(str(cls))
    del self[cls.name]
    if label is None:
        return
    if not isinstance(label, DLabel):
        if (dtype == "DResource") and (not self.store.local_folder is None):
            # store the resource in the local folder; keep the original label for the
            # error message, since store_local() returns None on failure
            stored = self.store.files.store_local(label)
            if stored is None:
                raise Exception("Storing resource failed: %s" % (label,))
            label = stored
        else:
            if dtype == "DResource":
                label = DLabel(as_url(label)).asdtype(dtype)
                # detect the image format; if the URL has no extension, try reading the file
                format = self.store.images.get_format(label.value)
                if (format is None) and (not os.path.splitext(label.value)[1]):
                    with label.open() as f:
                        format = self.store.images.get_format(f)
                label.set_image(not format is None)
                if label.is_image():
                    label.set_projection(get_raster_projection(label.value))
                    label.set_worldfile(get_worldfile(label.value))
            else:
                label = DLabel(label).asdtype(dtype)
    self[cls.name] = DDescriptor(self, cls, label)
    for cls2 in self.parent.classes:
        self.parent.classes[cls2].add_descriptor(cls.name)
    self.broadcast(Broadcasts.ELEMENT_CHANGED, self.parent)
    self.broadcast(Broadcasts.ELEMENT_CHANGED, cls)
    self.store.events.add(self.parent, self.parent.add_descriptor, cls.name, label.value, dtype)
    return self[cls.name]
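# Usage sketch (hypothetical: assumes `obj` is a DObject whose `descriptors` collection
# provides the add() method above; class names and the file name are illustrative only):
#
#   obj.descriptors.add("Name", "Find 001")                                  # stored as DString
#   obj.descriptors.add("Found_On", "2021-05-01", dtype="DDateTime")
#   descr = obj.descriptors.add("Photo", "find_001.jpg", dtype="DResource")
#   # a DResource is copied into the store's local folder (if one is set); image format,
#   # raster projection and worldfile are detected for image resources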
def get_datasource(self, identifier, connstr, store=None):
    # resolve identifier / connstr to a loaded DataSource
    # returns the DataSource on success, otherwise None

    if store is None:
        store = self
    if identifier:
        identifier = as_url(identifier)
    if not connstr is None:
        # database sources: try a relational schema first, then the generic DB source
        ds = store.datasources.DBRel()
        if ds.set_connstr(connstr) and ds.is_valid() and ds.load():
            return ds
        if not identifier is None:
            ds = store.datasources.DB(identifier=identifier, connstr=connstr)
            if ds.load():
                return ds
    elif not identifier is None:
        # file-based sources, keyed by file extension
        # dsources = {"json": store.datasources.JSON, "rdf": store.datasources.RDF, "pickle": store.datasources.Pickle}
        dsources = {
            "json": store.datasources.JSON,
            "pickle": store.datasources.Pickle,
        }  # TODO implement RDF
        if identifier[-1] == "#":
            # identifier ends with "#" (no extension): try every supported extension
            for ext in dsources:
                ds = dsources[ext](url="%s.%s" % (identifier[:-1], ext))
                if (ds is not None) and ds.load():
                    return ds
        ds = None
        ext = identifier.split(".")[-1].lower().strip()
        if ext in dsources:
            ds = dsources[ext](url=identifier)
        if (ds is not None) and ds.load():
            return ds
    return None
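# Usage sketch (hypothetical identifiers and connection string, shown only to illustrate
# the resolution order implemented above; `store` is the Store instance):
#
#   ds = store.get_datasource("data/site.json", None)              # file source picked by extension
#   ds = store.get_datasource("http://example.com/site#", None)    # trailing "#": tries .json, .pickle
#   ds = store.get_datasource(None, "<database connection string>")  # relational source via connstr
#   if ds is None:
#       print("no data source could be loaded")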
def on_connect(self):
    path = self.get_path()
    if not path:
        return
    self.view.registry.set("recent_dir", os.path.split(path)[0])
    if (not os.path.isfile(path)) and (not self.parent.creating_enabled()):
        QtWidgets.QMessageBox.critical(self, "Error", "Could not create database.")
        return None
    invalid = check_path_invalid_chars(path)
    if invalid:
        QtWidgets.QMessageBox.critical(
            self, "Error",
            "Invalid characters in path: %s" % (" ".join(invalid)))
        return None
    url = as_url(path)
    self.parent.on_connect(url, None)
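# Sketch of the validation helpers used above (the path is illustrative; check_path_invalid_chars
# and as_url are the same helpers this handler calls):
#
#   path = "C:/projects/my_db.json"
#   invalid = check_path_invalid_chars(path)  # falsy when the path contains no invalid characters
#   if not invalid:
#       url = as_url(path)                    # file path converted to a URL for on_connect()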
def add_objects(self, identifier_ds, connstr=None, selected_ids=None, localise=False):
    # add objects from a different store, specified by identifier and connstr
    # identifier_ds = identifier or DataSource
    # if selected_ids == None: import all objects

    def set_data(data):
        # data = {local_folder, changed, classes, objects, events, user_tools, queries}

        # collect URIs of locally stored resources referenced by the imported objects
        resource_uris = []
        for id in data["objects"]:
            for name in data["objects"][id]["descriptors"]:
                label_data = data["objects"][id]["descriptors"][name]["label"]
                if (label_data["dtype"] == "DResource") and (not label_data["path"] is None):
                    if label_data["value"] not in resource_uris:
                        resource_uris.append(label_data["value"])
        local_folder = self.local_folder
        self.clear()
        self.local_folder = local_folder
        self.classes.from_dict(data["classes"])
        self.objects.from_dict(data["objects"])
        self.set_local_resource_uris(resource_uris)
        self.changed = data["changed"]
        self.events.from_list(data["events"])
        self.user_tools.from_list(data["user_tools"])
        self.queries.from_dict(data["queries"])
        self.localise_resources(True)
        self.images.load_thumbnails()

    def add_data(data):
        # TODO implement

        resource_uris = []
        for id in data["objects"]:
            for name in data["objects"][id]["descriptors"]:
                label_data = data["objects"][id]["descriptors"][name]["label"]
                if (label_data["dtype"] == "DResource") and (not label_data["path"] is None):
                    if label_data["value"] not in resource_uris:
                        resource_uris.append(label_data["value"])
        # TODO
        # add data["classes"] = {name: class data, ...}
        #   name, objects, subclasses, descriptors, relations
        # add data["objects"] = {id: object data, ...}
        # set_local_resource_uris
        # add data["user_tools"]
        # add data["queries"]
        # localise_resources(True, ids)
        self.images.load_thumbnails()

    def collect_ids(id, selected_ids, selected_classes, store, found):
        # breadth-first search: collect id and every object reachable from it through
        # relations, restricted by selected_ids / selected_classes
        queue = set([id])
        while queue:
            print("\r%d " % (len(queue)), end="")
            id1 = queue.pop()
            if id1 in found:
                continue
            obj = store.objects[id1]
            if obj is None:
                return
            found.add(id1)
            for rel in obj.relations:
                if rel.startswith("~") and (id1 not in selected_ids):
                    continue
                for id2 in obj.relations[rel]:
                    if id2 is None:
                        continue
                    if id2 in found:
                        continue
                    if (id2 not in selected_ids) and selected_classes.intersection(store.objects[id2].classes.keys()):
                        continue
                    queue.add(id2)

    if isinstance(identifier_ds, dict):
        self.stop_broadcasts()
        if (not len(self.objects)) and (not len(self.classes)):
            set_data(identifier_ds)
            self.resume_broadcasts()
            return
        # TODO else use add_data
        store = Store()
        ds = DataSource(store)
        ds.from_dict(identifier_ds)
        store.set_datasource(ds)
    else:
        if (identifier_ds == self.identifier) and (connstr == self.connstr):
            return
        store = Store()
        ds = self.get_datasource(identifier_ds, connstr, store)
        if ds is None:
            return
        self.stop_broadcasts()
        store.set_datasource(ds)

    if selected_ids is None:
        found_ids = set(list(store.objects.keys()))
        selected_ids = found_ids
        selected_classes = set(list(store.classes.keys()))
    else:
        selected_ids = set(selected_ids)
        selected_classes = set([])
        found_ids = set([])
        for id in selected_ids:
            selected_classes.update(store.objects[id].classes.keys())
        for id in selected_ids:
            collect_ids(id, selected_ids, selected_classes, store, found_ids)

    # create a new object for every collected original id
    id_lookup = {}  # {orig_id: new_id, ...}
    for id_orig in found_ids:
        id_lookup[id_orig] = self.objects.add().id

    # copy classes, relations and descriptors of every imported object
    cmax = len(found_ids)
    cnt = 1
    rel_collect = {}  # {rel: [{source cls_name, ...}, {target cls_name, ...}], ...}
    for id_orig in found_ids:
        print("\rImporting %d/%d " % (cnt, cmax), end="")
        cnt += 1
        obj_orig = store.objects[id_orig]
        obj_new = self.objects[id_lookup[id_orig]]
        for cls in obj_orig.classes:
            obj_new.classes.add(cls)
        for rel in obj_orig.relations:
            if rel not in rel_collect:
                rel_collect[rel] = [set([]), set([])]
            rel_collect[rel][0].update(obj_orig.classes.keys())
            relation = obj_orig.relations[rel]
            obj_new.relations[rel] = DRelation(obj_new.relations, rel)
            for id2 in relation:
                if id2 is None:
                    continue
                if id2 not in id_lookup:
                    continue
                if (id2 not in selected_ids) and selected_classes.intersection(store.objects[id2].classes.keys()):
                    continue
                weight = None
                if id2 in relation._weights:
                    weight = relation._weights[id2]
                obj2 = store.objects[id2]
                rel_collect[rel][1].update(obj2.classes.keys())
                obj_new.relations[rel][id_lookup[id2]] = obj2
                if weight is not None:
                    obj_new.relations[rel]._weights[id_lookup[id2]] = weight
        for name in obj_orig.descriptors:
            label = obj_orig.descriptors[name].label
            if label.__class__.__name__ == "DResource":
                if label.is_stored():
                    # re-point stored resources to their source URLs
                    label._value = as_url(label._path)
                    label._path = None
            descr = obj_new.descriptors.add(name, label)
            descr.geotag = obj_orig.descriptors[name].geotag

    # register class-level relations implied by the imported object relations
    for rel in rel_collect:
        if rel.startswith("~"):
            continue
        classes1, classes2 = rel_collect[rel]
        if not classes1:
            continue
        for name1 in classes1:
            if not classes2:
                self.classes[name1].add_relation(rel, "!*")
            else:
                for name2 in classes2:
                    self.classes[name1].add_relation(rel, name2)

    self.populate_descriptor_names()
    self.populate_relation_names()
    if localise:
        ids = [id_lookup[id_orig] for id_orig in id_lookup]
        self.localise_resources(True, ids)
    self.resume_broadcasts()
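# Usage sketch (hypothetical file name and object ids, only to illustrate the parameters
# defined above; `store` is the target Store instance):
#
#   # import all objects and classes from another Deposit database
#   store.add_objects("imports/other_site.json")
#
#   # import only the selected objects (plus objects reachable through their relations)
#   # and copy their resources into the local folder
#   store.add_objects("imports/other_site.json", selected_ids=[12, 15, 20], localise=True)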
def load(self):
    if self.url is None:
        return False
    self.stop_broadcasts()
    self.store.events.stop_recording()
    self.store.clear()
    parsed = urlparse(self.url)
    path = os.path.normpath(os.path.abspath(parsed.path.strip("//").replace("%20", " ")))
    data = None
    if not os.path.isfile(path):
        self.save()
    if not self.wait_if_busy():
        return False
    self.is_busy = True
    try:
        data = self.load_file(path)
    except:
        print("LOAD ERROR: %s" % (str(sys.exc_info())))
        self.is_busy = False
        return False
    for name in ["classes", "objects", "changed"]:
        if name not in data:
            self.is_busy = False
            return False
    self.store.local_folder = os.path.split(path)[0]
    for id in data["objects"]:
        for name in data["objects"][id]["descriptors"]:
            label_data = data["objects"][id]["descriptors"][name]["label"]
            if (label_data["dtype"] == "DResource") and (not label_data["path"] is None):
                path = label_data["path"]
                path, fname = os.path.split(path)
                path = os.path.split(path)[1]
                label_data["value"] = as_url(os.path.join(self.store.local_folder, path, fname))
                label_data["path"] = os.path.join(self.store.local_folder, path, fname)
    self.store.classes.from_dict(data["classes"])
    self.store.objects.from_dict(data["objects"])
    has_class_descriptors = False  # TODO will be obsolete for new databases
    for name in data["classes"]:
        has_class_descriptors = ("descriptors" in data["classes"][name])
        break
    resource_uris = []
    for id in data["objects"]:
        for name in data["objects"][id]["descriptors"]:
            label_data = data["objects"][id]["descriptors"][name]["label"]
            if (label_data["dtype"] == "DResource") and (not label_data["path"] is None):
                if label_data["value"] not in resource_uris:
                    resource_uris.append(label_data["value"])
    self.store.set_local_resource_uris(resource_uris)
    if not has_class_descriptors:  # TODO will be obsolete for new databases
        self.store.populate_descriptor_names()  # TODO will be obsolete for new databases
        self.store.populate_relation_names()  # TODO will be obsolete for new databases
    self.store.changed = data["changed"]
    if "events" in data:  # TODO will be obsolete for new databases
        self.store.events.from_list(data["events"])
    if "user_tools" in data:  # TODO will be obsolete for new databases
        self.store.user_tools.from_list(data["user_tools"])
    if "queries" in data:  # TODO will be obsolete for new databases
        self.store.queries.from_dict(data["queries"])
    self.store.images.load_thumbnails()
    self.store.set_datasource(self)
    self.is_busy = False
    self.store.events.resume_recording()
    self.resume_broadcasts()
    self.broadcast(Broadcasts.STORE_LOADED)
    return True
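# Usage sketch (hypothetical path; assumes load() above belongs to the file-based data
# sources instantiated in get_datasource, e.g. store.datasources.JSON(url=...)):
#
#   ds = store.datasources.JSON(url="file:///projects/my_db.json")
#   if ds.load():
#       print("loaded %d objects" % (len(ds.store.objects)))
#   else:
#       print("loading failed or no url was set")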