def clear_datastore(cls, ds_name=None, prefix=None):
    """Delete datastores, either one by explicit name or all matching a prefix.

    @param ds_name  name of a single datastore to delete (tried first as an
                    unqualified name, then as a logical/scoped name)
    @param prefix   delete every datastore whose name starts with this prefix
    """
    if CFG.system.mockdb:
        log.warn("Cannot clear MockDB")
        return
    generic_ds = DatastoreManager.get_datastore_instance("")
    if ds_name:
        # Try the unqualified (unscoped) name first.
        if DatastoreManager.exists(ds_name, scoped=False):
            generic_ds.delete_datastore(ds_name)
            return
        # Fall back to interpreting it as a logical (scoped) name.
        if DatastoreManager.exists(ds_name, scoped=True):
            generic_ds.delete_datastore(ds_name)
        else:
            log.warn("Datastore does not exist: %s" % ds_name)
    elif prefix:
        cleared, ignored = 0, 0
        for db_name in generic_ds.list_datastores():
            if not db_name.startswith(prefix):
                ignored += 1
                continue
            generic_ds.delete_datastore(db_name)
            log.debug("Cleared couch datastore '%s'" % db_name)
            cleared += 1
        log.info("Cleared %d couch datastores, ignored %d" % (cleared, ignored))
    else:
        log.warn("Cannot clear datastore without prefix or datastore name")
def _dump_datastore(cls, outpath_base, ds_name, clear_dir=True):
    """Dump every document of datastore ds_name as one YAML file under
    outpath_base/ds_name.

    @param outpath_base  base output directory (created if missing)
    @param ds_name       name of the datastore to dump
    @param clear_dir     if True, remove existing files from the target
                         directory before writing
    """
    if not DatastoreManager.exists(ds_name):
        log.warn("Datastore does not exist: %s" % ds_name)
        return
    ds = DatastoreManager.get_datastore_instance(ds_name)
    if not os.path.exists(outpath_base):
        os.makedirs(outpath_base)
    outpath = "%s/%s" % (outpath_base, ds_name)
    if not os.path.exists(outpath):
        os.makedirs(outpath)
    if clear_dir:
        # Plain loop instead of a list comprehension used for side effects
        for f in os.listdir(outpath):
            os.remove(os.path.join(outpath, f))
    objs = ds.find_by_view("_all_docs", None, id_only=False, convert_doc=False)
    numwrites = 0
    for obj_id, obj_key, obj in objs:
        # Some object ids have slashes; make the filename filesystem safe
        fn = obj_id.replace("/", "_")
        with open("%s/%s.yml" % (outpath, fn), 'w') as f:
            yaml.dump(obj, f, default_flow_style=False)
        numwrites += 1
    log.info("Wrote %s objects to %s" % (numwrites, outpath))
def delete_ui(self):
    """Delete all UI resource documents from the resources datastore by
    marking their backing documents as deleted."""
    resource_types = [
        'UIInternalResourceType', 'UIInformationLevel', 'UIScreenLabel',
        'UIAttribute', 'UIBlock', 'UIGroup', 'UIRepresentation',
        'UIResourceType', 'UIView', 'UIBlockAttribute',
        'UIBlockRepresentation', 'UIGroupBlock', 'UIViewGroup']
    res_ids = []
    for restype in resource_types:
        res_id_list, _ = self.container.resource_registry.find_resources(restype, id_only=True)
        res_ids.extend(res_id_list)
        log.debug("Found %s resources of type %s" % (len(res_id_list), restype))
    ds = DatastoreManager.get_datastore_instance("resources")
    docs = ds.read_doc_mult(res_ids)
    for doc in docs:
        doc['_deleted'] = True
    # The docs already exist (they were just read), so this must be a bulk
    # update, not a create; create_doc_mult on existing ids would conflict.
    ds.update_doc_mult(docs)
def clear_datastore(cls, ds_name=None, prefix=None):
    """Remove datastores: a single named one, or every one matching a prefix."""
    if CFG.system.mockdb:
        log.warn("Cannot clear MockDB")
        return
    generic_ds = DatastoreManager.get_datastore_instance("")
    if ds_name:
        # Check the unqualified (unscoped) name first, then the logical name
        if DatastoreManager.exists(ds_name, scoped=False):
            generic_ds.delete_datastore(ds_name)
        elif DatastoreManager.exists(ds_name, scoped=True):
            generic_ds.delete_datastore(ds_name)
        else:
            log.warn("Datastore does not exist: %s" % ds_name)
        return
    if prefix:
        cleared = ignored = 0
        for name in generic_ds.list_datastores():
            if name.startswith(prefix):
                generic_ds.delete_datastore(name)
                log.debug("Cleared couch datastore '%s'" % name)
                cleared += 1
            else:
                ignored += 1
        log.info("Cleared %d couch datastores, ignored %d" % (cleared, ignored))
        return
    log.warn("Cannot clear datastore without prefix or datastore name")
def _load_datastore(cls, path=None, ds_name=None, ignore_errors=True):
    """Load all YAML files in directory path as documents into datastore ds_name.

    @param path           directory containing one YAML file per document
    @param ds_name        target datastore name
    @param ignore_errors  if True, log and continue on per-file or bulk-create
                          errors; otherwise re-raise
    """
    if not DatastoreManager.exists(ds_name):
        # Warn but proceed: get_datastore_instance below may create it
        log.warn("Datastore does not exist: %s" % ds_name)
    ds = DatastoreManager.get_datastore_instance(ds_name)
    objects = []
    for fn in os.listdir(path):
        fp = os.path.join(path, fn)
        try:
            with open(fp, 'r') as f:
                yaml_text = f.read()
            # SECURITY: yaml.load without an explicit Loader can construct
            # arbitrary Python objects; only load trusted dump directories.
            obj = yaml.load(yaml_text)
            if "_rev" in obj:
                # Strip revision so the doc can be created fresh
                del obj["_rev"]
            objects.append(obj)
        except Exception as ex:
            if ignore_errors:
                log.warn("load error id=%s err=%s" % (fn, str(ex)))
            else:
                raise ex
    if objects:
        try:
            res = ds.create_doc_mult(objects, allow_ids=True)
            log.info("DatastoreLoader: Loaded %s objects into %s" % (len(res), ds_name))
        except Exception as ex:
            if ignore_errors:
                # Bulk create failed; the loop variable fn would be misleading
                # here, so report the datastore instead of a single file id.
                log.warn("load error ds=%s err=%s" % (ds_name, str(ex)))
            else:
                raise ex
def _load_datastore(cls, path=None, ds_name=None, ignore_errors=True):
    """Load all YAML files in directory path as documents into datastore ds_name.

    @param path           directory containing one YAML file per document
    @param ds_name        target datastore name
    @param ignore_errors  if True, log and continue on per-file or bulk-create
                          errors; otherwise re-raise
    """
    if not DatastoreManager.exists(ds_name):
        # Warn but proceed: get_datastore_instance below may create it
        log.warn("Datastore does not exist: %s" % ds_name)
    ds = DatastoreManager.get_datastore_instance(ds_name)
    objects = []
    for fn in os.listdir(path):
        fp = os.path.join(path, fn)
        try:
            with open(fp, 'r') as f:
                yaml_text = f.read()
            # SECURITY: yaml.load without an explicit Loader can construct
            # arbitrary Python objects; only load trusted dump directories.
            obj = yaml.load(yaml_text)
            if "_rev" in obj:
                # Strip revision so the doc can be created fresh
                del obj["_rev"]
            objects.append(obj)
        except Exception as ex:
            if ignore_errors:
                log.warn("load error id=%s err=%s" % (fn, str(ex)))
            else:
                raise ex
    if objects:
        try:
            res = ds.create_doc_mult(objects, allow_ids=True)
            log.info("DatastoreLoader: Loaded %s objects into %s" % (len(res), ds_name))
        except Exception as ex:
            if ignore_errors:
                # Bulk create failed; the loop variable fn would be misleading
                # here, so report the datastore instead of a single file id.
                log.warn("load error ds=%s err=%s" % (ds_name, str(ex)))
            else:
                raise ex
def _dump_datastore(cls, outpath_base, ds_name, clear_dir=True):
    """Dump every document of datastore ds_name as one YAML file under
    outpath_base/ds_name.

    @param outpath_base  base output directory (created if missing)
    @param ds_name       name of the datastore to dump
    @param clear_dir     if True, remove existing files from the target
                         directory before writing
    """
    if not DatastoreManager.exists(ds_name):
        log.warn("Datastore does not exist: %s" % ds_name)
        return
    ds = DatastoreManager.get_datastore_instance(ds_name)
    if not os.path.exists(outpath_base):
        os.makedirs(outpath_base)
    outpath = "%s/%s" % (outpath_base, ds_name)
    if not os.path.exists(outpath):
        os.makedirs(outpath)
    if clear_dir:
        # Plain loop instead of a list comprehension used for side effects
        for f in os.listdir(outpath):
            os.remove(os.path.join(outpath, f))
    objs = ds.find_by_view("_all_docs", None, id_only=False, convert_doc=False)
    numwrites = 0
    for obj_id, obj_key, obj in objs:
        # Some object ids have slashes; make the filename filesystem safe
        fn = obj_id.replace("/", "_")
        with open("%s/%s.yml" % (outpath, fn), "w") as f:
            yaml.dump(obj, f, default_flow_style=False)
        numwrites += 1
    log.info("Wrote %s objects to %s" % (numwrites, outpath))
def create_resources_snapshot(self, persist=False, filename=None):
    """Build a snapshot mapping resource ids to their update timestamps and
    association ids to their creation timestamps.

    @param persist   if True, also write the snapshot as JSON to disk
    @param filename  output path; defaults to interface/rrsnapshot_<ts>.json
    @retval dict with 'resources' and 'associations' sub-dicts
    """
    ds = DatastoreManager.get_datastore_instance(DataStore.DS_RESOURCES, DataStore.DS_PROFILE.RESOURCES)
    all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
    log.info("Found %s objects in datastore resources", len(all_objs))
    resources = {}
    associations = {}
    snapshot = dict(resources=resources, associations=associations)
    for doc_id, doc_key, doc in all_objs:
        if doc_id.startswith("_design"):
            continue  # skip CouchDB design documents
        if not isinstance(doc, dict):
            raise Inconsistent("Object of bad type found: %s" % type(doc))
        doc_type = doc.get("type_", None)
        if doc_type == "Association":
            associations[doc_id] = doc.get("ts", None)
        elif doc_type:
            resources[doc_id] = doc.get("ts_updated", None)
        else:
            raise Inconsistent("Object with no type_ found: %s" % doc)
    if persist:
        timestamp = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
        out_path = filename or "interface/rrsnapshot_%s.json" % timestamp
        with open(out_path, "w") as f:
            f.write(json.dumps(snapshot))
    log.debug("Created resource registry snapshot. %s resources, %s associations", len(resources), len(associations))
    return snapshot
def get_blame_objects(cls):
    """Return a dict of datastore name -> list of documents that carry a
    'blame_' key, scanning the well-known system datastores.

    Datastores that do not exist or cannot be read are skipped (consistent
    with the robust variant of this method elsewhere in the file).
    """
    ds_list = ['resources', 'objects', 'state', 'events', 'directory', 'scidata']
    blame_objs = {}
    for ds_name in ds_list:
        try:
            ds = DatastoreManager.get_datastore_instance(ds_name)
            ret_objs = ds.find_by_view("_all_docs", None, id_only=False, convert_doc=False)
        except BadRequest:
            # Missing/unreadable datastore: skip instead of failing the scan
            continue
        objs = []
        for obj_id, obj_key, obj in ret_objs:
            if "blame_" in obj:
                objs.append(obj)
        blame_objs[ds_name] = objs
    return blame_objs
def _load_datastore(cls, path=None, ds_name=None, ignore_errors=True):
    """Load every file under path into datastore ds_name, one document per file.

    @param path           directory of document files
    @param ds_name        target datastore name
    @param ignore_errors  if True, log and continue on per-file errors
    """
    if not DatastoreManager.exists(ds_name):
        log.warn("Datastore does not exist: %s" % ds_name)
    ds = DatastoreManager.get_datastore_instance(ds_name)
    for fn in os.listdir(path):
        fp = os.path.join(path, fn)
        try:
            cls._read_and_create_obj(ds, fp)
        except Exception as ex:
            if not ignore_errors:
                raise ex
            log.warn("load error id=%s err=%s" % (fn, str(ex)))
def delete_ooi_assets(self):
    """Delete all OOI asset resources and any associations that reference
    them, by rewriting the backing documents as CouchDB deletion stubs."""
    ooi_asset_types = ['InstrumentModel', 'PlatformModel', 'Observatory', 'Subsite',
                       'PlatformSite', 'InstrumentSite', 'InstrumentAgent',
                       'InstrumentAgentInstance', 'InstrumentDevice', 'PlatformAgent',
                       'PlatformAgentInstance', 'PlatformDevice', 'Deployment',
                       'DataProduct']
    self.resource_ds = DatastoreManager.get_datastore_instance(DataStore.DS_RESOURCES, DataStore.DS_PROFILE.RESOURCES)
    del_objs = {}
    del_assocs = {}
    all_objs = self.resource_ds.find_by_view("_all_docs", None, id_only=False, convert_doc=False)
    # Pass 1: collect all asset documents to delete
    for obj_id, key, obj in all_objs:
        if obj_id.startswith("_design") or not isinstance(obj, dict):
            continue
        obj_type = obj.get("type_", None)
        if obj_type and obj_type in ooi_asset_types:
            del_objs[obj_id] = obj
    # Pass 2 (separate, so associations seen before their endpoints are caught):
    # collect associations whose subject or object is being deleted
    for obj_id, key, obj in all_objs:
        if obj_id.startswith("_design") or not isinstance(obj, dict):
            continue
        obj_type = obj.get("type_", None)
        if obj_type == "Association":
            if obj['o'] in del_objs or obj['s'] in del_objs:
                del_assocs[obj_id] = obj

    def _mark_deleted(doc):
        # Replace doc content with a deletion stub, keeping only id/rev
        doc_id, doc_rev = doc['_id'], doc['_rev']
        doc.clear()
        doc.update(dict(_id=doc_id, _rev=doc_rev, _deleted=True))

    for doc in del_objs.values():
        _mark_deleted(doc)
    for doc in del_assocs.values():
        _mark_deleted(doc)
    self.resource_ds.update_doc_mult(del_objs.values())
    self.resource_ds.update_doc_mult(del_assocs.values())
    log.info("Deleted %s OOI resources and %s associations", len(del_objs), len(del_assocs))
def delete_ui(self):
    """Mark all UI resource documents as deleted in the resources datastore."""
    res_ids = []
    for restype in self.UI_RESOURCE_TYPES:
        type_ids, _ = self.container.resource_registry.find_resources(restype, id_only=True)
        res_ids.extend(type_ids)
    ds = DatastoreManager.get_datastore_instance("resources")
    docs = ds.read_doc_mult(res_ids)
    # TODO: Also delete associations
    for doc in docs:
        doc['_deleted'] = True
    ds.update_doc_mult(docs)
    log.info("Deleted %s UI resources and associations", len(docs))
def delete_ui(self):
    """Mark all UI resource documents as deleted in the resources datastore."""
    res_ids = []
    for restype in self.UI_RESOURCE_TYPES:
        found_ids, _ = self.container.resource_registry.find_resources(restype, id_only=True)
        res_ids.extend(found_ids)
    ds = DatastoreManager.get_datastore_instance("resources")
    docs = ds.read_doc_mult(res_ids)
    for doc in docs:
        doc['_deleted'] = True
    # TODO: Also delete associations
    ds.update_doc_mult(docs)
    log.info("Deleted %s UI resources and associations", len(docs))
def get_blame_objects(cls):
    """Return {datastore_name: [docs containing a 'blame_' key]} across the
    well-known system datastores; unreadable datastores are skipped."""
    blame_objs = {}
    for ds_name in ['resources', 'objects', 'state', 'events', 'directory', 'scidata']:
        ret_objs = []
        try:
            ds = DatastoreManager.get_datastore_instance(ds_name)
            ret_objs = ds.find_by_view("_all_docs", None, id_only=False, convert_doc=False)
        except BadRequest:
            # Datastore missing/unreadable: leave it out of the result
            continue
        blame_objs[ds_name] = [doc for _doc_id, _doc_key, doc in ret_objs if "blame_" in doc]
    return blame_objs
def dump_resources_as_xlsx(self, filename=None):
    """Export all resource registry objects into an Excel workbook.

    @param filename  output path; defaults to interface/resources_<ts>.xls
    """
    self._clear()
    ds = DatastoreManager.get_datastore_instance(DataStore.DS_RESOURCES, DataStore.DS_PROFILE.RESOURCES)
    all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
    log.info("Found %s objects in datastore resources", len(all_objs))
    self._analyze_objects(all_objs)
    self._wb = xlwt.Workbook()
    self._worksheets = {}
    self._dump_observatories()
    self._dump_network()
    # One worksheet section per resource type, in stable alphabetical order
    for restype in sorted(self._res_by_type):
        self._dump_resource_type(restype)
    timestamp = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
    out_path = filename or "interface/resources_%s.xls" % timestamp
    self._wb.save(out_path)
def dump_resources_as_xlsx(self, filename=None):
    """Export all resource registry objects into an Excel workbook.

    @param filename  output path; defaults to interface/resources_<ts>.xls
    """
    self._clear()
    ds = DatastoreManager.get_datastore_instance(DataStore.DS_RESOURCES, DataStore.DS_PROFILE.RESOURCES)
    all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
    log.info("Found %s objects in datastore resources", len(all_objs))
    self._analyze_objects(all_objs)
    self._wb = xlwt.Workbook()
    self._worksheets = {}
    self._dump_observatories()
    self._dump_network()
    # One worksheet section per resource type, alphabetically
    for restype in sorted(self._res_by_type):
        self._dump_resource_type(restype)
    ts = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
    target = filename or "interface/resources_%s.xls" % ts
    self._wb.save(target)
def create_resources_snapshot(self, persist=False, filename=None):
    """Build a snapshot mapping resource ids to their update timestamps and
    association ids to their creation timestamps.

    @param persist   if True, also write the snapshot as JSON to disk
    @param filename  output path; defaults to interface/rrsnapshot_<ts>.json
    @retval dict with 'resources' and 'associations' sub-dicts
    """
    ds = DatastoreManager.get_datastore_instance(DataStore.DS_RESOURCES, DataStore.DS_PROFILE.RESOURCES)
    all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
    log.info("Found %s objects in datastore resources", len(all_objs))
    resources = {}
    associations = {}
    snapshot = dict(resources=resources, associations=associations)
    for doc_id, doc_key, doc in all_objs:
        if doc_id.startswith("_design"):
            continue  # skip CouchDB design documents
        if not isinstance(doc, dict):
            raise Inconsistent("Object of bad type found: %s" % type(doc))
        doc_type = doc.get("type_", None)
        if doc_type == "Association":
            associations[doc_id] = doc.get("ts", None)
        elif doc_type:
            resources[doc_id] = doc.get("ts_updated", None)
        else:
            raise Inconsistent("Object with no type_ found: %s" % doc)
    if persist:
        ts = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
        target = filename or "interface/rrsnapshot_%s.json" % ts
        with open(target, "w") as f:
            f.write(json.dumps(snapshot))
    log.debug("Created resource registry snapshot. %s resources, %s associations", len(resources), len(associations))
    return snapshot
try: with open(filename, "rb") as csvfile: reader = self._get_csv_reader(csvfile) for row in reader: catfunc(row) row_do += 1 except IOError, ioe: log.warn("UI category file %s error: %s" % (filename, str(ioe))) log.info( "Loaded UI category %s: %d rows imported, %d rows skipped" % (category, row_do, row_skip)) try: ds = DatastoreManager.get_datastore_instance("resources") self._finalize_uirefs(ds) res = ds.create_mult(self.ui_obj_by_id.values(), allow_ids=True) log.info("Loaded %s UI resource objects into resource registry" % (len(res))) res = ds.create_mult(self.ui_assocs) log.info( "Loaded %s UI resource associations into resource registry" % (len(res))) except Exception as ex: log.exception("load error err=%s" % (str(ex))) def _add_ui_object(self, refid, obj): while refid in self.ui_objs: log.warn("Object duplicate id=%s, obj=%s" % (refid, obj)) refid = refid + "!"
def __init__(self):
    """Cache container, resource registry, its datastore and a timer."""
    # Deferred import: container_instance is only available after bootstrap
    from pyon.core.bootstrap import container_instance
    container = container_instance
    self.container = container
    self.rr = container.resource_registry
    self.rr_store = DatastoreManager.get_datastore_instance("resources")
    self.timer = ooi.timer.Timer()
def bulk_delete(cls, objs):
    """Delete the given documents, grouped by datastore name.

    @param objs  mapping of datastore name -> list of docs (each with '_id')
    """
    for ds_name in objs:
        ds = DatastoreManager.get_datastore_instance(ds_name)
        for doc in objs[ds_name]:
            ds.delete(doc["_id"])
def _proc():
    # Worker: this thread performs its share of random reads against the
    # resources datastore (num_read/num_thread reads per worker).
    rr_store = DatastoreManager.get_datastore_instance("resources")
    reads = int(num_read/num_thread)
    for _ in xrange(reads):
        idx = random.randint(0, len(self.res_ids)-1)
        res_obj = rr_store.read(self.res_ids[idx])
if obj_list: spec_obj = obj_list[0] spec_obj.spec = specs self.container.resource_registry.update(spec_obj) else: spec_obj = IonObject('UISpec', name="ION UI Specs", spec=specs) res_id = self.container.resource_registry.create(spec_obj) spec_size = len(json.dumps(spec_obj.spec)) log.info("Wrote UISpec object, size=%s", spec_size) if specs_path: self.export_ui_specs(specs_path, specs=specs) else: # Write the full set of UIResource objects self._finalize_uirefs() ds = DatastoreManager.get_datastore_instance("resources") res = ds.create_mult(self.ui_obj_by_id.values(), allow_ids=True) log.info("Stored %s UI resource objects into resource registry" % (len(res))) res = ds.create_mult(self.ui_assocs, allow_ids=True) log.info("Stored %s UI resource associations into resource registry" % (len(res))) except Exception as ex: log.exception("Store in resource registry error err=%s" % (str(ex))) def _get_ui_files(self, path): dirurl = path or self.DEFAULT_UISPEC_LOCATION if not dirurl.endswith("/"): dirurl += "/" log.info("Accessing UI specs URL: %s", dirurl) dirpage = requests.get(dirurl).text csvfiles = re.findall('(?:href|HREF)="([-%/\w]+\.csv)"', dirpage)
def bulk_delete(cls, objs):
    """Delete documents per datastore.

    @param objs  mapping of datastore name -> list of docs (each with '_id')
    """
    for ds_name, doc_list in objs.items():
        ds = DatastoreManager.get_datastore_instance(ds_name)
        for doc in doc_list:
            ds.delete(doc["_id"])
def __init__(self):
    """Cache container, resource registry, its datastore and a timer."""
    # Deferred import: container_instance exists only after bootstrap
    from pyon.core.bootstrap import container_instance
    self.container = container_instance
    self.rr = self.container.resource_registry
    # Direct datastore handle for low-level document access
    self.rr_store = DatastoreManager.get_datastore_instance("resources")
    self.timer = ooi.timer.Timer()
def _proc():
    # Worker closure: issue this thread's quota of random resource reads.
    rr_store = DatastoreManager.get_datastore_instance("resources")
    per_thread = int(num_read / num_thread)
    for _ in xrange(per_thread):
        pick = random.randint(0, len(self.res_ids) - 1)
        res_obj = rr_store.read(self.res_ids[pick])