def clear_datastore(self, ds_name=None, prefix=None):
    """ Clears a datastore or a set of datastores of common prefix """
    scoped_ds = CouchDataStore(config=self.config, scope=self.sysname)
    try:
        if ds_name:
            try:
                scoped_ds.delete_datastore(ds_name)
            except NotFound:
                # Scoped name absent -- try the unscoped version of the name
                try:
                    CouchDataStore(config=self.config).delete_datastore(ds_name)
                except NotFound:
                    pass
        elif prefix:
            wanted = prefix.lower()
            unscoped_ds = CouchDataStore(config=self.config)
            # Delete every datastore whose lowercased name matches the prefix
            for candidate in unscoped_ds.list_datastores():
                if candidate.lower().startswith(wanted):
                    unscoped_ds.delete_datastore(candidate)
        else:
            log.warn(
                "Cannot clear datastore without prefix or datastore name")
    finally:
        scoped_ds.close()
def declare_queue_impl(self, client, queue, durable=False, auto_delete=True):
    """Declares an AMQP queue on the given client channel.

    @param client       pika channel to declare the queue on
    @param queue        queue name; empty/None lets the broker generate one
    @param durable      if True, queue survives a broker restart
    @param auto_delete  if True, queue is removed when no longer used
    @retval the queue name confirmed by the broker (relevant for anonymous queues)
    """
    log.debug("AMQPTransport.declare_queue_impl(%s): %s, D %s, AD %s",
              client.channel_number, queue, durable, auto_delete)
    # QUEUE_BLAME tags queues with the test id that created them so leaked
    # queues can be traced back to a test. Read the env var only once.
    queue_blame = os.environ.get('QUEUE_BLAME', None)
    arguments = {}
    if queue_blame is not None:
        ds_name, testid = queue_blame.split(',')
        arguments.update({'created-by': testid})

    frame = self._sync_call(client, client.queue_declare, 'callback',
                            queue=queue or '',
                            auto_delete=auto_delete,
                            durable=durable,
                            arguments=arguments)

    if queue_blame is not None:
        from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
        ds = CouchDataStore(datastore_name=ds_name)
        try:
            ds.create_doc({'test_id': testid, 'queue_name': frame.method.queue})
        finally:
            # Release the datastore connection even if the write fails
            ds.close()

    return frame.method.queue
def dump_resources_as_xlsx(self, filename=None):
    """Dumps all resource registry objects into an XLS workbook.

    @param filename  output path; defaults to interface/resources_<timestamp>.xls
    """
    self._clear()
    # TODO: Use DatastoreFactory for couch independence
    ds = CouchDataStore(DataStore.DS_RESOURCES,
                        profile=DataStore.DS_PROFILE.RESOURCES,
                        config=CFG,
                        scope=self.sysname)
    try:
        all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
    finally:
        # Close the datastore connection (was leaked before)
        ds.close()
    log.info("Found %s objects in datastore resources", len(all_objs))
    self._analyze_objects(all_objs)
    self._wb = xlwt.Workbook()
    self._worksheets = {}
    self._dump_observatories()
    self._dump_network()
    for restype in sorted(self._res_by_type.keys()):
        self._dump_resource_type(restype)
    dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
    path = filename or "interface/resources_%s.xls" % dtstr
    self._wb.save(path)
def dump_datastore(self, path=None, ds_name=None, clear_dir=True, compact=False):
    """
    Dumps CouchDB datastores into a directory as YML files.
    @param ds_name Logical name (such as "resources") of an ION datastore
    @param path Directory to put dumped datastores into (defaults to
                "res/preload/local/dump_[timestamp]")
    @param clear_dir if True, delete contents of datastore dump dirs
    @param compact if True, saves all objects in one big YML file
    """
    if not path:
        dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
        path = "res/preload/local/dump_%s" % dtstr
    if ds_name:
        ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
        try:
            if ds.exists_datastore(ds_name):
                self._dump_datastore(path, ds_name, clear_dir, compact)
            else:
                log.warn("Datastore does not exist")
        finally:
            # Close even if exists_datastore/_dump_datastore raise (was leaked before)
            ds.close()
    else:
        ds_list = ['resources', 'objects', 'state', 'events',
                   'directory', 'scidata']
        for dsn in ds_list:
            self._dump_datastore(path, dsn, clear_dir, compact)
def _load_datastore(self, path=None, ds_name=None, ignore_errors=True):
    """Loads all YML files from a directory into one datastore.

    Each file holds either a single object, or a compact dump: a list
    whose first element is the "COMPACTDUMP" marker followed by objects.
    @param path           directory containing the YML files
    @param ds_name        target datastore name
    @param ignore_errors  if True, log and continue on per-file/bulk errors
    """
    ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
    try:
        objects = []
        for fn in os.listdir(path):
            fp = os.path.join(path, fn)
            try:
                with open(fp, 'r') as f:
                    yaml_text = f.read()
                # NOTE(review): yaml.load without SafeLoader can execute
                # arbitrary constructors -- only load trusted dump files here.
                obj = yaml.load(yaml_text)
                if obj and type(obj) is list and obj[0] == "COMPACTDUMP":
                    objects.extend(obj[1:])
                else:
                    objects.append(obj)
            except Exception as ex:
                if ignore_errors:
                    log.warn("load error id=%s err=%s" % (fn, str(ex)))
                else:
                    # Bare raise preserves the original traceback
                    raise
        if objects:
            # Strip revision info so documents insert as new docs
            for obj in objects:
                if "_rev" in obj:
                    del obj["_rev"]
            try:
                res = ds.create_doc_mult(objects)
                log.info("DatastoreLoader: Loaded %s objects into %s" %
                         (len(res), ds_name))
            except Exception as ex:
                if ignore_errors:
                    # Bulk create failed: report the datastore, not a stale file name
                    log.warn("load error ds=%s err=%s" % (ds_name, str(ex)))
                else:
                    raise
    finally:
        ds.close()
def _clear_couch(host, port, username, password, prefix, verbose=False):
    """Drops all couch databases whose name matches a prefix.

    @param prefix   database-name prefix to match (case-insensitive);
                    '*' matches every database not starting with '_'
    @param verbose  if True, also print connection info and ignored databases
    """
    db_server = CouchDataStore(host=host, port=str(port),
                               username=username, password=password)
    try:
        if verbose:
            # Parenthesized print works on both Python 2 and 3
            print("clear_couch: Connected to couch server http://%s:%d" % (host, port))
        db_list = db_server.list_datastores()
        ignored_num = 0
        for db_name in db_list:
            if (prefix == '*' and not db_name.startswith('_')) or \
                    db_name.lower().startswith(prefix.lower()):
                db_server.delete_datastore(db_name)
                print('clear_couch: Dropped couch database: %s' % db_name)
            else:
                if verbose:
                    print('clear_couch: Ignored couch database: %s' % db_name)
                ignored_num += 1
        print('clear_couch: Ignored %s existing databases' % ignored_num)
    finally:
        # Close the server connection even when a delete fails (was leaked before)
        db_server.close()
def __init__(self, sysname=None, orgname=None, config=None):
    """Opens the 'resources' datastore and ensures directory views exist."""
    self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
    effective_sysname = sysname or get_default_sysname()
    self.datastore_name = "resources"
    self.datastore = CouchDataStore(self.datastore_name,
                                    config=config,
                                    scope=effective_sysname)
    try:
        self.datastore.read_doc("_design/directory")
    except NotFound:
        # Directory design document missing: (re)create the profile views
        self.datastore.define_profile_views("RESOURCES")
def create_core_datastores(self):
    """
    Main entry point into creating core datastores
    """
    ds = CouchDataStore(config=self.config, scope=self.sysname)
    # Create each required core datastore unless it already exists
    for core_ds_name in ['resources']:
        if not ds.exists_datastore(core_ds_name):
            ds.create_datastore(core_ds_name)
def _force_clean(cls, recreate=False):
    """Deletes (and optionally recreates) every datastore of the current sysname."""
    from pyon.core.bootstrap import get_sys_name, CFG
    from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
    datastore = CouchDataStore(config=CFG)
    sys_prefix = '%s_' % get_sys_name()
    doomed = [dsn for dsn in datastore.list_datastores()
              if dsn.startswith(sys_prefix)]
    try:
        for dsn in doomed:
            datastore.delete_datastore(datastore_name=dsn)
            if recreate:
                datastore.create_datastore(datastore_name=dsn)
    finally:
        datastore.close()
def _dump_datastore(self, outpath_base, ds_name, clear_dir=True, compact=False):
    """Dumps one datastore's documents to YML files under outpath_base/ds_name.

    @param outpath_base  base output directory (created if missing)
    @param ds_name       datastore to dump
    @param clear_dir     if True, remove pre-existing files in the output dir
    @param compact       if True, write a single <ds_name>_compact.yml instead
                         of one file per object
    """
    ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
    try:
        if not ds.exists_datastore(ds_name):
            log.warn("Datastore does not exist: %s" % ds_name)
            return
        if not os.path.exists(outpath_base):
            os.makedirs(outpath_base)
        outpath = "%s/%s" % (outpath_base, ds_name)
        if not os.path.exists(outpath):
            os.makedirs(outpath)
        if clear_dir:
            # Plain loop instead of a side-effect list comprehension
            for f in os.listdir(outpath):
                os.remove(os.path.join(outpath, f))
        objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
        numwrites = 0
        if compact:
            compact_obj = [obj for obj_id, obj_key, obj in objs]
            compact_obj.insert(0, "COMPACTDUMP")
            with open("%s/%s_compact.yml" % (outpath, ds_name), 'w') as f:
                yaml.dump(compact_obj, f, default_flow_style=False)
            numwrites = len(objs)
        else:
            for obj_id, obj_key, obj in objs:
                # Some object ids have slashes
                fn = obj_id.replace("/", "_")
                with open("%s/%s.yml" % (outpath, fn), 'w') as f:
                    yaml.dump(obj, f, default_flow_style=False)
                numwrites += 1
        log.info("Wrote %s objects to %s" % (numwrites, outpath))
    finally:
        ds.close()
def create_resources_snapshot(self, persist=False, filename=None):
    """Builds id->timestamp maps of all resources and associations.

    @param persist   if True, also write the snapshot to a JSON file
    @param filename  output path; defaults to interface/rrsnapshot_<timestamp>.json
    @retval dict with "resources" and "associations" id->timestamp maps
    @raises Inconsistent if a non-dict or untyped object is found
    """
    ds = CouchDataStore(DataStore.DS_RESOURCES,
                        profile=DataStore.DS_PROFILE.RESOURCES,
                        config=CFG,
                        scope=self.sysname)
    try:
        all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
    finally:
        # Close the datastore connection (was leaked before)
        ds.close()
    log.info("Found %s objects in datastore resources", len(all_objs))
    resources = {}
    associations = {}
    snapshot = dict(resources=resources, associations=associations)
    for obj_id, key, obj in all_objs:
        if obj_id.startswith("_design"):
            continue  # skip CouchDB design documents
        if not isinstance(obj, dict):
            raise Inconsistent("Object of bad type found: %s" % type(obj))
        obj_type = obj.get("type_", None)
        if obj_type == "Association":
            associations[obj_id] = obj.get("ts", None)
        elif obj_type:
            resources[obj_id] = obj.get("ts_updated", None)
        else:
            raise Inconsistent("Object with no type_ found: %s" % obj)
    if persist:
        dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
        path = filename or "interface/rrsnapshot_%s.json" % dtstr
        snapshot_json = json.dumps(snapshot)
        with open(path, "w") as f:
            f.write(snapshot_json)
    log.debug(
        "Created resource registry snapshot. %s resources, %s associations",
        len(resources), len(associations))
    return snapshot
def get_blame_objects(self):
    """Collects all documents tagged with a 'blame_' key from the core datastores.

    @retval dict mapping datastore name -> list of blamed documents
    """
    ds_list = ['resources', 'objects', 'state', 'events',
               'directory', 'scidata']
    blame_objs = {}
    for ds_name in ds_list:
        try:
            ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
            try:
                ret_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
            finally:
                # Close even when the view query fails (was leaked before)
                ds.close()
        except BadRequest:
            # Datastore missing or unreadable: skip it entirely
            continue
        objs = []
        for obj_id, obj_key, obj in ret_objs:
            if "blame_" in obj:
                objs.append(obj)
        blame_objs[ds_name] = objs
    return blame_objs
def begin(self):
    """Opens the standalone couch datastore this plugin writes into."""
    from pyon.public import CFG
    from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
    # Imports are local to avoid loading pyon unless the plugin is activated
    self.ds = CouchDataStore(datastore_name=self.ds_name, config=CFG)