def dump_datastore(self, path=None, ds_name=None, clear_dir=True, compact=False):
    """
    Dumps CouchDB datastores into a directory as YML files.

    @param ds_name   Logical name (such as "resources") of an ION datastore;
                     if None, a fixed list of core datastores is dumped.
    @param path      Directory to put dumped datastores into (defaults to
                     "res/preload/local/dump_[timestamp]")
    @param clear_dir if True, delete contents of datastore dump dirs
    @param compact   if True, saves all objects in one big YML file
    """
    if not path:
        # Default to a timestamped directory so repeated dumps don't collide.
        dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
        path = "res/preload/local/dump_%s" % dtstr

    if ds_name:
        ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
        try:
            if ds.exists_datastore(ds_name):
                self._dump_datastore(path, ds_name, clear_dir, compact)
            else:
                # Include the name so the warning identifies which store is missing.
                log.warn("Datastore does not exist: %s" % ds_name)
        finally:
            # Always release the connection, even if the dump raises.
            ds.close()
    else:
        ds_list = ['resources', 'objects', 'state', 'events', 'directory', 'scidata']
        for dsn in ds_list:
            # _dump_datastore checks existence itself and warns on missing stores.
            self._dump_datastore(path, dsn, clear_dir, compact)
def _dump_datastore(self, outpath_base, ds_name, clear_dir=True, compact=False):
    """
    Dumps one CouchDB datastore as YML file(s) under outpath_base/ds_name.

    @param outpath_base  Base output directory (created if missing)
    @param ds_name       Logical name of the datastore to dump
    @param clear_dir     if True, remove existing files from the target dir first
    @param compact       if True, write all objects into one YML file prefixed
                         with a "COMPACTDUMP" marker; otherwise one file per object
    """
    ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
    try:
        if not ds.exists_datastore(ds_name):
            log.warn("Datastore does not exist: %s" % ds_name)
            return
        if not os.path.exists(outpath_base):
            os.makedirs(outpath_base)
        outpath = "%s/%s" % (outpath_base, ds_name)
        if not os.path.exists(outpath):
            os.makedirs(outpath)
        if clear_dir:
            # Plain loop: comprehensions are for building lists, not side effects.
            for fname in os.listdir(outpath):
                os.remove(os.path.join(outpath, fname))

        objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
        numwrites = 0
        if compact:
            compact_obj = [obj for obj_id, obj_key, obj in objs]
            # Marker so loaders can recognize the compact dump format.
            compact_obj.insert(0, "COMPACTDUMP")
            with open("%s/%s_compact.yml" % (outpath, ds_name), 'w') as f:
                yaml.dump(compact_obj, f, default_flow_style=False)
            # Count from the list we built instead of assuming objs supports len()
            numwrites = len(compact_obj) - 1
        else:
            for obj_id, obj_key, obj in objs:
                # Some object ids have slashes
                fn = obj_id.replace("/", "_")
                with open("%s/%s.yml" % (outpath, fn), 'w') as f:
                    yaml.dump(obj, f, default_flow_style=False)
                numwrites += 1

        log.info("Wrote %s objects to %s" % (numwrites, outpath))
    finally:
        ds.close()
def create_core_datastores(self):
    """
    Main entry point into creating core datastores.

    Creates each core datastore that does not exist yet.
    """
    ds = CouchDataStore(config=self.config, scope=self.sysname)
    try:
        datastores = ['resources']
        for local_dsn in datastores:
            if not ds.exists_datastore(local_dsn):
                ds.create_datastore(local_dsn)
    finally:
        # Release the connection like the dump methods do.
        ds.close()
def create_core_datastores(self):
    """
    Main entry point into creating core datastores.

    Creates each missing core datastore and reports how many were created.
    """
    ds = CouchDataStore(config=self.config, scope=self.sysname)
    try:
        datastores = ["resources", "events"]
        count = 0
        for local_dsn in datastores:
            if not ds.exists_datastore(local_dsn):
                ds.create_datastore(local_dsn)
                count += 1
        # NOTE: Views and other datastores are created by containers' DatastoreManager
        # print() call form works identically on Python 2 and 3 for a single argument.
        print("store_interfaces: Created %s datastores..." % count)
    finally:
        # Release the connection like the dump methods do.
        ds.close()
def _dump_datastore(self, outpath_base, ds_name, clear_dir=True, compact=False):
    """
    Dumps one CouchDB datastore as YML file(s) under outpath_base/ds_name.

    @param outpath_base  Base output directory (created if missing)
    @param ds_name       Logical name of the datastore to dump
    @param clear_dir     if True, remove existing files from the target dir first
    @param compact       if True, write all objects into one YML file prefixed
                         with a "COMPACTDUMP" marker; otherwise one file per object
    """
    ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
    try:
        if not ds.exists_datastore(ds_name):
            log.warn("Datastore does not exist: %s" % ds_name)
            return
        if not os.path.exists(outpath_base):
            os.makedirs(outpath_base)
        outpath = "%s/%s" % (outpath_base, ds_name)
        if not os.path.exists(outpath):
            os.makedirs(outpath)
        if clear_dir:
            # Plain loop: comprehensions are for building lists, not side effects.
            for fname in os.listdir(outpath):
                os.remove(os.path.join(outpath, fname))

        objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
        numwrites = 0
        if compact:
            compact_obj = [obj for obj_id, obj_key, obj in objs]
            # Marker so loaders can recognize the compact dump format.
            compact_obj.insert(0, "COMPACTDUMP")
            with open("%s/%s_compact.yml" % (outpath, ds_name), 'w') as f:
                yaml.dump(compact_obj, f, default_flow_style=False)
            # Count from the list we built instead of assuming objs supports len()
            numwrites = len(compact_obj) - 1
        else:
            for obj_id, obj_key, obj in objs:
                # Some object ids have slashes
                fn = obj_id.replace("/", "_")
                with open("%s/%s.yml" % (outpath, fn), 'w') as f:
                    yaml.dump(obj, f, default_flow_style=False)
                numwrites += 1

        log.info("Wrote %s objects to %s" % (numwrites, outpath))
    finally:
        ds.close()
def dump_datastore(self, path=None, ds_name=None, clear_dir=True, compact=False):
    """
    Dumps CouchDB datastores into a directory as YML files.

    @param ds_name   Logical name (such as "resources") of an ION datastore;
                     if None, a fixed list of core datastores is dumped.
    @param path      Directory to put dumped datastores into (defaults to
                     "res/preload/local/dump_[timestamp]")
    @param clear_dir if True, delete contents of datastore dump dirs
    @param compact   if True, saves all objects in one big YML file
    """
    if not path:
        # Default to a timestamped directory so repeated dumps don't collide.
        dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
        path = "res/preload/local/dump_%s" % dtstr

    if ds_name:
        ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
        try:
            if ds.exists_datastore(ds_name):
                self._dump_datastore(path, ds_name, clear_dir, compact)
            else:
                # Include the name so the warning identifies which store is missing.
                log.warn("Datastore does not exist: %s" % ds_name)
        finally:
            # Always release the connection, even if the dump raises.
            ds.close()
    else:
        ds_list = ['resources', 'objects', 'state', 'events', 'directory', 'scidata']
        for dsn in ds_list:
            # _dump_datastore checks existence itself and warns on missing stores.
            self._dump_datastore(path, dsn, clear_dir, compact)