Example 1
 def clear_datastore(self, ds_name=None, prefix=None):
     """
     Clears a datastore, or a set of datastores sharing a common prefix.
     """
     ds = DatastoreFactory.get_datastore(config=self.config, scope=self.sysname)
     try:
         if ds_name:
             try:
                 ds.delete_datastore(ds_name)
             except NotFound:
                 try:
                     # Try the unscoped version
                     ds1 = DatastoreFactory.get_datastore(config=self.config)
                     ds1.delete_datastore(ds_name)
                 except NotFound:
                     pass
         elif prefix:
             prefix = prefix.lower()
             ds_noscope = DatastoreFactory.get_datastore(config=self.config)
             for dsn in ds_noscope.list_datastores():
                 if dsn.lower().startswith(prefix):
                     ds_noscope.delete_datastore(dsn)
         else:
             log.warn("Cannot clear datastore without prefix or datastore name")
     finally:
         ds.close()
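A pattern worth noting above: the handle is opened against the sysname-scoped server first, the unscoped server is only consulted as a fallback, and the finally block guarantees the connection is closed even when nothing was deleted. A minimal usage sketch, assuming the method lives on an admin helper class (the snippet does not show the class name, so DatastoreAdmin and CFG below are placeholders):

# Hypothetical caller; DatastoreAdmin and CFG stand in for whatever class
# and config object host clear_datastore() in the real codebase.
admin = DatastoreAdmin(sysname="mysys", config=CFG)
admin.clear_datastore(ds_name="resources")   # drop one datastore by name
admin.clear_datastore(prefix="mysys_")       # or everything sharing a prefix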
Example 2
 def clear_datastore(self, ds_name=None, prefix=None):
     """
     Clears a datastore, or a set of datastores sharing a common prefix.
     """
     ds = DatastoreFactory.get_datastore(config=self.config,
                                         scope=self.sysname)
     try:
         if ds_name:
             try:
                 ds.delete_datastore(ds_name)
             except NotFound:
                 try:
                     # Try the unscoped version
                     ds1 = DatastoreFactory.get_datastore(
                         config=self.config)
                     ds1.delete_datastore(ds_name)
                 except NotFound:
                     pass
         elif prefix:
             prefix = prefix.lower()
             ds_noscope = DatastoreFactory.get_datastore(config=self.config)
             for dsn in ds_noscope.list_datastores():
                 if dsn.lower().startswith(prefix):
                     ds_noscope.delete_datastore(dsn)
         else:
             log.warn(
                 "Cannot clear datastore without prefix or datastore name")
     finally:
         ds.close()
Example 3
 @classmethod
 def tearDownClass(cls):
     # Removes temporary files
     # Comment this out if you need to inspect the HDF5 files.
     shutil.rmtree(cls.working_dir)
     span_store = DatastoreFactory.get_datastore(datastore_name="coverage_spans", config=CFG)
     coverage_store = DatastoreFactory.get_datastore(datastore_name="coverage", config=CFG)
     if span_store is None:
         raise RuntimeError("Unable to load datastore for coverage_spans")
     if coverage_store is None:
         raise RuntimeError("Unable to load datastore for coverages")
     for guid in cls.coverages:
         with span_store.pool.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
             cur.execute("DELETE FROM %s WHERE coverage_id='%s'" % (span_store._get_datastore_name(), guid))
             cur.execute("DELETE FROM %s WHERE id='%s'" % (coverage_store._get_datastore_name(), guid))
Example 4
def _clear_couch(config, prefix, verbose=False, sysname=None):
    cfg_copy = dict(config)
    if "password" in cfg_copy:
        cfg_copy["password"] = "******"
    print 'clear_couch: Clearing CouchDB databases using config=', cfg_copy

    db_server = DatastoreFactory.get_datastore(config=config)
    import logging
    db_server.log = logging

    if verbose:
        print "clear_couch: Connected to couch server with config %s" % (cfg_copy,)

    db_list = db_server.list_datastores()

    ignored_num = 0
    for db_name in db_list:

        if (prefix == '*' and not db_name.startswith('_')) or db_name.lower().startswith(prefix.lower()):
            db_server.delete_datastore(db_name)
            print 'clear_couch: Dropped couch database: %s' % db_name

        else:
            if verbose:
                print 'clear_couch: Ignored couch database: %s' % db_name

            ignored_num += 1
    print 'clear_couch: Ignored %s existing databases' % ignored_num

    db_server.close()
Example 5
    def _dump_datastore(self, outpath_base, ds_name, clear_dir=True):
        ds = DatastoreFactory.get_datastore(datastore_name=ds_name,
                                            config=self.config,
                                            scope=self.sysname)
        try:
            if not ds.datastore_exists(ds_name):
                log.warn("Datastore does not exist: %s" % ds_name)
                return

            if not os.path.exists(outpath_base):
                os.makedirs(outpath_base)

            outpath = "%s/%s" % (outpath_base, ds_name)
            if not os.path.exists(outpath):
                os.makedirs(outpath)
            if clear_dir:
                for f in os.listdir(outpath):
                    os.remove(os.path.join(outpath, f))

            objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
            compact_obj = [obj for obj_id, obj_key, obj in objs]
            compact_obj = ["COMPACTDUMP", compact_obj]
            with open("%s/%s_compact.json" % (outpath, ds_name), 'w') as f:
                json.dump(compact_obj, f)
            numwrites = len(objs)

            log.info("Wrote %s files to %s" % (numwrites, outpath))
        finally:
            ds.close()
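The dump produced here is one JSON file per datastore: a two-element list whose first entry is the literal tag "COMPACTDUMP" and whose second is the list of documents. A sketch of reading a dump back by hand (the path below is illustrative; it mirrors the loader shown later in Example 10):

import json

# Illustrative path; real dumps land under the outpath_base passed in.
with open("res/preload/local/dump_20150101_120000/resources/resources_compact.json") as f:
    tag, docs = json.load(f)
assert tag == "COMPACTDUMP"
for doc in docs:
    doc.pop("_rev", None)   # revisions must be stripped before re-inserting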
Example 6
 def dump_datastore(self, path=None, ds_name=None, clear_dir=True):
     """
      Dumps datastores into a directory as JSON files (one compact JSON
      file per datastore).
      @param ds_name Logical name (such as "resources") of an ION datastore
      @param path Directory to put dumped datastores into (defaults to
                  "res/preload/local/dump_[timestamp]")
      @param clear_dir if True, delete contents of datastore dump dirs
     """
     if not path:
         dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
         path = "res/preload/local/dump_%s" % dtstr
     if ds_name:
         ds = DatastoreFactory.get_datastore(datastore_name=ds_name,
                                             config=self.config,
                                             scope=self.sysname)
         if ds.datastore_exists(ds_name):
             self._dump_datastore(path, ds_name, clear_dir)
         else:
             log.warn("Datastore does not exist")
         ds.close()
     else:
         ds_list = ['resources', 'objects', 'state', 'events']
         for dsn in ds_list:
             self._dump_datastore(path, dsn, clear_dir)
Example 7
 def __init__(self, sysname=None, orgname=None, config=None):
     self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
     sysname = sysname or get_default_sysname()
     self.datastore_name = "resources"
     self.datastore = DatastoreFactory.get_datastore(datastore_name=self.datastore_name, config=config,
                                                     scope=sysname, profile=DataStore.DS_PROFILE.RESOURCES,
                                                     variant=DatastoreFactory.DS_BASE)
Example 8
    def _dump_datastore(self, outpath_base, ds_name, clear_dir=True):
        ds = DatastoreFactory.get_datastore(datastore_name=ds_name, config=self.config, scope=self.sysname)
        try:
            if not ds.datastore_exists(ds_name):
                log.warn("Datastore does not exist: %s" % ds_name)
                return

            if not os.path.exists(outpath_base):
                os.makedirs(outpath_base)

            outpath = "%s/%s" % (outpath_base, ds_name)
            if not os.path.exists(outpath):
                os.makedirs(outpath)
            if clear_dir:
                for f in os.listdir(outpath):
                    os.remove(os.path.join(outpath, f))

            objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
            compact_obj = [obj for obj_id, obj_key, obj in objs]
            compact_obj = ["COMPACTDUMP", compact_obj]
            with open("%s/%s_compact.json" % (outpath, ds_name), 'w') as f:
                json.dump(compact_obj, f)
            numwrites = len(objs)

            log.info("Wrote %s files to %s" % (numwrites, outpath))
        finally:
            ds.close()
Example 9
    @classmethod
    def _force_clean(cls, recreate=False, initial=False):
        # Database resources
        from pyon.core.bootstrap import get_sys_name, CFG
        from pyon.datastore.datastore_common import DatastoreFactory
        datastore = DatastoreFactory.get_datastore(config=CFG, variant=DatastoreFactory.DS_BASE, scope=get_sys_name())
        if initial:
            datastore._init_database(datastore.database)

        dbs = datastore.list_datastores()
        clean_prefix = '%s_' % get_sys_name().lower()
        things_to_clean = [x for x in dbs if x.startswith(clean_prefix)]
        try:
            for thing in things_to_clean:
                datastore.delete_datastore(datastore_name=thing)
                if recreate:
                    datastore.create_datastore(datastore_name=thing)

        finally:
            datastore.close()

        # Broker resources
        from putil.rabbitmq.rabbit_util import RabbitManagementUtil
        rabbit_util = RabbitManagementUtil(CFG, sysname=get_sys_name())
        deleted_exchanges, deleted_queues = rabbit_util.clean_by_sysname()
        log.info("Deleted %s exchanges, %s queues" % (len(deleted_exchanges), len(deleted_queues)))

        # File system
        from pyon.util.file_sys import FileSystem
        FileSystem._clean(CFG)
Example 10
    def _load_datastore(self, path=None, ds_name=None, ignore_errors=True):
        ds = DatastoreFactory.get_datastore(datastore_name=ds_name, config=self.config, scope=self.sysname)
        try:
            objects = []
            for fn in os.listdir(path):
                fp = os.path.join(path, fn)
                try:
                    with open(fp, 'r') as f:
                        json_text = f.read()
                    obj = json.loads(json_text)
                    if obj and isinstance(obj, list) and obj[0] == "COMPACTDUMP":
                        objects.extend(obj[1])
                    else:
                        objects.append(obj)
                except Exception as ex:
                    if ignore_errors:
                        log.warn("load error id=%s err=%s" % (fn, str(ex)))
                    else:
                        raise ex

            if objects:
                for obj in objects:
                    if "_rev" in obj:
                        del obj["_rev"]
                try:
                    res = ds.create_doc_mult(objects)
                    log.info("DatastoreLoader: Loaded %s objects into %s" % (len(res), ds_name))
                except Exception as ex:
                    if ignore_errors:
                        log.warn("load error err=%s" % (str(ex)))
                    else:
                        raise ex
        finally:
            ds.close()
Example 11
 def __init__(self, sysname=None, orgname=None, config=None):
     self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
     sysname = sysname or get_default_sysname()
     self.datastore_name = "resources"
     self.datastore = DatastoreFactory.get_datastore(datastore_name=self.datastore_name, config=config,
                                                     scope=sysname, profile=DataStore.DS_PROFILE.DIRECTORY,
                                                     variant=DatastoreFactory.DS_BASE)
Example 12
 def __init__(self, sysname=None, orgname=None, config=None):
     self.orgname = orgname or get_safe(config, "system.root_org", "ION")
     sysname = sysname or get_default_sysname()
     self.datastore_name = "resources"
     self.datastore = DatastoreFactory.get_datastore(
         datastore_name=self.datastore_name,
         config=config,
         scope=sysname,
         profile=DataStore.DS_PROFILE.DIRECTORY,
         variant=DatastoreFactory.DS_BASE,
     )
Example 13
    def get_datastore_instance(cls, ds_name, profile=None):
        profile = profile or DataStore.DS_PROFILE_MAPPING.get(
            ds_name, DataStore.DS_PROFILE.BASIC)
        new_ds = DatastoreFactory.get_datastore(
            datastore_name=ds_name,
            profile=profile,
            scope=get_sys_name(),
            config=CFG,
            variant=DatastoreFactory.DS_FULL)

        return new_ds
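A usage sketch for the factory method above. The class hosting get_datastore_instance is not shown in the snippet, so DatastoreManager below is a placeholder; the profile argument is optional because plain names like "events" resolve through DS_PROFILE_MAPPING:

# Hypothetical caller of the classmethod above.
events_ds = DatastoreManager.get_datastore_instance("events")
try:
    rows = events_ds.find_docs_by_view("_all_docs", None, id_only=False)
finally:
    events_ds.close()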
Example 14
 def get_blame_objects(self):
     ds_list = ['resources', 'objects', 'state', 'events']
     blame_objs = {}
     for ds_name in ds_list:
         ret_objs = []
         try:
             ds = DatastoreFactory.get_datastore(datastore_name=ds_name, config=self.config, scope=self.sysname)
             ret_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
             ds.close()
         except BadRequest:
             continue
         objs = []
         for obj_id, obj_key, obj in ret_objs:
             if "blame_" in obj:
                 objs.append(obj)
         blame_objs[ds_name] = objs
     return blame_objs
Example 15
 def test_span_insert(self):
     scov, cov_name = self.construct_cov(nt=10)
     self.coverages.add(scov.persistence_guid)
     self.assertIsNotNone(scov)
     span_store = DatastoreFactory.get_datastore(datastore_name='coverage_spans', config=CFG)
     if span_store is None:
         raise RuntimeError("Unable to load datastore for coverage_spans")
     mm = scov._persistence_layer.master_manager
     span_addr = []
     with span_store.pool.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
         cur.execute("SELECT span_address from %s where coverage_id='%s'" % (span_store._get_datastore_name(), mm.guid))
         self.assertGreater(cur.rowcount, 0)
         for row in cur:
             span_addr.append(row['span_address'])
     with span_store.pool.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
         for addr in span_addr:
             cur.execute("DELETE FROM %s WHERE span_address='%s'" % (span_store._get_datastore_name(), addr))
Example 16
    @classmethod
    def _force_clean(cls, recreate=False):
        from pyon.core.bootstrap import get_sys_name, CFG
        from pyon.datastore.datastore_common import DatastoreFactory
        datastore = DatastoreFactory.get_datastore(config=CFG, variant=DatastoreFactory.DS_BASE, scope=get_sys_name())
        #datastore = DatastoreFactory.get_datastore(config=CFG, variant=DatastoreFactory.DS_BASE)

        dbs = datastore.list_datastores()
        things_to_clean = filter(lambda x: x.startswith('%s_' % get_sys_name().lower()), dbs)
        try:
            for thing in things_to_clean:
                datastore.delete_datastore(datastore_name=thing)
                if recreate:
                    datastore.create_datastore(datastore_name=thing)

        finally:
            datastore.close()
        from pyon.util.file_sys import FileSystem
        FileSystem._clean(CFG)
Example 17
 def create_core_datastores(self):
     """ Create core datastores, so that they exist when containers start concurrently. """
     ds = DatastoreFactory.get_datastore(config=self.config, scope=self.sysname, variant=DatastoreFactory.DS_BASE)
     datastores = [
         ('resources', 'RESOURCES'),
         ('events', 'EVENTS'),
         ('state', 'STATE'),
         ('objects', 'OBJECTS'),
     ]
     count = 0
     ds_created = []
     for local_dsn, profile in datastores:
         if not ds.datastore_exists(local_dsn):
             ds.create_datastore(datastore_name=local_dsn, profile=profile)
             count += 1
             ds_created.append(local_dsn)
     if count:
         log.info("store_interfaces: Created %s datastores: %s" % (count, ds_created))
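Because each datastore is created only after a datastore_exists check, the call is effectively idempotent and safe to issue from every container during concurrent startup. A sketch, with the host class name assumed (it is not shown in the snippet):

# Hypothetical host class; repeated calls become no-ops once the four core
# datastores (resources, events, state, objects) exist.
bootstrapper = StoreInterfaces(config=CFG, sysname="mysys")
bootstrapper.create_core_datastores()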
Example 18
 def create_core_datastores(self):
     """
     Main entry point into creating core datastores
     """
     ds = DatastoreFactory.get_datastore(config=self.config, scope=self.sysname, variant=DatastoreFactory.DS_BASE)
     datastores = [
         ('resources', 'RESOURCES'),
         ('events', 'EVENTS'),
         ('state', 'STATE'),
         ('objects', 'OBJECTS'),
     ]
     count = 0
     ds_created = []
     for local_dsn, profile in datastores:
         if not ds.datastore_exists(local_dsn):
             ds.create_datastore(datastore_name=local_dsn, profile=profile)
             count += 1
             ds_created.append(local_dsn)
     print "store_interfaces: Created %s datastores: %s" % (count, ds_created)
Example 19
    @classmethod
    def _force_clean(cls, recreate=False):
        from pyon.core.bootstrap import get_sys_name, CFG
        from pyon.datastore.datastore_common import DatastoreFactory
        datastore = DatastoreFactory.get_datastore(
            config=CFG, variant=DatastoreFactory.DS_BASE, scope=get_sys_name())
        #datastore = DatastoreFactory.get_datastore(config=CFG, variant=DatastoreFactory.DS_BASE)

        dbs = datastore.list_datastores()
        things_to_clean = filter(
            lambda x: x.startswith('%s_' % get_sys_name().lower()), dbs)
        try:
            for thing in things_to_clean:
                datastore.delete_datastore(datastore_name=thing)
                if recreate:
                    datastore.create_datastore(datastore_name=thing)

        finally:
            datastore.close()

        if os.environ.get('CEI_LAUNCH_TEST', None) is None:
            from pyon.util.file_sys import FileSystem
            FileSystem._clean(CFG)
Example 20
 def get_blame_objects(self):
     ds_list = ['resources', 'objects', 'state', 'events']
     blame_objs = {}
     for ds_name in ds_list:
         ret_objs = []
         try:
             ds = DatastoreFactory.get_datastore(datastore_name=ds_name,
                                                 config=self.config,
                                                 scope=self.sysname)
             ret_objs = ds.find_docs_by_view("_all_docs",
                                             None,
                                             id_only=False)
             ds.close()
         except BadRequest:
             continue
         objs = []
         for obj_id, obj_key, obj in ret_objs:
             if "blame_" in obj:
                 objs.append(obj)
         blame_objs[ds_name] = objs
     return blame_objs
Example 21
 def create_core_datastores(self):
     """ Create core datastores, so that they exist when containers start concurrently. """
     ds = DatastoreFactory.get_datastore(config=self.config,
                                         scope=self.sysname,
                                         variant=DatastoreFactory.DS_BASE)
     datastores = [
         ('resources', 'RESOURCES'),
         ('events', 'EVENTS'),
         ('state', 'STATE'),
         ('objects', 'OBJECTS'),
     ]
     count = 0
     ds_created = []
     for local_dsn, profile in datastores:
         if not ds.datastore_exists(local_dsn):
             ds.create_datastore(datastore_name=local_dsn, profile=profile)
             count += 1
             ds_created.append(local_dsn)
     if count:
         log.info("store_interfaces: Created %s datastores: %s" %
                  (count, ds_created))
Example 22
 def dump_datastore(self, path=None, ds_name=None, clear_dir=True):
     """
      Dumps datastores into a directory as JSON files (one compact JSON
      file per datastore).
      @param ds_name Logical name (such as "resources") of an ION datastore
      @param path Directory to put dumped datastores into (defaults to
                  "res/preload/local/dump_[timestamp]")
      @param clear_dir if True, delete contents of datastore dump dirs
     """
     if not path:
         dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
         path = "res/preload/local/dump_%s" % dtstr
     if ds_name:
         ds = DatastoreFactory.get_datastore(datastore_name=ds_name, config=self.config, scope=self.sysname)
         if ds.datastore_exists(ds_name):
             self._dump_datastore(path, ds_name, clear_dir)
         else:
             log.warn("Datastore does not exist")
         ds.close()
     else:
         ds_list = ['resources', 'objects', 'state', 'events']
         for dsn in ds_list:
             self._dump_datastore(path, dsn, clear_dir)
Example 23
    def _load_datastore(self, path=None, ds_name=None, ignore_errors=True):
        ds = DatastoreFactory.get_datastore(datastore_name=ds_name,
                                            config=self.config,
                                            scope=self.sysname)
        try:
            objects = []
            for fn in os.listdir(path):
                fp = os.path.join(path, fn)
                try:
                    with open(fp, 'r') as f:
                        json_text = f.read()
                    obj = json.loads(json_text)
                    if obj and isinstance(obj, list) and obj[0] == "COMPACTDUMP":
                        objects.extend(obj[1])
                    else:
                        objects.append(obj)
                except Exception as ex:
                    if ignore_errors:
                        log.warn("load error id=%s err=%s" % (fn, str(ex)))
                    else:
                        raise ex

            if objects:
                for obj in objects:
                    if "_rev" in obj:
                        del obj["_rev"]
                try:
                    res = ds.create_doc_mult(objects)
                    log.info("DatastoreLoader: Loaded %s objects into %s" %
                             (len(res), ds_name))
                except Exception as ex:
                    if ignore_errors:
                        log.warn("load error err=%s" % (str(ex)))
                    else:
                        raise ex
        finally:
            ds.close()
Example 24
def clear_db(config, prefix, sysname=None):
    config = DatastoreFactory.get_server_config(config)
    _clear_db(config=config,
              prefix=prefix,
              sysname=sysname)
Example 25
    def get_datastore_instance(cls, ds_name, profile=None):
        profile = profile or DataStore.DS_PROFILE_MAPPING.get(ds_name, DataStore.DS_PROFILE.BASIC)
        new_ds = DatastoreFactory.get_datastore(datastore_name=ds_name, profile=profile, scope=get_sys_name(),
                                                config=CFG, variant=DatastoreFactory.DS_FULL)

        return new_ds
Example 26
def clear_db(config, prefix, sysname=None):
    config = DatastoreFactory.get_server_config(config)
    _clear_db(config=config, prefix=prefix, sysname=sysname)
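Both clear_db variants first normalize the raw configuration through DatastoreFactory.get_server_config before delegating to the private _clear_db, so callers can pass the full system config object directly. A usage sketch (the prefix and sysname values are illustrative):

# Sketch: drop every database whose name starts with the sysname prefix.
clear_db(CFG, prefix="mysys_", sysname="mysys")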