Code example #1
(score: 0)
File: datastore_admin.py  Project: swarbhanu/pyon
    def _load_datastore(self, path=None, ds_name=None, ignore_errors=True):
        """
        Load all YML files found in path into the datastore ds_name.

        A file whose YAML content is a list starting with "COMPACTDUMP" is
        treated as a compact dump (all remaining list elements are loaded);
        any other content is loaded as a single document. "_rev" keys are
        stripped so CouchDB creates the documents anew.

        @param path Directory containing YML dump files
        @param ds_name Logical name of the target datastore
        @param ignore_errors If True, log and continue on per-file or
               bulk-create errors; otherwise re-raise
        """
        ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
        try:
            objects = []
            for fn in os.listdir(path):
                fp = os.path.join(path, fn)
                try:
                    with open(fp, 'r') as f:
                        yaml_text = f.read()
                    # NOTE(review): yaml.load without an explicit Loader is
                    # unsafe on untrusted input; dump files are assumed trusted.
                    obj = yaml.load(yaml_text)
                    if obj and type(obj) is list and obj[0] == "COMPACTDUMP":
                        objects.extend(obj[1:])
                    else:
                        objects.append(obj)
                except Exception as ex:
                    if ignore_errors:
                        log.warn("load error id=%s err=%s" % (fn, str(ex)))
                    else:
                        # BUG FIX: bare raise preserves the original traceback
                        # (in Python 2, "raise ex" discards it)
                        raise

            if objects:
                for obj in objects:
                    # Strip revision info so the docs are created fresh
                    if "_rev" in obj:
                        del obj["_rev"]
                try:
                    res = ds.create_doc_mult(objects)
                    log.info("DatastoreLoader: Loaded %s objects into %s" % (len(res), ds_name))
                except Exception as ex:
                    if ignore_errors:
                        # BUG FIX: previous message logged the last file name
                        # (fn), which is unrelated to a bulk-create failure
                        log.warn("load error ds=%s err=%s" % (ds_name, str(ex)))
                    else:
                        raise
        finally:
            ds.close()
コード例 #2
0
    def _load_datastore(self, path=None, ds_name=None, ignore_errors=True):
        """
        Load all YML files found in path into the datastore ds_name.

        A file whose YAML content is a list starting with "COMPACTDUMP" is
        treated as a compact dump (all remaining list elements are loaded);
        any other content is loaded as a single document.

        @param path Directory containing YML dump files
        @param ds_name Logical name of the target datastore
        @param ignore_errors If True, log and continue on errors; else raise
        """
        ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
        try:
            objects = []
            for fn in os.listdir(path):
                fp = os.path.join(path, fn)
                try:
                    with open(fp, 'r') as f:
                        yaml_text = f.read()
                    # NOTE(review): yaml.load without a Loader is unsafe on
                    # untrusted input -- dump files are assumed trusted here
                    obj = yaml.load(yaml_text)
                    if obj and type(obj) is list and obj[0] == "COMPACTDUMP":
                        objects.extend(obj[1:])
                    else:
                        objects.append(obj)
                except Exception as ex:
                    if ignore_errors:
                        log.warn("load error id=%s err=%s" % (fn, str(ex)))
                    else:
                        raise ex

            if objects:
                for obj in objects:
                    # Remove revision info so CouchDB creates the docs anew
                    if "_rev" in obj:
                        del obj["_rev"]
                try:
                    res = ds.create_doc_mult(objects)
                    log.info("DatastoreLoader: Loaded %s objects into %s" %
                             (len(res), ds_name))
                except Exception as ex:
                    if ignore_errors:
                        # NOTE(review): fn here is the last file from the loop
                        # above, not the cause of a bulk-create failure
                        log.warn("load error id=%s err=%s" % (fn, str(ex)))
                    else:
                        raise ex
        finally:
            ds.close()
コード例 #3
0
ファイル: datastore_admin.py プロジェクト: swarbhanu/pyon
    def _dump_datastore(self, outpath_base, ds_name, clear_dir=True, compact=False):
        """
        Dump all documents of one datastore to YML files under outpath_base.

        @param outpath_base Base output directory (created if missing)
        @param ds_name Logical name of the datastore to dump
        @param clear_dir If True, remove existing files from the output dir
        @param compact If True, write one big YML file instead of one per doc
        """
        ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
        try:
            if not ds.exists_datastore(ds_name):
                log.warn("Datastore does not exist: %s" % ds_name)
                return

            if not os.path.exists(outpath_base):
                os.makedirs(outpath_base)

            outpath = "%s/%s" % (outpath_base, ds_name)
            if not os.path.exists(outpath):
                os.makedirs(outpath)
            if clear_dir:
                # IDIOM FIX: plain loop instead of a list comprehension that
                # was executed purely for the os.remove side effect
                for leftover in os.listdir(outpath):
                    os.remove(os.path.join(outpath, leftover))

            objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
            numwrites = 0
            if compact:
                # One YML file with a "COMPACTDUMP" marker as first element
                compact_obj = [obj for obj_id, obj_key, obj in objs]
                compact_obj.insert(0, "COMPACTDUMP")
                with open("%s/%s_compact.yml" % (outpath, ds_name), 'w') as f:
                    yaml.dump(compact_obj, f, default_flow_style=False)
                numwrites = len(objs)
            else:
                for obj_id, obj_key, obj in objs:
                    # Some object ids have slashes - not legal in filenames
                    fn = obj_id.replace("/", "_")
                    with open("%s/%s.yml" % (outpath, fn), 'w') as f:
                        yaml.dump(obj, f, default_flow_style=False)
                        numwrites += 1

            log.info("Wrote %s objects to %s" % (numwrites, outpath))
        finally:
            ds.close()
コード例 #4
0
    def dump_resources_as_xlsx(self, filename=None):
        """
        Dump all resource registry objects into an Excel workbook.

        @param filename Output path; defaults to interface/resources_[ts].xls
        """
        self._clear()
        # TODO: Use DatastoreFactory for couch independence
        ds = CouchDataStore(DataStore.DS_RESOURCES,
                            profile=DataStore.DS_PROFILE.RESOURCES,
                            config=CFG,
                            scope=self.sysname)
        all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)

        log.info("Found %s objects in datastore resources", len(all_objs))

        self._analyze_objects(all_objs)

        self._wb = xlwt.Workbook()
        self._worksheets = {}

        self._dump_observatories()

        self._dump_network()

        # One worksheet per resource type, in alphabetical order
        for restype in sorted(self._res_by_type.keys()):
            self._dump_resource_type(restype)

        dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
        path = filename or "interface/resources_%s.xls" % dtstr
        self._wb.save(path)
コード例 #5
0
 def dump_datastore(self,
                    path=None,
                    ds_name=None,
                    clear_dir=True,
                    compact=False):
     """
     Dumps CouchDB datastores into a directory as YML files.
     @param ds_name Logical name (such as "resources") of an ION datastore
     @param path Directory to put dumped datastores into (defaults to
                 "res/preload/local/dump_[timestamp]")
     @param clear_dir if True, delete contents of datastore dump dirs
     @param compact if True, saves all objects in one big YML file
     """
     if not path:
         dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
         path = "res/preload/local/dump_%s" % dtstr
     if ds_name:
         ds = CouchDataStore(ds_name,
                             config=self.config,
                             scope=self.sysname)
         try:
             if ds.exists_datastore(ds_name):
                 self._dump_datastore(path, ds_name, clear_dir, compact)
             else:
                 log.warn("Datastore does not exist")
         finally:
             # BUG FIX: close the datastore even if the existence check or
             # the dump raises (previously leaked on exception)
             ds.close()
     else:
         # No name given: dump all well-known system datastores
         ds_list = [
             'resources', 'objects', 'state', 'events', 'directory',
             'scidata'
         ]
         for dsn in ds_list:
             self._dump_datastore(path, dsn, clear_dir, compact)
コード例 #6
0
    def create_resources_snapshot(self, persist=False, filename=None):
        """
        Build a snapshot dict of resource and association ids/timestamps.

        @param persist If True, also write the snapshot as JSON to disk
        @param filename Output path; defaults to interface/rrsnapshot_[ts].json
        @retval dict with "resources" and "associations" sub-dicts mapping
                object id to its last timestamp
        @raises Inconsistent if a non-dict or untyped object is found
        """
        ds = CouchDataStore(DataStore.DS_RESOURCES, profile=DataStore.DS_PROFILE.RESOURCES, config=CFG, scope=self.sysname)
        try:
            all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
        finally:
            # BUG FIX: the datastore was never closed before
            ds.close()

        log.info("Found %s objects in datastore resources", len(all_objs))

        resources = {}
        associations = {}
        snapshot = dict(resources=resources, associations=associations)

        for obj_id, key, obj in all_objs:
            # Skip CouchDB design documents
            if obj_id.startswith("_design"):
                continue
            if not isinstance(obj, dict):
                raise Inconsistent("Object of bad type found: %s" % type(obj))
            obj_type = obj.get("type_", None)
            if obj_type == "Association":
                associations[obj_id] = obj.get("ts", None)
            elif obj_type:
                resources[obj_id] = obj.get("ts_updated", None)
            else:
                raise Inconsistent("Object with no type_ found: %s" % obj)

        if persist:
            dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
            path = filename or "interface/rrsnapshot_%s.json" % dtstr
            snapshot_json = json.dumps(snapshot)
            with open(path, "w") as f:
                f.write(snapshot_json)

        log.debug("Created resource registry snapshot. %s resources, %s associations", len(resources), len(associations))

        return snapshot
コード例 #7
0
ファイル: interfaces.py プロジェクト: swarbhanu/pyon
 def create_core_datastores(self):
     """
     Main entry point into creating core datastores
     """
     ds = CouchDataStore(config=self.config, scope=self.sysname)
     # Create each required core datastore unless it already exists
     for dsn in ('resources',):
         if not ds.exists_datastore(dsn):
             ds.create_datastore(dsn)
コード例 #8
0
ファイル: interfaces.py プロジェクト: pkediyal/pyon
 def create_core_datastores(self):
     """
     Main entry point into creating core datastores
     """
     ds = CouchDataStore(config=self.config, scope=self.sysname)
     # Determine which core stores are missing, then create only those
     missing = [dsn for dsn in ['resources'] if not ds.exists_datastore(dsn)]
     for dsn in missing:
         ds.create_datastore(dsn)
コード例 #9
0
 def __init__(self, sysname=None, orgname=None, config=None):
     """
     Standalone access to the "resources" datastore (no container needed).

     @param sysname System name scope; defaults to the system default
     @param orgname Root org name; defaults to config system.root_org or ION
     @param config Configuration dict used to reach CouchDB
     """
     self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
     sysname = sysname or get_default_sysname()
     self.datastore_name = "resources"
     self.datastore = CouchDataStore(self.datastore_name, config=config, scope=sysname)
     try:
         # Ensure the directory design document (views) exists
         self.datastore.read_doc("_design/directory")
     except NotFound:
         self.datastore.define_profile_views("RESOURCES")
コード例 #10
0
ファイル: interfaces.py プロジェクト: seman/pyon
 def create_core_datastores(self):
     """
     Main entry point into creating core datastores
     """
     ds = CouchDataStore(config=self.config, scope=self.sysname)
     datastores = ["resources", "events"]
     # Count only the stores newly created in this call
     count = 0
     for local_dsn in datastores:
         if not ds.exists_datastore(local_dsn):
             ds.create_datastore(local_dsn)
             count += 1
             # NOTE: Views and other datastores are created by containers' DatastoreManager
     print "store_interfaces: Created %s datastores..." % count
コード例 #11
0
ファイル: datastore_admin.py プロジェクト: swarbhanu/pyon
 def get_blame_objects(self):
     """
     Return all documents that carry a "blame_" marker, per datastore.

     @retval dict mapping datastore name to a list of matching documents;
             datastores that cannot be queried (BadRequest) are skipped
     """
     ds_list = ['resources', 'objects', 'state', 'events', 'directory', 'scidata']
     blame_objs = {}
     for ds_name in ds_list:
         try:
             ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
             try:
                 ret_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
             finally:
                 # BUG FIX: close the store even if the view query raises
                 # (previously leaked when find_docs_by_view failed)
                 ds.close()
         except BadRequest:
             continue
         objs = [obj for obj_id, obj_key, obj in ret_objs if "blame_" in obj]
         blame_objs[ds_name] = objs
     return blame_objs
コード例 #12
0
ファイル: clear_couch_util.py プロジェクト: pkediyal/pyon
def _clear_couch(host, port, username, password, prefix, verbose=False):
    """
    Delete all couch databases whose name matches prefix.

    @param prefix Database name prefix to match (case-insensitive);
                  "*" matches every non-system database
    @param verbose If True, print per-database progress
    """
    db_server = CouchDataStore(host=host,
                               port=str(port),
                               username=username,
                               password=password)

    if verbose:
        print "clear_couch: Connected to couch server http://%s:%d" % (host,
                                                                       port)

    db_list = db_server.list_datastores()

    ignored_num = 0
    for db_name in db_list:

        # Couch system databases start with "_" and are never dropped
        if (prefix == '*'
                and not db_name.startswith('_')) or db_name.lower().startswith(
                    prefix.lower()):
            db_server.delete_datastore(db_name)
            print 'clear_couch: Dropped couch database: %s' % db_name

        else:
            if verbose:
                print 'clear_couch: Ignored couch database: %s' % db_name

            ignored_num += 1
    print 'clear_couch: Ignored %s existing databases' % ignored_num

    db_server.close()
コード例 #13
0
ファイル: int_test.py プロジェクト: swarbhanu/pyon
    def _force_clean(cls, recreate=False):
        """
        Delete (and optionally recreate) all datastores of this sysname.

        @param recreate If True, recreate each deleted datastore empty
        """
        from pyon.core.bootstrap import get_sys_name, CFG
        from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
        datastore = CouchDataStore(config=CFG)
        dbs = datastore.list_datastores()
        # Only touch databases scoped to this system name
        things_to_clean = filter(lambda x: x.startswith('%s_' % get_sys_name()), dbs)
        try:
            for thing in things_to_clean:
                datastore.delete_datastore(datastore_name=thing)
                if recreate:
                    datastore.create_datastore(datastore_name=thing)

        finally:
            datastore.close()
コード例 #14
0
 def __init__(self, sysname=None, orgname=None, config=None):
     """
     Standalone access to the "resources" datastore (no container needed).

     @param sysname System name scope; defaults to the system default
     @param orgname Root org name; defaults to config system.root_org or ION
     @param config Configuration dict used to reach CouchDB
     """
     self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
     sysname = sysname or get_default_sysname()
     self.datastore_name = "resources"
     self.datastore = CouchDataStore(self.datastore_name, config=config, scope=sysname)
     try:
         # Ensure the directory design document (views) exists
         self.datastore.read_doc("_design/directory")
     except NotFound:
         self.datastore.define_profile_views("RESOURCES")
コード例 #15
0
ファイル: transport.py プロジェクト: swarbhanu/pyon
    def declare_queue_impl(self, client, queue, durable=False, auto_delete=True):
        """
        Declare an AMQP queue on the given client channel.

        @param client Channel-bearing AMQP client
        @param queue Queue name; empty/None lets the broker generate one
        @param durable If True, the queue survives broker restarts
        @param auto_delete If True, the queue is deleted when unused
        @retval The actual queue name assigned by the broker
        """
        log.debug("AMQPTransport.declare_queue_impl(%s): %s, D %s, AD %s", client.channel_number, queue, durable, auto_delete)
        arguments = {}

        # CONSISTENCY FIX: read the env var once so both uses below are
        # guaranteed to see the same value (was read three times)
        queue_blame = os.environ.get('QUEUE_BLAME', None)
        if queue_blame is not None:
            ds_name, testid = queue_blame.split(',')
            arguments.update({'created-by': testid})

        frame = self._sync_call(client, client.queue_declare, 'callback',
                                queue=queue or '',
                                auto_delete=auto_delete,
                                durable=durable,
                                arguments=arguments)

        if queue_blame is not None:
            # Record which test created the queue, for leak blame analysis
            from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
            ds = CouchDataStore(datastore_name=ds_name)
            ds.create_doc({'test_id': testid, 'queue_name': frame.method.queue})
            ds.close()

        return frame.method.queue
コード例 #16
0
    def declare_queue_impl(self,
                           client,
                           queue,
                           durable=False,
                           auto_delete=True):
        """
        Declare an AMQP queue on the given client channel and return the
        broker-assigned queue name.

        When the QUEUE_BLAME env var is set ("ds_name,testid"), the queue is
        tagged with and recorded against the creating test id.
        """
        log.debug("AMQPTransport.declare_queue_impl(%s): %s, D %s, AD %s",
                  client.channel_number, queue, durable, auto_delete)
        arguments = {}

        if os.environ.get('QUEUE_BLAME', None) is not None:
            ds_name, testid = os.environ['QUEUE_BLAME'].split(',')
            arguments.update({'created-by': testid})

        frame = self._sync_call(client,
                                client.queue_declare,
                                'callback',
                                queue=queue or '',
                                auto_delete=auto_delete,
                                durable=durable,
                                arguments=arguments)

        if os.environ.get('QUEUE_BLAME', None) is not None:
            # Record which test created the queue, for leak blame analysis
            from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
            ds = CouchDataStore(datastore_name=ds_name)
            ds.create_doc({
                'test_id': testid,
                'queue_name': frame.method.queue
            })
            ds.close()

        return frame.method.queue
コード例 #17
0
    def _force_clean(cls, recreate=False):
        """
        Delete (and optionally recreate) all datastores of this sysname.

        @param recreate If True, recreate each deleted datastore empty
        """
        from pyon.core.bootstrap import get_sys_name, CFG
        from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
        datastore = CouchDataStore(config=CFG)
        dbs = datastore.list_datastores()
        # Only touch databases scoped to this (lowercased) system name
        things_to_clean = filter(lambda x: x.startswith('%s_' % get_sys_name().lower()), dbs)
        try:
            for thing in things_to_clean:
                datastore.delete_datastore(datastore_name=thing)
                if recreate:
                    datastore.create_datastore(datastore_name=thing)

        finally:
            datastore.close()
コード例 #18
0
    def create_resources_snapshot(self, persist=False, filename=None):
        """
        Build a snapshot dict of resource and association ids/timestamps.

        @param persist If True, also write the snapshot as JSON to disk
        @param filename Output path; defaults to interface/rrsnapshot_[ts].json
        @retval dict with "resources" and "associations" sub-dicts mapping
                object id to its last timestamp
        @raises Inconsistent if a non-dict or untyped object is found
        """
        ds = CouchDataStore(DataStore.DS_RESOURCES,
                            profile=DataStore.DS_PROFILE.RESOURCES,
                            config=CFG,
                            scope=self.sysname)
        all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)

        log.info("Found %s objects in datastore resources", len(all_objs))

        resources = {}
        associations = {}
        snapshot = dict(resources=resources, associations=associations)

        for obj_id, key, obj in all_objs:
            # Skip CouchDB design documents
            if obj_id.startswith("_design"):
                continue
            if not isinstance(obj, dict):
                raise Inconsistent("Object of bad type found: %s" % type(obj))
            obj_type = obj.get("type_", None)
            if obj_type == "Association":
                associations[obj_id] = obj.get("ts", None)
            elif obj_type:
                resources[obj_id] = obj.get("ts_updated", None)
            else:
                raise Inconsistent("Object with no type_ found: %s" % obj)

        if persist:
            dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
            path = filename or "interface/rrsnapshot_%s.json" % dtstr
            snapshot_json = json.dumps(snapshot)
            with open(path, "w") as f:
                #yaml.dump(snapshot, f, default_flow_style=False)
                f.write(snapshot_json)

        log.debug(
            "Created resource registry snapshot. %s resources, %s associations",
            len(resources), len(associations))

        return snapshot
コード例 #19
0
    def dump_resources_as_xlsx(self, filename=None):
        """
        Dump all resource registry objects into an Excel workbook.

        @param filename Output path; defaults to interface/resources_[ts].xls
        """
        # TODO: Use DatastoreFactory for couch independence
        ds = CouchDataStore(DataStore.DS_RESOURCES, profile=DataStore.DS_PROFILE.RESOURCES, config=CFG, scope=self.sysname)
        all_objs = ds.find_docs_by_view("_all_docs", None, id_only=False)

        log.info("Found %s objects in datastore resources", len(all_objs))

        self._analyze_objects(all_objs)

        self._wb = xlwt.Workbook()
        self._worksheets = {}

        self._dump_observatories()

        self._dump_network()

        # One worksheet per resource type, in alphabetical order
        for restype in sorted(self._res_by_type.keys()):
            self._dump_resource_type(restype)

        dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
        path = filename or "interface/resources_%s.xls" % dtstr
        self._wb.save(path)
コード例 #20
0
ファイル: datastore_admin.py プロジェクト: swarbhanu/pyon
 def dump_datastore(self, path=None, ds_name=None, clear_dir=True, compact=False):
     """
     Dumps CouchDB datastores into a directory as YML files.
     @param ds_name Logical name (such as "resources") of an ION datastore
     @param path Directory to put dumped datastores into (defaults to
                 "res/preload/local/dump_[timestamp]")
     @param clear_dir if True, delete contents of datastore dump dirs
     @param compact if True, saves all objects in one big YML file
     """
     if not path:
         dtstr = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
         path = "res/preload/local/dump_%s" % dtstr
     if ds_name:
         ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
         if ds.exists_datastore(ds_name):
             self._dump_datastore(path, ds_name, clear_dir, compact)
         else:
             log.warn("Datastore does not exist")
         # NOTE(review): ds is not closed if the check or dump above raises
         ds.close()
     else:
         # No name given: dump all well-known system datastores
         ds_list = ['resources', 'objects', 'state', 'events', 'directory', 'scidata']
         for dsn in ds_list:
             self._dump_datastore(path, dsn, clear_dir, compact)
コード例 #21
0
 def get_blame_objects(self):
     """
     Return all documents that carry a "blame_" marker, per datastore.

     @retval dict mapping datastore name to a list of matching documents;
             datastores that cannot be queried (BadRequest) are skipped
     """
     ds_list = [
         'resources', 'objects', 'state', 'events', 'directory', 'scidata'
     ]
     blame_objs = {}
     for ds_name in ds_list:
         ret_objs = []
         try:
             ds = CouchDataStore(ds_name,
                                 config=self.config,
                                 scope=self.sysname)
             ret_objs = ds.find_docs_by_view("_all_docs",
                                             None,
                                             id_only=False)
             ds.close()
         except BadRequest:
             continue
         objs = []
         for obj_id, obj_key, obj in ret_objs:
             if "blame_" in obj:
                 objs.append(obj)
         blame_objs[ds_name] = objs
     return blame_objs
コード例 #22
0
    def _dump_datastore(self,
                        outpath_base,
                        ds_name,
                        clear_dir=True,
                        compact=False):
        """
        Dump all documents of one datastore to YML files under outpath_base.

        @param outpath_base Base output directory (created if missing)
        @param ds_name Logical name of the datastore to dump
        @param clear_dir If True, remove existing files from the output dir
        @param compact If True, write one big YML file instead of one per doc
        """
        ds = CouchDataStore(ds_name, config=self.config, scope=self.sysname)
        try:
            if not ds.exists_datastore(ds_name):
                log.warn("Datastore does not exist: %s" % ds_name)
                return

            if not os.path.exists(outpath_base):
                os.makedirs(outpath_base)

            outpath = "%s/%s" % (outpath_base, ds_name)
            if not os.path.exists(outpath):
                os.makedirs(outpath)
            if clear_dir:
                # NOTE(review): list comprehension executed purely for the
                # os.remove side effect; a plain loop would be clearer
                [
                    os.remove(os.path.join(outpath, f))
                    for f in os.listdir(outpath)
                ]

            objs = ds.find_docs_by_view("_all_docs", None, id_only=False)
            numwrites = 0
            if compact:
                # One YML file with a "COMPACTDUMP" marker as first element
                compact_obj = [obj for obj_id, obj_key, obj in objs]
                compact_obj.insert(0, "COMPACTDUMP")
                with open("%s/%s_compact.yml" % (outpath, ds_name), 'w') as f:
                    yaml.dump(compact_obj, f, default_flow_style=False)
                numwrites = len(objs)
            else:
                for obj_id, obj_key, obj in objs:
                    # Some object ids have slashes
                    fn = obj_id.replace("/", "_")
                    with open("%s/%s.yml" % (outpath, fn), 'w') as f:
                        yaml.dump(obj, f, default_flow_style=False)
                        numwrites += 1

            log.info("Wrote %s objects to %s" % (numwrites, outpath))
        finally:
            ds.close()
コード例 #23
0
ファイル: clear_couch_util.py プロジェクト: swarbhanu/pyon
def _clear_couch(host, port, username, password, prefix, verbose=False):
    """
    Delete all couch databases whose name matches prefix.

    @param prefix Database name prefix to match (case-insensitive);
                  "*" matches every non-system database
    @param verbose If True, print per-database progress
    """
    db_server = CouchDataStore(host=host, port=str(port), username=username, password=password)

    if verbose:
        print "clear_couch: Connected to couch server http://%s:%d" % (host, port)

    db_list = db_server.list_datastores()

    ignored_num = 0
    for db_name in db_list:

        # Couch system databases start with "_" and are never dropped
        if (prefix == '*' and not db_name.startswith('_')) or db_name.lower().startswith(prefix.lower()):
            db_server.delete_datastore(db_name)
            print 'clear_couch: Dropped couch database: %s' % db_name

        else:
            if verbose:
                print 'clear_couch: Ignored couch database: %s' % db_name

            ignored_num += 1
    print 'clear_couch: Ignored %s existing databases' % ignored_num

    db_server.close()
コード例 #24
0
class DirectoryStandalone(object):
    """
    Directory service standalone class.

    Provides path-based registration and lookup of directory entries in the
    "resources" CouchDB datastore without requiring a running container.
    """

    def __init__(self, sysname=None, orgname=None, config=None):
        """
        @param sysname System name scope; defaults to the system default
        @param orgname Root org name; defaults to config system.root_org or ION
        @param config Configuration dict used to reach CouchDB
        """
        self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
        sysname = sysname or get_default_sysname()
        self.datastore_name = "resources"
        self.datastore = CouchDataStore(self.datastore_name, config=config, scope=sysname)
        try:
            # Ensure the directory design document (views) exists
            self.datastore.read_doc("_design/directory")
        except NotFound:
            self.datastore.define_profile_views("RESOURCES")

    def close(self):
        """Release the datastore connection and drop the reference."""
        self.datastore.close()
        self.datastore = None

    def _get_path(self, parent, key):
        """
        Returns the qualified directory path for a directory entry.

        @raises BadRequest if parent does not start with "/"
        """
        if parent == "/":
            return parent + key
        elif parent.startswith("/"):
            return parent + "/" + key
        else:
            raise BadRequest("Illegal parent: %s" % parent)

    def _get_key(self, path):
        """
        Returns the key (last path segment) from a qualified directory path
        """
        parent, key = path.rsplit("/", 1)
        return key

    def _read_by_path(self, path, orgname=None):
        """
        Given a qualified path, find entry in directory and return DirEntry
        document or None if not found

        @raises BadRequest if path is None
        @raises Inconsistent if more than one entry matches the key
        """
        if path is None:
            raise BadRequest("Illegal arguments")
        orgname = orgname or self.orgname
        parent, key = path.rsplit("/", 1)
        # A top-level entry has an empty parent after rsplit; normalize to "/"
        parent = parent or "/"
        find_key = [orgname, key, parent]
        view_res = self.datastore.find_docs_by_view('directory', 'by_key', key=find_key, id_only=True)

        match = [doc for docid, index, doc in view_res]
        if len(match) > 1:
            raise Inconsistent("More than one directory entry found for key %s" % path)
        elif match:
            return match[0]
        return None

    def lookup(self, parent, key=None, return_entry=False):
        """
        Read entry residing in directory at parent node level.

        @param return_entry If True, return the full DirEntry document;
               otherwise return only its attributes dict (or None)
        """
        path = self._get_path(parent, key) if key else parent
        direntry = self._read_by_path(path)
        if return_entry:
            return direntry
        else:
            return direntry['attributes'] if direntry else None

    def register(self, parent, key, create_only=False, **kwargs):
        """
        Add/replace an entry within directory, below a parent node or "/".
        Note: Replaces (not merges) the attribute values of the entry if existing
        @retval  DirEntry if previously existing
        """
        if not (parent and key):
            raise BadRequest("Illegal arguments")
        if not type(parent) is str or not parent.startswith("/"):
            raise BadRequest("Illegal arguments: parent")

        dn = self._get_path(parent, key)

        entry_old = None
        direntry = self._read_by_path(dn)
        cur_time = get_ion_ts()

        if direntry and create_only:
            # We only wanted to make sure entry exists. Do not change
            return direntry
        elif direntry:
            # Change existing entry's attributes
            entry_old = direntry.get('attributes')
            direntry['attributes'] = kwargs
            direntry['ts_updated'] = cur_time
            self.datastore.update_doc(direntry)
        else:
            # Entry does not exist yet: create it fresh
            doc = self._create_dir_entry(object_id=create_unique_directory_id(), parent=parent, key=key,
                attributes=kwargs)
            self.datastore.create_doc(doc)

        return entry_old

    def register_mult(self, entries):
        """
        Registers multiple directory entries efficiently in one datastore access.
        Note: this fails if entries currently exist, so it works for create only.

        @param entries Sequence of (parent, key, attributes) tuples
        """
        if type(entries) not in (list, tuple):
            raise BadRequest("Bad entries type")
        de_list = []
        cur_time = get_ion_ts()
        for parent, key, attrs in entries:
            de = self._create_dir_entry(object_id=create_unique_directory_id(), parent=parent, key=key,
                attributes=attrs, ts_created=cur_time, ts_updated=cur_time)
            de_list.append(de)
        self.datastore.create_doc_mult(de_list)

    def _update_dir_entry(self, doc, parent, key, attributes=None, ts_updated=''):
        """Update an existing DirEntry document dict in place and return it."""
        doc['attributes'] = attributes or {}
        doc['key'] = key
        doc['parent'] = parent
        doc['ts_updated'] = ts_updated or get_ion_ts()
        return doc

    def _create_dir_entry(self, object_id,  parent, key, attributes=None, ts_created='', ts_updated=''):
        """Build a new DirEntry document dict (does not persist it)."""
        doc = {}
        doc['_id'] = object_id
        doc['type_'] = 'DirEntry'
        doc['attributes'] = attributes or {}
        doc['key'] = key
        doc['parent'] = parent
        doc['org'] = self.orgname
        doc['ts_created'] = ts_created or get_ion_ts()
        doc['ts_updated'] = ts_updated or get_ion_ts()
        return doc

    def find_child_entries(self, parent='/', direct_only=True, **kwargs):
        """
        Return all child entries (ordered by path) for the given parent path.
        Does not return the parent itself. Optionally returns child of child entries.
        Additional kwargs are applied to constrain the search results (limit, descending, skip).
        @param parent  Path to parent (must start with "/")
        @param direct_only  If False, includes child of child entries
        @retval  A list of DirEntry objects for the matches
        """
        if not type(parent) is str or not parent.startswith("/"):
            raise BadRequest("Illegal argument parent: %s" % parent)
        if direct_only:
            start_key = [self.orgname, parent, 0]
            end_key = [self.orgname, parent]
            res = self.datastore.find_docs_by_view('directory', 'by_parent',
                start_key=start_key, end_key=end_key, id_only=True, **kwargs)
        else:
            # Recursive search: range over the path components, with a high
            # sentinel ("ZZZZZZ") as the end of the key range
            path = parent[1:].split("/")
            start_key = [self.orgname, path, 0]
            end_key = [self.orgname, list(path) + ["ZZZZZZ"]]
            res = self.datastore.find_docs_by_view('directory', 'by_path',
                start_key=start_key, end_key=end_key, id_only=True, **kwargs)

        match = [doc for docid, indexkey, doc in res]
        return match
コード例 #25
0
ファイル: datastore_admin.py プロジェクト: swarbhanu/pyon
 def clear_datastore(self, ds_name=None, prefix=None):
     """
     Clears a datastore or a set of datastores of common prefix

     @param ds_name Name of a single datastore to delete
     @param prefix Case-insensitive name prefix; all matching unscoped
            datastores are deleted (used only if ds_name is not given)
     """
     ds = CouchDataStore(config=self.config, scope=self.sysname)
     try:
         if ds_name:
             try:
                 ds.delete_datastore(ds_name)
             except NotFound:
                 try:
                     # Try the unscoped version
                     ds1 = CouchDataStore(config=self.config)
                     ds1.delete_datastore(ds_name)
                 except NotFound:
                     pass
         elif prefix:
             prefix = prefix.lower()
             ds_noscope = CouchDataStore(config=self.config)
             for dsn in ds_noscope.list_datastores():
                 if dsn.lower().startswith(prefix):
                     ds_noscope.delete_datastore(dsn)
         else:
             log.warn("Cannot clear datastore without prefix or datastore name")
     finally:
         ds.close()
コード例 #26
0
class ResourceRegistryStandalone(object):
    """
    Resource Registry service standalone class.

    Container-free facade over the Couch "resources" datastore supporting
    creation/reading of resource documents and associations.
    """

    def __init__(self, sysname=None, orgname=None, config=None):
        self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
        sysname = sysname or get_default_sysname()
        self.datastore_name = "resources"
        self.datastore = CouchDataStore(self.datastore_name, config=config, scope=sysname)
        try:
            # Probe for the design document; define the profile views only on
            # first use of this datastore.
            self.datastore.read_doc("_design/directory")
        except NotFound:
            self.datastore.define_profile_views("RESOURCES")

    def close(self):
        """Close the underlying datastore connection and drop the reference."""
        self.datastore.close()
        self.datastore = None

    def create(self, object=None, actor_id=None, lcstate=None):
        """
        Persist a resource object, stamping lcstate and timestamps.
        @param object    dict-like IonObject; must contain a "type_" key
        @param actor_id  if set and not 'anonymous', also creates a hasOwner association
        @param lcstate   initial lifecycle state; defaults to DEPLOYED_AVAILABLE
        @retval  the (id, rev) result of the datastore create
        @raises BadRequest  if object is missing or not an IonObject
        """
        if object is None:
            raise BadRequest("Object not present")
        if "type_" not in object:
            raise BadRequest("Object is not an IonObject")
        cur_time = get_ion_ts()
        object['lcstate'] = lcstate or "DEPLOYED_AVAILABLE"
        object['ts_created'] = cur_time
        object['ts_updated'] = cur_time
        new_res_id = create_unique_resource_id()
        # BUGFIX: the original unpacked into (res_id, rev) but then returned an
        # undefined name `res`, raising NameError on every call. Keep the full
        # result and unpack from it.
        res = self.datastore.create_doc(object, new_res_id)
        res_id, rev = res

        if actor_id and actor_id != 'anonymous':
            self.create_association(res_id, "hasOwner", actor_id)

        return res

    def create_mult(self, res_list, lcstate=None):
        """
        Persist multiple resource objects in one datastore access.
        @retval  list of (id, rev) tuples for the created documents
        """
        cur_time = get_ion_ts()
        for resobj in res_list:
            resobj['lcstate'] = lcstate or "DEPLOYED_AVAILABLE"
            resobj['ts_created'] = cur_time
            resobj['ts_updated'] = cur_time

        id_list = [create_unique_resource_id() for i in xrange(len(res_list))]
        res = self.datastore.create_doc_mult(res_list, id_list)
        res_list = [(rid, rrv) for success, rid, rrv in res]

        return res_list

    def read(self, object_id='', rev_id=''):
        """Read one resource document by id (and optional revision)."""
        if not object_id:
            raise BadRequest("The object_id parameter is an empty string")

        return self.datastore.read_doc(object_id, rev_id)

    def read_mult(self, object_ids=None):
        """Read multiple resource documents by id in one datastore access."""
        if not object_ids:
            raise BadRequest("The object_ids parameter is empty")
        return self.datastore.read_doc_mult(object_ids)

    def create_association(self, subject=None, predicate=None, obj=None, assoc_type='H2H'):
        """
        Create an association between two IonObjects with a given predicate.
        subject/obj may be ids (str) or already-read objects carrying _id/_rev.
        """
        if not subject or not predicate or not obj:
            raise BadRequest("Association must have all elements set")
        if type(subject) is str:
            subject_id = subject
            subject = self.read(subject_id)
        else:
            if "_id" not in subject or "_rev" not in subject:
                raise BadRequest("Subject id or rev not available")
            subject_id = subject._id
        st = type(subject).__name__

        if type(obj) is str:
            object_id = obj
            obj = self.read(object_id)
        else:
            if "_id" not in obj or "_rev" not in obj:
                raise BadRequest("Object id or rev not available")
            object_id = obj._id
        ot = type(obj).__name__

        assoc_type = assoc_type or 'H2H'
        if assoc_type not in AT:
            raise BadRequest("Unsupported assoc_type: %s" % assoc_type)

        # Check that subject and object type are permitted by association definition
        # Note: Need import here, so that import orders are not screwed up
        from pyon.core.registry import getextends
        from pyon.ion.resource import Predicates
        from pyon.core.bootstrap import IonObject

        assoc = dict(type_="Association",
            at=assoc_type,
            s=subject_id, st=st, srv=subject._rev,
            p=predicate,
            o=object_id, ot=ot, orv=obj._rev,
            ts=get_ion_ts())
        return self.datastore.create_doc(assoc, create_unique_association_id())

    def find_by_type(self, restype, id_only=False, **kwargs):
        """
        Find resources of the given type via the resource/by_type view.
        @retval  list of ids (id_only=True) or documents
        """
        start_key = [restype]
        end_key = [restype]
        res = self.datastore.find_docs_by_view('resource', 'by_type',
            start_key=start_key, end_key=end_key, id_only=id_only, **kwargs)

        if id_only:
            match = [docid for docid, indexkey, doc in res]
        else:
            match = [doc for docid, indexkey, doc in res]
        return match
コード例 #27
0
class QueueBlame(Plugin):
    """
    Nose plugin that records broker queue usage per test, to attribute leaked
    messages/consumers ("queue blame") to the tests that caused them.
    """
    name = 'queueblame'

    def __init__(self):
        Plugin.__init__(self)
        import uuid
        # Unique per-run datastore name so parallel runs do not collide
        self.ds_name = "queueblame-%s" % str(uuid.uuid4())[0:6]

        from collections import defaultdict
        # test id -> queue name -> stats dict (exists/msgs/consumers/accesses)
        self.queues_by_test = defaultdict(lambda: defaultdict(dict))

    def options(self, parser, env):
        super(QueueBlame, self).options(parser, env=env)

        parser.add_option('--queueblame-by-queue',
                          action='store_true',
                          dest='queueblame_by_queue',
                          help='Show output by queue instead of by test',
                          default=False)
        parser.add_option(
            '--queueblame-full',
            action='store_true',
            dest='queueblame_full',
            help='Display ALL queues, not just queues with consumers/msgs')
        parser.add_option(
            '--queueblame-no-trim',
            action='store_false',
            dest='queueblame_trim',
            help=
            'Trim output so that repeated test names/queue names are omitted for brevity. Human readable but not easily machine readable.',
            default=True)

    def configure(self, options, conf):
        """Configure the plugin and system, based on selected options."""
        super(QueueBlame, self).configure(options, conf)

        self._queueblame_by_queue = options.queueblame_by_queue
        self._queueblame_full = options.queueblame_full
        self._queueblame_trim = options.queueblame_trim

    def begin(self):
        """Create the per-run datastore used to collect queue access records."""
        from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
        from pyon.public import CFG
        self.ds = CouchDataStore(datastore_name=self.ds_name, config=CFG)

    def finalize(self, result):
        """Delete the collection datastore and close the connection."""
        self.ds.delete_datastore()
        self.ds.close()

    def beforeTest(self, test):
        # Advertise the datastore/test id so the container records queue accesses
        import os
        os.environ['QUEUE_BLAME'] = "%s,%s" % (self.ds_name, test.id())

    def afterTest(self, test):
        """Collect queue stats for this test plus bleed from prior tests."""
        from pyon.net.transport import NameTrio, TransportError
        from pyon.net.channel import RecvChannel
        import os
        import sys

        # need a connection to node to get queue stats
        from pyon.net.messaging import make_node
        node, ioloop = make_node()

        os.environ.pop('QUEUE_BLAME')
        tid = test.id()

        # grab raw data from database
        obj_ids = self.ds.list_objects()
        objs = self.ds.read_doc_mult(obj_ids)

        for x in objs:
            queue = x['queue_name']

            if 'accesses' in self.queues_by_test[tid][queue]:
                self.queues_by_test[tid][queue]['accesses'] += 1
            else:
                # grab intel from channel
                ch = node.channel(RecvChannel)
                ch._recv_name = NameTrio(queue.split('.')[0], queue)

                try:
                    msgs, consumers = ch.get_stats()
                    exists = True
                    #print >>sys.stderr, "LOG ME", queue, msgs, consumers
                except TransportError:
                    msgs = 0
                    consumers = 0
                    exists = False
                finally:
                    ch.close()

                self.queues_by_test[tid][queue] = {
                    'exists': exists,
                    'msgs': msgs,
                    'consumers': consumers,
                    'accesses': 1
                }

        # must also check all the queues from previous tests, to capture bleed
        # FIX: renamed loop variable; the original shadowed the `test` parameter.
        bleed_queues = set()
        for other_tid, testqueues in self.queues_by_test.iteritems():
            if other_tid != tid:
                map(bleed_queues.add, testqueues.iterkeys())

        # don't test anything we already just tested
        bleed_queues.difference_update(self.queues_by_test[tid].iterkeys())

        for queue in bleed_queues:
            ch = node.channel(RecvChannel)
            ch._recv_name = NameTrio(queue.split('.')[0], queue)

            try:
                msgs, consumers = ch.get_stats()
                exists = True
            except TransportError:
                msgs = 0
                consumers = 0
                exists = False

            # drain the queue!
            if exists and msgs > 0 and consumers == 0:
                print >> sys.stderr, "DRAIN QUEUE:", queue
                ch.start_consume()
                for x in xrange(msgs):
                    m, h, d = ch.recv()
                    print >> sys.stderr, h
                    ch.ack(d)

            ch.close()

            self.queues_by_test[tid][queue] = {
                'exists': exists,
                'msgs': msgs,
                'consumers': consumers,
                'accesses': 0
            }  # 0 is special here, indicates a bleed check

        # empty the database for next test use
        self.ds.delete_datastore()
        self.ds.create_datastore(create_indexes=False)

        node.stop_node()
        ioloop.join(timeout=5)

    def report(self, stream):
        """Format and write the queue blame tables to the given stream."""

        # format report
        table = []
        self.total_count = 0

        def is_interesting(qd):
            """
            Helper method, returns if a row is interesting based on msgs, consumers and queueblame_full flag.
            """
            return self._queueblame_full or (not self._queueblame_full and
                                             (qd['msgs'] > 0
                                              or qd['consumers'] > 0))

        def add_row(first, second, queuedict):
            """
            Can be called with queue/test or test/queue first, hence generic name.

            Returns bool indicated row was added or not.
            """
            self.total_count += 1
            if is_interesting(queuedict):
                acc = ' '
                if queuedict['accesses'] > 1:
                    acc = queuedict['accesses']
                elif queuedict['accesses'] == 0:
                    acc = 'PREV'
                exists = 'T' if queuedict['exists'] else 'F'
                table.append([
                    str(x) for x in [
                        first, second, acc, exists, queuedict['msgs'],
                        queuedict['consumers']
                    ]
                ])

                return True

            return False

        # create tests by queue, used in a few places below
        from collections import defaultdict
        tests_by_queue = defaultdict(list)

        for test, queues in self.queues_by_test.iteritems():
            for queue, queuedict in queues.iteritems():
                tests_by_queue[queue].append(dict(queuedict, test=test))

        # list of queues that are tagged as PREV, we keep tabs on it but only use it later if correct conditions
        prev_list = set()

        # build output table
        if not self._queueblame_by_queue:
            for test, queues in self.queues_by_test.iteritems():
                for queue, queuedict in queues.iteritems():
                    ret = add_row(test, queue, queuedict)
                    if ret and queuedict['accesses'] == 0:
                        prev_list.add(queue)
        else:
            for queue, calls in tests_by_queue.iteritems():
                for call in calls:
                    ret = add_row(queue, call['test'], call)
                    # BUGFIX: the original tested stale `queuedict` here (a
                    # leftover from the grouping loop above), so PREV queues
                    # were mis-detected in by-queue mode. Use this row's dict.
                    if ret and call['accesses'] == 0:
                        prev_list.add(queue)

        # generate prev_list table if it is interesting
        prev_list_table = []
        if not self._queueblame_full:
            for prev in prev_list:
                # cut down dict
                prev_accesses = [
                    qd['test'] for qd in tests_by_queue[prev]
                    if qd['accesses'] > 0
                ]

                for pa in prev_accesses:
                    prev_list_table.append([prev, pa])

        # sort by first col
        table.sort(cmp=lambda x, y: cmp(x[0], y[0]))
        prev_list_table.sort(cmp=lambda x, y: cmp(x[0], y[0]))

        # do we trim?
        if self._queueblame_trim:
            last = ""
            for i, x in enumerate(table):
                if x[0] != last:
                    last = x[0]
                else:
                    table[i][0] = ""
            last = ""
            for i, x in enumerate(prev_list_table):
                if x[0] != last:
                    last = x[0]
                else:
                    prev_list_table[i][0] = ""

        if self._queueblame_by_queue:
            table.insert(
                0, ['Queue', 'Test', '# Acc >1', 'Ex?', '# Msgs', '# Cnsmrs'])
        else:
            table.insert(
                0, ['Test', 'Queue', '# Acc >1', 'Ex?', '# Msgs', '# Cnsmrs'])

        if len(prev_list_table) > 0:
            prev_list_table.insert(0, ['Queue', 'Test'])
            # format prev_table too
            widths = [
                max([len(row[x]) for row in prev_list_table])
                for x in xrange(len(prev_list_table[0]))
            ]
            prev_fmt_out = [
                " ".join([x.ljust(widths[i]) for i, x in enumerate(row)])
                for row in prev_list_table
            ]
            prev_fmt_out.insert(
                1, " ".join(
                    [''.ljust(widths[i], '=') for i in xrange(len(widths))]))
        else:
            prev_fmt_out = []

        # calculate widths
        widths = [
            max([len(row[x]) for row in table]) for x in xrange(len(table[0]))
        ]
        fmt_out = [
            " ".join([x.ljust(widths[i]) for i, x in enumerate(row)])
            for row in table
        ]
        # insert col separation row
        fmt_out.insert(
            1,
            " ".join([''.ljust(widths[i], '=') for i in xrange(len(widths))]))

        stream.write("\n" + "=" * len(fmt_out[0]) + "\n\n")
        stream.write(
            "Queue blame report (DB: %s, full: %s, by_queue: %s)\n" %
            (self.ds_name, self._queueblame_full, self._queueblame_by_queue))
        stream.write(
            "If 'PREV' in accesses column, indicates queue was not accessed during this test and could indicate bleed between tests.\n"
        )
        if not self._queueblame_full and len(table) > 1:
            stream.write(
                "\n*** The following queues still have messages or consumers! ***\n"
            )
        stream.write("\n")
        stream.write("\n".join(fmt_out))
        stream.write("\n" + "=" * len(fmt_out[0]) + "\n")
        stream.write("%d shown of %d total\n" %
                     (len(table) - 1, self.total_count))
        stream.write("\n")
        if len(prev_fmt_out) > 0:
            stream.write(
                "\n\nThe following queues were accessed by the associated tests, inspect them for proper cleanup of subscribers!\n\n"
            )
            stream.write("\n".join(prev_fmt_out))
            stream.write("\n" + "=" * len(prev_fmt_out[0]) + "\n")
            stream.write("\n")
コード例 #28
0
 def begin(self):
     """Open the per-run queueblame datastore before the test session starts."""
     from pyon.public import CFG
     from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
     self.ds = CouchDataStore(datastore_name=self.ds_name, config=CFG)
コード例 #29
0
class ResourceRegistryStandalone(object):
    """
    Resource Registry service standalone class.

    Container-free facade over the Couch "resources" datastore supporting
    creation/reading of resource documents and associations.
    """

    def __init__(self, sysname=None, orgname=None, config=None):
        self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
        sysname = sysname or get_default_sysname()
        self.datastore_name = "resources"
        self.datastore = CouchDataStore(self.datastore_name,
                                        config=config,
                                        scope=sysname)
        try:
            # Probe for the design document; define the profile views only on
            # first use of this datastore.
            self.datastore.read_doc("_design/directory")
        except NotFound:
            self.datastore.define_profile_views("RESOURCES")

    def close(self):
        """Close the underlying datastore connection and drop the reference."""
        self.datastore.close()
        self.datastore = None

    def create(self, object=None, actor_id=None, lcstate=None):
        """
        Persist a resource object, stamping lcstate and timestamps.
        @param object    dict-like IonObject; must contain a "type_" key
        @param actor_id  if set and not 'anonymous', also creates a hasOwner association
        @param lcstate   initial lifecycle state; defaults to DEPLOYED_AVAILABLE
        @retval  the (id, rev) result of the datastore create
        @raises BadRequest  if object is missing or not an IonObject
        """
        if object is None:
            raise BadRequest("Object not present")
        if "type_" not in object:
            raise BadRequest("Object is not an IonObject")
        cur_time = get_ion_ts()
        object['lcstate'] = lcstate or "DEPLOYED_AVAILABLE"
        object['ts_created'] = cur_time
        object['ts_updated'] = cur_time
        new_res_id = create_unique_resource_id()
        # BUGFIX: the original unpacked into (res_id, rev) but then returned an
        # undefined name `res`, raising NameError on every call. Keep the full
        # result and unpack from it.
        res = self.datastore.create_doc(object, new_res_id)
        res_id, rev = res

        if actor_id and actor_id != 'anonymous':
            self.create_association(res_id, "hasOwner", actor_id)

        return res

    def create_mult(self, res_list, lcstate=None):
        """
        Persist multiple resource objects in one datastore access.
        @retval  list of (id, rev) tuples for the created documents
        """
        cur_time = get_ion_ts()
        for resobj in res_list:
            resobj['lcstate'] = lcstate or "DEPLOYED_AVAILABLE"
            resobj['ts_created'] = cur_time
            resobj['ts_updated'] = cur_time

        id_list = [create_unique_resource_id() for i in xrange(len(res_list))]
        res = self.datastore.create_doc_mult(res_list, id_list)
        res_list = [(rid, rrv) for success, rid, rrv in res]

        return res_list

    def read(self, object_id='', rev_id=''):
        """Read one resource document by id (and optional revision)."""
        if not object_id:
            raise BadRequest("The object_id parameter is an empty string")

        return self.datastore.read_doc(object_id, rev_id)

    def read_mult(self, object_ids=None):
        """Read multiple resource documents by id in one datastore access."""
        if not object_ids:
            raise BadRequest("The object_ids parameter is empty")
        return self.datastore.read_doc_mult(object_ids)

    def create_association(self,
                           subject=None,
                           predicate=None,
                           obj=None,
                           assoc_type='H2H'):
        """
        Create an association between two IonObjects with a given predicate.
        subject/obj may be ids (str) or already-read objects carrying _id/_rev.
        """
        if not subject or not predicate or not obj:
            raise BadRequest("Association must have all elements set")
        if type(subject) is str:
            subject_id = subject
            subject = self.read(subject_id)
        else:
            if "_id" not in subject or "_rev" not in subject:
                raise BadRequest("Subject id or rev not available")
            subject_id = subject._id
        st = type(subject).__name__

        if type(obj) is str:
            object_id = obj
            obj = self.read(object_id)
        else:
            if "_id" not in obj or "_rev" not in obj:
                raise BadRequest("Object id or rev not available")
            object_id = obj._id
        ot = type(obj).__name__

        assoc_type = assoc_type or 'H2H'
        if assoc_type not in AT:
            raise BadRequest("Unsupported assoc_type: %s" % assoc_type)

        # Check that subject and object type are permitted by association definition
        # Note: Need import here, so that import orders are not screwed up
        from pyon.core.registry import getextends
        from pyon.ion.resource import Predicates
        from pyon.core.bootstrap import IonObject

        assoc = dict(type_="Association",
                     at=assoc_type,
                     s=subject_id,
                     st=st,
                     srv=subject._rev,
                     p=predicate,
                     o=object_id,
                     ot=ot,
                     orv=obj._rev,
                     ts=get_ion_ts())
        return self.datastore.create_doc(assoc, create_unique_association_id())

    def find_by_type(self, restype, id_only=False, **kwargs):
        """
        Find resources of the given type via the resource/by_type view.
        @retval  list of ids (id_only=True) or documents
        """
        start_key = [restype]
        end_key = [restype]
        res = self.datastore.find_docs_by_view('resource',
                                               'by_type',
                                               start_key=start_key,
                                               end_key=end_key,
                                               id_only=id_only,
                                               **kwargs)

        if id_only:
            match = [docid for docid, indexkey, doc in res]
        else:
            match = [doc for docid, indexkey, doc in res]
        return match
コード例 #30
0
class DirectoryStandalone(object):
    """
    Directory service standalone class.

    Provides a hierarchical key/attributes directory persisted as DirEntry
    documents in the Couch "resources" datastore, without requiring the full
    pyon container.
    """

    def __init__(self, sysname=None, orgname=None, config=None):
        self.orgname = orgname or get_safe(config, 'system.root_org', 'ION')
        sysname = sysname or get_default_sysname()
        self.datastore_name = "resources"
        self.datastore = CouchDataStore(self.datastore_name, config=config, scope=sysname)
        try:
            # Probe for the design document; define the profile views on first use.
            self.datastore.read_doc("_design/directory")
        except NotFound:
            self.datastore.define_profile_views("RESOURCES")

    def close(self):
        """Close the underlying datastore connection and drop the reference."""
        self.datastore.close()
        self.datastore = None

    def _get_path(self, parent, key):
        """
        Returns the qualified directory path for a directory entry.
        """
        if parent == "/":
            return parent + key
        elif parent.startswith("/"):
            return parent + "/" + key
        else:
            raise BadRequest("Illegal parent: %s" % parent)

    def _get_key(self, path):
        """
        Returns the key from a qualified directory path
        """
        parent, key = path.rsplit("/", 1)
        return key

    def _read_by_path(self, path, orgname=None):
        """
        Given a qualified path, find entry in directory and return DirEntry
        document or None if not found
        @raises Inconsistent  if more than one entry matches the key
        """
        if path is None:
            raise BadRequest("Illegal arguments")
        orgname = orgname or self.orgname
        parent, key = path.rsplit("/", 1)
        parent = parent or "/"
        # View key is [org, key, parent] per the directory/by_key view
        find_key = [orgname, key, parent]
        view_res = self.datastore.find_docs_by_view('directory', 'by_key', key=find_key, id_only=True)

        match = [doc for docid, index, doc in view_res]
        if len(match) > 1:
            raise Inconsistent("More than one directory entry found for key %s" % path)
        elif match:
            return match[0]
        return None

    def lookup(self, parent, key=None, return_entry=False):
        """
        Read entry residing in directory at parent node level.
        @param return_entry  if True return the whole DirEntry doc, otherwise
                             just its attributes dict (or None if not found)
        """
        path = self._get_path(parent, key) if key else parent
        direntry = self._read_by_path(path)
        if return_entry:
            return direntry
        else:
            return direntry['attributes'] if direntry else None

    def register(self, parent, key, create_only=False, **kwargs):
        """
        Add/replace an entry within directory, below a parent node or "/".
        Note: Replaces (not merges) the attribute values of the entry if existing
        @retval  DirEntry if previously existing
        """
        if not (parent and key):
            raise BadRequest("Illegal arguments")
        if not type(parent) is str or not parent.startswith("/"):
            raise BadRequest("Illegal arguments: parent")

        dn = self._get_path(parent, key)

        entry_old = None
        direntry = self._read_by_path(dn)
        cur_time = get_ion_ts()

        if direntry and create_only:
            # We only wanted to make sure entry exists. Do not change
            return direntry
        elif direntry:
            # Change existing entry's attributes
            entry_old = direntry.get('attributes')
            direntry['attributes'] = kwargs
            direntry['ts_updated'] = cur_time
            self.datastore.update_doc(direntry)
        else:
            doc = self._create_dir_entry(object_id=create_unique_directory_id(), parent=parent, key=key,
                attributes=kwargs)
            self.datastore.create_doc(doc)

        return entry_old

    def register_mult(self, entries):
        """
        Registers multiple directory entries efficiently in one datastore access.
        Note: this fails if entries are currently existing, so works for create only.
        @param entries  list/tuple of (parent, key, attributes) tuples
        """
        if type(entries) not in (list, tuple):
            raise BadRequest("Bad entries type")
        de_list = []
        cur_time = get_ion_ts()
        for parent, key, attrs in entries:
            de = self._create_dir_entry(object_id=create_unique_directory_id(), parent=parent, key=key,
                attributes=attrs, ts_created=cur_time, ts_updated=cur_time)
            de_list.append(de)
        self.datastore.create_doc_mult(de_list)

    def _update_dir_entry(self, doc, parent, key, attributes=None, ts_updated=''):
        # Mutates and returns the given doc with new attributes and timestamp
        doc['attributes'] = attributes or {}
        doc['key'] = key
        doc['parent'] = parent
        doc['ts_updated'] = ts_updated or get_ion_ts()
        return doc

    def _create_dir_entry(self, object_id,  parent, key, attributes=None, ts_created='', ts_updated=''):
        # Builds a plain-dict DirEntry document for the datastore
        doc = {}
        doc['_id'] = object_id
        doc['type_'] = 'DirEntry'
        doc['attributes'] = attributes or {}
        doc['key'] = key
        doc['parent'] = parent
        doc['org'] = self.orgname
        doc['ts_created'] = ts_created or get_ion_ts()
        doc['ts_updated'] = ts_updated or get_ion_ts()
        return doc

    def find_child_entries(self, parent='/', direct_only=True, **kwargs):
        """
        Return all child entries (ordered by path) for the given parent path.
        Does not return the parent itself. Optionally returns child of child entries.
        Additional kwargs are applied to constrain the search results (limit, descending, skip).
        @param parent  Path to parent (must start with "/")
        @param direct_only  If False, includes child of child entries
        @retval  A list of DirEntry objects for the matches
        """
        if not type(parent) is str or not parent.startswith("/"):
            raise BadRequest("Illegal argument parent: %s" % parent)
        if direct_only:
            # NOTE(review): end_key [org, parent] collates BEFORE start_key
            # [org, parent, 0] under CouchDB array ordering — presumably the
            # datastore layer appends a high-sort suffix or sets descending;
            # confirm against find_docs_by_view.
            start_key = [self.orgname, parent, 0]
            end_key = [self.orgname, parent]
            res = self.datastore.find_docs_by_view('directory', 'by_parent',
                start_key=start_key, end_key=end_key, id_only=True, **kwargs)
        else:
            # by_path keys on the path split into components; "ZZZZZZ" acts as
            # a high-sorting sentinel to include all descendants in the range
            path = parent[1:].split("/")
            start_key = [self.orgname, path, 0]
            end_key = [self.orgname, list(path) + ["ZZZZZZ"]]
            res = self.datastore.find_docs_by_view('directory', 'by_path',
                start_key=start_key, end_key=end_key, id_only=True, **kwargs)

        match = [doc for docid, indexkey, doc in res]
        return match
コード例 #31
0
ファイル: queueblame_plugin.py プロジェクト: swarbhanu/pyon
class QueueBlame(Plugin):
    """
    Test-runner plugin that tracks which AMQP queues each test touches and
    reports leftover messages/consumers per test (presumably a nose plugin —
    the Plugin base class is imported elsewhere in this file).

    Queue-access records are read back from a throwaway Couch datastore whose
    name is advertised to the system under test via the QUEUE_BLAME env var;
    after each test, queue stats are sampled over a live broker connection and
    accumulated in self.queues_by_test for the final report.
    """
    name = 'queueblame'

    def __init__(self):
        Plugin.__init__(self)
        import uuid
        # Unique scratch datastore name per test run.
        self.ds_name = "queueblame-%s" % str(uuid.uuid4())[0:6]

        from collections import defaultdict
        # test id -> queue name -> stats dict (exists/msgs/consumers/accesses)
        self.queues_by_test = defaultdict(lambda: defaultdict(dict))

    def options(self, parser, env):
        """Register the --queueblame-* command line options."""
        super(QueueBlame, self).options(parser, env=env)

        parser.add_option('--queueblame-by-queue', action='store_true', dest='queueblame_by_queue', help='Show output by queue instead of by test', default=False)
        parser.add_option('--queueblame-full', action='store_true', dest='queueblame_full', help='Display ALL queues, not just queues with consumers/msgs')
        parser.add_option('--queueblame-no-trim', action='store_false', dest='queueblame_trim', help='Trim output so that repeated test names/queue names are omitted for brevity. Human readable but not easily machine readable.', default=True)

    def configure(self, options, conf):
        """Configure the plugin and system, based on selected options."""
        super(QueueBlame, self).configure(options, conf)

        self._queueblame_by_queue   = options.queueblame_by_queue
        self._queueblame_full       = options.queueblame_full
        self._queueblame_trim       = options.queueblame_trim

    def begin(self):
        """Create the scratch datastore used to collect queue accesses."""
        from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
        from pyon.public import CFG
        self.ds = CouchDataStore(datastore_name=self.ds_name, config=CFG)

    def finalize(self, result):
        """Drop and close the scratch datastore at the end of the run."""
        self.ds.delete_datastore()
        self.ds.close()

    def beforeTest(self, test):
        """Advertise datastore name + test id to the system under test."""
        import os
        os.environ['QUEUE_BLAME'] = "%s,%s" % (self.ds_name, test.id())

    def afterTest(self, test):
        """
        Record queue stats for this test, drain/check queues left over from
        previous tests (bleed), then reset the scratch datastore.
        """
        from pyon.net.transport import NameTrio, TransportError
        from pyon.net.channel import RecvChannel
        import os
        import sys

        # need a connection to node to get queue stats
        from pyon.net.messaging import make_node
        node, ioloop = make_node()

        os.environ.pop('QUEUE_BLAME')
        tid = test.id()

        # grab raw data from database
        obj_ids = self.ds.list_objects()
        objs = self.ds.read_doc_mult(obj_ids)

        for x in objs:
            queue = x['queue_name']

            if 'accesses' in self.queues_by_test[tid][queue]:
                self.queues_by_test[tid][queue]['accesses'] += 1
            else:
                # grab intel from channel
                ch = node.channel(RecvChannel)
                ch._recv_name = NameTrio(queue.split('.')[0], queue)

                try:
                    msgs, consumers = ch.get_stats()
                    exists = True
                    #print >>sys.stderr, "LOG ME", queue, msgs, consumers
                except TransportError:
                    # get_stats on a nonexistent queue raises; record as absent
                    msgs = 0
                    consumers = 0
                    exists = False
                finally:
                    ch.close()

                self.queues_by_test[tid][queue] = { 'exists': exists,
                                                    'msgs': msgs,
                                                    'consumers' : consumers,
                                                    'accesses' : 1 }

        # must also check all the queues from previous tests, to capture bleed
        # (loop var renamed from 'test', which shadowed the method parameter)
        bleed_queues = set()
        for other_test, testqueues in self.queues_by_test.iteritems():
            if other_test != tid:
                map(bleed_queues.add, testqueues.iterkeys())

        # don't test anything we already just tested
        bleed_queues.difference_update(self.queues_by_test[tid].iterkeys())

        for queue in bleed_queues:
            ch = node.channel(RecvChannel)
            ch._recv_name = NameTrio(queue.split('.')[0], queue)

            try:
                msgs, consumers = ch.get_stats()
                exists = True
            except TransportError:
                msgs = 0
                consumers = 0
                exists = False

            # drain the queue!
            if exists and msgs > 0 and consumers == 0:
                print >>sys.stderr, "DRAIN QUEUE:", queue
                ch.start_consume()
                for x in xrange(msgs):
                    m, h, d = ch.recv()
                    print >>sys.stderr, h
                    ch.ack(d)

            ch.close()


            self.queues_by_test[tid][queue] = { 'exists': exists,
                                                'msgs': msgs,
                                                'consumers': consumers,
                                                'accesses' : 0 }        # 0 is special here, indicates a bleed check

        # empty the database for next test use
        self.ds.delete_datastore()
        self.ds.create_datastore(create_indexes=False)

        node.stop_node()
        ioloop.join(timeout=5)

    def report(self, stream):
        """Format and write the accumulated per-test/per-queue stats."""

        # format report
        table = []
        self.total_count = 0

        def is_interesting(qd):
            """
            Helper method, returns if a row is interesting based on msgs, consumers and queueblame_full flag.
            """
            return self._queueblame_full or (not self._queueblame_full and (qd['msgs'] > 0 or qd['consumers'] > 0))

        def add_row(first, second, queuedict):
            """
            Can be called with queue/test or test/queue first, hence generic name.

            Returns bool indicated row was added or not.
            """
            self.total_count += 1
            if is_interesting(queuedict):
                acc = ' '
                if queuedict['accesses'] > 1:
                    acc = queuedict['accesses']
                elif queuedict['accesses'] == 0:
                    acc = 'PREV'
                exists = 'T' if queuedict['exists'] else 'F'
                table.append([str(x) for x in [first, second, acc, exists, queuedict['msgs'], queuedict['consumers']]])

                return True

            return False

        # create tests by queue, used in a few places below
        from collections import defaultdict
        tests_by_queue = defaultdict(list)

        for test, queues in self.queues_by_test.iteritems():
            for queue, queuedict in queues.iteritems():
                tests_by_queue[queue].append(dict(queuedict, test=test))

        # list of queues that are tagged as PREV, we keep tabs on it but only use it later if correct conditions
        prev_list = set()

        # build output table
        if not self._queueblame_by_queue:
            for test, queues in self.queues_by_test.iteritems():
                for queue, queuedict in queues.iteritems():
                    ret = add_row(test, queue, queuedict)
                    if ret and queuedict['accesses'] == 0:
                        prev_list.add(queue)
        else:
            for queue, calls in tests_by_queue.iteritems():
                for call in calls:
                    ret = add_row(queue, call['test'], call)
                    # BUG FIX: previously tested queuedict['accesses'] — a stale
                    # loop variable left over from building tests_by_queue above —
                    # instead of the current row's dict 'call'.
                    if ret and call['accesses'] == 0:
                        prev_list.add(queue)


        # generate prev_list table if it is interesting
        prev_list_table = []
        if not self._queueblame_full:
            for prev in prev_list:
                # cut down dict
                prev_accesses = [qd['test'] for qd in tests_by_queue[prev] if qd['accesses'] > 0]

                for pa in prev_accesses:
                    prev_list_table.append([prev, pa])

        # sort by first col
        table.sort(cmp=lambda x,y: cmp(x[0], y[0]))
        prev_list_table.sort(cmp=lambda x,y: cmp(x[0], y[0]))

        # do we trim?
        if self._queueblame_trim:
            last = ""
            for i, x in enumerate(table):
                if x[0] != last:
                    last = x[0]
                else:
                    table[i][0] = ""
            last = ""
            for i, x in enumerate(prev_list_table):
                if x[0] != last:
                    last = x[0]
                else:
                    prev_list_table[i][0] = ""

        if self._queueblame_by_queue:
            table.insert(0, ['Queue', 'Test', '# Acc >1', 'Ex?', '# Msgs', '# Cnsmrs'])
        else:
            table.insert(0, ['Test', 'Queue', '# Acc >1', 'Ex?', '# Msgs', '# Cnsmrs'])

        if len(prev_list_table) > 0:
            prev_list_table.insert(0, ['Queue', 'Test'])
            # format prev_table too
            widths = [max([len(row[x]) for row in prev_list_table]) for x in xrange(len(prev_list_table[0]))]
            prev_fmt_out = [" ".join([x.ljust(widths[i]) for i, x in enumerate(row)]) for row in prev_list_table]
            prev_fmt_out.insert(1, " ".join([''.ljust(widths[i], '=') for i in xrange(len(widths))]))
        else:
            prev_fmt_out = []

        # calculate widths
        widths = [max([len(row[x]) for row in table]) for x in xrange(len(table[0]))]
        fmt_out = [" ".join([x.ljust(widths[i]) for i, x in enumerate(row)]) for row in table]
        # insert col separation row
        fmt_out.insert(1, " ".join([''.ljust(widths[i], '=') for i in xrange(len(widths))]))

        stream.write("\n" + "=" * len(fmt_out[0]) + "\n\n")
        stream.write("Queue blame report (DB: %s, full: %s, by_queue: %s)\n" % (self.ds_name, self._queueblame_full, self._queueblame_by_queue))
        stream.write("If 'PREV' in accesses column, indicates queue was not accessed during this test and could indicate bleed between tests.\n")
        if not self._queueblame_full and len(table) > 1:
            stream.write("\n*** The following queues still have messages or consumers! ***\n")
        stream.write("\n")
        stream.write("\n".join(fmt_out))
        stream.write("\n" + "=" * len(fmt_out[0]) + "\n")
        stream.write("%d shown of %d total\n" % (len(table)-1, self.total_count))
        stream.write("\n")
        if len(prev_fmt_out) > 0:
            stream.write("\n\nThe following queues were accessed by the associated tests, inspect them for proper cleanup of subscribers!\n\n")
            stream.write("\n".join(prev_fmt_out))
            stream.write("\n" + "=" * len(prev_fmt_out[0]) + "\n")
            stream.write("\n")
Code example #32
0
File: queueblame_plugin.py  Project: swarbhanu/pyon
 def begin(self):
     """Create the per-run scratch datastore before any test executes."""
     from pyon.public import CFG
     from pyon.datastore.couchdb.couchdb_standalone import CouchDataStore
     self.ds = CouchDataStore(datastore_name=self.ds_name, config=CFG)
Code example #33
0
 def clear_datastore(self, ds_name=None, prefix=None):
     """
     Clears a datastore or a set of datastores of common prefix.

     @param ds_name  Name of a single datastore to delete (tried sysname-scoped
                     first, then unscoped).
     @param prefix   If no ds_name given, delete every datastore whose name
                     starts with this prefix (case-insensitive).
     """
     ds = CouchDataStore(config=self.config, scope=self.sysname)
     try:
         if ds_name:
             try:
                 ds.delete_datastore(ds_name)
             except NotFound:
                 ds1 = None
                 try:
                     # Try the unscoped version
                     ds1 = CouchDataStore(config=self.config)
                     ds1.delete_datastore(ds_name)
                 except NotFound:
                     pass
                 finally:
                     # BUG FIX: this fallback connection used to be leaked
                     if ds1 is not None:
                         ds1.close()
         elif prefix:
             prefix = prefix.lower()
             ds_noscope = CouchDataStore(config=self.config)
             try:
                 for dsn in ds_noscope.list_datastores():
                     if dsn.lower().startswith(prefix):
                         ds_noscope.delete_datastore(dsn)
             finally:
                 # BUG FIX: this connection used to be leaked
                 ds_noscope.close()
         else:
             log.warn(
                 "Cannot clear datastore without prefix or datastore name")
     finally:
         ds.close()