def test_lookup(self):
    """Check that lookup resolves builtins, functions, packages and modules."""
    # a builtin function must resolve to the very same object
    resolved_open = lookup('{0}.open'.format(open.__module__))
    self.assertTrue(resolved_open is open)
    # lookup must be able to resolve itself
    resolved_lookup = lookup('canopsis.common.utils.lookup')
    self.assertTrue(resolved_lookup is lookup)
    # a top-level package resolves by name
    pkg = lookup('canopsis')
    self.assertEqual(pkg.__name__, 'canopsis')
    # a dotted sub-module path resolves as well
    sub_module = lookup('canopsis.common')
    self.assertEqual(sub_module.__name__, 'canopsis.common')
def test_reciproc(self):
    """Ensure path() and lookup() are inverses of one another."""
    target = 'canopsis.common.utils.path'
    # resolving a path then taking its path yields the original string
    self.assertEqual(path(lookup(target)), target)
    # taking a path then resolving it yields the original object
    self.assertEqual(lookup(path(path)), path)
def fill(self, init=True):
    """Instantiate every migration tool then run init() or update() on each.

    :param bool init: when True run tool.init(), otherwise tool.update().
    """
    loaded = []
    for modpath in self.modules:
        try:
            cls = lookup(modpath)
        except ImportError as err:
            # a broken module is reported and skipped, not fatal
            self.logger.error(
                'Impossible to load module "{0}": {1}'.format(
                    modpath, err
                )
            )
        else:
            tool = cls()
            tool.logger.addHandler(self.loghandler)
            loaded.append(tool)
    # run the requested phase on every successfully loaded tool
    for tool in loaded:
        if init:
            tool.init()
        else:
            tool.update()
def get_task(self, _id): """ Get task registered at input _id. :param str _id: task identifier. :return: (task object, task info) related to input _id. :rtype: tuple :raises: KeyError if no task corresponds to _id. ImportError if task is impossible to load from runtime. """ # if task is not already registered if _id not in self.tasks: # if file exists in DB _file = self[TaskManager.FILE_STORAGE].get(_id=_id) if _file is not None: # copy _file in self task directory copy(_file, self.task_directory) # import and reload the module module = import_module(_id) reload(module) # if task info exists in DB task_info = self[TaskManager.STORAGE].get_elements(ids=_id) # raises automatically an ImportError if task is not in runtime task = lookup(_id) # save task and task info in self tasks self.tasks[_id] = task, task_info # throws automatically a KeyError if _id is not in self.tasks result = self.tasks[_id] return result
def set_task(self, _id, task=None, task_info=None, _file=None):
    """ Change of task.

    :param str _id: task id (also the importable module name of the task).
    :param callable task: task callable; resolved via lookup(_id) if None.
    :param dict task_info: task meta data to persist.
    :param str _file: optional path of the task source file to store.
    """
    # save task file if necessary
    if _file is not None:
        # in storage
        self[TaskManager.FILE_STORAGE].put(_id, _file)
        # and in directory
        copy(_file, self.task_directory)
        # BUG FIX: import/reload the module by its id, as get_task does;
        # import_module/reload were previously given the file path, which
        # is not an importable module name nor a module object.
        module = import_module(_id)
        reload(module)
    # BUG FIX: save task info in the task-info storage (get_task reads it
    # back from TaskManager.STORAGE, not FILE_STORAGE).
    self[TaskManager.STORAGE].put_element(_id=_id, element=task_info)
    # get task if None
    if task is None:
        task = lookup(_id)
    # save task in self tasks
    self.tasks[_id] = (task, task_info)
def fill(self, init=None, yes=False, reinit_auth=False):
    """Run every migration tool in init or update mode.

    :param init: None to auto-detect from the DB flag document, True to
        initialize, False to update.
    :param bool yes: assume-yes mode forwarded to each tool.
    :param bool reinit_auth: authorize re-initialization.
    """
    self.__put_canopsis_document()
    # instantiate every migration tool listed in self.modules
    tools = []
    for module in self.modules:
        try:
            migrationcls = lookup(module)
        except ImportError as err:
            self.logger.error(
                'Impossible to load module "{0}": {1}'.format(module, err))
            continue
        migrationtool = migrationcls()
        migrationtool.logger.addHandler(self.loghandler)
        tools.append(migrationtool)
    coll = None
    if init is None:
        # auto-detect mode: look for the initialization flag document
        store = MongoStore.get_default()
        store.authenticate()
        coll = MongoCollection(store.get_collection(self.FLAG_COLLECTION))
        data = coll.find_one({"_id": self.FLAG_COLLECTION})
        if data is None:
            print("Database not intialized. Initializing...")
            init = True
        else:
            print("Database already intialized. Updating...")
            init = False
    # NOTE(review): this branch looks unreachable — the block above always
    # assigns init when it was None, so the reinit_auth guard can never
    # trigger; confirm the intended condition.
    if init is None and reinit_auth is False:
        data = {
            "_id": "initialized",
            "at": str(time.strftime("%a, %d %b %Y %H:%M:%S +0000"))
        }
        print("The canopsis initialization flag did not exist in the "
              "database. So canopsinit will (re?)initialized the "
              "database. Meaning, it may delete some important data "
              "from canopsis database. If you still want to initialize "
              "the database, call the same command with the "
              "`--authorize-reinit` flag. Or if you do not want to "
              "initialize the database, add the document `{0}` in the {1} "
              "collections.".format(data, self.FLAG_COLLECTION))
        exit(1)
    for tool in tools:
        if init:
            tool.init(yes=yes)
        else:
            tool.update(yes=yes)
    if init is True:
        # record the flag so that subsequent runs update instead.
        # NOTE(review): coll is None when init=True was passed explicitly —
        # this insert would then fail; confirm callers always pass init=None.
        coll.insert({
            "_id": self.FLAG_COLLECTION,
            "at": str(time.strftime("%a, %d %b %Y %H:%M:%S +0000"))
        })
def _configure(self, parameters, error_parameters, *args, **kwargs):
    """Configure the fullscreen flag and the component parsers.

    :param dict parameters: parsed configuration parameters.
    :param dict error_parameters: parameters in error (unused here).
    """
    self.fullscreen = parameters.get(ConfigurationEditor.FULLSCREEN)
    for name in parameters:
        parameter = parameters[name]
        if name.startswith(Configurable.COMPONENT_PREFIX):
            # BUG FIX: keep the part AFTER the prefix as the parser name;
            # the old slice ([:len(prefix)]) kept the prefix itself, so
            # every component was registered under the same key.
            parser = name[len(Configurable.COMPONENT_PREFIX):]
            component = lookup(parameter)
            self.components_by_parsers[parser] = component
def new_element(**elt_properties):
    """Build a graph element instance from serialized properties.

    :param dict elt_properties: serialized elt properties.
    :return: new elt instance, or None when no class is given.
    """
    result = None
    cls_path = elt_properties[GraphElement._CLS]
    if cls_path is not None:
        # resolve the class path and delegate construction to its new()
        elt_cls = lookup(cls_path)
        result = elt_cls.new(**elt_properties)
    return result
def receiver_and_callback(receiver):
    """ Return a tuple of receiver name and callback.

    :return: the tuple (receiver name, callback)
    :rtype: tuple
    """
    # without a separator, the whole string is the name and there is
    # no callback
    if SR_SEPARATOR not in receiver:
        return receiver, None
    # otherwise resolve the callback part through lookup
    parts = receiver.split(SR_SEPARATOR)
    return parts[0], lookup(parts[1])
def get_driver(path):
    """Resolve and memoize a conf driver from its python path.

    :param str path: driver path to add. Must be a full path from a known
        package/module.
    """
    # drivers already resolved are memoized in _MANAGERS
    result = ConfigurationDriver._MANAGERS.get(path)
    if result is None:
        # first request for this path: resolve then memoize
        result = ConfigurationDriver._MANAGERS[path] = lookup(path)
    return result
def __setitem__(self, name, value):
    """ Register a configurable under ``name``.

    :param name: new configurable name
    :type name: str
    :param value: new configurable value
    :type value: str (path) or class or instance
    """
    # expected type for this name (Configurable by default)
    expected = self.registry._configurable_types.get(name, Configurable)
    item = value
    # a string value is a python path to resolve
    if isinstance(item, basestring):
        item = lookup(item)
    # a class of the expected type is instantiated without arguments
    if isclass(item) and issubclass(item, expected):
        item = item()
    if isinstance(item, expected):
        # record the configurable instance
        super(Configurables, self).__setitem__(name, item)
    else:
        # refuse anything which is not an instance of the expected type
        self.registry.logger.error(
            "Impossible to set configurable {}:{}. Not an instance of {}"
            .format(name, item, expected)
        )
def __setitem__(self, name, value):
    """ Register a configurable type under ``name``.

    :param name: new configurable name.
    :type name: str
    :param value: new type value.
    :type value: str (path) or class
    """
    new_type = value
    # a string value is a python path designating the class
    if isinstance(new_type, basestring):
        new_type = lookup(new_type)
    # only subclasses of Configurable are accepted
    if not issubclass(new_type, Configurable):
        self.registry.logger.error(
            "Impossible to set configurable type {}: {}. Wrong type"
            .format(name, new_type)
        )
    else:
        registered = self.registry._configurables
        # a previously registered configurable which does not match the
        # new type is warned about and dropped
        if name in registered and not isinstance(registered[name], new_type):
            self.registry.logger.warning(
                "Old configurable {} removed. Not an instance of {}"
                .format(name, new_type)
            )
            del registered[name]
        # record the new type
        super(ConfigurableTypes, self).__setitem__(name, new_type)
def new(**elt_properties):
    """Instantiate a graph element from serialized properties.

    :param dict elt_properties: serialized elt properties.
    :return: new elt instance.
    :raises GraphElement.Error: if no element class is given.
    """
    # a missing class key is a caller error
    if GraphElement._CLS not in elt_properties:
        raise GraphElement.Error(
            "Graph element class is not given in {0}".format(
                elt_properties
            )
        )
    # resolve the class and build the element
    elt_cls = lookup(elt_properties[GraphElement._CLS])
    return elt_cls(**elt_properties)
def get_task(_id, cache=True):
    """ Get task related to an id which could be:

    - a registered task id.
    - a python path to a function.

    :param str _id: task id to get.
    :param bool cache: use cache system to quick access to task
        (True by default).
    :raises ImportError: if task is not found in runtime.
    """
    # registered tasks win over python-path resolution
    try:
        return __TASKS_BY_ID[_id]
    except KeyError:
        return lookup(path=_id, cached=cache)
def state_from_sources(event, vertice, ctx, f, manager=None, *args, **kwargs):
    """ Change ctx vertice state which equals to f result on source nodes.

    :param dict event: processed event.
    :param vertice: vertice whose state must be updated.
    :param dict ctx: rule context; may cache sources under SOURCES_BY_EDGES.
    :param f: aggregation function (or its python path) applied to source
        node states.
    """
    # resolve f when it is given as a python path
    f = lookup(f) if isinstance(f, basestring) else f
    # fall back on the default topology manager
    manager = tm if manager is None else manager
    # reuse sources cached in ctx, otherwise fetch them from the topology
    if SOURCES_BY_EDGES in ctx:
        sources_by_edges = ctx[SOURCES_BY_EDGES]
    else:
        sources_by_edges = manager.get_sources(ids=vertice.id, add_edges=True)
    # nothing to do without sources
    if sources_by_edges:
        # gather every source node over all edges
        sources = []
        for edge_entry in sources_by_edges.values():
            sources += edge_entry[1]
        # aggregate source states with f; OK when there is no source at all
        if sources:
            state = f(node.state for node in sources)
        else:
            state = Check.OK
        # apply the computed state on the vertice
        change_state(
            state=state, event=event, vertice=vertice, ctx=ctx,
            *args, **kwargs
        )
def get_configurable(configurable, *args, **kwargs):
    """Get a configurable instance from a configurable class/path/instance
    and args, kwargs, None otherwise.

    :param configurable: configurable path, class or instance
    :type configurable: str, class or Configurable
    :return: configurable instance or None if input configurable can not be
        solved such as a configurable.
    """
    # local import keeps the fix self-contained
    from inspect import isclass

    result = configurable
    # a string is a python path to resolve
    if isinstance(configurable, basestring):
        result = lookup(configurable)
    # BUG FIX: issubclass raises TypeError when given an instance, so only
    # call it on classes; instances fall through to the isinstance check.
    if isclass(result) and issubclass(result, Configurable):
        result = result(*args, **kwargs)
    # anything which is not a Configurable instance resolves to None
    if not isinstance(result, Configurable):
        result = None
    return result
def setUp(self):
    """initialize storages"""
    # storages under test, built from the test configuration class
    self.storages = []
    testconf = self._testconfcls()
    if testconf.storages:
        # explicit storage classes are listed: instantiate each of them
        for storage in testconf.storages:
            storagecls = lookup(storage)
            # NOTE(review): data_scope is not bound in this branch (only
            # the else branch defines it), so this raises NameError —
            # confirm the intended value (per-storage scope?).
            storage = storagecls(
                data_scope=data_scope,
                conf_paths=testconf.conf_paths,
                **testconf.params
            )
            self.storages.append(storage)
    else:
        # otherwise build one middleware per (protocol, data_type, scope)
        for protocol in testconf.protocols:
            for data_type in testconf.data_types:
                for data_scope in testconf.data_scopes:
                    storage = Storage.get_middleware(
                        protocol=protocol,
                        data_type=data_type,
                        data_scope=data_scope,
                        conf_paths=testconf.conf_paths,
                        **testconf.params
                    )
                    self.storages.append(storage)
def run(self, scenarios=None, *args, **kwargs):
    """ Run input scenario with self in parameter

    :param scenarios: scenarios to run
    :type scenarios: list of {str, callable}
    :return: a dictionary of scenarios result by scenario entry
    :rtype: dict(scenario, result)
    """
    if scenarios is None:
        scenarios = self.scenarios.split(',')
    # resolve every string entry to a callable before running anything
    runnables = [
        lookup(entry) if isinstance(entry, basestring) else entry
        for entry in scenarios
    ]
    # results are keyed by the original entry (path or callable)
    result = {}
    for entry, runnable in zip(scenarios, runnables):
        result[entry] = runnable(self, *args, **kwargs)
    return result
def path(value):
    """Resolve ``value`` through lookup and return the designated object."""
    resolved = lookup(value)
    return resolved
def state( self, ids=None, state=None, criticity=HARD, f=DEFAULT_F, query=None, cache=False ): """Get/update entity state(s). :param ids: entity id(s). Default is all entity ids. :type ids: str or list :param int state: state to update if not None. :param int criticity: state criticity level (HARD by default). :param f: new state calculation function if state is not None. :param dict query: additional query to use in order to find states. :param bool cache: storage cache when udpate state. :return: entity states by entity id or one state value if ids is a str. None if ids is a str, related entity does not exists and no update is required. :rtype: int or dict """ # default result is None result = {} # get state document state_documents = self[CheckManager.CHECK_STORAGE].get_elements( ids=ids, query=query ) # if state document exists if state_documents is not None: # ensure state_documents is a list if isinstance(state_documents, dict): state_documents = [state_documents] # save id and state field name id_field, state_field = CheckManager.ID, CheckManager.STATE # result is a dictionary of entity id, state value result = {} for state_document in state_documents: entity_id = state_document[id_field] entity_state = state_document[state_field] result[entity_id] = entity_state # if state has to be updated if state is not None: # get the right state function f = lookup(f) if isinstance(f, basestring) else f # save field name for quick access id_name = CheckManager.ID state_name = CheckManager.STATE # save storage for quick access storage = self[CheckManager.CHECK_STORAGE] # ensure entity_ids is a set if isinstance(ids, basestring): entity_ids = set([ids]) elif ids is None: if state_documents is None: entity_ids = set() else: entity_ids = set([sd[id_name] for sd in state_documents]) else: entity_ids = set(ids) # if states exist in DB if state_documents is not None: # for all found documents for state_document in state_documents: # get document id _id = 
state_document[id_name] # remove _id from entity_ids entity_ids.remove(_id) # get new state with f new_state_document = f( state_document=state_document, state=state, criticity=criticity ) # save new state_document if old != new if state_document != new_state_document: storage.put_element( _id=_id, element=new_state_document, cache=cache ) # save state entity in result result[_id] = new_state_document[state_name] # for all not found documents for entity_id in entity_ids: # create a new document state_document = { id_name: entity_id, } new_state_document = f( state_document=state_document, state=state, criticity=criticity ) # save it in storage storage.put_element( _id=entity_id, element=new_state_document, cache=cache ) # and put entity state in the result result[entity_id] = state # ensure result is a state if ids is a basestring if result is not None and isinstance(ids, basestring): result = result[ids] if result else None return result
def setup(add_etc=True, **kwargs):
    """ Setup dedicated to canolibs projects.

    :param add_etc: add automatically etc files (default True)
    :type add_etc: bool
    :param kwargs: enrich setuptools.setup method
    """
    # get setup path which corresponds to first python argument
    filename = argv[0]
    _path = dirname(abspath(expanduser(filename)))
    name = basename(_path)
    # add path to python path
    path.append(_path)
    # extend canopsis path with new sub modules and packages
    # canopsis.__path__ = extend_path(canopsis.__path__, canopsis.__name__)
    # get package
    package = lookup("canopsis.{0}".format(name))
    # set default parameters if not setted
    kwargs.setdefault('name', package.__name__)
    kwargs.setdefault('author', AUTHOR)
    kwargs.setdefault('author_email', AUTHOR_EMAIL)
    kwargs.setdefault('license', LICENSE)
    kwargs.setdefault('zip_safe', ZIP_SAFE)
    kwargs.setdefault('url', URL)
    kwargs.setdefault('package_dir', {'': _path})
    kwargs.setdefault('keywords', kwargs.get('keywords', '') + KEYWORDS)
    # set version from the package, falling back on the project default
    version = getattr(package, '__version__', DEFAULT_VERSION)
    if version is not None:
        kwargs.setdefault('version', version)
    if '--no-conf' not in argv:
        # add etc content if exist and if --no-conf
        if add_etc:
            etc_path = join(_path, 'etc')
            if exists(etc_path):
                data_files = kwargs.get('data_files', [])
                # install target honours the CPS_PREFIX environment variable
                target = getenv('CPS_PREFIX', join(sys_prefix, 'etc'))
                for root, dirs, files in walk(etc_path):
                    files_to_copy = [join(root, _file) for _file in files]
                    final_target = join(target, root[len(etc_path) + 1:])
                    data_files.append((final_target, files_to_copy))
                kwargs['data_files'] = data_files
    else:
        # consume the flag so setuptools does not see it
        argv.remove('--no-conf')
    # add scripts if exist
    if 'scripts' not in kwargs:
        scripts_path = join(_path, 'scripts')
        if exists(scripts_path):
            scripts = []
            for root, dirs, files in walk(scripts_path):
                for _file in files:
                    scripts.append(join(root, _file))
            kwargs['scripts'] = scripts
    # add packages
    if 'packages' not in kwargs:
        packages = find_packages(where=_path, exclude=TEST_FOLDERS)
        kwargs['packages'] = packages
    # add description
    if 'long_description' not in kwargs:
        readme_path = join(_path, 'README')
        if exists(readme_path):
            with open(join(_path, 'README')) as f:
                kwargs['long_description'] = f.read()
    # add test: the first existing test folder becomes the test suite
    if 'test_suite' not in kwargs:
        test_folders = \
            [folder for folder in TEST_FOLDERS
                if exists(join(_path, folder))]
        if test_folders:
            for test_folder in test_folders:
                kwargs['test_suite'] = test_folder
                break
    _setup(**kwargs)
def state(self, ids=None, state=None, criticity=HARD, f=DEFAULT_F, query=None, cache=False): """Get/update entity state(s). :param ids: entity id(s). Default is all entity ids. :type ids: str or list :param int state: state to update if not None. :param int criticity: state criticity level (HARD by default). :param f: new state calculation function if state is not None. :param dict query: additional query to use in order to find states. :param bool cache: storage cache when udpate state. :return: entity states by entity id or one state value if ids is a str. None if ids is a str, related entity does not exists and no update is required. :rtype: int or dict """ # default result is None result = {} # get state document state_documents = self.check_storage.get_elements(ids=ids, query=query) # if state document exists if state_documents is not None: # ensure state_documents is a list if isinstance(state_documents, dict): state_documents = [state_documents] # save id and state field name id_field, state_field = CheckManager.ID, CheckManager.STATE # result is a dictionary of entity id, state value result = {} for state_document in state_documents: entity_id = state_document[id_field] entity_state = state_document[state_field] result[entity_id] = entity_state # if state has to be updated if state is not None: # get the right state function f = lookup(f) if isinstance(f, basestring) else f # save field name for quick access id_name = CheckManager.ID state_name = CheckManager.STATE # ensure entity_ids is a set if isinstance(ids, basestring): entity_ids = set([ids]) elif ids is None: if state_documents is None: entity_ids = set() else: entity_ids = set([sd[id_name] for sd in state_documents]) else: entity_ids = set(ids) # if states exist in DB if state_documents is not None: # for all found documents for state_document in state_documents: # get document id _id = state_document[id_name] # remove _id from entity_ids entity_ids.remove(_id) # get new state with f 
new_state_document = f(state_document=state_document, state=state, criticity=criticity) # save new state_document if old != new if state_document != new_state_document: self.check_storage.put_element( _id=_id, element=new_state_document, cache=cache) # save state entity in result result[_id] = new_state_document[state_name] # for all not found documents for entity_id in entity_ids: # create a new document state_document = { id_name: entity_id, } new_state_document = f(state_document=state_document, state=state, criticity=criticity) # save it in storage self.check_storage.put_element(_id=entity_id, element=new_state_document, cache=cache) # and put entity state in the result result[entity_id] = state # ensure result is a state if ids is a basestring if result is not None and isinstance(ids, basestring): result = result[ids] if result else None return result
def at_least(
    event, ctx, vertice, state=Check.OK, min_weight=1, rrule=None, f=None,
    manager=None, edge_types=None, edge_data=None, edge_query=None,
    **kwargs
):
    """ Generic condition applied on sources of vertice which check if at
    least source nodes check a condition.

    :param dict event: processed event.
    :param dict ctx: rule context which must contain rule vertice.
    :param TopoNode vertice: vertice to check.
    :param int state: state to check among sources nodes.
    :param float min_weight: minimal weight (default 1) to reach in order
        to validate this condition. If None, condition results in checking
        all sources.
    :param rrule rrule: rrule to consider in order to check condition in
        time.
    :param f: function to apply on source vertice state. If None, use
        equality between input state and source vertice state.
    :param edge_ids: edge from where find target/source vertices.
    :type edge_ids: list or str
    :param edge_types: edge types from where find target/source vertices.
    :type edge_types: list or str
    :param dict edge_query: additional edge query.
    :return: True if condition is checked among source nodes.
    :rtype: bool
    """
    result = False
    if manager is None:
        manager = singleton_per_scope(TopologyManager)
    # ensure min_weight is exclusively a float or None
    if min_weight:
        min_weight = float(min_weight)
    elif min_weight != 0:
        min_weight = None
    sources_by_edges = manager.get_sources(
        ids=vertice.id, add_edges=True, edge_types=edge_types,
        edge_data=edge_data, edge_query=edge_query
    )
    if sources_by_edges and min_weight is None:
        # if edges & checking all nodes is required, result is True by
        # default
        result = True
    if isinstance(f, basestring):
        f = lookup(f)
    # for all edges
    for edge_id in sources_by_edges:
        # get edge and sources
        edge, sources = sources_by_edges[edge_id]
        # get edge_weight which is 1 by default
        for source in sources:
            source_state = source.state
            if source_state == state if f is None else f(source_state):
                # if min_weight is not None
                if min_weight is not None:
                    # remove edge_weight from result
                    min_weight -= edge.weight
                    if min_weight <= 0:
                        # if min_weight is negative, ends loop
                        result = True
                        # NOTE(review): this break exits only the inner
                        # sources loop, not the edges loop — confirm this
                        # is intended.
                        break
            elif min_weight is None:
                # stop if condition is not checked and min_weight is None
                result = False
                break
    # if result, save source_nodes in ctx in order to save read data from db
    if result:
        ctx[SOURCES_BY_EDGES] = sources_by_edges
    return result
def fill(self, init=None, yes=False, reinit_auth=False):
    """Run every migration tool in init or update mode.

    :param init: None to auto-detect from the DB flag document, True to
        initialize, False to update.
    :param bool yes: assume-yes mode forwarded to each tool.
    :param bool reinit_auth: authorize re-initialization.
    """
    self.__put_canopsis_version_document()
    # instantiate every migration tool listed in self.modules
    tools = []
    for module in self.modules:
        try:
            migrationcls = lookup(module)
        except ImportError as err:
            self.logger.error(
                'Impossible to load module "{0}": {1}'.format(
                    module, err
                )
            )
            continue
        migrationtool = migrationcls()
        migrationtool.logger.addHandler(self.loghandler)
        tools.append(migrationtool)
    coll = None
    if init is None:
        # auto-detect mode: look for the initialization flag document
        store = MongoStore.get_default()
        store.authenticate()
        coll = MongoCollection(store.get_collection(self.FLAG_COLLECTION))
        data = coll.find_one({"_id": self.FLAG_COLLECTION})
        if data is None:
            print("Database not intialized. Initializing...")
            init = True
        else:
            print("Database already intialized. Updating...")
            init = False
    # NOTE(review): this branch looks unreachable — the block above always
    # assigns init when it was None, so the reinit_auth guard can never
    # trigger; confirm the intended condition.
    if init is None and reinit_auth is False:
        data = {
            "_id": "initialized",
            "at": str(time.strftime("%a, %d %b %Y %H:%M:%S +0000"))
        }
        print("The canopsis initialization flag did not exist in the "
              "database. So canopsinit will (re?)initialized the "
              "database. Meaning, it may delete some important data "
              "from canopsis database. If you still want to initialize "
              "the database, call the same command with the "
              "`--authorize-reinit` flag. Or if you do not want to "
              "initialize the database, add the document `{0}` in the {1} "
              "collections.".format(data, self.FLAG_COLLECTION))
        exit(1)
    for tool in tools:
        if init:
            tool.init(yes=yes)
        else:
            tool.update(yes=yes)
    if init is True:
        # record the flag so that subsequent runs update instead.
        # NOTE(review): coll is None when init=True was passed explicitly —
        # this insert would then fail; confirm callers always pass init=None.
        coll.insert({"_id": self.FLAG_COLLECTION, "at": str(time.strftime(
            "%a, %d %b %Y %H:%M:%S +0000"))})