def fetch_c_plugins_installed(cls, plugin_type, is_config):
    libs = utils.find_c_plugin_libs(plugin_type)
    configs = []
    for name, _type in libs:
        try:
            if _type == 'binary':
                jdoc = utils.get_plugin_info(name, dir=plugin_type)
                if jdoc:
                    if 'flag' in jdoc:
                        if common_utils.bit_at_given_position_set_or_unset(
                                jdoc['flag'], common_utils.DEPRECATED_BIT_POSITION):
                            raise DeprecationWarning
                    plugin_config = {
                        'name': name,
                        'type': plugin_type,
                        'description': jdoc['config']['plugin']['description'],
                        'version': jdoc['version']
                    }
                    if is_config:
                        plugin_config.update({'config': jdoc['config']})
                    configs.append(plugin_config)
            else:
                # for c-hybrid plugin
                hybrid_plugin_config = common.load_and_fetch_c_hybrid_plugin_info(name, is_config)
                if hybrid_plugin_config:
                    configs.append(hybrid_plugin_config)
        except DeprecationWarning:
            _logger.warning('"{}" plugin is deprecated'.format(name))
        except Exception as ex:
            _logger.exception(ex)
    return configs

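
# A minimal sketch of the deprecation check used above, assuming that
# common_utils.bit_at_given_position_set_or_unset(n, k) simply tests whether bit k
# (0-based) of the integer flag n is set, and that DEPRECATED_BIT_POSITION names
# that bit. The bit index below and the helper name are illustrative only; the real
# helpers in common_utils may differ.
DEPRECATED_BIT_POSITION_EXAMPLE = 7  # hypothetical bit index, for illustration only

def bit_is_set(flag: int, position: int) -> bool:
    """Return True if the bit at 'position' (0-based) is set in 'flag'."""
    return (flag >> position) & 1 == 1

# Usage, mirroring the check in fetch_c_plugins_installed():
# if bit_is_set(jdoc['flag'], DEPRECATED_BIT_POSITION_EXAMPLE):
#     raise DeprecationWarning
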
def test_get_plugin_info(self, mock_subproc_popen):
    with patch.object(utils, '_find_c_util', return_value=['']) as patch_util:
        with patch.object(utils, '_find_c_lib', return_value=['']) as patch_lib:
            process_mock = MagicMock()
            attrs = {
                'communicate.return_value': (b'{"name": "Random", "version": "1.0.0", "type": "south", "interface": "1.0.0", "config": {"plugin" : { "description" : "Random C south plugin", "type" : "string", "default" : "Random" }, "asset" : { "description" : "Asset name", "type" : "string", "default" : "Random" } } }\n', 'error')
            }
            process_mock.configure_mock(**attrs)
            mock_subproc_popen.return_value = process_mock
            j = utils.get_plugin_info('Random', dir='south')
            assert {'name': 'Random',
                    'type': 'south',
                    'version': '1.0.0',
                    'interface': '1.0.0',
                    'config': {
                        'plugin': {'description': 'Random C south plugin', 'type': 'string', 'default': 'Random'},
                        'asset': {'description': 'Asset name', 'type': 'string', 'default': 'Random'}
                    }} == j
        patch_lib.assert_called_once_with('Random', 'south')
    patch_util.assert_called_once_with('get_plugin_info')

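
# Based on the mocks in test_get_plugin_info above (a patched subprocess.Popen whose
# communicate() returns the plugin metadata as JSON on stdout), get_plugin_info()
# presumably shells out to a small C utility and parses its output. A minimal sketch
# under that assumption; the real utility's path resolution, argument order and error
# handling are not shown and may differ.
import json
import subprocess

def get_plugin_info_sketch(util_path: str, lib_path: str) -> dict:
    # Run the helper binary against the plugin library and parse its JSON output
    p = subprocess.Popen([util_path, lib_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _ = p.communicate()
    return json.loads(out.decode())
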
def load_and_fetch_c_hybrid_plugin_info(plugin_name: str, is_config: bool, plugin_type='south') -> Dict:
    plugin_info = None
    if plugin_type == 'south':
        config_items = ['default', 'type', 'description']
        optional_items = ['readonly', 'order', 'length', 'maximum', 'minimum', 'rule', 'deprecated',
                          'displayName', 'options']
        config_items.extend(optional_items)
        plugin_dir = _FLEDGE_ROOT + '/' + 'plugins' + '/' + plugin_type
        if _FLEDGE_PLUGIN_PATH:
            plugin_paths = _FLEDGE_PLUGIN_PATH.split(";")
            for pp in plugin_paths:
                if os.path.isdir(pp):
                    plugin_dir = pp + '/' + plugin_type
                    if not os.path.isdir(plugin_dir + '/' + plugin_name):
                        plugin_dir = _FLEDGE_ROOT + '/' + 'plugins' + '/' + plugin_type
        file_name = plugin_dir + '/' + plugin_name + '/' + plugin_name + '.json'
        with open(file_name) as f:
            data = json.load(f)
            json_file_keys = ('connection', 'name', 'defaults', 'description')
            if all(k in data for k in json_file_keys):
                connection_name = data['connection']
                # The connection (base) plugin may live either under the default plugins directory
                # or alongside the hybrid plugin directory
                if os.path.isdir(_FLEDGE_ROOT + '/' + 'plugins' + '/' + plugin_type + '/' + connection_name) or \
                        os.path.isdir(plugin_dir + '/' + connection_name):
                    jdoc = utils.get_plugin_info(connection_name, dir=plugin_type)
                    if jdoc:
                        plugin_info = {'name': plugin_name, 'type': plugin_type,
                                       'description': data['description'], 'version': jdoc['version']}
                        keys_a = set(jdoc['config'].keys())
                        keys_b = set(data['defaults'].keys())
                        intersection = keys_a & keys_b
                        # Merge the connection plugin's config with the overrides defined under 'defaults'
                        # in the hybrid plugin json file, for the intersecting config keys.
                        # Use the hybrid plugin name and description defined in the json file.
                        temp = jdoc['config']
                        temp['plugin']['default'] = plugin_name
                        temp['plugin']['description'] = data['description']
                        for _key in intersection:
                            config_item_keys = set(data['defaults'][_key].keys())
                            for _config_key in config_item_keys:
                                if _config_key in config_items:
                                    if temp[_key]['type'] == 'JSON' and _config_key == 'default':
                                        temp[_key][_config_key] = json.dumps(data['defaults'][_key][_config_key])
                                    elif temp[_key]['type'] == 'enumeration' and _config_key == 'default':
                                        temp[_key][_config_key] = data['defaults'][_key][_config_key]
                                    else:
                                        temp[_key][_config_key] = str(data['defaults'][_key][_config_key])
                        if is_config:
                            plugin_info.update({'config': temp})
                    else:
                        _logger.warning("Connection plugin '{}' is not installed; it is required for hybrid plugin '{}'"
                                        .format(connection_name, plugin_name))
                else:
                    _logger.warning("Connection plugin '{}' is not installed; it is required for hybrid plugin '{}'"
                                    .format(connection_name, plugin_name))
            else:
                raise Exception('Required keys {} are missing in the json file'.format(json_file_keys))
    return plugin_info

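
# For reference, a hybrid plugin json file of the shape load_and_fetch_c_hybrid_plugin_info()
# expects carries the four required keys and per-item overrides under 'defaults'. The plugin
# and config item names below are made up purely for illustration.
EXAMPLE_HYBRID_PLUGIN_JSON = {
    "connection": "modbus",                      # name of the installed C connection plugin (hypothetical)
    "name": "modbus-device-x",                   # name of this hybrid plugin (hypothetical)
    "description": "Preconfigured Modbus plugin for device X",
    "defaults": {
        "address": {"default": "192.168.1.10"},  # overrides the connection plugin's 'address' default
        "map": {"default": {"values": []}}       # JSON-typed items are re-serialised with json.dumps()
    }
}
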
def load_c_plugin(plugin: str, service_type: str) -> Dict:
    plugin_config = None
    try:
        plugin_info = apiutils.get_plugin_info(plugin, dir=service_type)
        if plugin_info['type'] != service_type:
            msg = "Plugin of {} type is not supported".format(plugin_info['type'])
            raise TypeError(msg)
        plugin_config = plugin_info['config']
    except Exception:
        # Not a plain C plugin; now look for a hybrid plugin of the same name, if it exists
        plugin_info = common.load_and_fetch_c_hybrid_plugin_info(plugin, True)
        if plugin_info:
            plugin_config = plugin_info['config']
    return plugin_config

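
# Usage sketch for load_c_plugin(): the caller passes the plugin name and the service type
# (e.g. 'south' or 'north'); a plain C plugin is tried first and a C hybrid plugin is used
# as the fallback, with None returned if neither is found. 'sinusoid' below is only an
# illustrative plugin name.
# config = load_c_plugin('sinusoid', 'south')
# if config is None:
#     raise ValueError("Plugin 'sinusoid' is neither an installed C plugin nor a C hybrid plugin")
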
def test_get_plugin_info_exception(self):
    plugin_name = 'Random'
    with patch.object(utils, '_find_c_util', return_value=None) as patch_util:
        with patch.object(utils, '_find_c_lib', return_value=None) as patch_lib:
            with patch.object(utils.subprocess, "Popen", side_effect=Exception):
                with patch.object(utils._logger, 'exception') as patch_logger:
                    assert {} == utils.get_plugin_info(plugin_name, dir='south')
                    assert 1 == patch_logger.call_count
            patch_lib.assert_called_once_with(plugin_name, 'south')
        patch_util.assert_called_once_with('get_plugin_info')

async def create_filter(request: web.Request) -> web.Response:
    """
    Create a new filter with a specific plugin

    :Example:
     curl -X POST http://localhost:8081/fledge/filter -d '{"name": "North_Readings_to_PI_scale_stage_1Filter", "plugin": "scale"}'
     curl -X POST http://localhost:8081/fledge/filter -d '{"name": "North_Readings_to_PI_scale_stage_1Filter", "plugin": "scale", "filter_config": {"offset":"1","enable":"true"}}'

    'name' is the filter name
    'plugin' is the filter plugin name
    'filter_config' is the new configuration of the plugin, part or full, should we desire to
    modify the config at creation time itself

    The plugin is loaded and the default config from 'plugin_info' is fetched.

    A new config category 'name' is created; its items are:
       - 'plugin'
       - all items from the default plugin config

    NOTE: The 'create_category' call is made with keep_original_items = True
    """
    try:
        data = await request.json()
        filter_name = data.get('name', None)
        plugin_name = data.get('plugin', None)
        filter_config = data.get('filter_config', {})
        if not filter_name or not plugin_name:
            raise TypeError('Filter name, plugin name are mandatory.')

        storage = connect.get_storage_async()
        cf_mgr = ConfigurationManager(storage)

        # Check first whether the filter already exists
        category_info = await cf_mgr.get_category_all_items(category_name=filter_name)
        if category_info is not None:
            raise ValueError("This '{}' filter already exists".format(filter_name))

        # Load C/Python filter plugin info
        # loaded_plugin_info = apiutils.get_plugin_info(plugin_name, dir='filter')
        try:
            # Try fetching Python filter
            plugin_module_path = "{}/python/fledge/plugins/filter/{}".format(_FLEDGE_ROOT, plugin_name)
            loaded_plugin_info = common.load_and_fetch_python_plugin_info(plugin_module_path, plugin_name, "filter")
        except FileNotFoundError as ex:
            # Load C filter plugin
            loaded_plugin_info = apiutils.get_plugin_info(plugin_name, dir='filter')

        if not loaded_plugin_info or 'config' not in loaded_plugin_info:
            message = "Can not get 'plugin_info' detail from plugin '{}'".format(plugin_name)
            raise ValueError(message)

        # Sanity checks
        plugin_config = loaded_plugin_info['config']
        loaded_plugin_type = loaded_plugin_info['type']
        loaded_plugin_name = plugin_config['plugin']['default']
        if plugin_name != loaded_plugin_name or loaded_plugin_type != 'filter':
            raise ValueError("Loaded plugin '{}', type '{}', doesn't match the specified one '{}', type 'filter'"
                             .format(loaded_plugin_name, loaded_plugin_type, plugin_name))

        # Set dict value for 'default' if type is JSON. This is required by the configuration manager
        for key, value in plugin_config.items():
            if value['type'] == 'JSON':
                value['default'] = json.loads(json.dumps(value['default']))

        # Check if filter exists in filters table
        payload = PayloadBuilder().WHERE(['name', '=', filter_name]).payload()
        result = await storage.query_tbl_with_payload("filters", payload)
        if len(result["rows"]) == 0:
            # Create entry in filters table
            payload = PayloadBuilder().INSERT(name=filter_name, plugin=plugin_name).payload()
            await storage.insert_into_tbl("filters", payload)

        # Everything ok, now create filter config
        filter_desc = "Configuration of '{}' filter for plugin '{}'".format(filter_name, plugin_name)
        await cf_mgr.create_category(category_name=filter_name,
                                     category_description=filter_desc,
                                     category_value=plugin_config,
                                     keep_original_items=True)

        # If a custom filter_config is in the POST data, then update the value for each config item
        if filter_config is not None:
            if not isinstance(filter_config, dict):
                raise ValueError('filter_config must be a JSON object')
            await cf_mgr.update_configuration_item_bulk(filter_name, filter_config)

        # Fetch the newly created filter: get category items
        category_info = await cf_mgr.get_category_all_items(category_name=filter_name)
        if category_info is None:
            raise ValueError("No such '{}' filter found".format(filter_name))
        else:
            return web.json_response({'filter': filter_name,
                                      'description': filter_desc,
                                      'value': category_info})
    except ValueError as ex:
        _LOGGER.exception("Add filter, caught exception: " + str(ex))
        raise web.HTTPNotFound(reason=str(ex))
    except TypeError as ex:
        _LOGGER.exception("Add filter, caught exception: " + str(ex))
        raise web.HTTPBadRequest(reason=str(ex))
    except StorageServerError as ex:
        await _delete_configuration_category(storage, filter_name)  # Revert configuration entry
        _LOGGER.exception("Failed to create filter. %s", ex.error)
        raise web.HTTPInternalServerError(reason='Failed to create filter.')
    except Exception as ex:
        _LOGGER.exception("Add filter, caught exception: %s", str(ex))
        raise web.HTTPInternalServerError(reason=str(ex))

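
# On success create_filter() responds with the filter name, its generated description and the
# full category content. An illustrative (not literal) response for the 'scale' example from
# the docstring might look like:
# {
#   "filter": "North_Readings_to_PI_scale_stage_1Filter",
#   "description": "Configuration of 'North_Readings_to_PI_scale_stage_1Filter' filter for plugin 'scale'",
#   "value": {"plugin": {...}, "offset": {...}, "enable": {...}}
# }
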
async def add_task(request):
    """
    Create a new task to run a specific plugin

    :Example:
     curl -X POST http://localhost:8081/fledge/scheduled/task -d '{"name": "North Readings to PI", "plugin": "pi_server", "type": "north", "schedule_type": 3, "schedule_day": 0, "schedule_time": 0, "schedule_repeat": 30, "schedule_enabled": true}'

     curl -sX POST http://localhost:8081/fledge/scheduled/task -d '{"name": "PI-2", "plugin": "pi_server", "type": "north", "schedule_type": 3, "schedule_day": 0, "schedule_time": 0, "schedule_repeat": 30, "schedule_enabled": true, "config": {"producerToken": {"value": "uid=180905062754237&sig=kx5l+"}, "URL": {"value": "https://10.2.5.22:5460/ingress/messages"}}}'
    """
    try:
        data = await request.json()
        if not isinstance(data, dict):
            raise ValueError('Data payload must be a valid JSON')

        name = data.get('name', None)
        plugin = data.get('plugin', None)
        task_type = data.get('type', None)

        schedule_type = data.get('schedule_type', None)
        schedule_day = data.get('schedule_day', None)
        schedule_time = data.get('schedule_time', None)
        schedule_repeat = data.get('schedule_repeat', None)
        enabled = data.get('schedule_enabled', None)
        config = data.get('config', None)

        if name is None:
            raise web.HTTPBadRequest(reason='Missing name property in payload.')
        if plugin is None:
            raise web.HTTPBadRequest(reason='Missing plugin property in payload.')
        if task_type is None:
            raise web.HTTPBadRequest(reason='Missing type property in payload.')

        if utils.check_reserved(name) is False:
            raise web.HTTPBadRequest(reason='Invalid name property in payload.')
        if utils.check_fledge_reserved(name) is False:
            raise web.HTTPBadRequest(reason="'{}' is reserved for Fledge and can not be used as task name!".format(name))
        if utils.check_reserved(plugin) is False:
            raise web.HTTPBadRequest(reason='Invalid plugin property in payload.')
        if task_type not in ['north']:
            raise web.HTTPBadRequest(reason='Only north type is supported.')

        if schedule_type is None:
            raise web.HTTPBadRequest(reason='schedule_type is mandatory')
        if not isinstance(schedule_type, int) and not schedule_type.isdigit():
            raise web.HTTPBadRequest(reason='Error in schedule_type: {}'.format(schedule_type))
        if int(schedule_type) not in list(Schedule.Type):
            raise web.HTTPBadRequest(reason='schedule_type error: {}'.format(schedule_type))
        if int(schedule_type) == Schedule.Type.STARTUP:
            raise web.HTTPBadRequest(reason='schedule_type cannot be STARTUP: {}'.format(schedule_type))

        schedule_type = int(schedule_type)

        if schedule_day is not None:
            if isinstance(schedule_day, float) or (isinstance(schedule_day, str) and
                                                   (schedule_day.strip() != "" and not schedule_day.isdigit())):
                raise web.HTTPBadRequest(reason='Error in schedule_day: {}'.format(schedule_day))
        else:
            schedule_day = int(schedule_day) if schedule_day is not None else None

        if schedule_time is not None and (not isinstance(schedule_time, int) and not schedule_time.isdigit()):
            raise web.HTTPBadRequest(reason='Error in schedule_time: {}'.format(schedule_time))
        else:
            schedule_time = int(schedule_time) if schedule_time is not None else None

        if schedule_repeat is not None and (not isinstance(schedule_repeat, int) and not schedule_repeat.isdigit()):
            raise web.HTTPBadRequest(reason='Error in schedule_repeat: {}'.format(schedule_repeat))
        else:
            schedule_repeat = int(schedule_repeat) if schedule_repeat is not None else None

        if schedule_type == Schedule.Type.TIMED:
            if not schedule_time:
                raise web.HTTPBadRequest(reason='schedule_time cannot be empty/None for TIMED schedule.')
            if schedule_day is not None and (schedule_day < 1 or schedule_day > 7):
                raise web.HTTPBadRequest(reason='schedule_day {} must either be None or must be an integer, 1(Monday) '
                                                'to 7(Sunday).'.format(schedule_day))
            if schedule_time < 0 or schedule_time > 86399:
                raise web.HTTPBadRequest(reason='schedule_time {} must be an integer and in range 0-86399.'.format(schedule_time))

        if schedule_type == Schedule.Type.INTERVAL:
            if schedule_repeat is None:
                raise web.HTTPBadRequest(reason='schedule_repeat {} is required for INTERVAL schedule_type.'.format(schedule_repeat))
            elif not isinstance(schedule_repeat, int):
                raise web.HTTPBadRequest(reason='schedule_repeat {} must be an integer.'.format(schedule_repeat))

        if enabled is not None:
            if enabled not in ['true', 'false', True, False]:
                raise web.HTTPBadRequest(reason='Only "true", "false", true, false are allowed for value of enabled.')
        is_enabled = True if ((type(enabled) is str and enabled.lower() in ['true']) or (
            (type(enabled) is bool and enabled is True))) else False

        # Check if a valid plugin has been provided
        try:
            # "plugin_module_path" is fixed by design. It is MANDATORY to keep the plugin in a folder
            # of exactly the same name within the plugin_module_path.
            # If multiple plugins with the same name are found, then the python plugin import is tried first
            plugin_module_path = "{}/python/fledge/plugins/{}/{}".format(_FLEDGE_ROOT, task_type, plugin)
            plugin_info = common.load_and_fetch_python_plugin_info(plugin_module_path, plugin, task_type)
            plugin_config = plugin_info['config']
            script = '["tasks/north"]'
            process_name = 'north'
        except FileNotFoundError as ex:
            # Checking for C-type plugins
            script = '["tasks/north_c"]'
            plugin_info = apiutils.get_plugin_info(plugin, dir=task_type)
            if plugin_info['type'] != task_type:
                msg = "Plugin of {} type is not supported".format(plugin_info['type'])
                _logger.exception(msg)
                return web.HTTPBadRequest(reason=msg)
            plugin_config = plugin_info['config']
            process_name = 'north_c'
            if not plugin_config:
                _logger.exception("Plugin %s import problem from path %s. %s", plugin, plugin_module_path, str(ex))
                raise web.HTTPNotFound(reason='Plugin "{}" import problem from path "{}"'.format(plugin, plugin_module_path))
        except TypeError as ex:
            raise web.HTTPBadRequest(reason=str(ex))
        except Exception as ex:
            _logger.exception("Failed to fetch plugin configuration. %s", str(ex))
            raise web.HTTPInternalServerError(reason='Failed to fetch plugin configuration.')

        storage = connect.get_storage_async()
        config_mgr = ConfigurationManager(storage)

        # Abort the operation if there are already executed tasks
        payload = PayloadBuilder() \
            .SELECT(["id", "schedule_name"]) \
            .WHERE(['schedule_name', '=', name]) \
            .LIMIT(1) \
            .payload()
        result = await storage.query_tbl_with_payload('tasks', payload)
        if result['count'] >= 1:
            msg = 'Unable to reuse name {0}, already used by a previous task.'.format(name)
            _logger.exception(msg)
            raise web.HTTPBadRequest(reason=msg)

        # Check whether the category name already exists
        category_info = await config_mgr.get_category_all_items(category_name=name)
        if category_info is not None:
            raise web.HTTPBadRequest(reason="The '{}' category already exists".format(name))

        # Check that the schedule name is not already registered
        count = await check_schedules(storage, name)
        if count != 0:
            raise web.HTTPBadRequest(reason='A north instance with this name already exists')

        # Check that the process name is not already registered
        count = await check_scheduled_processes(storage, process_name)
        if count == 0:
            # Create the scheduled process entry for the new task
            payload = PayloadBuilder().INSERT(name=process_name, script=script).payload()
            try:
                res = await storage.insert_into_tbl("scheduled_processes", payload)
            except StorageServerError as ex:
                _logger.exception("Failed to create scheduled process. %s", ex.error)
                raise web.HTTPInternalServerError(reason='Failed to create north instance.')
            except Exception as ex:
                _logger.exception("Failed to create scheduled process. %s", ex)
                raise web.HTTPInternalServerError(reason='Failed to create north instance.')

        # If successful then create a configuration entry from the plugin configuration
        try:
            # Create a configuration category from the configuration defined in the plugin
            category_desc = plugin_config['plugin']['description']
            await config_mgr.create_category(category_name=name,
                                             category_description=category_desc,
                                             category_value=plugin_config,
                                             keep_original_items=True)
            # Create the parent category for all North tasks
            await config_mgr.create_category("North", {}, 'North tasks', True)
            await config_mgr.create_child_category("North", [name])

            # If config is in the POST data, then update the value for each config item
            if config is not None:
                if not isinstance(config, dict):
                    raise ValueError('Config must be a JSON object')
                for k, v in config.items():
                    await config_mgr.set_category_item_value_entry(name, k, v['value'])
        except Exception as ex:
            await config_mgr.delete_category_and_children_recursively(name)
            _logger.exception("Failed to create plugin configuration. %s", str(ex))
            raise web.HTTPInternalServerError(reason='Failed to create plugin configuration. {}'.format(ex))

        # If all successful then lastly add a schedule to run the new task at startup
        try:
            schedule = TimedSchedule() if schedule_type == Schedule.Type.TIMED else \
                       IntervalSchedule() if schedule_type == Schedule.Type.INTERVAL else \
                       ManualSchedule()
            schedule.name = name
            schedule.process_name = process_name
            schedule.day = schedule_day
            m, s = divmod(schedule_time if schedule_time is not None else 0, 60)
            h, m = divmod(m, 60)
            schedule.time = datetime.time().replace(hour=h, minute=m, second=s)
            schedule.repeat = datetime.timedelta(seconds=schedule_repeat if schedule_repeat is not None else 0)
            schedule.exclusive = True
            schedule.enabled = False  # if "enabled" is supplied, it gets activated in save_schedule() via the is_enabled flag

            # Save schedule
            await server.Server.scheduler.save_schedule(schedule, is_enabled)
            schedule = await server.Server.scheduler.get_schedule_by_name(name)
        except StorageServerError as ex:
            await config_mgr.delete_category_and_children_recursively(name)
            _logger.exception("Failed to create schedule. %s", ex.error)
            raise web.HTTPInternalServerError(reason='Failed to create north instance.')
        except Exception as ex:
            await config_mgr.delete_category_and_children_recursively(name)
            _logger.exception("Failed to create schedule. %s", str(ex))
            raise web.HTTPInternalServerError(reason='Failed to create north instance.')

    except ValueError as e:
        raise web.HTTPBadRequest(reason=str(e))
    else:
        return web.json_response({'name': name, 'id': str(schedule.schedule_id)})

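
# The TIMED schedule stores its time-of-day as seconds after midnight (0-86399), and add_task()
# converts that to a datetime.time with two divmod calls, as shown above. A worked example with
# a purely illustrative schedule_time value:
import datetime

schedule_time = 45296                 # illustrative value: 12h 34m 56s after midnight
m, s = divmod(schedule_time, 60)      # m = 754, s = 56
h, m = divmod(m, 60)                  # h = 12, m = 34
assert datetime.time().replace(hour=h, minute=m, second=s) == datetime.time(12, 34, 56)
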