def __init__(self):
    """ Set up plugin delegation state and rate-limited reload helpers. """
    # TODO: use the regular way to super
    # NOTE(review): `super(type(self), self)` recurses infinitely if this class
    # is ever subclassed - presumably safe here because Tractor instantiates
    # the filter class directly; confirm before subclassing.
    self.super = super( type(self), self )  # magic proxy (like shown in the TractorSiteStatusFilter.py example)
    self.super.__init__()

    self._filter_selector = FILTER_SELECTOR

    # we need to store information per reference that we can pass to
    # individual site status filter plugins and modify from within any method
    self._persistent_data = {}

    self._plugins = Plugins()

    def reload_selector():
        # re-imports jobtronaut.constants so an edited FILTER_SELECTOR
        # takes effect without restarting the host process
        _LOG.info("Reloading FILTER_SELECTOR...")
        import jobtronaut.constants
        reload(jobtronaut.constants)
        from jobtronaut.constants import FILTER_SELECTOR
        self._filter_selector = FILTER_SELECTOR

    # site status filters are called frequently, so don't perform a rediscovery of plugins and a selector
    # reload all the time
    self._plugins_initialize = CallIntervalLimiter(
        self._plugins.initialize, interval=60)
    self._reload_selector = CallIntervalLimiter(reload_selector, interval=60)
def test_task(self):
    """ check if we get the task class we would expect """
    registry = tasks.TASKS_DICT
    for name in registry:
        self.assertEqual(Plugins().task(name).__name__, registry[name].__name__)

    # an unknown task name has to raise a descriptive KeyError
    with self.assertRaises(KeyError) as context:
        Plugins().task("NonExistingTask")
    self.assertIn("No task found for", context.exception.message)
def test_initialize(self):
    """ check if initialize clears the cache properly """
    with patch("jobtronaut.author.plugins.PLUGIN_PATH", new=[]):
        # plugins discovered earlier are still cached at this point...
        self.assertNotEqual([], sorted(Plugins().tasks))
        self.assertNotEqual([], sorted(Plugins().processors))

        Plugins().initialize()

        # ...but a rediscovery with an empty search path leaves nothing behind
        self.assertListEqual([], sorted(Plugins().tasks))
        self.assertListEqual([], sorted(Plugins().processors))
def test_processor(self):
    """ check if we get the processor class we would expect """
    registry = processors.PROCESSORS_DICT
    for name in registry:
        self.assertEqual(
            Plugins().processor(name).__name__, registry[name].__name__)

    # an unknown processor name has to raise a descriptive KeyError
    with self.assertRaises(KeyError) as context:
        Plugins().processor("NonExistingProcessor")
    self.assertIn("No processor found for", context.exception.message)
def test_sitestatusfilter(self):
    """ check if we get the sitestatusfilter class we would expect """
    # (docstring previously claimed "processor class" - copy-paste error)
    for filter_name in sitestatusfilters.FILTERS_DICT:
        self.assertEqual(
            Plugins().sitestatusfilter(filter_name).__name__,
            sitestatusfilters.FILTERS_DICT[filter_name].__name__)

    # an unknown filter name has to raise a descriptive KeyError
    with self.assertRaises(KeyError) as context:
        Plugins().sitestatusfilter("NonExistingFilter")
    self.assertIn("No sitestatusfilter found for", context.exception.message)
def test_plugin(self):
    """ check if we get the plugin class we expect """
    # `plugin` resolves tasks and processors alike, so check the union
    combined = dict(processors.PROCESSORS_DICT)
    combined.update(tasks.TASKS_DICT)
    for name, cls in combined.items():
        self.assertEqual(Plugins().plugin(name).__name__, cls.__name__)

    # an unknown plugin name has to raise a descriptive KeyError
    with self.assertRaises(KeyError) as context:
        Plugins().plugin("NonExistingPlugin")
    self.assertIn("No plugin found for", context.exception.message)
def append_jobtronaut_plugins_to_menu(menu):
    """ Add one menu entry per discovered jobtronaut task and processor.

    Args:
        menu: a menu object providing an ``append(path, creator, searchText=...)``
            interface; tasks go under /Tasks, processors under /Processors
    """
    from jobtronaut.author.plugins import Plugins

    # reuse a single Plugins instance instead of constructing one per access;
    # local names also no longer shadow the `tasks`/`processors` modules
    plugins = Plugins()

    for name in sorted(plugins.tasks):
        menu.append(
            "/Tasks/{}".format(name),
            functools.partial(JobtronautTask, name, name),
            searchText=name)

    for name in sorted(plugins.processors):
        menu.append(
            "/Processors/{}".format(name),
            functools.partial(JobtronautProcessor, name, name),
            searchText=name)
def test_get_module_path(self):
    """ check if we properly get the module path from where the plugin was sourced """
    # every plugin must resolve back to the module file it was defined in
    cases = (
        (tasks, tasks.TASKS_DICT),
        (processors, processors.PROCESSORS_DICT),
        (sitestatusfilters, sitestatusfilters.FILTERS_DICT),
    )
    for module, registry in cases:
        for plugin_name in registry.keys():
            self.assertEqual(module.__file__, Plugins().get_module_path(plugin_name))
def test_get(self):
    # a wrapper without any overrides must refuse to build
    with self.assertRaises(AssertionError) as context:
        TaskWithOverrides("CmdTaskFixture").get()
    self.assertEqual("No Task overrides defined.", context.exception.message)

    # a near-miss attribute name should suggest the closest matches
    with self.assertRaises(AssertionError) as context:
        TaskWithOverrides("CmdTaskFixture", required=["not_working"]).get()
    self.assertIn(
        "No attribute found for required, closest matches are "
        "['required_tasks', 'required_arguments']",
        context.exception.message)

    overridden = TaskWithOverrides(
        "CmdTaskFixture", title="A Task based on CmdTaskFixture").get()

    # testing identity (the overridden task should be unique)
    from jobtronaut.author.plugins import Plugins
    self.assertFalse(overridden is Plugins().task("CmdTaskFixture"))

    # testing the attributes
    self.assertEqual("CmdTaskFixtureOverriden", overridden.__name__)
    self.assertHasAttribute(overridden, "_has_overrides")
    self.assertEqual("A Task based on CmdTaskFixture", overridden.title)
    self.assertEqual(["linux64"], overridden.services)
def add_code_nodules(self, plugin):
    """ Add read-only "source" and "module" plugs exposing the plugin's code.

    Args:
        plugin: the plugin class whose source code and module path get shown
            in the node's "Code" layout section
    """
    # show the plugin's full source in a read-only multi-line code widget
    code_plug = Gaffer.StringPlug("source", defaultValue=inspect.getsource(plugin))
    Gaffer.Metadata.registerPlugValue(code_plug, "nodule:type", "")  # no nodule - UI only
    Gaffer.Metadata.registerValue(
        code_plug,
        "layout:section",
        "Code"
    )
    Gaffer.Metadata.registerValue(
        code_plug,
        "plugValueWidget:type",
        "GafferUI.MultiLineStringPlugValueWidget"
    )
    Gaffer.Metadata.registerValue(
        code_plug,
        "multiLineStringPlugValueWidget:role",
        "code"
    )
    Gaffer.Metadata.registerValue(
        code_plug,
        "layout:section:Settings.Code:summary",
        "Information about the source code of this plugin."
    )
    Gaffer.MetadataAlgo.setReadOnly(code_plug, True)
    self.addChild(code_plug)

    # also expose the module path the plugin was sourced from
    module_plug = Gaffer.StringPlug("module", defaultValue=Plugins().get_module_path(self.type_plug.getValue()))
    Gaffer.Metadata.registerValue(
        module_plug,
        "layout:section",
        "Code"
    )
    Gaffer.MetadataAlgo.setReadOnly(module_plug, True)
    self.addChild(module_plug)
def test_plugins(self):
    """ check if available plugins match the ones we provide """
    # the plugins property must expose tasks, processors and filters combined
    expected = (tasks.TASKS_DICT.keys() +
                processors.PROCESSORS_DICT.keys() +
                sitestatusfilters.FILTERS_DICT.keys())
    self.assertListEqual(sorted(expected), sorted(Plugins().plugins))
def test_initialize_with_duplicates(self):
    """ check if the duplicates detection works as expected """
    processors_dir = os.path.dirname(processors.__file__)
    search_paths = [
        processors_dir,
        os.path.join(processors_dir, "duplicates"),
    ]
    with patch("jobtronaut.author.plugins.PLUGIN_PATH", new=search_paths):
        # ensure that we raise an error if we find duplicates
        with self.assertRaises(AssertionError) as context:
            Plugins().initialize()
        self.assertIn("names are unique", context.exception.message)

        # but allow an explicit opt-out
        Plugins().initialize(ignore_duplicates=True)
def test_plugin_class(self):
    # every registry entry must report its expected plugin class name
    expectations = (
        (processors.PROCESSORS_DICT, "Processor"),
        (tasks.TASKS_DICT, "Task"),
        (sitestatusfilters.FILTERS_DICT, "SiteStatusFilter"),
    )
    for registry, class_name in expectations:
        for plugin_name in registry.keys():
            self.assertEqual(Plugins().plugin_class(plugin_name), class_name)

    # an unknown plugin has to fail loudly
    with self.assertRaises(AssertionError) as context:
        Plugins().plugin_class("NonExistingPlugin")
    self.assertIn("Plugin NonExistingPlugin could not", context.exception.message)
def test_modify(self):
    """ check if modify_cmds applies values depending on the given predicate

    The assertions build on each other - every modify_cmds call mutates the
    state left behind by the previous one, so the order must not change.
    """
    root_task, arguments = tasks.TASKS_DICT.keys()[0], {"uno": [1, 2, 3], "dos": 2, "tres": 3}

    # force one command per element so we end up with three cmds to modify
    for task in Plugins().tasks.values():
        task.flags = task.Flags.PER_ELEMENT
        task.cmd = lambda x: ["/bin/echo", "Hello World"]
        task.tags = ["foo", "bar"]

    job = Job(root_task, arguments)

    # add idx attribute to each command to differentiate them and
    # refer to them by number later in the test
    for idx, cmd in enumerate(job.flat_hierarchy["cmds"]):
        cmd.MEMBERS.append("idx")
        cmd.idx = idx

    def _get_attribute_listed(job, type, attribute):
        # collect the given attribute's value from all entities of that type
        return [_.attributeByName.get(attribute).value for _ in job.flat_hierarchy[type]]

    # predicate=False: nothing may change
    job.modify_cmds(predicate=False, attribute="tags", value=["foobar"])
    self.assertListEqual(
        [["foo", "bar"], ["foo", "bar"], ["foo", "bar"]],
        _get_attribute_listed(job, "cmds", "tags")
    )

    # predicate=True: every command is affected
    job.modify_cmds(predicate=True, attribute="tags", value=["foobar"])
    self.assertListEqual(
        [["foobar"], ["foobar"], ["foobar"]],
        _get_attribute_listed(job, "cmds", "tags")
    )

    # callable predicate: only matching commands (idx != 0) are affected
    job.modify_cmds(predicate=lambda x: x.idx != 0, attribute="tags", value=["bar", "foo"])
    self.assertListEqual(
        [["foobar"], ["bar", "foo"], ["bar", "foo"]],
        _get_attribute_listed(job, "cmds", "tags")
    )

    # callable predicate excluding the last command
    job.modify_cmds(predicate=lambda x: x.idx != 2, attribute="tags", value=["barfoo"])
    self.assertListEqual(
        [["barfoo"], ["barfoo"], ["bar", "foo"]],
        _get_attribute_listed(job, "cmds", "tags")
    )

    # value may itself be a callable computed per command
    job.modify_cmds(predicate=lambda x: x.idx != 2, attribute="tags", value=lambda x: [["zero"], ["one"]][x.idx])
    self.assertListEqual(
        [["zero"], ["one"], ["bar", "foo"]],
        _get_attribute_listed(job, "cmds", "tags")
    )
def test_stop_traversal(self):
    """ check if stop_traversal will prevent task creation """
    root_task = tasks.TASKS_DICT.keys()[0]
    arguments = {"uno": [1, 2, 3], "dos": 2, "tres": 3}

    for task in Plugins().tasks.values():
        task.cmd = lambda x: ["/bin/echo", "Hello World"]
        task.flags = tasks.Task.Flags.PER_ELEMENT

    # without any stop condition all per-element tasks get created
    job = Job(root_task, arguments)
    self.assertEqual(len(job.flat_hierarchy["tasks"]), 4)

    # stopping on a single element removes exactly that element's task
    for task in Plugins().tasks.values():
        task.stop_traversal = lambda x: x.arguments.uno.processed == 2
    job = Job(root_task, arguments)
    self.assertEqual(len(job.flat_hierarchy["tasks"]), 3)
    titles = " ".join([_.title for _ in job.flat_hierarchy["tasks"]])
    self.assertTrue("Elements 2" not in titles)

    # stopping on every element leaves only the root task
    for task in Plugins().tasks.values():
        task.stop_traversal = lambda x: x.arguments.uno.processed in [1, 2, 3]
    job = Job(root_task, arguments)
    self.assertEqual(len(job.flat_hierarchy["tasks"]), 1)
    titles = " ".join([_.title for _ in job.flat_hierarchy["tasks"]])
    self.assertTrue("Elements" not in titles)
def test_get_all_arguments(self):
    """ check if we will get all arguments a task will consume correctly """
    # the first (sorted) task consumes the union of all processor scopes
    flattened = [item for scope in tasks.PROCESSOR_SCOPE for item in scope]
    all_arguments = list(set(flattened))

    for index, task_name in enumerate(sorted(tasks.TASKS_DICT.keys())):
        if index != 0:
            # we expect the first task will get all other available tasks as required
            all_arguments = list(tasks.PROCESSOR_SCOPE[index])
        expected_arguments = set(all_arguments)
        self.assertListEqual(
            Plugins().get_all_arguments(task_name),
            list(set([_.split(".")[0] for _ in expected_arguments])))
def __init__(self, name, task_name):
    """ Gaffer node representing a jobtronaut Task plugin.

    Args:
        name: the Gaffer node name
        task_name: name of the jobtronaut task plugin this node represents
    """
    super(JobtronautTask, self).__init__(name)
    Gaffer.Metadata.registerValue(self.__class__, "nodeGadget:color", _TASK_COLOR)

    # incoming task connection, rendered as a nodule on top of the node
    in_plug = GafferDispatch.TaskNode.TaskPlug("in", Gaffer.Plug.Direction.In)
    Gaffer.Metadata.registerPlugValue(in_plug, "nodule:type", "GafferUI::StandardNodule")
    Gaffer.Metadata.registerPlugValue(in_plug, "nodule:color", _TASK_IN_OUT_COLOR)
    Gaffer.Metadata.registerPlugValue(in_plug, "noduleLayout:section", "top")
    Gaffer.Metadata.registerPlugValue(in_plug, "plugValueWidget:type", "")
    self.addChild(in_plug)

    # the "type" plug stores the plugin name; reuse an existing plug when
    # present (e.g. when the node comes back from serialization)
    # NOTE(review): setValue is placed inside the creation branch here - the
    # original formatting is ambiguous on this; confirm against upstream.
    self.type_plug = self.getChild("type")
    if not self.type_plug:
        self.type_plug = Gaffer.StringPlug("type", Gaffer.Plug.Direction.In)
        self.addChild(self.type_plug)
        self.type_plug.setValue(task_name)
    Gaffer.MetadataAlgo.setReadOnly(self.type_plug, True)
    Gaffer.Metadata.registerPlugValue(self.type_plug, "nodule:type", "")

    plugin = Plugins().task(task_name)
    Gaffer.Metadata.registerValue(self, "description", plugin.description)

    # one output nodule per expandable task, plus one per expansion argument
    expansions = get_expand_task_names(plugin)
    for expansion in expansions:
        expand_plug = GafferDispatch.TaskNode.TaskPlug(expansion.root, Gaffer.Plug.Direction.Out)
        Gaffer.Metadata.registerPlugValue(expand_plug, "nodule:type", "GafferUI::StandardNodule")
        Gaffer.Metadata.registerPlugValue(expand_plug, "nodule:color", _TASK_IN_OUT_COLOR)
        Gaffer.Metadata.registerPlugValue(expand_plug, "noduleLayout:section", "right")
        Gaffer.Metadata.registerPlugValue(expand_plug, "plugValueWidget:type", "")
        self.addChild(expand_plug)
        for argument in expansion.arguments:
            arguments_plug = ArgumentsPlug(argument, Gaffer.Plug.Direction.Out)
            Gaffer.Metadata.registerPlugValue(arguments_plug, "nodule:type", "GafferUI::StandardNodule")
            Gaffer.Metadata.registerPlugValue(arguments_plug, "nodule:color", _ARGUMENTS_COLOR)
            Gaffer.Metadata.registerPlugValue(arguments_plug, "noduleLayout:section", "right")
            Gaffer.Metadata.registerPlugValue(arguments_plug, "plugValueWidget:type", "")
            Gaffer.Metadata.registerPlugValue(arguments_plug, "connectionGadget:color", _ARGUMENTS_CONNECTION_COLOR)
            self.addChild(arguments_plug)

    self.add_code_nodules(plugin)
class TractorSiteStatusFilter(TrStatusFilter):
    """ Delegate all filter methods to our plugin or fallback to default implementation """

    def __init__(self):
        # TODO: use the regular way to super
        self.super = super(type(self), self)  # magic proxy (like shown in the TractorSiteStatusFilter.py example)
        self.super.__init__()

        self._filter_selector = FILTER_SELECTOR

        # we need to store information per reference that we can pass to
        # individual site status filter plugins and modify from within any method
        self._persistent_data = {}

        self._plugins = Plugins()

        def reload_selector():
            # re-import jobtronaut.constants so an edited FILTER_SELECTOR
            # takes effect without restarting the host process
            _LOG.info("Reloading FILTER_SELECTOR...")
            import jobtronaut.constants
            reload(jobtronaut.constants)
            from jobtronaut.constants import FILTER_SELECTOR
            self._filter_selector = FILTER_SELECTOR

        # site status filters are called frequently, so don't perform a rediscovery of plugins and a selector
        # reload all the time
        self._plugins_initialize = CallIntervalLimiter(
            self._plugins.initialize, interval=60)
        self._reload_selector = CallIntervalLimiter(reload_selector, interval=60)

    def _delegate(self, function, function_args=(), function_kwargs=None, keep_cache=False):
        """ handle function call delegation to plugin

        Args:
            function: the bound default implementation to delegate/fall back to
            function_args (tuple): positional arguments for the filter call
            function_kwargs (dict): keyword arguments for the filter call
            keep_cache (bool): when True skip the rate-limited plugin rediscovery

        Returns:
            whatever the selected plugin's filter method (or the default
            implementation) returns
        """
        # was a shared mutable default ({}) - use None sentinel instead
        if function_kwargs is None:
            function_kwargs = {}

        self._reload_selector()
        _LOG.debug("Delegating `{}`".format(function.__name__))

        if function.__name__.endswith("State"):
            # state filters receive the stateDict and no cmd
            selector = lambda: self._filter_selector(function_args[0], None)  # -> pass `stateDict` and no cmd
        else:
            selector = lambda: self._filter_selector({}, function_args[0])  # -> pass cmd and empty stateDict

        try:
            plugin_names = selector()
        except Exception:  # deliberately broad best-effort, but no longer masks SystemExit/KeyboardInterrupt
            _LOG.error("Calling filter selector failed. Unable to delegate to any plugin.", exc_info=True)
            plugin_names = []

        plugin = None
        # fallback mechanism!
        # We'd like to prevent bypassing the default implementation of TrSiteStatusFilter;
        # initializing `func` up front also avoids an unbound reference when no
        # plugin names were selected.
        func = function

        if plugin_names:
            if isinstance(plugin_names, basestring):
                plugin_names = [plugin_names]

            # enforce bypassing the plugin cache to ensure implemented sites status filter methods
            # are always up to date
            if ENABLE_PLUGIN_CACHE and not keep_cache:
                self._plugins_initialize()

            for plugin_name in plugin_names:
                try:
                    plugin = self._plugins.sitestatusfilter(plugin_name)(
                        persistent_data=self._persistent_data)
                    # as the plugin inherits from TrSiteStatusFilter there should always be the actual filter function
                    func = getattr(plugin, function.__name__)
                    break
                except KeyError:
                    # fallback to original implementation if the plugin can't be found
                    _LOG.error(
                        "Unable to find site status filter `{}`.".format(plugin_name),
                        exc_info=True)
                    func = function

        if func != function:
            if plugin:
                _LOG.debug(
                    "Calling filter function on plugin `{}` from `{}`".format(
                        plugin.__class__.__name__, inspect.getfile(func)))
            try:
                return func(*function_args, **function_kwargs)
            except Exception:
                _LOG.error(
                    "Fallback to derived implementation, because `{}` failed.".format(func),
                    exc_info=True)

        return function(*function_args, **function_kwargs)

    # thin delegation wrappers - one per TrStatusFilter hook

    def FilterBasicState(self, stateDict, now):
        return self._delegate(self.super.FilterBasicState, (stateDict, now))

    def TestBasicState(self, stateDict, now):
        return self._delegate(self.super.TestBasicState, (stateDict, now))

    def FilterDynamicState(self, stateDict, now):
        return self._delegate(self.super.FilterDynamicState, (stateDict, now))

    def TestDynamicState(self, stateDict, now):
        return self._delegate(self.super.TestDynamicState, (stateDict, now))

    def SubprocessFailedToStart(self, cmd):
        return self._delegate(self.super.SubprocessFailedToStart, (cmd, ), keep_cache=True)

    def SubprocessStarted(self, cmd):
        return self._delegate(self.super.SubprocessStarted, (cmd, ))

    def SubprocessEnded(self, cmd):
        return self._delegate(self.super.SubprocessEnded, (cmd, ), keep_cache=True)

    def FilterSubprocessOutputLine(self, cmd, textline):
        return self._delegate(self.super.FilterSubprocessOutputLine, (cmd, textline), keep_cache=True)
def test_sitestatusfilters(self):
    """ check if available sitestatusfilter match the ones we provide """
    available = sorted(Plugins().sitestatusfilters)
    self.assertListEqual(sorted(sitestatusfilters.FILTERS_DICT.keys()), available)
def test_tasks(self):
    """ check if available tasks match the ones we provide """
    available = sorted(Plugins().tasks)
    self.assertListEqual(sorted(tasks.TASKS_DICT.keys()), available)
def test_processors(self):
    """ check if available processors match the ones we provide """
    available = sorted(Plugins().processors)
    self.assertListEqual(sorted(processors.PROCESSORS_DICT.keys()), available)
def setUp(cls):
    # NOTE(review): unittest invokes setUp with the instance, yet the parameter
    # is named `cls` - presumably this was intended as setUpClass (or the name
    # is simply misleading); confirm against the full test case definition.
    # let us use the fixtures path to test with
    Plugins().initialize()
def __init__(self, name, processor_name):
    """ Gaffer node representing a jobtronaut Processor plugin.

    Args:
        name: the Gaffer node name
        processor_name: name of the jobtronaut processor plugin this node represents
    """
    super(JobtronautProcessor, self).__init__(name)
    Gaffer.Metadata.registerValue(self.__class__, "nodeGadget:color", _PROCESSOR_COLOR)
    Gaffer.Metadata.registerValue(self.__class__, "icon", "processor.png")

    # the scopes the processed values will be applied to (UI only, no nodule)
    scope_name_plug = Gaffer.StringVectorDataPlug(
        "scope", Gaffer.Plug.Direction.In, defaultValue=IECore.StringVectorData()
    )
    Gaffer.Metadata.registerPlugValue(scope_name_plug, "nodule:type", "")
    self.addChild(scope_name_plug)

    # the "type" plug stores the plugin name; reuse an existing plug when
    # present (e.g. when the node comes back from serialization)
    # NOTE(review): setValue is placed inside the creation branch here - the
    # original formatting is ambiguous on this; confirm against upstream.
    self.type_plug = self.getChild("type")
    if not self.type_plug:
        self.type_plug = Gaffer.StringPlug("type", Gaffer.Plug.Direction.In)
        self.addChild(self.type_plug)
        self.type_plug.setValue(processor_name)
    Gaffer.MetadataAlgo.setReadOnly(self.type_plug, True)
    Gaffer.Metadata.registerPlugValue(self.type_plug, "nodule:type", "")

    # processor chain connections: "in" on top, "out" at the bottom
    in_plug = ProcessorPlug("in", Gaffer.Plug.Direction.In)
    Gaffer.Metadata.registerPlugValue(in_plug, "nodule:type", "GafferUI::StandardNodule")
    Gaffer.Metadata.registerPlugValue(in_plug, "nodule:color", _PROCESSOR_IN_OUT_COLOR)
    Gaffer.Metadata.registerPlugValue(in_plug, "noduleLayout:section", "top")
    Gaffer.Metadata.registerPlugValue(in_plug, "plugValueWidget:type", "")
    self.addChild(in_plug)

    out_plug = ProcessorPlug("out", Gaffer.Plug.Direction.Out)
    Gaffer.Metadata.registerPlugValue(out_plug, "nodule:type", "GafferUI::StandardNodule")
    Gaffer.Metadata.registerPlugValue(out_plug, "nodule:color", _PROCESSOR_IN_OUT_COLOR)
    Gaffer.Metadata.registerPlugValue(out_plug, "noduleLayout:section", "bottom")
    Gaffer.Metadata.registerPlugValue(out_plug, "plugValueWidget:type", "")
    self.addChild(out_plug)

    plugin = Plugins().processor(processor_name)

    Gaffer.Metadata.registerValue(
        self["scope"],
        "layout:section:Settings.Scope:summary",
        "The scopes the processed values will be applied to."
    )
    Gaffer.Metadata.registerValue(
        self["scope"],
        "layout:section",
        "Settings.Scope"
    )

    parameters_plug = Gaffer.CompoundDataPlug("parameters", Gaffer.Plug.Direction.In)
    Gaffer.Metadata.registerPlugValue(parameters_plug, "nodule:type", "")
    Gaffer.Metadata.registerValue(
        parameters_plug,
        "layout:section",
        "Settings.Parameters"
    )
    Gaffer.Metadata.registerValue(
        parameters_plug,
        "layout:section:Settings.Parameters:summary",
        "The parameters this processor is supposed to work with."
    )

    # mirror every processor parameter as a typed NameValuePlug; the dispatch
    # checks bool before int on purpose (bool is a subclass of int in Python)
    for parameter_name, parameter_value in plugin.parameters.items():
        if isinstance(parameter_value, basestring):
            plug = Gaffer.NameValuePlug(
                parameter_name, IECore.StringData(parameter_value), True, name=parameter_name
            )
        elif isinstance(parameter_value, float):
            plug = Gaffer.NameValuePlug(
                parameter_name, IECore.FloatData(parameter_value), True, name=parameter_name
            )
        elif isinstance(parameter_value, bool):
            plug = Gaffer.NameValuePlug(
                parameter_name, IECore.BoolData(parameter_value), True, name=parameter_name
            )
        elif isinstance(parameter_value, int):
            plug = Gaffer.NameValuePlug(
                parameter_name, IECore.IntData(parameter_value), True, name=parameter_name
            )
        elif isinstance(parameter_value, list):
            # homogeneous lists map onto the matching VectorData type
            # NOTE(review): an empty or mixed-type list matches none of the
            # branches below and leaves `plug` unset for this iteration -
            # confirm whether the trailing `else` was meant to sit inside
            # this list branch instead.
            if parameter_value and all([isinstance(_, basestring) for _ in parameter_value]):
                plug = Gaffer.NameValuePlug(
                    parameter_name, IECore.StringVectorData(parameter_value), True, name=parameter_name
                )
            elif parameter_value and all([isinstance(_, bool) for _ in parameter_value]):
                plug = Gaffer.NameValuePlug(
                    parameter_name, IECore.BoolVectorData(parameter_value), True, name=parameter_name
                )
            elif parameter_value and all([isinstance(_, int) for _ in parameter_value]):
                plug = Gaffer.NameValuePlug(
                    parameter_name, IECore.IntVectorData(parameter_value), True, name=parameter_name
                )
            elif parameter_value and all([isinstance(_, float) for _ in parameter_value]):
                plug = Gaffer.NameValuePlug(
                    parameter_name, IECore.FloatVectorData(parameter_value), True, name=parameter_name
                )
        else:
            # catch-all: stringify anything we have no dedicated Data type for
            plug = Gaffer.NameValuePlug(
                parameter_name, IECore.StringData(str(parameter_value)), True, name=parameter_name
            )
        parameters_plug.addChild(plug)

    self.addChild(parameters_plug)
    self.add_code_nodules(plugin)
    Gaffer.Metadata.registerValue(self, "description", plugin.description)