def test_runner_create_instance_from_plugin(tmpdir, params):
    workflow = flexmock(data=flexmock())
    workflow.data.image_id = 'image-id'
    workflow.source = flexmock()
    workflow.source.dockerfile_path = 'dockerfile-path'
    workflow.source.path = 'path'
    workflow.user_params = {'shrubbery': 'yrebburhs'}

    class MyPlugin(Plugin):
        key = 'my_plugin'

        @staticmethod
        def args_from_user_params(user_params):
            return {'shrubbery': user_params['shrubbery']}

        def __init__(self, workflow, spam=None, shrubbery=None):
            super().__init__(workflow)
            self.spam = spam
            self.shrubbery = shrubbery

        def run(self):
            pass

    bpr = PluginsRunner(workflow, [])
    plugin = bpr.create_instance_from_plugin(MyPlugin, params)

    assert plugin.spam == params['spam']
    assert plugin.shrubbery == 'yrebburhs'
def build_docker_image(self) -> None:
    """Start the container build.

    In general, all plugins run in order and the execution can be
    terminated by sending a SIGTERM signal to atomic-reactor.

    When the ``keep_plugins_running`` argument is set, all specified
    plugins are guaranteed to run and the SIGTERM signal is ignored.
    """
    print_version_of_tools()
    try:
        self.fs_watcher.start()
        if self.keep_plugins_running:
            signal.signal(signal.SIGTERM, signal.SIG_IGN)
        else:
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
        runner = PluginsRunner(self,
                               self.plugins_conf,
                               self.plugin_files,
                               self.keep_plugins_running,
                               plugins_results=self.data.plugins_results)
        runner.run()
    finally:
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        self.fs_watcher.finish()
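# A minimal standalone sketch of the SIGTERM policy that build_docker_image()
# implements above: ignore SIGTERM while plugins must keep running, otherwise
# turn it into an exception, and always restore the default handler when done.
# This is not atomic-reactor API; the names below are illustrative assumptions.
import signal


def _raise_cancelled(signum, frame):
    # Hypothetical stand-in for throw_canceled_build_exception.
    raise RuntimeError("build cancelled by SIGTERM")


def run_with_sigterm_policy(run, keep_plugins_running=False):
    if keep_plugins_running:
        signal.signal(signal.SIGTERM, signal.SIG_IGN)
    else:
        signal.signal(signal.SIGTERM, _raise_cancelled)
    try:
        run()
    finally:
        # Mirror the finally block above: restore the default disposition.
        signal.signal(signal.SIGTERM, signal.SIG_DFL)


if __name__ == "__main__":
    run_with_sigterm_policy(lambda: print("plugins running"),
                            keep_plugins_running=True)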
def test_runner_create_instance_from_plugin_with_kwargs(tmpdir, params):
    workflow = flexmock(data=flexmock())
    workflow.data.image_id = 'image-id'
    workflow.source = flexmock()
    workflow.source.dockerfile_path = 'dockerfile-path'
    workflow.source.path = 'path'
    workflow.user_params = {}

    class MyPlugin(Plugin):
        key = 'my_plugin'

        def __init__(self, workflow, spam=None, **kwargs):
            super().__init__(workflow)
            self.spam = spam
            for key, value in kwargs.items():
                setattr(self, key, value)

        def run(self):
            pass

    bpr = PluginsRunner(workflow, [])
    plugin = bpr.create_instance_from_plugin(MyPlugin, params)

    for key, value in params.items():
        assert getattr(plugin, key) == value
def test_labels_from_user_params(workflow):
    workflow.user_params["release"] = "42"
    runner = PluginsRunner(workflow, [])
    plugin = runner.create_instance_from_plugin(AddLabelsPlugin, {})
    assert plugin.labels == {"release": "42"}
def test_get_available_plugins(plugins_conf: List[Dict[str, Any]], expected,
                               workflow: DockerBuildWorkflow):
    if isinstance(expected, list):
        runner = PluginsRunner(
            workflow,
            plugins_conf,
            plugin_files=[inspect.getfile(PushImagePlugin)],
        )
        expected_exec_info: PluginExecutionInfo
        for got_exec_info, expected_exec_info in zip(runner.available_plugins, expected):
            assert got_exec_info.plugin_name == expected_exec_info.plugin_name
            assert got_exec_info.conf == expected_exec_info.conf
            assert got_exec_info.is_allowed_to_fail == expected_exec_info.is_allowed_to_fail
            # Compare bare class names for simplicity. Otherwise, the differing
            # module paths within the class repr would have to be handled, e.g.
            # tests.test_plugin.MyPlugin1 vs test_plugin.MyPlugin1.
            left = got_exec_info.plugin_class.__name__.split(".")[-1]
            right = expected_exec_info.plugin_class.__name__.split(".")[-1]
            assert left == right
    else:
        with expected:
            PluginsRunner(workflow, plugins_conf)
        err_msg = workflow.data.plugins_errors["cool_plugin"]
        assert "no such plugin" in err_msg
def test_skip_plugin(workflow, caplog):
    setup_flatpak_composes(workflow)

    runner = PluginsRunner(workflow,
                           [{
                               'name': FlatpakCreateOciPlugin.key,
                               'args': {}
                           }])
    runner.run()

    assert 'not flatpak build, skipping plugin' in caplog.text
def test_store_plugin_result(workflow: DockerBuildWorkflow):
    runner = PluginsRunner(
        workflow,
        [{"name": CleanupPlugin.key}, {"name": PushImagePlugin.key}],
        plugin_files=[THIS_FILE],
    )
    runner.run()
    assert runner.plugins_results[CleanupPlugin.key] is None
    assert "pushed" == runner.plugins_results[PushImagePlugin.key]
def test_append_from_user_params(workflow, flatpak, isolated, append):
    workflow.user_params["flatpak"] = flatpak
    workflow.user_params["isolated"] = isolated
    add_koji_map_in_workflow(workflow, hub_url='', root_url='')

    session = MockedClientSessionGeneral('')
    flexmock(koji, ClientSession=session)

    runner = PluginsRunner(workflow, [])
    plugin = runner.create_instance_from_plugin(BumpReleasePlugin, {})
    assert plugin.append == append
def test_check_no_reload(workflow):
    """test if plugins are not reloaded"""
    PluginsRunner(workflow,
                  [{"name": PushImagePlugin.key}],
                  plugin_files=[THIS_FILE])
    module_id_first = id(sys.modules['test_plugin'])
    PluginsRunner(workflow,
                  [{"name": PushImagePlugin.key}],
                  plugin_files=[THIS_FILE])
    module_id_second = id(sys.modules['test_plugin'])
    assert module_id_first == module_id_second
def test_flatpak_create_oci_no_source(workflow):
    workflow.user_params['flatpak'] = True
    setup_flatpak_composes(workflow)
    (flexmock(FlatpakUtil)
        .should_receive('get_flatpak_source_info')
        .and_return(None))

    runner = PluginsRunner(workflow,
                           [{
                               'name': FlatpakCreateOciPlugin.key,
                               'args': {}
                           }])

    msg = "flatpak_create_dockerfile must be run before flatpak_create_oci"
    with pytest.raises(PluginFailedException, match=msg):
        runner.run()
def create_runner(self) -> PluginsRunner:
    """Create runner for the current plugin (configured using for_plugin()).

    :return: PluginsRunner instance
    """
    return PluginsRunner(
        self.workflow,
        self.workflow.plugins_conf,
        plugins_results=self.workflow.data.plugins_results,
    )
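# Hypothetical usage sketch for create_runner(): everything except
# PluginsRunner.run() and plugins_results is an assumption here, including
# the `task` object and the 'push_image' plugin key.
#
#     task.workflow.plugins_conf = [{"name": "push_image"}]  # set via for_plugin()
#     runner = task.create_runner()
#     runner.run()
#     result = task.workflow.data.plugins_results["push_image"]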
def test_run_plugins_in_keep_going_mode(allow_plugin_fail: bool,
                                        workflow: DockerBuildWorkflow,
                                        caplog):
    plugins_conf = [{"name": CleanupPlugin.key}]
    if allow_plugin_fail:
        plugins_conf.append({"name": WriteRemoteLogsPlugin.key})
    else:
        plugins_conf.append({"name": StoreArtifactsPlugin.key})
    # Let the failure happen first
    plugins_conf.reverse()

    runner = PluginsRunner(workflow,
                           plugins_conf,
                           plugin_files=[THIS_FILE],
                           keep_going=True)

    if allow_plugin_fail:
        runner.run()
        # The error should just be logged
        assert "remote host is unavailable" in caplog.text
    else:
        with pytest.raises(PluginFailedException, match="no permission"):
            runner.run()
        # The error must be recorded
        assert "no permission" in workflow.data.plugins_errors[StoreArtifactsPlugin.key]

    # The subsequent plugin should get a chance to run after the previous error.
    assert "continuing..." in caplog.text
    assert runner.plugins_results[CleanupPlugin.key] is None
def test_load_plugins(use_plugin_file, workflow):
    """test loading plugins"""
    plugins_files = [inspect.getfile(PushImagePlugin)] if use_plugin_file else []
    runner = PluginsRunner(workflow, [], plugin_files=plugins_files)
    assert runner.plugin_classes is not None
    assert len(runner.plugin_classes) > 0

    # Spot-check that a few known plugins were loaded
    assert AddFilesystemPlugin.key in runner.plugin_classes
    assert TagAndPushPlugin.key in runner.plugin_classes
    if use_plugin_file:
        assert PushImagePlugin.key in runner.plugin_classes
        assert CleanupPlugin.key in runner.plugin_classes