def test_component_import(self):
    """A builtin component name is resolved via plugin_import."""
    plugin_patch = patch.object(directord, "plugin_import", autospec=True)
    with plugin_patch as mocked_import:
        directord.component_import("notaplugin")
        mocked_import.assert_called_once_with(
            plugin=".components.builtin_notaplugin"
        )
def test_component_import_search(self):
    """A non-builtin component falls back to a share-path file lookup."""
    plugin_patch = patch.object(directord, "plugin_import", autospec=True)
    with plugin_patch as mock_plugin_import:
        # Force the builtin import to fail so the search path is used.
        mock_plugin_import.side_effect = ImportError(
            "No module named 'directord.components.builtin_notacomponent'"
        )
        with patch("sys.base_prefix", "/test/path"), patch(
            "sys.prefix", "/test/path"
        ), patch(
            "importlib.util.spec_from_file_location", autospec=True
        ) as mock_spec_from_file, patch(
            "importlib.util.module_from_spec", autospec=True
        ):
            directord.component_import("notacomponent")
            mock_spec_from_file.assert_called_once_with(
                "directord_user_component_notacomponent",
                "/test/path/share/directord/components/notacomponent.py",  # noqa
            )
def test_component_import_venv_error(self):
    """A prefix mismatch (venv) yields a failure tuple, not an import."""
    with patch.object(
        directord, "plugin_import", autospec=True
    ) as mock_plugin_import, patch(
        "sys.base_prefix", "/test/path"
    ), patch(
        "sys.prefix", "/test/venv-path"
    ):
        mock_plugin_import.side_effect = ImportError(
            "No module named 'directord.components.builtin_notacomponent'"
        )
        status, transfer, info = directord.component_import("notacomponent")
    # The mock's recorded state and the returned tuple survive the
    # context exit, so the assertions can run after restoration.
    self.assertEqual(status, False)
    self.assertEqual(transfer, "/etc/directord/components/notacomponent.py")
    self.assertEqual(info, COMPONENT_VENV_FAILURE_INFO)
def format_action(
    self,
    verb,
    execute,
    arg_vars=None,
    targets=None,
    ignore_cache=False,
    restrict=None,
    parent_id=None,
    parent_sha3_224=None,
    parent_name=None,
    job_name=None,
    return_raw=False,
    parent_async=False,
):
    """Return a JSON encoded object for task execution.

    The verb selects which component parses the execution string; the
    component's parsed output is merged into the job payload together
    with execution metadata.

    :param verb: Action to parse.
    :type verb: String
    :param execute: Execution string to parse.
    :type execute: String
    :param arg_vars: Argument dictionary, used to set arguments in
                     dictionary format instead of string format.
    :type arg_vars: Dictionary
    :param targets: Target arguments to send job to.
    :type targets: List
    :param ignore_cache: Instruct the entire execution to
                         ignore client caching.
    :type ignore_cache: Boolean
    :param restrict: Restrict job execution based on a provided task
                     SHA3_224.
    :type restrict: List
    :param parent_id: Set the parent UUID for execution jobs.
    :type parent_id: String
    :param parent_sha3_224: Set the parent sha3_224 for execution jobs.
    :type parent_sha3_224: String
    :param parent_name: Set the parent name for execution jobs.
    :type parent_name: String
    :param job_name: Set the job name.
    :type job_name: String
    :param return_raw: Enable a raw return from the server.
    :type return_raw: Boolean
    :param parent_async: Enable a parent job to run asynchronously.
    :type parent_async: Boolean
    :returns: String
    """
    data = dict(verb=verb)
    component_kwargs = dict(exec_array=execute, data=data, arg_vars=arg_vars)

    success, transfer, component = directord.component_import(
        component=verb.lower(),
        job_id=parent_id,
    )
    if not success:
        raise SystemExit(component)

    setattr(component, "verb", verb)
    data.update(component.server(**component_kwargs))
    data["timeout"] = getattr(component.known_args, "timeout", 600)
    data["run_once"] = getattr(component.known_args, "run_once", False)
    # NOTE: the job hash is computed here, before the metadata fields
    # below are added — those fields do not affect the fingerprint.
    data["job_sha3_224"] = utils.object_sha3_224(obj=data)
    data["return_raw"] = return_raw
    data["skip_cache"] = ignore_cache or getattr(
        component.known_args, "skip_cache", False
    )

    # Optional metadata: only truthy values are recorded on the job.
    optional_fields = (
        ("targets", targets),
        ("parent_async", parent_async),
        ("parent_id", parent_id),
        ("parent_sha3_224", parent_sha3_224),
        ("parent_name", parent_name),
        ("job_name", job_name),
        ("restrict", restrict),
    )
    for key, value in optional_fields:
        if value:
            data[key] = value

    if transfer:
        # A user-defined component file must be shipped to the targets
        # before the job that uses it can run.
        job = {
            "jobs": [
                {"WORKDIR": "/etc/directord/components"},
                {
                    "ADD": "--skip-cache {} {}".format(
                        transfer, "/etc/directord/components/"
                    )
                },
            ]
        }
        self.exec_orchestrations(
            orchestrations=[job],
            defined_targets=data.get("targets"),
            return_raw=True,
        )

    return json.dumps(data)
def job_q_component_run(self, component_kwargs, command, info, lock):
    """Execute a component operation.

    Components are dynamically loaded based on the given component
    name. Upon execution, the results are put into the results queue.

    :param component_kwargs: Named arguments used with the component
                             client.
    :type component_kwargs: Dictionary
    :param command: Byte encoded command used to run a given job.
    :type command: Bytes
    :param info: Information that was sent over with the original
                 message.
    :type info: Bytes
    :param lock: Locking object, used if a component requires it.
    :type lock: Object
    """
    job = component_kwargs["job"]
    job_id = job["job_id"]
    command_lower = command.lower()
    success, _, component = directord.component_import(
        component=command_lower,
        job_id=job_id,
    )
    # A job is a cache hit only when its hash is already recorded as
    # ended AND the job does not opt out via skip_cache/ignore_cache.
    cached = self.cache.get(
        job["job_sha3_224"]
    ) == self.driver.job_end and not job.get(
        "skip_cache", job.get("ignore_cache", False)
    )
    if not success:
        # Import failed: `component` carries the failure info; report a
        # zero-duration result with the failure status.
        self.log.warning("Component lookup failure [ %s ]", component)
        self.q_return.put(
            (
                None,
                None,
                success,
                None,
                job,
                command,
                0,
                None,
            )
        )
    elif cached and component.cacheable is True:
        # Cache hit on a cacheable component: skip execution entirely.
        self.log.info(
            "Cache hit on [ %s ], task skipped.",
            job_id,
        )
        self.q_return.put(
            (
                None,
                None,
                "skipped",
                None,
                job,
                command,
                0,
                None,
            )
        )
    else:
        self.log.debug(
            "Starting component execution for job [ %s ].",
            job_id,
        )
        # Set the comment command argument
        setattr(component, "command", command)
        setattr(component, "info", info)
        setattr(component, "driver", self.driver)

        locked = False
        if component.requires_lock:
            # Prefer a component-specific lock attribute on self; fall
            # back to the `lock` argument (the global lock) when the
            # named lock attribute does not exist.
            lock_name = "__lock_{}__".format(
                getattr(component, "lock_name", command_lower)
            )
            try:
                lock = getattr(self, lock_name)
            except AttributeError:
                self.log.debug(
                    "No component lock found for [ %s ], falling back"
                    " to global lock",
                    lock_name,
                )
            else:
                self.log.debug(
                    "Found component lock [ %s ]",
                    lock_name,
                )
            locked = lock.acquire()
            self.log.debug("Lock acquired for [ %s ]", job_id)

        _starttime = time.time()
        try:
            # NOTE: `info` is rebound here to the component's returned
            # info, shadowing the method argument from this point on.
            stdout, stderr, outcome, info = component.client(
                cache=self.cache, job=job
            )
        except Exception as e:
            stderr = "Job [ {} ] Component Failure: {}".format(
                job_id, str(e)
            )
            self.log.critical(stderr)
            stdout = None
            outcome = False
            info = traceback.format_exc()

        # Record wall-clock completion time on the job itself.
        job["component_exec_timestamp"] = datetime.datetime.fromtimestamp(
            time.time()
        ).strftime("%Y-%m-%d %H:%M:%S")

        try:
            if component.block_on_tasks:
                # Keep only callback tasks targeted at this node.
                block_on_tasks_data = [
                    i
                    for i in component.block_on_tasks
                    if self.driver.identity in i.get("targets", list())
                ]
                if outcome and block_on_tasks_data:
                    # Success is deferred until callbacks complete.
                    outcome = None
                    info = "Waiting for callback tasks to complete"
            else:
                block_on_tasks_data = None
            component_return = (
                stdout,
                stderr,
                outcome,
                info,
                job,
                command,
                time.time() - _starttime,
                component.block_on_tasks,
            )
        except UnboundLocalError:
            # NOTE(review): defensive recovery — presumably guards
            # against a client() path that leaves locals unset; report
            # a generic failure rather than crash the worker.
            component.block_on_tasks = list()
            component_return = (
                None,
                None,
                False,
                "Job was unable to finish",
                job,
                command,
                time.time() - _starttime,
                None,
            )

        if locked:
            lock.release()
            self.log.debug("Lock released for [ %s ]", job_id)

        self.q_return.put(component_return)

        # Callback handling: IndexError means "no callbacks matched
        # this node" (empty list); TypeError means "no callbacks at
        # all" (block_on_tasks_data is None).
        try:
            block_on_task_data = block_on_tasks_data[-1]
        except IndexError:
            self.log.debug(
                "Job [ %s ] no valid callbacks for this node %s.",
                job["job_id"],
                self.driver.identity,
            )
        except TypeError:
            self.log.debug("No callbacks defined.")
        else:
            self.log.info(
                "Job [ %s ] number of job call backs [ %s ]",
                job["job_id"],
                len(component.block_on_tasks),
            )
            self.log.debug("Job call backs: %s ", component.block_on_tasks)
            block_on_task_success = False
            # Poll the cache until the last callback job is recorded as
            # ended or failed. NOTE(review): no timeout — this loop can
            # block the worker indefinitely if the callback never lands.
            while True:
                if self.cache.get(block_on_task_data["job_sha3_224"]) in [
                    self.driver.job_end,
                    self.driver.job_failed,
                ]:
                    block_on_task_success = True
                    break
                else:
                    self.log.debug(
                        "waiting for callback job from [ %s ] to"
                        " complete. %s",
                        job["job_id"],
                        block_on_task_data,
                    )
                    time.sleep(1)

            if block_on_task_success:
                self.log.debug(
                    "Job [ %s ] callback complete", job["job_id"]
                )
                self.q_return.put(
                    (
                        stdout,
                        stderr,
                        True,
                        "Callback [ {} ] completed".format(
                            block_on_task_data["job_id"]
                        ),
                        job,
                        command,
                        time.time() - _starttime,
                        None,
                    )
                )
            else:
                self.log.error(
                    "Job [ %s ] callback never completed",
                    job["job_id"],
                )
                self.q_return.put(
                    (
                        stdout,
                        stderr,
                        False,
                        "Callback [ {} ] never completed".format(
                            block_on_task_data["job_id"]
                        ),
                        job,
                        command,
                        time.time() - _starttime,
                        None,
                    )
                )

    self.log.debug(
        "Component execution complete for job [ %s ].",
        job["job_id"],
    )