def run_exec(self):
    """Execute an exec job.

    Jobs are parsed and then sent to the server for processing. All
    return items are captured in an array which is returned on method
    exit.

    :returns: List
    """

    format_kwargs = dict(
        verb=self.args.verb,
        execute=self.args.exec,
        parent_async=getattr(self.args, "force_async", False),
        return_raw=getattr(self.args, "poll", False)
        or getattr(self.args, "stream", False)
        or getattr(self.args, "wait", False),
    )
    if self.args.target:
        format_kwargs["targets"] = list(set(self.args.target))

    return [
        directord.send_data(
            socket_path=self.args.socket_path,
            data=self.format_action(**format_kwargs),
        )
    ]
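# Usage sketch (illustration only, not part of the module): shows how the
# payload keyword arguments above are assembled from parsed CLI options.
# The SimpleNamespace below is a hypothetical stand-in for the argparse
# namespace; its attribute names mirror the getattr() calls in run_exec.
def _example_run_exec_kwargs():
    from types import SimpleNamespace

    args = SimpleNamespace(
        verb="RUN",
        exec=["echo hello"],
        target=["node1", "node2", "node1"],  # duplicates collapse via set()
        force_async=False,
        poll=True,  # any of poll/stream/wait flips return_raw on
        stream=False,
        wait=False,
    )
    format_kwargs = dict(
        verb=args.verb,
        execute=args.exec,
        parent_async=getattr(args, "force_async", False),
        return_raw=getattr(args, "poll", False)
        or getattr(args, "stream", False)
        or getattr(args, "wait", False),
    )
    if args.target:
        format_kwargs["targets"] = list(set(args.target))
    # This dict is what run_exec would hand to format_action before sending
    # the formatted action over the socket.
    return format_kwargs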
def analyze_all(self):
    """Run analysis across all known jobs.

    :returns: String
    """

    data = directord.send_data(
        socket_path=self.args.socket_path,
        data=json.dumps(dict(manage={"list_jobs": None})),
    )
    if data:
        data = dict(json.loads(data))
        return self.analyze_data(
            parent_id="All-Jobs", parent_jobs=list(data.values())
        )
    else:
        return json.dumps({"no_jobs_found": "All-Jobs"})
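# Sketch of the wire exchange analyze_all performs (illustration only):
# the request wraps a list_jobs manage call, and the reply is expected to
# be a JSON object keyed by job UUID whose values are the per-job records
# handed to analyze_data. The UUID and record below are fabricated
# placeholders.
def _example_analyze_all_payload():
    import json

    request = json.dumps(dict(manage={"list_jobs": None}))
    reply = json.dumps(
        {"example-job-uuid": {"PARENT_JOB_ID": "example-parent-uuid"}}
    )
    parent_jobs = list(dict(json.loads(reply)).values())
    return request, parent_jobs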
def analyze_job(self, job_id):
    """Run analysis on a given job UUID.

    :param job_id: Job UUID
    :type job_id: String
    :returns: String
    """

    data = directord.send_data(
        socket_path=self.args.socket_path,
        data=json.dumps(dict(manage={"job_info": job_id})),
    )
    item = list(dict(json.loads(data)).values())
    if item and not item[0]:
        return json.dumps({"job_id_not_found": job_id})

    return self.analyze_data(parent_id=job_id, parent_jobs=item)
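# Sketch of analyze_job's not-found handling (illustration only): when the
# server answers a job_info query for an unknown UUID, the reply is assumed
# to map the UUID to an empty value, so the first item is falsy and the
# method short-circuits with a "job_id_not_found" marker instead of calling
# analyze_data. The UUID below is a placeholder.
def _example_analyze_job_not_found():
    import json

    job_id = "00000000-0000-0000-0000-000000000000"
    reply = json.dumps({job_id: None})  # assumed reply shape for a miss
    item = list(dict(json.loads(reply)).values())
    if item and not item[0]:
        return json.dumps({"job_id_not_found": job_id})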
def analyze_parent(self, parent_id):
    """Run analysis on a given parent UUID.

    :param parent_id: Parent UUID
    :type parent_id: String
    :returns: String
    """

    data = directord.send_data(
        socket_path=self.args.socket_path,
        data=json.dumps(dict(manage={"list_jobs": None})),
    )
    parent_jobs = list()
    if data:
        data = dict(json.loads(data))
        for value in data.values():
            if value["PARENT_JOB_ID"] == parent_id:
                parent_jobs.append(value)

    if not parent_jobs:
        return json.dumps({"parent_id_not_found": parent_id})

    return self.analyze_data(parent_id=parent_id, parent_jobs=parent_jobs)
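# Sketch of the parent filter above (illustration only, fabricated records):
# a list_jobs reply is keyed by job UUID, and analyze_parent keeps only the
# records whose PARENT_JOB_ID matches the requested parent.
def _example_filter_parent_jobs():
    import json

    parent_id = "example-parent-a"
    reply = json.dumps(
        {
            "job-1": {"PARENT_JOB_ID": "example-parent-a"},
            "job-2": {"PARENT_JOB_ID": "example-parent-b"},
        }
    )
    parent_jobs = [
        value
        for value in dict(json.loads(reply)).values()
        if value["PARENT_JOB_ID"] == parent_id
    ]
    return parent_jobs  # only the "job-1" record survives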
def run(self, override=None):
    """Send the management command to the server.

    :param override: Set the job function regardless of args.
    :type override: String
    :returns: String
    """

    def _cache_dump():
        try:
            cache = iodict.Cache(
                path=os.path.join(self.args.cache_path, "client")
            )
            print(json.dumps(dict(cache.items()), indent=4))
        except KeyError:
            pass

    execution_map = {
        "dump-cache": _cache_dump,
        "export-jobs": {"list_jobs": None},
        "export-nodes": {"list_nodes": None},
        "job-info": {"job_info": override},
        "list-jobs": {"list_jobs": None},
        "list-nodes": {"list_nodes": None},
        "purge-jobs": {"purge_jobs": None},
        "purge-nodes": {"purge_nodes": None},
        "analyze-parent": self.analyze_parent,
        "analyze-job": self.analyze_job,
        "analyze-all": self.analyze_all,
    }

    if override and override in execution_map:
        manage = execution_map[override]
        if callable(manage):
            return manage()
    elif isinstance(override, str):
        manage = execution_map["job-info"]
    else:
        for k, v in execution_map.items():
            k_obj = k.replace("-", "_")
            k_arg = getattr(self.args, k_obj, False)
            if k_arg:
                if callable(v):
                    if isinstance(override, str):
                        return v(override)
                    elif isinstance(k_arg, str):
                        return v(k_arg)
                    else:
                        return v()
                else:
                    if isinstance(k_arg, str):
                        v[k_obj] = k_arg
                    manage = v
                break
        else:
            raise SystemExit("No known management function was defined.")

    self.log.debug("Executing Management Command:%s", manage)

    return directord.send_data(
        socket_path=self.args.socket_path,
        data=json.dumps(dict(manage=manage)),
    )
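# Sketch of the execution_map dispatch above (illustration only): a
# string-valued CLI option such as --job-info is folded into its payload
# dict before being wrapped as {"manage": ...} and written to the socket.
# The SimpleNamespace is a hypothetical stand-in for parsed args, and the
# map is trimmed to two entries for brevity.
def _example_manage_dispatch():
    import json
    from types import SimpleNamespace

    args = SimpleNamespace(job_info="example-job-uuid", list_jobs=False)
    execution_map = {
        "job-info": {"job_info": None},
        "list-jobs": {"list_jobs": None},
    }
    manage = None
    for k, v in execution_map.items():
        k_obj = k.replace("-", "_")
        k_arg = getattr(args, k_obj, False)
        if k_arg:
            if isinstance(k_arg, str):
                v[k_obj] = k_arg
            manage = v
            break
    # Yields '{"manage": {"job_info": "example-job-uuid"}}'
    return json.dumps(dict(manage=manage))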
def exec_orchestrations(
    self,
    orchestrations,
    defined_targets=None,
    restrict=None,
    ignore_cache=False,
    return_raw=False,
):
    """Execute orchestration jobs.

    Iterates over a list of orchestration blobs, fingerprints the jobs,
    and then runs them.

    :param orchestrations: List of Dictionaries which are run as
                           orchestrations.
    :type orchestrations: List
    :param defined_targets: List of targets to limit a given execution
                            to. This target list provides an override
                            for targets found within a given
                            orchestration.
    :type defined_targets: List
    :param restrict: Restrict a given orchestration job to a set of
                     SHA3_224 job fingerprints.
    :type restrict: Array
    :param ignore_cache: Instruct the orchestration job to ignore cached
                         executions.
    :type ignore_cache: Boolean
    :param return_raw: Enable a raw return from the server.
    :type return_raw: Boolean
    :returns: List
    """

    job_to_run = list()
    for orchestrate in orchestrations:
        parent_sha3_224 = utils.object_sha3_224(obj=orchestrate)
        parent_name = orchestrate.get("name")
        parent_id = utils.get_uuid()
        targets = (
            defined_targets
            or orchestrate.pop("assign", list())
            or orchestrate.get("targets", list())
        )
        force_async = getattr(self.args, "force_async", False)
        if force_async:
            parent_async = force_async
        else:
            try:
                parent_async = bool(
                    dist_utils.strtobool(orchestrate.get("async", "False"))
                )
            except (ValueError, AttributeError):
                parent_async = bool(orchestrate.get("async", False))

        for job in orchestrate["jobs"]:
            arg_vars = job.pop("vars", None)
            job_name = job.pop("name", None)
            assign = job.pop("assign", None)
            if assign and not isinstance(assign, list):
                raise SyntaxError(
                    "Job contained an invalid assignment: {} = {}."
                    " Assignments must be in list format.".format(
                        assign, type(assign)
                    )
                )
            key, value = next(iter(job.items()))
            job_to_run.append(
                dict(
                    verb=key,
                    execute=[value],
                    arg_vars=arg_vars,
                    targets=assign or targets,
                    restrict=restrict,
                    ignore_cache=ignore_cache,
                    parent_id=parent_id,
                    parent_sha3_224=parent_sha3_224,
                    parent_name=parent_name,
                    job_name=job_name,
                    return_raw=return_raw,
                    parent_async=parent_async,
                )
            )

    return_data = list()
    if getattr(self.args, "finger_print", False):
        count = 0
        for job in job_to_run:
            tabulated_data = list()
            formatted_job = self.format_action(**job)
            item = json.loads(formatted_job)
            exec_str = " ".join(job["execute"])
            if len(exec_str) >= 30:
                exec_str = "{execute}...".format(execute=exec_str[:27])
            tabulated_data.extend(
                [
                    count,
                    job["parent_name"] or job["parent_sha3_224"],
                    item["verb"],
                    exec_str,
                    job["job_name"] or item["job_sha3_224"],
                ]
            )
            return_data.append(tabulated_data)
            count += 1
        utils.print_tabulated_data(
            data=return_data,
            headers=["count", "parent", "verb", "exec", "job"],
        )
        return []
    else:
        for job in job_to_run:
            formatted_job = self.format_action(**job)
            return_data.append(
                directord.send_data(
                    socket_path=self.args.socket_path, data=formatted_job
                )
            )
        return return_data
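# Sketch of an orchestration blob and the per-job dict derived from it
# (illustration only; the content is fabricated). Each entry under "jobs"
# carries one verb/value pair plus optional vars/name/assign keys, which
# exec_orchestrations pops off before building the job payload it hands to
# format_action.
def _example_orchestration_blob():
    orchestration = {
        "name": "example",
        "targets": ["node1"],
        "jobs": [
            {
                "RUN": "echo hello",
                "vars": {"greeting": "hello"},
                "name": "say-hello",
            }
        ],
    }
    job = dict(orchestration["jobs"][0])
    arg_vars = job.pop("vars", None)
    job_name = job.pop("name", None)
    verb, value = next(iter(job.items()))
    return dict(
        verb=verb,  # "RUN"
        execute=[value],  # ["echo hello"]
        arg_vars=arg_vars,
        targets=orchestration["targets"],
        job_name=job_name,
    )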
def test_unix_socket_error(self):
    with patch.object(directord, "UNIXSocketConnect") as conn:
        conn.side_effect = PermissionError()
        with self.assertRaises(PermissionError):
            directord.send_data("/test.sock", "test")
def test_send_data(self):
    user.directord.socket.socket = tests.MockSocket
    returned = directord.send_data(
        socket_path=self.args.socket_path, data="test"
    )
    self.assertEqual(returned, b"return data")