Exemplo n.º 1
0
    def _query_coordination(self, job_id):
        """Coordinate a query job and distribute its aggregated results.

        Polls the tracked return job until every target has produced
        STDOUT (or the job is marked failed), aggregates the per-identity
        JSON output, then queues an ARG callback job to every available
        worker so the query cache is updated environment wide.

        :param job_id: Job Id
        :type job_id: String
        """

        while True:
            tracked = self.return_jobs[job_id]
            # A failed query aborts coordination outright.
            if tracked.failed:
                self.log.critical("Query job [ %s ] encountered failures.",
                                  job_id)
                return
            # Complete once every identity has reported STDOUT.
            if all(tracked.STDOUT.values()):
                break

            self.log.info("Waiting for [ %s ], QUERY to complete", job_id)
            time.sleep(1)

        # NOTE: key insertion order is significant — the sha3_224
        # fingerprints below hash the dict as it exists at that moment.
        callback_job = {
            "skip_cache": True,
            "extend_args": True,
            "verb": "ARG",
            "args": {
                "query": {
                    identity: json.loads(output)
                    for identity, output in self.return_jobs[
                        job_id
                    ].STDOUT.items()
                }
            },
            "parent_async_bypass": True,
        }
        callback_job["job_id"] = utils.get_uuid()
        callback_job["job_sha3_224"] = utils.object_sha3_224(obj=callback_job)
        callback_job["parent_id"] = utils.get_uuid()
        callback_job["parent_sha3_224"] = utils.object_sha3_224(
            obj=callback_job
        )

        targets = self._get_available_workers()

        self.create_return_jobs(
            task=callback_job["job_id"],
            job_item=callback_job,
            targets=targets,
        )

        for identity in targets:
            self.log.debug(
                "Queuing QUERY ARG callback job [ %s ] for identity"
                " [ %s ]",
                callback_job["job_id"],
                identity,
            )
            self.send_queue.put(
                dict(
                    identity=identity,
                    command=callback_job["verb"],
                    data=callback_job,
                ))
Exemplo n.º 2
0
    def client(self, cache, job):
        """Run query command operation.

        Looks up the queried key within the cached args and returns it as
        a JSON encoded mapping. When this identity is among the job's
        targets and waiting is permitted, a QUERY_WAIT blocker job is
        registered first.

        :param cache: Caching object used to template items within a command.
        :type cache: Object
        :param job: Information containing the original job specification.
        :type job: Dictionary
        :returns: tuple
        """

        cached_args = cache.get("args")
        query = cached_args.get(job["query"]) if cached_args else None

        arg_job = job.copy()
        query_item = arg_job.pop("query")
        targets = arg_job.get("targets", list())
        if self.driver.identity in targets and not job.get("no_wait"):
            # NOTE: insertion order matters — the sha3_224 fingerprints
            # hash the dict state at the moment they are computed.
            wait_job = {
                "skip_cache": True,
                "verb": "QUERY_WAIT",
                "item": query_item,
                "query_timeout": 600,
                "parent_async_bypass": True,
                "targets": [self.driver.identity],
                "identity": list(),
            }
            wait_job["job_id"] = utils.get_uuid()
            wait_job["job_sha3_224"] = utils.object_sha3_224(obj=wait_job)
            wait_job["parent_id"] = arg_job["parent_id"]
            wait_job["parent_sha3_224"] = utils.object_sha3_224(
                obj=wait_job
            )
            self.block_on_tasks = [wait_job]

        return (json.dumps({job["query"]: query}), None, True, job["query"])
Exemplo n.º 3
0
    def wrapper_func(*args, **kwargs):
        """Run the wrapped component and register an ARG follow-up job.

        When the job declares ``stdout_arg`` / ``stderr_arg``, the wrapped
        function's output is captured into an ARG job which is appended to
        ``self.block_on_tasks`` so the argument cache is extended.
        """

        self = args[0]
        job = kwargs["job"]
        stdout_arg = job.get("stdout_arg")
        stderr_arg = job.get("stderr_arg")
        stdout, stderr, outcome, command = func(*args, **kwargs)

        if stdout_arg or stderr_arg:
            self.block_on_tasks = list()

            def _as_text(stream):
                # Normalize bytes/str/None output to a plain string.
                if stream and isinstance(stream, bytes):
                    return stream.decode()
                return stream or ""

            arg_job = job.copy()
            # Strip identifiers so the follow-up job gets fresh ones.
            for stale_key in (
                "parent_sha3_224",
                "parent_id",
                "job_sha3_224",
                "job_id",
                "stdout_arg",
                "stderr_arg",
            ):
                arg_job.pop(stale_key, None)
            # NOTE: key insertion order matters — the sha3_224
            # fingerprints hash the dict state when computed.
            arg_job["skip_cache"] = True
            arg_job["extend_args"] = True
            arg_job["verb"] = "ARG"
            arg_job["args"] = {}
            if stdout_arg:
                arg_job["args"][stdout_arg] = _as_text(stdout).strip()
            if stderr_arg:
                arg_job["args"][stderr_arg] = _as_text(stderr).strip()
            arg_job["parent_async_bypass"] = True
            arg_job["targets"] = [self.driver.identity]
            arg_job["job_id"] = utils.get_uuid()
            arg_job["job_sha3_224"] = utils.object_sha3_224(obj=arg_job)
            arg_job["parent_id"] = utils.get_uuid()
            arg_job["parent_sha3_224"] = utils.object_sha3_224(obj=arg_job)
            self.block_on_tasks.append(arg_job)

        return stdout, stderr, outcome, command
Exemplo n.º 4
0
 def test_object_sha3_224(self):
     """Verify object_sha3_224 returns the known digest for a fixed dict."""
     expected = "4bc695abcb557b8e893a69389488afa07fcf9b42028de30db92c887b"  # noqa
     self.assertEqual(
         utils.object_sha3_224(obj={"test": "value"}),
         expected,
     )
Exemplo n.º 5
0
    def format_action(
        self,
        verb,
        execute,
        arg_vars=None,
        targets=None,
        ignore_cache=False,
        restrict=None,
        parent_id=None,
        parent_sha3_224=None,
        parent_name=None,
        job_name=None,
        return_raw=False,
        parent_async=False,
    ):
        """Return a JSON encode object for task execution.

        While formatting the message, the method will treat each verb as a
        case and parse the underlying sub-command, formatting the information
        into a dictionary.

        :param verb: Action to parse.
        :type verb: String
        :param execute: Execution string to parse.
        :type execute: String
        :param targets: Target agents to send job to.
        :type targets: List
        :param arg_vars: Argument dictionary, used to set arguments in
                         dictionary format instead of string format.
        :type arg_vars: Dictionary
        :param ignore_cache: Instruct the entire execution to
                             ignore client caching.
        :type ignore_cache: Boolean
        :param restrict: Restrict job execution based on a provided task
                         SHA3_224.
        :type restrict: List
        :param parent_id: Set the parent UUID for execution jobs.
        :type parent_id: String
        :param parent_sha3_224: Set the parent sha3_224 for execution jobs.
        :type parent_sha3_224: String
        :param parent_name: Set the parent name for execution jobs.
        :type parent_name: String
        :param job_name: Set the job name.
        :type job_name: String
        :param return_raw: Enable a raw return from the server.
        :type return_raw: Boolean
        :param parent_async: Enable a parent job to run asynchronously.
        :type parent_async: Boolean
        :returns: String
        """

        data = dict(verb=verb)
        component_kwargs = dict(exec_array=execute,
                                data=data,
                                arg_vars=arg_vars)

        # Resolve the component implementing this verb. On failure the
        # third return value carries the error, which aborts the run.
        success, transfer, component = directord.component_import(
            component=verb.lower(),
            job_id=parent_id,
        )
        if not success:
            raise SystemExit(component)

        setattr(component, "verb", verb)
        # The component's server-side parser populates ``data`` in place
        # with the parsed sub-command information.
        data.update(component.server(**component_kwargs))

        data["timeout"] = getattr(component.known_args, "timeout", 600)
        data["run_once"] = getattr(component.known_args, "run_once", False)
        # NOTE: the job fingerprint is computed here, BEFORE the routing
        # and parent metadata below is added — it therefore covers only
        # the verb, parsed arguments, timeout and run_once flags.
        data["job_sha3_224"] = utils.object_sha3_224(obj=data)
        data["return_raw"] = return_raw
        # The caller's ignore_cache flag overrides the component default.
        data["skip_cache"] = ignore_cache or getattr(component.known_args,
                                                     "skip_cache", False)

        if targets:
            data["targets"] = targets

        if parent_async:
            data["parent_async"] = parent_async

        if parent_id:
            data["parent_id"] = parent_id

        if parent_sha3_224:
            data["parent_sha3_224"] = parent_sha3_224

        if parent_name:
            data["parent_name"] = parent_name

        if job_name:
            data["job_name"] = job_name

        if restrict:
            data["restrict"] = restrict

        # ``transfer`` appears to be a component file that must exist on
        # the workers; ship it via an ADD orchestration before the job
        # itself is returned for dispatch.
        if transfer:
            job = {
                "jobs": [
                    {
                        "WORKDIR": "/etc/directord/components"
                    },
                    {
                        "ADD":
                        "--skip-cache {} {}".format(
                            transfer, "/etc/directord/components/")
                    },
                ]
            }
            self.exec_orchestrations(
                orchestrations=[job],
                defined_targets=data.get("targets"),
                return_raw=True,
            )

        return json.dumps(data)
Exemplo n.º 6
0
    def exec_orchestrations(
        self,
        orchestrations,
        defined_targets=None,
        restrict=None,
        ignore_cache=False,
        return_raw=False,
    ):
        """Execute orchestration jobs.

        Iterates over a list of orchestration blobs, fingerprints the jobs,
        and then runs them.

        :param orchestrations: List of Dictionaries which are run as
                               orchestration.
        :type orchestrations: List
        :param defined_targets: List of targets to limit a given execution to.
                                This target list provides an override for
                                targets found within a given orchestration.
        :type defined_targets: List
        :param restrict: Restrict a given orchestration job to a set of
                         SHA3_224 job fingerprints.
        :type restrict: Array
        :param ignore_cache: Instruct the orchestration job to ignore cached
                             executions.
        :type ignore_cache: Boolean
        :param return_raw: Enable a raw return from the server.
        :type return_raw: Boolean
        :returns: List
        """

        job_to_run = list()
        for orchestrate in orchestrations:
            # Fingerprint the orchestration before any keys are popped so
            # the parent hash reflects the blob as provided.
            parent_sha3_224 = utils.object_sha3_224(obj=orchestrate)
            parent_name = orchestrate.get("name")
            parent_id = utils.get_uuid()
            # Target precedence: caller override, then the orchestration's
            # "assign" key (popped — mutates the input dict), then its
            # "targets" key.
            targets = (defined_targets or orchestrate.pop("assign", list())
                       or orchestrate.get("targets", list()))

            # CLI --force-async wins; otherwise parse the orchestration's
            # "async" value, accepting string forms ("yes"/"true"/...) via
            # strtobool and falling back to plain truthiness for
            # non-string values.
            force_async = getattr(self.args, "force_async", False)
            if force_async:
                parent_async = force_async
            else:
                try:
                    parent_async = bool(
                        dist_utils.strtobool(orchestrate.get("async",
                                                             "False")))
                except (ValueError, AttributeError):
                    parent_async = bool(orchestrate.get("async", False))

            for job in orchestrate["jobs"]:
                arg_vars = job.pop("vars", None)
                job_name = job.pop("name", None)
                assign = job.pop("assign", None)
                if assign and not isinstance(assign, list):
                    raise SyntaxError(
                        "Job contained an invalid assignment: {} = {}."
                        " Assignments must be in list format.".format(
                            assign, type(assign)))
                # After the pops above, the single remaining item is the
                # verb -> execution-string pair.
                key, value = next(iter(job.items()))
                job_to_run.append(
                    dict(
                        verb=key,
                        execute=[value],
                        arg_vars=arg_vars,
                        targets=assign or targets,
                        restrict=restrict,
                        ignore_cache=ignore_cache,
                        parent_id=parent_id,
                        parent_sha3_224=parent_sha3_224,
                        parent_name=parent_name,
                        job_name=job_name,
                        return_raw=return_raw,
                        parent_async=parent_async,
                    ))

        return_data = list()
        # Fingerprint mode: format every job, print a summary table, and
        # return an empty list without dispatching anything.
        if getattr(self.args, "finger_print", False):
            count = 0
            for job in job_to_run:
                tabulated_data = list()
                formatted_job = self.format_action(**job)
                item = json.loads(formatted_job)
                exec_str = " ".join(job["execute"])
                # Truncate long execution strings for table readability.
                if len(exec_str) >= 30:
                    exec_str = "{execute}...".format(execute=exec_str[:27])
                tabulated_data.extend([
                    count,
                    job["parent_name"] or job["parent_sha3_224"],
                    item["verb"],
                    exec_str,
                    job["job_name"] or item["job_sha3_224"],
                ])
                return_data.append(tabulated_data)
                count += 1
            utils.print_tabulated_data(
                data=return_data,
                headers=["count", "parent", "verb", "exec", "job"],
            )
            return []
        else:
            # Dispatch mode: send each formatted job over the server
            # socket and collect the responses.
            for job in job_to_run:
                formatted_job = self.format_action(**job)
                return_data.append(
                    directord.send_data(socket_path=self.args.socket_path,
                                        data=formatted_job))

        return return_data
Exemplo n.º 7
0
    def handle_job(
        self,
        command,
        data,
        info,
    ):
        """Handle a job interaction.

        Decodes the JSON job payload, back-fills missing job identifiers,
        and either short-circuits the job on a parent failure or queues it
        for processing.

        :param command: Command
        :type command: String
        :param data: Job data (JSON encoded string).
        :type data: Dictionary
        :param info: Job info, forwarded untouched to the process queue.
        :type info: Dictionary
        """

        job = json.loads(data)
        # Back-fill identifiers when the sender omitted them.
        job["job_id"] = job_id = job.get("job_id", utils.get_uuid())
        job["job_sha3_224"] = job_sha3_224 = job.get(
            "job_sha3_224", utils.object_sha3_224(job)
        )
        job_parent_id = job.get("parent_id")
        job_parent_sha3_224 = job.get("parent_sha3_224")
        self.log.debug(
            "Item received: parent job UUID [ %s ],"
            " parent job sha3_224 [ %s ], job UUID [ %s ],"
            " job sha3_224 [ %s ]",
            job_parent_id,
            job_parent_sha3_224,
            job_id,
            job_sha3_224,
        )

        # ClientStatus reports job state back for the lifetime of this
        # handling block.
        with utils.ClientStatus(
            job_id=job_id,
            command=command,
            ctx=self,
        ) as c:
            # A job whose parent has already failed is not run; a
            # synthetic failure result is returned instead.
            if job_parent_id and not self._parent_check(
                conn=c, cache=self.cache, job=job
            ):
                self.q_return.put(
                    (
                        None,
                        None,
                        False,
                        "Job omitted, parent failure",
                        job,
                        command,
                        0,
                        None,
                    )
                )
            else:
                c.job_state = self.driver.job_processing
                # cache=None here: the processing worker supplies its own
                # cache when it executes the component.
                component_kwargs = dict(cache=None, job=job)
                self.log.debug(
                    "Queuing component [ %s ], job_id [ %s ]",
                    command,
                    job_id,
                )
                c.info = "task queued"
                self.q_processes.put(
                    (
                        component_kwargs,
                        command,
                        info,
                    )
                )