def run_job(manifest, success_condition, failure_condition):
    """
    Create a k8s job. For example, the pi-tmpl template in
    https://github.com/argoproj/argo/blob/master/examples/k8s-jobs.yaml
    :param manifest: YAML specification of the job to be created.
    :param success_condition: expression for verifying job success.
    :param failure_condition: expression for verifying job failure.
    :return: output
    """
    if manifest is None:
        raise ValueError("Input manifest can not be null")

    func_name, caller_line = pyfunc.invocation_location()

    # Register the resource template once per function name; repeated
    # invocations only add another step below.
    if func_name not in _templates:
        job_template = OrderedDict(
            [
                ("name", func_name),
                (
                    "resource",
                    _create_job(
                        manifest=manifest,
                        success_condition=success_condition,
                        failure_condition=failure_condition,
                    ),
                ),
            ]
        )
        # TODO: add input support
        _templates[func_name] = job_template

    _update_steps(func_name, caller_line)
    # TODO: add output support
    return None
def __init__(self, secret_data):
    """Store the secret payload and register its generated YAML.

    The secret name embeds the calling function and line number so that
    secrets declared at different call sites do not collide.
    """
    self.data = secret_data
    func_name, line_no = pyfunc.invocation_location()
    self.name = "couler-secret-%s-%d" % (func_name, line_no)
    # NOTE(review): this overwrites the module-level _secrets on every
    # construction — presumably only one secret is active at a time; confirm.
    global _secrets
    _secrets = self.generate_secret_yaml()
def artifact(path):
    """
    configure the output object
    """
    _, line_no = pyfunc.invocation_location()
    # TODO: support outputs to an artifact repo later
    return Artifact(path=path, id="output-id-%s" % line_no, type="parameters")
def concurrent(function_list):
    """
    Start different jobs at the same time.

    :param function_list: list of zero-argument functions; each is invoked
        in turn and its step is grouped into one concurrent block.
    :raises SyntaxError: if the input is not a list (kept as-is for
        backward compatibility with existing callers).
    :raises TypeError: if an element of the list is not a plain function.
    """
    if not isinstance(function_list, list):
        raise SyntaxError("require input functions as list")

    _, con_caller_line = pyfunc.invocation_location()
    global _concurrent_func_line
    _concurrent_func_line = con_caller_line
    global _run_concurrent_lock
    _run_concurrent_lock = True
    try:
        for function in function_list:
            if isinstance(function, types.FunctionType):
                function()
            else:
                raise TypeError("require loop over a function to run")
    finally:
        # Always release the lock, even when a step raises; the original
        # code left it True on error, which would silently merge every
        # later (non-concurrent) step into this concurrent block.
        _run_concurrent_lock = False
def run_script(image, command=None, source=None, env=None, resources=None):
    """Generate an Argo script template.

    For example,
    https://github.com/argoproj/argo/tree/master/examples#scripts--results

    :param image: container image to run the script in.
    :param command: interpreter command, either a string or a list (a bare
        string is accepted for backward compatibility); defaults to "python".
    :param source: the function whose body becomes the script; must not be
        None.
    :param env: dict of environment variables for the container.
    :param resources: resource requests/limits specification.
    :return: reference to the script step's output.
    :raises ValueError: if *source* is None on first registration.
    """
    function_name, caller_line = pyfunc.invocation_location()

    if function_name not in _templates:
        if source is None:
            raise ValueError("Input script can not be null")

        template = OrderedDict({"name": function_name})

        # Script
        script = OrderedDict()
        if image is not None:
            script["image"] = image

        # The original code called command.lower() unconditionally, which
        # raised AttributeError when callers passed a list (as the sibling
        # run_container allows).  Normalize to a list here instead.
        if command is None:
            command = ["python"]
        elif not isinstance(command, list):
            command = [command]
        script["command"] = command

        # To retrieve function code when the interpreter is python;
        # otherwise pass the source through verbatim.
        script["source"] = (
            pyfunc.body(source)
            if command[0].lower() == "python"
            else source
        )

        if env is not None:
            script["env"] = _convert_dict_to_env_list(env)
        if resources is not None:
            script["resources"] = _resources(resources)
        template["script"] = script

        # config the pod with cluster specific config
        template = _update_pod_config(template)
        _templates[function_name] = template

    _update_steps(function_name, caller_line)
    return pyfunc.script_output(function_name, caller_line)
def run_container(
    image,
    command=None,
    args=None,
    output=None,
    env=None,
    secret=None,
    resources=None,
):
    """Generate an Argo container template.

    For example, the template whalesay in
    https://github.com/argoproj/argo/tree/master/examples#hello-world

    :param image: container image to run.
    :param command: command (string or list) appended after ``bash -c``.
    :param args: list of arguments, exposed as template input parameters.
    :param output: an ``Artifact`` describing the container's output file.
    :param env: dict of environment variables for the container.
    :param secret: secret whose entries are injected as env variables.
    :param resources: resource requests/limits specification.
    :return: reference to the container step's output.
    """
    function_name, caller_line = pyfunc.invocation_location()
    output_id = None

    if function_name not in _templates:
        template = OrderedDict({"name": function_name})

        # Generate the inputs parameter for the template, one named
        # parameter per positional arg.
        if args is not None:
            parameters = [
                {"name": pyfunc.input_parameter(function_name, i)}
                for i in range(len(args))
            ]
            template["inputs"] = OrderedDict({"parameters": parameters})

        # Generate the container template.
        container = OrderedDict()
        if image is not None:
            container["image"] = image

        container["command"] = ["bash", "-c"]
        if isinstance(command, list):
            container["command"].extend(command)
        elif command is not None:
            container["command"].append(command)

        if args is not None:
            # Rewrite the args into yaml format, referencing the template
            # input parameters declared above.
            container["args"] = [
                '"{{inputs.parameters.%s}}"'
                % pyfunc.input_parameter(function_name, i)
                for i in range(len(args))
            ]

        if env is not None:
            container["env"] = _convert_dict_to_env_list(env)

        if secret is not None:
            # Merge secret-derived env entries with any env set above.
            env_secrets = _convert_secret_to_list(secret)
            container.setdefault("env", []).extend(env_secrets)

        if resources is not None:
            container["resources"] = _resources(resources)
        template["container"] = container

        # Generate the output.
        if output is not None and isinstance(output, Artifact):
            output_id = output.id
            _output = OrderedDict()
            _output["parameters"] = [
                {"name": output_id, "valueFrom": {"path": output.path}}
            ]
            template["outputs"] = _output
        # else TODO, when container does not output anything

        # Update the pod with cluster specific config.
        template = _update_pod_config(template)
        _templates[function_name] = template

    if _run_concurrent_lock:
        # Inside a concurrent() block: group this step under the shared
        # concurrent step line instead of its own call site.
        _update_steps(
            "concurrent_func_name", _concurrent_func_line, args, function_name
        )
    else:
        _update_steps(function_name, caller_line, args)

    if output_id is None:
        output_id = "output-id-%s" % caller_line
    return pyfunc.container_output(function_name, caller_line, output_id)