def _get_params_and_artifacts_from_args(args, input_param_name, prefix):
    """Split step-call arguments into Argo input parameters and artifacts.

    Each argument is first resolved through ``parse_argo_output`` so that
    references to other steps' outputs become Argo template expressions.

    Args:
        args: a single argument or a list of arguments passed to a step.
        input_param_name: base name used to derive each parameter's name
            via ``utils.input_parameter_name``.
        prefix: Argo reference prefix (e.g. ``steps``/``tasks``) forwarded
            to ``parse_argo_output``.

    Returns:
        A ``(parameters, artifacts)`` tuple: ``parameters`` is a list of
        ``{"name": ..., "value": ...}`` dicts and ``artifacts`` a list of
        ``{"name": ..., "from": ...}`` dicts, deduplicated by ``"from"``.
    """
    parameters = []
    artifacts = []
    # Normalize a single argument into a one-element list.
    if not isinstance(args, list):
        args = [args]
    i = 0  # running index used to generate unique parameter names
    for arg in args:
        values = couler.core.templates.output.parse_argo_output(arg, prefix)
        if isinstance(values, list):
            # A multi-output step: one parameter entry per resolved value.
            for value in values:
                parameters.append({
                    "name": utils.input_parameter_name(input_param_name, i),
                    "value": value,
                })
                i += 1
        else:
            if isinstance(arg, OutputArtifact):
                # arg.value looks like a dotted step-output path; the name
                # is everything after the 5th dot-separated component.
                # assumes arg.value has >= 6 components — TODO confirm
                artifact_dict = {
                    "name": ".".join(arg.value.split(".")[5:]),
                    "from": values,
                }
                # Deduplicate artifacts by their "from" reference.
                if not any(
                        [artifact_dict["from"] == x["from"]
                         for x in artifacts]):
                    artifacts.append(artifact_dict)
            else:
                parameters.append({
                    "name": utils.input_parameter_name(input_param_name, i),
                    "value": values,
                })
            # NOTE(review): index advances for both artifact and parameter
            # branches here — confirm against the original indentation.
            i += 1
    return parameters, artifacts
def _get_params_and_artifacts_from_args(args, input_param_name, prefix):
    """Split step-call arguments into Argo input parameters and artifacts.

    Simpler variant: artifacts are named with the same generated parameter
    name scheme and are NOT deduplicated.

    Args:
        args: a single argument or a list of arguments passed to a step.
        input_param_name: base name for ``utils.input_parameter_name``.
        prefix: Argo reference prefix forwarded to ``parse_argo_output``.

    Returns:
        ``(parameters, artifacts)`` lists of dicts as consumed by the Argo
        template builder.
    """
    parameters = []
    artifacts = []
    # Normalize a single argument into a one-element list.
    if not isinstance(args, list):
        args = [args]
    i = 0  # running index used to generate unique input names
    for values in args:
        # Resolve step-output references into Argo expressions.
        values = couler.core.templates.output.parse_argo_output(values, prefix)
        if isinstance(values, list):
            # Multi-output step: one parameter per resolved value.
            for value in values:
                parameters.append({
                    "name": utils.input_parameter_name(input_param_name, i),
                    "value": value,
                })
                i += 1
        else:
            if isinstance(values, OutputArtifact):
                # The artifact name uses index i, so i must advance for
                # artifacts too or consecutive artifacts would collide.
                artifacts.append({
                    "name": utils.input_parameter_name(input_param_name, i),
                    "from": values,
                })
            else:
                parameters.append({
                    "name": utils.input_parameter_name(input_param_name, i),
                    "value": values,
                })
            i += 1
    return parameters, artifacts
def _get_params_and_artifacts_from_args(args, input_param_name, prefix):
    """Split step-call arguments into Argo input parameters and artifacts.

    Variant that parses an ``OutputArtifact``'s dotted representation
    itself and builds the Argo ``{{...}}`` reference explicitly, honoring
    ``is_global`` (workflow-level outputs).

    Args:
        args: a single argument or a list of arguments passed to a step.
        input_param_name: base name for ``utils.input_parameter_name``.
        prefix: Argo reference prefix (e.g. ``steps``) used when the
            artifact is not a global output.

    Returns:
        ``(parameters, artifacts)`` — parameter dicts and artifact dicts
        (deduplicated by the generated ``"from"`` reference).

    Raises:
        ValueError: if an artifact's dotted representation has fewer than
            5 components.
    """
    parameters = []
    artifacts = []
    # Normalize a single argument into a one-element list.
    if not isinstance(args, list):
        args = [args]
    i = 0  # running index used to generate unique parameter names
    for values in args:
        values = couler.core.templates.output.parse_argo_output(values, prefix)
        if isinstance(values, list):
            # Multi-output step: one parameter per resolved value.
            for value in values:
                parameters.append(
                    {
                        "name": utils.input_parameter_name(
                            input_param_name, i
                        ),
                        "value": value,
                    }
                )
                i += 1
        else:
            if isinstance(values, OutputArtifact):
                # values.value encodes e.g. "<kind>.<step>.<...>.<output-id...>";
                # component [1] is the step name and [3:] the output id.
                tmp = values.value.split(".")
                if len(tmp) < 5:
                    raise ValueError("Incorrect step return representation")
                step_name = tmp[1]
                output_id = tmp[3]
                # Re-join the remaining components into the full output id.
                for item in tmp[4:]:
                    output_id = output_id + "." + item
                if values.is_global:
                    # Global outputs live under workflow.outputs.
                    value = '"{{workflow.outputs.%s}}"' % output_id
                else:
                    value = '"{{%s.%s.%s}}"' % (prefix, step_name, output_id)
                artifact = {"name": ".".join(tmp[5:]), "from": value}
                # Deduplicate artifacts by their "from" reference.
                if not any([value == x["from"] for x in artifacts]):
                    artifacts.append(artifact)
            else:
                parameters.append(
                    {
                        "name": utils.input_parameter_name(
                            input_param_name, i
                        ),
                        "value": values,
                    }
                )
            # NOTE(review): index advances for both branches here — confirm
            # against the original indentation.
            i += 1
    return parameters, artifacts
def _convert_args_to_input_parameters(self, args):
    """Build the deduplicated list of Argo input-parameter references.

    Artifacts are skipped (they are handled as input artifacts, not
    parameters), but they still consume their position index so the
    generated names line up with the step's argument order.

    Args:
        args: the step's argument list, or None.

    Returns:
        A list of '"{{inputs.parameters.<name>}}"' reference strings,
        without duplicates, in first-seen order.
    """
    parameters = []
    if args is None:
        return parameters
    for idx, arg in enumerate(args):
        # Artifact arguments do not become input parameters.
        if isinstance(arg, OutputArtifact):
            continue
        ref = '"{{inputs.parameters.%s}}"' % utils.input_parameter_name(
            self.name, idx)
        if ref not in parameters:
            parameters.append(ref)
    return parameters
def _convert_args_to_input_parameters(self, args):
    """Build Argo input references for every step argument, in order.

    ``OutputArtifact`` arguments become '"{{inputs.artifacts.<name>}}"'
    references (using the artifact's own name); every other argument
    becomes a '"{{inputs.parameters.<name>}}"' reference named by its
    position.

    Args:
        args: the step's argument list, or None.

    Returns:
        A list with one reference string per argument (empty if ``args``
        is None).
    """
    refs = []
    if args is None:
        return refs
    for idx, arg in enumerate(args):
        if isinstance(arg, OutputArtifact):
            refs.append('"{{inputs.artifacts.%s}}"' % arg.artifact["name"])
        else:
            refs.append(
                '"{{inputs.parameters.%s}}"'
                % utils.input_parameter_name(self.name, idx)
            )
    return refs
def to_dict(self): template = Template.to_dict(self) # Inputs parameters = [] if self.args is not None: i = 0 for arg in self.args: if not isinstance(self.args[i], OutputArtifact): if isinstance(arg, OutputJob): for _ in range(3): parameters.append({ "name": utils.input_parameter_name(self.name, i) }) i += 1 else: para_name = utils.input_parameter_name(self.name, i) parameters.append({"name": para_name}) i += 1 # Input # Case 1: add the input parameter if len(parameters) > 0: template["inputs"] = OrderedDict() template["inputs"]["parameters"] = parameters # Case 2: add the input artifact if self.input is not None: _input_list = [] for o in self.input: if isinstance(o, TypedArtifact): _input_list.append(o.to_yaml()) if isinstance(o, OutputArtifact): name = o.artifact["name"] if not any(name == x["name"] for x in _input_list): _input_list.append(o.artifact) if len(_input_list) > 0: if "inputs" not in template: template["inputs"] = OrderedDict() template["inputs"]["artifacts"] = _input_list # Node selector if self.node_selector is not None: # TODO: Support inferring node selector values from Argo parameters template["nodeSelector"] = self.node_selector # Container if (not utils.gpu_requested(self.resources) and states._overwrite_nvidia_gpu_envs): if self.env is None: self.env = {} self.env.update(OVERWRITE_GPU_ENVS) template["container"] = self.container_dict() # Output if self.output is not None: _output_list = [] for o in self.output: _output_list.append(o.to_yaml()) if isinstance(o, TypedArtifact): # Require only one kind of output type template["outputs"] = {"artifacts": _output_list} else: template["outputs"] = {"parameters": _output_list} # Volume if self.volume_mounts is not None: if self.volumes is None: template["volumes"] = [] for volume_mount in self.volume_mounts: template["volumes"].append({ "name": volume_mount.name, "emptyDir": {} }) return template
def step_repr(
    step_name=None,
    tmpl_name=None,
    image=None,
    command=None,
    source=None,
    env=None,
    script_output=None,
    args=None,
    input=None,
    output=None,
    manifest=None,
    success_cond=None,
    failure_cond=None,
    canned_step_name=None,
    canned_step_args=None,
    resources=None,
):
    """Build the protobuf representation of a workflow step and register it.

    Creates a ``couler_pb2.Step``, fills its container/resource/canned-step
    specs from the arguments, registers the step template on the default
    proto workflow (once per ``tmpl_name``), and appends the step either to
    the exit-handler list or the main step list.

    Args:
        step_name: required step name.
        tmpl_name: required template name; templates are deduplicated.
        image, command, source, env: container spec fields.
        script_output, args, input, output: step I/O declarations.
        manifest, success_cond, failure_cond: resource-spec fields; when
            both conditions are given the container command/script setup
            is skipped.
        canned_step_name, canned_step_args: canned-step spec (dict args).
        resources: dict of resource requests, e.g. cpu/memory/gpu.

    Returns:
        The populated ``couler_pb2.Step`` (before it was copied into the
        workflow proto).

    Raises:
        ValueError: for a non-str/list command or non-dict canned args.
    """
    assert step_name is not None
    assert tmpl_name is not None
    # generate protobuf step representation
    pb_step = couler_pb2.Step()
    pb_step.id = get_uniq_step_id()
    pb_step.name = step_name
    pb_step.tmpl_name = tmpl_name
    if env is not None:
        for k, v in env.items():
            # Proto env map holds strings; JSON-encode non-string values.
            if isinstance(v, str):
                pb_step.container_spec.env[k] = v
            else:
                pb_step.container_spec.env[k] = json.dumps(v)
    # image can be None if manifest specified.
    if image is not None:
        pb_step.container_spec.image = image
    if manifest is not None:
        pb_step.resource_spec.manifest = manifest
    if success_cond is not None and failure_cond is not None:
        # Resource (manifest) step: record conditions, no container command.
        pb_step.resource_spec.success_condition = success_cond
        pb_step.resource_spec.failure_condition = failure_cond
    else:
        if command is None:
            # Default interpreter for script steps.
            pb_step.container_spec.command.append("python")
        elif isinstance(command, list):
            pb_step.container_spec.command.extend(command)
        elif isinstance(command, str):
            pb_step.container_spec.command.append(command)
        else:
            raise ValueError("command must be str or list")
        if source is not None:
            # A callable source is converted to its body text.
            if isinstance(source, str):
                pb_step.script = source
            else:
                pb_step.script = utils.body(source)
    if canned_step_name is not None and canned_step_args is not None:
        pb_step.canned_step_spec.name = canned_step_name
        if isinstance(canned_step_args, dict):
            for k, v in canned_step_args.items():
                pb_step.canned_step_spec.args[k] = v
        else:
            raise ValueError("canned_step_spec.args must be a dictionary")
    # setup resources
    if resources is not None:
        if not isinstance(resources, dict):
            raise ValueError("resources must be type dict")
        for k, v in resources.items():
            # key: cpu, memory, gpu
            # value: "1", "8", "500m", "1Gi" etc.
            pb_step.container_spec.resources[k] = str(v)
    # Propagate an active when() context onto the step.
    if states._when_prefix is not None:
        pb_step.when = states._when_prefix
    # add template to proto workflow
    wf = get_default_proto_workflow()
    if tmpl_name not in wf.templates:
        proto_step_tmpl = couler_pb2.StepTemplate()
        proto_step_tmpl.name = tmpl_name
        _add_io_to_template(proto_step_tmpl, pb_step.id, input, output,
                            script_output)
        wf.templates[tmpl_name].CopyFrom(proto_step_tmpl)
    # add step arguments
    if args is not None:
        for i, arg in enumerate(args):
            if isinstance(arg, OutputArtifact):
                pb_art = pb_step.args.add()
                pb_art.artifact.name = arg.artifact["name"]
                pb_art.artifact.value = ('"{{inputs.artifacts.%s}}"'
                                         % pb_art.artifact.name)
            else:
                pb_param = pb_step.args.add()
                pb_param.parameter.name = utils.input_parameter_name(
                    pb_step.name, i)
                pb_param.parameter.value = ('"{{inputs.parameters.%s}}"'
                                            % pb_param.parameter.name)
    if states._exit_handler_enable:
        # add exit handler steps
        eh_step = wf.exit_handler_steps.add()
        eh_step.CopyFrom(pb_step)
    else:
        # add step to proto workflow
        concurrent_step = wf.steps.add()
        inner_step = concurrent_step.steps.add()
        inner_step.CopyFrom(pb_step)
    return pb_step
def to_dict(self): template = Template.to_dict(self) # Inputs parameters = [] if self.args is not None: i = 0 for arg in self.args: if not isinstance(self.args[i], OutputArtifact): if isinstance(arg, OutputJob): for _ in range(3): parameters.append({ "name": utils.input_parameter_name(self.name, i) }) i += 1 else: para_name = utils.input_parameter_name(self.name, i) parameters.append({"name": para_name}) i += 1 # Input # Case 1: add the input parameter if len(parameters) > 0: template["inputs"] = OrderedDict() template["inputs"]["parameters"] = parameters # Case 2: add the input artifact if self.input is not None: _input_list = [] for o in self.input: if isinstance(o, TypedArtifact): _input_list.append(o.to_yaml()) if isinstance(o, OutputArtifact): _input_list.append(o.artifact) if len(_input_list) > 0: if "inputs" not in template: template["inputs"] = OrderedDict() template["inputs"]["artifacts"] = _input_list # Container if not utils.gpu_requested(self.resources): if self.env is None: self.env = {} self.env.update(OVERWRITE_GPU_ENVS) template["container"] = self.container_dict() # Output if self.output is not None: _output_list = [] for o in self.output: _output_list.append(o.to_yaml()) if isinstance(o, TypedArtifact): # Require only one kind of output type template["outputs"] = {"artifacts": _output_list} else: template["outputs"] = {"parameters": _output_list} return template
def step_repr(
    step_name=None,
    tmpl_name=None,
    image=None,
    command=None,
    source=None,
    env=None,
    script_output=None,
    args=None,
    input=None,
    output=None,
    manifest=None,
    success_cond=None,
    failure_cond=None,
    canned_step_name=None,
    canned_step_args=None,
    resources=None,
    secret=None,
    action=None,
    volume_mounts=None,
    cache=None,
):
    """Build the protobuf representation of a workflow step and register it.

    Extended variant supporting secrets, step-level caches, a resource
    action, and volume mounts. Creates a ``couler_pb2.Step``, registers the
    step template on the default proto workflow (once per ``tmpl_name``),
    and appends the step to either the exit-handler or main step list.

    Args:
        step_name: required step name.
        tmpl_name: required template name; templates are deduplicated.
        image, command, source, env: container spec fields.
        script_output, args, input, output: step I/O declarations.
        manifest, success_cond, failure_cond, action: resource-spec fields.
        canned_step_name, canned_step_args: canned-step spec (dict args).
        resources: dict of resource requests, e.g. cpu/memory/gpu.
        secret: object with ``name`` and ``data`` (mapping); each data key
            is recorded as a step secret.
        volume_mounts: objects with ``name`` and ``mount_path``.
        cache: object with ``name``, ``key`` and ``max_age``.

    Returns:
        The populated ``couler_pb2.Step``.

    Raises:
        ValueError: for a non-str/list command, non-dict canned args, or
            non-dict resources.
    """
    assert step_name is not None
    assert tmpl_name is not None
    # generate protobuf step representation
    pb_step = couler_pb2.Step()
    pb_step.id = get_uniq_step_id()
    pb_step.name = step_name
    pb_step.tmpl_name = tmpl_name
    if env is not None:
        _add_env_to_step(pb_step, env)
    if secret is not None:
        # Only the keys are recorded; values stay in the secret object.
        for k, _ in secret.data.items():
            pb_secret = pb_step.secrets.add()
            pb_secret.key = k
            pb_secret.name = secret.name
    if cache is not None:
        pb_step.cache.name = cache.name
        pb_step.cache.key = cache.key
        pb_step.cache.max_age = cache.max_age
    # image can be None if manifest specified.
    if image is not None:
        pb_step.container_spec.image = image
    if manifest is not None:
        pb_step.resource_spec.manifest = manifest
    if success_cond is not None and failure_cond is not None:
        # Resource (manifest) step: record conditions and optional action;
        # no container command/script is set up.
        pb_step.resource_spec.success_condition = success_cond
        pb_step.resource_spec.failure_condition = failure_cond
        if action is not None:
            pb_step.resource_spec.action = action
    else:
        if command is None:
            # Default interpreter for script steps.
            pb_step.container_spec.command.append("python")
        elif isinstance(command, list):
            pb_step.container_spec.command.extend(command)
        elif isinstance(command, str):
            pb_step.container_spec.command.append(command)
        else:
            raise ValueError("command must be str or list")
        if source is not None:
            # A callable source is converted to its body text.
            if isinstance(source, str):
                pb_step.script = source
            else:
                pb_step.script = utils.body(source)
    if canned_step_name is not None and canned_step_args is not None:
        pb_step.canned_step_spec.name = canned_step_name
        if isinstance(canned_step_args, dict):
            for k, v in canned_step_args.items():
                pb_step.canned_step_spec.args[k] = v
        else:
            raise ValueError("canned_step_spec.args must be a dictionary")
    # setup resources
    if resources is not None:
        if not isinstance(resources, dict):
            raise ValueError("resources must be type dict")
        for k, v in resources.items():
            # key: cpu, memory, gpu
            # value: "1", "8", "500m", "1Gi" etc.
            pb_step.container_spec.resources[k] = str(v)
    # Attach volume mounts
    if volume_mounts is not None:
        for vm in volume_mounts:
            vol_mount = pb_step.container_spec.volume_mounts.add()
            vol_mount.name = vm.name
            vol_mount.path = vm.mount_path
    # Propagate an active when() context onto the step.
    if states._when_prefix is not None:
        pb_step.when = states._when_prefix
    # add template to proto workflow
    wf = get_default_proto_workflow()
    if tmpl_name not in wf.templates:
        proto_step_tmpl = couler_pb2.StepTemplate()
        proto_step_tmpl.name = tmpl_name
        _add_io_to_template(proto_step_tmpl, pb_step.id, input, output,
                            script_output)
        wf.templates[tmpl_name].CopyFrom(proto_step_tmpl)
    # add step arguments
    if args is not None:
        for i, arg in enumerate(args):
            if isinstance(arg, OutputArtifact):
                pb_art = pb_step.args.add()
                pb_art.artifact.name = arg.artifact["name"]
                pb_art.artifact.value = arg.value
            else:
                pb_param = pb_step.args.add()
                pb_param.parameter.name = utils.input_parameter_name(
                    pb_step.name, i)
                pb_param.parameter.value = (
                    # This is casted to string for protobuf and Argo
                    # will automatically convert it to a correct type.
                    str(arg)
                    if isinstance(arg, (str, float, bool, int))
                    else arg.value)
    if states._exit_handler_enable:
        # add exit handler steps
        eh_step = wf.exit_handler_steps.add()
        eh_step.CopyFrom(pb_step)
    else:
        # add step to proto workflow
        concurrent_step = wf.steps.add()
        inner_step = concurrent_step.steps.add()
        inner_step.CopyFrom(pb_step)
    return pb_step
def step_repr(
    step_name=None,
    tmpl_name=None,
    image=None,
    command=None,
    source=None,
    script_output=None,
    args=None,
    input=None,
    output=None,
    manifest=None,
    success_cond=None,
    failure_cond=None,
):
    """Build the protobuf representation of a workflow step and register it.

    Early variant: no env/secret/cache/when/exit-handler support. Creates
    a ``couler_pb2.Step``, registers the step template on the default
    proto workflow (once per ``tmpl_name``), and appends the step to the
    workflow's step list as a single-entry concurrent group.

    Args:
        step_name: required step name.
        tmpl_name: required template name; templates are deduplicated.
        image, command, source: container spec fields (image may be None
            when a manifest is given).
        script_output, args, input, output: step I/O declarations.
        manifest, success_cond, failure_cond: resource-spec fields.

    Returns:
        The populated ``couler_pb2.Step``.

    Raises:
        ValueError: if command is neither str nor list.
    """
    assert step_name is not None
    assert tmpl_name is not None
    # generate protobuf step representation
    pb_step = couler_pb2.Step()
    pb_step.id = get_uniq_step_id()
    pb_step.name = step_name
    pb_step.tmpl_name = tmpl_name
    # image can be None if manifest specified.
    if image is not None:
        pb_step.container_spec.image = image
    if manifest is not None:
        pb_step.resource_spec.manifest = manifest
    # Fix: guard each condition independently. The original assigned
    # failure_cond whenever success_cond was set, raising TypeError when
    # failure_cond is None (proto string fields reject None). Sibling
    # versions of step_repr guard both conditions.
    if success_cond is not None:
        pb_step.resource_spec.success_condition = success_cond
    if failure_cond is not None:
        pb_step.resource_spec.failure_condition = failure_cond
    if command is None:
        # Default interpreter for script steps.
        pb_step.container_spec.command.append("python")
    elif isinstance(command, list):
        pb_step.container_spec.command.extend(command)
    elif isinstance(command, str):
        pb_step.container_spec.command.append(command)
    else:
        raise ValueError("command must be str or list")
    if source is not None:
        # A callable source is converted to its body text.
        if isinstance(source, str):
            pb_step.script = source
        else:
            pb_step.script = utils.body(source)
    # add template to proto workflow
    wf = get_default_proto_workflow()
    if tmpl_name not in wf.templates:
        proto_step_tmpl = couler_pb2.StepTemplate()
        proto_step_tmpl.name = tmpl_name
        _add_io_to_template(proto_step_tmpl, pb_step.id, input, output,
                            script_output)
        wf.templates[tmpl_name].CopyFrom(proto_step_tmpl)
    # add step arguments
    if args is not None:
        for i, arg in enumerate(args):
            if isinstance(arg, OutputArtifact):
                pb_art = couler_pb2.StepIO()
                pb_art.artifact.name = arg.artifact["name"]
                pb_art.artifact.value = ('"{{inputs.artifacts.%s}}"'
                                         % pb_art.artifact.name)
                pb_step.args.append(pb_art)
            else:
                pb_param = couler_pb2.StepIO()
                pb_param.parameter.name = utils.input_parameter_name(
                    pb_step.name, i)
                pb_param.parameter.value = ('"{{inputs.parameters.%s}}"'
                                            % pb_param.parameter.name)
                pb_step.args.append(pb_param)
    # add step to proto workflow
    concurrent_step = couler_pb2.ConcurrentSteps()
    concurrent_step.steps.append(pb_step)
    wf.steps.append(concurrent_step)
    return pb_step