def job_d(message):
    """Run a whalesay container step named "D" that cowsays *message*."""
    step_spec = {
        "image": "docker/whalesay:latest",
        "command": ["cowsay"],
        "args": [message],
        "step_name": "D",
    }
    couler.run_container(**step_spec)
def consume_two(step_name):
    """Echo a fixed "--input: x" line in a whalesay container step."""
    couler.run_container(
        step_name=step_name,
        image="docker/whalesay:latest",
        command=["echo"],
        args=["--input: x"],
    )
def test_resource_setup(self):
    """Resource requests on a step must match the golden YAML config."""
    couler.run_container(
        image="docker/whalesay",
        command=["cowsay"],
        args=["resource test"],
        resources={"cpu": "1", "memory": "100Mi"},
    )
    # The local and CI environments differ, so only the `resources`
    # section is compared instead of the whole YAML document.
    golden_path = os.path.join(
        os.path.dirname(__file__), "test_data", "resource_config_golden.yaml"
    )
    with open(golden_path, "r") as golden_file:
        expected = yaml.safe_load(golden_file)
    output = yaml.safe_load(
        pyaml.dump(couler.workflow_yaml(), string_val_style="plain")
    )
    actual_resources = output["spec"]["templates"][1]["container"]["resources"]
    golden_resources = expected["spec"]["templates"][1]["container"][
        "resources"
    ]
    self.assertEqual(actual_resources, golden_resources)
    couler._cleanup()
def job(name):
    """Start a whalesay step whose step name and cowsay text are *name*."""
    couler.run_container(
        step_name=name,
        args=[name],
        command=["cowsay"],
        image="docker/whalesay:latest",
    )
def sqlflow(sql, image="sqlflow/sqlflow", env=None, secret=None):
    """Append a workflow step that runs *sql* through the SQLFlow REPL.

    The statement is escaped and handed to ``repl -e`` inside *image*;
    *env* and *secret* are forwarded to the container step unchanged.
    """
    repl_command = 'repl -e "%s"' % escape_sql(sql)
    couler.run_container(
        command=repl_command,
        image=image,
        env=env,
        secret=secret,
    )
def test_run_container_with_workflow_volume(self):
    """Two steps sharing one PVC yield a single volumeClaimTemplate."""
    pvc = VolumeClaimTemplate("workdir")
    volume_mount = VolumeMount("workdir", "/mnt/vol")
    couler.create_workflow_volume(pvc)
    # Identical step submitted twice; the claim template must be
    # de-duplicated at the workflow level.
    shared_kwargs = dict(
        image="docker/whalesay:latest",
        args=["echo -n hello world"],
        command=["bash", "-c"],
        step_name="A",
    )
    couler.run_container(volume_mounts=[volume_mount], **shared_kwargs)
    volume_mount = VolumeMount("workdir", "/mnt/vol")
    couler.run_container(volume_mounts=[volume_mount], **shared_kwargs)
    rendered = couler.workflow_yaml()
    spec = rendered["spec"]
    self.assertEqual(len(spec["volumeClaimTemplates"]), 1)
    self.assertEqual(spec["volumeClaimTemplates"][0], pvc.to_dict())
    self.assertEqual(
        spec["templates"][1]["container"]["volumeMounts"][0],
        volume_mount.to_dict(),
    )
    couler._cleanup()
def test_output_oss_artifact(self):
    """The OSS output artifact survives the proto round trip intact."""
    # The content of the local file would be uploaded to OSS.
    output_artifact = couler.create_oss_artifact(
        path="/home/t1.txt",
        bucket="test-bucket/",
        accesskey_id="abcde",
        accesskey_secret="abc12345",
        key="osspath/t1",
        endpoint="xyz.com",
    )
    couler.run_container(
        image="docker/whalesay:latest",
        args=["echo -n hello world > %s" % output_artifact.path],
        command=["bash", "-c"],
        output=output_artifact,
    )
    proto_wf = get_default_proto_workflow()
    step = proto_wf.steps[0].steps[0]
    template = proto_wf.templates[step.tmpl_name]
    artifact = template.outputs[0].artifact
    self.assertEqual(step.container_spec.image, "docker/whalesay:latest")
    self.assertTrue(artifact.name.startswith("output-oss"))
    self.assertEqual(artifact.local_path, "/home/t1.txt")
    self.assertEqual(artifact.endpoint, "xyz.com")
    self.assertEqual(artifact.bucket, "test-bucket/")
    self.assertEqual(artifact.access_key.key, "accessKey")
    secret_key = artifact.secret_key
    self.assertEqual(secret_key.key, "secretKey")
    self.assertEqual(
        secret_key.value,
        states._secrets[secret_key.name].data[secret_key.key],
    )
def consumer(inputs):
    """Cat the first input artifact's file inside a whalesay container."""
    # NOTE: the whole "cat <path>" string is the single command token here,
    # and the artifacts themselves are forwarded as the step's args.
    cat_command = "cat %s" % inputs[0].path
    couler.run_container(
        image="docker/whalesay:latest",
        args=inputs,
        command=[cat_command],
    )
def test_create_secret(self):
    """Created and obtained secrets both serialize to correct YAML."""
    # First job: a brand-new secret holding user credentials.
    user_info = {"uname": "abc", "passwd": "def"}
    secret1 = couler.create_secret(secret_data=user_info, name="dummy1")
    couler.run_container(
        image="python:3.6", secret=secret1, command="echo $uname"
    )
    # Second job: reference a secret that already exists in the cluster.
    access_key = ["access_key", "access_value"]
    secret2 = couler.obtain_secret(
        secret_keys=access_key, namespace="test", name="dummy2"
    )
    couler.run_container(
        image="python:3.6", secret=secret2, command="echo $access_value"
    )
    # Inspect the generated secret YAML.
    self.assertEqual(len(couler.states._secrets), 2)
    yaml1 = couler.states._secrets[secret1].to_yaml()
    yaml2 = couler.states._secrets[secret2].to_yaml()
    self.assertEqual(yaml1["metadata"]["name"], "dummy1")
    self.assertEqual(len(yaml1["data"]), 2)
    self.assertEqual(yaml1["data"]["uname"], utils.encode_base64("abc"))
    self.assertEqual(yaml1["data"]["passwd"], utils.encode_base64("def"))
    self.assertEqual(yaml2["metadata"]["namespace"], "test")
    self.assertEqual(yaml2["metadata"]["name"], "dummy2")
    self.assertEqual(len(yaml2["data"]), 2)
def consumer(step_name):
    """Run a step that cats the file at the module-level default path."""
    # Reads content previously written to `default_path` by the producer.
    couler.run_container(
        step_name=step_name,
        image="docker/whalesay:latest",
        command=["cat %s" % default_path],
        args=["--test 1"],
    )
def run_a_gpu_container(self):
    """Launch a python container requesting one GPU that echoes MESSAGE."""
    gpu_resources = {"cpu": 1, "memory": 1024, "gpu": 1}
    couler.run_container(
        image="python:3.6",
        env=copy.deepcopy(self.envs),
        resources=gpu_resources,
        command=["bash", "-c", "echo ${MESSAGE}"],
    )
def test_exit_handler(self):
    """An exit handler registered for Failed produces one guarded step."""

    def send_mail():
        # Stand-in notification step used as the exit handler body.
        return couler.run_container(
            image="alpine:3.6", command=["echo", "send mail"]
        )

    couler.run_container(image="alpine:3.6", command=["exit", "1"])
    couler.set_exit_handler(couler.WFStatus.Failed, send_mail)
    proto_wf = get_default_proto_workflow()
    self.assertEqual(len(proto_wf.exit_handler_steps), 1)
    handler_step = proto_wf.exit_handler_steps[0]
    self.assertEqual(handler_step.when, "{{workflow.status}} == Failed")
def test_run_daemon_container(self):
    """A daemon container registers a template flagged as daemon."""
    self.assertEqual(len(couler.workflow.templates), 0)
    couler.run_container(
        image="python:3.6", command="echo $uname", daemon=True
    )
    self.assertEqual(len(couler.workflow.templates), 1)
    template = couler.workflow.get_template(
        "test-run-daemon-container"
    ).to_dict()
    self.assertEqual(template["name"], "test-run-daemon-container")
    self.assertTrue(template["daemon"])
    self.assertEqual(template["container"]["image"], "python:3.6")
    self.assertEqual(template["container"]["command"], ["echo $uname"])
def test_run_container_with_toleration(self):
    """A workflow-level toleration lands in the rendered spec."""
    toleration = Toleration("example-toleration", "Exists", "NoSchedule")
    couler.add_toleration(toleration)
    couler.run_container(
        step_name="A",
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=["echo -n hello world"],
    )
    rendered = couler.workflow_yaml()
    self.assertEqual(rendered["spec"]["tolerations"][0], toleration.to_dict())
    couler._cleanup()
def test_cluster_config(self):
    """A cluster config file customizes the generated workflow spec."""
    config_path = os.path.join(
        os.path.dirname(__file__), "test_data/dummy_cluster_config.py"
    )
    couler.config_workflow(cluster_config_file=config_path)
    couler.run_container(
        step_name="A",
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=["echo -n hello world"],
    )
    rendered = couler.workflow_yaml()
    # The dummy config enables hostNetwork and clears step tolerations.
    self.assertTrue(rendered["spec"]["hostNetwork"])
    self.assertEqual(rendered["spec"]["templates"][1]["tolerations"], [])
    couler._cleanup()
def test_run_container_with_node_selector(self):
    """node_selector is forwarded into the step template unchanged."""
    selector = {"beta.kubernetes.io/arch": "amd64"}
    couler.run_container(
        step_name="A",
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=["echo -n hello world"],
        node_selector=selector,
    )
    rendered = couler.workflow_yaml()
    self.assertEqual(
        rendered["spec"]["templates"][1]["nodeSelector"], selector
    )
    couler._cleanup()
def producer():
    """Step "A": echo "hello world" from a whalesay container."""
    return couler.run_container(
        step_name="A",
        command=["bash", "-c"],
        args=["echo -n hello world"],
        image="docker/whalesay:latest",
    )
def start_pod_with_step(message, step_name):
    """Start a pod step that best-effort creates the admin log directory."""
    # `| echo` keeps the step successful even if mkdir fails (dir exists).
    shell_command = "mkdir /home/admin/logs/ | echo"
    return couler.run_container(
        step_name=step_name,
        image="couler/python:3.6",
        command=["bash", "-c", shell_command],
        args=[message],
    )
def job_two():
    """Write "step two" into a parameter artifact and return the step."""
    output_place = couler.create_parameter_artifact(path="/tmp/job_two.txt")
    write_cmd = "echo -n step two > %s" % output_place.path
    return couler.run_container(
        image="python:3.6",
        args=write_cmd,
        output=output_place,
    )
def test_run_container_with_image_pull_secret(self):
    """Every registered image-pull secret appears in the workflow spec."""
    pull_secrets = [
        ImagePullSecret("test-secret"),
        ImagePullSecret("test-secret1"),
    ]
    for pull_secret in pull_secrets:
        couler.add_image_pull_secret(pull_secret)
    couler.run_container(
        step_name="A",
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=["echo -n hello world"],
        working_dir="/mnt/src",
    )
    rendered = couler.workflow_yaml()
    for idx, pull_secret in enumerate(pull_secrets):
        self.assertEqual(
            rendered["spec"]["imagePullSecrets"][idx], pull_secret.to_dict()
        )
    couler._cleanup()
def producer(step_name):
    """Write "hello world" into a parameter artifact at `output_path`."""
    output_place = couler.create_parameter_artifact(path=output_path)
    return couler.run_container(
        step_name=step_name,
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=["echo -n hello world > %s" % output_place.path],
        output=output_place,
    )
def test_run_container_with_volume(self):
    """An existing volume plus its mount show up in spec and template."""
    volume = Volume("workdir", "my-existing-volume")
    volume_mount = VolumeMount("workdir", "/mnt/vol")
    couler.add_volume(volume)
    couler.run_container(
        step_name="A",
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=["echo -n hello world"],
        volume_mounts=[volume_mount],
    )
    rendered = couler.workflow_yaml()
    self.assertEqual(rendered["spec"]["volumes"][0], volume.to_dict())
    self.assertEqual(
        rendered["spec"]["templates"][1]["container"]["volumeMounts"][0],
        volume_mount.to_dict(),
    )
    couler._cleanup()
def consumer(step_name):
    """Echo the workflow parameter produced by step B's first output."""
    echo_command = "echo '{{inputs.parameters.para-B-0}}'"
    return couler.run_container(
        step_name=step_name,
        image="docker/whalesay:latest",
        command=["bash", "-c", echo_command],
    )
def test_output_oss_artifact(self):
    """An OSS output artifact is attached to the proto workflow step."""
    # The content of the local file would be uploaded to OSS.
    output_artifact = couler.create_oss_artifact(
        path="/home/t1.txt",
        bucket="test-bucket/",
        accesskey_id="abcde",
        accesskey_secret="abc12345",
        key="osspath/t1",
        endpoint="xyz.com",
    )
    couler.run_container(
        image="docker/whalesay:latest",
        args=["echo -n hello world > %s" % output_artifact.path],
        command=["bash", "-c"],
        output=output_artifact,
    )
    proto_wf = get_default_proto_workflow()
    first_step = proto_wf.steps[0]
    self.assertEqual(first_step.container_spec.image, "docker/whalesay:latest")
    self.assertTrue(
        first_step.outputs[0].artifact.name.startswith("output-oss")
    )
def builder_phase(
    self,
    c: container,
):
    """Register a kaniko build step that bakes *c*'s artifacts into its image.

    Generates a Dockerfile under the work path, appends it to the
    container's artifacts, records the builder step name in
    ``self.builders``, and schedules a kaniko executor container that
    builds and pushes ``c.image``.

    Args:
        c: the project ``container`` description whose artifacts become
           executable layers of the built image.
    """
    # All three OBC paths intentionally point at the same work directory.
    obc_env = {
        "OBC_WORK_PATH": self.work_path,
        "OBC_TOOL_PATH": self.work_path,
        "OBC_DATA_PATH": self.work_path,
    }
    dockerfile_path = os.path.join(self.work_path, "Dockerfile")

    # Assemble the Dockerfile as a list of lines instead of repeated
    # string concatenation; this avoids stray blank lines and keeps the
    # generated file readable.
    lines = [
        "FROM kantale/openbio-env:1",
        "RUN apt-get update",
        "ADD . /root/",
        "WORKDIR /root",
    ]
    lines.extend("ENV %s=%s" % (key, val) for key, val in obc_env.items())
    # Each artifact script must be made executable and is run at build time.
    lines.extend(
        "RUN chmod +x %s && %s" % (art.path, art.path) for art in c.artifacts
    )
    dockerfile = "\n".join(lines)

    c.artifacts.append(rawArtifact(dockerfile_path, dockerfile))
    self.builders.append("builder" + c.name)

    kaniko_args = [
        "--dockerfile=Dockerfile",
        "--cache=true",
        "--cache-repo=%s/openbio-cache" % self.image_registry,
        "--context=dir://%s/" % self.work_path.rstrip("/"),
        "--insecure",
        "--destination=" + c.image,
    ]
    couler.run_container(
        image="gcr.io/kaniko-project/executor:latest",
        args=kaniko_args,
        input=c.artifacts,
        step_name="builder" + c.name,
        env=obc_env,
    )
def producer_two(step_name):
    """Produce two parameter artifacts, A and B, in a single step."""
    output_one = couler.create_parameter_artifact(path="/mnt/t1.txt")
    output_two = couler.create_parameter_artifact(path="/mnt/t2.txt")
    shell = "echo -n A > %s && echo -n B > %s" % (
        output_one.path,
        output_two.path,
    )
    return couler.run_container(
        step_name=step_name,
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=shell,
        output=[output_one, output_two],
    )
def test_output_s3_artifact(self):
    """An S3 output artifact is rendered into the step template."""
    # The content of the local file would be uploaded to S3.
    output_artifact = couler.create_s3_artifact(
        path="/mnt/t1.txt",
        bucket="test-bucket/",
        accesskey_id="abcde",
        accesskey_secret="abc12345",
        key="s3path/t1",
        endpoint="xyz.com",
    )
    couler.run_container(
        image="docker/whalesay:latest",
        args=["echo -n hello world > %s" % output_artifact.path],
        command=["bash", "-c"],
        output=output_artifact,
    )
    rendered = couler.workflow_yaml()
    step_template = rendered["spec"]["templates"][1]
    self._s3_check_helper(step_template["outputs"]["artifacts"][0])
    couler._cleanup()
def test_input_oss_artifact(self):
    """An OSS input artifact is rendered into the step template."""
    input_artifact = couler.create_oss_artifact(
        path="/mnt/t1.txt",
        bucket="test-bucket/",
        accesskey_id="abcde",
        accesskey_secret="abc12345",
        key="osspath/t1",
        endpoint="xyz.com",
    )
    # The step reads the content fetched from the OSS bucket.
    couler.run_container(
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=["cat %s" % input_artifact.path],
        input=input_artifact,
    )
    rendered = couler.workflow_yaml()
    step_template = rendered["spec"]["templates"][1]
    self._oss_check_helper(step_template["inputs"]["artifacts"][0])
    couler._cleanup()
def producer_two():
    """Emit two parameter artifacts from one whalesay step."""
    output_one = couler.create_parameter_artifact(path="/mnt/place_one.txt")
    output_two = couler.create_parameter_artifact(path="/mnt/place_two.txt")
    # "tw0" reproduces the original step's literal output byte-for-byte.
    shell = "echo -n output one > %s && echo -n output tw0 > %s" % (
        output_one.path,
        output_two.path,
    )
    return couler.run_container(
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=shell,
        output=[output_one, output_two],
    )
def producer():
    """Write "hello world" to an OSS-backed artifact and return the step."""
    oss_artifact = couler.create_oss_artifact(
        path="/mnt/t1.txt",
        bucket="test-bucket/",
        accesskey_id="abcde",
        accesskey_secret="abc12345",
        key="osspath/t1",
        endpoint="xyz.com",
    )
    return couler.run_container(
        image="docker/whalesay:latest",
        command=["bash", "-c"],
        args=["echo -n hello world > %s" % oss_artifact.path],
        output=oss_artifact,
    )