Example #1
 def test_port_to_k8s_client_obj(self):
     port = Port('http', 80)
     self.assertEqual(
         port.to_k8s_client_obj(),
         k8s.V1ContainerPort(
             name='http',
             container_port=80
         )
     )
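The `Port` wrapper exercised throughout these examples comes from pre-2.x Airflow's Kubernetes support (later versions use `k8s.V1ContainerPort` directly). A minimal round-trip sketch of what Example #1 asserts; the import path is an assumption from the 1.10-era module layout:

from kubernetes.client import models as k8s
from airflow.contrib.kubernetes.pod import Port  # assumed 1.10-era path; varies by Airflow version

port = Port('http', 80)
k8s_port = port.to_k8s_client_obj()
assert k8s_port == k8s.V1ContainerPort(name='http', container_port=80)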
Example #2
    def test_port(self):
        port = Port('http', 80)

        k = KubernetesPodOperator(
            namespace='default',
            image="ubuntu:16.04",
            cmds=["bash", "-cx"],
            arguments=["echo 10"],
            labels={"foo": "bar"},
            name="test",
            task_id="task",
            in_cluster=False,
            do_xcom_push=False,
            ports=[port],
        )
        context = create_context(k)
        k.execute(context=context)
        actual_pod = self.api_client.sanitize_for_serialization(k.pod)
        self.expected_pod['spec']['containers'][0]['ports'] = [{
            'name': 'http',
            'containerPort': 80
        }]
        assert self.expected_pod == actual_pod
Example #3
def _extract_ports(ports):
    """Normalize a mixed list of k8s.V1ContainerPort objects and dicts into Port objects."""
    result = []
    ports = ports or []  # type: List[Union[k8s.V1ContainerPort, dict]]
    for port in ports:
        if isinstance(port, k8s.V1ContainerPort):
            # Serialize the client object into a camelCase dict first.
            port = api_client.sanitize_for_serialization(port)
            port = Port(name=port.get("name"), container_port=port.get("containerPort"))
        elif not isinstance(port, Port):
            # Already a plain dict in k8s camelCase form.
            port = Port(name=port.get("name"), container_port=port.get("containerPort"))
        result.append(port)
    return result
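A minimal usage sketch for the helper above, assuming `api_client` is the module-level `kubernetes.client.ApiClient()` the function references and `Port` is Airflow's port wrapper:

from kubernetes.client import ApiClient, models as k8s

api_client = ApiClient()  # referenced inside _extract_ports

mixed = [
    k8s.V1ContainerPort(name="http", container_port=80),  # client object
    {"name": "https", "containerPort": 443},              # plain camelCase dict
]
for p in _extract_ports(mixed):
    print(p.name, p.container_port)  # http 80, then https 443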
Example #4
 def test_port_attach_to_pod(self, mock_uuid):
     mock_uuid.return_value = '0'
     pod = PodGenerator(image='airflow-worker:latest',
                        name='base').gen_pod()
     ports = [Port('https', 443), Port('http', 80)]
     k8s_client = ApiClient()
     result = append_to_pod(pod, ports)
     result = k8s_client.sanitize_for_serialization(result)
     self.assertEqual(
         {
             'apiVersion': 'v1',
             'kind': 'Pod',
             'metadata': {'name': 'base-0'},
             'spec': {
                 'containers': [{
                     'args': [],
                     'command': [],
                     'env': [],
                     'envFrom': [],
                     'image': 'airflow-worker:latest',
                     'imagePullPolicy': 'IfNotPresent',
                     'name': 'base',
                     'ports': [{'name': 'https', 'containerPort': 443},
                               {'name': 'http', 'containerPort': 80}],
                     'volumeMounts': [],
                 }],
                 'hostNetwork': False,
                 'imagePullSecrets': [],
                 'restartPolicy': 'Never',
                 'volumes': []
             }
         }, result)
Example #5
 def test_port_attach_to_pod(self, mock_uuid):
     import uuid
     static_uuid = uuid.UUID('cf4a56d2-8101-4217-b027-2af6216feb48')
     mock_uuid.return_value = static_uuid
     pod = PodGenerator(image='airflow-worker:latest',
                        name='base').gen_pod()
     ports = [Port('https', 443), Port('http', 80)]
     k8s_client = ApiClient()
     result = append_to_pod(pod, ports)
     result = k8s_client.sanitize_for_serialization(result)
     self.assertEqual(
         {
             'apiVersion': 'v1',
             'kind': 'Pod',
             'metadata': {'name': 'base-' + static_uuid.hex},
             'spec': {
                 'containers': [{
                     'args': [],
                     'command': [],
                     'env': [],
                     'envFrom': [],
                     'image': 'airflow-worker:latest',
                     'name': 'base',
                     'ports': [{'name': 'https', 'containerPort': 443},
                               {'name': 'http', 'containerPort': 80}],
                     'volumeMounts': [],
                 }],
                 'hostNetwork': False,
                 'imagePullSecrets': [],
                 'volumes': []
             }
         }, result)
Example #6
    def test_port():
        port = Port('http', 80)

        k = KubernetesPodOperator(namespace='default',
                                  image="ubuntu:16.04",
                                  cmds=["bash", "-cx"],
                                  arguments=["echo 10"],
                                  labels={"foo": "bar"},
                                  name="test",
                                  task_id="task",
                                  ports=[port])
        k.execute(None)
Example #7
    def test_port(self):
        port = Port('http', 80)

        k = KubernetesPodOperator(namespace='default',
                                  image="ubuntu:16.04",
                                  cmds=["bash", "-cx"],
                                  arguments=["echo 10"],
                                  labels={"foo": "bar"},
                                  name="test",
                                  task_id="task",
                                  ports=[port])
        k.execute(None)
        actual_pod = self.api_client.sanitize_for_serialization(k.pod)
        self.expected_pod['spec']['containers'][0]['ports'] = [{
            'name': 'http',
            'containerPort': 80
        }]
        self.assertEqual(self.expected_pod, actual_pod)
Example #8
    def make_task(operator: str, task_params: Dict[str, Any]) -> BaseOperator:
        """
        Takes an operator and params and creates an instance of that operator.

        :returns: instance of operator object
        """
        try:
            # class is a Callable https://stackoverflow.com/a/34578836/3679900
            operator_obj: Callable[..., BaseOperator] = import_string(operator)
        except Exception as err:
            raise f"Failed to import operator: {operator}" from err
        try:
            if operator_obj == PythonOperator:
                if not task_params.get(
                        "python_callable_name") and not task_params.get(
                            "python_callable_file"):
                    raise Exception(
                        "Failed to create task. PythonOperator requires `python_callable_name` \
                        and `python_callable_file` parameters.")
                task_params[
                    "python_callable"]: Callable = utils.get_python_callable(
                        task_params["python_callable_name"],
                        task_params["python_callable_file"],
                    )

            # KubernetesPodOperator
            if operator_obj == KubernetesPodOperator:
                task_params["secrets"] = ([
                    Secret(**v) for v in task_params.get("secrets")
                ] if task_params.get("secrets") is not None else None)

                task_params["ports"] = ([
                    Port(**v) for v in task_params.get("ports")
                ] if task_params.get("ports") is not None else None)
                task_params["volume_mounts"] = ([
                    VolumeMount(**v) for v in task_params.get("volume_mounts")
                ] if task_params.get("volume_mounts") is not None else None)
                task_params["volumes"] = ([
                    Volume(**v) for v in task_params.get("volumes")
                ] if task_params.get("volumes") is not None else None)
                task_params["pod_runtime_info_envs"] = ([
                    PodRuntimeInfoEnv(**v)
                    for v in task_params.get("pod_runtime_info_envs")
                ] if task_params.get("pod_runtime_info_envs") is not None else
                                                        None)
                task_params["full_pod_spec"] = (
                    V1Pod(**task_params.get("full_pod_spec"))
                    if task_params.get("full_pod_spec") is not None else None)
                task_params["init_containers"] = ([
                    V1Container(**v)
                    for v in task_params.get("init_containers")
                ] if task_params.get("init_containers") is not None else None)

            if utils.check_dict_key(task_params, "execution_timeout_secs"):
                task_params["execution_timeout"]: timedelta = timedelta(
                    seconds=task_params["execution_timeout_secs"])
                del task_params["execution_timeout_secs"]

            # use variables as arguments on operator
            if utils.check_dict_key(task_params, "variables_as_arguments"):
                variables: List[Dict[str, str]] = task_params.get(
                    "variables_as_arguments")
                for variable in variables:
                    if Variable.get(variable["variable"],
                                    default_var=None) is not None:
                        task_params[variable["attribute"]] = Variable.get(
                            variable["variable"], default_var=None)
                del task_params["variables_as_arguments"]

            task: BaseOperator = operator_obj(**task_params)
        except Exception as err:
            raise f"Failed to create {operator_obj} task" from err
        return task
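To make the conversions above concrete, here is a hedged sketch of a `task_params` dict that exercises the KubernetesPodOperator branch; every key mirrors a `.get(...)` lookup in `make_task`, and all values are illustrative:

# Illustrative input for make_task; every value here is made up.
task_params = {
    "task_id": "pod_task",
    "namespace": "default",
    "image": "ubuntu:16.04",
    "name": "pod-task",
    # each dict becomes Port(**v) / Secret(**v) inside make_task
    "ports": [{"name": "http", "container_port": 80}],
    "secrets": [{
        "deploy_type": "env",
        "deploy_target": "SQL_CONN",
        "secret": "airflow-secrets",
        "key": "sql_alchemy_conn",
    }],
    "execution_timeout_secs": 300,  # rewritten to execution_timeout=timedelta(seconds=300)
}
task = make_task(
    "airflow.contrib.operators.kubernetes_pod_operator.KubernetesPodOperator",
    task_params,
)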
Example #9
    def make_task(operator: str, task_params: Dict[str, Any]) -> BaseOperator:
        """
        Takes an operator and params and creates an instance of that operator.

        :returns: instance of operator object
        """
        try:
            # class is a Callable https://stackoverflow.com/a/34578836/3679900
            operator_obj: Callable[..., BaseOperator] = import_string(operator)
        except Exception as err:
            raise Exception(f"Failed to import operator: {operator}") from err
        try:
            if operator_obj in [PythonOperator, BranchPythonOperator]:
                if not task_params.get(
                        "python_callable_name") and not task_params.get(
                            "python_callable_file"):
                    raise Exception(
                        "Failed to create task. PythonOperator and BranchPythonOperator requires \
                        `python_callable_name` and `python_callable_file` parameters."
                    )
                task_params[
                    "python_callable"]: Callable = utils.get_python_callable(
                        task_params["python_callable_name"],
                        task_params["python_callable_file"],
                    )
                # remove dag-factory specific parameters
                # Airflow 2.0 doesn't allow these to be passed to operator
                del task_params["python_callable_name"]
                del task_params["python_callable_file"]

            # Check for the custom success and failure callables in SqlSensor. These are
            # considered optional, so there is no failure if they aren't found. Note: there is
            # no reason to declare both a callable file and a lambda function for the
            # success/failure parameters. If both are found, the object will not throw an
            # error; instead, the callable file takes precedence over the lambda function.
            if operator_obj in [SqlSensor]:
                # Success checks
                if task_params.get("success_check_file") and task_params.get(
                        "success_check_name"):
                    task_params[
                        "success"]: Callable = utils.get_python_callable(
                            task_params["success_check_name"],
                            task_params["success_check_file"],
                        )
                    del task_params["success_check_name"]
                    del task_params["success_check_file"]
                elif task_params.get("success_check_lambda"):
                    task_params[
                        "success"]: Callable = utils.get_python_callable_lambda(
                            task_params["success_check_lambda"])
                    del task_params["success_check_lambda"]
                # Failure checks
                if task_params.get("failure_check_file") and task_params.get(
                        "failure_check_name"):
                    task_params[
                        "failure"]: Callable = utils.get_python_callable(
                            task_params["failure_check_name"],
                            task_params["failure_check_file"],
                        )
                    del task_params["failure_check_name"]
                    del task_params["failure_check_file"]
                elif task_params.get("failure_check_lambda"):
                    task_params[
                        "failure"]: Callable = utils.get_python_callable_lambda(
                            task_params["failure_check_lambda"])
                    del task_params["failure_check_lambda"]

            if operator_obj in [HttpSensor]:
                if not (task_params.get("response_check_name")
                        and task_params.get("response_check_file")
                        ) and not task_params.get("response_check_lambda"):
                    raise Exception(
                        "Failed to create task. HttpSensor requires \
                        `response_check_name` and `response_check_file` parameters \
                        or `response_check_lambda` parameter.")
                if task_params.get("response_check_file"):
                    task_params[
                        "response_check"]: Callable = utils.get_python_callable(
                            task_params["response_check_name"],
                            task_params["response_check_file"],
                        )
                    # remove dag-factory specific parameters
                    # Airflow 2.0 doesn't allow these to be passed to operator
                    del task_params["response_check_name"]
                    del task_params["response_check_file"]
                else:
                    task_params[
                        "response_check"]: Callable = utils.get_python_callable_lambda(
                            task_params["response_check_lambda"])
                    # remove dag-factory specific parameters
                    # Airflow 2.0 doesn't allow these to be passed to operator
                    del task_params["response_check_lambda"]

            # KubernetesPodOperator
            if operator_obj == KubernetesPodOperator:
                task_params["secrets"] = ([
                    Secret(**v) for v in task_params.get("secrets")
                ] if task_params.get("secrets") is not None else None)

                task_params["ports"] = ([
                    Port(**v) for v in task_params.get("ports")
                ] if task_params.get("ports") is not None else None)
                task_params["volume_mounts"] = ([
                    VolumeMount(**v) for v in task_params.get("volume_mounts")
                ] if task_params.get("volume_mounts") is not None else None)
                task_params["volumes"] = ([
                    Volume(**v) for v in task_params.get("volumes")
                ] if task_params.get("volumes") is not None else None)
                task_params["pod_runtime_info_envs"] = ([
                    PodRuntimeInfoEnv(**v)
                    for v in task_params.get("pod_runtime_info_envs")
                ] if task_params.get("pod_runtime_info_envs") is not None else
                                                        None)
                task_params["full_pod_spec"] = (
                    V1Pod(**task_params.get("full_pod_spec"))
                    if task_params.get("full_pod_spec") is not None else None)
                task_params["init_containers"] = ([
                    V1Container(**v)
                    for v in task_params.get("init_containers")
                ] if task_params.get("init_containers") is not None else None)

            if utils.check_dict_key(task_params, "execution_timeout_secs"):
                task_params["execution_timeout"]: timedelta = timedelta(
                    seconds=task_params["execution_timeout_secs"])
                del task_params["execution_timeout_secs"]

            if utils.check_dict_key(task_params, "sla_secs"):
                task_params["sla"]: timedelta = timedelta(
                    seconds=task_params["sla_secs"])
                del task_params["sla_secs"]

            if utils.check_dict_key(task_params, "execution_delta_secs"):
                task_params["execution_delta"]: timedelta = timedelta(
                    seconds=task_params["execution_delta_secs"])
                del task_params["execution_delta_secs"]

            if utils.check_dict_key(
                    task_params,
                    "execution_date_fn_name") and utils.check_dict_key(
                        task_params, "execution_date_fn_file"):
                task_params[
                    "execution_date_fn"]: Callable = utils.get_python_callable(
                        task_params["execution_date_fn_name"],
                        task_params["execution_date_fn_file"],
                    )
                del task_params["execution_date_fn_name"]
                del task_params["execution_date_fn_file"]

            # on_execute_callback is an Airflow 2.0 feature
            if utils.check_dict_key(
                    task_params, "on_execute_callback"
            ) and version.parse(AIRFLOW_VERSION) >= version.parse("2.0.0"):
                task_params["on_execute_callback"]: Callable = import_string(
                    task_params["on_execute_callback"])

            if utils.check_dict_key(task_params, "on_failure_callback"):
                task_params["on_failure_callback"]: Callable = import_string(
                    task_params["on_failure_callback"])

            if utils.check_dict_key(task_params, "on_success_callback"):
                task_params["on_success_callback"]: Callable = import_string(
                    task_params["on_success_callback"])

            if utils.check_dict_key(task_params, "on_retry_callback"):
                task_params["on_retry_callback"]: Callable = import_string(
                    task_params["on_retry_callback"])

            # use variables as arguments on operator
            if utils.check_dict_key(task_params, "variables_as_arguments"):
                variables: List[Dict[str, str]] = task_params.get(
                    "variables_as_arguments")
                for variable in variables:
                    if Variable.get(variable["variable"],
                                    default_var=None) is not None:
                        task_params[variable["attribute"]] = Variable.get(
                            variable["variable"], default_var=None)
                del task_params["variables_as_arguments"]

            task: BaseOperator = operator_obj(**task_params)
        except Exception as err:
            raise Exception(f"Failed to create {operator_obj} task") from err
        return task
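The SqlSensor branch above accepts either a callable file plus function name or an inline lambda string, with the file taking precedence. A sketch of both forms; the keys are the ones `make_task` pops, the values are illustrative:

# Option 1: success callable loaded from a file (takes precedence over a lambda).
sensor_params = {
    "task_id": "wait_for_rows",
    "conn_id": "my_db",
    "sql": "SELECT COUNT(*) FROM events",
    "success_check_file": "/usr/local/airflow/checks.py",  # illustrative path
    "success_check_name": "at_least_one_row",              # function defined in that file
}

# Option 2: inline lambda string, used only when no file/name pair is given.
sensor_params_lambda = {
    "task_id": "wait_for_rows",
    "conn_id": "my_db",
    "sql": "SELECT COUNT(*) FROM events",
    "success_check_lambda": "lambda result: result > 0",
}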
Example #10
# [START howto_operator_k8s_cluster_resources]
secret_file = Secret('volume', '/etc/sql_conn', 'airflow-secrets',
                     'sql_alchemy_conn')
secret_env = Secret('env', 'SQL_CONN', 'airflow-secrets', 'sql_alchemy_conn')
secret_all_keys = Secret('env', None, 'airflow-secrets-2')
volume_mount = VolumeMount('test-volume',
                           mount_path='/root/mount_file',
                           sub_path=None,
                           read_only=True)
configmaps = ['test-configmap-1', 'test-configmap-2']
volume_config = {'persistentVolumeClaim': {'claimName': 'test-volume'}}
volume = Volume(name='test-volume', configs=volume_config)
# [END howto_operator_k8s_cluster_resources]

port = Port('http', 80)

init_container_volume_mounts = [
    k8s.V1VolumeMount(mount_path='/etc/foo',
                      name='test-volume',
                      sub_path=None,
                      read_only=True)
]

init_environments = [
    k8s.V1EnvVar(name='key1', value='value1'),
    k8s.V1EnvVar(name='key2', value='value2')
]

init_container = k8s.V1Container(
    name="init-container",
    # The original snippet was truncated here; the remaining arguments are
    # restored from the surrounding definitions for illustration.
    image="ubuntu:16.04",
    env=init_environments,
    volume_mounts=init_container_volume_mounts,
    command=["bash", "-cx"],
    args=["echo 10"],
)
Example #11
    def test_convert_to_airflow_pod(self):
        input_pod = k8s.V1Pod(
            metadata=k8s.V1ObjectMeta(name="foo", namespace="bar"),
            spec=k8s.V1PodSpec(
                init_containers=[
                    k8s.V1Container(name="init-container",
                                    volume_mounts=[
                                        k8s.V1VolumeMount(mount_path="/tmp",
                                                          name="init-secret")
                                    ])
                ],
                containers=[
                    k8s.V1Container(
                        name="base",
                        command=["foo"],
                        image="myimage",
                        env=[
                            k8s.V1EnvVar(
                                name="AIRFLOW_SECRET",
                                value_from=k8s.V1EnvVarSource(
                                    secret_key_ref=k8s.V1SecretKeySelector(
                                        name="ai", key="secret_key")))
                        ],
                        ports=[
                            k8s.V1ContainerPort(
                                name="myport",
                                container_port=8080,
                            )
                        ],
                        volume_mounts=[
                            k8s.V1VolumeMount(name="myvolume",
                                              mount_path="/tmp/mount",
                                              read_only="True"),
                            k8s.V1VolumeMount(name='airflow-config',
                                              mount_path='/config',
                                              sub_path='airflow.cfg',
                                              read_only=True),
                            k8s.V1VolumeMount(name="airflow-secret",
                                              mount_path="/opt/mount",
                                              read_only=True)
                        ])
                ],
                security_context=k8s.V1PodSecurityContext(
                    run_as_user=0,
                    fs_group=0,
                ),
                volumes=[
                    k8s.V1Volume(name="myvolume"),
                    k8s.V1Volume(
                        name="airflow-config",
                        config_map=k8s.V1ConfigMap(data="airflow-data")),
                    k8s.V1Volume(name="airflow-secret",
                                 secret=k8s.V1SecretVolumeSource(
                                     secret_name="secret-name", )),
                    k8s.V1Volume(name="init-secret",
                                 secret=k8s.V1SecretVolumeSource(
                                     secret_name="init-secret", ))
                ]))
        result_pod = _convert_to_airflow_pod(input_pod)

        expected = Pod(
            name="foo",
            namespace="bar",
            envs={},
            init_containers=[{
                'name': 'init-container',
                'volumeMounts': [{'mountPath': '/tmp', 'name': 'init-secret'}]
            }],
            cmds=["foo"],
            image="myimage",
            ports=[Port(name="myport", container_port=8080)],
            volume_mounts=[
                VolumeMount(name="myvolume",
                            mount_path="/tmp/mount",
                            sub_path=None,
                            read_only="True"),
                VolumeMount(name="airflow-config",
                            read_only=True,
                            mount_path="/config",
                            sub_path="airflow.cfg"),
                VolumeMount(name="airflow-secret",
                            mount_path="/opt/mount",
                            sub_path=None,
                            read_only=True)
            ],
            secrets=[Secret("env", "AIRFLOW_SECRET", "ai", "secret_key")],
            security_context={
                'fsGroup': 0,
                'runAsUser': 0
            },
            volumes=[
                Volume(name="myvolume", configs={'name': 'myvolume'}),
                Volume(name="airflow-config",
                       configs={
                           'configMap': {
                               'data': 'airflow-data'
                           },
                           'name': 'airflow-config'
                       }),
                Volume(name='airflow-secret',
                       configs={
                           'name': 'airflow-secret',
                           'secret': {
                               'secretName': 'secret-name'
                           }
                       }),
                Volume(name='init-secret',
                       configs={
                           'name': 'init-secret',
                           'secret': {
                               'secretName': 'init-secret'
                           }
                       })
            ],
        )
        expected_dict = expected.as_dict()
        result_dict = result_pod.as_dict()
        print(result_pod.volume_mounts)
        parsed_configs = self.pull_out_volumes(result_dict)
        result_dict['volumes'] = parsed_configs
        self.assertEqual(result_dict['secrets'], expected_dict['secrets'])
        self.assertDictEqual(expected_dict, result_dict)
Example #12
    def make_task(operator: str, task_params: Dict[str, Any],
                  af_vars: Dict[str, Any]) -> BaseOperator:
        """
        Takes an operator and params and creates an instance of that operator.

        :returns: instance of operator object
        """
        try:
            # class is a Callable https://stackoverflow.com/a/34578836/3679900
            operator_obj: Callable[..., BaseOperator] = import_string(operator)
        except Exception as err:
            raise Exception(f"Failed to import operator: {operator}") from err
        try:
            if operator_obj in [
                    PythonOperator, BranchPythonOperator, PythonSensor
            ]:
                if (not task_params.get("python_callable")
                        and not task_params.get("python_callable_name")
                        and not task_params.get("python_callable_file")):
                    # pylint: disable=line-too-long
                    raise Exception(
                        "Failed to create task. PythonOperator, BranchPythonOperator and PythonSensor requires \
                        `python_callable_name` and `python_callable_file` "
                        "parameters.\nOptionally you can load python_callable "
                        "from a file. with the special pyyaml notation:\n"
                        "  python_callable_file: !!python/name:my_module.my_func"
                    )
                if not task_params.get("python_callable"):
                    task_params[
                        "python_callable"]: Callable = utils.get_python_callable(
                            task_params["python_callable_name"],
                            task_params["python_callable_file"],
                        )
                    # remove dag-factory specific parameters
                    # Airflow 2.0 doesn't allow these to be passed to operator
                    del task_params["python_callable_name"]
                    del task_params["python_callable_file"]

            # Check for the custom success and failure callables in SqlSensor. These are
            # considered optional, so there is no failure if they aren't found. Note: there is
            # no reason to declare both a callable file and a lambda function for the
            # success/failure parameters. If both are found, the object will not throw an
            # error; instead, the callable file takes precedence over the lambda function.
            if operator_obj in [SqlSensor]:
                # Success checks
                if task_params.get("success_check_file") and task_params.get(
                        "success_check_name"):
                    task_params[
                        "success"]: Callable = utils.get_python_callable(
                            task_params["success_check_name"],
                            task_params["success_check_file"],
                        )
                    del task_params["success_check_name"]
                    del task_params["success_check_file"]
                elif task_params.get("success_check_lambda"):
                    task_params[
                        "success"]: Callable = utils.get_python_callable_lambda(
                            task_params["success_check_lambda"])
                    del task_params["success_check_lambda"]
                # Failure checks
                if task_params.get("failure_check_file") and task_params.get(
                        "failure_check_name"):
                    task_params[
                        "failure"]: Callable = utils.get_python_callable(
                            task_params["failure_check_name"],
                            task_params["failure_check_file"],
                        )
                    del task_params["failure_check_name"]
                    del task_params["failure_check_file"]
                elif task_params.get("failure_check_lambda"):
                    task_params[
                        "failure"]: Callable = utils.get_python_callable_lambda(
                            task_params["failure_check_lambda"])
                    del task_params["failure_check_lambda"]

            if operator_obj in [HttpSensor]:
                if not (task_params.get("response_check_name")
                        and task_params.get("response_check_file")
                        ) and not task_params.get("response_check_lambda"):
                    raise Exception(
                        "Failed to create task. HttpSensor requires \
                        `response_check_name` and `response_check_file` parameters \
                        or `response_check_lambda` parameter.")
                if task_params.get("response_check_file"):
                    task_params[
                        "response_check"]: Callable = utils.get_python_callable(
                            task_params["response_check_name"],
                            task_params["response_check_file"],
                        )
                    # remove dag-factory specific parameters
                    # Airflow 2.0 doesn't allow these to be passed to operator
                    del task_params["response_check_name"]
                    del task_params["response_check_file"]
                else:
                    task_params[
                        "response_check"]: Callable = utils.get_python_callable_lambda(
                            task_params["response_check_lambda"])
                    # remove dag-factory specific parameters
                    # Airflow 2.0 doesn't allow these to be passed to operator
                    del task_params["response_check_lambda"]

            # KubernetesPodOperator
            if operator_obj == KubernetesPodOperator:
                task_params["secrets"] = ([
                    Secret(**v) for v in task_params.get("secrets")
                ] if task_params.get("secrets") is not None else None)

                task_params["ports"] = ([
                    Port(**v) for v in task_params.get("ports")
                ] if task_params.get("ports") is not None else None)
                task_params["volume_mounts"] = ([
                    VolumeMount(**v) for v in task_params.get("volume_mounts")
                ] if task_params.get("volume_mounts") is not None else None)
                task_params["volumes"] = ([
                    Volume(**v) for v in task_params.get("volumes")
                ] if task_params.get("volumes") is not None else None)
                task_params["pod_runtime_info_envs"] = ([
                    PodRuntimeInfoEnv(**v)
                    for v in task_params.get("pod_runtime_info_envs")
                ] if task_params.get("pod_runtime_info_envs") is not None else
                                                        None)
                task_params["full_pod_spec"] = (
                    V1Pod(**task_params.get("full_pod_spec"))
                    if task_params.get("full_pod_spec") is not None else None)
                task_params["init_containers"] = ([
                    V1Container(**v)
                    for v in task_params.get("init_containers")
                ] if task_params.get("init_containers") is not None else None)
            if operator_obj == DockerOperator:
                if task_params.get("environment") is not None:
                    task_params["environment"] = {
                        k: os.environ.get(v, v)
                        for k, v in task_params["environment"].items()
                    }

            if operator_obj == EcsOperator:
                for c in task_params["overrides"]["containerOverrides"]:
                    if c.get('environment') is not None:
                        for env in c['environment']:
                            env['value'] = os.environ.get(
                                env['value'], env['value'])

                if 'ECS_SECURITY_GROUPS' in af_vars and 'network_configuration' in task_params:
                    task_params["network_configuration"]["awsvpcConfiguration"]['securityGroups'] \
                        = af_vars['ECS_SECURITY_GROUPS']

                if 'ECS_SUBNETS' in af_vars and 'network_configuration' in task_params:
                    task_params['network_configuration'][
                        "awsvpcConfiguration"]["subnets"] = af_vars[
                            "ECS_SUBNETS"]

                if af_vars.get('ECS_CLUSTER'):
                    task_params['cluster'] = af_vars["ECS_CLUSTER"]
                    task_params['task_definition'] = (
                        af_vars.get('ECS_CLUSTER') + '_' +
                        task_params['task_definition']).lower()

                    task_params['awslogs_group'] = \
                        task_params['awslogs_group'] + '/' + af_vars.get('ECS_CLUSTER').lower()

            if utils.check_dict_key(task_params, "execution_timeout_secs"):
                task_params["execution_timeout"]: timedelta = timedelta(
                    seconds=task_params["execution_timeout_secs"])
                del task_params["execution_timeout_secs"]

            if utils.check_dict_key(task_params, "sla_secs"):
                task_params["sla"]: timedelta = timedelta(
                    seconds=task_params["sla_secs"])
                del task_params["sla_secs"]

            if utils.check_dict_key(task_params, "execution_delta_secs"):
                task_params["execution_delta"]: timedelta = timedelta(
                    seconds=task_params["execution_delta_secs"])
                del task_params["execution_delta_secs"]

            if utils.check_dict_key(
                    task_params,
                    "execution_date_fn_name") and utils.check_dict_key(
                        task_params, "execution_date_fn_file"):
                task_params[
                    "execution_date_fn"]: Callable = utils.get_python_callable(
                        task_params["execution_date_fn_name"],
                        task_params["execution_date_fn_file"],
                    )
                del task_params["execution_date_fn_name"]
                del task_params["execution_date_fn_file"]

            # on_execute_callback is an Airflow 2.0 feature
            if utils.check_dict_key(
                    task_params, "on_execute_callback"
            ) and version.parse(AIRFLOW_VERSION) >= version.parse("2.0.0"):
                task_params["on_execute_callback"]: Callable = import_string(
                    task_params["on_execute_callback"])

            if utils.check_dict_key(task_params, "on_failure_callback"):
                task_params["on_failure_callback"]: Callable = import_string(
                    task_params["on_failure_callback"])

            if utils.check_dict_key(task_params, "on_success_callback"):
                task_params["on_success_callback"]: Callable = import_string(
                    task_params["on_success_callback"])

            if utils.check_dict_key(task_params, "on_retry_callback"):
                task_params["on_retry_callback"]: Callable = import_string(
                    task_params["on_retry_callback"])

            # use variables as arguments on operator
            if utils.check_dict_key(task_params, "variables_as_arguments"):
                variables: List[Dict[str, str]] = task_params.get(
                    "variables_as_arguments")
                for variable in variables:
                    if Variable.get(variable["variable"],
                                    default_var=None) is not None:
                        task_params[variable["attribute"]] = Variable.get(
                            variable["variable"], default_var=None)
                del task_params["variables_as_arguments"]

            # use variables as arguments on operator
            if utils.check_dict_key(task_params, "af_vars_as_arguments"):
                variables: List[Dict[str, str]] = task_params.get(
                    "af_vars_as_arguments")
                for variable in variables:
                    if af_vars.get(variable["variable"], None) is not None:
                        task_params[variable["attribute"]] = af_vars.get(
                            variable["variable"], None)
                del task_params["af_vars_as_arguments"]

            task: BaseOperator = operator_obj(**task_params)
        except Exception as err:
            raise Exception(f"Failed to create {operator_obj} task") from err
        return task
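Example #12 threads an `af_vars` mapping through `make_task` for environment-specific values. A sketch of what the EcsOperator branch would do with it; all names and values are illustrative:

# Illustrative af_vars consumed by the EcsOperator branch above.
af_vars = {
    "ECS_CLUSTER": "Staging",
    "ECS_SUBNETS": ["subnet-aaa", "subnet-bbb"],
    "ECS_SECURITY_GROUPS": ["sg-123"],
}
# With task_params containing task_definition="my-service" and awslogs_group="/ecs/app",
# the branch rewrites:
#   cluster         -> "Staging"
#   task_definition -> "staging_my-service"
#   awslogs_group   -> "/ecs/app/staging"
# and injects the subnets/security groups into network_configuration.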