def test_import_string(self):
        cls = import_string('airflow.utils.module_loading.import_string')
        self.assertEqual(cls, import_string)

        # Test exceptions raised
        with self.assertRaises(ImportError):
            import_string('no_dots_in_path')
        msg = 'Module "airflow.utils" does not define a "nonexistent" attribute'
        with self.assertRaisesRegexp(ImportError, msg):
            import_string('airflow.utils.nonexistent')
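For reference, import_string resolves a dotted path into the object it names; a minimal sketch of the idea (not Airflow's exact implementation, and the helper name here is illustrative) looks like this:

from importlib import import_module


def import_string_sketch(dotted_path):
    """Resolve 'package.module.attribute' to the attribute itself."""
    try:
        module_path, attr_name = dotted_path.rsplit('.', 1)
    except ValueError:
        # No dots at all: cannot split into module and attribute.
        raise ImportError(f"{dotted_path} doesn't look like a module path")
    module = import_module(module_path)
    try:
        return getattr(module, attr_name)
    except AttributeError:
        raise ImportError(
            f'Module "{module_path}" does not define a "{attr_name}" attribute')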
def configure_logging():
    logging_class_path = ''
    try:
        # Prepare the classpath so we are sure that the config folder
        # is on the python classpath and it is reachable
        prepare_classpath()

        logging_class_path = conf.get('core', 'logging_config_class')
    except AirflowConfigException:
        log.debug('Could not find key logging_config_class in config')

    if logging_class_path:
        try:
            logging_config = import_string(logging_class_path)

            # Make sure the imported object is a dict-style logging config
            assert isinstance(logging_config, dict)

            log.info(
                'Successfully imported user-defined logging config from %s',
                logging_class_path
            )
        except Exception as err:
            # Import default logging configurations.
            raise ImportError(
                'Unable to load custom logging from {} due to {}'
                .format(logging_class_path, err)
            )
    else:
        logging_class_path = 'airflow.config_templates.' \
                             'airflow_local_settings.DEFAULT_LOGGING_CONFIG'
        logging_config = import_string(logging_class_path)
        log.debug('Unable to load custom logging, using default config instead')

    try:
        # Try to init logging
        dictConfig(logging_config)
    except ValueError as e:
        log.warning('Unable to load the config, contains a configuration error.')
        # When there is an error in the config, escalate the exception
        # otherwise Airflow would silently fall back on the default config
        raise e

    validate_logging_config(logging_config)

    return logging_class_path
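The logging_config_class option above is expected to name a dict. A hedged sketch of such a user module (the module name and the tweak shown are illustrative) might be:

# my_airflow_logging.py -- referenced from airflow.cfg as, for example:
#   [core]
#   logging_config_class = my_airflow_logging.LOGGING_CONFIG
from copy import deepcopy

from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG

# Start from the default config and adjust it; import_string() above returns this dict.
LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)
LOGGING_CONFIG["loggers"]["airflow.task"]["level"] = "DEBUG"  # illustrative tweak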
Example #3
def _get_backend():
    backend = None

    try:
        _backend_str = conf.get("lineage", "backend")
        backend = import_string(_backend_str)
    except ImportError as ie:
        log.debug("Cannot import %s due to %s", _backend_str, ie)
    except conf.AirflowConfigException:
        log.debug("Could not find lineage backend key in config")

    return backend
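The lineage backend string is resolved the same way; a hypothetical backend module and the matching config entry (all names here are made up) could look like:

# my_lineage.py -- referenced from airflow.cfg as, for example:
#   [lineage]
#   backend = my_lineage.ConsoleLineageBackend
class ConsoleLineageBackend:
    """Toy backend that just prints lineage; real backends subclass Airflow's LineageBackend."""

    def send_lineage(self, operator=None, inlets=None, outlets=None, context=None):
        print(f"lineage from {operator}: inlets={inlets} outlets={outlets}")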
from airflow.exceptions import AirflowException
from airflow.executors.base_executor import BaseExecutor
from airflow import configuration
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.module_loading import import_string

PARALLELISM = configuration.get('core', 'PARALLELISM')

'''
To start the celery worker, run the command:
airflow worker
'''

if configuration.has_option('celery', 'celery_config_options'):
    celery_configuration = import_string(
        configuration.get('celery', 'celery_config_options')
    )
else:
    celery_configuration = DEFAULT_CELERY_CONFIG

app = Celery(
    configuration.get('celery', 'CELERY_APP_NAME'),
    config_source=celery_configuration)


@app.task
def execute_command(command):
    log = LoggingMixin().log
    log.info("Executing command in Celery: %s", command)
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError as e:
        log.error(e)
        raise AirflowException('Celery command failed')
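Similarly, celery_config_options points at a dict; a hedged sketch of a custom config module (the override shown is illustrative) might be:

# my_celery_config.py -- referenced from airflow.cfg as, for example:
#   [celery]
#   celery_config_options = my_celery_config.CELERY_CONFIG
from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG

CELERY_CONFIG = dict(DEFAULT_CELERY_CONFIG)
CELERY_CONFIG["worker_prefetch_multiplier"] = 1  # illustrative Celery setting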
from airflow.models.taskinstance import SimpleTaskInstance, TaskInstanceKeyType, TaskInstanceStateType
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.module_loading import import_string
from airflow.utils.timeout import timeout

# Make it constant for unit test.
CELERY_FETCH_ERR_MSG_HEADER = 'Error fetching Celery task state'

CELERY_SEND_ERR_MSG_HEADER = 'Error sending Celery task'
'''
To start the celery worker, run the command:
airflow celery worker
'''

if conf.has_option('celery', 'celery_config_options'):
    celery_configuration = import_string(
        conf.get('celery', 'celery_config_options'))
else:
    celery_configuration = DEFAULT_CELERY_CONFIG

app = Celery(conf.get('celery', 'CELERY_APP_NAME'),
             config_source=celery_configuration)


@app.task
def execute_command(command_to_exec: CommandType) -> None:
    """Executes command."""
    log = LoggingMixin().log
    log.info("Executing command in Celery: %s", command_to_exec)
    env = os.environ.copy()
    try:
        subprocess.check_call(command_to_exec, stderr=subprocess.STDOUT,
                              close_fds=True, env=env)
    except subprocess.CalledProcessError as e:
        log.exception('execute_command encountered a CalledProcessError')
        raise AirflowException('Celery command failed')
Example #6
    def _deserialize_operator_extra_links(
            cls, encoded_op_links: list) -> Dict[str, BaseOperatorLink]:
        """
        Deserialize Operator Links if the classes are registered in Airflow Plugins.
        An error is raised if the OperatorLink is not found in Plugins either.

        :param encoded_op_links: Serialized Operator Link
        :return: De-Serialized Operator Link
        """
        from airflow import plugins_manager
        plugins_manager.initialize_extra_operators_links_plugins()

        if plugins_manager.registered_operator_link_classes is None:
            raise AirflowException("Can't load plugins")
        op_predefined_extra_links = {}

        for _operator_links_source in encoded_op_links:
            # Get the key, value pair as Tuple where key is OperatorLink ClassName
            # and value is the dictionary containing the arguments passed to the OperatorLink
            #
            # Example of a single iteration:
            #
            #   _operator_links_source =
            #   {
            #       'airflow.providers.google.cloud.operators.bigquery.BigQueryConsoleIndexableLink': {
            #           'index': 0
            #       }
            #   },
            #
            #   list(_operator_links_source.items()) =
            #   [
            #       (
            #           'airflow.providers.google.cloud.operators.bigquery.BigQueryConsoleIndexableLink',
            #           {'index': 0}
            #       )
            #   ]
            #
            #   list(_operator_links_source.items())[0] =
            #   (
            #       'airflow.providers.google.cloud.operators.bigquery.BigQueryConsoleIndexableLink',
            #       {
            #           'index': 0
            #       }
            #   )

            _operator_link_class_path, data = list(
                _operator_links_source.items())[0]
            if _operator_link_class_path in BUILTIN_OPERATOR_EXTRA_LINKS:
                single_op_link_class = import_string(_operator_link_class_path)
            elif _operator_link_class_path in plugins_manager.registered_operator_link_classes:
                single_op_link_class = plugins_manager.registered_operator_link_classes[
                    _operator_link_class_path]
            else:
                raise KeyError("Operator Link class %r not registered" %
                               _operator_link_class_path)

            op_predefined_extra_link: BaseOperatorLink = cattr.structure(
                data, single_op_link_class)

            op_predefined_extra_links.update(
                {op_predefined_extra_link.name: op_predefined_extra_link})

        return op_predefined_extra_links
Example #7
from airflow.exceptions import AirflowException, AirflowTaskTimeout
from airflow.models import BaseOperator, SensorInstance, SkipMixin, TaskInstance
from airflow.settings import LOGGING_CLASS_PATH
from airflow.stats import Stats
from airflow.utils import helpers, timezone
from airflow.utils.decorators import apply_defaults
from airflow.utils.email import send_email
from airflow.utils.log.logging_mixin import set_context
from airflow.utils.module_loading import import_string
from airflow.utils.net import get_hostname
from airflow.utils.session import provide_session
from airflow.utils.state import PokeState, State
from airflow.utils.timeout import timeout

config = import_string(LOGGING_CLASS_PATH)
handler_config = config['handlers']['task']
try:
    formatter_config = config['formatters'][handler_config['formatter']]
except Exception as err:  # pylint: disable=broad-except
    formatter_config = None
    print(err)
dictConfigurator = DictConfigurator(config)


class SensorWork:
    """
    This class stores a sensor work with decoded context value. It is only used
    inside of smart sensor. Create a sensor work based on sensor instance record.
    A sensor work object has the following attributes:
    `dag_id`: sensor_instance dag_id.
Example #8
def _get_instance(meta: Metadata):
    """Instantiate an object from Metadata"""
    cls = import_string(meta.type_name)
    return structure(meta.data, cls)
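For reference, cattr.structure rebuilds a typed object from plain data; a minimal self-contained round trip (the Point class is illustrative, standing in for whatever meta.type_name resolves to) looks like:

from dataclasses import dataclass

import cattr


@dataclass
class Point:
    x: int
    y: int


data = cattr.unstructure(Point(x=1, y=2))   # -> {'x': 1, 'y': 2}
restored = cattr.structure(data, Point)     # -> Point(x=1, y=2)
assert restored == Point(x=1, y=2)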
Example #9
def configure_orm(disable_connection_pool=False):
    log.debug("Setting up DB connection pool (PID %s)" % os.getpid())
    global engine
    global Session
    engine_args = {}

    pool_connections = conf.getboolean('core', 'SQL_ALCHEMY_POOL_ENABLED')
    if disable_connection_pool or not pool_connections:
        engine_args['poolclass'] = NullPool
        log.debug("settings.configure_orm(): Using NullPool")
    elif 'sqlite' not in SQL_ALCHEMY_CONN:
        # Pool size engine args not supported by sqlite.
        # If no config value is defined for the pool size, select a reasonable value.
        # 0 means no limit, which could lead to exceeding the Database connection limit.
        pool_size = conf.getint('core', 'SQL_ALCHEMY_POOL_SIZE', fallback=5)

        # The maximum overflow size of the pool.
        # When the number of checked-out connections reaches the size set in pool_size,
        # additional connections will be returned up to this limit.
        # When those additional connections are returned to the pool, they are disconnected and discarded.
        # It follows then that the total number of simultaneous connections
        # the pool will allow is pool_size + max_overflow,
        # and the total number of “sleeping” connections the pool will allow is pool_size.
        # max_overflow can be set to -1 to indicate no overflow limit;
        # no limit will be placed on the total number
        # of concurrent connections. Defaults to 10.
        max_overflow = conf.getint('core',
                                   'SQL_ALCHEMY_MAX_OVERFLOW',
                                   fallback=10)

        # The DB server already has a value for wait_timeout (number of seconds after
        # which an idle sleeping connection should be killed). Since other DBs may
        # co-exist on the same server, SQLAlchemy should set its
        # pool_recycle to an equal or smaller value.
        pool_recycle = conf.getint('core',
                                   'SQL_ALCHEMY_POOL_RECYCLE',
                                   fallback=1800)

        # Check connection at the start of each connection pool checkout.
        # Typically, this is a simple statement like “SELECT 1”, but may also make use
        # of some DBAPI-specific method to test the connection for liveness.
        # More information here:
        # https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic
        pool_pre_ping = conf.getboolean('core',
                                        'SQL_ALCHEMY_POOL_PRE_PING',
                                        fallback=True)

        log.info(
            "settings.configure_orm(): Using pool settings. pool_size={}, max_overflow={}, "
            "pool_recycle={}, pid={}".format(pool_size, max_overflow,
                                             pool_recycle, os.getpid()))
        engine_args['pool_size'] = pool_size
        engine_args['pool_recycle'] = pool_recycle
        engine_args['pool_pre_ping'] = pool_pre_ping
        engine_args['max_overflow'] = max_overflow

    # Allow the user to specify an encoding for their DB otherwise default
    # to utf-8 so jobs & users with non-latin1 characters can still use us.
    engine_args['encoding'] = conf.get('core',
                                       'SQL_ENGINE_ENCODING',
                                       fallback='utf-8')

    if conf.has_option('core', 'sql_alchemy_connect_args'):
        connect_args = import_string(
            conf.get('core', 'sql_alchemy_connect_args'))
    else:
        connect_args = {}

    engine = create_engine(SQL_ALCHEMY_CONN,
                           connect_args=connect_args,
                           **engine_args)
    setup_event_handlers(engine)

    Session = scoped_session(
        sessionmaker(autocommit=False,
                     autoflush=False,
                     bind=engine,
                     expire_on_commit=False))
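The sql_alchemy_connect_args option above is likewise a dotted path to a dict that is handed to create_engine; a hypothetical target module (the key shown is illustrative and driver-specific) could be:

# my_settings.py -- referenced from airflow.cfg as, for example:
#   [core]
#   sql_alchemy_connect_args = my_settings.CONNECT_ARGS
CONNECT_ARGS = {
    "connect_timeout": 30,  # passed straight through to the DBAPI driver
}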
Example #10
 def command(*args, **kwargs):
     func = import_string(import_path)
     return func(*args, **kwargs)
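A hedged sketch of the lazy-import pattern the wrapper above implements (the factory name is made up): the dotted path is only imported when the command is actually called.

import os.path

from airflow.utils.module_loading import import_string


def lazy_command(import_path):
    def command(*args, **kwargs):
        func = import_string(import_path)  # deferred until first invocation
        return func(*args, **kwargs)
    return command


resolve = lazy_command("airflow.utils.module_loading.import_string")
assert resolve("os.path.join") is os.path.join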
Example #11
 def test_should_have_valid_imports(self, import_path):
     assert import_string(import_path) is not None
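Such a check is typically driven by a parametrized list of dotted paths; a hedged sketch using plain pytest (the path list is illustrative) might be:

import pytest

from airflow.utils.module_loading import import_string

CLASS_PATHS = [
    "airflow.utils.module_loading.import_string",
]


@pytest.mark.parametrize("import_path", CLASS_PATHS)
def test_should_have_valid_imports(import_path):
    assert import_string(import_path) is not None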
Example #12
    def make_task(operator: str, task_params: Dict[str, Any]) -> BaseOperator:
        """
        Takes an operator and params and creates an instance of that operator.

        :returns: instance of operator object
        """
        try:
            # class is a Callable https://stackoverflow.com/a/34578836/3679900
            operator_obj: Callable[..., BaseOperator] = import_string(operator)
        except Exception as err:
            raise f"Failed to import operator: {operator}" from err
        try:
            if operator_obj == PythonOperator:
                if not task_params.get(
                        "python_callable_name") and not task_params.get(
                            "python_callable_file"):
                    raise Exception(
                        "Failed to create task. PythonOperator requires "
                        "`python_callable_name` and `python_callable_file` parameters.")
                task_params[
                    "python_callable"]: Callable = utils.get_python_callable(
                        task_params["python_callable_name"],
                        task_params["python_callable_file"],
                    )
                # remove dag-factory specific parameters
                # Airflow 2.0 doesn't allow these to be passed to operator
                del task_params["python_callable_name"]
                del task_params["python_callable_file"]

            # KubernetesPodOperator
            if operator_obj == KubernetesPodOperator:
                task_params["secrets"] = ([
                    Secret(**v) for v in task_params.get("secrets")
                ] if task_params.get("secrets") is not None else None)

                task_params["ports"] = ([
                    Port(**v) for v in task_params.get("ports")
                ] if task_params.get("ports") is not None else None)
                task_params["volume_mounts"] = ([
                    VolumeMount(**v) for v in task_params.get("volume_mounts")
                ] if task_params.get("volume_mounts") is not None else None)
                task_params["volumes"] = ([
                    Volume(**v) for v in task_params.get("volumes")
                ] if task_params.get("volumes") is not None else None)
                task_params["pod_runtime_info_envs"] = ([
                    PodRuntimeInfoEnv(**v)
                    for v in task_params.get("pod_runtime_info_envs")
                ] if task_params.get("pod_runtime_info_envs") is not None else
                                                        None)
                task_params["full_pod_spec"] = (
                    V1Pod(**task_params.get("full_pod_spec"))
                    if task_params.get("full_pod_spec") is not None else None)
                task_params["init_containers"] = ([
                    V1Container(**v)
                    for v in task_params.get("init_containers")
                ] if task_params.get("init_containers") is not None else None)

            if utils.check_dict_key(task_params, "execution_timeout_secs"):
                task_params["execution_timeout"]: timedelta = timedelta(
                    seconds=task_params["execution_timeout_secs"])
                del task_params["execution_timeout_secs"]

            if utils.check_dict_key(task_params, "execution_delta_secs"):
                task_params["execution_delta"]: timedelta = timedelta(
                    seconds=task_params["execution_delta_secs"])
                del task_params["execution_delta_secs"]

            if utils.check_dict_key(
                    task_params,
                    "execution_date_fn_name") and utils.check_dict_key(
                        task_params, "execution_date_fn_file"):
                task_params[
                    "execution_date_fn"]: Callable = utils.get_python_callable(
                        task_params["execution_date_fn_name"],
                        task_params["execution_date_fn_file"],
                    )
                del task_params["execution_date_fn_name"]
                del task_params["execution_date_fn_file"]

            # use variables as arguments on operator
            if utils.check_dict_key(task_params, "variables_as_arguments"):
                variables: List[Dict[str, str]] = task_params.get(
                    "variables_as_arguments")
                for variable in variables:
                    if Variable.get(variable["variable"],
                                    default_var=None) is not None:
                        task_params[variable["attribute"]] = Variable.get(
                            variable["variable"], default_var=None)
                del task_params["variables_as_arguments"]

            task: BaseOperator = operator_obj(**task_params)
        except Exception as err:
            raise f"Failed to create {operator_obj} task" from err
        return task
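An illustrative call to the factory above, shown as if make_task were in scope (the operator path and params are made up; dag-factory normally reads them from YAML):

task = make_task(
    operator="airflow.operators.bash.BashOperator",
    task_params={"task_id": "say_hello", "bash_command": "echo hello"},
)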
Example #13
def get_default_logging_config():
    logging_class_path = 'airflow.config_templates.' \
                         'airflow_local_settings.DEFAULT_LOGGING_CONFIG'
    return import_string(logging_class_path)
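A brief usage note: the returned dict can be fed straight to logging.config.dictConfig, mirroring what configure_logging does above.

from logging.config import dictConfig

dictConfig(get_default_logging_config())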
Example #14
    def get_dag_params(self) -> Dict[str, Any]:
        """
        Merges default config with dag config, sets dag_id, and extrapolates dag_start_date

        :returns: dict of dag parameters
        """
        try:
            dag_params: Dict[str, Any] = utils.merge_configs(
                self.dag_config, self.default_config)
        except Exception as err:
            raise Exception(
                "Failed to merge config with default config") from err
        dag_params["dag_id"]: str = self.dag_name

        if dag_params.get("task_groups") and version.parse(
                AIRFLOW_VERSION) < version.parse("2.0.0"):
            raise Exception(
                "`task_groups` key can only be used with Airflow 2.x.x")

        if (utils.check_dict_key(dag_params, "schedule_interval")
                and dag_params["schedule_interval"] == "None"):
            dag_params["schedule_interval"] = None

        # Convert from 'dagrun_timeout_sec: int' to 'dagrun_timeout: timedelta'
        if utils.check_dict_key(dag_params, "dagrun_timeout_sec"):
            dag_params["dagrun_timeout"]: timedelta = timedelta(
                seconds=dag_params["dagrun_timeout_sec"])
            del dag_params["dagrun_timeout_sec"]

        # Convert from 'end_date: Union[str, datetime, date]' to 'end_date: datetime'
        if utils.check_dict_key(dag_params["default_args"], "end_date"):
            dag_params["default_args"][
                "end_date"]: datetime = utils.get_datetime(
                    date_value=dag_params["default_args"]["end_date"],
                    timezone=dag_params["default_args"].get("timezone", "UTC"),
                )

        if utils.check_dict_key(dag_params["default_args"], "retry_delay_sec"):
            dag_params["default_args"]["retry_delay"]: timedelta = timedelta(
                seconds=dag_params["default_args"]["retry_delay_sec"])
            del dag_params["default_args"]["retry_delay_sec"]

        if utils.check_dict_key(dag_params["default_args"],
                                "sla_miss_callback"):
            if isinstance(dag_params["default_args"]["sla_miss_callback"],
                          str):
                dag_params["default_args"][
                    "sla_miss_callback"]: Callable = import_string(
                        dag_params["default_args"]["sla_miss_callback"])

        if utils.check_dict_key(dag_params["default_args"],
                                "on_success_callback"):
            if isinstance(dag_params["default_args"]["on_success_callback"],
                          str):
                dag_params["default_args"][
                    "on_success_callback"]: Callable = import_string(
                        dag_params["default_args"]["on_success_callback"])

        if utils.check_dict_key(dag_params["default_args"],
                                "on_failure_callback"):
            if isinstance(dag_params["default_args"]["on_failure_callback"],
                          str):
                dag_params["default_args"][
                    "on_failure_callback"]: Callable = import_string(
                        dag_params["default_args"]["on_failure_callback"])

        if utils.check_dict_key(dag_params, "sla_miss_callback"):
            if isinstance(dag_params["sla_miss_callback"], str):
                dag_params["sla_miss_callback"]: Callable = import_string(
                    dag_params["sla_miss_callback"])

        if utils.check_dict_key(dag_params, "on_success_callback"):
            if isinstance(dag_params["on_success_callback"], str):
                dag_params["on_success_callback"]: Callable = import_string(
                    dag_params["on_success_callback"])

        if utils.check_dict_key(dag_params, "on_failure_callback"):
            if isinstance(dag_params["on_failure_callback"], str):
                dag_params["on_failure_callback"]: Callable = import_string(
                    dag_params["on_failure_callback"])

        if utils.check_dict_key(
                dag_params,
                "on_success_callback_name") and utils.check_dict_key(
                    dag_params, "on_success_callback_file"):
            dag_params[
                "on_success_callback"]: Callable = utils.get_python_callable(
                    dag_params["on_success_callback_name"],
                    dag_params["on_success_callback_file"],
                )

        if utils.check_dict_key(
                dag_params,
                "on_failure_callback_name") and utils.check_dict_key(
                    dag_params, "on_failure_callback_file"):
            dag_params[
                "on_failure_callback"]: Callable = utils.get_python_callable(
                    dag_params["on_failure_callback_name"],
                    dag_params["on_failure_callback_file"],
                )

        # set schedule_interval from airflow variable
        if utils.check_dict_key(dag_params, "schedule_interval_var"):
            if self.af_vars.get(dag_params["schedule_interval_var"],
                                None) is not None:
                dag_params["schedule_interval"] = self.af_vars.get(
                    dag_params["schedule_interval_var"],
                    dag_params["schedule_interval"])

        try:
            # ensure that default_args dictionary contains key "start_date"
            # with "datetime" value in specified timezone
            dag_params["default_args"][
                "start_date"]: datetime = utils.get_datetime(
                    date_value=dag_params["default_args"]["start_date"],
                    timezone=dag_params["default_args"].get("timezone", "UTC"),
                )
        except KeyError as err:
            raise Exception(
                f"{self.dag_name} config is missing start_date") from err
        return dag_params
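An illustrative dag-factory style config fragment exercising the conversions above (the keys come from the code; the values and the callback path are made up):

example_dag_config = {
    "schedule_interval": "None",              # normalised to None above
    "dagrun_timeout_sec": 600,                # -> dagrun_timeout=timedelta(seconds=600)
    "default_args": {
        "start_date": "2021-01-01",
        "timezone": "UTC",
        "retry_delay_sec": 300,               # -> retry_delay=timedelta(seconds=300)
        "on_failure_callback": "my_pkg.alerts.notify",  # resolved via import_string
    },
}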
Example #15
    def make_task(operator: str, task_params: Dict[str, Any],
                  af_vars: Dict[str, Any]) -> BaseOperator:
        """
        Takes an operator and params and creates an instance of that operator.

        :returns: instance of operator object
        """
        try:
            # class is a Callable https://stackoverflow.com/a/34578836/3679900
            operator_obj: Callable[..., BaseOperator] = import_string(operator)
        except Exception as err:
            raise Exception(f"Failed to import operator: {operator}") from err
        try:
            if operator_obj in [
                    PythonOperator, BranchPythonOperator, PythonSensor
            ]:
                if (not task_params.get("python_callable")
                        and not task_params.get("python_callable_name")
                        and not task_params.get("python_callable_file")):
                    # pylint: disable=line-too-long
                    raise Exception(
                        "Failed to create task. PythonOperator, BranchPythonOperator and "
                        "PythonSensor require `python_callable_name` and "
                        "`python_callable_file` parameters.\nOptionally you can load "
                        "python_callable from a file with the special pyyaml notation:\n"
                        "  python_callable_file: !!python/name:my_module.my_func"
                    )
                if not task_params.get("python_callable"):
                    task_params[
                        "python_callable"]: Callable = utils.get_python_callable(
                            task_params["python_callable_name"],
                            task_params["python_callable_file"],
                        )
                    # remove dag-factory specific parameters
                    # Airflow 2.0 doesn't allow these to be passed to operator
                    del task_params["python_callable_name"]
                    del task_params["python_callable_file"]

            # Check for the custom success and failure callables in SqlSensor. These are considered
            # optional, so no failures in case they aren't found. Note: there's no reason to
            # declare both a callable file and a lambda function for success/failure parameter.
            # If both are found, the object will not throw an error; instead, the callable file
            # will take precedence over the lambda function.
            if operator_obj in [SqlSensor]:
                # Success checks
                if task_params.get("success_check_file") and task_params.get(
                        "success_check_name"):
                    task_params[
                        "success"]: Callable = utils.get_python_callable(
                            task_params["success_check_name"],
                            task_params["success_check_file"],
                        )
                    del task_params["success_check_name"]
                    del task_params["success_check_file"]
                elif task_params.get("success_check_lambda"):
                    task_params[
                        "success"]: Callable = utils.get_python_callable_lambda(
                            task_params["success_check_lambda"])
                    del task_params["success_check_lambda"]
                # Failure checks
                if task_params.get("failure_check_file") and task_params.get(
                        "failure_check_name"):
                    task_params[
                        "failure"]: Callable = utils.get_python_callable(
                            task_params["failure_check_name"],
                            task_params["failure_check_file"],
                        )
                    del task_params["failure_check_name"]
                    del task_params["failure_check_file"]
                elif task_params.get("failure_check_lambda"):
                    task_params[
                        "failure"]: Callable = utils.get_python_callable_lambda(
                            task_params["failure_check_lambda"])
                    del task_params["failure_check_lambda"]

            if operator_obj in [HttpSensor]:
                if not (task_params.get("response_check_name")
                        and task_params.get("response_check_file")
                        ) and not task_params.get("response_check_lambda"):
                    raise Exception(
                        "Failed to create task. HttpSensor requires "
                        "`response_check_name` and `response_check_file` parameters "
                        "or `response_check_lambda` parameter.")
                if task_params.get("response_check_file"):
                    task_params[
                        "response_check"]: Callable = utils.get_python_callable(
                            task_params["response_check_name"],
                            task_params["response_check_file"],
                        )
                    # remove dag-factory specific parameters
                    # Airflow 2.0 doesn't allow these to be passed to operator
                    del task_params["response_check_name"]
                    del task_params["response_check_file"]
                else:
                    task_params[
                        "response_check"]: Callable = utils.get_python_callable_lambda(
                            task_params["response_check_lambda"])
                    # remove dag-factory specific parameters
                    # Airflow 2.0 doesn't allow these to be passed to operator
                    del task_params["response_check_lambda"]

            # KubernetesPodOperator
            if operator_obj == KubernetesPodOperator:
                task_params["secrets"] = ([
                    Secret(**v) for v in task_params.get("secrets")
                ] if task_params.get("secrets") is not None else None)

                task_params["ports"] = ([
                    Port(**v) for v in task_params.get("ports")
                ] if task_params.get("ports") is not None else None)
                task_params["volume_mounts"] = ([
                    VolumeMount(**v) for v in task_params.get("volume_mounts")
                ] if task_params.get("volume_mounts") is not None else None)
                task_params["volumes"] = ([
                    Volume(**v) for v in task_params.get("volumes")
                ] if task_params.get("volumes") is not None else None)
                task_params["pod_runtime_info_envs"] = ([
                    PodRuntimeInfoEnv(**v)
                    for v in task_params.get("pod_runtime_info_envs")
                ] if task_params.get("pod_runtime_info_envs") is not None else
                                                        None)
                task_params["full_pod_spec"] = (
                    V1Pod(**task_params.get("full_pod_spec"))
                    if task_params.get("full_pod_spec") is not None else None)
                task_params["init_containers"] = ([
                    V1Container(**v)
                    for v in task_params.get("init_containers")
                ] if task_params.get("init_containers") is not None else None)
            if operator_obj == DockerOperator:
                if task_params.get("environment") is not None:
                    task_params["environment"] = {
                        k: os.environ.get(v, v)
                        for k, v in task_params["environment"].items()
                    }

            if operator_obj == EcsOperator:
                for c in task_params["overrides"]["containerOverrides"]:
                    if c.get('environment') is not None:
                        for env in c['environment']:
                            env['value'] = os.environ.get(
                                env['value'], env['value'])

                if 'ECS_SECURITY_GROUPS' in af_vars and 'network_configuration' in task_params:
                    task_params["network_configuration"]["awsvpcConfiguration"]['securityGroups'] \
                        = af_vars['ECS_SECURITY_GROUPS']

                if 'ECS_SUBNETS' in af_vars and 'network_configuration' in task_params:
                    task_params['network_configuration'][
                        "awsvpcConfiguration"]["subnets"] = af_vars[
                            "ECS_SUBNETS"]

                if af_vars.get('ECS_CLUSTER'):
                    task_params['cluster'] = af_vars["ECS_CLUSTER"]
                    task_params['task_definition'] = (
                        af_vars.get('ECS_CLUSTER') + '_' +
                        task_params['task_definition']).lower()

                    task_params['awslogs_group'] = \
                        task_params['awslogs_group'] + '/' + af_vars.get('ECS_CLUSTER').lower()

            if utils.check_dict_key(task_params, "execution_timeout_secs"):
                task_params["execution_timeout"]: timedelta = timedelta(
                    seconds=task_params["execution_timeout_secs"])
                del task_params["execution_timeout_secs"]

            if utils.check_dict_key(task_params, "sla_secs"):
                task_params["sla"]: timedelta = timedelta(
                    seconds=task_params["sla_secs"])
                del task_params["sla_secs"]

            if utils.check_dict_key(task_params, "execution_delta_secs"):
                task_params["execution_delta"]: timedelta = timedelta(
                    seconds=task_params["execution_delta_secs"])
                del task_params["execution_delta_secs"]

            if utils.check_dict_key(
                    task_params,
                    "execution_date_fn_name") and utils.check_dict_key(
                        task_params, "execution_date_fn_file"):
                task_params[
                    "execution_date_fn"]: Callable = utils.get_python_callable(
                        task_params["execution_date_fn_name"],
                        task_params["execution_date_fn_file"],
                    )
                del task_params["execution_date_fn_name"]
                del task_params["execution_date_fn_file"]

            # on_execute_callback is an Airflow 2.0 feature
            if utils.check_dict_key(
                    task_params, "on_execute_callback"
            ) and version.parse(AIRFLOW_VERSION) >= version.parse("2.0.0"):
                task_params["on_execute_callback"]: Callable = import_string(
                    task_params["on_execute_callback"])

            if utils.check_dict_key(task_params, "on_failure_callback"):
                task_params["on_failure_callback"]: Callable = import_string(
                    task_params["on_failure_callback"])

            if utils.check_dict_key(task_params, "on_success_callback"):
                task_params["on_success_callback"]: Callable = import_string(
                    task_params["on_success_callback"])

            if utils.check_dict_key(task_params, "on_retry_callback"):
                task_params["on_retry_callback"]: Callable = import_string(
                    task_params["on_retry_callback"])

            # use variables as arguments on operator
            if utils.check_dict_key(task_params, "variables_as_arguments"):
                variables: List[Dict[str, str]] = task_params.get(
                    "variables_as_arguments")
                for variable in variables:
                    if Variable.get(variable["variable"],
                                    default_var=None) is not None:
                        task_params[variable["attribute"]] = Variable.get(
                            variable["variable"], default_var=None)
                del task_params["variables_as_arguments"]

            # use variables as arguments on operator
            if utils.check_dict_key(task_params, "af_vars_as_arguments"):
                variables: List[Dict[str, str]] = task_params.get(
                    "af_vars_as_arguments")
                for variable in variables:
                    if af_vars.get(variable["variable"], None) is not None:
                        task_params[variable["attribute"]] = af_vars.get(
                            variable["variable"], None)
                del task_params["af_vars_as_arguments"]

            task: BaseOperator = operator_obj(**task_params)
        except Exception as err:
            raise Exception(f"Failed to create {operator_obj} task") from err
        return task
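An illustrative call to the extended factory, again shown as if make_task were in scope (the operator path, file path, and callable name are made up):

task = make_task(
    operator="airflow.operators.python.PythonOperator",
    task_params={
        "task_id": "transform",
        "python_callable_name": "run",            # resolved via utils.get_python_callable
        "python_callable_file": "/opt/dags/callables.py",
    },
    af_vars={},
)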