    def refactor_amazon_package(self):
        """
        Fixes to "amazon" providers package.

        Copies some of the classes used from core Airflow to the "common.utils" package of
        the provider and renames imports to use them from there.

        We copy typing_compat.py and change the imports as in the example diff:

        .. code-block:: diff

            --- ./airflow/providers/amazon/aws/operators/ecs.py
            +++ ./airflow/providers/amazon/aws/operators/ecs.py
            @@ -24,7 +24,7 @@
             from airflow.models import BaseOperator
             from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
             from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
            -from airflow.typing_compat import Protocol, runtime_checkable
            +from airflow.providers.amazon.common.utils.typing_compat import Protocol, runtime_checkable
             from airflow.utils.decorators import apply_defaults

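        The rewrite itself is driven by a Bowler query on self.qry (see the method
        body). A minimal standalone sketch of the same pattern, for illustration
        only (the path and the execute() flags are examples):

        .. code-block:: python

            from bowler import Query

            def amazon_only(node, capture, filename):
                # Limit the rename to files under the amazon provider tree.
                return filename.startswith("./airflow/providers/amazon/")

            (
                Query("./airflow/providers/amazon/")
                .select_module("airflow.typing_compat")
                .filter(callback=amazon_only)
                .rename("airflow.providers.amazon.common.utils.typing_compat")
                .execute(write=True, interactive=False)
            )
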
        """
        def amazon_package_filter(node: LN, capture: Capture,
                                  filename: Filename) -> bool:
            return filename.startswith("./airflow/providers/amazon/")

        os.makedirs(os.path.join(get_target_providers_package_folder("amazon"),
                                 "common", "utils"),
                    exist_ok=True)
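        # Reuse airflow/utils/__init__.py as a package marker so that "common"
        # and "common.utils" become importable packages inside the provider.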
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "__init__.py"),
            os.path.join(get_target_providers_package_folder("amazon"),
                         "common", "__init__.py"),
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "__init__.py"),
            os.path.join(get_target_providers_package_folder("amazon"),
                         "common", "utils", "__init__.py"),
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow",
                         "typing_compat.py"),
            os.path.join(get_target_providers_package_folder("amazon"),
                         "common", "utils", "typing_compat.py"),
        )
        (self.qry.select_module("airflow.typing_compat").filter(
            callback=amazon_package_filter).rename(
                "airflow.providers.amazon.common.utils.typing_compat"))

        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "email.py"),
            os.path.join(get_target_providers_package_folder("amazon"),
                         "common", "utils", "email.py"),
        )
        (self.qry.select_module("airflow.utils.email").filter(
            callback=amazon_package_filter).rename(
                "airflow.providers.amazon.common.utils.email"))

    def refactor_apache_beam_package(self):
        r"""
        Fixes to "apache_beam" providers package.

        Copies some of the classes used from core Airflow to the "common.utils" package of
        the provider and renames imports to use them from there. Note that in this case we
        also rename the imports in the copied files.

        For example, we copy python_virtualenv.py and process_utils.py and change the
        imports as in the example diff:

        .. code-block:: diff

            --- ./airflow/providers/apache/beam/common/utils/python_virtualenv.py
            +++ ./airflow/providers/apache/beam/common/utils/python_virtualenv.py
            @@ -21,7 +21,7 @@
             \"\"\"
             from typing import List, Optional

            -from airflow.utils.process_utils import execute_in_subprocess
            +from airflow.providers.apache.beam.common.utils.process_utils import execute_in_subprocess


             def _generate_virtualenv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool)

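        The copied files are rewritten as well: they land under the provider tree, so
        they match the same path filter as the provider sources. A minimal
        illustration of that check:

        .. code-block:: python

            filename = "./airflow/providers/apache/beam/common/utils/python_virtualenv.py"
            assert filename.startswith("./airflow/providers/apache/beam")
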
        """

        def apache_beam_package_filter(node: LN, capture: Capture, filename: Filename) -> bool:
            return filename.startswith("./airflow/providers/apache/beam")

        os.makedirs(
            os.path.join(get_target_providers_package_folder("apache.beam"), "common", "utils"), exist_ok=True
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"),
            os.path.join(
                get_target_providers_package_folder("apache.beam"), "common", "utils", "__init__.py"
            ),
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils", "python_virtualenv.py"),
            os.path.join(
                get_target_providers_package_folder("apache.beam"), "common", "utils", "python_virtualenv.py"
            ),
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils", "process_utils.py"),
            os.path.join(
                get_target_providers_package_folder("apache.beam"), "common", "utils", "process_utils.py"
            ),
        )
        (
            self.qry.select_module("airflow.utils.python_virtualenv")
            .filter(callback=apache_beam_package_filter)
            .rename("airflow.providers.apache.beam.common.utils.python_virtualenv")
        )
        (
            self.qry.select_module("airflow.utils.process_utils")
            .filter(callback=apache_beam_package_filter)
            .rename("airflow.providers.apache.beam.common.utils.process_utils")
        )

    def rename_deprecated_modules(self) -> None:
        """
        Renames imports of new module paths back to their deprecated pre-2.0 names.
        Example diff generated:

        .. code-block:: diff

            --- ./airflow/providers/dingding/operators/dingding.py
            +++ ./airflow/providers/dingding/operators/dingding.py
            @@ -16,7 +16,7 @@
             # specific language governing permissions and limitations
             # under the License.

            -from airflow.operators.bash import BaseOperator
            +from airflow.operators.bash_operator import BaseOperator
             from airflow.providers.dingding.hooks.dingding import DingdingHook
             from airflow.utils.decorators import apply_defaults

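        Each pair in the changes list below maps a new (Airflow 2.0) module path back
        to its deprecated pre-2.0 name, and the loop issues one Bowler rename per
        pair. A standalone sketch of the same loop, with an example path and a
        single pair:

        .. code-block:: python

            from bowler import Query

            qry = Query("./airflow/providers/")
            for new, old in [("airflow.operators.bash", "airflow.operators.bash_operator")]:
                qry.select_module(new).rename(old)
            # Nothing is modified until execute() runs the accumulated transforms.
            qry.execute(write=True, interactive=False)
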
        """
        changes = [
            ("airflow.hooks.base", "airflow.hooks.base_hook"),
            ("airflow.hooks.dbapi", "airflow.hooks.dbapi_hook"),
            ("airflow.operators.bash", "airflow.operators.bash_operator"),
            ("airflow.operators.branch", "airflow.operators.branch_operator"),
            ("airflow.operators.dummy", "airflow.operators.dummy_operator"),
            ("airflow.operators.python", "airflow.operators.python_operator"),
            ("airflow.sensors.base", "airflow.sensors.base_sensor_operator"),
            ("airflow.sensors.date_time", "airflow.sensors.date_time_sensor"),
            ("airflow.sensors.external_task",
             "airflow.sensors.external_task_sensor"),
            ("airflow.sensors.sql", "airflow.sensors.sql_sensor"),
            ("airflow.sensors.time_delta",
             "airflow.sensors.time_delta_sensor"),
            ("airflow.sensors.weekday",
             "airflow.contrib.sensors.weekday_sensor"),
            ("airflow.utils.session", "airflow.utils.db"),
        ]
        for new, old in changes:
            self.qry.select_module(new).rename(old)

        def is_not_k8spodop(node: LN, capture: Capture,
                            filename: Filename) -> bool:
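            # kubernetes_pod.py is excluded from the rename back to
            # "airflow.kubernetes"; it keeps the backcompat imports.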
            return not filename.endswith("/kubernetes_pod.py")

        (
            self.qry.select_module("airflow.providers.cncf.kubernetes.backcompat")
            .filter(callback=is_not_k8spodop)
            .rename("airflow.kubernetes")
        )

        self.qry.select_module(
            "airflow.providers.cncf.kubernetes.backcompat.pod_runtime_info_env"
        ).rename("airflow.kubernetes.pod_runtime_info_env")

        backcompat_target_folder = os.path.join(
            get_target_providers_package_folder("cncf.kubernetes"),
            "backcompat")
        # Remove backcompat classes that are imported from "airflow.kubernetes"
        for file in ['pod.py', 'pod_runtime_info_env.py', 'volume.py', 'volume_mount.py']:
            os.remove(os.path.join(backcompat_target_folder, file))

    def refactor_odbc_package(self):
        """
        Fixes to "odbc" providers package.

        Copies some of the classes used from core Airflow to the "utils" package of
        the provider and renames imports to use them from there.

        We copy helpers.py and change the imports as in the example diff:

        .. code-block:: diff

            --- ./airflow/providers/google/cloud/example_dags/example_datacatalog.py
            +++ ./airflow/providers/google/cloud/example_dags/example_datacatalog.py
            @@ -37,7 +37,7 @@
                 CloudDataCatalogUpdateTagTemplateOperator,
             )
             from airflow.utils.dates import days_ago
            -from airflow.utils.helpers import chain
            +from airflow.providers.odbc.utils.helpers import chain

             default_args = {"start_date": days_ago(1)}


        """
        def odbc_package_filter(node: LN, capture: Capture,
                                filename: Filename) -> bool:
            return filename.startswith("./airflow/providers/odbc/")

        os.makedirs(os.path.join(get_target_providers_folder(), "odbc",
                                 "utils"),
                    exist_ok=True)
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "__init__.py"),
            os.path.join(get_target_providers_package_folder("odbc"), "utils",
                         "__init__.py"),
        )
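        # copy_helper_py_file (defined elsewhere in this script) copies
        # airflow/utils/helpers.py into the provider package.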
        copy_helper_py_file(
            os.path.join(get_target_providers_package_folder("odbc"), "utils",
                         "helpers.py"))

        (self.qry.select_module("airflow.utils.helpers").filter(
            callback=odbc_package_filter).rename(
                "airflow.providers.odbc.utils.helpers"))

    def refactor_google_package(self):
        r"""
        Fixes to "google" providers package.

        Copies some of the classes used from core Airflow to the "common.utils" package of
        the provider and renames imports to use them from there. Note that in this case we
        also rename the imports in the copied files.

        For example, we copy python_virtualenv.py and process_utils.py and change the
        imports as in the example diff:

        .. code-block:: diff

            --- ./airflow/providers/google/cloud/operators/kubernetes_engine.py
            +++ ./airflow/providers/google/cloud/operators/kubernetes_engine.py
            @@ -28,11 +28,11 @@

             from airflow.exceptions import AirflowException
             from airflow.models import BaseOperator
            -from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
            +from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
             from airflow.providers.google.cloud.hooks.kubernetes_engine import GKEHook
             from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
             from airflow.utils.decorators import apply_defaults
            -from airflow.utils.process_utils import execute_in_subprocess, patch_environ
            +from airflow.providers.google.common.utils.process_utils import execute_in_subprocess, patch_environ


        And in the copied python_virtualenv.py we also change the import of process_utils.
        This happens automatically and is handled by Bowler.


        .. code-block:: diff

            --- ./airflow/providers/google/common/utils/python_virtualenv.py
            +++ ./airflow/providers/google/common/utils/python_virtualenv.py
            @@ -21,7 +21,7 @@
             \"\"\"
             from typing import List, Optional

            -from airflow.utils.process_utils import execute_in_subprocess
            +from airflow.providers.google.common.utils.process_utils import execute_in_subprocess


             def _generate_virtualenv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool)


        We also rename BaseOperatorLink imports so that they come from airflow.models.baseoperator:


        .. code-block:: diff

            --- ./airflow/providers/google/cloud/operators/mlengine.py
            +++ ./airflow/providers/google/cloud/operators/mlengine.py
            @@ -24,7 +24,7 @@
             from typing import List, Optional

             from airflow.exceptions import AirflowException
            -from airflow.models import BaseOperator, BaseOperatorLink
            +from airflow.models.baseoperator import BaseOperator, BaseOperatorLink
             from airflow.models.taskinstance import TaskInstance
             from airflow.providers.google.cloud.hooks.mlengine import MLEngineHook
             from airflow.utils.decorators import apply_defaults

        We also copy helpers.py (to google.common.utils) and rename imports to use the copied helpers:

        .. code-block:: diff

            --- ./airflow/providers/google/cloud/example_dags/example_datacatalog.py
            +++ ./airflow/providers/google/cloud/example_dags/example_datacatalog.py
            @@ -37,7 +37,7 @@
                 CloudDataCatalogUpdateTagTemplateOperator,
             )
             from airflow.utils.dates import days_ago
            -from airflow.utils.helpers import chain
            +from airflow.providers.google.common.utils.helpers import chain

             default_args = {"start_date": days_ago(1)}

        And also module_loading.py, which is used by helpers.py:

        .. code-block:: diff

            --- ./airflow/providers/google/common/utils/helpers.py
            +++ ./airflow/providers/google/common/utils/helpers.py
            @@ -26,7 +26,7 @@
             from jinja2 import Template

             from airflow.exceptions import AirflowException
            -from airflow.utils.module_loading import import_string
            +from airflow.providers.google.common.utils.module_loading import import_string

             KEY_REGEX = re.compile(r'^[\w.-]+$')

        """
        def google_package_filter(node: LN, capture: Capture,
                                  filename: Filename) -> bool:
            return filename.startswith("./airflow/providers/google/")

        def pure_airflow_models_filter(node: LN, capture: Capture,
                                       filename: Filename) -> bool:
            """Check if select is exactly [airflow, . , models]"""
            return len(list(node.children[1].leaves())) == 3

        def _contains_chain_in_import_filter(node: LN, capture: Capture,
                                             filename: Filename) -> bool:
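            # In Airflow 2.0, "chain" is importable from airflow.models.baseoperator;
            # such imports inside the google provider are redirected to the copied
            # helpers.py (see the rename below).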
            if "module_import" in capture:
                return bool(
                    "chain" in capture["module_import"].value
                ) and filename.startswith("./airflow/providers/google/")
            return False

        os.makedirs(os.path.join(get_target_providers_package_folder("google"),
                                 "common", "utils"),
                    exist_ok=True)
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "__init__.py"),
            os.path.join(get_target_providers_package_folder("google"),
                         "common", "utils", "__init__.py"),
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "python_virtualenv.py"),
            os.path.join(get_target_providers_package_folder("google"),
                         "common", "utils", "python_virtualenv.py"),
        )

        copy_helper_py_file(
            os.path.join(get_target_providers_package_folder("google"),
                         "common", "utils", "helpers.py"))

        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "module_loading.py"),
            os.path.join(get_target_providers_package_folder("google"),
                         "common", "utils", "module_loading.py"),
        )
        (self.qry.select_module("airflow.utils.python_virtualenv").filter(
            callback=google_package_filter).rename(
                "airflow.providers.google.common.utils.python_virtualenv"))
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "process_utils.py"),
            os.path.join(get_target_providers_package_folder("google"),
                         "common", "utils", "process_utils.py"),
        )
        (self.qry.select_module("airflow.utils.process_utils").filter(
            callback=google_package_filter).rename(
                "airflow.providers.google.common.utils.process_utils"))

        (self.qry.select_module("airflow.models.baseoperator").filter(
            callback=_contains_chain_in_import_filter).rename(
                "airflow.providers.google.common.utils.helpers"))

        (self.qry.select_module("airflow.utils.helpers").filter(
            callback=google_package_filter).rename(
                "airflow.providers.google.common.utils.helpers"))

        (self.qry.select_module("airflow.utils.module_loading").filter(
            callback=google_package_filter).rename(
                "airflow.providers.google.common.utils.module_loading"))

        (
            # Fix BaseOperatorLink imports
            self.qry.select_module("airflow.models")
            .is_filename(include=r"bigquery\.py|mlengine\.py")
            .filter(callback=google_package_filter)
            .filter(pure_airflow_models_filter)
            .rename("airflow.models.baseoperator")
        )

    def refactor_elasticsearch_package(self):
        """
        Fixes to "elasticsearch" providers package.

        Copies some of the classes used from core Airflow to the "common.utils" package of
        the provider and renames imports to use them from there.

        We copy file_task_handler.py and change the imports as in the example diff:

        .. code-block:: diff

            --- ./airflow/providers/elasticsearch/log/es_task_handler.py
            +++ ./airflow/providers/elasticsearch/log/es_task_handler.py
            @@ -24,7 +24,7 @@
             from airflow.configuration import conf
             from airflow.models import TaskInstance
             from airflow.utils import timezone
             from airflow.utils.helpers import parse_template_string
            -from airflow.utils.log.file_task_handler import FileTaskHandler
            +from airflow.providers.elasticsearch.common.utils.log.file_task_handler import FileTaskHandler
             from airflow.utils.log.json_formatter import JSONFormatter
             from airflow.utils.log.logging_mixin import LoggingMixin

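        The copies create the following layout under the provider package:

        .. code-block:: text

            airflow/providers/elasticsearch/common/
                __init__.py
                utils/
                    __init__.py
                    log/
                        __init__.py
                        file_task_handler.py
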
        """
        def elasticsearch_package_filter(node: LN, capture: Capture,
                                         filename: Filename) -> bool:
            return filename.startswith("./airflow/providers/elasticsearch/")

        os.makedirs(
            os.path.join(get_target_providers_package_folder("elasticsearch"),
                         "common", "utils", "log"),
            exist_ok=True,
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "__init__.py"),
            os.path.join(get_target_providers_package_folder("elasticsearch"),
                         "common", "__init__.py"),
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "__init__.py"),
            os.path.join(get_target_providers_package_folder("elasticsearch"),
                         "common", "utils", "__init__.py"),
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "log", "__init__.py"),
            os.path.join(get_target_providers_package_folder("elasticsearch"),
                         "common", "utils", "log", "__init__.py"),
        )
        copyfile(
            os.path.join(get_source_airflow_folder(), "airflow", "utils",
                         "log", "file_task_handler.py"),
            os.path.join(
                get_target_providers_package_folder("elasticsearch"),
                "common",
                "utils",
                "log",
                "file_task_handler.py",
            ),
        )
        (self.qry.select_module("airflow.utils.log.file_task_handler").filter(
            callback=elasticsearch_package_filter
        ).rename(
            "airflow.providers.elasticsearch.common.utils.log.file_task_handler"
        ))