Example #1
def victim():
    m_client = mock.MagicMock()
    m_client.consumer_groups.list_by_event_hub.return_value = {
        MockEventHubClientResponse("group1"),
        MockEventHubClientResponse("group2")
    }
    m_client.consumer_groups.create_or_update.return_value = {}
    m_client.event_hubs.list_by_namespace.return_value = {
        MockEventHubClientResponse("hub1"),
        MockEventHubClientResponse("hub2")
    }
    m_client.event_hubs.list_authorization_rules.return_value = {
        MockEventHubClientResponse("rule1"),
        MockEventHubClientResponse("rule2")
    }
    m_client.event_hubs.list_keys.return_value = MockEventHubClientResponse(
        'potatoes1', 'potato-connection')

    with mock.patch("takeoff.step.ApplicationName.get", return_value="my_little_pony"), \
         mock.patch("takeoff.azure.configure_eventhub.ConfigureEventHub._get_eventhub_client", return_value=m_client), \
         mock.patch("takeoff.azure.configure_eventhub.KeyVaultClient.vault_and_client", return_value=(None, None)):
        conf = {**takeoff_config(), **BASE_CONF}
        conf['azure'].update({"eventhub_naming": "eventhub{env}"})
        return ConfigureEventHub(ApplicationVersion('DEV', 'local', 'foo'),
                                 conf)
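
The fixture above relies on the fact that attribute access on a MagicMock auto-creates nested child mocks, so a whole client API can be stubbed with plain assignments. A minimal, self-contained sketch of that behaviour, using illustrative names rather than the real Azure client:

from unittest import mock

client = mock.MagicMock()
# One assignment stubs a call several attributes deep.
client.consumer_groups.list_by_event_hub.return_value = ["group1", "group2"]

assert client.consumer_groups.list_by_event_hub("ns", "hub") == ["group1", "group2"]
client.consumer_groups.list_by_event_hub.assert_called_once_with("ns", "hub")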
Example #2
    def test_publish_python_package_pypi(self, _, __):
        conf = {**takeoff_config(), **BASE_CONF, "target": ["pypi"]}

        with mock.patch.object(victim, 'publish_to_pypi') as m:
            victim(FAKE_ENV, conf).publish_python_package()

        m.assert_called_once()
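
Here mock.patch.object(victim, 'publish_to_pypi') patches the method on the class itself, so the instance created inside the block calls the mock instead of the real method. A self-contained sketch of the same pattern with made-up class and method names:

from unittest import mock

class Publisher:
    def publish_to_pypi(self):
        raise RuntimeError("would hit the network")

    def publish_python_package(self):
        self.publish_to_pypi()

with mock.patch.object(Publisher, "publish_to_pypi") as m:
    Publisher().publish_python_package()

m.assert_called_once()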
Example #3
    def test_publish_python_package_blob(self, m1, m2, m3):
        conf = {**takeoff_config(), **BASE_CONF, "target": ["cloud_storage"]}

        with mock.patch.object(victim, 'upload_to_cloud_storage') as m:
            victim(FAKE_ENV, conf).publish_python_package()

        m.assert_called_once_with(file="some.whl", file_extension=".whl")
def victim():
    with mock.patch.dict(os.environ, env_variables), \
         mock.patch("takeoff.step.ApplicationName.get", return_value="my_little_pony"), \
         mock.patch("takeoff.azure.deploy_to_kubernetes.KeyVaultClient.vault_and_client", return_value=(None, None)):
        conf = {**takeoff_config(), **BASE_CONF}
        conf['azure'].update({"kubernetes_naming": "kubernetes{env}"})
        return DeployToKubernetes(ApplicationVersion("dev", "v", "branch"), conf)
def victim():
    m_jobs_api_client = mock.MagicMock()
    m_runs_api_client = mock.MagicMock()

    m_jobs_api_client.list_jobs.return_value = {
        "jobs": [
            {"job_id": "id1", "settings": {"name": "job1"}},
            {"job_id": "id2", "settings": {"name": "job2"}},
        ]
    }
    m_jobs_api_client.delete_job.return_value = True
    m_jobs_api_client.create_job.return_value = {"job_id": "job1"}
    m_jobs_api_client.run_now.return_value = {"run_id": "run1"}

    m_runs_api_client.list_runs.return_value = {
        "runs": [{"run_id": "run1"}, {"run_id": "run2"}]
    }

    with mock.patch("takeoff.azure.deploy_to_databricks.KeyVaultClient.vault_and_client", return_value=(None, None)), \
         mock.patch("takeoff.step.ApplicationName.get", return_value="my_app"), \
         mock.patch("takeoff.azure.deploy_to_databricks.Databricks", return_value=MockDatabricksClient()), \
         mock.patch("takeoff.azure.deploy_to_databricks.JobsApi", return_value=m_jobs_api_client), \
         mock.patch("takeoff.azure.deploy_to_databricks.RunsApi", return_value=m_runs_api_client):
        conf = {**takeoff_config(), **BASE_CONF}
        return DeployToDatabricks(ApplicationVersion('ACP', 'bar', 'foo'), conf)
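
Several of these builders stack many mock.patch context managers with backslash continuations. As an equivalent, a long stack can be expressed with contextlib.ExitStack; the sketch below uses stdlib targets only (not takeoff's modules) and an illustrative environment variable name:

import os
from contextlib import ExitStack
from unittest import mock

with ExitStack() as stack:
    stack.enter_context(mock.patch("os.getcwd", return_value="/tmp"))
    stack.enter_context(mock.patch.dict(os.environ, {"TAKEOFF_ENV": "dev"}))
    assert os.getcwd() == "/tmp"
    assert os.environ["TAKEOFF_ENV"] == "dev"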
Example #6
 def test_build_python_wheel_fail(self, m1, m2):
     conf = {**takeoff_config(), **BASE_CONF}
     with mock.patch("takeoff.build_artifact.run_shell_command",
                     return_value=(1, ['output_lines'])) as m:
         with pytest.raises(ChildProcessError):
             victim(FAKE_ENV, conf).build_sbt_assembly_jar()
     # the call check runs after the error has been caught by pytest.raises
     m.assert_called_once_with(["sbt", "clean", "assembly"])
Example #7
def victim():
    conf = {**takeoff_config(), **BASE_CONF}

    with mock.patch("takeoff.step.KeyVaultClient.vault_and_client",
                    return_value=(None, None)):
        return CreateApplicationInsights(
            ApplicationVersion("dev", "0.0.0", "my-branch"), conf)
Example #8
    def test_application_insights_with_databricks_secret(self, m1, m2, m3):
        conf = {
            **takeoff_config(),
            **BASE_CONF, 'create_databricks_secret': True
        }
        target = CreateApplicationInsights(
            ApplicationVersion("dev", "0.0.0", "my-branch"), conf)

        m_client = mock.MagicMock()
        m_client.components.create_or_update.return_value = MockApplicationInsights(
            "something", "my-key")

        with mock.patch(
                "takeoff.azure.create_application_insights.CreateApplicationInsights._create_client",
                return_value=m_client):
            with mock.patch(
                    "takeoff.azure.create_application_insights.ApplicationInsightsComponent"
            ) as m_app_insights_component:
                with mock.patch(
                        "takeoff.azure.create_application_insights.CreateApplicationInsights.create_databricks_secret"
                ) as m_create_databricks_secret:
                    target.create_application_insights()

        m_app_insights_component.assert_called_once_with(
            application_type='other', kind='other', location='west europe')

        m_create_databricks_secret.assert_called_once_with(
            'my_little_pony', Secret("instrumentation-key", "my-key"))
Example #9
    def test_validate_schema_invalid_target(self, _):
        conf = {**takeoff_config(), **BASE_CONF, "target": ["ivy"]}

        with pytest.raises(vol.Invalid):
            victim(ApplicationVersion("dev", "v", "branch"), conf)

        conf = {
            **takeoff_config(),
            **BASE_CONF,
            **{
                "language": "sbt",
                "target": ["pypi"]
            }
        }

        with pytest.raises(vol.Invalid):
            victim(ApplicationVersion("dev", "v", "branch"), conf)
    def test_validate_minimal_schema(self, _, __):
        conf = {**takeoff_config(), **BASE_CONF}
        conf['azure'].update({"kubernetes_naming": "kubernetes{env}"})

        res = DeployToKubernetes(ApplicationVersion("dev", "v", "branch"),
                                 conf)
        assert res.config[
            'kubernetes_config_path'] == "kubernetes_config/k8s.yml.j2"
Example #11
 def test_validate_invalid_schema(self):
     INVALID_CONF = {
          'task': 'create_application_insights',
          'application_type': 'invalid',
          'kind': 'invalid'
     }
     conf = {**takeoff_config(), **INVALID_CONF}
     with pytest.raises(MultipleInvalid):
         CreateApplicationInsights(ApplicationVersion("dev", "v", "branch"), conf)
Example #12
    def test_write_version(self):
        mopen = mock.mock_open()
        conf = {**takeoff_config(), **BASE_CONF}
        with mock.patch("builtins.open", mopen):
            victim(FAKE_ENV, conf)._write_version()

        mopen.assert_called_once_with("version.py", "w+")
        handle = mopen()
        handle.write.assert_called_once_with("__version__='v'")
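
A non-obvious detail in test_write_version is that mock_open exposes a single shared handle: calling mopen() after the patched block returns the same mock the code wrote to, which is why the write assertion works. A self-contained sketch with illustrative values:

from unittest import mock

mopen = mock.mock_open()
with mock.patch("builtins.open", mopen):
    with open("version.py", "w+") as f:
        f.write("__version__='v'")

mopen.assert_called_once_with("version.py", "w+")
# mopen() returns the same shared handle that received the write call
mopen().write.assert_called_once_with("__version__='v'")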
    def test_get_custom_values_invalid_env(self, _):
        custom_conf = {'custom_values': {'invalid_env': {'my_custom_value': 'hello'}}, **BASE_CONF}
        conf = {**takeoff_config(), **custom_conf}
        conf['azure'].update({"kubernetes_naming": "kubernetes{env}"})

        res = DeployToKubernetes(ApplicationVersion("dev", "v", "branch"), conf)

        with pytest.raises(ValueError):
            res._get_custom_values()
    def test_schema_validity(self, victim):
        conf = {
            **takeoff_config(),
            **{
                "task": "deploy_to_databricks",
                "jobs": [{"main_name": "foo", "name": "some-name"}],
            },
        }
        res = SCHEMA(conf)["jobs"][0]
        assert res["arguments"] == [{}]
        assert res["lang"] == "python"

        conf = {
            **takeoff_config(),
            **{
                "task": "deploy_to_databricks",
                "jobs": [
                    {
                        "main_name": "foo",
                        "name": "some-name",
                        "arguments": [{"key": "val"}],
                    }
                ],
            },
        }
        res = SCHEMA(conf)["jobs"][0]
        assert res["arguments"] == [{"key": "val"}]

        conf = {
            **takeoff_config(),
            **{
                "task": "deploy_to_databricks",
                "jobs": [
                    {
                        "main_name": "foo",
                        "name": "some-name",
                        "arguments": [{"key": "val"}, {"key2": "val2"}],
                    }
                ],
            },
        }
        res = SCHEMA(conf)["jobs"][0]
        assert res["arguments"] == [{"key": "val"}, {"key2": "val2"}]
    def test_validate_full_schema(self, _):
        conf = {**takeoff_config(),
                **BASE_CONF, **{
                "dockerfiles": [{
                    "file": "Dockerfile_custom",
                    "postfix": "Dave",
                    "custom_image_name": "Mustaine"
                }]}}

        DockerImageBuilder(ApplicationVersion("dev", "v", "branch"), conf)
Example #16
 def test_publish_to_pypi(self, m1, m2, m3):
     conf = {
         **takeoff_config(),
         **BASE_CONF, "language": "python",
         "target": ["pypi"]
     }
     env = ApplicationVersion('prd', '1.0.0', 'branch')
     with mock.patch("takeoff.azure.publish_artifact.upload") as m:
         victim(env, conf).publish_to_pypi()
     m.assert_called_once_with(upload_settings="foo", dists=["dist/*"])
Example #17
 def test_publish_to_pypi_no_tag(self, m1, m2, m3):
     conf = {
         **takeoff_config(),
         **BASE_CONF,
         "language": "python",
         "target": ["pypi"],
     }
     with mock.patch("takeoff.azure.publish_artifact.upload") as m:
         victim(FAKE_ENV, conf).publish_to_pypi()
     m.assert_not_called()
Example #18
def setup_victim(add_secrets: bool):
    secrets_conf = {}
    if add_secrets:
        secrets_conf = {
            'task':
            'create_databricks_secrets_from_vault',
            'dev': [
                {
                    'FOO': 'foo_value'
                },
                {
                    'BAR': 'bar_value'
                },
            ],
            'acp': [
                {
                    'FOO': 'fooacc_value'
                },
                {
                    'BAR': 'baracc_value'
                },
                {
                    'BAZ': 'baz_value'
                },
            ]
        }

    m_client = mock.MagicMock()
    m_client.consumer_groups.list_by_event_hub.return_value = {}
    m_client.list_scopes.return_value = {
        "scopes": [{
            "name": "scope1"
        }, {
            "name": " scope2"
        }]
    }
    m_client.create_scope.return_value = True
    m_client.put_secret.return_value = True

    with mock.patch("takeoff.step.ApplicationName.get", return_value="my_little_pony"), \
         mock.patch("takeoff.azure.create_databricks_secrets.KeyVaultClient.vault_and_client", return_value=(None, None)), \
         mock.patch("takeoff.azure.create_databricks_secrets.Databricks", return_value=MockDatabricksClient()), \
         mock.patch("takeoff.azure.create_databricks_secrets.SecretApi", return_value=m_client):
        conf = {
            **takeoff_config(),
            **BASE_CONF,
            **{
                "common": {
                    "databricks_library_path": "/path"
                }
            },
            **secrets_conf
        }
        return CreateDatabricksSecretsFromVault(
            ApplicationVersion('ACP', '0.0.0', 'my-branch'), conf)
    def test_get_custom_values(self, _):
        custom_conf = {'custom_values': {'dev': {'my_custom_value': 'hello'}}, **BASE_CONF}
        conf = {**takeoff_config(), **custom_conf}
        conf['azure'].update({"kubernetes_naming": "kubernetes{env}"})

        res = DeployToKubernetes(ApplicationVersion("dev", "v", "branch"), conf)

        result = res._get_custom_values()
        expected_result = {"my_custom_value": "hello"}

        assert result == expected_result
Example #20
    def test_publish_jar_package_ivy(self, m1, m2):
        conf = {
            **takeoff_config(),
            **BASE_CONF, "language": "scala",
            "target": ["ivy"]
        }

        with mock.patch.object(victim, 'publish_to_ivy') as m:
            victim(FAKE_ENV, conf).publish_jvm_package()

        m.assert_called_once()
Example #21
 def test_publish_to_ivy(self, _, __, ___):
     conf = {
         **takeoff_config(),
         **BASE_CONF, "language": "scala",
         "target": ["ivy"]
     }
     with mock.patch("takeoff.azure.publish_artifact.run_shell_command",
                     return_value=(0, ['output_lines'])) as m:
         victim(FAKE_ENV, conf).publish_to_ivy()
     m.assert_called_once_with(
         ["sbt", 'set version := "v-SNAPSHOT"', "publish"])
Example #22
 def test_publish_to_ivy_with_tag(self, m1, m2, m3):
     conf = {
         **takeoff_config(),
         **BASE_CONF, "language": "scala",
         "target": ["ivy"]
     }
     env = ApplicationVersion('prd', '1.0.0', 'branch')
     with mock.patch("takeoff.azure.publish_artifact.run_shell_command",
                     return_value=(0, ['output_lines'])) as m:
         victim(env, conf).publish_to_ivy()
     m.assert_called_once_with(["sbt", 'set version := "1.0.0"', "publish"])
    def test_validate_minimal_schema(self, _):
        conf = {**takeoff_config(), **BASE_CONF}

        res = DockerImageBuilder(ApplicationVersion("dev", "v", "branch"),
                                 conf)
        assert res.config['dockerfiles'] == [{
            "file": "Dockerfile",
            "postfix": None,
            "prefix": None,
            "custom_image_name": None,
            'tag_release_as_latest': True
        }]
    def test_validate_await_invalid_resource_name(self, _, __):
        custom_conf = {
            'wait_for_rollout': {
                'resource_name': 'invalid_name',
                'resource_namespace': 'my_space'
            },
            **BASE_CONF
        }
        conf = {**takeoff_config(), **custom_conf}

        with pytest.raises(voluptuous.error.MultipleInvalid):
            DeployToKubernetes(ApplicationVersion("dev", "v", "branch"), conf)
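
Schema violations in these tests surface as voluptuous.error.MultipleInvalid, which pytest.raises catches. A self-contained sketch of that pattern with an illustrative schema rather than takeoff's:

import pytest
import voluptuous as vol

schema = vol.Schema({vol.Required("resource_name"): vol.Match(r"^[a-z0-9-]+$")})

# "invalid_name" fails the regex, so Schema raises MultipleInvalid.
with pytest.raises(vol.MultipleInvalid):
    schema({"resource_name": "invalid_name"})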
Example #25
 def test_upload_file_to_blob(self, m1, m2):
     conf = {
         **takeoff_config(),
         **BASE_CONF, "language": "scala",
         "target": ["ivy"]
     }
     with mock.patch.object(azure.storage.blob, "BlockBlobService") as m:
         victim(FAKE_ENV, conf)._upload_file_to_azure_storage_account(
             m, "Dave", "Mustaine", "mylittlepony")
     m.create_blob_from_path.assert_called_once_with(
         container_name="mylittlepony",
         blob_name="Mustaine",
         file_path="Dave")
Example #26
    def test_publish_python_package_blob_with_file(self, m1, m2, m3):
        conf = {
            **takeoff_config(),
            **BASE_CONF,
            "target": ["cloud_storage"],
            "python_file_path": "main.py",
        }

        with mock.patch.object(victim, 'upload_to_cloud_storage') as m:
            victim(FAKE_ENV, conf).publish_python_package()

        calls = [
            mock.call(file="some.whl", file_extension=".whl"),
            mock.call(file="main.py", file_extension=".py")
        ]
        m.assert_has_calls(calls)
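
When a mocked method should have been hit more than once, the tests switch from assert_called_once_with to assert_has_calls with a list of mock.call objects, as above. A minimal self-contained sketch with illustrative arguments:

from unittest import mock

upload = mock.MagicMock()
upload(file="some.whl", file_extension=".whl")
upload(file="main.py", file_extension=".py")

# assert_has_calls checks the recorded calls in order.
upload.assert_has_calls([
    mock.call(file="some.whl", file_extension=".whl"),
    mock.call(file="main.py", file_extension=".py"),
])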
Example #27
    def test_deploy_to_databricks_custom_name(self, _, victim):
        CUSTOM_CONF = {"task": "deploy_to_databricks", "jobs": [{"main_name": "Dave", "name": "baboon-job"}]}
        victim.config = victim.validate({**takeoff_config(), **CUSTOM_CONF})

        job_config = {
            "new_cluster": {
                "spark_version": "4.1.x-scala2.11",
                "spark_conf": {
                    "spark.sql.warehouse.dir": "/some_",
                    "some.setting": "true",
                },
                "cluster_log_conf": {
                    "dbfs": {"destination": "dbfs:/mnt/sdh/logs/job_with_schedule"}
                },
            },
            "name": "job_with_schedule",
            "libraries": [
                {"whl": "dbfs:/mnt/libraries/version/version-bar-py3-none-any.whl"},
                {"jar": "some.jar"}
            ],
            "spark_python_task": {
                "python_file": "dbfs:/mnt/libraries/version/version-main-bar.py",
                "parameters": ["--key", "val", "--key2", "val2"]
            }
        }
        with mock.patch(
                "takeoff.azure.deploy_to_databricks.DeployToDatabricks.create_config",
                return_value=job_config,
        ) as config_mock:
            with mock.patch(
                    "takeoff.azure.deploy_to_databricks.DeployToDatabricks.remove_job"
            ) as remove_mock:
                with mock.patch(
                        "takeoff.azure.deploy_to_databricks.DeployToDatabricks._submit_job"
                ) as submit_mock:
                    victim.deploy_to_databricks()

        remove_mock.assert_called_once_with("my_app-baboon-job-SNAPSHOT", is_streaming=True)
        submit_mock.assert_called_once_with(job_config)
Example #28
 def test_build_python_wheel(self, m1, m2):
     conf = {**takeoff_config(), **BASE_CONF}
     with mock.patch("takeoff.build_artifact.run_shell_command",
                     return_value=(0, ['output_lines'])) as m:
         victim(FAKE_ENV, conf).build_python_wheel()
     m.assert_called_once_with(["python", "setup.py", "bdist_wheel"])
Example #29
    def test_build_sbt(self):
        conf = {**takeoff_config(), **BASE_CONF, "build_tool": "sbt"}

        with mock.patch.object(victim, "build_sbt_assembly_jar") as m:
            victim(FAKE_ENV, conf).run()
        m.assert_called_once()
Example #30
    def test_build_python(self):
        conf = {**takeoff_config(), **BASE_CONF}

        with mock.patch.object(victim, "build_python_wheel") as m:
            victim(FAKE_ENV, conf).run()
        m.assert_called_once()