Example #1
0
    def test_slug(self, _):
        """Bucket env variables should be exposed under the datasource slug."""
        DAGAuthorizedDatasource.objects.create(
            dag=self.PIPELINE, datasource=self.BUCKET, slug="slug1"
        )

        # Test
        credentials = PipelinesCredentials(self.PIPELINE)
        pipelines_credentials(credentials)

        # Validate: the STS keys must be present, non-empty strings; pop them
        # so the remaining env can be compared exactly below.
        for sts_key in (
            "AWS_ACCESS_KEY_ID",
            "AWS_SECRET_ACCESS_KEY",
            "AWS_SESSION_TOKEN",
        ):
            sts_value = credentials.env.pop(sts_key)
            self.assertIsInstance(sts_value, str)
            self.assertGreater(len(sts_value), 0)

        expected_env = {
            "AWS_DEFAULT_REGION": "eu-central-1",
            "AWS_S3_BUCKET_NAMES": "hexa-test-bucket-1",
            "AWS_BUCKET_SLUG1_NAME": "hexa-test-bucket-1",
            "AWS_FRESH_ROLE": "TRUE",
        }
        self.assertEqual(expected_env, credentials.env)
Example #2
0
    def test_existing_role(self, _):
        """
        When the role already exists, we should not create it again
        But the policy should be updated
        """

        # Setup: pre-create the role so the implementation must reuse it.
        role_name = "hexa-app-test-p-" + str(self.PIPELINE.id)
        self.CLIENT.create_role(
            Path="/hexa-app-unittest/pipelines/",
            RoleName=role_name,
            AssumeRolePolicyDocument="some document",
        )

        DAGAuthorizedDatasource.objects.create(
            dag=self.PIPELINE, datasource=self.BUCKET
        )

        # Test
        credentials = PipelinesCredentials(self.PIPELINE)
        pipelines_credentials(credentials)

        # Check that we did not create a new role
        self.assertEqual(1, len(self.CLIENT.list_roles()["Roles"]))

        # Check that the role has the correct policies
        policy_data = self.CLIENT.get_role_policy(
            RoleName=role_name, PolicyName="s3-access"
        )
        expected_policy = {
            "Statement": [
                {
                    "Action": "s3:*",
                    "Effect": "Allow",
                    "Resource": [
                        "arn:aws:s3:::hexa-test-bucket-1",
                        "arn:aws:s3:::hexa-test-bucket-1/*",
                    ],
                    "Sid": "S3AllActions",
                }
            ],
            "Version": "2012-10-17",
        }
        self.assertEqual(policy_data["PolicyDocument"], expected_policy)

        # Check that the STS credentials belong to the correct role
        sts_client = boto3.client(
            "sts",
            aws_access_key_id=credentials.env["AWS_ACCESS_KEY_ID"],
            aws_secret_access_key=credentials.env["AWS_SECRET_ACCESS_KEY"],
        )
        identity = sts_client.get_caller_identity()

        self.assertEqual(
            parse_arn(identity["Arn"])["resource"].split("/")[0], role_name
        )
Example #3
0
    def test_slug(self):
        """DHIS2 env variables should be exposed under the datasource slug."""
        DAGAuthorizedDatasource.objects.create(
            dag=self.PIPELINE,
            datasource=self.INSTANCE,
            slug="slug1",
        )

        credentials = PipelinesCredentials(self.PIPELINE)
        pipelines_credentials(credentials)

        expected_env = {
            "DHIS2_INSTANCES_SLUGS": "SLUG1",
            "DHIS2_SLUG1_PASSWORD": "******",
            "DHIS2_SLUG1_URL": "https://dhis2.example.com",
            "DHIS2_SLUG1_USERNAME": "******",
        }
        self.assertEqual(expected_env, credentials.env)
Example #4
0
    def test_single(self):
        """A single authorized database should yield the full PostgreSQL env."""
        DAGAuthorizedDatasource.objects.create(
            dag=self.PIPELINE, datasource=self.DATABASE
        )

        credentials = PipelinesCredentials(self.PIPELINE)
        pipelines_credentials(credentials)

        expected_env = {
            "POSTGRESQL_DATABASE_NAMES": "DB1",
            "POSTGRESQL_DB1_DATABASE": "db1",
            "POSTGRESQL_DB1_HOSTNAME": "localhost",
            "POSTGRESQL_DB1_PASSWORD": "******",
            "POSTGRESQL_DB1_PORT": "5432",
            "POSTGRESQL_DB1_URL": "postgresql://*****:*****@localhost:5432/db1",
            "POSTGRESQL_DB1_USERNAME": "******",
        }
        self.assertEqual(expected_env, credentials.env)
Example #5
0
    def test_new_role(self, _):
        """A fresh pipeline should get a newly-created IAM role with an S3 policy."""
        # Setup
        DAGAuthorizedDatasource.objects.create(
            dag=self.PIPELINE, datasource=self.BUCKET
        )

        # Test
        credentials = PipelinesCredentials(self.PIPELINE)
        pipelines_credentials(credentials)

        # Check that we did create the role
        roles = self.CLIENT.list_roles()["Roles"]
        self.assertEqual(1, len(roles))

        role_name = "hexa-app-test-p-" + str(self.PIPELINE.id)
        self.assertEqual(role_name, roles[0]["RoleName"])

        self.assertEqual(
            "/hexa-app-unittest/pipelines/connector_airflow/", roles[0]["Path"]
        )

        # Check that the role has the correct policies
        policy_data = self.CLIENT.get_role_policy(
            RoleName=role_name, PolicyName="s3-access"
        )
        expected_policy = {
            "Statement": [
                {
                    "Action": "s3:*",
                    "Effect": "Allow",
                    "Resource": [
                        "arn:aws:s3:::hexa-test-bucket-1",
                        "arn:aws:s3:::hexa-test-bucket-1/*",
                    ],
                    "Sid": "S3AllActions",
                }
            ],
            "Version": "2012-10-17",
        }
        self.assertEqual(policy_data["PolicyDocument"], expected_policy)

        # Check that the STS credentials belong to the correct role
        sts_client = boto3.client(
            "sts",
            aws_access_key_id=credentials.env["AWS_ACCESS_KEY_ID"],
            aws_secret_access_key=credentials.env["AWS_SECRET_ACCESS_KEY"],
        )
        identity = sts_client.get_caller_identity()

        self.assertEqual(
            parse_arn(identity["Arn"])["resource"].split("/")[0], role_name
        )

        # Check that we do send the correct env variables: STS keys must be
        # present, non-empty strings; pop them so the exact comparison below
        # covers the rest of the env.
        for sts_key in (
            "AWS_ACCESS_KEY_ID",
            "AWS_SECRET_ACCESS_KEY",
            "AWS_SESSION_TOKEN",
        ):
            sts_value = credentials.env.pop(sts_key)
            self.assertIsInstance(sts_value, str)
            self.assertGreater(len(sts_value), 0)

        expected_env = {
            "AWS_DEFAULT_REGION": "eu-central-1",
            "AWS_S3_BUCKET_NAMES": "hexa-test-bucket-1",
            "AWS_FRESH_ROLE": "TRUE",
        }
        self.assertEqual(expected_env, credentials.env)