Example #1
        def _api_call(self, **kwargs):
            operation_model = service_model.operation_model(operation_name)
            event_name = (
                'before-parameter-build.{endpoint_prefix}.{operation_name}')
            self.meta.events.emit(event_name.format(
                endpoint_prefix=service_model.endpoint_prefix,
                operation_name=operation_name),
                                  params=kwargs,
                                  model=operation_model)

            request_dict = self._serializer.serialize_to_request(
                kwargs, operation_model)

            self.meta.events.emit(
                'before-call.{endpoint_prefix}.{operation_name}'.format(
                    endpoint_prefix=service_model.endpoint_prefix,
                    operation_name=operation_name),
                model=operation_model,
                params=request_dict,
                request_signer=self._request_signer)

            http, parsed_response = self._endpoint.make_request(
                operation_model, request_dict)

            self.meta.events.emit(
                'after-call.{endpoint_prefix}.{operation_name}'.format(
                    endpoint_prefix=service_model.endpoint_prefix,
                    operation_name=operation_name),
                http_response=http,
                parsed=parsed_response,
                model=operation_model)

            if http.status_code >= 300:
                raise ClientError(parsed_response, operation_name)
            else:
                return parsed_response
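
The fragment above is botocore's generated client method: it serializes the call, emits hooks, and raises ClientError for any response with a status code of 300 or higher. A minimal sketch of what that exception carries, using illustrative values rather than anything from the example:

from botocore.exceptions import ClientError

error_response = {'Error': {'Code': 'NoSuchKey', 'Message': 'The key does not exist.'}}

try:
    raise ClientError(error_response, 'GetObject')
except ClientError as e:
    assert e.response['Error']['Code'] == 'NoSuchKey'  # the full response dict is preserved
    assert e.operation_name == 'GetObject'
    # str(e) renders as:
    # "An error occurred (NoSuchKey) when calling the GetObject operation: The key does not exist."
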
Example #2
    def test_get_file_keys_from_s3_with_manifest_id(self):
        """Test get_file_keys_from_s3_with_manifest_id."""
        files = self.report_processor.get_file_keys_from_s3_with_manifest_id(
            "request_id", "s3_path", "manifest_id")
        self.assertEqual(files, [])

        with patch("masu.processor.parquet.parquet_report_processor.settings",
                   ENABLE_PARQUET_PROCESSING=True):
            with patch(
                    "masu.processor.parquet.parquet_report_processor.get_s3_resource"
            ) as mock_s3:
                files = self.report_processor.get_file_keys_from_s3_with_manifest_id(
                    "request_id", None, "manifest_id")
                self.assertEqual(files, [])

        with patch("masu.processor.parquet.parquet_report_processor.settings",
                   ENABLE_PARQUET_PROCESSING=True):
            with patch(
                    "masu.processor.parquet.parquet_report_processor.get_s3_resource"
            ) as mock_s3:
                mock_s3.side_effect = ClientError({}, "Error")
                files = self.report_processor.get_file_keys_from_s3_with_manifest_id(
                    "request_id", "s3_path", "manifest_id")
                self.assertEqual(files, [])
Example #3
    def test_bucket_exists_with_non_existent_bucket(self):
        # connection_manager.call is called twice and should raise the
        # Not Found ClientError only on the first call.
        self.template.s3_details = {
            "bucket_name": "bucket-name",
            "bucket_key": "bucket-key"
        }

        self.template.connection_manager.call.side_effect = [
            ClientError(
                {
                    "Error": {
                        "Code": 404,
                        "Message": "Not Found"
                    }
                },
                sentinel.operation
            ),
            None
        ]

        existence = self.template._bucket_exists()

        assert existence is False
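
The side_effect list is what sequences the two calls above: the first raises, the second returns None. The same pattern in isolation, with illustrative values:

from unittest.mock import Mock
from botocore.exceptions import ClientError

mocked = Mock(side_effect=[
    ClientError({'Error': {'Code': 404, 'Message': 'Not Found'}}, 'HeadBucket'),
    None,
])

try:
    mocked()                 # first call raises the ClientError
except ClientError:
    pass
assert mocked() is None      # second call consumes the next side effect
assert mocked.call_count == 2
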
Example #4
def test_invoke_does_raise_if_service_error(runner, mock_cli_factory):
    deployed_resources = DeployedResources({"resources": []})
    mock_cli_factory.create_config_obj.return_value = FakeConfig(
        deployed_resources)
    invoke_handler = mock.Mock(spec=LambdaInvokeHandler)
    invoke_handler.invoke.side_effect = ClientError(
        {'Error': {
            'Code': 'LambdaError',
            'Message': 'Error message'
        }}, 'Invoke')
    mock_cli_factory.create_lambda_invoke_handler.return_value = invoke_handler
    mock_reader = mock.Mock(spec=PipeReader)
    mock_reader.read.return_value = 'barbaz'
    mock_cli_factory.create_stdin_reader.return_value = mock_reader
    with runner.isolated_filesystem():
        cli.create_new_project_skeleton('testproject')
        os.chdir('testproject')
        result = _run_cli_command(runner,
                                  cli.invoke, ['-n', 'foo'],
                                  cli_factory=mock_cli_factory)
        assert result.exit_code == 1
    assert invoke_handler.invoke.call_args == mock.call('barbaz')
    assert ("Error: got 'LambdaError' exception back from Lambda\n"
            "Error message") in result.output
Example #5
def _mock_describe_document(Name):
    # Using two wait flags to simulate that AWS is taking time to delete and
    # create documents
    if Name not in mock_asiaq_document_contents:
        if wait_flags['delete']:
            wait_flags['delete'] = False
            return {'Document': {'Name': Name, 'Status': 'Active'}}
        else:
            wait_flags['delete'] = True
            raise ClientError(
                {
                    'Error': {
                        'Code': 'Mock_code',
                        'Message': 'mock message'
                    }
                }, 'DescribeDocument')
    else:
        if wait_flags['create']:
            wait_flags['create'] = False
            res = {'Document': {'Name': Name, 'Status': 'Creating'}}
        else:
            wait_flags['create'] = True
            res = {'Document': {'Name': Name, 'Status': 'Active'}}
        return res
Example #6
    def test_execute_lambda_occ_conflict(
            self, mock_start_transaction, mock_session, mock_transaction,
            mock_is_occ_conflict_exception, mock_is_retriable_exception,
            mock_logger_warning, mock_executor, mock_throw_if_closed,
            mock_no_throw_abort):
        ce = ClientError(MOCK_CLIENT_ERROR_MESSAGE, MOCK_MESSAGE)
        mock_start_transaction.return_value = mock_transaction
        mock_is_occ_conflict_exception.return_value = True
        mock_is_retriable_exception.return_value = False
        qldb_session = QldbSession(mock_session, MOCK_READ_AHEAD,
                                   MOCK_RETRY_LIMIT, mock_executor)

        mock_lambda = Mock()
        mock_lambda.side_effect = ce
        self.assertRaises(ClientError, qldb_session.execute_lambda,
                          mock_lambda)
        mock_throw_if_closed.assert_called_once_with()

        mock_start_transaction.assert_has_calls(
            [call(), call(), call(), call(),
             call()])
        mock_no_throw_abort.assert_has_calls([
            call(mock_transaction),
            call(mock_transaction),
            call(mock_transaction),
            call(mock_transaction),
            call(mock_transaction)
        ])
        mock_is_occ_conflict_exception.assert_has_calls(
            [call(ce), call(ce),
             call(ce), call(ce),
             call(ce)])
        self.assertEqual(mock_lambda.call_count, qldb_session._retry_limit + 1)
        self.assertEqual(mock_logger_warning.call_count,
                         qldb_session._retry_limit + 1)
        mock_transaction.commit.assert_not_called()
Example #7
    def test_storage_open_no_write(self):
        """
        Test opening file in write mode and closing without writing.

        A file should be created as by obj.put(...).
        """
        name = 'test_open_no_write.txt'

        # Set the encryption flag used for puts
        self.storage.encryption = True
        self.storage.reduced_redundancy = True
        self.storage.default_acl = 'public-read'

        file = self.storage.open(name, 'w')
        self.storage.bucket.Object.assert_called_with(name)
        obj = self.storage.bucket.Object.return_value
        obj.load.side_effect = ClientError(
            {
                'Error': {},
                'ResponseMetadata': {
                    'HTTPStatusCode': 404
                }
            }, 'head_bucket')

        # Set the name of the mock object
        obj.key = name

        # Save the internal file before closing
        file.close()

        obj.load.assert_called_once_with()
        obj.put.assert_called_once_with(ACL='public-read',
                                        Body=b"",
                                        ContentType='text/plain',
                                        ServerSideEncryption='AES256',
                                        StorageClass='REDUCED_REDUNDANCY')
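
The stubbed obj.load raises a ClientError whose ResponseMetadata carries a 404, which the storage backend treats as "object missing" before creating the file via obj.put. A hypothetical probe built on the same convention:

from botocore.exceptions import ClientError

def object_exists(obj):
    # Treat a 404 from load() as "does not exist"; re-raise anything else.
    try:
        obj.load()
    except ClientError as e:
        if e.response.get('ResponseMetadata', {}).get('HTTPStatusCode') == 404:
            return False
        raise
    return True
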
Example #8
def test_vcenter_info_exists_returns_False(get_handler):
    (
        secret_manager,
        system_manager_mock,
        secrets_manager_mock,
        handler,
    ) = __create_secret_manager_subject(get_handler)

    # setup
    secrets_manager_mock.get_secret_value.side_effect = ClientError(
        {"Error": {
            "Message": "bla",
            "Code": "ResourceNotFoundException"
        }},
        "secretsmanager:GetSecretValue",
    )
    key = f"/rpcv/{__STAGE}/orgs/{__ORD_ID}/clusters/{__CLUSTER_ID}/vcenters/{__IP}"

    # when
    actual = secret_manager.secret_info_exists(key)

    # then
    secrets_manager_mock.get_secret_value.assert_called_with(SecretId=key)
    assert actual is False
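
The test pins down the usual existence check: catch ClientError and return False only when the error code is ResourceNotFoundException. A sketch of that pattern (the body is assumed; only the names follow the example):

from botocore.exceptions import ClientError

def secret_info_exists(client, key):
    # client is assumed to be a boto3 Secrets Manager client.
    try:
        client.get_secret_value(SecretId=key)
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            return False
        raise
    return True
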
Example #9
def test_setup_bucket_with_resources_upload_failure(mocker, caplog,
                                                    mock_generated_bucket_name,
                                                    expected_bucket_name,
                                                    provided_bucket_name,
                                                    expected_remove_bucket):
    """Verify that create_bucket_with_batch_resources behaves as expected in case of upload failure."""
    region = "eu-west-1"
    mock_artifact_dir = "artifact_dir"
    stack_name = "test-stack"
    error = "ExpiredToken"
    client_error = ClientError({"Error": {"Code": error}}, "upload_fileobj")

    mocker.patch("pcluster.utils.generate_random_name_with_prefix",
                 side_effect=[mock_artifact_dir, mock_generated_bucket_name])
    mocker.patch("pcluster.utils.create_s3_bucket")
    check_bucket_mock = mocker.patch("pcluster.utils.check_s3_bucket_exists")
    mocker.patch("pcluster.utils.upload_resources_artifacts",
                 side_effect=client_error)
    cleanup_s3_mock = mocker.patch("pcluster.utils.cleanup_s3_resources")

    pcluster_config_mock = _mock_pcluster_config(mocker, "slurm", region,
                                                 provided_bucket_name)
    storage_data = pcluster_config_mock.to_storage()

    with pytest.raises(ClientError, match=error):
        _setup_bucket_with_resources(pcluster_config_mock, storage_data,
                                     stack_name, {})
    if provided_bucket_name:
        check_bucket_mock.assert_called_with(provided_bucket_name)
    else:
        check_bucket_mock.assert_not_called()
    # if resource upload fails we delete the bucket
    cleanup_s3_mock.assert_called_with(expected_bucket_name, mock_artifact_dir,
                                       expected_remove_bucket)
    assert_that(caplog.text).contains(
        "Unable to upload cluster resources to the S3 bucket")
Example #10
    def get_parameter(self, **kwargs):
        """Mocked get_parameter function that returns a value for the key from a dict

        Keyword Arguments:
            Name (str): The name of the parameter to retrieve

        Returns:
            dict: Parameter dictionary containing this parameter's value
        """
        # Raise a botocore ClientError if the param doesn't exist
        if kwargs.get('Name') not in self._parameters:
            err = {
                'Error': {
                    'Code': 403,
                    'Message': 'parameter does not exist'
                }
            }
            raise ClientError(err, 'get_parameter')

        return {
            'Parameter': {
                'Value': self._parameters.get(kwargs.get('Name'))
            }
        }
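
Driving the mock above looks like this (MockSSMClient is a hypothetical name for the class that defines get_parameter):

from botocore.exceptions import ClientError

ssm = MockSSMClient()
ssm._parameters = {'existing_param': 'value123'}

assert ssm.get_parameter(Name='existing_param')['Parameter']['Value'] == 'value123'

try:
    ssm.get_parameter(Name='missing_param')
except ClientError as e:
    assert e.response['Error']['Message'] == 'parameter does not exist'
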
Example #11
def test_it_provides_logs_for_failed_rollback_client_error(
        mock_error_handler, mock_delete, mock_load, mock_verify_integrity,
        message_stub):
    parquet_file = MagicMock()
    parquet_file.num_row_groups = 1
    mock_load.return_value = parquet_file
    mock_s3 = MagicMock()
    mock_s3.delete_object.side_effect = ClientError({}, "DeleteObject")
    mock_verify_integrity.side_effect = IntegrityCheckFailedError(
        "Some error", mock_s3, "bucket", "test/basic.parquet", "new_version")
    mock_delete.return_value = pa.BufferOutputStream(), {"DeletedRows": 1}
    execute("https://queue/url", message_stub(), "receipt_handle")
    mock_verify_integrity.assert_called()
    assert mock_error_handler.call_args_list == [
        call(ANY, ANY, "Object version integrity check failed: Some error"),
        call(
            ANY,
            ANY,
            "ClientError: An error occurred (Unknown) when calling the DeleteObject operation: Unknown. "
            "Version rollback caused by version integrity conflict failed",
            "ObjectRollbackFailed",
            False,
        ),
    ]
Example #12
def test_dict_added_to_unprocessed_items_after_5_failures(self):
    base = BaseDispatcher('test_subject',
                          'send_lots',
                          'send_one',
                          max_batch_size=3)
    client_error = ClientError(
        {"Error": {
            "message": "Something went wrong",
            "code": 0
        }}, "A Test")
    base._individual_dispatch_method = Mock(side_effect=[
        client_error, client_error, client_error, client_error, client_error
    ])
    test_payload = {"abc": 123}
    base._send_individual_payload(test_payload)
    base._individual_dispatch_method.assert_has_calls([
        call(**test_payload),
        call(**test_payload),
        call(**test_payload),
        call(**test_payload),
        call(**test_payload)
    ])
    self.assertEqual([test_payload], base.unprocessed_items)
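
The test fixes the contract of _send_individual_payload: retry up to five times on ClientError, then park the payload in unprocessed_items instead of raising. A hypothetical simplification of that loop:

from botocore.exceptions import ClientError

def send_individual_payload(dispatch, payload, unprocessed_items, attempts=5):
    # dispatch stands in for BaseDispatcher._individual_dispatch_method.
    for _ in range(attempts):
        try:
            return dispatch(**payload)
        except ClientError:
            continue
    unprocessed_items.append(payload)
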
Example #13
def test_encrypt_and_push_creds_to_s3_kms_failure(log_mock, boto_mock):
    """CLI - Outputs - Encrypt and push creds to s3 - kms failure"""
    props = {
        'secret': OutputProperty(description='short description of secret needed',
                                 value='1908AGSG98A8908AG',
                                 cred_requirement=True)
    }

    err_response = {
        'Error': {
            'Code': 100,
            'Message': 'BAAAD',
            'BucketName': 'bucket'
        }
    }

    # Add ClientError side_effect to mock
    boto_mock.side_effect = ClientError(err_response, 'operation')
    encrypt_and_push_creds_to_s3('us-east-1', 'bucket', 'key', props,
                                 'test_alias')

    log_mock.assert_called_with(
        'An error occurred during credential encryption')
Example #14
if _has_required_boto():
    region = 'us-east-1'
    access_key = 'GKTADJGHEIQSXMKKRBJ08H'
    secret_key = 'askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs'
    conn_parameters = {
        'region': region,
        'key': access_key,
        'keyid': secret_key,
        'profile': {}
    }
    error_message = 'An error occurred (101) when calling the {0} operation: Test-defined error'
    not_found_error = ClientError(
        {
            'Error': {
                'Code': 'ResourceNotFoundException',
                'Message': "Test-defined error"
            }
        }, 'msg')
    error_content = {'Error': {'Code': 101, 'Message': "Test-defined error"}}
    domain_ret = dict(
        DomainName='testdomain',
        ElasticsearchClusterConfig={},
        EBSOptions={},
        AccessPolicies={},
        SnapshotOptions={},
        AdvancedOptions={},
        ElasticsearchVersion='1.5',
    )
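
Tests in this module (compare Example #23 below) assert str(exc) against error_message.format(...), which works because botocore builds the string form from the Code and Message fields of the error dict. A quick check using the fixtures above ('describe_domain' is an arbitrary operation name):

exc = ClientError(error_content, 'describe_domain')
assert str(exc) == error_message.format('describe_domain')
# -> "An error occurred (101) when calling the describe_domain operation: Test-defined error"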

Example #15
    def test_individual_write_throws_exceptions(self):
        mock_client_error = ClientError(
            {'Error': {
                'Code': 500,
                'Message': 'broken'
            }}, "Dynamo")

        kinesis_client = KinesisBatchDispatcher(
            stream_name='test_stream', partition_key_identifier='m_id')

        mock_boto3 = Mock()
        kinesis_client._aws_service = mock_boto3

        test_payloads = [{
            'm_id': 1,
            'message': 'message contents 1'
        }, {
            'm_id': 2,
            'message': 'message contents 2'
        }]

        #  All records fail in first attempt
        failure_response = {
            'FailedRecordCount': 2,
            'Records': [{
                'm_id': 1,
                'message': 'message contents 1',
                'ErrorCode': 'badness'
            }, {
                'm_id': 2,
                'message': 'message contents 2',
                'ErrorCode': 'badness'
            }],
            'EncryptionType': 'NONE'
        }

        kinesis_client._batch_dispatch_method = Mock(
            side_effect=[failure_response])
        kinesis_client._individual_dispatch_method = Mock(side_effect=[
            mock_client_error, mock_client_error, mock_client_error,
            mock_client_error, mock_client_error, mock_client_error,
            mock_client_error, mock_client_error, mock_client_error,
            mock_client_error
        ])

        for test_payload in test_payloads:
            kinesis_client.submit_payload(test_payload)

        kinesis_client.flush_payloads()

        kinesis_client._batch_dispatch_method.assert_called_once_with(
            **{
                'Records': [{
                    'Data': '{"m_id": 1, "message": "message contents 1"}',
                    'PartitionKey': '1'
                }, {
                    'Data': '{"m_id": 2, "message": "message contents 2"}',
                    'PartitionKey': '2'
                }],
                'StreamName': 'test_stream'
            })
        kinesis_client._individual_dispatch_method.assert_has_calls([
            call(Data='{"m_id": 1, "message": "message contents 1"}',
                 PartitionKey='1',
                 StreamName='test_stream'),
            call(Data='{"m_id": 1, "message": "message contents 1"}',
                 PartitionKey='1',
                 StreamName='test_stream'),
            call(Data='{"m_id": 1, "message": "message contents 1"}',
                 PartitionKey='1',
                 StreamName='test_stream'),
            call(Data='{"m_id": 1, "message": "message contents 1"}',
                 PartitionKey='1',
                 StreamName='test_stream'),
            call(Data='{"m_id": 1, "message": "message contents 1"}',
                 PartitionKey='1',
                 StreamName='test_stream'),
            call(Data='{"m_id": 2, "message": "message contents 2"}',
                 PartitionKey='2',
                 StreamName='test_stream'),
            call(Data='{"m_id": 2, "message": "message contents 2"}',
                 PartitionKey='2',
                 StreamName='test_stream'),
            call(Data='{"m_id": 2, "message": "message contents 2"}',
                 PartitionKey='2',
                 StreamName='test_stream'),
            call(Data='{"m_id": 2, "message": "message contents 2"}',
                 PartitionKey='2',
                 StreamName='test_stream'),
            call(Data='{"m_id": 2, "message": "message contents 2"}',
                 PartitionKey='2',
                 StreamName='test_stream')
        ])
        self.assertEqual(test_payloads, kinesis_client.unprocessed_items)
Example #16
    def test_batch_write_throws_exceptions(self):
        kinesis_client = KinesisBatchDispatcher(
            stream_name='test_stream', partition_key_identifier='m_id')
        mock_client_error = ClientError(
            {'Error': {
                'Code': 500,
                'Message': 'broken'
            }}, "Dynamo")
        mock_boto3 = Mock()
        kinesis_client._aws_service = mock_boto3

        test_payloads = [{
            'm_id': 1,
            'message': 'message contents 1'
        }, {
            'm_id': 2,
            'message': 'message contents 2'
        }, {
            'm_id': 3,
            'message': 'message contents 3'
        }, {
            'm_id': 4,
            'message': 'message contents 4'
        }, {
            'm_id': 5,
            'message': 'message contents 5'
        }]

        kinesis_client._batch_dispatch_method = Mock(side_effect=[
            mock_client_error, mock_client_error, mock_client_error,
            mock_client_error, mock_client_error
        ])
        kinesis_client._individual_dispatch_method = Mock()

        for test_payload in test_payloads:
            kinesis_client.submit_payload(test_payload)

        kinesis_client.flush_payloads()

        kinesis_client._batch_dispatch_method.assert_has_calls([
            call(
                **{
                    'Records': [{
                        'Data': '{"m_id": 1, "message": "message contents 1"}',
                        'PartitionKey': '1'
                    }, {
                        'Data': '{"m_id": 2, "message": "message contents 2"}',
                        'PartitionKey': '2'
                    }, {
                        'Data': '{"m_id": 3, "message": "message contents 3"}',
                        'PartitionKey': '3'
                    }, {
                        'Data': '{"m_id": 4, "message": "message contents 4"}',
                        'PartitionKey': '4'
                    }, {
                        'Data': '{"m_id": 5, "message": "message contents 5"}',
                        'PartitionKey': '5'
                    }],
                    'StreamName': 'test_stream'
                }),
            call(
                **{
                    'Records': [{
                        'Data': '{"m_id": 1, "message": "message contents 1"}',
                        'PartitionKey': '1'
                    }, {
                        'Data': '{"m_id": 2, "message": "message contents 2"}',
                        'PartitionKey': '2'
                    }, {
                        'Data': '{"m_id": 3, "message": "message contents 3"}',
                        'PartitionKey': '3'
                    }, {
                        'Data': '{"m_id": 4, "message": "message contents 4"}',
                        'PartitionKey': '4'
                    }, {
                        'Data': '{"m_id": 5, "message": "message contents 5"}',
                        'PartitionKey': '5'
                    }],
                    'StreamName': 'test_stream'
                }),
            call(
                **{
                    'Records': [{
                        'Data': '{"m_id": 1, "message": "message contents 1"}',
                        'PartitionKey': '1'
                    }, {
                        'Data': '{"m_id": 2, "message": "message contents 2"}',
                        'PartitionKey': '2'
                    }, {
                        'Data': '{"m_id": 3, "message": "message contents 3"}',
                        'PartitionKey': '3'
                    }, {
                        'Data': '{"m_id": 4, "message": "message contents 4"}',
                        'PartitionKey': '4'
                    }, {
                        'Data': '{"m_id": 5, "message": "message contents 5"}',
                        'PartitionKey': '5'
                    }],
                    'StreamName': 'test_stream'
                })
        ])
        kinesis_client._individual_dispatch_method.assert_not_called()
        self.assertEqual(test_payloads, kinesis_client.unprocessed_items)
Example #17
if _has_required_boto():
    region = "us-east-1"
    access_key = "GKTADJGHEIQSXMKKRBJ08H"
    secret_key = "askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs"
    conn_parameters = {
        "region": region,
        "key": access_key,
        "keyid": secret_key,
        "profile": {},
    }
    error_message = "An error occurred (101) when calling the {0} operation: Test-defined error"
    not_found_error = ClientError(
        {
            "Error": {
                "Code": "ResourceNotFoundException",
                "Message": "Test-defined error",
            }
        },
        "msg",
    )
    topic_rule_not_found_error = ClientError(
        {"Error": {"Code": "UnauthorizedException", "Message": "Test-defined error"}},
        "msg",
    )
    error_content = {"Error": {"Code": 101, "Message": "Test-defined error"}}
    policy_ret = dict(
        policyName="testpolicy",
        policyDocument='{"Version": "2012-10-17", "Statement": [{"Action": ["iot:Publish"], "Resource": ["*"], "Effect": "Allow"}]}',
        policyArn="arn:aws:iot:us-east-1:123456:policy/my_policy",
        policyVersionId=1,
        defaultVersionId=1,
    )
Example #18
def mocked_poller(account, stream):
    raise ClientError({"Error": {
        "Message": "",
        "Code": "AccessDenied"
    }}, "sts:AssumeRole")
Example #19
def func():
    self.count += 1
    raise ClientError({"Error": {"Code": 42}}, "something")
Example #20
def test_absent_with_failure(self):
    self.conn.delete_bucket.side_effect = ClientError(error_content, 'delete_bucket')
    result = salt_states['boto_s3_bucket.absent']('test', 'testbucket')
    self.assertFalse(result['result'])
    self.assertTrue('An error occurred' in result['comment'])
Example #21
    if not HAS_BOTO:
        return False
    elif LooseVersion(boto3.__version__) < LooseVersion(required_boto3_version):
        return False
    else:
        return True

if _has_required_boto():
    region = 'us-east-1'
    access_key = 'GKTADJGHEIQSXMKKRBJ08H'
    secret_key = 'askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs'
    conn_parameters = {'region': region, 'key': access_key, 'keyid': secret_key, 'profile': {}}
    error_message = 'An error occurred (101) when calling the {0} operation: Test-defined error'
    not_found_error = ClientError({
        'Error': {
            'Code': '404',
            'Message': "Test-defined error"
        }
    }, 'msg')
    error_content = {
      'Error': {
        'Code': 101,
        'Message': "Test-defined error"
      }
    }
    list_ret = {
        'Buckets': [{
            'Name': 'mybucket',
            'CreationDate': None
        }],
        'Owner': {
            'DisplayName': 'testuser'
        }
    }
Example #22
def writer_side_effect(*args, **kwargs):
    raise ClientError({'Error': {'Code': 403, 'Message': 'Access denied'}}, 'information')
Example #23
    def test_that_when_describing_thing_type_and_boto3_returns_error_an_error_the_describe_thing_type_method_returns_error(self):
        self.conn.describe_thing_type.side_effect = ClientError(error_content, 'describe_thing_type')
        result = boto_iot.describe_thing_type(thingTypeName='mythingtype', **conn_parameters)

        self.assertEqual(result.get('error', {}).get('message'), error_message.format('describe_thing_type'))
Example #24
def test_entire_environment_process_together(monkeypatch):
    boto3_client = MagicMock()
    boto3_client.describe_task_definition.side_effect = ClientError({}, None)
    boto3_client.register_task_definition.return_value = {}
    boto3_client.run_task.return_value = {}
    monkeypatch.setattr("boto3.client", MagicMock(return_value=boto3_client))

    flow_runner = MagicMock()
    monkeypatch.setattr(
        "prefect.engine.get_default_flow_runner_class",
        MagicMock(return_value=flow_runner),
    )

    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "secret")
    monkeypatch.setenv("AWS_SESSION_TOKEN", "session")
    monkeypatch.setenv("REGION_NAME", "region")

    with prefect.context({"flow_run_id": "id"}), set_temporary_config(
        {"cloud.auth_token": "test", "logging.extra_loggers": "['test_logger']",}
    ):
        storage = Docker(registry_url="test", image_name="image", image_tag="tag")
        flow = Flow("name", storage=storage)
        environment = FargateTaskEnvironment(
            containerDefinitions=[
                {
                    "name": "flow-container",
                    "image": "image",
                    "command": [],
                    "environment": [],
                    "essential": True,
                }
            ],
            cluster="test",
            family="test",
            taskDefinition="test",
        )

        assert environment
        assert environment.aws_access_key_id == "id"
        assert environment.aws_secret_access_key == "secret"
        assert environment.aws_session_token == "session"
        assert environment.region_name == "region"

        environment.setup(flow=flow)

        assert boto3_client.describe_task_definition.called
        assert boto3_client.register_task_definition.called
        assert boto3_client.register_task_definition.call_args[1]["family"] == "test"
        assert boto3_client.register_task_definition.call_args[1][
            "containerDefinitions"
        ] == [
            {
                "name": "flow-container",
                "image": "test/image:tag",
                "command": [
                    "/bin/sh",
                    "-c",
                    "python -c 'import prefect; prefect.environments.FargateTaskEnvironment().run_flow()'",
                ],
                "environment": [
                    {
                        "name": "PREFECT__CLOUD__GRAPHQL",
                        "value": prefect.config.cloud.graphql,
                    },
                    {"name": "PREFECT__CLOUD__USE_LOCAL_SECRETS", "value": "false"},
                    {
                        "name": "PREFECT__ENGINE__FLOW_RUNNER__DEFAULT_CLASS",
                        "value": "prefect.engine.cloud.CloudFlowRunner",
                    },
                    {
                        "name": "PREFECT__ENGINE__TASK_RUNNER__DEFAULT_CLASS",
                        "value": "prefect.engine.cloud.CloudTaskRunner",
                    },
                    {"name": "PREFECT__LOGGING__LOG_TO_CLOUD", "value": "true"},
                    {
                        "name": "PREFECT__LOGGING__EXTRA_LOGGERS",
                        "value": "['test_logger']",
                    },
                ],
                "essential": True,
            }
        ]

        environment.execute(flow=flow)

        assert boto3_client.run_task.called
        assert boto3_client.run_task.call_args[1]["taskDefinition"] == "test"
        assert boto3_client.run_task.call_args[1]["overrides"] == {
            "containerOverrides": [
                {
                    "name": "flow-container",
                    "environment": [
                        {
                            "name": "PREFECT__CLOUD__AUTH_TOKEN",
                            "value": prefect.config.cloud.get("auth_token"),
                        },
                        {"name": "PREFECT__CONTEXT__FLOW_RUN_ID", "value": "id"},
                        {"name": "PREFECT__CONTEXT__IMAGE", "value": "test/image:tag"},
                    ],
                }
            ]
        }
        assert boto3_client.run_task.call_args[1]["launchType"] == "FARGATE"
        assert boto3_client.run_task.call_args[1]["cluster"] == "test"

        with tempfile.TemporaryDirectory() as directory:
            d = Local(directory)
            d.add_flow(prefect.Flow("name"))

            gql_return = MagicMock(
                return_value=MagicMock(
                    data=MagicMock(
                        flow_run=[
                            GraphQLResult(
                                {
                                    "flow": GraphQLResult(
                                        {"name": "name", "storage": d.serialize(),}
                                    )
                                }
                            )
                        ],
                    )
                )
            )
            client = MagicMock()
            client.return_value.graphql = gql_return
            monkeypatch.setattr("prefect.environments.execution.base.Client", client)

            with set_temporary_config({"cloud.auth_token": "test"}):
                environment.run_flow()

            assert flow_runner.call_args[1]["flow"].name == "name"
Example #25
        return False
    elif LooseVersion(found_botocore_version) < LooseVersion(required_botocore_version):
        return False
    else:
        return True


if _has_required_boto():
    region = 'us-east-1'
    access_key = 'GKTADJGHEIQSXMKKRBJ08H'
    secret_key = 'askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs'
    conn_parameters = {'region': region, 'key': access_key, 'keyid': secret_key, 'profile': {}}
    error_message = 'An error occurred (101) when calling the {0} operation: Test-defined error'
    not_found_error = ClientError({
        'Error': {
            'Code': 'ResourceNotFoundException',
            'Message': "Test-defined error"
        }
    }, 'msg')
    topic_rule_not_found_error = ClientError({
        'Error': {
            'Code': 'UnauthorizedException',
            'Message': "Test-defined error"
        }
    }, 'msg')
    error_content = {
      'Error': {
        'Code': 101,
        'Message': "Test-defined error"
      }
    }
    policy_ret = dict(policyName='testpolicy',
                      policyDocument='{"Version": "2012-10-17", "Statement": [{"Action": ["iot:Publish"], "Resource": ["*"], "Effect": "Allow"}]}',
                      policyArn='arn:aws:iot:us-east-1:123456:policy/my_policy',
                      policyVersionId=1,
                      defaultVersionId=1)
Example #26
    def test_task_client_error_one_file(self):
        """
        Test retries for restore error for one file.
        """
        exp_event = {}
        exp_event["config"] = {"glacier-bucket": "some_bucket"}
        exp_event["input"] = {
            "granules": [{
                "granuleId": "MOD09GQ.A0219114.N5aUCG.006.0656338553321",
                "keys": [KEY1]
            }]
        }

        os.environ['RESTORE_RETRY_SLEEP_SECS'] = '.5'
        requests_db.request_id_generator = Mock(side_effect=[
            REQUEST_GROUP_ID_EXP_1, REQUEST_ID1, REQUEST_ID2, REQUEST_ID3
        ])
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.head_object = Mock()
        s3_cli.restore_object = Mock(side_effect=[
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'),
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'),
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object')
        ])
        CumulusLogger.info = Mock()
        CumulusLogger.error = Mock()
        mock_ssm_get_parameter(1)
        exp_gran = {
            'granuleId': 'MOD09GQ.A0219114.N5aUCG.006.0656338553321',
            'files': [{
                'key': FILE1,
                'dest_bucket': PROTECTED_BUCKET,
                'success': False,
                'err_msg': 'An error occurred (NoSuchBucket) when calling the restore_object '
                           'operation: Unknown'
            }]
        }
        exp_err = f"One or more files failed to be requested. {exp_gran}"
        try:
            request_files.task(exp_event, self.context)
            self.fail("RestoreRequestError expected")
        except request_files.RestoreRequestError as err:
            self.assertEqual(exp_err, str(err))
        del os.environ['RESTORE_RETRY_SLEEP_SECS']
        boto3.client.assert_called_with('ssm')
        s3_cli.head_object.assert_called_with(Bucket='some_bucket', Key=FILE1)
        restore_req_exp = {
            'Days': 5,
            'GlacierJobParameters': {
                'Tier': 'Standard'
            }
        }
        s3_cli.restore_object.assert_called_with(
            Bucket='some_bucket', Key=FILE1, RestoreRequest=restore_req_exp)
Example #27
    def batch_get_item(self, **kwargs):
        """Mock batch_get_item method and return mimicking dynamodb response
        Keyword Argments:
            exception (bool): True raise exception.

        Returns:
            (dict): Response dictionary containing fake results.
        """
        if self.exception:
            err = {'Error': {'Code': 400, 'Message': 'raising test exception'}}
            raise ClientError(err, 'batch_get_item')

        if not kwargs.get('RequestItems'):
            err = {
                'Error': {
                    'Code': 403,
                    'Message': 'Parameter validation failed'
                }
            }
            raise ParamValidationError(report=err)

        # Validate query keys
        for _, item_value in kwargs['RequestItems'].items():
            if not item_value.get('Keys'):
                err = {
                    'Error': {
                        'Code': 400,
                        'Message': '[Keys] parameter is required'
                    }
                }
                raise ParamValidationError(report=err)
            self._validate_keys(item_value['Keys'])

        response = {
            'UnprocessedKeys': {},
            'Responses': {
                'test_table_name': [{
                    'ioc_value': {
                        'S': '1.1.1.2'
                    },
                    'sub_type': {
                        'S': 'mal_ip'
                    }
                }, {
                    'ioc_value': {
                        'S': 'evil.com'
                    },
                    'sub_type': {
                        'S': 'c2_domain'
                    }
                }]
            },
            'ResponseMetadata': {
                'RetryAttempts': 0,
                'HTTPStatusCode': 200,
                'RequestId': 'ABCD1234',
                'HTTPHeaders': {}
            }
        }
        if self.has_unprocessed_keys:
            response['UnprocessedKeys'] = {
                'test_table_name': {
                    'Keys': [{
                        'ioc_value': {
                            'S': 'foo'
                        }
                    }, {
                        'ioc_value': {
                            'S': 'bar'
                        }
                    }]
                }
            }

        return response
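
Exercising the mock above (MockDynamoDBClient is a hypothetical name for the class that defines batch_get_item; the exception flag is an attribute set by the surrounding fixture):

from botocore.exceptions import ClientError, ParamValidationError

client = MockDynamoDBClient()
client.exception = True
try:
    client.batch_get_item()
except ClientError as e:
    assert e.response['Error']['Code'] == 400

client.exception = False
try:
    client.batch_get_item()          # RequestItems missing -> validation error
except ParamValidationError:
    pass
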
Example #28
    def test_task_client_error_3_times(self):
        """
        Test three files, two successful, one errors on all retries and fails.
        """
        exp_event = {}
        exp_event["config"] = {"glacier-bucket": "some_bucket"}
        gran = {}
        granule_id = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"
        gran["granuleId"] = granule_id
        keys = [KEY1, KEY3, KEY4]
        gran["keys"] = keys
        exp_event["input"] = {"granules": [gran]}

        requests_db.request_id_generator = Mock(side_effect=[
            REQUEST_GROUP_ID_EXP_1, REQUEST_ID1, REQUEST_GROUP_ID_EXP_3,
            REQUEST_ID2, REQUEST_ID3, REQUEST_ID4
        ])
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.head_object = Mock()
        s3_cli.restore_object = Mock(side_effect=[
            None,
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'), None,
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'),
            ClientError({'Error': {
                'Code': 'NoSuchKey'
            }}, 'restore_object')
        ])
        CumulusLogger.info = Mock()
        CumulusLogger.error = Mock()
        mock_ssm_get_parameter(3)
        exp_gran = {}
        exp_gran['granuleId'] = granule_id
        exp_files = []

        exp_file = {}
        exp_file['key'] = FILE1
        exp_file['dest_bucket'] = PROTECTED_BUCKET
        exp_file['success'] = True
        exp_file['err_msg'] = ''
        exp_files.append(exp_file)

        exp_file = {}
        exp_file['key'] = FILE3
        exp_file['dest_bucket'] = None
        exp_file['success'] = False
        exp_file['err_msg'] = 'An error occurred (NoSuchKey) when calling the restore_object ' \
                              'operation: Unknown'
        exp_files.append(exp_file)

        exp_file = {}
        exp_file['key'] = FILE4
        exp_file['dest_bucket'] = PUBLIC_BUCKET
        exp_file['success'] = True
        exp_file['err_msg'] = ''
        exp_files.append(exp_file)

        exp_gran['files'] = exp_files
        exp_err = f"One or more files failed to be requested. {exp_gran}"

        print_rows("begin")
        try:
            request_files.task(exp_event, self.context)
            self.fail("RestoreRequestError expected")
        except request_files.RestoreRequestError as err:
            self.assertEqual(exp_err, str(err))
        print_rows("end")
Example #29
def writer_side_effect(*args, **kwargs):
    raise ClientError({'Error': {}}, 'some operation')
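
With an empty 'Error' dict, botocore substitutes 'Unknown' for both the code and the message, which is exactly what the rollback assertion in Example #11 expects:

from botocore.exceptions import ClientError

e = ClientError({'Error': {}}, 'some operation')
assert str(e) == ('An error occurred (Unknown) when calling the '
                  'some operation operation: Unknown')
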
Example #30
    def test_task_client_error_2_times(self):
        """
        Test two files, first successful, second has two errors, then success.
        """
        file1 = {"key": FILE1, "dest_bucket": "sndbx-cumulus-protected"}
        file2 = {"key": FILE2, "dest_bucket": "sndbx-cumulus-protected"}
        exp_event = {}
        exp_event["config"] = {"glacier-bucket": "some_bucket"}
        gran = {}
        granule_id = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"
        gran["granuleId"] = granule_id
        keys = []
        keys.append(file1)
        keys.append(file2)
        gran["keys"] = keys
        exp_event["input"] = {"granules": [gran]}

        requests_db.request_id_generator = Mock(side_effect=[
            REQUEST_GROUP_ID_EXP_1, REQUEST_ID1, REQUEST_GROUP_ID_EXP_2,
            REQUEST_ID2, REQUEST_ID3
        ])
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.head_object = Mock()
        s3_cli.restore_object = Mock(side_effect=[
            None,
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'),
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'), None
        ])
        CumulusLogger.info = Mock()
        CumulusLogger.error = Mock()
        mock_ssm_get_parameter(2)
        exp_gran = {}
        exp_gran['granuleId'] = granule_id
        exp_files = []

        exp_file = {}
        exp_file['key'] = FILE1
        exp_file['dest_bucket'] = "sndbx-cumulus-protected"
        exp_file['success'] = True
        exp_file['err_msg'] = ''
        exp_files.append(exp_file)

        exp_file = {}
        exp_file['key'] = FILE2
        exp_file['dest_bucket'] = "sndbx-cumulus-protected"
        exp_file['success'] = True
        exp_file['err_msg'] = ''
        exp_files.append(exp_file)

        exp_gran['files'] = exp_files

        print_rows("begin")

        result = request_files.task(exp_event, self.context)
        self.assertEqual(exp_gran, result)

        print_rows("end")