Example #1
 def setUp(self):
     super(CommandArchitectureTest, self).setUp()
     self.session = FakeSession()
     self.bucket = make_s3_files(self.session)
     self.loc_files = make_loc_files()
     self.output = StringIO()
     self.err_output = StringIO()
     self.saved_stdout = sys.stdout
     self.saved_stderr = sys.stderr
     sys.stdout = self.output
     sys.stderr = self.err_output
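A setUp like this swaps the process-global streams, so the test class needs
a matching tearDown that restores them, or later tests will keep writing
into the captured buffers. Example #24 below shows the full pairing; a
minimal sketch of the restore step (names taken from the setUp above) is:

 def tearDown(self):
     # Close the capture buffers and put the real streams back.
     self.output.close()
     self.err_output.close()
     sys.stdout = self.saved_stdout
     sys.stderr = self.saved_stderr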
Example #2
 def setUp(self):
     super(CommandArchitectureTest, self).setUp()
     self.session = self.driver.session
     self.bucket = 'mybucket'
     self.file_creator = FileCreator()
     self.loc_files = make_loc_files(self.file_creator)
     self.output = StringIO()
     self.err_output = StringIO()
     self.saved_stdout = sys.stdout
     self.saved_stderr = sys.stderr
     sys.stdout = self.output
     sys.stderr = self.err_output
Example #3
 def setUp(self):
     super(CommandArchitectureTest, self).setUp()
     self.session = self.driver.session
     self.bucket = 'mybucket'
     self.file_creator = FileCreator()
     self.loc_files = make_loc_files(self.file_creator)
     self.output = StringIO()
     self.err_output = StringIO()
     self.saved_stdout = sys.stdout
     self.saved_stderr = sys.stderr
     sys.stdout = self.output
     sys.stderr = self.err_output
Example #4
 def setUp(self):
     self.stream = StringIO()
     self.user_code = '12345'
     self.verification_uri = 'https://verification.com'
     self.verification_uri_complete = 'https://verification.com?code=12345'
     self.pending_authorization = {
         'userCode': self.user_code,
         'verificationUri': self.verification_uri,
         'verificationUriComplete': self.verification_uri_complete,
     }
     self.open_browser = mock.Mock(spec=webbrowser.open_new_tab)
     self.handler = OpenBrowserHandler(
         self.stream,
         open_browser=self.open_browser,
     )
Example #5
 def test_output_use_existing_bucket(self):
     name = 'MyBucket/MyPrefix'
     with mock.patch('sys.stdout', StringIO()) as mock_stdout:
         self.helper.prepare_bucket(name)
         self.assertIn(
             'Using existing S3 bucket: MyBucket',
             mock_stdout.getvalue())
Example #6
 def setUp(self):
     self.log_event = {
         'timestamp': datetime(2018, 1, 1, 0, 29, 43, 79060, tz.tzutc()),
         'logStreamName': 'stream_name',
         'message': 'my message'
     }
     self.output = StringIO()
Example #7
 def test_output_existing_topic(self):
     name = 'arn:aws:sns:us-east-1:934212987125:config'
     with mock.patch('sys.stdout', StringIO()) as mock_stdout:
         self.helper.prepare_topic(name)
         self.assertIn(
             'Using existing SNS topic: %s' % name,
             mock_stdout.getvalue())
Example #8
    def _assert_pypi_rc_has_expected_content(self,
                                             pypi_rc_str,
                                             server,
                                             repo_url=None,
                                             username=None,
                                             password=None):
        pypi_rc = RawConfigParser()
        pypi_rc.readfp(StringIO(pypi_rc_str))

        self.assertIn('distutils', pypi_rc.sections())
        self.assertIn('index-servers', pypi_rc.options('distutils'))
        index_servers = pypi_rc.get('distutils', 'index-servers')
        index_servers = [
            index_server.strip() for index_server in index_servers.split('\n')
            if index_server.strip() != ''
        ]
        self.assertIn(server, index_servers)

        if repo_url or username or password:
            self.assertIn(server, pypi_rc.sections())

        if repo_url:
            self.assertIn('repository', pypi_rc.options(server))
            self.assertEqual(pypi_rc.get(server, 'repository'), repo_url)

        if username:
            self.assertIn('username', pypi_rc.options(server))
            self.assertEqual(pypi_rc.get(server, 'username'), username)

        if password:
            self.assertIn('password', pypi_rc.options(server))
            self.assertEqual(pypi_rc.get(server, 'password'), password)
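As a concrete illustration, a pypirc string shaped like the one below would
satisfy every assertion in this helper. The server name, URL, and
credentials are illustrative values; the layout mirrors the default
template shown in Example #18:

        pypi_rc_str = '''\
[distutils]
index-servers =
    codeartifact

[codeartifact]
repository = https://example.com/pypi/
username = aws
password = my-auth-token
'''
        self._assert_pypi_rc_has_expected_content(
            pypi_rc_str, server='codeartifact',
            repo_url='https://example.com/pypi/',
            username='aws', password='my-auth-token')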
Example #9
 def test_output_create_bucket(self):
     name = 'MyBucket/MyPrefix'
     self.s3_client.head_bucket.side_effect = self.bucket_no_exists_error
     self.s3_client._endpoint.region_name = 'us-east-1'
     with mock.patch('sys.stdout', StringIO()) as mock_stdout:
         self.helper.prepare_bucket(name)
         self.assertIn('Using new S3 bucket: MyBucket',
                       mock_stdout.getvalue())
Example #10
 def test_output_new_topic(self):
     name = 'mysnstopic'
     self.sns_client.create_topic.return_value = {'TopicArn': 'myARN'}
     with mock.patch('sys.stdout', StringIO()) as mock_stdout:
         self.helper.prepare_topic(name)
         self.assertIn(
             'Using new SNS topic: myARN',
             mock_stdout.getvalue())
Example #11
 def setUp(self):
     self.session = botocore.session.get_session(EnvironmentVariables)
     self.client = self.session.create_client('s3', 'us-west-2')
     params = {'region': 'us-west-2', 'acl': ['private']}
     self.s3_handler = S3Handler(self.session, params)
     self.s3_handler_multi = S3Handler(
         self.session, params=params,
         runtime_config=runtime_config(
             multipart_threshold=10, multipart_chunksize=2))
     self.bucket = create_bucket(self.session)
     self.file_creator = FileCreator()
     self.loc_files = make_loc_files(self.file_creator)
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
     self.output = StringIO()
     self.saved_stderr = sys.stderr
     sys.stderr = self.output
Example #12
 def setUp(self):
     self.session = botocore.session.get_session(EnvironmentVariables)
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1', 'acl': ['private']}
     self.s3_handler = S3Handler(self.session, params)
     self.s3_handler_multi = S3Handler(self.session,
                                       multi_threshold=10,
                                       chunksize=2,
                                       params=params)
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [
         self.bucket + '/text1.txt',
         self.bucket + '/another_directory/text2.txt'
     ]
     self.output = StringIO()
     self.saved_stderr = sys.stderr
     sys.stderr = self.output
Example #13
class TestYAMLDumper(unittest.TestCase):
    def setUp(self):
        self.dumper = YAMLDumper()
        self.output = StringIO()

    def test_dump_int(self):
        self.dumper.dump(1, self.output)
        self.assertEqual(self.output.getvalue(), '1\n')

    def test_dump_float(self):
        self.dumper.dump(1.2, self.output)
        self.assertEqual(self.output.getvalue(), '1.2\n')

    def test_dump_bool(self):
        self.dumper.dump(True, self.output)
        self.assertEqual(self.output.getvalue(), 'true\n')

    def test_dump_str(self):
        self.dumper.dump('foo', self.output)
        self.assertEqual(self.output.getvalue(), '"foo"\n')

    def test_dump_structure(self):
        self.dumper.dump({'key': 'val'}, self.output)
        self.assertEqual(self.output.getvalue(), 'key: val\n')

    def test_dump_list(self):
        self.dumper.dump(['val1', 'val2'], self.output)
        self.assertEqual(self.output.getvalue(), '- val1\n- val2\n')
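The expected strings above ('true', '"foo"') match JSON encoding of scalars
rather than PyYAML's defaults (a plain yaml.dump('foo') emits 'foo\n...\n').
A dumper satisfying these assertions could therefore route scalars through
json and structures through yaml. This is a minimal sketch under that
assumption, not necessarily the real YAMLDumper:

import json

import yaml


class YAMLDumper(object):
    def dump(self, value, stream):
        if value is None or isinstance(value, (str, int, float, bool)):
            # Scalars: json gives 1, 1.2, true and "foo", exactly what the
            # tests above expect.
            json.dump(value, stream)
            stream.write('\n')
        else:
            # Structures: block-style YAML gives 'key: val' and '- val1'.
            yaml.dump(value, stream, default_flow_style=False)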
Example #14
 def setUp(self):
     self.session = botocore.session.get_session(EnvironmentVariables)
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1', 'acl': ['private']}
     self.s3_handler = S3Handler(self.session, params)
     self.s3_handler_multi = S3Handler(self.session, multi_threshold=10,
                                       chunksize=2,
                                       params=params)
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
     self.output = StringIO()
     self.saved_stderr = sys.stderr
     sys.stderr = self.output
Example #15
    def get_commands(cls, endpoint, auth_token, **kwargs):
        # TODO(ujjwalpa@): We don't really have a command to execute for Twine
        # as we directly write to the pypirc file (or to stdout for dryrun)
        # with python itself instead. Nevertheless, we're using this method for
        # testing so we'll keep the interface for now but return a string with
        # the expected pypirc content instead of a list of commands to
        # execute. This definitely reeks of code smell and there is probably
        # room for rethinking and refactoring the interfaces of these adapter
        # helper classes in the future.

        assert 'pypi_rc_path' in kwargs, 'pypi_rc_path must be provided.'
        pypi_rc_path = kwargs['pypi_rc_path']

        default_pypi_rc = cls.DEFAULT_PYPI_RC_FMT.format(
            repository_endpoint=endpoint,
            auth_token=auth_token
        )

        pypi_rc = RawConfigParser()
        if os.path.exists(pypi_rc_path):
            try:
                pypi_rc.read(pypi_rc_path)
                index_servers = pypi_rc.get('distutils', 'index-servers')
                servers = [
                    server.strip()
                    for server in index_servers.split('\n')
                    if server.strip() != ''
                ]

                if 'codeartifact' not in servers:
                    servers.append('codeartifact')
                    pypi_rc.set(
                        'distutils', 'index-servers', '\n' + '\n'.join(servers)
                    )

                if 'codeartifact' not in pypi_rc.sections():
                    pypi_rc.add_section('codeartifact')

                pypi_rc.set('codeartifact', 'repository', endpoint)
                pypi_rc.set('codeartifact', 'username', 'aws')
                pypi_rc.set('codeartifact', 'password', auth_token)
            except Exception as e:  # invalid .pypirc file
                sys.stdout.write('%s is in an invalid state.' % pypi_rc_path)
                sys.stdout.write(os.linesep)
                raise e
        else:
            pypi_rc.readfp(StringIO(default_pypi_rc))

        pypi_rc_stream = StringIO()
        pypi_rc.write(pypi_rc_stream)
        pypi_rc_str = pypi_rc_stream.getvalue()
        pypi_rc_stream.close()

        return pypi_rc_str
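Given the interface described in the comment above, a caller gets back the
rendered pypirc text rather than a list of commands to run. A hypothetical
invocation (the class name, endpoint, token, and path are illustrative):

    pypi_rc_str = CodeArtifactTwineAdapter.get_commands(
        'https://example.com/pypi/',        # repository endpoint
        'my-auth-token',                    # auth token
        pypi_rc_path='/home/user/.pypirc',  # required kwarg, asserted above
    )
    # pypi_rc_str now holds the merged .pypirc content, ready to be written
    # to disk (or to stdout for a dry run).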
Example #16
 def setUp(self):
     self.session = botocore.session.get_session(EnvironmentVariables)
     self.client = self.session.create_client('s3', 'us-west-2')
     params = {'region': 'us-west-2', 'acl': ['private']}
     self.s3_handler = S3Handler(self.session, params)
     self.s3_handler_multi = S3Handler(
         self.session, params=params,
         runtime_config=runtime_config(
             multipart_threshold=10, multipart_chunksize=2))
     self.bucket = create_bucket(self.session)
     self.file_creator = FileCreator()
     self.loc_files = make_loc_files(self.file_creator)
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
     self.output = StringIO()
     self.saved_stderr = sys.stderr
     sys.stderr = self.output
Example #17
    def test_no_attach_history_handler_when_no_sqlite3(self, mock_recorder,
                                                       mock_sqlite3):
        mock_session = mock.Mock(Session)
        mock_session.get_scoped_config.return_value = {
            'cli_history': 'enabled'
        }

        parsed_args = argparse.Namespace()
        parsed_args.command = 's3'

        with mock.patch('sys.stderr', StringIO()) as mock_stderr:
            attach_history_handler(session=mock_session,
                                   parsed_args=parsed_args)
            self.assertIn('enabled but sqlite3 is unavailable',
                          mock_stderr.getvalue())
        self.assertFalse(mock_recorder.add_handler.called)
        self.assertFalse(mock_sqlite3.connect.called)
Example #18
    def _get_twine_commands(self):
        default_pypi_rc_fmt = '''\
[distutils]
index-servers=
    pypi
    codeartifact

[codeartifact]
repository: {repository_endpoint}
username: aws
password: {auth_token}'''
        default_pypi_rc = default_pypi_rc_fmt.format(
            repository_endpoint=self.endpoint,
            auth_token=self.auth_token
        )

        pypi_rc = RawConfigParser()
        if os.path.exists(self.test_pypi_rc_path):
            pypi_rc.read(self.test_pypi_rc_path)
            index_servers = pypi_rc.get('distutils', 'index-servers')
            servers = [
                server.strip()
                for server in index_servers.split('\n')
                if server.strip() != ''
            ]

            if 'codeartifact' not in servers:
                servers.append('codeartifact')
                pypi_rc.set(
                    'distutils', 'index-servers', '\n' + '\n'.join(servers)
                )

            if 'codeartifact' not in pypi_rc.sections():
                pypi_rc.add_section('codeartifact')

            pypi_rc.set('codeartifact', 'repository', self.endpoint)
            pypi_rc.set('codeartifact', 'username', 'aws')
            pypi_rc.set('codeartifact', 'password', self.auth_token)
        else:
            pypi_rc.readfp(StringIO(default_pypi_rc))

        pypi_rc_stream = StringIO()
        pypi_rc.write(pypi_rc_stream)
        pypi_rc_str = pypi_rc_stream.getvalue()
        pypi_rc_stream.close()

        return pypi_rc_str
Example #19
class TestOpenBrowserHandler(unittest.TestCase):
    def setUp(self):
        self.stream = StringIO()
        self.user_code = '12345'
        self.verification_uri = 'https://verification.com'
        self.verification_uri_complete = 'https://verification.com?code=12345'
        self.pending_authorization = {
            'userCode': self.user_code,
            'verificationUri': self.verification_uri,
            'verificationUriComplete': self.verification_uri_complete,
        }
        self.open_browser = mock.Mock(spec=webbrowser.open_new_tab)
        self.handler = OpenBrowserHandler(
            self.stream,
            open_browser=self.open_browser,
        )

    def assert_text_in_output(self, *args):
        output = self.stream.getvalue()
        for text in args:
            self.assertIn(text, output)

    def test_call_no_browser(self):
        handler = OpenBrowserHandler(self.stream, open_browser=False)
        handler(**self.pending_authorization)
        self.assert_text_in_output(self.user_code, self.verification_uri)

    def test_call_browser_success(self):
        self.handler(**self.pending_authorization)
        self.open_browser.assert_called_with(self.verification_uri_complete)
        self.assert_text_in_output('automatically', 'open')
        # assert the URI and user code are still displayed
        self.assert_text_in_output(self.user_code, self.verification_uri)

    def test_call_browser_fails(self):
        self.open_browser.side_effect = webbrowser.Error()
        self.handler(**self.pending_authorization)
        self.assert_text_in_output(self.user_code, self.verification_uri)
        self.open_browser.assert_called_with(self.verification_uri_complete)
Example #20
 def test_can_specify_amount_for_nonseekable_stream(self):
     nonseekable_fileobj = NonSeekableStream(StringIO('foobar'))
     self.assertEqual(nonseekable_fileobj.read(3), 'foo')
Example #21
 def test_can_make_stream_unseekable(self):
     fileobj = StringIO('foobar')
     self.assertTrue(seekable(fileobj))
     nonseekable_fileobj = NonSeekableStream(fileobj)
     self.assertFalse(seekable(nonseekable_fileobj))
     self.assertEqual(nonseekable_fileobj.read(), 'foobar')
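These two tests pin down the wrapper's contract: reads pass through, with
or without a byte count, while seek support is hidden from callers. Below
is a minimal sketch of a wrapper and helper that would behave this way,
assuming seekable() simply probes for a working seekable() method; the
real awscli helpers may differ:

 def seekable(fileobj):
     # Treat a stream as seekable only if it exposes a truthy seekable().
     return hasattr(fileobj, 'seekable') and fileobj.seekable()

 class NonSeekableStream(object):
     """Wrap a fileobj so it no longer advertises seek support."""
     def __init__(self, fileobj):
         self._fileobj = fileobj

     def read(self, amt=None):
         # Pass reads through, honoring an optional amount.
         if amt is None:
             return self._fileobj.read()
         return self._fileobj.read(amt)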
Example #22
class S3HandlerTestUpload(unittest.TestCase):
    """
    This class tests the ability to upload objects into an S3 bucket, as
    well as multipart uploads.
    """
    def setUp(self):
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.client = self.session.create_client('s3', 'us-west-2')
        params = {'region': 'us-west-2', 'acl': 'private'}
        self.s3_handler = S3Handler(self.session, params)
        self.chunk_size = minimum_chunk_size()
        self.threshold = self.chunk_size + 1
        self.s3_handler_multi = S3Handler(
            self.session,
            params=params,
            runtime_config=runtime_config(multipart_threshold=self.threshold,
                                          multipart_chunksize=self.chunk_size))
        self.bucket = create_bucket(self.session)
        self.file_creator = FileCreator()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        self.saved_stdout = sys.stdout
        sys.stderr = self.output
        sys.stdout = self.output

    def tearDown(self):
        self.output.close()
        sys.stderr = self.saved_stderr
        sys.stdout = self.saved_stdout
        clean_loc_files(self.file_creator)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        self.loc_files = make_loc_files(self.file_creator)
        # Confirm there are no objects in the bucket.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(
                    src=self.loc_files[i],
                    dest=self.s3_files[i],
                    operation_name='upload',
                    size=0,
                    client=self.client,
                ))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 2)

    def test_multi_upload(self):
        self.loc_files = make_loc_files(self.file_creator, self.threshold + 1)
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(
                    src=self.loc_files[i],
                    dest=self.s3_files[i],
                    size=self.threshold + 1,
                    operation_name='upload',
                    client=self.client,
                ))

        self.s3_handler_multi.call(tasks)

        # Confirm UploadPart was called
        self.assertIn("Completed 4 of 4 part(s)", self.output.getvalue())

        # Confirm the files were uploaded.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 2)
Example #23
class TestDetailedFormatter(unittest.TestCase):
    def setUp(self):
        self.output = StringIO()
        self.formatter = DetailedFormatter(self.output, colorize=False)

    def get_pretty_xml(self, xml_str):
        xml_dom = xml.dom.minidom.parseString(xml_str)
        return xml_dom.toprettyxml(indent=' ' * 4, newl='\n')

    def assert_output(self, for_event, contains):
        self.formatter.display(for_event)
        collected_output = ensure_text_type(self.output.getvalue())
        for line in contains:
            self.assertIn(line, collected_output)

    def test_display_cli_version(self):
        self.assert_output(for_event={
            'event_type': 'CLI_VERSION',
            'id': 'my-id',
            'payload': 'aws-cli/1.11.188',
            'timestamp': 86400000,
            'request_id': None
        },
                           contains=[
                               'AWS CLI command entered',
                               'with AWS CLI version: aws-cli/1.11.188'
                           ])

    def test_can_use_color(self):
        self.formatter = DetailedFormatter(self.output, colorize=True)
        self.assert_output(for_event={
            'event_type': 'CLI_VERSION',
            'id': 'my-id',
            'payload': 'aws-cli/1.11.188',
            'timestamp': 86400000,
            'request_id': None
        },
                           contains=[
                               '\x1b[1mAWS CLI command entered',
                               '\x1b[36mwith AWS CLI version:'
                           ])

    def test_display_cli_arguments(self):
        self.assert_output(
            for_event={
                'event_type': 'CLI_ARGUMENTS',
                'id': 'my-id',
                'payload': ['ec2', 'describe-regions'],
                'timestamp': 86400000,
                'request_id': None
            },
            contains=["with arguments: ['ec2', 'describe-regions']"])

    def test_display_api_call(self):
        self.assert_output(for_event={
            'event_type': 'API_CALL',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'service': 'ec2',
                'operation': 'DescribeRegions',
                'params': {}
            },
            'timestamp': 86400000,
        },
                           contains=[
                               'to service: ec2\n',
                               'using operation: DescribeRegions\n',
                               'with parameters: {}\n'
                           ])

    def test_two_different_api_calls_have_different_numbers(self):
        event = {
            'event_type': 'API_CALL',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'service': 'ec2',
                'operation': 'DescribeRegions',
                'params': {}
            },
            'timestamp': 86400000,
        }
        self.formatter.display(event)
        collected_output = ensure_text_type(self.output.getvalue())
        self.assertIn('[0] API call made', collected_output)

        other_event = {
            'event_type': 'API_CALL',
            'id': 'my-id',
            'request_id': 'other-id',
            'payload': {
                'service': 'ec2',
                'operation': 'DescribeRegions',
                'params': {}
            },
            'timestamp': 86400000,
        }
        self.formatter.display(other_event)
        new_output = ensure_text_type(
            self.output.getvalue())[len(collected_output):]
        self.assertIn('[1] API call made', new_output)

    def test_display_http_request(self):
        self.assert_output(
            for_event={
                'event_type': 'HTTP_REQUEST',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {
                    'method': 'GET',
                    'url': 'https://myservice.us-west-2.amazonaws.com',
                    'headers': {},
                    'body': 'This is my body'
                },
                'timestamp': 86400000,
            },
            contains=[
                'to URL: https://myservice.us-west-2.amazonaws.com\n',
                'with method: GET\n', 'with headers: {}\n',
                'with body: This is my body\n'
            ])

    def test_display_http_request_filter_signature(self):
        self.assert_output(for_event={
            'event_type': 'HTTP_REQUEST',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'method': 'GET',
                'url': 'https://myservice.us-west-2.amazonaws.com',
                'headers': {
                    'Authorization':
                    ('Signature=d7fa4de082b598a0ac08b756db438c630a6'
                     'cc79c4f3d1636cf69fac0e7c1abcd')
                },
                'body': 'This is my body'
            },
            'timestamp': 86400000,
        },
                           contains=['"Authorization": "Signature=d7fa..."'])

    def test_display_http_request_with_streaming_body(self):
        self.assert_output(
            for_event={
                'event_type': 'HTTP_REQUEST',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {
                    'method': 'GET',
                    'url': 'https://myservice.us-west-2.amazonaws.com',
                    'headers': {},
                    'body': 'This should not be printed out',
                    'streaming': True
                },
                'timestamp': 86400000,
            },
            contains=[
                'with body: The body is a stream and will not be displayed',
            ])

    def test_display_http_request_with_no_payload(self):
        self.assert_output(for_event={
            'event_type': 'HTTP_REQUEST',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'method': 'GET',
                'url': 'https://myservice.us-west-2.amazonaws.com',
                'headers': {},
                'body': None
            },
            'timestamp': 86400000,
        },
                           contains=['with body: There is no associated body'])

    def test_display_http_request_with_empty_string_payload(self):
        self.assert_output(for_event={
            'event_type': 'HTTP_REQUEST',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'method': 'GET',
                'url': 'https://myservice.us-west-2.amazonaws.com',
                'headers': {},
                'body': ''
            },
            'timestamp': 86400000,
        },
                           contains=['with body: There is no associated body'])

    def test_display_http_request_with_xml_payload(self):
        xml_body = '<?xml version="1.0" ?><foo><bar>text</bar></foo>'
        self.assert_output(
            for_event={
                'event_type': 'HTTP_REQUEST',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {
                    'method': 'GET',
                    'url': 'https://myservice.us-west-2.amazonaws.com',
                    'headers': {},
                    'body': xml_body
                },
                'timestamp': 86400000,
            },
            contains=['with body: ' + self.get_pretty_xml(xml_body)])

    def test_display_http_request_with_xml_payload_and_whitespace(self):
        xml_body = '<?xml version="1.0" ?><foo><bar>text</bar></foo>'
        self.assert_output(
            for_event={
                'event_type': 'HTTP_REQUEST',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {
                    'method': 'GET',
                    'url': 'https://myservice.us-west-2.amazonaws.com',
                    'headers': {},
                    'body': self.get_pretty_xml(xml_body)
                },
                'timestamp': 86400000,
            },
            # The XML should not be prettified more than once if the body
            # of the request was already prettied.
            contains=['with body: ' + self.get_pretty_xml(xml_body)])

    def test_display_http_request_with_json_struct_payload(self):
        self.assert_output(
            for_event={
                'event_type': 'HTTP_REQUEST',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {
                    'method': 'GET',
                    'url': 'https://myservice.us-west-2.amazonaws.com',
                    'headers': {},
                    'body': '{"foo": "bar"}'
                },
                'timestamp': 86400000,
            },
            contains=['with body: {\n'
                      '    "foo": "bar"\n'
                      '}'])

    def test_shares_api_number_across_events_of_same_api_call(self):
        self.assert_output(for_event={
            'event_type': 'API_CALL',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'service': 'ec2',
                'operation': 'DescribeRegions',
                'params': {}
            },
            'timestamp': 86400000,
        },
                           contains=['[0] API call made'])
        self.assert_output(for_event={
            'event_type': 'HTTP_REQUEST',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'method': 'GET',
                'url': 'https://myservice.us-west-2.amazonaws.com',
                'headers': {},
                'body': 'This is my body'
            },
            'timestamp': 86400000,
        },
                           contains=['[0] HTTP request sent'])

    def test_display_http_response(self):
        self.assert_output(for_event={
            'event_type': 'HTTP_RESPONSE',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'status_code': 200,
                'headers': {},
                'body': 'This is my body'
            },
            'timestamp': 86400000,
        },
                           contains=[
                               '[0] HTTP response received',
                               'with status code: 200\n', 'with headers: {}\n',
                               'with body: This is my body\n'
                           ])

    def test_display_http_response_with_streaming_body(self):
        self.assert_output(
            for_event={
                'event_type': 'HTTP_RESPONSE',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {
                    'status_code': 200,
                    'headers': {},
                    'body': 'This should not be printed out',
                    'streaming': True
                },
                'timestamp': 86400000,
            },
            contains=[
                'with body: The body is a stream and will not be displayed'
            ])

    def test_display_http_response_with_no_payload(self):
        self.assert_output(for_event={
            'event_type': 'HTTP_RESPONSE',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'status_code': 200,
                'headers': {},
                'body': None
            },
            'timestamp': 86400000,
        },
                           contains=['with body: There is no associated body'])

    def test_display_http_response_with_empty_string_payload(self):
        self.assert_output(for_event={
            'event_type': 'HTTP_RESPONSE',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'status_code': 200,
                'headers': {},
                'body': ''
            },
            'timestamp': 86400000,
        },
                           contains=['with body: There is no associated body'])

    def test_display_http_response_with_xml_payload(self):
        xml_body = '<?xml version="1.0" ?><foo><bar>text</bar></foo>'
        self.assert_output(
            for_event={
                'event_type': 'HTTP_RESPONSE',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {
                    'status_code': 200,
                    'headers': {},
                    'body': xml_body
                },
                'timestamp': 86400000,
            },
            contains=['with body: ' + self.get_pretty_xml(xml_body)])

    def test_display_http_response_with_xml_payload_and_whitespace(self):
        xml_body = '<?xml version="1.0" ?><foo><bar>text</bar></foo>'
        self.assert_output(
            for_event={
                'event_type': 'HTTP_RESPONSE',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {
                    'status_code': 200,
                    'headers': {},
                    'body': self.get_pretty_xml(xml_body)
                },
                'timestamp': 86400000,
            },
            # The XML should not be prettified more than once if the body
            # of the response was already prettied.
            contains=['with body: ' + self.get_pretty_xml(xml_body)])

    def test_display_http_response_with_json_struct_payload(self):
        self.assert_output(for_event={
            'event_type': 'HTTP_RESPONSE',
            'id': 'my-id',
            'request_id': 'some-id',
            'payload': {
                'status_code': 200,
                'headers': {},
                'body': '{"foo": "bar"}'
            },
            'timestamp': 86400000,
        },
                           contains=[
                               'with body: {\n',
                               '    "foo": "bar"\n',
                               '}',
                           ])

    def test_display_parsed_response(self):
        self.assert_output(
            for_event={
                'event_type': 'PARSED_RESPONSE',
                'id': 'my-id',
                'request_id': 'some-id',
                'payload': {},
                'timestamp': 86400000,
            },
            contains=['[0] HTTP response parsed', 'parsed to: {}'])

    def test_display_cli_rc(self):
        self.assert_output(
            for_event={
                'event_type': 'CLI_RC',
                'id': 'my-id',
                'payload': 0,
                'timestamp': 86400000,
                'request_id': None
            },
            contains=['AWS CLI command exited', 'with return code: 0'])

    def test_display_unknown_type(self):
        event = {
            'event_type': 'UNKNOWN',
            'id': 'my-id',
            'payload': 'foo',
            'timestamp': 86400000,
            'request_id': None
        }
        self.formatter.display(event)
        collected_output = ensure_text_type(self.output.getvalue())
        self.assertEqual('', collected_output)
Example #24
class CommandArchitectureTest(S3HandlerBaseTest):
    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_set_endpoint_no_source(self):
        cmd_arc = CommandArchitecture(self.session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'source_region': None})
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-1')

    def test_set_endpoint_with_source(self):
        cmd_arc = CommandArchitecture(self.session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'paths_type': 's3s3',
                                       'source_region': ['us-west-2']})
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-2')

    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command are
        generated properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler'],
                        'mb': ['s3_handler'],
                        'rb': ['s3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # If no plugins return a sync strategy, the default strategies
        # should be used.
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync
        )

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'],
            mock_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'],
            mock_not_at_dest_sync_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'],
            mock_not_at_src_sync_strategy
        )

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': False, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the "upload failed" line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s Error: Bucket does not exist\n" % (
                rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': local_file, 'filters': filters,
                  'paths_type': 's3local', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp`` copy
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'mv', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rm', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': local_dir, 'dest': s3_prefix, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'sync', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mb(self):
        # This ensures that the architecture sets up correctly for a ``mb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'mb', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) make_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())

    def test_run_rb(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "(dryrun) remove_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 0)

    def test_run_rb_nonzero_rc(self):
        # This ensures that the architecture returns a nonzero return code
        # when a ``rb`` command fails.  All of the components need to be
        # wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': False, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "remove_bucket failed: %s" % s3_prefix
        self.assertIn(output_str, self.err_output.getvalue())
        self.assertEqual(rc, 1)
Example #25
 def setUp(self):
     self.args = Namespace(query=None)
     self.formatter = StreamedYAMLFormatter(self.args)
     self.output = StringIO()
Example #26
class S3HandlerTestUpload(unittest.TestCase):
    """
    This class tests the ability to upload objects into an S3 bucket, as
    well as multipart uploads.
    """
    def setUp(self):
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session,
                                          params=params,
                                          runtime_config=runtime_config(
                                              multipart_threshold=10,
                                              multipart_chunksize=2))
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        sys.stderr = self.output

    def tearDown(self):
        self.output.close()
        sys.stderr = self.saved_stderr
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(
                    src=self.loc_files[i],
                    dest=self.s3_files[i],
                    operation_name='upload',
                    size=0,
                    service=self.service,
                    endpoint=self.endpoint,
                ))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)

    def test_multi_upload(self):
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(
                    src=self.loc_files[i],
                    dest=self.s3_files[i],
                    size=15,
                    operation_name='upload',
                    service=self.service,
                    endpoint=self.endpoint,
                ))

        # Note that nothing is uploaded because the file is too small;
        # the resulting error message is printed and asserted below.
        self.s3_handler_multi.call(tasks)
        print_op = "Error: Your proposed upload is smaller than the minimum"
        self.assertIn(print_op, self.output.getvalue())
Example #27
class CommandArchitectureTest(BaseAWSCommandParamsTest):
    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = self.driver.session
        self.bucket = 'mybucket'
        self.file_creator = FileCreator()
        self.loc_files = make_loc_files(self.file_creator)
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.file_creator)

    def test_set_client_no_source(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'source_region': None})
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        self.assertEqual(
            session.create_client.call_args_list[0],
            mock.call(
                's3', region_name='us-west-1', endpoint_url=None, verify=None,
                config=None)
        )
        # A client created with the same arguments as the first should be used
        # for the source client since no source region was provided.
        self.assertEqual(
            session.create_client.call_args_list[1],
            mock.call(
                's3', region_name='us-west-1', endpoint_url=None, verify=None,
                config=None)
        )

    def test_set_client_with_source(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'paths_type': 's3s3',
                                       'source_region': 'us-west-2'})
        cmd_arc.set_clients()
        create_client_args = session.create_client.call_args_list
        # Assert that three clients were created
        self.assertEqual(len(create_client_args), 3)
        self.assertEqual(
            create_client_args[0][1],
            {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
             'config': None}
        )
        self.assertEqual(
            create_client_args[1][1],
            {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
             'config': None}
        )
        # Assert that the second client is overridden by one created for the
        # source region.
        self.assertEqual(
            create_client_args[2][1],
            {'region_name': 'us-west-2', 'verify': None, 'endpoint_url': None,
             'config': None}
        )

    def test_set_sigv4_clients_with_sse_kms(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync',
            {'region': 'us-west-1', 'endpoint_url': None, 'verify_ssl': None,
             'source_region': None, 'sse': 'aws:kms'})
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        create_client_call = session.create_client.call_args_list[0]
        create_source_client_call = session.create_client.call_args_list[1]

        # Make sure that both clients are using sigv4 if kms is enabled.
        self.assertEqual(
            create_client_call[1]['config'].signature_version, 's3v4')
        self.assertEqual(
            create_source_client_call[1]['config'].signature_version, 's3v4')

    def test_create_instructions(self):
        """
        This tests that the instructions for each command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # If no plugins return a sync strategy, the defaults should
        # be used.
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync
        )

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'],
            mock_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'],
            mock_not_at_dest_sync_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'],
            mock_not_at_src_sync_strategy
        )

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'metadata': None}
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': False, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'metadata': None}
        self.http_response.status_code = 400
        self.parsed_responses = [{'Error': {
                                  'Code': 'BucketNotExists',
                                  'Message': 'Bucket does not exist'}}]
        cmd_arc = CommandArchitecture(
            self.session, 'cp', params, RuntimeConfig().build_config())
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s An error" % (
                rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': local_file, 'filters': filters,
                  'paths_type': 's3local', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp``
        # copy command.  It is just a dry run, but all of the
        # components need to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None,
                  'is_move': True}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'mv', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'rm', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': local_dir, 'dest': s3_prefix, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': 'us-west-2'}
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": [
                {"Key": "text1.txt", "Size": 100,
                 "LastModified": "2014-01-09T20:45:49.000Z"}]},
            {"CommonPrefixes": [], "Contents": []}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'sync', params, config)
        cmd_arc.create_instructions()
        cmd_arc.set_clients()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())
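Several assertions in Example #27 index into call_args_list, e.g. create_client_args[0][1]. Each entry in that list is a mock.call whose [0] element is the tuple of positional arguments and whose [1] element is the dict of keyword arguments. A standalone sketch of that access pattern (the client names here are illustrative, not code from the suite):

from unittest import mock

session = mock.Mock()
session.create_client('s3', region_name='us-west-1', verify=None)
session.create_client('s3', region_name='us-west-2', verify=None)

calls = session.create_client.call_args_list
# calls[i][0] -> positional args tuple; calls[i][1] -> kwargs dict.
assert calls[0][0] == ('s3',)
assert calls[0][1] == {'region_name': 'us-west-1', 'verify': None}
assert calls[1][1]['region_name'] == 'us-west-2'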
Example #28
0
class CommandArchitectureTest(S3HandlerBaseTest):
    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_set_endpoint_no_source(self):
        cmd_arc = CommandArchitecture(
            self.session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'source_region': None
            })
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-1')

    def test_set_endpoint_with_source(self):
        cmd_arc = CommandArchitecture(
            self.session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'paths_type': 's3s3',
                'source_region': ['us-west-2']
            })
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-2')

    def test_create_instructions(self):
        """
        This tests that the instructions for each command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {
            'cp': ['file_generator', 'file_info_builder', 's3_handler'],
            'mv': ['file_generator', 'file_info_builder', 's3_handler'],
            'rm': ['file_generator', 'file_info_builder', 's3_handler'],
            'sync': [
                'file_generator', 'comparator', 'file_info_builder',
                's3_handler'
            ],
            'mb': ['s3_handler'],
            'rb': ['s3_handler']
        }

        params = {
            'filters': True,
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'is_stream': False
        }
        for cmd in cmds:
            cmd_arc = CommandArchitecture(
                self.session, cmd, {
                    'region': 'us-east-1',
                    'endpoint_url': None,
                    'verify_ssl': None,
                    'is_stream': False
                })
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(
            cmd_arc.instructions,
            ['file_generator', 'filters', 'file_info_builder', 's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync', {
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        })
        # If no plugins return a sync strategy, the defaults should
        # be used.
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync)
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync)
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync)

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync', {
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        })
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(sync_strategies['file_at_src_and_dest_sync_strategy'],
                         mock_strategy)
        self.assertEqual(sync_strategies['file_not_at_dest_sync_strategy'],
                         mock_not_at_dest_sync_strategy)
        self.assertEqual(sync_strategies['file_not_at_src_sync_strategy'],
                         mock_not_at_src_sync_strategy)

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': False,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s Error: Bucket does not exist\n" %
            (rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': local_file,
            'filters': filters,
            'paths_type': 's3local',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp`` copy
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'mv', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'rm', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': local_dir,
            'dest': s3_prefix,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'sync', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mb(self):
        # This ensures that the architecture sets up correctly for a ``mb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'mb', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) make_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())

    def test_run_rb(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "(dryrun) remove_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 0)

    def test_run_rb_nonzero_rc(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': False,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "remove_bucket failed: %s" % s3_prefix
        self.assertIn(output_str, self.err_output.getvalue())
        self.assertEqual(rc, 1)
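The setUp/tearDown pairs in Examples #27 and #28 swap sys.stdout and sys.stderr for StringIO buffers by hand and restore them afterwards. A minimal, self-contained sketch of the same capture idea using contextlib's redirect helpers, which restore the real streams automatically (this is an illustrative alternative, not code from the suite above):

import contextlib
import io
import sys
import unittest


class OutputCaptureTest(unittest.TestCase):
    def test_captures_stdout_and_stderr(self):
        out, err = io.StringIO(), io.StringIO()
        # The context managers restore the real streams on exit, so no
        # manual tearDown bookkeeping is needed.
        with contextlib.redirect_stdout(out), contextlib.redirect_stderr(err):
            print("to stdout")
            print("to stderr", file=sys.stderr)
        self.assertIn("to stdout", out.getvalue())
        self.assertIn("to stderr", err.getvalue())


if __name__ == '__main__':
    unittest.main()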
Example #29
0
class TestTextFormatter(unittest.TestCase):
    _COL_WIDTHS = {
        'id_a': 10,
        'timestamp': 23,
        'args': 10,
        'rc': 10
    }

    def setUp(self):
        self.output_stream = StringIO()
        self.formatter = TextFormatter(self._COL_WIDTHS, self.output_stream)

        self.timestamp = 1511376242067
        command_time = datetime.datetime.fromtimestamp(self.timestamp / 1000)
        self.formatted_time = datetime.datetime.strftime(
            command_time, '%Y-%m-%d %I:%M:%S %p')

    def _format_records(self, records):
        adapter = RecordAdapter(iter(records))
        self.formatter(adapter)

    def test_can_emit_single_row(self):
        self._format_records([
            {
                'id_a': 'foo',
                'timestamp': self.timestamp,
                'args': '["s3", "ls"]',
                'rc': 0
            }
        ])
        expected_output = 'foo       %s s3 ls     0\n' % self.formatted_time
        actual_output = ensure_text_type(self.output_stream.getvalue())
        self.assertEqual(expected_output, actual_output)

    def test_can_emit_multiple_rows(self):
        self._format_records([
            {
                'id_a': 'foo',
                'timestamp': self.timestamp,
                'args': '["s3", "ls"]',
                'rc': 0
            },
            {
                'id_a': 'bar',
                'timestamp': self.timestamp,
                'args': '["s3", "cp"]',
                'rc': 1
            }
        ])
        expected_output = ('foo       %s s3 ls     0\n'
                           'bar       %s s3 cp     1\n') % (
                               self.formatted_time, self.formatted_time)
        actual_output = ensure_text_type(self.output_stream.getvalue())
        self.assertEqual(expected_output, actual_output)

    def test_can_truncate_args(self):
        # Truncate the argument if it won't fit in the space allotted to the
        # arguments field.
        self._format_records([
            {
                'id_a': 'foo',
                'timestamp': self.timestamp,
                'args': ('["s3", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
                         'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]'),
                'rc': 0
            }
        ])
        expected_output = 'foo       %s s3 aaa... 0\n' % self.formatted_time
        actual_output = ensure_text_type(self.output_stream.getvalue())
        self.assertEqual(expected_output, actual_output)
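Example #29 converts an epoch timestamp given in milliseconds (hence the division by 1000) and pads each field to a fixed column width, truncating arguments that overflow their slot. A small standalone sketch of that conversion and padding (the pad helper is hypothetical and only approximates TextFormatter's truncation rule):

import datetime

timestamp_ms = 1511376242067
# Epoch milliseconds -> seconds -> datetime.
command_time = datetime.datetime.fromtimestamp(timestamp_ms / 1000)
formatted_time = command_time.strftime('%Y-%m-%d %I:%M:%S %p')

COL_WIDTHS = {'id_a': 10, 'timestamp': 23, 'args': 10, 'rc': 10}

def pad(value, width):
    # Hypothetical rule: truncate with '...' when the value would not
    # fit, otherwise left-justify to the column width.
    text = str(value)
    if len(text) > width - 1:
        text = text[:width - 4] + '...'
    return text.ljust(width)

row = ''.join([
    pad('foo', COL_WIDTHS['id_a']),
    pad(formatted_time, COL_WIDTHS['timestamp']),
    pad('s3 ls', COL_WIDTHS['args']),
    pad(0, COL_WIDTHS['rc']),
])
print(row)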
Example #30
0
 def setUp(self):
     self.output = StringIO()
     self.formatter = DetailedFormatter(self.output, colorize=False)
Example #31
0
class S3HandlerTestUpload(unittest.TestCase):
    """
    This class tests the ability to upload objects into an S3 bucket as
    well as multipart uploads
    """
    def setUp(self):
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session, multi_threshold=10,
                                          chunksize=2,
                                          params=params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        sys.stderr = self.output

    def tearDown(self):
        self.output.close()
        sys.stderr = self.saved_stderr
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=self.s3_files[i],
                operation_name='upload', size=0,
                service=self.service,
                endpoint=self.endpoint,
            ))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)

    def test_multi_upload(self):
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=self.s3_files[i], size=15,
                operation_name='upload',
                service=self.service,
                endpoint=self.endpoint,
            ))

        # Note that nothing is uploaded because the proposed upload is
        # smaller than the minimum allowed size; an error message is
        # printed instead.
        self.s3_handler_multi.call(tasks)
        print_op = "Error: Your proposed upload is smaller than the minimum"
        self.assertIn(print_op, self.output.getvalue())
Example #32
0
 def setUp(self):
     self.dumper = YAMLDumper()
     self.output = StringIO()
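The choose_sync_strategies tests in Examples #27 and #28 (and #33 below) fake the shape of session.emit, which in botocore returns a list of (handler, return_value) pairs; a plugin that supplies a sync strategy puts it in the value slot. A standalone sketch of consuming responses of that shape (the strategy names are illustrative):

from unittest import mock

# botocore's emit() returns [(handler, value), ...].
strategy = mock.Mock()
strategy.sync_type = 'file_at_src_and_dest'
responses = [(None, strategy), (None, None)]

chosen = {}
for _handler, value in responses:
    if value is not None and hasattr(value, 'sync_type'):
        chosen[value.sync_type + '_sync_strategy'] = value

assert chosen['file_at_src_and_dest_sync_strategy'] is strategy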
Example #33
0
class CommandArchitectureTest(BaseAWSCommandParamsTest):
    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = self.driver.session
        self.bucket = 'mybucket'
        self.file_creator = FileCreator()
        self.loc_files = make_loc_files(self.file_creator)
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.file_creator)

    def test_set_client_no_source(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'source_region': None
            })
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        self.assertEqual(
            session.create_client.call_args_list[0],
            mock.call('s3',
                      region_name='us-west-1',
                      endpoint_url=None,
                      verify=None,
                      config=None))
        # A client created with the same arguments as the first should be used
        # for the source client since no source region was provided.
        self.assertEqual(
            session.create_client.call_args_list[1],
            mock.call('s3',
                      region_name='us-west-1',
                      endpoint_url=None,
                      verify=None,
                      config=None))

    def test_set_client_with_source(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'paths_type': 's3s3',
                'source_region': 'us-west-2'
            })
        cmd_arc.set_clients()
        create_client_args = session.create_client.call_args_list
        # Assert that three clients were created
        self.assertEqual(len(create_client_args), 3)
        self.assertEqual(
            create_client_args[0][1], {
                'region_name': 'us-west-1',
                'verify': None,
                'endpoint_url': None,
                'config': None
            })
        self.assertEqual(
            create_client_args[1][1], {
                'region_name': 'us-west-1',
                'verify': None,
                'endpoint_url': None,
                'config': None
            })
        # Assert that the second client is overridden by one configured for
        # the source region.
        self.assertEqual(
            create_client_args[2][1], {
                'region_name': 'us-west-2',
                'verify': None,
                'endpoint_url': None,
                'config': None
            })

    def test_set_sigv4_clients_with_sse_kms(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'source_region': None,
                'sse': 'aws:kms'
            })
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        create_client_call = session.create_client.call_args_list[0]
        create_source_client_call = session.create_client.call_args_list[1]

        # Make sure that both clients are using sigv4 if kms is enabled.
        self.assertEqual(create_client_call[1]['config'].signature_version,
                         's3v4')
        self.assertEqual(
            create_source_client_call[1]['config'].signature_version, 's3v4')

    def test_create_instructions(self):
        """
        This tests that the instructions for each command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {
            'cp': ['file_generator', 'file_info_builder', 's3_handler'],
            'mv': ['file_generator', 'file_info_builder', 's3_handler'],
            'rm': ['file_generator', 'file_info_builder', 's3_handler'],
            'sync': [
                'file_generator', 'comparator', 'file_info_builder',
                's3_handler'
            ],
            'mb': ['s3_handler'],
            'rb': ['s3_handler']
        }

        params = {
            'filters': True,
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'is_stream': False
        }
        for cmd in cmds:
            cmd_arc = CommandArchitecture(
                self.session, cmd, {
                    'region': 'us-east-1',
                    'endpoint_url': None,
                    'verify_ssl': None,
                    'is_stream': False
                })
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(
            cmd_arc.instructions,
            ['file_generator', 'filters', 'file_info_builder', 's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync', {
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        })
        # If no plugins return a sync strategy, the defaults should
        # be used.
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync)
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync)
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync)

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync', {
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        })
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(sync_strategies['file_at_src_and_dest_sync_strategy'],
                         mock_strategy)
        self.assertEqual(sync_strategies['file_not_at_dest_sync_strategy'],
                         mock_not_at_dest_sync_strategy)
        self.assertEqual(sync_strategies['file_not_at_src_sync_strategy'],
                         mock_not_at_src_sync_strategy)

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': False,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None,
            'metadata': None
        }
        self.http_response.status_code = 400
        self.parsed_responses = [{
            'Error': {
                'Code': 'BucketNotExists',
                'Message': 'Bucket does not exist'
            }
        }]
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = ("upload failed: %s to %s An error" %
                      (rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': local_file,
            'filters': filters,
            'paths_type': 's3local',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        self.parsed_responses = [{
            "ETag": "abcd",
            "ContentLength": 100,
            "LastModified": "2014-01-09T20:45:49.000Z"
        }]
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp``
        # copy command.  It is just a dry run, but all of the
        # components need to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        self.parsed_responses = [{
            "ETag": "abcd",
            "ContentLength": 100,
            "LastModified": "2014-01-09T20:45:49.000Z"
        }]
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        self.parsed_responses = [{
            "ETag": "abcd",
            "ContentLength": 100,
            "LastModified": "2014-01-09T20:45:49.000Z"
        }]
        cmd_arc = CommandArchitecture(self.session, 'mv', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        self.parsed_responses = [{
            "ETag": "abcd",
            "ContentLength": 100,
            "LastModified": "2014-01-09T20:45:49.000Z"
        }]
        cmd_arc = CommandArchitecture(self.session, 'rm', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': local_dir,
            'dest': s3_prefix,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': 'us-west-2'
        }
        self.parsed_responses = [{
            "CommonPrefixes": [],
            "Contents": [{
                "Key": "text1.txt",
                "Size": 100,
                "LastModified": "2014-01-09T20:45:49.000Z"
            }]
        }, {
            "CommonPrefixes": [],
            "Contents": []
        }]
        cmd_arc = CommandArchitecture(self.session, 'sync', params)
        cmd_arc.create_instructions()
        cmd_arc.set_clients()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mb(self):
        # This ensures that the architecture sets up correctly for a ``mb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'mb', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) make_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())

    def test_run_rb(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        self.patch_make_request()
        rc = cmd_arc.run()
        output_str = "(dryrun) remove_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 0)

    def test_run_rb_nonzero_rc(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': False,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        self.http_response.status_code = 400
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        self.patch_make_request()
        rc = cmd_arc.run()
        output_str = "remove_bucket failed: %s" % s3_prefix
        self.assertIn(output_str, self.err_output.getvalue())
        self.assertEqual(rc, 1)
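Examples #27 and #33 inject error responses through the BaseAWSCommandParamsTest hooks (http_response.status_code and parsed_responses). Outside that harness, a comparable way to simulate the same "bucket does not exist" failure against a real botocore client is botocore's Stubber; a minimal sketch under that assumption (the bucket and key names, and the dummy credentials, are illustrative):

import botocore.session
from botocore.stub import Stubber

session = botocore.session.get_session()
# Dummy credentials keep the example self-contained; no request is sent.
client = session.create_client(
    's3', region_name='us-east-1',
    aws_access_key_id='testing', aws_secret_access_key='testing')

stubber = Stubber(client)
stubber.add_client_error(
    'head_object',
    service_error_code='NoSuchBucket',
    service_message='Bucket does not exist',
    http_status_code=400,
)

with stubber:
    try:
        client.head_object(Bucket='bucket-does-not-exist', Key='text1.txt')
    except Exception as error:
        print(error)  # An error occurred (NoSuchBucket) ...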
Example #34
0
class S3HandlerTestUpload(unittest.TestCase):
    """
    This class tests the ability to upload objects into an S3 bucket as
    well as multipart uploads
    """
    def setUp(self):
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.client = self.session.create_client('s3', 'us-west-2')
        params = {'region': 'us-west-2', 'acl': 'private'}
        self.s3_handler = S3Handler(self.session, params)
        self.chunk_size = minimum_chunk_size()
        self.threshold = self.chunk_size + 1
        self.s3_handler_multi = S3Handler(
            self.session, params=params,
            runtime_config=runtime_config(
                multipart_threshold=self.threshold,
                multipart_chunksize=self.chunk_size))
        self.bucket = create_bucket(self.session)
        self.file_creator = FileCreator()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        self.saved_stdout = sys.stdout
        sys.stderr = self.output
        sys.stdout = self.output

    def tearDown(self):
        self.output.close()
        sys.stderr = self.saved_stderr
        clean_loc_files(self.file_creator)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        self.loc_files = make_loc_files(self.file_creator)
        # Confirm there are no objects in the bucket.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=self.s3_files[i],
                operation_name='upload', size=0,
                client=self.client,
            ))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 2)

    def test_multi_upload(self):
        self.loc_files = make_loc_files(self.file_creator, self.threshold + 1)
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=self.s3_files[i],
                size=self.threshold + 1,
                operation_name='upload',
                client=self.client,
            ))

        self.s3_handler_multi.call(tasks)

        # Confirm UploadPart was called
        self.assertIn("Completed 4 of 4 part(s)", self.output.getvalue())

        # Confirm the files were uploaded.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 2)
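The "Completed 4 of 4 part(s)" expectation in Example #34 follows from the sizes chosen in setUp: the multipart threshold is chunk_size + 1, each file is threshold + 1 = chunk_size + 2 bytes, so each file splits into two parts, and two files give four parts in total. A quick check of that arithmetic (the 5 MiB value is an assumption standing in for minimum_chunk_size()):

import math

chunk_size = 5 * 1024 * 1024      # assumed S3 minimum part size (5 MiB)
threshold = chunk_size + 1
file_size = threshold + 1         # chunk_size + 2 bytes, just over threshold

parts_per_file = math.ceil(file_size / chunk_size)   # -> 2
total_parts = 2 * parts_per_file                     # two files -> 4
print(parts_per_file, total_parts)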