Ejemplo n.º 1
0
    def get_commands(cls, endpoint, auth_token, **kwargs):
        """Build the .pypirc content that configures Twine for CodeArtifact.

        :param endpoint: Repository endpoint URL written to the
            ``codeartifact`` section's ``repository`` option.
        :param auth_token: Token written as the ``codeartifact`` password.
        :param kwargs: Must contain ``pypi_rc_path`` — the path to the
            .pypirc file to merge with (it may not exist yet).
        :return: The serialized pypirc content as a string.
        :raises AssertionError: If ``pypi_rc_path`` is not supplied.
        """
        # TODO(ujjwalpa@): We don't really have a command to execute for Twine
        # as we directly write to the pypirc file (or to stdout for dryrun)
        # with python itself instead. Nevertheless, we're using this method for
        # testing so we'll keep the interface for now but return a string with
        # the expected pypirc content instead of a list of commands to
        # execute. This definitely reeks of code smell and there is probably
        # room for rethinking and refactoring the interfaces of these adapter
        # helper classes in the future.

        assert 'pypi_rc_path' in kwargs, 'pypi_rc_path must be provided.'
        pypi_rc_path = kwargs['pypi_rc_path']

        default_pypi_rc = cls.DEFAULT_PYPI_RC_FMT.format(
            repository_endpoint=endpoint,
            auth_token=auth_token
        )

        pypi_rc = RawConfigParser()
        if os.path.exists(pypi_rc_path):
            try:
                # Merge into the user's existing file: keep their other
                # index servers and add/refresh the codeartifact entry.
                pypi_rc.read(pypi_rc_path)
                index_servers = pypi_rc.get('distutils', 'index-servers')
                servers = [
                    server.strip()
                    for server in index_servers.split('\n')
                    if server.strip() != ''
                ]

                if 'codeartifact' not in servers:
                    servers.append('codeartifact')
                    pypi_rc.set(
                        'distutils', 'index-servers', '\n' + '\n'.join(servers)
                    )

                if 'codeartifact' not in pypi_rc.sections():
                    pypi_rc.add_section('codeartifact')

                pypi_rc.set('codeartifact', 'repository', endpoint)
                pypi_rc.set('codeartifact', 'username', 'aws')
                pypi_rc.set('codeartifact', 'password', auth_token)
            except Exception:  # invalid .pypirc file
                sys.stdout.write('%s is in an invalid state.' % pypi_rc_path)
                sys.stdout.write(os.linesep)
                # Bare raise preserves the original traceback.
                raise
        else:
            # read_file() replaces readfp(), which was deprecated in
            # Python 3.2 and removed in 3.12.
            pypi_rc.read_file(StringIO(default_pypi_rc))

        pypi_rc_stream = StringIO()
        pypi_rc.write(pypi_rc_stream)
        pypi_rc_str = pypi_rc_stream.getvalue()
        pypi_rc_stream.close()

        return pypi_rc_str
Ejemplo n.º 2
0
    def _get_twine_commands(self):
        default_pypi_rc_fmt = '''\
[distutils]
index-servers=
    pypi
    codeartifact

[codeartifact]
repository: {repository_endpoint}
username: aws
password: {auth_token}'''
        default_pypi_rc = default_pypi_rc_fmt.format(
            repository_endpoint=self.endpoint,
            auth_token=self.auth_token
        )

        pypi_rc = RawConfigParser()
        if os.path.exists(self.test_pypi_rc_path):
            pypi_rc.read(self.test_pypi_rc_path)
            index_servers = pypi_rc.get('distutils', 'index-servers')
            servers = [
                server.strip()
                for server in index_servers.split('\n')
                if server.strip() != ''
            ]

            if 'codeartifact' not in servers:
                servers.append('codeartifact')
                pypi_rc.set(
                    'distutils', 'index-servers', '\n' + '\n'.join(servers)
                )

            if 'codeartifact' not in pypi_rc.sections():
                pypi_rc.add_section('codeartifact')

            pypi_rc.set('codeartifact', 'repository', self.endpoint)
            pypi_rc.set('codeartifact', 'username', 'aws')
            pypi_rc.set('codeartifact', 'password', self.auth_token)
        else:
            pypi_rc.readfp(StringIO(default_pypi_rc))

        pypi_rc_stream = StringIO()
        pypi_rc.write(pypi_rc_stream)
        pypi_rc_str = pypi_rc_stream.getvalue()
        pypi_rc_stream.close()

        return pypi_rc_str
Ejemplo n.º 3
0
class CommandArchitectureTest(BaseAWSCommandParamsTest):
    """Verify CommandArchitecture's client setup, instruction creation,
    sync-strategy selection, and dry-run execution for each s3 subcommand.

    stdout/stderr are captured in setUp so tests can assert on the exact
    output the CLI would print.
    """

    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = self.driver.session
        self.bucket = 'mybucket'
        self.file_creator = FileCreator()
        self.loc_files = make_loc_files(self.file_creator)
        # Redirect stdout/stderr into StringIO buffers for assertions.
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        # Restore the real streams before the base-class teardown runs.
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.file_creator)

    def test_set_client_no_source(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'source_region': None
            })
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        self.assertEqual(
            session.create_client.call_args_list[0],
            mock.call('s3',
                      region_name='us-west-1',
                      endpoint_url=None,
                      verify=None,
                      config=None))
        # A client created with the same arguments as the first should be used
        # for the source client since no source region was provided.
        self.assertEqual(
            session.create_client.call_args_list[1],
            mock.call('s3',
                      region_name='us-west-1',
                      endpoint_url=None,
                      verify=None,
                      config=None))

    def test_set_client_with_source(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'paths_type': 's3s3',
                'source_region': 'us-west-2'
            })
        cmd_arc.set_clients()
        create_client_args = session.create_client.call_args_list
        # Assert that three clients were created in total.
        self.assertEqual(len(create_client_args), 3)
        self.assertEqual(
            create_client_args[0][1], {
                'region_name': 'us-west-1',
                'verify': None,
                'endpoint_url': None,
                'config': None
            })
        self.assertEqual(
            create_client_args[1][1], {
                'region_name': 'us-west-1',
                'verify': None,
                'endpoint_url': None,
                'config': None
            })
        # Assert that the third call overrides the second client created,
        # using the region needed for the source.
        self.assertEqual(
            create_client_args[2][1], {
                'region_name': 'us-west-2',
                'verify': None,
                'endpoint_url': None,
                'config': None
            })

    def test_set_sigv4_clients_with_sse_kms(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'source_region': None,
                'sse': 'aws:kms'
            })
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        create_client_call = session.create_client.call_args_list[0]
        create_source_client_call = session.create_client.call_args_list[1]

        # Make sure that both clients are using sigv4 if kms is enabled.
        self.assertEqual(create_client_call[1]['config'].signature_version,
                         's3v4')
        self.assertEqual(
            create_source_client_call[1]['config'].signature_version, 's3v4')

    def test_create_instructions(self):
        """
        This tests to make sure the instructions for each command are
        generated properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {
            'cp': ['file_generator', 'file_info_builder', 's3_handler'],
            'mv': ['file_generator', 'file_info_builder', 's3_handler'],
            'rm': ['file_generator', 'file_info_builder', 's3_handler'],
            'sync': [
                'file_generator', 'comparator', 'file_info_builder',
                's3_handler'
            ],
            'mb': ['s3_handler'],
            'rb': ['s3_handler']
        }

        params = {
            'filters': True,
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'is_stream': False
        }
        for cmd in cmds:
            cmd_arc = CommandArchitecture(
                self.session, cmd, {
                    'region': 'us-east-1',
                    'endpoint_url': None,
                    'verify_ssl': None,
                    'is_stream': False
                })
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(
            cmd_arc.instructions,
            ['file_generator', 'filters', 'file_info_builder', 's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync', {
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        })
        # Check if no plugins return their sync strategy.  Should
        # result in the default strategies
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync)
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync)
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync)

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync', {
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        })
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(sync_strategies['file_at_src_and_dest_sync_strategy'],
                         mock_strategy)
        self.assertEqual(sync_strategies['file_not_at_dest_sync_strategy'],
                         mock_not_at_dest_sync_strategy)
        self.assertEqual(sync_strategies['file_not_at_src_sync_strategy'],
                         mock_not_at_src_sync_strategy)

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': False,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None,
            'metadata': None
        }
        # Simulate a 400 error response from the service.
        self.http_response.status_code = 400
        self.parsed_responses = [{
            'Error': {
                'Code': 'BucketNotExists',
                'Message': 'Bucket does not exist'
            }
        }]
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = ("upload failed: %s to %s An error" %
                      (rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': local_file,
            'filters': filters,
            'paths_type': 's3local',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        self.parsed_responses = [{
            "ETag": "abcd",
            "ContentLength": 100,
            "LastModified": "2014-01-09T20:45:49.000Z"
        }]
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp``
        # copy command.  It is just a dry run, but all of the
        # components need to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        self.parsed_responses = [{
            "ETag": "abcd",
            "ContentLength": 100,
            "LastModified": "2014-01-09T20:45:49.000Z"
        }]
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        self.parsed_responses = [{
            "ETag": "abcd",
            "ContentLength": 100,
            "LastModified": "2014-01-09T20:45:49.000Z"
        }]
        cmd_arc = CommandArchitecture(self.session, 'mv', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        self.parsed_responses = [{
            "ETag": "abcd",
            "ContentLength": 100,
            "LastModified": "2014-01-09T20:45:49.000Z"
        }]
        cmd_arc = CommandArchitecture(self.session, 'rm', params)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': local_dir,
            'dest': s3_prefix,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': 'us-west-2'
        }
        # First response lists one remote object; second is an empty page.
        self.parsed_responses = [{
            "CommonPrefixes": [],
            "Contents": [{
                "Key": "text1.txt",
                "Size": 100,
                "LastModified": "2014-01-09T20:45:49.000Z"
            }]
        }, {
            "CommonPrefixes": [],
            "Contents": []
        }]
        cmd_arc = CommandArchitecture(self.session, 'sync', params)
        cmd_arc.create_instructions()
        cmd_arc.set_clients()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mb(self):
        # This ensures that the architecture sets up correctly for a ``mb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'mb', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) make_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())

    def test_run_rb(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False,
            'source_region': None
        }
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        self.patch_make_request()
        rc = cmd_arc.run()
        output_str = "(dryrun) remove_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 0)

    def test_run_rb_nonzero_rc(self):
        # This ensures that a failing (non-dryrun) ``rb`` command reports
        # the failure on stderr and returns a nonzero return code.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': False,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        self.http_response.status_code = 400
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        self.patch_make_request()
        rc = cmd_arc.run()
        output_str = "remove_bucket failed: %s" % s3_prefix
        self.assertIn(output_str, self.err_output.getvalue())
        self.assertEqual(rc, 1)
Ejemplo n.º 4
0
class S3HandlerTestUpload(unittest.TestCase):
    """Exercises S3Handler's single-part and multipart upload paths."""

    def setUp(self):
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        # Tiny threshold/chunksize so even small files hit the multipart
        # code path.
        self.s3_handler_multi = S3Handler(self.session, multi_threshold=10,
                                          chunksize=2,
                                          params=params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        # Capture stderr so tests can assert on emitted error text.
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        sys.stderr = self.output

    def tearDown(self):
        self.output.close()
        sys.stderr = self.saved_stderr
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # The bucket must start out empty.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # One upload task per (local file, s3 destination) pair.
        upload_tasks = [
            FileInfo(
                src=local_path,
                dest=remote_path,
                operation_name='upload', size=0,
                service=self.service,
                endpoint=self.endpoint,
            )
            for local_path, remote_path in zip(self.loc_files[:2],
                                               self.s3_files)
        ]
        self.s3_handler.call(upload_tasks)
        # Both objects should now exist in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)

    def test_multi_upload(self):
        upload_tasks = [
            FileInfo(
                src=local_path,
                dest=remote_path, size=15,
                operation_name='upload',
                service=self.service,
                endpoint=self.endpoint,
            )
            for local_path, remote_path in zip(self.loc_files[:2],
                                               self.s3_files)
        ]

        # Nothing gets uploaded because the files are below the minimum
        # multipart size; the handler reports this on (captured) stderr.
        self.s3_handler_multi.call(upload_tasks)
        print_op = "Error: Your proposed upload is smaller than the minimum"
        self.assertIn(print_op, self.output.getvalue())
Ejemplo n.º 5
0
class S3HandlerTestUpload(unittest.TestCase):
    """Covers S3Handler uploads, including the multipart code path."""

    def setUp(self):
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        # Force multipart behavior for tiny files via the runtime config.
        self.s3_handler_multi = S3Handler(self.session,
                                          params=params,
                                          runtime_config=runtime_config(
                                              multipart_threshold=10,
                                              multipart_chunksize=2))
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        # Redirect stderr so tests can assert on emitted errors.
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        sys.stderr = self.output

    def tearDown(self):
        self.output.close()
        sys.stderr = self.saved_stderr
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def _upload_tasks(self, size):
        # Build an upload FileInfo for each (local, remote) pair.
        return [
            FileInfo(
                src=local_path,
                dest=remote_path,
                size=size,
                operation_name='upload',
                service=self.service,
                endpoint=self.endpoint,
            )
            for local_path, remote_path in zip(self.loc_files[:2],
                                               self.s3_files)
        ]

    def test_upload(self):
        # Bucket starts empty; after the calls both files should be there.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        self.s3_handler.call(self._upload_tasks(size=0))
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)

    def test_multi_upload(self):
        # The parts fall below S3's minimum part size, so nothing is
        # uploaded and the handler prints an error to (captured) stderr.
        self.s3_handler_multi.call(self._upload_tasks(size=15))
        print_op = "Error: Your proposed upload is smaller than the minimum"
        self.assertIn(print_op, self.output.getvalue())
Ejemplo n.º 6
0
class CommandArchitectureTest(BaseAWSCommandParamsTest):
    """Tests for CommandArchitecture driven through the CLI test driver.

    setUp captures sys.stdout/sys.stderr into StringIO buffers so each
    test can assert on the exact text the commands emit; tearDown
    restores the real streams and cleans up local fixture files.
    """

    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = self.driver.session
        self.bucket = 'mybucket'
        self.file_creator = FileCreator()
        self.loc_files = make_loc_files(self.file_creator)
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        # Redirect the streams so tests can inspect emitted text.
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.file_creator)

    def test_set_client_no_source(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'source_region': None})
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        self.assertEqual(
            session.create_client.call_args_list[0],
            mock.call(
             's3', region_name='us-west-1', endpoint_url=None, verify=None,
             config=None)
        )
        # A client created with the same arguments as the first should be used
        # for the source client since no source region was provided.
        self.assertEqual(
            session.create_client.call_args_list[1],
            mock.call(
                's3', region_name='us-west-1', endpoint_url=None, verify=None,
                config=None)
        )

    def test_set_client_with_source(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'paths_type': 's3s3',
                                       'source_region': 'us-west-2'})
        cmd_arc.set_clients()
        create_client_args = session.create_client.call_args_list
        # Assert that three create_client calls were made in total.
        self.assertEqual(len(create_client_args), 3)
        self.assertEqual(
            create_client_args[0][1],
            {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
             'config': None}
        )
        self.assertEqual(
            create_client_args[1][1],
            {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
             'config': None}
        )
        # Assert that the second client created is overridden with the one
        # needed for the source region.
        self.assertEqual(
            create_client_args[2][1],
            {'region_name': 'us-west-2', 'verify': None, 'endpoint_url': None,
             'config': None}
        )

    def test_set_sigv4_clients_with_sse_kms(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync',
            {'region': 'us-west-1', 'endpoint_url': None, 'verify_ssl': None,
             'source_region': None, 'sse': 'aws:kms'})
        cmd_arc.set_clients()
        self.assertEqual( session.create_client.call_count, 2)
        create_client_call = session.create_client.call_args_list[0]
        create_source_client_call = session.create_client.call_args_list[1]

        # Make sure that both clients are using sigv4 if kms is enabled.
        self.assertEqual(
            create_client_call[1]['config'].signature_version, 's3v4')
        self.assertEqual(
            create_source_client_call[1]['config'].signature_version, 's3v4')

    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command is generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check if no plugins return their sync strategy.  Should
        # result in the default strategies
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync
        )

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'],
            mock_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'],
            mock_not_at_dest_sync_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'],
            mock_not_at_src_sync_strategy
        )

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'metadata': None}
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': False, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'metadata': None}
        self.http_response.status_code = 400
        self.parsed_responses = [{'Error': {
                                  'Code': 'BucketNotExists',
                                  'Message': 'Bucket does not exist'}}]
        cmd_arc = CommandArchitecture(
            self.session, 'cp', params, RuntimeConfig().build_config())
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s An error" % (
                rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': local_file, 'filters': filters,
                  'paths_type': 's3local', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp``
        # copy command.  It is just a dry run, but all of the
        # components need to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None,
                  'is_move': True}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'mv', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'rm', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': local_dir, 'dest': s3_prefix, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': 'us-west-2'}
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": [
                {"Key": "text1.txt", "Size": 100,
                 "LastModified": "2014-01-09T20:45:49.000Z"}]},
            {"CommonPrefixes": [], "Contents": []}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'sync', params, config)
        cmd_arc.create_instructions()
        cmd_arc.set_clients()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())
# Ejemplo n.º 7
# 0
class CommandArchitectureTest(S3HandlerBaseTest):
    """Tests for CommandArchitecture driven by a FakeSession.

    setUp creates local fixture files and fake S3 objects, and captures
    sys.stdout/sys.stderr into StringIO buffers so each test can assert
    on the exact text the commands emit; tearDown restores the streams
    and removes the fixtures.
    """

    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        # Redirect the streams so tests can inspect emitted text.
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_set_endpoint_no_source(self):
        cmd_arc = CommandArchitecture(self.session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'source_region': None})
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-1')

    def test_set_endpoint_with_source(self):
        cmd_arc = CommandArchitecture(self.session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'paths_type': 's3s3',
                                       'source_region': ['us-west-2']})
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-2')

    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command is generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler'],
                        'mb': ['s3_handler'],
                        'rb': ['s3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check if no plugins return their sync strategy.  Should
        # result in the default strategies
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync
        )

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'],
            mock_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'],
            mock_not_at_dest_sync_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'],
            mock_not_at_src_sync_strategy
        )

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': False, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s Error: Bucket does not exist\n" % (
                rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': local_file, 'filters': filters,
                  'paths_type': 's3local', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp`` copy
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'mv', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rm', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': local_dir, 'dest': s3_prefix, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'sync', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mb(self):
        # This ensures that the architecture sets up correctly for a ``mb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'mb', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) make_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())

    def test_run_rb(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "(dryrun) remove_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 0)

    def test_run_rb_nonzero_rc(self):
        # This ensures that a failed (non-dryrun) ``rb`` command reports
        # the failure on stderr and returns a nonzero return code.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': False, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "remove_bucket failed: %s" % s3_prefix
        self.assertIn(output_str, self.err_output.getvalue())
        self.assertEqual(rc, 1)
# Ejemplo n.º 8
# 0
class CommandArchitectureTest(S3HandlerBaseTest):
    """Wiring tests for ``CommandArchitecture`` across the s3 subcommands.

    Each ``test_run_*`` test performs a dry run (unless noted otherwise)
    against fake S3 fixtures and asserts on the captured stdout/stderr.
    """

    def setUp(self):
        # Build fake S3/local fixtures and redirect stdout/stderr into
        # StringIO buffers so tests can assert on printed output.
        super(CommandArchitectureTest, self).setUp()
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        # Restore the real streams first so cleanup output is visible,
        # then remove the local and fake-S3 fixtures.
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_set_endpoint_no_source(self):
        # With no source region, both the destination and source
        # endpoints fall back to the destination region.
        cmd_arc = CommandArchitecture(
            self.session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'source_region': None
            })
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-1')

    def test_set_endpoint_with_source(self):
        # For an s3-to-s3 transfer with an explicit source region, the
        # source endpoint must use that region, not the destination's.
        cmd_arc = CommandArchitecture(
            self.session, 'sync', {
                'region': 'us-west-1',
                'endpoint_url': None,
                'verify_ssl': None,
                'paths_type': 's3s3',
                'source_region': ['us-west-2']
            })
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-2')

    def test_create_instructions(self):
        """
        This tests to make sure the instruction list for each command is
        generated properly, with and without filters.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {
            'cp': ['file_generator', 'file_info_builder', 's3_handler'],
            'mv': ['file_generator', 'file_info_builder', 's3_handler'],
            'rm': ['file_generator', 'file_info_builder', 's3_handler'],
            'sync': [
                'file_generator', 'comparator', 'file_info_builder',
                's3_handler'
            ],
            'mb': ['s3_handler'],
            'rb': ['s3_handler']
        }

        params = {
            'filters': True,
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'is_stream': False
        }
        for cmd in cmds:
            cmd_arc = CommandArchitecture(
                self.session, cmd, {
                    'region': 'us-east-1',
                    'endpoint_url': None,
                    'verify_ssl': None,
                    'is_stream': False
                })
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter: a 'filters' instruction is inserted
        # after the file generator.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(
            cmd_arc.instructions,
            ['file_generator', 'filters', 'file_info_builder', 's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync', {
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        })
        # Check if no plugins return their sync strategy.  Should
        # result in the default strategies
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync)
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync)
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync)

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync', {
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        })
        # Check that the default sync strategy is overridden if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(sync_strategies['file_at_src_and_dest_sync_strategy'],
                         mock_strategy)
        self.assertEqual(sync_strategies['file_not_at_dest_sync_strategy'],
                         mock_not_at_dest_sync_strategy)
        self.assertEqual(sync_strategies['file_not_at_src_sync_strategy'],
                         mock_not_at_src_sync_strategy)

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        # Uploading to a nonexistent bucket should fail with the error
        # message printed on the same line as the status.
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': False,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s Error: Bucket does not exist\n" %
            (rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': local_file,
            'filters': filters,
            'paths_type': 's3local',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp`` copy
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'mv', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'rm', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': local_dir,
            'dest': s3_prefix,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'sync', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mb(self):
        # This ensures that the architecture sets up correctly for a ``mb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'mb', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) make_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())

    def test_run_rb(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "(dryrun) remove_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 0)

    def test_run_rb_nonzero_rc(self):
        # This ensures the architecture reports failure for a real (not
        # dryrun) ``rb`` against a non-empty bucket: the failure message
        # goes to stderr and run() returns a nonzero rc.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': False,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True,
            'page_size': None,
            'is_stream': False
        }
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "remove_bucket failed: %s" % s3_prefix
        self.assertIn(output_str, self.err_output.getvalue())
        self.assertEqual(rc, 1)
# Ejemplo n.º 9
# 0
class S3HandlerTestUpload(unittest.TestCase):
    """
    This class tests the ability to upload objects into an S3 bucket as
    well as multipart uploads.
    """
    def setUp(self):
        # Build a single-part handler and a multipart handler whose
        # threshold is just above the minimum chunk size, then redirect
        # both stdout and stderr into a capture buffer.
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.client = self.session.create_client('s3', 'us-west-2')
        params = {'region': 'us-west-2', 'acl': 'private'}
        self.s3_handler = S3Handler(self.session, params)
        self.chunk_size = minimum_chunk_size()
        self.threshold = self.chunk_size + 1
        self.s3_handler_multi = S3Handler(
            self.session,
            params=params,
            runtime_config=runtime_config(multipart_threshold=self.threshold,
                                          multipart_chunksize=self.chunk_size))
        self.bucket = create_bucket(self.session)
        self.file_creator = FileCreator()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        self.saved_stdout = sys.stdout
        sys.stderr = self.output
        sys.stdout = self.output

    def tearDown(self):
        self.output.close()
        sys.stderr = self.saved_stderr
        # Bug fix: setUp redirected sys.stdout as well, but it was never
        # restored, leaving later code writing to a closed StringIO.
        sys.stdout = self.saved_stdout
        clean_loc_files(self.file_creator)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        """Upload two small files and confirm they appear in the bucket."""
        self.loc_files = make_loc_files(self.file_creator)
        # Confirm there are no objects in the bucket.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(
                    src=self.loc_files[i],
                    dest=self.s3_files[i],
                    operation_name='upload',
                    size=0,
                    client=self.client,
                ))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 2)

    def test_multi_upload(self):
        """Files above the threshold must use the multipart code path."""
        self.loc_files = make_loc_files(self.file_creator, self.threshold + 1)
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(
                    src=self.loc_files[i],
                    dest=self.s3_files[i],
                    size=self.threshold + 1,
                    operation_name='upload',
                    client=self.client,
                ))

        self.s3_handler_multi.call(tasks)

        # Confirm UploadPart was called
        self.assertIn("Completed 4 of 4 part(s)", self.output.getvalue())

        # Confirm the files were uploaded.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 2)
# Ejemplo n.º 10
# 0
class S3HandlerTestUpload(unittest.TestCase):
    """
    This class tests the ability to upload objects into an S3 bucket as
    well as multipart uploads
    """
    def setUp(self):
        """Build single-part and multipart handlers plus captured output."""
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.client = self.session.create_client('s3', 'us-west-2')
        handler_params = {'region': 'us-west-2', 'acl': 'private'}
        self.s3_handler = S3Handler(self.session, handler_params)
        self.chunk_size = minimum_chunk_size()
        self.threshold = self.chunk_size + 1
        # Multipart threshold is just above one chunk so two-chunk files
        # exercise the multipart path.
        multi_config = runtime_config(
            multipart_threshold=self.threshold,
            multipart_chunksize=self.chunk_size)
        self.s3_handler_multi = S3Handler(
            self.session, params=handler_params, runtime_config=multi_config)
        self.bucket = create_bucket(self.session)
        self.file_creator = FileCreator()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt',
        ]
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        self.saved_stdout = sys.stdout
        sys.stderr = self.output
        sys.stdout = self.output

    def tearDown(self):
        """Close the capture buffer, restore the real streams, clean up."""
        self.output.close()
        sys.stderr = self.saved_stderr
        # Bug fix: setUp redirected sys.stdout as well, but it was never
        # restored, leaving later code writing to a closed StringIO.
        sys.stdout = self.saved_stdout
        clean_loc_files(self.file_creator)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        """Upload two small files and confirm they land in the bucket."""
        self.loc_files = make_loc_files(self.file_creator)
        # The bucket must start out empty.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 0)
        # Build one upload task per local file.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = [
            FileInfo(
                src=self.loc_files[idx],
                dest=self.s3_files[idx],
                operation_name='upload', size=0,
                client=self.client,
            )
            for idx in range(len(files))
        ]
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Both objects should now exist in the bucket.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 2)

    def test_multi_upload(self):
        """Files above the threshold must go through the multipart path."""
        self.loc_files = make_loc_files(self.file_creator, self.threshold+1)
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = [
            FileInfo(
                src=self.loc_files[idx],
                dest=self.s3_files[idx],
                size=self.threshold+1,
                operation_name='upload',
                client=self.client,
            )
            for idx in range(len(files))
        ]

        self.s3_handler_multi.call(tasks)

        # The progress line counting parts proves UploadPart was called.
        self.assertIn("Completed 4 of 4 part(s)", self.output.getvalue())

        # Both objects should have been uploaded.
        response = self.client.list_objects(Bucket=self.bucket)
        self.assertEqual(len(response.get('Contents', [])), 2)