def test_set_client_with_source(self):
     session = Mock()
     cmd_arc = CommandArchitecture(session, 'sync',
                                   {'region': 'us-west-1',
                                    'endpoint_url': None,
                                    'verify_ssl': None,
                                    'paths_type': 's3s3',
                                    'source_region': 'us-west-2'})
     cmd_arc.set_clients()
     create_client_args = session.create_client.call_args_list
     # Assert that three clients were created
     self.assertEqual(len(create_client_args), 3)
     self.assertEqual(
         create_client_args[0][1],
         {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
          'config': None}
     )
     self.assertEqual(
         create_client_args[1][1],
         {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
          'config': None}
     )
     # Assert that the source client is then overridden with the one
     # needed for the source region.
     self.assertEqual(
         create_client_args[2][1],
         {'region_name': 'us-west-2', 'verify': None, 'endpoint_url': None,
          'config': None}
     )
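
# Standalone sketch, not part of the test above: it only illustrates the
# unittest.mock behaviour the assertions rely on.  Each entry of
# call_args_list is a mock.call object, where index [0] holds the positional
# arguments and index [1] holds the keyword arguments.
from unittest.mock import Mock

_m = Mock()
_m.create_client('s3', region_name='us-west-1')
_args, _kwargs = _m.create_client.call_args_list[0]
assert _args == ('s3',)
assert _kwargs == {'region_name': 'us-west-1'}
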
    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check that the default sync strategies are overridden if a plugin
        # returns its own sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'],
            mock_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'],
            mock_not_at_dest_sync_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'],
            mock_not_at_src_sync_strategy
        )
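
# Minimal sketch -- an assumption about the shape of the logic, not the
# awscli implementation -- of how the (handler, strategy) tuples returned by
# session.emit() above could become the dictionary the assertions check:
# each non-None override is keyed by its sync_type plus '_sync_strategy'.
def _collect_sync_strategies(responses):
    strategies = {}
    for _handler, override in responses or []:
        if override is not None:
            strategies[override.sync_type + '_sync_strategy'] = override
    return strategies
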
 def test_set_endpoint_no_source(self):
     cmd_arc = CommandArchitecture(self.session, 'sync',
                                   {'region': 'us-west-1',
                                    'endpoint_url': None,
                                    'verify_ssl': None,
                                    'source_region': None})
     cmd_arc.set_endpoints()
     endpoint = cmd_arc._endpoint
     source_endpoint = cmd_arc._source_endpoint
     self.assertEqual(endpoint.region_name, 'us-west-1')
     self.assertEqual(source_endpoint.region_name, 'us-west-1')
 def test_run_mb(self):
     # This ensures that the architecture sets up correctly for a ``mb``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1', 'endpoint_url': None,
               'verify_ssl': None, 'follow_symlinks': True}
     cmd_arc = CommandArchitecture(self.session, 'mb', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) make_bucket: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
 def test_run_remove(self):
     # This ensures that the architecture sets up correctly for a ``rm``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None}
     cmd_arc = CommandArchitecture(self.session, 'rm', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
     self.assertIn(output_str, self.output.getvalue())
    def test_set_sigv4_clients_with_sse_kms(self):
        session = Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync',
            {'region': 'us-west-1', 'endpoint_url': None, 'verify_ssl': None,
             'source_region': None, 'sse': 'aws:kms'})
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        create_client_call = session.create_client.call_args_list[0]
        create_source_client_call = session.create_client.call_args_list[1]

        # Make sure that both clients are using sigv4 if kms is enabled.
        self.assertEqual(
            create_client_call[1]['config'].signature_version, 's3v4')
        self.assertEqual(
            create_source_client_call[1]['config'].signature_version, 's3v4')
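
# Minimal sketch, assuming only the public botocore.config API; the helper
# below is illustrative, not awscli code.  SSE-KMS requests require Signature
# Version 4, which is why the assertions above expect signature_version to be
# 's3v4' on both clients.
from botocore.config import Config

def _client_config_for_sse(sse):
    # Hypothetical helper: force SigV4 when 'aws:kms' encryption is requested.
    if sse == 'aws:kms':
        return Config(signature_version='s3v4')
    return None
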
 def test_run_sync(self):
     # This ensures that the architecture sets up correctly for a ``sync``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     s3_prefix = 's3://' + self.bucket + '/'
     local_dir = self.loc_files[3]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': local_dir, 'dest': s3_prefix, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False, 'source_region': 'us-west-2'}
     self.parsed_responses = [
         {"CommonPrefixes": [], "Contents": [
             {"Key": "text1.txt", "Size": 100,
              "LastModified": "2014-01-09T20:45:49.000Z"}]},
         {"CommonPrefixes": [], "Contents": []}]
     config = RuntimeConfig().build_config()
     cmd_arc = CommandArchitecture(self.session, 'sync', params, config)
     cmd_arc.create_instructions()
     cmd_arc.set_clients()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
 def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': False, 'quiet': False,
               'src': local_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False, 'source_region': None, 'metadata': None}
     self.http_response.status_code = 400
     self.parsed_responses = [{'Error': {
                               'Code': 'BucketNotExists',
                               'Message': 'Bucket does not exist'}}]
     cmd_arc = CommandArchitecture(
         self.session, 'cp', params, RuntimeConfig().build_config())
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = (
         "upload failed: %s to %s An error" % (
             rel_local_file, s3_file))
     self.assertIn(output_str, self.err_output.getvalue())
 def test_run_cp_get(self):
     # This ensures that the architecture sets up correctly for a ``cp`` get
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': local_file, 'filters': filters,
               'paths_type': 's3local', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None}
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
     self.assertIn(output_str, self.output.getvalue())
 def test_run_rb_nonzero_rc(self):
     # This ensures that the architecture sets up correctly for a ``rb``
     # command and that a failed remove_bucket produces a nonzero return
     # code; all of the components need to be wired correctly for it to work.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': False, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1', 'endpoint_url': None,
               'verify_ssl': None, 'follow_symlinks': True,
               'page_size': None, 'is_stream': False}
     self.http_response.status_code = 400
     cmd_arc = CommandArchitecture(self.session, 'rb', params)
     cmd_arc.create_instructions()
     self.patch_make_request()
     rc = cmd_arc.run()
     output_str = "remove_bucket failed: %s" % s3_prefix
     self.assertIn(output_str, self.err_output.getvalue())
     self.assertEqual(rc, 1)
 def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': False, 'quiet': False,
               'src': local_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None}
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = (
         "upload failed: %s to %s Error: Bucket does not exist\n" % (
             rel_local_file, s3_file))
     self.assertIn(output_str, self.output.getvalue())
 def test_set_client_no_source(self):
     session = Mock()
     cmd_arc = CommandArchitecture(session, 'sync',
                                   {'region': 'us-west-1',
                                    'endpoint_url': None,
                                    'verify_ssl': None,
                                    'source_region': None})
     cmd_arc.set_clients()
     self.assertEqual(session.create_client.call_count, 2)
     self.assertEqual(
         session.create_client.call_args_list[0],
         mock.call(
          's3', region_name='us-west-1', endpoint_url=None, verify=None)
     )
     # A client created with the same arguments as the first should be used
     # for the source client since no source region was provided.
     self.assertEqual(
         session.create_client.call_args_list[1],
         mock.call(
          's3', region_name='us-west-1', endpoint_url=None, verify=None)
     )
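
# Standalone sketch, not part of the test above: mock.call records both the
# positional and keyword arguments of an invocation, so comparing an entry of
# call_args_list against mock.call(...) checks all of them at once.
from unittest import mock

_session = mock.Mock()
_session.create_client('s3', region_name='us-west-1', endpoint_url=None,
                       verify=None)
assert (_session.create_client.call_args_list[0] ==
        mock.call('s3', region_name='us-west-1', endpoint_url=None,
                  verify=None))
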
    def test_create_instructions(self):
        """
        This tests to make sure the instructions for each command are
        generated properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])
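
# Minimal sketch -- an illustrative assumption, not the awscli code -- of how
# the instruction lists asserted above could be assembled: start from a
# per-command base pipeline and splice 'filters' in right after
# 'file_generator' whenever filters are supplied.
def _build_instructions(cmd, params):
    base = {'cp': ['file_generator', 'file_info_builder', 's3_handler'],
            'mv': ['file_generator', 'file_info_builder', 's3_handler'],
            'rm': ['file_generator', 'file_info_builder', 's3_handler'],
            'sync': ['file_generator', 'comparator',
                     'file_info_builder', 's3_handler']}
    instructions = list(base[cmd])
    if params.get('filters'):
        instructions.insert(instructions.index('file_generator') + 1,
                            'filters')
    return instructions
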
 def test_choose_sync_strategy_default(self):
     session = Mock()
     cmd_arc = CommandArchitecture(session, 'sync',
                                   {'region': 'us-east-1',
                                    'endpoint_url': None,
                                    'verify_ssl': None})
     # Check that if no plugins return a sync strategy, the
     # default strategies are used.
     session.emit.return_value = None
     sync_strategies = cmd_arc.choose_sync_strategies()
     self.assertEqual(
         sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
         SizeAndLastModifiedSync
     )
     self.assertEqual(
         sync_strategies['file_not_at_dest_sync_strategy'].__class__,
         MissingFileSync
     )
     self.assertEqual(
         sync_strategies['file_not_at_src_sync_strategy'].__class__,
         NeverSync
     )
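
# Note (an inference from the class names above, not a statement of the
# awscli internals): by default, files present at both source and destination
# are compared by size and last-modified time, files missing at the
# destination are always synced, and files present only at the destination
# are never synced.
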
 def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': False,
         'quiet': False,
         'src': local_file,
         'dest': s3_file,
         'filters': filters,
         'paths_type': 'locals3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True,
         'page_size': None,
         'is_stream': False,
         'source_region': None,
         'metadata': None
     }
     self.http_response.status_code = 400
     self.parsed_responses = [{
         'Error': {
             'Code': 'BucketNotExists',
             'Message': 'Bucket does not exist'
         }
     }]
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = ("upload failed: %s to %s A client error" %
                   (rel_local_file, s3_file))
     self.assertIn(output_str, self.err_output.getvalue())
 def test_run_mb(self):
     # This ensures that the architecture sets up correctly for a ``mb``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1', 'endpoint_url': None,
               'verify_ssl': None, 'follow_symlinks': True,
               'page_size': None, 'is_stream': False}
     cmd_arc = CommandArchitecture(self.session, 'mb', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) make_bucket: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
 def test_run_mv(self):
     # This ensures that the architecture sets up correctly for a ``mv``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3s3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False, 'source_region': None,
               'is_move': True}
     self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                               "LastModified": "2014-01-09T20:45:49.000Z"}]
     config = RuntimeConfig().build_config()
     cmd_arc = CommandArchitecture(self.session, 'mv', params, config)
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
    def test_create_instructions(self):
        """
        This tests to make sure the instructions for each command are
        generated properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {
            'cp': ['file_generator', 's3_handler'],
            'mv': ['file_generator', 's3_handler'],
            'rm': ['file_generator', 's3_handler'],
            'sync': ['file_generator', 'comparator', 's3_handler'],
            'mb': ['s3_handler'],
            'rb': ['s3_handler']
        }

        params = {
            'filters': True,
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        }
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd, {
                'region': 'us-east-1',
                'endpoint_url': None,
                'verify_ssl': None
            })
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions,
                         ['file_generator', 'filters', 's3_handler'])
 def test_run_remove(self):
     # This ensures that the architecture sets up correctly for a ``rm``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False}
     cmd_arc = CommandArchitecture(self.session, 'rm', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
     self.assertIn(output_str, self.output.getvalue())
 def test_run_remove(self):
     # This ensures that the architecture sets up correctly for a ``rm``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False, 'source_region': None}
     self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                               "LastModified": "2014-01-09T20:45:49.000Z"}]
     cmd_arc = CommandArchitecture(self.session, 'rm', params)
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
     self.assertIn(output_str, self.output.getvalue())
 def test_run_cp_put(self):
     # This ensures that the architecture sets up correctly for a ``cp`` put
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': local_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False}
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())