def test_run_remove(self):
    # Verify the architecture wires up correctly for an ``rm`` command.
    # This is only a dry run, but every component must be connected for
    # it to work.
    # NOTE(review): another ``test_run_remove`` is defined later in this
    # class and shadows this method, so the runner never executes this
    # copy — confirm which version is intended.
    s3_file = f's3://{self.bucket}/text1.txt'
    params = dict(
        dir_op=False,
        dryrun=True,
        quiet=False,
        src=s3_file,
        dest=s3_file,
        filters=[['--include', '*']],
        paths_type='s3',
        region='us-east-1',
        endpoint_url=None,
        verify_ssl=None,
        follow_symlinks=True,
        page_size=None,
        is_stream=False,
        source_region=None,
    )
    # Single HeadObject-style response for the object being removed.
    self.parsed_responses = [{
        "ETag": "abcd",
        "ContentLength": 100,
        "LastModified": "2014-01-09T20:45:49.000Z",
    }]
    cmd_arc = CommandArchitecture(self.session, 'rm', params)
    cmd_arc.set_clients()
    cmd_arc.create_instructions()
    self.patch_make_request()
    cmd_arc.run()
    expected = f"(dryrun) delete: {s3_file}"
    self.assertIn(expected, self.output.getvalue())
def test_run_sync(self):
    # Verify the architecture wires up correctly for a ``sync`` command.
    # This is only a dry run, but every component must be connected for
    # it to work.
    s3_file = f's3://{self.bucket}/text1.txt'
    local_file = self.loc_files[0]
    s3_prefix = f's3://{self.bucket}/'
    local_dir = self.loc_files[3]
    rel_local_file = os.path.relpath(local_file)
    params = dict(
        dir_op=True,
        dryrun=True,
        quiet=False,
        src=local_dir,
        dest=s3_prefix,
        filters=[['--include', '*']],
        paths_type='locals3',
        region='us-east-1',
        endpoint_url=None,
        verify_ssl=None,
        follow_symlinks=True,
        page_size=None,
        is_stream=False,
        source_region='us-west-2',
    )
    # First listing page contains one remote object; the second page is
    # empty, ending pagination.
    self.parsed_responses = [
        {"CommonPrefixes": [],
         "Contents": [{"Key": "text1.txt",
                       "Size": 100,
                       "LastModified": "2014-01-09T20:45:49.000Z"}]},
        {"CommonPrefixes": [], "Contents": []},
    ]
    runtime_config = RuntimeConfig().build_config()
    cmd_arc = CommandArchitecture(
        self.session, 'sync', params, runtime_config)
    cmd_arc.create_instructions()
    cmd_arc.set_clients()
    self.patch_make_request()
    cmd_arc.run()
    expected = f"(dryrun) upload: {rel_local_file} to {s3_file}"
    self.assertIn(expected, self.output.getvalue())
def test_set_client_no_source(self):
    # With no source region configured, set_clients() should create two
    # identically-configured clients.
    # NOTE(review): a later ``test_set_client_no_source`` in this class
    # shadows this definition (and expects no ``config=`` kwarg), so
    # this copy never runs — confirm which version matches the current
    # implementation.
    session = Mock()
    cmd_arc = CommandArchitecture(
        session, 'sync',
        {'region': 'us-west-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'source_region': None})
    cmd_arc.set_clients()
    self.assertEqual(session.create_client.call_count, 2)
    expected_call = mock.call(
        's3', region_name='us-west-1', endpoint_url=None, verify=None,
        config=None)
    self.assertEqual(
        session.create_client.call_args_list[0], expected_call)
    # Since no source region was provided, the source client is created
    # with exactly the same arguments as the first client.
    self.assertEqual(
        session.create_client.call_args_list[1], expected_call)
def test_set_client_with_source(self):
    # When a source region is supplied for an s3-to-s3 operation, a
    # third client is created to override the source client.
    session = Mock()
    cmd_arc = CommandArchitecture(
        session, 'sync',
        {'region': 'us-west-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'paths_type': 's3s3',
         'source_region': 'us-west-2'})
    cmd_arc.set_clients()
    create_client_kwargs = [
        client_call[1]
        for client_call in session.create_client.call_args_list]
    # Three clients are created: the destination client, a default
    # source client, and the source-region override.
    self.assertEqual(len(create_client_kwargs), 3)
    base_kwargs = {'region_name': 'us-west-1', 'verify': None,
                   'endpoint_url': None, 'config': None}
    self.assertEqual(create_client_kwargs[0], base_kwargs)
    self.assertEqual(create_client_kwargs[1], base_kwargs)
    # The final client replaces the source client, using the region
    # requested via ``source_region``.
    self.assertEqual(
        create_client_kwargs[2],
        {'region_name': 'us-west-2', 'verify': None,
         'endpoint_url': None, 'config': None})
def test_error_on_same_line_as_status(self):
    s3_file = 's3://bucket-does-not-exist/text1.txt'
    local_file = self.loc_files[0]
    rel_local_file = os.path.relpath(local_file)
    params = dict(
        dir_op=False,
        dryrun=False,
        quiet=False,
        src=local_file,
        dest=s3_file,
        filters=[['--include', '*']],
        paths_type='locals3',
        region='us-east-1',
        endpoint_url=None,
        verify_ssl=None,
        follow_symlinks=True,
        page_size=None,
        is_stream=False,
        source_region=None,
        metadata=None,
    )
    # Simulate the service rejecting the upload with a client error.
    self.http_response.status_code = 400
    self.parsed_responses = [
        {'Error': {'Code': 'BucketNotExists',
                   'Message': 'Bucket does not exist'}}]
    cmd_arc = CommandArchitecture(
        self.session, 'cp', params, RuntimeConfig().build_config())
    cmd_arc.set_clients()
    cmd_arc.create_instructions()
    self.patch_make_request()
    cmd_arc.run()
    # The error message must appear on the *same* line as the
    # "upload failed" status, to make failures easier to track.
    expected = f"upload failed: {rel_local_file} to {s3_file} An error"
    self.assertIn(expected, self.err_output.getvalue())
def test_run_cp_put(self):
    # Verify the architecture wires up correctly for a ``cp`` put
    # (local-to-s3) command. This is only a dry run, but every component
    # must be connected for it to work.
    # NOTE(review): another ``test_run_cp_put`` is defined later in this
    # class and shadows this method, so the runner never executes this
    # copy — confirm which version is intended.
    s3_file = f's3://{self.bucket}/text1.txt'
    local_file = self.loc_files[0]
    rel_local_file = os.path.relpath(local_file)
    params = dict(
        dir_op=False,
        dryrun=True,
        quiet=False,
        src=local_file,
        dest=s3_file,
        filters=[['--include', '*']],
        paths_type='locals3',
        region='us-east-1',
        endpoint_url=None,
        verify_ssl=None,
        follow_symlinks=True,
        page_size=None,
        is_stream=False,
        source_region=None,
        metadata=None,
    )
    runtime_config = RuntimeConfig().build_config()
    cmd_arc = CommandArchitecture(
        self.session, 'cp', params, runtime_config)
    cmd_arc.set_clients()
    cmd_arc.create_instructions()
    self.patch_make_request()
    cmd_arc.run()
    expected = f"(dryrun) upload: {rel_local_file} to {s3_file}"
    self.assertIn(expected, self.output.getvalue())
def test_set_sigv4_clients_with_sse_kms(self):
    # When SSE-KMS is requested, both the client and the source client
    # must be configured for SigV4 signing.
    session = Mock()
    cmd_arc = CommandArchitecture(
        session, 'sync',
        {'region': 'us-west-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'source_region': None,
         'sse': 'aws:kms'})
    cmd_arc.set_clients()
    self.assertEqual(session.create_client.call_count, 2)
    # Check every created client (destination and source) uses sigv4.
    for client_call in session.create_client.call_args_list:
        self.assertEqual(
            client_call[1]['config'].signature_version, 's3v4')
def test_run_remove(self):
    # Verify the architecture wires up correctly for an ``rm`` command.
    # This is only a dry run, but every component must be connected for
    # it to work.
    # NOTE(review): this redefinition shadows an earlier, identical
    # ``test_run_remove`` in this class; consider removing the
    # duplicate.
    s3_file = f's3://{self.bucket}/text1.txt'
    filter_args = [['--include', '*']]
    params = {
        'dir_op': False,
        'dryrun': True,
        'quiet': False,
        'src': s3_file,
        'dest': s3_file,
        'filters': filter_args,
        'paths_type': 's3',
        'region': 'us-east-1',
        'endpoint_url': None,
        'verify_ssl': None,
        'follow_symlinks': True,
        'page_size': None,
        'is_stream': False,
        'source_region': None,
    }
    # One HeadObject-style response describing the object to delete.
    self.parsed_responses = [
        {"ETag": "abcd",
         "ContentLength": 100,
         "LastModified": "2014-01-09T20:45:49.000Z"}]
    cmd_arc = CommandArchitecture(self.session, 'rm', params)
    cmd_arc.set_clients()
    cmd_arc.create_instructions()
    self.patch_make_request()
    cmd_arc.run()
    self.assertIn(
        f"(dryrun) delete: {s3_file}", self.output.getvalue())
def test_set_client_no_source(self):
    # With no source region configured, set_clients() should create two
    # identically-configured clients.
    # NOTE(review): this redefinition shadows an earlier
    # ``test_set_client_no_source`` in this class whose expectations
    # include a ``config=None`` kwarg — only this copy runs; confirm
    # which version matches the current implementation.
    session = Mock()
    cmd_arc = CommandArchitecture(
        session, 'sync',
        {'region': 'us-west-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'source_region': None})
    cmd_arc.set_clients()
    self.assertEqual(session.create_client.call_count, 2)
    expected_call = mock.call(
        's3', region_name='us-west-1', endpoint_url=None, verify=None)
    self.assertEqual(
        session.create_client.call_args_list[0], expected_call)
    # Since no source region was provided, the source client is created
    # with exactly the same arguments as the first client.
    self.assertEqual(
        session.create_client.call_args_list[1], expected_call)
def test_run_cp_put(self):
    # Verify the architecture wires up correctly for a ``cp`` put
    # (local-to-s3) command. This is only a dry run, but every component
    # must be connected for it to work.
    # NOTE(review): this redefinition shadows an earlier, identical
    # ``test_run_cp_put`` in this class; consider removing the
    # duplicate.
    s3_file = f's3://{self.bucket}/text1.txt'
    local_file = self.loc_files[0]
    rel_local_file = os.path.relpath(local_file)
    filter_args = [['--include', '*']]
    params = {
        'dir_op': False,
        'dryrun': True,
        'quiet': False,
        'src': local_file,
        'dest': s3_file,
        'filters': filter_args,
        'paths_type': 'locals3',
        'region': 'us-east-1',
        'endpoint_url': None,
        'verify_ssl': None,
        'follow_symlinks': True,
        'page_size': None,
        'is_stream': False,
        'source_region': None,
        'metadata': None,
    }
    cmd_arc = CommandArchitecture(
        self.session, 'cp', params, RuntimeConfig().build_config())
    cmd_arc.set_clients()
    cmd_arc.create_instructions()
    self.patch_make_request()
    cmd_arc.run()
    self.assertIn(
        f"(dryrun) upload: {rel_local_file} to {s3_file}",
        self.output.getvalue())