Example #1
0
 def test_run_sync(self):
     # Dry-run ``sync``: exercises the full command architecture to make
     # sure every component is wired together correctly end to end.
     source_dir = self.loc_files[3]
     uploaded_file = self.loc_files[0]
     expected_src = os.path.relpath(uploaded_file)
     expected_dest = 's3://' + self.bucket + '/' + 'text1.txt'
     sync_params = {
         'dir_op': True,
         'dryrun': True,
         'quiet': False,
         'src': source_dir,
         'dest': 's3://' + self.bucket + '/',
         'filters': [['--include', '*']],
         'paths_type': 'locals3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True,
         'page_size': None,
         'is_stream': False,
         'source_region': 'us-west-2',
     }
     self.parsed_responses = [
         {"CommonPrefixes": [], "Contents": [
             {"Key": "text1.txt", "Size": 100,
              "LastModified": "2014-01-09T20:45:49.000Z"}]},
         {"CommonPrefixes": [], "Contents": []},
     ]
     architecture = CommandArchitecture(
         self.session, 'sync', sync_params, RuntimeConfig().build_config())
     architecture.create_instructions()
     architecture.set_clients()
     self.patch_make_request()
     architecture.run()
     expected_line = "(dryrun) upload: %s to %s" % (
         expected_src, expected_dest)
     self.assertIn(expected_line, self.output.getvalue())
 def test_run_remove(self):
     # Dry-run ``rm``: exercises the full command architecture to make
     # sure every component is wired together correctly end to end.
     target = 's3://' + self.bucket + '/' + 'text1.txt'
     rm_params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': target, 'dest': target,
                  'filters': [['--include', '*']],
                  'paths_type': 's3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
     self.parsed_responses = [
         {"ETag": "abcd", "ContentLength": 100,
          "LastModified": "2014-01-09T20:45:49.000Z"}]
     architecture = CommandArchitecture(
         self.session, 'rm', rm_params, RuntimeConfig().build_config())
     architecture.set_clients()
     architecture.create_instructions()
     self.patch_make_request()
     architecture.run()
     self.assertIn("(dryrun) delete: %s" % target,
                   self.output.getvalue())
Example #3
0
 def test_error_on_same_line_as_status(self):
     # Upload into a bucket that does not exist so the request fails.
     missing_bucket_uri = (
         's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt')
     source = self.loc_files[0]
     rel_source = os.path.relpath(source)
     cp_params = {
         'dir_op': False, 'dryrun': False, 'quiet': False,
         'src': source, 'dest': missing_bucket_uri,
         'filters': [['--include', '*']],
         'paths_type': 'locals3', 'region': 'us-east-1',
         'endpoint_url': None, 'verify_ssl': None,
         'follow_symlinks': True, 'page_size': None,
         'is_stream': False, 'source_region': None, 'metadata': None,
     }
     self.http_response.status_code = 400
     self.parsed_responses = [
         {'Error': {'Code': 'BucketNotExists',
                    'Message': 'Bucket does not exist'}}]
     architecture = CommandArchitecture(
         self.session, 'cp', cp_params, RuntimeConfig().build_config())
     architecture.set_clients()
     architecture.create_instructions()
     self.patch_make_request()
     architecture.run()
     # The error message must appear on the *same* line as the
     # "upload failed" status so failures are easier to track.
     expected = "upload failed: %s to %s An error" % (
         rel_source, missing_bucket_uri)
     self.assertIn(expected, self.err_output.getvalue())
 def test_run_cp_put(self):
     # Dry-run ``cp`` put: exercises the full command architecture to
     # make sure every component is wired together correctly end to end.
     dest_uri = 's3://' + self.bucket + '/' + 'text1.txt'
     source = self.loc_files[0]
     rel_source = os.path.relpath(source)
     cp_params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': source, 'dest': dest_uri,
                  'filters': [['--include', '*']],
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None,
                  'metadata': None}
     architecture = CommandArchitecture(
         self.session, 'cp', cp_params, RuntimeConfig().build_config())
     architecture.set_clients()
     architecture.create_instructions()
     self.patch_make_request()
     architecture.run()
     expected_line = "(dryrun) upload: %s to %s" % (rel_source, dest_uri)
     self.assertIn(expected_line, self.output.getvalue())
Example #5
0
 def __init__(self, session, params, result_queue=None,
              runtime_config=None):
     """Set up queues, normalized parameters, and the request executor.

     ``params`` supplies caller overrides; any key not listed in the
     defaults below is ignored (except ``region``, copied explicitly).
     """
     self.session = session
     self._runtime_config = (
         RuntimeConfig.defaults() if runtime_config is None
         else runtime_config)
     # The write_queue has potential for optimizations, so the maxsize
     # constant is scoped to this class (as opposed to constants.py),
     # giving us the ability to change this value later.
     self.write_queue = queue.Queue(maxsize=self.MAX_IO_QUEUE_SIZE)
     self.result_queue = result_queue or queue.Queue()
     # Defaults for every option this handler understands; matching
     # keys supplied by the caller override these below.
     self.params = {
         'dryrun': False, 'quiet': False, 'acl': None,
         'guess_mime_type': True, 'sse_c_copy_source': None,
         'sse_c_copy_source_key': None, 'sse': None, 'sse_c': None,
         'sse_c_key': None, 'sse_kms_key_id': None,
         'storage_class': None, 'website_redirect': None,
         'content_type': None, 'cache_control': None,
         'content_disposition': None, 'content_encoding': None,
         'content_language': None, 'expires': None, 'grants': None,
         'only_show_errors': False, 'is_stream': False,
         'paths_type': None, 'expected_size': None,
         'metadata_directive': None, 'ignore_glacier_warnings': False,
     }
     self.params['region'] = params['region']
     for key, value in params.items():
         if key in self.params:
             self.params[key] = value
     self.multi_threshold = self._runtime_config['multipart_threshold']
     self.chunksize = self._runtime_config['multipart_chunksize']
     LOGGER.debug("Using a multipart threshold of %s and a part size of %s",
                  self.multi_threshold, self.chunksize)
     self.executor = Executor(
         num_threads=self._runtime_config['max_concurrent_requests'],
         result_queue=self.result_queue,
         quiet=self.params['quiet'],
         only_show_errors=self.params['only_show_errors'],
         max_queue_size=self._runtime_config['max_queue_size'],
         write_queue=self.write_queue)
     # Bookkeeping for in-flight multipart transfers.
     self._multipart_uploads = []
     self._multipart_downloads = []
Example #6
0
 def __init__(self, session, params, result_queue=None,
              runtime_config=None):
     # Streaming deliberately does not use RuntimeConfig.defaults():
     # it builds its own config with a smaller queue and thread pool so
     # that it does not consume large amounts of memory.
     if runtime_config is None:
         runtime_config = RuntimeConfig().build_config(
             max_concurrent_requests=self.EXECUTOR_NUM_THREADS,
             max_queue_size=self.MAX_EXECUTOR_QUEUE_SIZE)
     super(S3StreamHandler, self).__init__(
         session, params, result_queue, runtime_config)
Example #7
0
    def __init__(self, session, params, result_queue=None,
                 runtime_config=None):
        """Record the session, result queue, and normalized parameters.

        ``params`` supplies caller overrides; any key not listed in the
        defaults below is ignored (except ``region``, copied explicitly).
        """
        self.session = session
        self._runtime_config = (
            RuntimeConfig.defaults() if runtime_config is None
            else runtime_config)
        self.result_queue = result_queue or queue.Queue()

        # Defaults for every option this class understands; matching
        # keys supplied by the caller override these below.
        self.params = {
            'dryrun': False, 'quiet': False, 'acl': None,
            'guess_mime_type': True, 'sse_c_copy_source': None,
            'sse_c_copy_source_key': None, 'sse': None, 'sse_c': None,
            'sse_c_key': None, 'sse_kms_key_id': None,
            'storage_class': None, 'website_redirect': None,
            'content_type': None, 'cache_control': None,
            'content_disposition': None, 'content_encoding': None,
            'content_language': None, 'expires': None, 'grants': None,
            'only_show_errors': False, 'is_stream': False,
            'paths_type': None, 'expected_size': None, 'metadata': None,
            'metadata_directive': None, 'ignore_glacier_warnings': False,
            'force_glacier_transfer': False,
        }
        self.params['region'] = params['region']
        for key, value in params.items():
            if key in self.params:
                self.params[key] = value
Example #8
0
 def __init__(self, session, params, result_queue=None,
              runtime_config=None):
     """Set up queues, normalized parameters, and the request executor.

     ``params`` supplies caller overrides; any key not listed in the
     defaults below is ignored (except ``region``, copied explicitly).
     """
     self.session = session
     self._runtime_config = (
         RuntimeConfig.defaults() if runtime_config is None
         else runtime_config)
     # The write_queue has potential for optimizations, so the maxsize
     # constant is scoped to this class (as opposed to constants.py),
     # giving us the ability to change this value later.
     self.write_queue = queue.Queue(maxsize=self.MAX_IO_QUEUE_SIZE)
     self.result_queue = result_queue or queue.Queue()
     # Defaults for every option this handler understands; matching
     # keys supplied by the caller override these below.
     self.params = {
         'dryrun': False,
         'quiet': False,
         'acl': None,
         'guess_mime_type': True,
         'sse_c_copy_source': None,
         'sse_c_copy_source_key': None,
         'sse': None,
         'sse_c': None,
         'sse_c_key': None,
         'sse_kms_key_id': None,
         'storage_class': None,
         'website_redirect': None,
         'content_type': None,
         'cache_control': None,
         'content_disposition': None,
         'content_encoding': None,
         'content_language': None,
         'expires': None,
         'grants': None,
         'only_show_errors': False,
         'is_stream': False,
         'paths_type': None,
         'expected_size': None,
         'metadata': None,
         'metadata_directive': None,
         'ignore_glacier_warnings': False,
         'force_glacier_transfer': False,
     }
     self.params['region'] = params['region']
     for key, value in params.items():
         if key in self.params:
             self.params[key] = value
     self.multi_threshold = self._runtime_config['multipart_threshold']
     self.chunksize = self._runtime_config['multipart_chunksize']
     LOGGER.debug("Using a multipart threshold of %s and a part size of %s",
                  self.multi_threshold, self.chunksize)
     self.executor = Executor(
         num_threads=self._runtime_config['max_concurrent_requests'],
         result_queue=self.result_queue,
         quiet=self.params['quiet'],
         only_show_errors=self.params['only_show_errors'],
         max_queue_size=self._runtime_config['max_queue_size'],
         write_queue=self.write_queue)
     # Bookkeeping for in-flight multipart transfers.
     self._multipart_uploads = []
     self._multipart_downloads = []
Example #9
0
def runtime_config(**kwargs):
    """Build and return a runtime config with the given overrides."""
    builder = RuntimeConfig()
    return builder.build_config(**kwargs)