Exemplo n.º 1
0
 def __init__(self,
              session,
              params,
              result_queue=None,
              runtime_config=None):
     """Set up queues, merged CLI parameters, and the task executor.

     :param session: session object used by downstream transfer tasks.
     :param params: dict of user-supplied options; recognized keys
         override the defaults below.  ``params['region']`` must be
         present (a ``KeyError`` is raised otherwise).
     :param result_queue: optional queue for task results; a fresh
         ``queue.Queue`` is created when falsy.
     :param runtime_config: optional transfer-tuning mapping; falls
         back to ``RuntimeConfig.defaults()`` when ``None``.
     """
     self.session = session
     self._runtime_config = (
         RuntimeConfig.defaults() if runtime_config is None
         else runtime_config)
     # The write_queue has potential for optimizations, so the constant
     # for maxsize is scoped to this class (as opposed to constants.py)
     # so we have the ability to change this value later.
     self.write_queue = queue.Queue(maxsize=self.MAX_IO_QUEUE_SIZE)
     self.result_queue = result_queue or queue.Queue()
     # Defaults for every option this handler understands.
     self.params = {
         'dryrun': False,
         'quiet': False,
         'acl': None,
         'guess_mime_type': True,
         'sse_c_copy_source': None,
         'sse_c_copy_source_key': None,
         'sse': None,
         'sse_c': None,
         'sse_c_key': None,
         'sse_kms_key_id': None,
         'storage_class': None,
         'website_redirect': None,
         'content_type': None,
         'cache_control': None,
         'content_disposition': None,
         'content_encoding': None,
         'content_language': None,
         'expires': None,
         'grants': None,
         'only_show_errors': False,
         'is_stream': False,
         'paths_type': None,
         'expected_size': None,
         'metadata_directive': None,
         'ignore_glacier_warnings': False
     }
     # 'region' is mandatory; all other caller-supplied values only
     # override keys that already exist in the defaults above.
     self.params['region'] = params['region']
     self.params.update(
         (key, value) for key, value in params.items()
         if key in self.params)
     self.multi_threshold = self._runtime_config['multipart_threshold']
     self.chunksize = self._runtime_config['multipart_chunksize']
     LOGGER.debug("Using a multipart threshold of %s and a part size of %s",
                  self.multi_threshold, self.chunksize)
     self.executor = Executor(
         num_threads=self._runtime_config['max_concurrent_requests'],
         result_queue=self.result_queue,
         quiet=self.params['quiet'],
         only_show_errors=self.params['only_show_errors'],
         max_queue_size=self._runtime_config['max_queue_size'],
         write_queue=self.write_queue)
     self._multipart_uploads = []
     self._multipart_downloads = []
Exemplo n.º 2
0
 def __init__(self,
              session,
              params,
              result_queue=None,
              multi_threshold=MULTI_THRESHOLD,
              chunksize=CHUNKSIZE):
     """Set up queues, merged CLI parameters, and the task executor.

     :param session: session object used by downstream transfer tasks.
     :param params: dict of user-supplied options; recognized keys
         override the defaults below.  ``params['region']`` must be
         present (a ``KeyError`` is raised otherwise).
     :param result_queue: optional queue for task results; a fresh
         ``queue.Queue`` is created when falsy.
     :param multi_threshold: size above which multipart transfers kick in.
     :param chunksize: part size used for multipart transfers.
     """
     self.session = session
     # The write_queue has potential for optimizations, so the constant
     # for maxsize is scoped to this class (as opposed to constants.py)
     # so we have the ability to change this value later.
     self.write_queue = queue.Queue(maxsize=self.MAX_IO_QUEUE_SIZE)
     self.result_queue = result_queue or queue.Queue()
     # Defaults for every option this handler understands.
     self.params = {
         'dryrun': False,
         'quiet': False,
         'acl': None,
         'guess_mime_type': True,
         'sse': False,
         'storage_class': None,
         'website_redirect': None,
         'content_type': None,
         'cache_control': None,
         'content_disposition': None,
         'content_encoding': None,
         'content_language': None,
         'expires': None,
         'grants': None,
         'only_show_errors': False,
         'is_stream': False,
         'paths_type': None,
         'expected_size': None
     }
     # 'region' is mandatory; all other caller-supplied values only
     # override keys that already exist in the defaults above.
     self.params['region'] = params['region']
     self.params.update(
         (key, value) for key, value in params.items()
         if key in self.params)
     self.multi_threshold = multi_threshold
     self.chunksize = chunksize
     self.executor = Executor(
         num_threads=self.EXECUTOR_NUM_THREADS,
         result_queue=self.result_queue,
         quiet=self.params['quiet'],
         only_show_errors=self.params['only_show_errors'],
         max_queue_size=self.MAX_EXECUTOR_QUEUE_SIZE,
         write_queue=self.write_queue)
     self._multipart_uploads = []
     self._multipart_downloads = []
Exemplo n.º 3
0
    def test_shutdown_does_not_hang(self):
        # Regression test: a task that keeps flooding the (tiny, maxsize=1)
        # IO queue after shutdown is initiated must not deadlock the
        # executor's shutdown sequence.
        executor = Executor(2, queue.Queue(), False,
                            10, queue.Queue(maxsize=1))
        with temporary_file('rb+') as f:
            executor.start()

            class FloodTask(object):
                PRIORITY = 10

                def __call__(self):
                    # Enqueue far more IO requests than the queue can hold.
                    for _ in range(50):
                        executor.write_queue.put(
                            IORequest(f.name, 0, b'foobar'))

            executor.submit(FloodTask())
            executor.initiate_shutdown()
            executor.wait_until_shutdown()
            # Every request writes the same bytes at offset 0, so the file
            # must contain exactly one payload after shutdown completes.
            self.assertEqual(open(f.name, 'rb').read(), b'foobar')
Exemplo n.º 4
0
 def __init__(self,
              session,
              params,
              multi_threshold=MULTI_THRESHOLD,
              chunksize=CHUNKSIZE):
     """Set up interrupt events, queues, merged parameters, and the executor.

     :param session: session object used by downstream transfer tasks.
     :param params: dict of user-supplied options; recognized keys
         override the defaults below.  ``params['region']`` must be
         present (a ``KeyError`` is raised otherwise).
     :param multi_threshold: size above which multipart transfers kick in.
     :param chunksize: part size used for multipart transfers.
     """
     self.session = session
     # Events used to signal completion and user interruption to workers.
     self.done = threading.Event()
     self.interrupt = threading.Event()
     self.result_queue = NoBlockQueue()
     # The write_queue has potential for optimizations, so the constant
     # for maxsize is scoped to this class (as opposed to constants.py)
     # so we have the ability to change this value later.
     self.write_queue = NoBlockQueue(self.interrupt,
                                     maxsize=self.MAX_IO_QUEUE_SIZE)
     # Defaults for every option this handler understands.
     self.params = {
         'dryrun': False,
         'quiet': False,
         'acl': None,
         'guess_mime_type': True,
         'sse': False,
         'storage_class': None,
         'website_redirect': None,
         'content_type': None,
         'cache_control': None,
         'content_disposition': None,
         'content_encoding': None,
         'content_language': None,
         'expires': None,
         'grants': None
     }
     # 'region' is mandatory; all other caller-supplied values only
     # override keys that already exist in the defaults above.
     self.params['region'] = params['region']
     self.params.update(
         (key, value) for key, value in params.items()
         if key in self.params)
     self.multi_threshold = multi_threshold
     self.chunksize = chunksize
     self.executor = Executor(done=self.done,
                              num_threads=NUM_THREADS,
                              result_queue=self.result_queue,
                              quiet=self.params['quiet'],
                              interrupt=self.interrupt,
                              max_queue_size=MAX_QUEUE_SIZE,
                              write_queue=self.write_queue)
     self._multipart_uploads = []
     self._multipart_downloads = []
Exemplo n.º 5
0
 def __init__(self,
              session,
              params,
              result_queue=None,
              runtime_config=None):
     """Extend the base handler with an IO write queue and an executor.

     :param session: session object forwarded to the base class.
     :param params: dict of user-supplied options, forwarded as-is.
     :param result_queue: optional queue for task results.
     :param runtime_config: optional transfer-tuning mapping; the base
         class stores it as ``self._runtime_config``.
     """
     super(S3Handler, self).__init__(session, params, result_queue,
                                     runtime_config)
     # The write_queue has potential for optimizations, so the constant
     # for maxsize is scoped to this class (as opposed to constants.py)
     # so we have the ability to change this value later.
     self.write_queue = queue.Queue(maxsize=self.MAX_IO_QUEUE_SIZE)
     cfg = self._runtime_config
     self.multi_threshold = cfg['multipart_threshold']
     self.chunksize = cfg['multipart_chunksize']
     LOGGER.debug("Using a multipart threshold of %s and a part size of %s",
                  self.multi_threshold, self.chunksize)
     self.executor = Executor(
         num_threads=cfg['max_concurrent_requests'],
         result_queue=self.result_queue,
         quiet=self.params['quiet'],
         only_show_errors=self.params['only_show_errors'],
         max_queue_size=cfg['max_queue_size'],
         write_queue=self.write_queue)
     # Bookkeeping for in-flight multipart operations.
     self._multipart_uploads = []
     self._multipart_downloads = []