Exemplo n.º 1
0
    def setUp(self):
        """Build a subscriber with a fixed desired mtime and a fake future."""
        super(TestProvideLastModifiedTimeSubscriber, self).setUp()
        self.filename = self.file_creator.create_file('myfile', 'my contents')
        # 2016-01-18 07:00:00 in the local timezone.
        self.desired_utime = datetime.datetime(
            2016, 1, 18, 7, 0, 0, tzinfo=tzlocal())
        self.result_queue = queue.Queue()
        self.subscriber = ProvideLastModifiedTimeSubscriber(
            self.desired_utime, self.result_queue)

        # Fake transfer future pointing at the file created above.
        fake_call_args = FakeTransferFutureCallArgs(fileobj=self.filename)
        fake_meta = FakeTransferFutureMeta(call_args=fake_call_args)
        self.future = FakeTransferFuture(meta=fake_meta)
Exemplo n.º 2
0
    def __init__(self,
                 session,
                 params,
                 result_queue=None,
                 runtime_config=None):
        """Store the session, runtime config, result queue, and CLI params.

        Only keys from the default parameter set below (plus the required
        ``region``) are copied out of ``params``; unknown keys are ignored.
        """
        self.session = session
        # Fall back to the default runtime configuration when none is given.
        self._runtime_config = (RuntimeConfig.defaults()
                                if runtime_config is None else runtime_config)
        self.result_queue = result_queue if result_queue else queue.Queue()

        # Defaults for every recognized CLI parameter; any of these present
        # in ``params`` overrides the default value.
        defaults = {
            'dryrun': False,
            'quiet': False,
            'acl': None,
            'guess_mime_type': True,
            'sse_c_copy_source': None,
            'sse_c_copy_source_key': None,
            'sse': None,
            'sse_c': None,
            'sse_c_key': None,
            'sse_kms_key_id': None,
            'storage_class': None,
            'website_redirect': None,
            'content_type': None,
            'cache_control': None,
            'content_disposition': None,
            'content_encoding': None,
            'content_language': None,
            'expires': None,
            'grants': None,
            'only_show_errors': False,
            'is_stream': False,
            'paths_type': None,
            'expected_size': None,
            'metadata': None,
            'metadata_directive': None,
            'ignore_glacier_warnings': False,
            'force_glacier_transfer': False
        }
        self.params = {key: params.get(key, value)
                       for key, value in defaults.items()}
        # ``region`` is required; a missing key raises KeyError, as before.
        self.params['region'] = params['region']
Exemplo n.º 3
0
 def __init__(self,
              client,
              operation_name,
              follow_symlinks=True,
              page_size=None,
              result_queue=None,
              request_parameters=None):
     """Capture the client and listing options used when generating files."""
     self._client = client
     self.operation_name = operation_name
     self.follow_symlinks = follow_symlinks
     self.page_size = page_size
     # Supply a fresh queue / empty dict when the caller provided none.
     self.result_queue = result_queue if result_queue else queue.Queue()
     self.request_parameters = (
         {} if request_parameters is None else request_parameters)
Exemplo n.º 4
0
    def setUp(self):
        # Wire a full result pipeline (queue -> processor -> recorder) and a
        # mocked TransferManager into an S3TransferHandler under test.
        self.result_queue = queue.Queue()
        self.result_recorder = ResultRecorder()
        self.processed_results = []
        # Each result is both recorded and appended to a plain list so tests
        # can inspect exactly what flowed through the queue.
        self.result_processor = ResultProcessor(
            self.result_queue,
            [self.result_recorder, self.processed_results.append])
        self.command_result_recorder = CommandResultRecorder(
            self.result_queue, self.result_recorder, self.result_processor)

        self.transfer_manager = mock.Mock(spec=TransferManager)
        # A spec'd Mock does not auto-create dunder methods, so the context
        # manager protocol has to be attached explicitly.
        self.transfer_manager.__enter__ = mock.Mock()
        self.transfer_manager.__exit__ = mock.Mock()
        self.parameters = {}
        self.s3_transfer_handler = S3TransferHandler(
            self.transfer_manager, self.parameters,
            self.command_result_recorder)
Exemplo n.º 5
0
 def __init__(self,
              session,
              params,
              result_queue=None,
              runtime_config=None):
     """Initialize the base handler, then set up the IO queue and executor."""
     super(S3Handler, self).__init__(session, params, result_queue,
                                     runtime_config)
     # The write_queue has potential for optimizations, so the constant
     # for maxsize is scoped to this class (as opposed to constants.py)
     # so we have the ability to change this value later.
     self.write_queue = queue.Queue(maxsize=self.MAX_IO_QUEUE_SIZE)
     cfg = self._runtime_config
     self.multi_threshold = cfg['multipart_threshold']
     self.chunksize = cfg['multipart_chunksize']
     LOGGER.debug("Using a multipart threshold of %s and a part size of %s",
                  self.multi_threshold, self.chunksize)
     self.executor = Executor(
         num_threads=cfg['max_concurrent_requests'],
         result_queue=self.result_queue,
         quiet=self.params['quiet'],
         only_show_errors=self.params['only_show_errors'],
         max_queue_size=cfg['max_queue_size'],
         write_queue=self.write_queue)
     # Bookkeeping for in-flight multipart operations.
     self._multipart_uploads = []
     self._multipart_downloads = []
Exemplo n.º 6
0
 def setUp(self):
     """Create the minimal fixtures shared by the tests."""
     self.client = mock.Mock()
     self.result_queue = queue.Queue()
     self.cli_params = {}
     self.runtime_config = runtime_config()
Exemplo n.º 7
0
 def __init__(self, writer):
     """Set up the read queue and the record-writer worker thread."""
     self._read_q = queue.Queue()
     # The thread is only constructed here; presumably started by a
     # separate start method — confirm against the rest of the class.
     self._thread = threading.Thread(
         target=self._threaded_record_writer, args=(writer,))
Exemplo n.º 8
0
 def test_can_serialize_unknown_type(self):
     """A type the serializer does not know should round-trip as its repr."""
     unknown = queue.Queue()
     expected = repr(unknown)
     round_tripped = json.loads(json.dumps(unknown, cls=PayloadSerializer))
     self.assertEqual(expected, round_tripped)
Exemplo n.º 9
0
 def __init__(self, tracker):
     """Set up request/response queues and the tracker worker thread."""
     self._read_q = queue.Queue()
     self._write_q = queue.Queue()
     # Thread is created here but not started in this constructor.
     self._thread = threading.Thread(
         target=self._threaded_request_tracker, args=(tracker,))
Exemplo n.º 10
0
 def __init__(self,
              session,
              params,
              result_queue=None,
              runtime_config=None):
     """Set up queues, the merged CLI parameters, and the task executor.

     Only keys from the default parameter set below (plus the required
     ``region``) are copied out of ``params``; unknown keys are ignored.
     """
     self.session = session
     # Fall back to the default runtime configuration when none is given.
     self._runtime_config = (RuntimeConfig.defaults()
                             if runtime_config is None else runtime_config)
     # The write_queue has potential for optimizations, so the constant
     # for maxsize is scoped to this class (as opposed to constants.py)
     # so we have the ability to change this value later.
     self.write_queue = queue.Queue(maxsize=self.MAX_IO_QUEUE_SIZE)
     self.result_queue = result_queue if result_queue else queue.Queue()
     # Defaults for every recognized CLI parameter; any of these present
     # in ``params`` overrides the default value.
     defaults = {
         'dryrun': False,
         'quiet': False,
         'acl': None,
         'guess_mime_type': True,
         'sse_c_copy_source': None,
         'sse_c_copy_source_key': None,
         'sse': None,
         'sse_c': None,
         'sse_c_key': None,
         'sse_kms_key_id': None,
         'storage_class': None,
         'website_redirect': None,
         'content_type': None,
         'cache_control': None,
         'content_disposition': None,
         'content_encoding': None,
         'content_language': None,
         'expires': None,
         'grants': None,
         'only_show_errors': False,
         'is_stream': False,
         'paths_type': None,
         'expected_size': None,
         'metadata': None,
         'metadata_directive': None,
         'ignore_glacier_warnings': False,
         'force_glacier_transfer': False
     }
     self.params = {key: params.get(key, value)
                    for key, value in defaults.items()}
     # ``region`` is required; a missing key raises KeyError, as before.
     self.params['region'] = params['region']
     cfg = self._runtime_config
     self.multi_threshold = cfg['multipart_threshold']
     self.chunksize = cfg['multipart_chunksize']
     LOGGER.debug("Using a multipart threshold of %s and a part size of %s",
                  self.multi_threshold, self.chunksize)
     self.executor = Executor(
         num_threads=cfg['max_concurrent_requests'],
         result_queue=self.result_queue,
         quiet=self.params['quiet'],
         only_show_errors=self.params['only_show_errors'],
         max_queue_size=cfg['max_queue_size'],
         write_queue=self.write_queue)
     # Bookkeeping for in-flight multipart operations.
     self._multipart_uploads = []
     self._multipart_downloads = []
Exemplo n.º 11
0
    def run(self):
        """
        This function wires together all of the generators and completes
        the command.  First a dictionary is created that is indexed first by
        the command name.  Then using the instruction, another dictionary
        can be indexed to obtain the objects corresponding to the
        particular instruction for that command.  To begin the wiring,
        either a ``FileFormat`` or ``TaskInfo`` object, depending on the
        command, is put into a list.  Then the function enters a while loop
        that pops off an instruction.  It then determines the object needed
        and calls the call function of the object using the list as the input.
        Depending on the number of objects in the input list and the number
        of components in the list corresponding to the instruction, the call
        method of the component can be called two different ways.  If the
        number of inputs is equal to the number of components a 1:1 mapping of
        inputs to components is used when calling the call function.  If the
        there are more inputs than components, then a 2:1 mapping of inputs to
        components is used where the component call method takes two inputs
        instead of one.  Whatever files are yielded from the call function
        is appended to a list and used as the input for the next repetition
        of the while loop until there are no more instructions.
        """
        src = self.parameters['src']
        dest = self.parameters['dest']
        paths_type = self.parameters['paths_type']
        # Forward and reversed src/dest descriptors; the reversed form is
        # only consumed by the ``sync`` pipeline below.
        files = FileFormat().format(src, dest, self.parameters)
        rev_files = FileFormat().format(dest, src, self.parameters)

        # Maps a paths_type ('locals3', 's3s3', 's3local', 's3') and a
        # command name to the underlying operation to perform.
        cmd_translation = {}
        cmd_translation['locals3'] = {
            'cp': 'upload',
            'sync': 'upload',
            'mv': 'move'
        }
        cmd_translation['s3s3'] = {'cp': 'copy', 'sync': 'copy', 'mv': 'move'}
        cmd_translation['s3local'] = {
            'cp': 'download',
            'sync': 'download',
            'mv': 'move'
        }
        cmd_translation['s3'] = {
            'rm': 'delete',
            'mb': 'make_bucket',
            'rb': 'remove_bucket'
        }
        # One queue shared by every generator and handler created below.
        result_queue = queue.Queue()
        operation_name = cmd_translation[paths_type][self.cmd]
        file_generator = FileGenerator(self._service,
                                       self._source_endpoint,
                                       operation_name,
                                       self.parameters['follow_symlinks'],
                                       self.parameters['page_size'],
                                       result_queue=result_queue)
        # Reverse-direction generator (dest side); only used by ``sync``.
        rev_generator = FileGenerator(self._service,
                                      self._endpoint,
                                      '',
                                      self.parameters['follow_symlinks'],
                                      self.parameters['page_size'],
                                      result_queue=result_queue)
        # Bucket-level commands (mb/rb) start from a TaskInfo rather than
        # from a file listing.
        taskinfo = [
            TaskInfo(src=files['src']['path'],
                     src_type='s3',
                     operation_name=operation_name,
                     service=self._service,
                     endpoint=self._endpoint)
        ]
        stream_dest_path, stream_compare_key = find_dest_path_comp_key(files)
        # Streaming ``cp`` starts from a single synthetic FileInfo.
        stream_file_info = [
            FileInfo(src=files['src']['path'],
                     dest=stream_dest_path,
                     compare_key=stream_compare_key,
                     src_type=files['src']['type'],
                     dest_type=files['dest']['type'],
                     operation_name=operation_name,
                     service=self._service,
                     endpoint=self._endpoint,
                     is_stream=True)
        ]
        file_info_builder = FileInfoBuilder(self._service, self._endpoint,
                                            self._source_endpoint,
                                            self.parameters)
        s3handler = S3Handler(self.session,
                              self.parameters,
                              runtime_config=self._runtime_config,
                              result_queue=result_queue)
        s3_stream_handler = S3StreamHandler(self.session,
                                            self.parameters,
                                            result_queue=result_queue)

        sync_strategies = self.choose_sync_strategies()

        # Select which pipeline stages apply to this command; the keys here
        # correspond to the entries popped from ``self.instructions`` below.
        command_dict = {}
        if self.cmd == 'sync':
            command_dict = {
                'setup': [files, rev_files],
                'file_generator': [file_generator, rev_generator],
                'filters': [
                    create_filter(self.parameters),
                    create_filter(self.parameters)
                ],
                'comparator': [Comparator(**sync_strategies)],
                'file_info_builder': [file_info_builder],
                's3_handler': [s3handler]
            }
        elif self.cmd == 'cp' and self.parameters['is_stream']:
            command_dict = {
                'setup': [stream_file_info],
                's3_handler': [s3_stream_handler]
            }
        elif self.cmd == 'cp':
            command_dict = {
                'setup': [files],
                'file_generator': [file_generator],
                'filters': [create_filter(self.parameters)],
                'file_info_builder': [file_info_builder],
                's3_handler': [s3handler]
            }
        elif self.cmd == 'rm':
            command_dict = {
                'setup': [files],
                'file_generator': [file_generator],
                'filters': [create_filter(self.parameters)],
                'file_info_builder': [file_info_builder],
                's3_handler': [s3handler]
            }
        elif self.cmd == 'mv':
            command_dict = {
                'setup': [files],
                'file_generator': [file_generator],
                'filters': [create_filter(self.parameters)],
                'file_info_builder': [file_info_builder],
                's3_handler': [s3handler]
            }
        elif self.cmd == 'mb':
            command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}
        elif self.cmd == 'rb':
            command_dict = {'setup': [taskinfo], 's3_handler': [s3handler]}

        files = command_dict['setup']
        # Drain the instruction list, feeding each stage's output into the
        # next stage (see the docstring for the 1:1 vs 2:1 mapping rules).
        while self.instructions:
            instruction = self.instructions.pop(0)
            file_list = []
            components = command_dict[instruction]
            for i in range(len(components)):
                if len(files) > len(components):
                    # More inputs than components: each component receives
                    # all inputs at once (the 2:1 case described above).
                    file_list.append(components[i].call(*files))
                else:
                    file_list.append(components[i].call(files[i]))
            files = file_list
        # This is kinda quirky, but each call through the instructions
        # will replaces the files attr with the return value of the
        # file_list.  The very last call is a single list of
        # [s3_handler], and the s3_handler returns the number of
        # tasks failed and the number of tasks warned.
        # This means that files[0] now contains a namedtuple with
        # the number of failed tasks and the number of warned tasks.
        # In terms of the RC, we're keeping it simple and saying
        # that > 0 failed tasks will give a 1 RC and > 0 warned
        # tasks will give a 2 RC.  Otherwise a RC of zero is returned.
        # NOTE: warnings take precedence over failures here (rc=2 wins
        # when both counters are non-zero).
        rc = 0
        if files[0].num_tasks_failed > 0:
            rc = 1
        if files[0].num_tasks_warned > 0:
            rc = 2
        return rc