Example #1
 def _enqueue_tasks(self, files):
     total_files = 0
     total_parts = 0
     for filename in files:
         num_uploads = 1
         is_multipart_task = self._is_multipart_task(filename)
         too_large = False
         if hasattr(filename, "size"):
             too_large = filename.size > MAX_UPLOAD_SIZE
         if too_large and filename.operation_name == "upload":
             warning_message = "File exceeds s3 upload limit of 5 TB."
             warning = create_warning(relative_path(filename.src), message=warning_message)
             self.result_queue.put(warning)
         elif is_multipart_task and not self.params["dryrun"]:
             # If we're in dryrun mode, then we don't need the
             # real multipart tasks.  We can just use a BasicTask
             # in the else clause below, which will print out the
             # fact that it's transferring a file rather than
             # the specific part tasks required to perform the
             # transfer.
             num_uploads = self._enqueue_multipart_tasks(filename)
         else:
             task = tasks.BasicTask(
                 session=self.session, filename=filename, parameters=self.params, result_queue=self.result_queue
             )
             self.executor.submit(task)
         total_files += 1
         total_parts += num_uploads
     return total_files, total_parts
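Both branches above lean on helpers the listing does not show. Below is a minimal sketch of create_warning, with the signature inferred from the call sites in Examples #1 and #4; the WarningResult container is invented here for illustration and is not taken from the snippets.

from collections import namedtuple

# Hypothetical result type; the real queue item class is not shown above.
WarningResult = namedtuple('WarningResult', ['message', 'error', 'warning'])

def create_warning(path, message, skip_file=True):
    # Build a human-readable warning for the result queue, optionally
    # noting that the offending file is being skipped.
    print_string = "warning: "
    if skip_file and path is not None:
        print_string += "Skipping file %s. " % path
    print_string += message
    return WarningResult(message=print_string, error=False, warning=True)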
Example #2
 def _enqueue_tasks(self, files):
     total_files = 0
     total_parts = 0
     for filename in files:
         num_uploads = 1
         is_multipart_task = self._is_multipart_task(filename)
         too_large = False
         if hasattr(filename, 'size'):
             too_large = filename.size > MAX_UPLOAD_SIZE
         if too_large and filename.operation_name == 'upload':
             warning_message = "File exceeds s3 upload limit of 5 TB."
             warning = create_warning(relative_path(filename.src),
                                      message=warning_message)
             self.result_queue.put(warning)
         elif is_multipart_task and not self.params['dryrun']:
             # If we're in dryrun mode, then we don't need the
             # real multipart tasks.  We can just use a BasicTask
             # in the else clause below, which will print out the
             # fact that it's transferring a file rather than
             # the specific part tasks required to perform the
             # transfer.
             num_uploads = self._enqueue_multipart_tasks(filename)
         else:
             task = tasks.BasicTask(session=self.session,
                                    filename=filename,
                                    parameters=self.params,
                                    result_queue=self.result_queue)
             self.executor.submit(task)
         total_files += 1
         total_parts += num_uploads
     return total_files, total_parts
Example #3
 def _enqueue_tasks(self, files):
     total_files = 0
     total_parts = 0
     for filename in files:
         num_uploads = 1
         is_multipart_task = self._is_multipart_task(filename)
         too_large = False
         if hasattr(filename, 'size'):
             too_large = filename.size > MAX_UPLOAD_SIZE
         if too_large and filename.operation_name == 'upload':
             warning = "Warning %s exceeds 5 TB and upload is " \
                         "being skipped" % relative_path(filename.src)
             self.result_queue.put({'message': warning, 'error': True})
         elif is_multipart_task and not self.params['dryrun']:
             # If we're in dryrun mode, then we don't need the
             # real multipart tasks.  We can just use a BasicTask
             # in the else clause below, which will print out the
             # fact that it's transferring a file rather than
             # the specific part tasks required to perform the
             # transfer.
             num_uploads = self._enqueue_multipart_tasks(filename)
         else:
             task = tasks.BasicTask(
                 session=self.session, filename=filename,
                 parameters=self.params,
                 result_queue=self.result_queue)
             self.executor.submit(task)
         total_files += 1
         total_parts += num_uploads
     return total_files, total_parts
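Note the older shape of the same routine in Example #3: the size warning is a plain dict ({'message': warning, 'error': True}) pushed straight onto the result queue, where Examples #1 and #2 route it through create_warning instead.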
Example #4
 def _warn_if_too_large(self, fileinfo):
     if getattr(fileinfo, 'size', None) and fileinfo.size > MAX_UPLOAD_SIZE:
         file_path = relative_path(fileinfo.src)
         warning_message = (
             "File %s exceeds s3 upload limit of %s." % (
                 file_path, human_readable_size(MAX_UPLOAD_SIZE)))
         warning = create_warning(
             file_path, warning_message, skip_file=False)
         self._result_queue.put(warning)
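Example #4 formats the limit with human_readable_size. Here is a minimal sketch of such a helper, assuming binary-prefix units; the real helper's exact output format may differ.

def human_readable_size(value):
    # Sketch: render a byte count with binary-prefix units, e.g.
    # human_readable_size(5 * 1024 ** 4) -> '5.0 TiB'.
    for suffix in ('Bytes', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'):
        if value < 1024 or suffix == 'PiB':
            return '%.1f %s' % (value, suffix)
        value /= 1024.0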
Example #6
def print_operation(filename, failed, dryrun=False):
    """
    Helper function used to print out what an operation did and whether
    it failed.
    """
    print_str = filename.operation_name
    if dryrun:
        print_str = '(dryrun) ' + print_str
    if failed:
        print_str += " failed"
    print_str += ": "
    if filename.src_type == "s3":
        print_str = print_str + "s3://" + filename.src
    else:
        print_str += relative_path(filename.src)
    if filename.operation_name not in ["delete", "make_bucket", "remove_bucket"]:
        if filename.dest_type == "s3":
            print_str += " to s3://" + filename.dest
        else:
            print_str += " to " + relative_path(filename.dest)
    return print_str
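To see the strings this helper produces, a quick demo follows; FakeFileInfo is a stand-in invented for the example, not a type from the snippets.

from collections import namedtuple

# Hypothetical stand-in for the filename object print_operation expects.
FakeFileInfo = namedtuple(
    'FakeFileInfo',
    ['operation_name', 'src', 'src_type', 'dest', 'dest_type'])

info = FakeFileInfo(operation_name='upload', src='photos/cat.jpg',
                    src_type='local', dest='mybucket/photos/cat.jpg',
                    dest_type='s3')
print(print_operation(info, failed=False, dryrun=True))
# Roughly: (dryrun) upload: ./photos/cat.jpg to s3://mybucket/photos/cat.jpg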
Example #8
 def _enqueue_tasks(self, files):
     total_files = 0
     total_parts = 0
     for filename in files:
         num_uploads = 1
         is_multipart_task = self._is_multipart_task(filename)
         too_large = False
         if hasattr(filename, 'size'):
             too_large = filename.size > MAX_UPLOAD_SIZE
         if too_large and filename.operation_name == 'upload':
             warning_message = "File exceeds s3 upload limit of 5 TB."
             warning = create_warning(relative_path(filename.src),
                                      warning_message)
             self.result_queue.put(warning)
         # Warn and skip over glacier incompatible tasks.
         elif not self.params.get('force_glacier_transfer') and \
                 not filename.is_glacier_compatible():
             LOGGER.debug(
                 'Encountered glacier object s3://%s. Not performing '
                 '%s on object.' % (filename.src, filename.operation_name))
             if not self.params['ignore_glacier_warnings']:
                 warning = create_warning(
                     's3://'+filename.src,
                     'Object is of storage class GLACIER. Unable to '
                     'perform %s operations on GLACIER objects. You must '
                     'restore the object to be able to perform the '
                     'operation.' %
                     filename.operation_name
                 )
                 self.result_queue.put(warning)
             continue
         elif is_multipart_task and not self.params['dryrun']:
             # If we're in dryrun mode, then we don't need the
             # real multipart tasks.  We can just use a BasicTask
             # in the else clause below, which will print out the
             # fact that it's transferring a file rather than
             # the specific part tasks required to perform the
             # transfer.
             num_uploads = self._enqueue_multipart_tasks(filename)
         else:
             task = tasks.BasicTask(
                 session=self.session, filename=filename,
                 parameters=self.params,
                 result_queue=self.result_queue)
             self.executor.submit(task)
         total_files += 1
         total_parts += num_uploads
     return total_files, total_parts
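Example #8 gates the transfer on filename.is_glacier_compatible(). A plausible sketch of that check, assuming an operation is incompatible when it must read the object's bytes while the storage class is GLACIER; the exact rule in aws-cli may differ.

class FileInfoSketch:
    # Hypothetical stand-in for the objects Example #8 iterates over.
    _READ_OPERATIONS = ('download', 'copy', 'move')

    def __init__(self, operation_name, storage_class):
        self.operation_name = operation_name
        self.storage_class = storage_class

    def is_glacier_compatible(self):
        # GLACIER objects must be restored before their bytes can be read.
        if self.storage_class == 'GLACIER':
            return self.operation_name not in self._READ_OPERATIONS
        return True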
Example #10
 def _format_local_path(self, path):
     return relative_path(path)

 def test_relpath_with_error(self):
     # Just want to check we don't get an exception raised,
     # which is what was happening previously.
     self.assertIn(r'foo\bar', relative_path(r'c:\foo\bar'))

 def test_relpath_normal(self):
     self.assertEqual(relative_path('/tmp/foo/bar', '/tmp/foo'),
                      '.' + os.sep + 'bar')
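The two tests in Example #10 pin down what relative_path must do: prefix '.' + os.sep in the normal case, and never raise when os.path.relpath cannot produce a relative path (for example, across Windows drives). A minimal implementation satisfying both, assuming a fallback to the absolute path; aws-cli's actual internals may differ.

import os

def relative_path(filename, start=os.path.curdir):
    # Cross-platform relative path; fall back to the absolute path when
    # os.path.relpath raises ValueError (e.g. different Windows drives).
    try:
        dirname, basename = os.path.split(filename)
        relative_dir = os.path.relpath(dirname, start)
        return os.path.join(relative_dir, basename)
    except ValueError:
        return os.path.abspath(filename)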
Example #13
File: test_utils.py Project: aws/aws-cli
 def test_relpath_normal(self):
     self.assertEqual(relative_path("/tmp/foo/bar", "/tmp/foo"), "." + os.sep + "bar")
Example #15
 def _get_dest(self, fileobj):
     return relative_path(fileobj)
Example #16
 def _get_src(self, fileobj):
     return relative_path(fileobj)
Example #19
File: test_utils.py Project: aws/aws-cli
 def test_relpath_with_error(self):
     # Just want to check we don't get an exception raised,
     # which is what was happening previously.
     self.assertIn(r"foo\bar", relative_path(r"c:\foo\bar"))