def _display_page(self, response_data, use_basename=True):
    common_prefixes = response_data.get('CommonPrefixes', [])
    contents = response_data.get('Contents', [])
    if not contents and not common_prefixes:
        self._empty_result = True
        return
    for common_prefix in common_prefixes:
        prefix_components = common_prefix['Prefix'].split('/')
        prefix = prefix_components[-2]
        pre_string = "PRE".rjust(30, " ")
        print_str = pre_string + ' ' + prefix + '/\n'
        uni_print(print_str)
    for content in contents:
        last_mod_str = self._make_last_mod_str(content['LastModified'])
        self._size_accumulator += int(content['Size'])
        self._total_objects += 1
        size_str = self._make_size_str(content['Size'])
        if use_basename:
            filename_components = content['Key'].split('/')
            filename = filename_components[-1]
        else:
            filename = content['Key']
        print_str = last_mod_str + ' ' + size_str + ' ' + filename + '\n'
        uni_print(print_str)
    self._at_first_page = False
def test_encoding_statement_fails_are_replaced(self): buf = io.BytesIO() out = io.TextIOWrapper(buf, encoding="ascii") uni_print(u"SomeChars\u2713\u2714OtherChars", out) # We replace the characters that can't be encoded # with '?'. self.assertEqual(buf.getvalue(), b"SomeChars??OtherChars")
def test_encoding_statement_fails_are_replaced(self):
    buf = io.BytesIO()
    out = io.TextIOWrapper(buf, encoding='ascii')
    uni_print(u'SomeChars\u2713\u2714OtherChars', out)
    # We replace the characters that can't be encoded
    # with '?'.
    self.assertEqual(buf.getvalue(), b'SomeChars??OtherChars')
def _list_all_buckets(self): response_data = self.client.list_buckets() buckets = response_data["Buckets"] for bucket in buckets: last_mod_str = self._make_last_mod_str(bucket["CreationDate"]) print_str = last_mod_str + " " + bucket["Name"] + "\n" uni_print(print_str)
def _list_all_buckets(self):
    response_data = self.client.list_buckets()
    buckets = response_data['Buckets']
    for bucket in buckets:
        last_mod_str = self._make_last_mod_str(bucket['CreationDate'])
        print_str = last_mod_str + ' ' + bucket['Name'] + '\n'
        uni_print(print_str)
def _display_page(self, response_data, use_basename=True): common_prefixes = response_data.get("CommonPrefixes", []) contents = response_data.get("Contents", []) if not contents and not common_prefixes: self._empty_result = True return for common_prefix in common_prefixes: prefix_components = common_prefix["Prefix"].split("/") prefix = prefix_components[-2] pre_string = "PRE".rjust(30, " ") print_str = pre_string + " " + prefix + "/\n" uni_print(print_str) for content in contents: last_mod_str = self._make_last_mod_str(content["LastModified"]) self._size_accumulator += int(content["Size"]) self._total_objects += 1 size_str = self._make_size_str(content["Size"]) if use_basename: filename_components = content["Key"].split("/") filename = filename_components[-1] else: filename = content["Key"] print_str = last_mod_str + " " + size_str + " " + filename + "\n" uni_print(print_str) self._at_first_page = False
def _list_all_buckets(self):
    operation = self.service.get_operation('ListBuckets')
    response_data = operation.call(self.endpoint)[1]
    buckets = response_data['Buckets']
    for bucket in buckets:
        last_mod_str = self._make_last_mod_str(bucket['CreationDate'])
        print_str = last_mod_str + ' ' + bucket['Name'] + '\n'
        uni_print(print_str)
def _list_all_buckets(self): operation = self.service.get_operation("ListBuckets") response_data = operation.call(self.endpoint)[1] buckets = response_data["Buckets"] for bucket in buckets: last_mod_str = self._make_last_mod_str(bucket["CreationDate"]) print_str = last_mod_str + " " + bucket["Name"] + "\n" uni_print(print_str) sys.stdout.flush()
def _print_summary(self):
    """
    This function prints a summary of total objects and total bytes
    """
    print_str = str(self._total_objects)
    uni_print("\nTotal Objects: ".rjust(15, ' ') + print_str + "\n")
    print_str = (human_readable_size(self._size_accumulator)
                 if self._human_readable
                 else str(self._size_accumulator))
    uni_print("Total Size: ".rjust(15, ' ') + print_str + "\n")
def _process_print_task(self, print_task):
    print_str = print_task.message
    if print_task.error:
        self.num_errors_seen += 1
    warning = False
    if print_task.warning:
        warning = True
        self.num_warnings_seen += 1
    final_str = ''
    if warning:
        final_str += print_str.ljust(self._progress_length, ' ')
        final_str += '\n'
    elif print_task.total_parts:
        # Normalize keys so failures and success
        # look the same.
        op_list = print_str.split(':')
        print_str = ':'.join(op_list[1:])
        total_part = print_task.total_parts
        self._num_parts += 1
        if print_str in self._progress_dict:
            self._progress_dict[print_str]['parts'] += 1
        else:
            self._progress_dict[print_str] = {}
            self._progress_dict[print_str]['parts'] = 1
            self._progress_dict[print_str]['total'] = total_part
    else:
        print_components = print_str.split(':')
        final_str += print_str.ljust(self._progress_length, ' ')
        final_str += '\n'
        key = ':'.join(print_components[1:])
        if key in self._progress_dict:
            self._progress_dict.pop(print_str, None)
        else:
            self._num_parts += 1
        self._file_count += 1
    is_done = self._total_files == self._file_count
    if not is_done:
        prog_str = "Completed %s " % self._num_parts
        num_files = self._total_files
        if self._total_files != '...':
            prog_str += "of %s " % self._total_parts
            num_files = self._total_files - self._file_count
        prog_str += "part(s) with %s file(s) remaining" % num_files
        length_prog = len(prog_str)
        prog_str += '\r'
        prog_str = prog_str.ljust(self._progress_length, ' ')
        self._progress_length = length_prog
        final_str += prog_str
    if not self._quiet:
        uni_print(final_str)
        self._needs_newline = not final_str.endswith('\n')
        sys.stdout.flush()
def run(self):
    while True:
        try:
            print_task = self.printQueue.get(True, self.timeout)
            print_str = print_task['result']
            final_str = ''
            if print_task.get('part', ''):
                # Normalize keys so failures and success
                # look the same.
                op_list = print_str.split(':')
                print_str = ':'.join(op_list[1:])
                print_part = print_task['part']
                total_part = print_part['total']
                self.numParts += 1
                if print_str in self.progress_dict:
                    self.progress_dict[print_str]['parts'] += 1
                else:
                    self.progress_dict[print_str] = {}
                    self.progress_dict[print_str]['parts'] = 1
                    self.progress_dict[print_str]['total'] = total_part
            else:
                print_components = print_str.split(':')
                final_str += print_str.ljust(self.progressLength, ' ')
                final_str += '\n'
                if print_task.get('error', ''):
                    final_str += print_task['error'] + '\n'
                key = ':'.join(print_components[1:])
                if key in self.progress_dict:
                    self.progress_dict.pop(print_str, None)
                else:
                    self.numParts += 1
                self.file_count += 1
            is_done = self.totalFiles == self.file_count
            if not self.interrupt.isSet() and not is_done:
                prog_str = "Completed %s " % self.numParts
                num_files = self.totalFiles
                if self.totalFiles != '...':
                    prog_str += "of %s " % self.totalParts
                    num_files = self.totalFiles - self.file_count
                prog_str += "part(s) with %s file(s) remaining" % num_files
                length_prog = len(prog_str)
                prog_str += '\r'
                prog_str = prog_str.ljust(self.progressLength, ' ')
                self.progressLength = length_prog
                final_str += prog_str
            if not self.quiet:
                uni_print(final_str)
                sys.stdout.flush()
            self.printQueue.task_done()
        except Queue.Empty:
            pass
        if self.done.isSet():
            break
def _print_summary(self):
    """
    This function prints a summary of total objects and total bytes
    """
    print_str = str(self._total_objects)
    uni_print("\nTotal Objects: ".rjust(15, ' ') + print_str + "\n")
    if self._human_readable:
        print_str = human_readable_size(self._size_accumulator)
    else:
        print_str = str(self._size_accumulator)
    uni_print("Total Size: ".rjust(15, ' ') + print_str + "\n")
def _process_print_task(self, print_task):
    print_str = print_task.message
    print_to_stderr = False
    if print_task.error:
        self.num_errors_seen += 1
        print_to_stderr = True
    final_str = ''
    if print_task.warning:
        self.num_warnings_seen += 1
        print_to_stderr = True
        final_str += print_str.ljust(self._progress_length, ' ')
        final_str += '\n'
    elif print_task.total_parts:
        # Normalize keys so failures and success
        # look the same.
        op_list = print_str.split(':')
        print_str = ':'.join(op_list[1:])
        total_part = print_task.total_parts
        self._num_parts += 1
        if print_str in self._progress_dict:
            self._progress_dict[print_str]['parts'] += 1
        else:
            self._progress_dict[print_str] = {}
            self._progress_dict[print_str]['parts'] = 1
            self._progress_dict[print_str]['total'] = total_part
    else:
        print_components = print_str.split(':')
        final_str += print_str.ljust(self._progress_length, ' ')
        final_str += '\n'
        key = ':'.join(print_components[1:])
        if key in self._progress_dict:
            self._progress_dict.pop(print_str, None)
        else:
            self._num_parts += 1
        self._file_count += 1
    # If the message is an error or warning, print it to standard error.
    if print_to_stderr and not self._quiet:
        uni_print(final_str, sys.stderr)
        final_str = ''
    is_done = self._total_files == self._file_count
    if not is_done:
        final_str += self._make_progress_bar()
    if not (self._quiet or self._only_show_errors):
        uni_print(final_str)
        self._needs_newline = not final_str.endswith('\n')
def _process_print_task(self, print_task):
    print_str = print_task['result']
    final_str = ''
    if print_task.get('part', ''):
        # Normalize keys so failures and success
        # look the same.
        op_list = print_str.split(':')
        print_str = ':'.join(op_list[1:])
        print_part = print_task['part']
        total_part = print_part['total']
        self._num_parts += 1
        if print_str in self._progress_dict:
            self._progress_dict[print_str]['parts'] += 1
        else:
            self._progress_dict[print_str] = {}
            self._progress_dict[print_str]['parts'] = 1
            self._progress_dict[print_str]['total'] = total_part
    else:
        print_components = print_str.split(':')
        final_str += print_str.ljust(self._progress_length, ' ')
        final_str += '\n'
        if print_task.get('error', ''):
            final_str += print_task['error'] + '\n'
        key = ':'.join(print_components[1:])
        if key in self._progress_dict:
            self._progress_dict.pop(print_str, None)
        else:
            self._num_parts += 1
        self._file_count += 1
    is_done = self._total_files == self._file_count
    if not self._interrupt.isSet() and not is_done:
        prog_str = "Completed %s " % self._num_parts
        num_files = self._total_files
        if self._total_files != '...':
            prog_str += "of %s " % self._total_parts
            num_files = self._total_files - self._file_count
        prog_str += "part(s) with %s file(s) remaining" % num_files
        length_prog = len(prog_str)
        prog_str += '\r'
        prog_str = prog_str.ljust(self._progress_length, ' ')
        self._progress_length = length_prog
        final_str += prog_str
    if not self._quiet:
        uni_print(final_str)
        sys.stdout.flush()
def _process_print_task(self, print_task):
    print_str = print_task['message']
    if print_task['error']:
        self.num_errors_seen += 1
    final_str = ''
    if 'total_parts' in print_task:
        # Normalize keys so failures and success
        # look the same.
        op_list = print_str.split(':')
        print_str = ':'.join(op_list[1:])
        total_part = print_task['total_parts']
        self._num_parts += 1
        if print_str in self._progress_dict:
            self._progress_dict[print_str]['parts'] += 1
        else:
            self._progress_dict[print_str] = {}
            self._progress_dict[print_str]['parts'] = 1
            self._progress_dict[print_str]['total'] = total_part
    else:
        print_components = print_str.split(':')
        final_str += print_str.ljust(self._progress_length, ' ')
        final_str += '\n'
        key = ':'.join(print_components[1:])
        if key in self._progress_dict:
            self._progress_dict.pop(print_str, None)
        else:
            self._num_parts += 1
        self._file_count += 1
    is_done = self._total_files == self._file_count
    if not self._interrupt.isSet() and not is_done:
        prog_str = "Completed %s " % self._num_parts
        num_files = self._total_files
        if self._total_files != '...':
            prog_str += "of %s " % self._total_parts
            num_files = self._total_files - self._file_count
        prog_str += "part(s) with %s file(s) remaining" % num_files
        length_prog = len(prog_str)
        prog_str += '\r'
        prog_str = prog_str.ljust(self._progress_length, ' ')
        self._progress_length = length_prog
        final_str += prog_str
    if not self._quiet:
        uni_print(final_str)
        sys.stdout.flush()
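# The ':'.join(msg.split(':')[1:]) idiom used by the print-task processors
# above strips the leading operation label so that a success message and its
# failure counterpart map to the same progress-dict key. A minimal standalone
# illustration of that normalization; the message strings are hypothetical
# examples, not taken from the snippets above:


def normalize_progress_key(message):
    # 'upload: a.txt to s3://bucket/a.txt' and
    # 'upload failed: a.txt to s3://bucket/a.txt'
    # both reduce to ' a.txt to s3://bucket/a.txt'.
    return ':'.join(message.split(':')[1:])


assert (normalize_progress_key('upload: a.txt to s3://bucket/a.txt') ==
        normalize_progress_key('upload failed: a.txt to s3://bucket/a.txt'))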
def _process_transfer(self, future):
    """
    Execute and process a transfer future.

    :type future: s3transfer.futures.TransferFuture
    :param future: A future representing an S3 Transfer

    :return: A CommandResult representing the transfer status.
    """
    try:
        future.result()
        return CommandResult(0, 0)
    except Exception as e:
        LOGGER.debug('Exception caught during task execution: %s',
                     str(e), exc_info=True)
        # TODO: Update when S3Handler is refactored
        uni_print("Transfer failed: %s \n" % str(e))
        return CommandResult(1, 0)
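# CommandResult is not defined in these snippets. A sketch of a compatible
# definition: a simple named tuple whose two fields match the (failures,
# warnings) pairs built above. The field names are an assumption inferred
# from that usage:

from collections import namedtuple

CommandResult = namedtuple('CommandResult',
                           ['num_tasks_failed', 'num_tasks_warned'])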
def _display_page(self, response_data, use_basename=True):
    common_prefixes = response_data.get('CommonPrefixes', [])
    contents = response_data.get('Contents', [])
    for common_prefix in common_prefixes:
        prefix_components = common_prefix['Prefix'].split('/')
        prefix = prefix_components[-2]
        pre_string = "PRE".rjust(30, " ")
        print_str = pre_string + ' ' + prefix + '/\n'
        uni_print(print_str)
    for content in contents:
        last_mod_str = self._make_last_mod_str(content['LastModified'])
        size_str = self._make_size_str(content['Size'])
        if use_basename:
            filename_components = content['Key'].split('/')
            filename = filename_components[-1]
        else:
            filename = content['Key']
        print_str = last_mod_str + ' ' + size_str + ' ' + filename + '\n'
        uni_print(print_str)
def _display_page(self, response_data, use_basename=True): common_prefixes = response_data["CommonPrefixes"] contents = response_data["Contents"] for common_prefix in common_prefixes: prefix_components = common_prefix["Prefix"].split("/") prefix = prefix_components[-2] pre_string = "PRE".rjust(30, " ") print_str = pre_string + " " + prefix + "/\n" uni_print(print_str) sys.stdout.flush() for content in contents: last_mod_str = self._make_last_mod_str(content["LastModified"]) size_str = self._make_size_str(content["Size"]) if use_basename: filename_components = content["Key"].split("/") filename = filename_components[-1] else: filename = content["Key"] print_str = last_mod_str + " " + size_str + " " + filename + "\n" uni_print(print_str) sys.stdout.flush()
def _list_all_objects(self, bucket, key):
    operation = self.service.get_operation('ListObjects')
    iterator = operation.paginate(self.endpoint, bucket=bucket,
                                  prefix=key, delimiter='/')
    for _, response_data in iterator:
        common_prefixes = response_data['CommonPrefixes']
        contents = response_data['Contents']
        for common_prefix in common_prefixes:
            prefix_components = common_prefix['Prefix'].split('/')
            prefix = prefix_components[-2]
            pre_string = "PRE".rjust(30, " ")
            print_str = pre_string + ' ' + prefix + '/\n'
            uni_print(print_str)
            sys.stdout.flush()
        for content in contents:
            last_mod_str = self._make_last_mod_str(content['LastModified'])
            size_str = self._make_size_str(content['Size'])
            filename_components = content['Key'].split('/')
            filename = filename_components[-1]
            print_str = last_mod_str + ' ' + size_str + ' ' + filename + '\n'
            uni_print(print_str)
            sys.stdout.flush()
def list_objects(self):
    """
    List all of the buckets if no bucket is specified. List the objects
    and common prefixes under a specified prefix.
    """
    bucket, key = find_bucket_key(self.src)
    if bucket == '':
        operation = self.service.get_operation('ListBuckets')
        html_response, response_data = operation.call(self.endpoint)
        header_str = "CreationTime".rjust(19, ' ')
        header_str = header_str + ' ' + "Bucket"
        underline_str = "------------".rjust(19, ' ')
        underline_str = underline_str + ' ' + "------"
        sys.stdout.write("\n%s\n" % header_str)
        sys.stdout.write("%s\n" % underline_str)
        buckets = response_data['Buckets']
        for bucket in buckets:
            last_mod_str = make_last_mod_str(bucket['CreationDate'])
            print_str = last_mod_str + ' ' + bucket['Name'] + '\n'
            uni_print(print_str)
            sys.stdout.flush()
    else:
        operation = self.service.get_operation('ListObjects')
        iterator = operation.paginate(self.endpoint, bucket=bucket,
                                      prefix=key, delimiter='/')
        sys.stdout.write("\nBucket: %s\n" % bucket)
        sys.stdout.write("Prefix: %s\n\n" % key)
        header_str = "LastWriteTime".rjust(19, ' ')
        header_str = header_str + ' ' + "Length".rjust(10, ' ')
        header_str = header_str + ' ' + "Name"
        underline_str = "-------------".rjust(19, ' ')
        underline_str = underline_str + ' ' + "------".rjust(10, ' ')
        underline_str = underline_str + ' ' + "----"
        sys.stdout.write("%s\n" % header_str)
        sys.stdout.write("%s\n" % underline_str)
        for html_response, response_data in iterator:
            check_error(response_data)
            common_prefixes = response_data['CommonPrefixes']
            contents = response_data['Contents']
            for common_prefix in common_prefixes:
                prefix_components = common_prefix['Prefix'].split('/')
                prefix = prefix_components[-2]
                pre_string = "PRE".rjust(30, " ")
                print_str = pre_string + ' ' + prefix + '/\n'
                uni_print(print_str)
                sys.stdout.flush()
            for content in contents:
                last_mod_str = make_last_mod_str(content['LastModified'])
                size_str = make_size_str(content['Size'])
                filename_components = content['Key'].split('/')
                filename = filename_components[-1]
                print_str = (last_mod_str + ' ' + size_str + ' ' +
                             filename + '\n')
                uni_print(print_str)
                sys.stdout.flush()
def _clear_progress_if_no_more_expected_transfers(self, **kwargs):
    if self._progress_length and not self._has_remaining_progress():
        uni_print(self._adjust_statement_padding(''), self._out_file)
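# _adjust_statement_padding is referenced above but not shown. A plausible
# standalone sketch of the idea, consistent with its use here: pad the
# statement out past the previous progress line so any leftover characters
# are overwritten. The function name and ending_char default are assumptions
# for illustration:


def adjust_statement_padding(statement, progress_length, ending_char='\n'):
    # Pad with spaces to cover the last progress line, then terminate.
    return statement.ljust(progress_length, ' ') + ending_char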
def test_encoding_with_encoding_none(self): """When the output of the aws command is being piped, the `encoding` attribute of `sys.stdout` is `None`.""" out = MockPipedStdout() uni_print(u"SomeChars\u2713\u2714OtherChars", out) self.assertEqual(out.getvalue(), b"SomeChars??OtherChars")
def test_out_file_with_encoding_attribute(self): buf = io.BytesIO() out = io.TextIOWrapper(buf, encoding="utf-8") uni_print(u"\u2713", out) self.assertEqual(buf.getvalue(), u"\u2713".encode("utf-8"))
def test_out_file_with_encoding_attribute(self):
    buf = io.BytesIO()
    out = io.TextIOWrapper(buf, encoding='utf-8')
    uni_print(u'\u2713', out)
    self.assertEqual(buf.getvalue(), u'\u2713'.encode('utf-8'))
def test_encoding_with_encoding_none(self):
    """When the output of the aws command is being piped,
    the `encoding` attribute of `sys.stdout` is `None`."""
    out = MockPipedStdout()
    uni_print(u'SomeChars\u2713\u2714OtherChars', out)
    self.assertEqual(out.getvalue(), b'SomeChars??OtherChars')
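# Neither uni_print nor the MockPipedStdout test double is defined in these
# snippets. Below is a minimal sketch consistent with the tests above: write
# text to the target stream, fall back to '?'-replacement when the stream's
# codec cannot represent a character, and flush so wrapped buffers become
# visible. This illustrates the contract the tests pin down, not necessarily
# the shipped implementation.

import io
import sys


def uni_print(statement, out_file=None):
    if out_file is None:
        out_file = sys.stdout
    try:
        out_file.write(statement)
    except UnicodeEncodeError:
        # Replace characters the stream's codec cannot encode with '?'.
        new_statement = statement.encode(
            out_file.encoding, 'replace').decode(out_file.encoding)
        out_file.write(new_statement)
    out_file.flush()


class MockPipedStdout(io.BytesIO):
    # Mimics sys.stdout when piped: `encoding` is None, and text written to
    # it is encoded to ascii with replacement, as the tests expect.
    def __init__(self):
        self.encoding = None
        super(MockPipedStdout, self).__init__()

    def write(self, statement):
        if isinstance(statement, str):
            statement = statement.encode('ascii', 'replace')
        super(MockPipedStdout, self).write(statement)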
def _print_to_out_file(self, statement):
    uni_print(statement, self._out_file)

def _print_to_error_file(self, statement):
    uni_print(statement, self._error_file)