def multipart_put(self, upload_id, part_num, part):
    """Upload one part file to the Glacier multipart upload *upload_id*.

    :param upload_id: id of the in-progress multipart upload.
    :param part_num: zero-based part index; fixes the Content-Range
        start offset at ``part_num * self._part_size``.
    :param part: filesystem path of the file holding this part's bytes.
    """
    # Use a context manager so the file handle is released even when
    # upload_part raises; the original only closed it on the success path.
    with open(part, 'rb') as fileobj:
        bytes_to_upload = fileobj.read(self._part_size)
        # NOTE(review): this is the on-disk size of the part file, not
        # len(bytes_to_upload) — assumes the file never exceeds
        # self._part_size; confirm against whatever writes the part files.
        part_size = os.fstat(fileobj.fileno()).st_size
        start_byte = part_num * self._part_size
        content_range = (start_byte, start_byte + part_size - 1)
        linear_hash = hashlib.sha256(bytes_to_upload).hexdigest()
        part_tree_hash = tree_hash(chunk_hashes(bytes_to_upload))
        hex_part_tree_hash = bytes_to_hex(part_tree_hash)
        self._conn.upload_part(
            self._vault_name, upload_id, linear_hash,
            hex_part_tree_hash, content_range, bytes_to_upload)
        # Record each part exactly once so a re-upload of the same part
        # (retry) does not double-count its hash or size.
        if part_num not in self._part_num_list:
            self._part_num_list.append(part_num)
            self._tree_hashes.append(part_tree_hash)
            self._total_size += part_size
def check_mock_vault_calls(vault, upload_part_calls, data_tree_hashes, data_len):
    """Verify the mocked vault saw exactly the expected Glacier calls.

    Checks that every expected ``upload_part`` call happened (in any
    order), that no extra upload calls were made, and that the upload
    was completed once with the combined tree hash and total length.
    """
    layer1 = vault.layer1
    layer1.upload_part.assert_has_calls(upload_part_calls, any_order=True)
    assert_equal(len(upload_part_calls), layer1.upload_part.call_count)
    expected_tree_hash = bytes_to_hex(tree_hash(data_tree_hashes))
    layer1.complete_multipart_upload.assert_called_once_with(
        sentinel.vault_name, sentinel.upload_id, expected_tree_hash,
        data_len)
def check_mock_vault_calls(vault, upload_part_calls, data_tree_hashes, data_len):
    """Assert the mock vault's upload history matches expectations.

    The per-part uploads may arrive in any order but must match the
    expected call list exactly in count; the completion call must have
    been issued exactly once with the overall tree hash and size.
    """
    uploader = vault.layer1.upload_part
    uploader.assert_has_calls(upload_part_calls, any_order=True)
    assert_equal(len(upload_part_calls), uploader.call_count)
    overall_hash = bytes_to_hex(tree_hash(data_tree_hashes))
    vault.layer1.complete_multipart_upload.assert_called_once_with(
        sentinel.vault_name, sentinel.upload_id, overall_hash, data_len)
def multipart_complete(self, upload_id):
    """Complete the multipart upload and return a ``glacier://`` URI.

    Example return value::

        glacier://Vault_1/?avail_zone=us-east-1&archive_id=NkbByEejwEggmBz2fTHgJrg0XBoDfjP4q6iu87-TjhqG6eGoOY9Z8i1_AUyUsuhPAdTqLHy8pTl5nfCFJmDl2yEZONi5L26Omw12vcs01MNGntHEQL8MBfGlqrEXAMPLEArchiveId
    """
    hex_tree_hash = bytes_to_hex(tree_hash(self._tree_hashes))
    response = self._conn.complete_multipart_upload(
        self._vault_name, upload_id, hex_tree_hash, self._total_size)
    archive_id = response['ArchiveId']
    # Assemble the URI from the vault name, region, and archive id.
    return ''.join(['glacier://', self._vault_name, '/?avail_zone=',
                    self._conn.region.name, '&archive_id=', archive_id])
def multipart_complete(self, upload_id):
    """Finish the multipart upload; return its ``glacier://`` locator.

    Example::

        glacier://Vault_1/?avail_zone=us-east-1&archive_id=NkbByEejwEggmBz2fTHgJrg0XBoDfjP4q6iu87-TjhqG6eGoOY9Z8i1_AUyUsuhPAdTqLHy8pTl5nfCFJmDl2yEZONi5L26Omw12vcs01MNGntHEQL8MBfGlqrEXAMPLEArchiveId
    """
    combined_hash = bytes_to_hex(tree_hash(self._tree_hashes))
    result = self._conn.complete_multipart_upload(
        self._vault_name, upload_id, combined_hash, self._total_size)
    # Build the locator piecewise: scheme + vault + zone + archive id.
    location = 'glacier://' + self._vault_name
    location += '/?avail_zone=' + self._conn.region.name
    location += '&archive_id=' + result['ArchiveId']
    return location
def calculate_mock_vault_calls(data, part_size, chunk_size):
    """Compute the expected ``upload_part`` calls for *data*.

    Splits *data* into ``part_size`` pieces and, for each, derives the
    linear SHA-256, the per-part tree hash, and the byte range that a
    correct uploader would send. Returns ``(expected_calls,
    part_tree_hash_blobs)``.
    """
    expected_calls = []
    part_hash_blobs = []
    for idx, piece in enumerate(partify(data, part_size)):
        begin = idx * part_size
        finish = begin + len(piece)
        blob = tree_hash(chunk_hashes(piece, chunk_size))
        expected_calls.append(call.layer1.upload_part(
            sentinel.vault_name, sentinel.upload_id,
            sha256(piece).hexdigest(), bytes_to_hex(blob),
            (begin, finish - 1), piece))
        part_hash_blobs.append(blob)
    return expected_calls, part_hash_blobs
def calculate_mock_vault_calls(data, part_size, chunk_size):
    """Derive the mock ``upload_part`` call list for *data*.

    For every ``part_size`` slice of *data* this records the call a
    correct uploader must make (linear hash, hex tree hash, inclusive
    byte range, payload) plus the raw per-part tree-hash blob needed to
    compute the final combined hash.
    """
    calls_expected = []
    tree_hash_blobs = []
    offset = 0
    for segment in partify(data, part_size):
        segment_blob = tree_hash(chunk_hashes(segment, chunk_size))
        segment_range = (offset, offset + len(segment) - 1)
        calls_expected.append(call.layer1.upload_part(
            sentinel.vault_name, sentinel.upload_id,
            sha256(segment).hexdigest(),
            bytes_to_hex(segment_blob), segment_range, segment))
        tree_hash_blobs.append(segment_blob)
        offset += part_size
    return calls_expected, tree_hash_blobs