def test_large_encrypted_file_write_from_stream(self):
    """Round-trip a large file through a customer-encrypted blob.

    Uploads the 'big' fixture file to a blob created with
    ``self.ENCRYPTION_KEY``, checks the stored MD5 against the
    precomputed fixture hash, then downloads it back and verifies the
    downloaded bytes hash to the same value.
    """
    import os

    blob = self.bucket.blob('LargeFile', encryption_key=self.ENCRYPTION_KEY)
    file_data = self.FILES['big']
    with open(file_data['path'], 'rb') as file_obj:
        blob.upload_from_file(file_obj)
    self.case_blobs_to_delete.append(blob)

    # Server-computed hash may come back as text; normalize to bytes
    # before comparing with the precomputed fixture hash.
    md5_hash = blob.md5_hash
    if not isinstance(md5_hash, six.binary_type):
        md5_hash = md5_hash.encode('utf-8')
    self.assertEqual(md5_hash, file_data['hash'])

    # tempfile.mktemp() is deprecated and race-prone (the name can be
    # claimed by another process before we open it); use mkstemp() and
    # make sure the temp file is removed even if an assertion fails.
    temp_fd, temp_filename = tempfile.mkstemp()
    try:
        with os.fdopen(temp_fd, 'wb') as file_obj:
            blob.download_to_file(file_obj)
        with open(temp_filename, 'rb') as file_obj:
            md5_temp_hash = _base64_md5hash(file_obj)
        self.assertEqual(md5_temp_hash, file_data['hash'])
    finally:
        os.remove(temp_filename)
def test_large_file_write_from_stream_w_encryption_key(
    storage_client,
    shared_bucket,
    blobs_to_delete,
    file_data,
    service_account,
):
    """Round-trip a large file through a customer-encrypted blob (pytest).

    Uploads the 'big' fixture to a blob created with the module-level
    ``encryption_key``, verifies its hash via ``_check_blob_hash``, then
    downloads it and checks the downloaded bytes hash to the same value.
    """
    import os

    blob = shared_bucket.blob("LargeFile", encryption_key=encryption_key)
    info = file_data["big"]
    with open(info["path"], "rb") as file_obj:
        blob.upload_from_file(file_obj)
    blobs_to_delete.append(blob)

    _check_blob_hash(blob, info)

    # Re-opening a NamedTemporaryFile by name while it is still open
    # fails on Windows; create it with delete=False, close our handle,
    # and clean up explicitly instead.
    temp_f = tempfile.NamedTemporaryFile(delete=False)
    try:
        temp_f.close()
        with open(temp_f.name, "wb") as file_obj:
            storage_client.download_blob_to_file(blob, file_obj)
        with open(temp_f.name, "rb") as file_obj:
            md5_temp_hash = _base64_md5hash(file_obj)
        assert md5_temp_hash == info["hash"]
    finally:
        os.remove(temp_f.name)
def setUpClass(cls):
    """Precompute the MD5 hash of every fixture file and bind the bucket.

    Each entry of ``cls.FILES`` gains a ``'hash'`` key holding the
    base64 MD5 of the file at its ``'path'``; the shared test bucket is
    taken from ``Config.TEST_BUCKET``.
    """
    super(TestStorageFiles, cls).setUpClass()
    for info in cls.FILES.values():
        with open(info['path'], 'rb') as stream:
            info['hash'] = _base64_md5hash(stream)
    cls.bucket = Config.TEST_BUCKET
def _call_fut(self, bytes_to_sign):
    """Delegate to the function under test, ``_base64_md5hash``."""
    from google.cloud.storage import _helpers

    return _helpers._base64_md5hash(bytes_to_sign)
def file_data():
    """Fixture: annotate each ``_file_data`` entry with its MD5 hash.

    For every fixture file, stores the base64 MD5 of the file at
    ``'path'`` under the ``'hash'`` key, then returns the mapping.
    """
    for info in _file_data.values():
        with open(info["path"], "rb") as stream:
            info["hash"] = _base64_md5hash(stream)
    return _file_data