def CreateObjectJson(self, contents, bucket_name=None, object_name=None,
                     encryption_key=None):
  """Creates a test object (GCS provider only) using the JSON API.

  Args:
    contents: The contents to write to the object.
    bucket_name: Name of bucket to place the object in. If not specified,
        a new temporary bucket is created.
    object_name: The name to use for the object. If not specified, a
        temporary test object name is constructed.
    encryption_key: AES256 encryption key to use when creating the object,
        if any.

  Returns:
    An apitools Object for the created object.
  """
  bucket_name = bucket_name or self.CreateBucketJson().name
  object_name = object_name or self.MakeTempName('obj')
  object_metadata = apitools_messages.Object(
      name=object_name,
      bucket=bucket_name,
      contentType='application/octet-stream')
  encryption_tuple = None
  if encryption_key:
    encryption_tuple = CryptoTuple(encryption_key)
  return self.json_api.UploadObject(cStringIO.StringIO(contents),
                                    object_metadata, provider='gs',
                                    encryption_tuple=encryption_tuple)
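
# Usage sketch (illustrative addition, not part of the original module):
# assuming this helper lives on the JSON-API-capable integration test case
# class, a single call creates an encrypted object in a temporary bucket.
# The test method name and the key literal are hypothetical.
def testCreateObjectJsonSketch(self):
  test_key = 'iMSM9eeXliDZHSBJZO71R98tfeW/+87VXTpk5chGd6Y='  # hypothetical base64 AES256 key
  bucket = self.CreateBucketJson()
  obj = self.CreateObjectJson(contents='foo',
                              bucket_name=bucket.name,
                              object_name='sketch-obj',
                              encryption_key=test_key)
  # The returned apitools Object reflects the server-assigned metadata.
  self.assertEqual(obj.name, 'sketch-obj')
  self.assertEqual(obj.bucket, bucket.name)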
def CreateObjectJson(self, contents, bucket_name=None, object_name=None,
                     encryption_key=None, mtime=None, storage_class=None,
                     gs_idempotent_generation=0):
  """Creates a test object (GCS provider only) using the JSON API.

  Args:
    contents: The contents to write to the object.
    bucket_name: Name of bucket to place the object in. If not specified,
        a new temporary bucket is created.
    object_name: The name to use for the object. If not specified, a
        temporary test object name is constructed.
    encryption_key: AES256 encryption key to use when creating the object,
        if any.
    mtime: The modification time of the file in POSIX time (seconds since
        UTC 1970-01-01). If not specified, this defaults to the current
        system time.
    storage_class: String representing the storage class to use for the
        object.
    gs_idempotent_generation: For use when overwriting an object for which
        you know the previously uploaded generation. Create GCS object
        idempotently by supplying this generation number as a precondition
        and assuming the current object is correct on precondition failure.
        Defaults to 0 (new object); to disable, set to None.

  Returns:
    An apitools Object for the created object.
  """
  bucket_name = bucket_name or self.CreateBucketJson().name
  object_name = object_name or self.MakeTempName('obj')
  preconditions = Preconditions(gen_match=gs_idempotent_generation)
  custom_metadata = apitools_messages.Object.MetadataValue(
      additionalProperties=[])
  if mtime is not None:
    CreateCustomMetadata({MTIME_ATTR: mtime}, custom_metadata)
  object_metadata = apitools_messages.Object(
      name=object_name,
      metadata=custom_metadata,
      bucket=bucket_name,
      contentType='application/octet-stream',
      storageClass=storage_class)
  encryption_tuple = None
  if encryption_key:
    encryption_tuple = CryptoTuple(encryption_key)
  try:
    return self.json_api.UploadObject(cStringIO.StringIO(contents),
                                      object_metadata, provider='gs',
                                      encryption_tuple=encryption_tuple,
                                      preconditions=preconditions)
  except PreconditionException:
    if gs_idempotent_generation is None:
      raise
    with SetBotoConfigForTest([('GSUtil', 'decryption_key1',
                                encryption_key)]):
      return self.json_api.GetObjectMetadata(bucket_name, object_name)
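
# Usage sketch (illustrative addition): gs_idempotent_generation makes an
# overwrite safe to retry. Supply the generation returned by the previous
# upload; if a retried request trips the ifGenerationMatch precondition
# because it already succeeded, the helper returns the current object's
# metadata instead of raising. The test method and object names below are
# hypothetical.
def testIdempotentOverwriteSketch(self):
  obj = self.CreateObjectJson(contents='v1', object_name='idem-obj')
  # Overwrite idempotently, using the known generation as the precondition.
  obj2 = self.CreateObjectJson(contents='v2',
                               bucket_name=obj.bucket,
                               object_name='idem-obj',
                               gs_idempotent_generation=obj.generation)
  self.assertEqual(obj2.name, 'idem-obj')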
def CreateObjectJson(self, contents, bucket_name=None, object_name=None,
                     encryption_key=None, mtime=None, storage_class=None):
  """Creates a test object (GCS provider only) using the JSON API.

  Args:
    contents: The contents to write to the object.
    bucket_name: Name of bucket to place the object in. If not specified,
        a new temporary bucket is created.
    object_name: The name to use for the object. If not specified, a
        temporary test object name is constructed.
    encryption_key: AES256 encryption key to use when creating the object,
        if any.
    mtime: The modification time of the file in POSIX time (seconds since
        UTC 1970-01-01). If not specified, this defaults to the current
        system time.
    storage_class: String representing the storage class to use for the
        object.

  Returns:
    An apitools Object for the created object.
  """
  bucket_name = bucket_name or self.CreateBucketJson().name
  object_name = object_name or self.MakeTempName('obj')
  custom_metadata = apitools_messages.Object.MetadataValue(
      additionalProperties=[])
  if mtime is not None:
    CreateCustomMetadata({MTIME_ATTR: mtime}, custom_metadata)
  object_metadata = apitools_messages.Object(
      name=object_name,
      metadata=custom_metadata,
      bucket=bucket_name,
      contentType='application/octet-stream',
      storageClass=storage_class)
  encryption_tuple = None
  if encryption_key:
    encryption_tuple = CryptoTuple(encryption_key)
  return self.json_api.UploadObject(cStringIO.StringIO(contents),
                                    object_metadata, provider='gs',
                                    encryption_tuple=encryption_tuple)
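
# Usage sketch (illustrative addition): mtime is recorded as custom object
# metadata under MTIME_ATTR (via CreateCustomMetadata), and storage_class is
# passed through to the created object. Literal values are hypothetical.
def testMtimeAndStorageClassSketch(self):
  obj = self.CreateObjectJson(contents='foo',
                              mtime=1451606400,  # 2016-01-01T00:00:00Z
                              storage_class='NEARLINE')
  # The upload response echoes the per-object storage class; the POSIX mtime
  # is retrievable from obj.metadata.additionalProperties.
  self.assertEqual(obj.storageClass, 'NEARLINE')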
def _test_rewrite_resume_or_restart(self, initial_dec_key, initial_enc_key,
                                    new_dec_key=None, new_enc_key=None):
  """Tests that the rewrite command restarts if the object's key changed.

  Args:
    initial_dec_key: Initial key the object is encrypted with, used as
        decryption key in the first rewrite call.
    initial_enc_key: Initial encryption key to rewrite the object with,
        used as encryption key in the first rewrite call.
    new_dec_key: Decryption key for the second rewrite call; if specified,
        object will be overwritten with a new encryption key in between
        the first and second rewrite calls, and this key will be used for
        the second rewrite call.
    new_enc_key: Encryption key for the second rewrite call; if specified,
        this key will be used for the second rewrite call, otherwise the
        initial key will be used.

  Returns:
    None
  """
  if self.test_api == ApiSelector.XML:
    return unittest.skip('Rewrite API is only supported in JSON.')
  bucket_uri = self.CreateBucket()
  # maxBytesPerCall must be >= 1 MiB, so create an object > 2 MiB because we
  # need 2 responses from the service: 1 success, 1 failure prior to
  # completion.
  object_uri = self.CreateObject(bucket_uri=bucket_uri,
                                 object_name='foo',
                                 contents=('12' * ONE_MIB) + 'bar',
                                 prefer_json_api=True,
                                 encryption_key=initial_dec_key)
  gsutil_api = GcsJsonApi(BucketStorageUri, logging.getLogger(),
                          DiscardMessagesQueue(), self.default_provider)
  with SetBotoConfigForTest([('GSUtil', 'decryption_key1',
                              initial_dec_key)]):
    src_obj_metadata = gsutil_api.GetObjectMetadata(
        object_uri.bucket_name, object_uri.object_name,
        provider=self.default_provider,
        fields=['bucket', 'contentType', 'etag', 'name'])
  dst_obj_metadata = src_obj_metadata
  tracker_file_name = GetRewriteTrackerFilePath(src_obj_metadata.bucket,
                                                src_obj_metadata.name,
                                                dst_obj_metadata.bucket,
                                                dst_obj_metadata.name,
                                                self.test_api)
  decryption_tuple = CryptoTuple(initial_dec_key)
  decryption_tuple2 = CryptoTuple(new_dec_key or initial_dec_key)
  encryption_tuple = CryptoTuple(initial_enc_key)
  encryption_tuple2 = CryptoTuple(new_enc_key or initial_enc_key)

  try:
    try:
      gsutil_api.CopyObject(
          src_obj_metadata, dst_obj_metadata,
          progress_callback=HaltingRewriteCallbackHandler(ONE_MIB * 2).call,
          max_bytes_per_call=ONE_MIB,
          decryption_tuple=decryption_tuple,
          encryption_tuple=encryption_tuple)
      self.fail('Expected RewriteHaltException.')
    except RewriteHaltException:
      pass

    # Tracker file should be left over.
    self.assertTrue(os.path.exists(tracker_file_name))

    if new_dec_key:
      # Recreate the object with a different encryption key.
      object_uri = self.CreateObject(bucket_uri=bucket_uri,
                                     object_name='foo',
                                     contents=('12' * ONE_MIB) + 'bar',
                                     prefer_json_api=True,
                                     encryption_key=new_dec_key)

    with SetBotoConfigForTest([('GSUtil', 'decryption_key1',
                                new_dec_key or initial_dec_key)]):
      original_md5 = gsutil_api.GetObjectMetadata(
          src_obj_metadata.bucket, src_obj_metadata.name,
          fields=['customerEncryption', 'md5Hash']).md5Hash

    if new_dec_key or new_enc_key:
      # Keys changed, rewrite should be restarted.
      progress_callback = EnsureRewriteRestartCallbackHandler(ONE_MIB).call
    else:
      # Keys are the same, rewrite should be resumed.
      progress_callback = EnsureRewriteResumeCallbackHandler(
          ONE_MIB * 2).call

    # Now resume. Callback ensures the appropriate resume/restart behavior.
    gsutil_api.CopyObject(src_obj_metadata, dst_obj_metadata,
                          progress_callback=progress_callback,
                          max_bytes_per_call=ONE_MIB,
                          decryption_tuple=decryption_tuple2,
                          encryption_tuple=encryption_tuple2)

    # Copy completed; tracker file should be deleted.
    self.assertFalse(os.path.exists(tracker_file_name))

    final_enc_key = new_enc_key or initial_enc_key
    with SetBotoConfigForTest([('GSUtil', 'encryption_key',
                                final_enc_key)]):
      self.assertEqual(
          original_md5,
          gsutil_api.GetObjectMetadata(
              dst_obj_metadata.bucket, dst_obj_metadata.name,
              fields=['customerEncryption', 'md5Hash']).md5Hash,
          'Error: Rewritten object\'s hash doesn\'t match source object.')
  finally:
    # Clean up if something went wrong.
    DeleteTrackerFile(tracker_file_name)
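
# Driver sketch (illustrative addition): concrete tests call the helper with
# different key combinations to pin down the expected behavior. Unchanged
# keys mean the tracker file is honored and the rewrite resumes; a changed
# decryption or encryption key means the tracker file is invalidated and the
# rewrite restarts from byte 0. The key constants and test method names below
# are hypothetical stand-ins for the suite's test keys.
def test_rewrite_resume_sketch(self):
  # Same keys on both CopyObject calls: expect a resume.
  self._test_rewrite_resume_or_restart(TEST_KEY_1, TEST_KEY_1)

def test_rewrite_restart_on_key_rotation_sketch(self):
  # New encryption key on the second call: expect a restart.
  self._test_rewrite_resume_or_restart(TEST_KEY_1, TEST_KEY_1,
                                       new_enc_key=TEST_KEY_2)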
def GetEncryptionTuple():
  """Returns the encryption tuple from .boto configuration."""
  encryption_key = _GetBase64EncryptionKey()
  return CryptoTuple(encryption_key) if encryption_key else None
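
# Usage sketch (illustrative addition): GetEncryptionTuple reads the
# base64-encoded AES256 key from the [GSUtil] "encryption_key" boto setting
# (the same setting the rewrite test above overrides), so tests typically
# wrap it in SetBotoConfigForTest. The helper name below is hypothetical.
def _EncryptionTupleFromTestConfigSketch(test_key):
  with SetBotoConfigForTest([('GSUtil', 'encryption_key', test_key)]):
    # CryptoTuple(test_key) while the override is active; None if the setting
    # is absent or empty.
    return GetEncryptionTuple()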
def CryptoTupleFromKey(crypto_key):
  """Returns a CryptoTuple matching the crypto key, or None for no key."""
  return CryptoTuple(crypto_key) if crypto_key else None