def test_copy_handles_generation(self):
  """Copying a version-specific source should send its VersionId to S3."""
  source = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o#gen'))
  destination = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o2'))
  # The generation from the source URL must surface as CopySource.VersionId.
  copy_source = {
      'Bucket': source.storage_url.bucket_name,
      'Key': source.storage_url.object_name,
      'VersionId': 'gen',
  }
  expected_params = {
      'Bucket': destination.storage_url.bucket_name,
      'Key': destination.storage_url.object_name,
      'CopySource': copy_source,
  }
  response = {
      'CopyObjectResult': {'ETag': ETAG, 'LastModified': LAST_MODIFIED}}
  self.stubber.add_response(
      'copy_object',
      service_response=response,
      expected_params=expected_params)
  expected_resource = self.s3_api._get_object_resource_from_s3_response(
      response, destination.storage_url.bucket_name,
      destination.storage_url.object_name)

  with self.stubber:
    observed_resource = self.s3_api.copy_object(source, destination)

  self.assertEqual(observed_resource, expected_resource)
def test_copy_handles_predefined_acl(self, arg_acl, translated_acl):
  """A gsutil-style ACL string should be translated to the AWS ACL value."""
  source = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o'))
  destination = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o2'))
  expected_params = {
      'Bucket': destination.storage_url.bucket_name,
      'Key': destination.storage_url.object_name,
      'CopySource': {
          'Bucket': source.storage_url.bucket_name,
          'Key': source.storage_url.object_name,
      },
      # The request must carry the translated (AWS-accepted) ACL name.
      'ACL': translated_acl,
  }
  response = {
      'CopyObjectResult': {'ETag': ETAG, 'LastModified': LAST_MODIFIED}}
  self.stubber.add_response(
      'copy_object',
      service_response=response,
      expected_params=expected_params)
  expected_resource = self.s3_api._get_object_resource_from_s3_response(
      response, destination.storage_url.bucket_name,
      destination.storage_url.object_name)

  with self.stubber:
    observed_resource = self.s3_api.copy_object(
        source, destination,
        request_config=cloud_api.RequestConfig(arg_acl))

  self.assertEqual(observed_resource, expected_resource)
def test_copy_handles_generation(self):
  """A generation precondition should appear on the GCS copy request."""
  source = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o'))
  destination_metadata = self.messages.Object(name='o2', bucket='b')
  destination = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o2'))
  expected_request = self.messages.StorageObjectsCopyRequest(
      sourceBucket=source.storage_url.bucket_name,
      sourceObject=source.storage_url.object_name,
      destinationBucket=destination.storage_url.bucket_name,
      destinationObject=destination.storage_url.object_name,
      ifSourceGenerationMatch=1)
  self.apitools_client.objects.Copy.Expect(
      expected_request, response=destination_metadata)

  observed_resource = self.gcs_client.copy_object(
      source, destination,
      request_config=gcs_api.GcsRequestConfig(
          precondition_generation_match=1))

  expected_resource = gcs_api._object_resource_from_metadata(
      destination_metadata)
  self.assertEqual(observed_resource, expected_resource)
def test_copy_handles_api_error(self):
  """A 404 from S3 copy_object should be raised as an S3ApiError."""
  source = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o'))
  destination = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o2'))
  self.stubber.add_client_error('copy_object', http_status_code=404)

  with self.stubber, self.assertRaises(errors.S3ApiError):
    self.s3_api.copy_object(source, destination)
def test_copy_handles_api_error(self):
  """An apitools HttpError from Copy should be raised as a GcsApiError."""
  source = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o'))
  destination = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o2'))
  expected_request = self.messages.StorageObjectsCopyRequest(
      sourceBucket=source.storage_url.bucket_name,
      sourceObject=source.storage_url.object_name,
      destinationBucket=destination.storage_url.bucket_name,
      destinationObject=destination.storage_url.object_name)
  self.apitools_client.objects.Copy.Expect(
      expected_request,
      exception=apitools_exceptions.HttpError(None, None, None))

  with self.assertRaises(cloud_errors.GcsApiError):
    self.gcs_client.copy_object(source, destination)
def _get_raw_destination(self):
  """Converts self._destination_string to a destination resource.

  Returns:
    A resource_reference.Resource. Note that this resource may not be a valid
    copy destination if it is a BucketResource, PrefixResource,
    FileDirectoryResource or UnknownResource.

  Raises:
    ValueError if the destination url is a cloud provider or if it specifies
    a version.
  """
  destination_url = storage_url.storage_url_from_string(
      self._destination_string)

  # Only cloud destinations can be provider-only or version-specific, so the
  # validation below applies exclusively to CloudUrl instances.
  if isinstance(destination_url, storage_url.CloudUrl):
    if destination_url.is_provider():
      raise ValueError(
          'The cp command does not support provider-only destination URLs.')
    if destination_url.generation is not None:
      raise ValueError(
          'The destination argument of the cp command cannot be a '
          'version-specific URL ({}).'
          .format(self._destination_string))

  expanded = self._expand_destination_wildcards()
  if expanded:
    return expanded
  # No wildcard match: fall back to an unknown resource for the raw URL.
  return resource_reference.UnknownResource(destination_url)
def test_gets_unknown_resource(self):
  """A provider-only URL should parse into an UnknownResource."""
  # 'gs://' has no bucket or object, so from_url_string cannot categorize it.
  url_string = 'gs://'
  expected = resource_reference.UnknownResource(
      storage_url.storage_url_from_string(url_string))
  self.assertEqual(test_resources.from_url_string(url_string), expected)
def get_temporary_component_resource(source_resource, destination_resource,
                                     random_prefix, component_id):
  """Gets a temporary component destination resource for a composite upload.

  Args:
    source_resource (resource_reference.FileObjectResource): The upload source.
    destination_resource (resource_reference.ObjectResource|UnknownResource):
      The upload destination.
    random_prefix (str): Added to temporary component names to avoid
      collisions between different instances of the CLI uploading to the same
      destination.
    component_id (int): An id that's not shared by any other component in this
      transfer.

  Returns:
    A resource_reference.UnknownResource representing the component's
    destination.
  """
  destination_url = destination_resource.storage_url
  # Components live in the same bucket/scheme as the final destination but
  # under a transfer-unique temporary name.
  component_url = storage_url.CloudUrl(
      destination_url.scheme,
      destination_url.bucket_name,
      _get_temporary_component_name(
          source_resource, random_prefix, component_id))
  return resource_reference.UnknownResource(component_url)
def test_raises_api_error(self):
  """A 404 from S3 put_object should be raised as an S3ApiError."""
  self.stubber.add_client_error('put_object', http_status_code=404)
  upload_resource = resource_reference.UnknownResource(
      storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME))

  with self.stubber, self.assertRaises(errors.S3ApiError):
    self.s3_api.upload_object(io.BytesIO(BINARY_DATA), upload_resource)
def Run(self, args):
  """Executes the command: builds and runs a CreateBucketTask for args.url."""
  url = storage_url.storage_url_from_string(args.url)
  resource = resource_reference.UnknownResource(url)
  # Bucket-level settings (e.g. metadata flags) come from the parsed args.
  user_request_args = (
      user_request_args_factory.get_user_request_args_from_command_args(
          args, metadata_type=user_request_args_factory.MetadataType.BUCKET))
  task = create_bucket_task.CreateBucketTask(
      resource, user_request_args=user_request_args)
  task.execute()
def test_execute_fails_for_local_to_cloud_copy(self):
  """Daisy-chain copy requires a cloud source; a file source should fail."""
  local_source = resource_reference.ObjectResource(
      storage_url.storage_url_from_string('file://o.txt'),
      metadata=self.source_metadata)
  cloud_destination = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o.txt'))

  with self.assertRaises(ValueError):
    daisy_chain_copy_task.DaisyChainCopyTask(local_source, cloud_destination)
def test_upload_raises_error_for_unrecognized_predefined_acl(self):
  """An ACL string with no AWS translation should raise ValueError."""
  upload_resource = resource_reference.UnknownResource(
      storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME))
  expected_message = ('Could not translate predefined_acl_string fake_acl to'
                      ' AWS-accepted ACL.')

  with self.assertRaisesRegex(ValueError, expected_message):
    self.s3_api.upload_object(
        io.BytesIO(BINARY_DATA),
        upload_resource,
        request_config=cloud_api.RequestConfig('fake_acl'))
def test_copies_objects(self):
  """A plain copy should issue one StorageObjectsCopyRequest."""
  source = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o'))
  destination_metadata = self.messages.Object(name='o2', bucket='b')
  destination = resource_reference.UnknownResource(
      storage_url.storage_url_from_string('gs://b/o2'))
  expected_request = self.messages.StorageObjectsCopyRequest(
      sourceBucket=source.storage_url.bucket_name,
      sourceObject=source.storage_url.object_name,
      destinationBucket=destination.storage_url.bucket_name,
      destinationObject=destination.storage_url.object_name)
  self.apitools_client.objects.Copy.Expect(
      expected_request, response=destination_metadata)

  observed_resource = self.gcs_client.copy_object(source, destination)

  expected_resource = gcs_api._object_resource_from_metadata(
      destination_metadata)
  self.assertEqual(observed_resource, expected_resource)
def test_uploads_with_unknown_resource(self):
  """Uploading to an UnknownResource should issue a put_object call."""
  expected_params = {
      'Bucket': BUCKET_NAME,
      'Key': OBJECT_NAME,
      'Body': BINARY_DATA,
  }
  response = {'ETag': ETAG}
  self.stubber.add_response(
      'put_object',
      service_response=response,
      expected_params=expected_params)
  upload_resource = resource_reference.UnknownResource(
      storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME))
  expected_resource = self.s3_api._get_object_resource_from_s3_response(
      response, BUCKET_NAME, OBJECT_NAME)

  with self.stubber:
    observed_resource = self.s3_api.upload_object(
        io.BytesIO(BINARY_DATA), upload_resource)

  self.assertEqual(observed_resource, expected_resource)
def test_upload_translates_predefined_acl(self, arg_acl, translated_acl):
  """Upload should send the AWS translation of a gsutil-style ACL string."""
  expected_params = {
      'Bucket': BUCKET_NAME,
      'Key': OBJECT_NAME,
      'Body': BINARY_DATA,
      # The request must carry the translated (AWS-accepted) ACL name.
      'ACL': translated_acl,
  }
  response = {'ETag': ETAG}
  self.stubber.add_response(
      'put_object',
      service_response=response,
      expected_params=expected_params)
  expected_resource = self.s3_api._get_object_resource_from_s3_response(
      response, BUCKET_NAME, OBJECT_NAME)
  upload_resource = resource_reference.UnknownResource(
      storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME))

  with self.stubber:
    observed_resource = self.s3_api.upload_object(
        io.BytesIO(BINARY_DATA),
        upload_resource,
        request_config=cloud_api.RequestConfig(arg_acl))

  self.assertEqual(observed_resource, expected_resource)
def get_resource(source_resource, destination_resource, component_id):
  """Gets a temporary component destination resource for a composite upload.

  Args:
    source_resource (resource_reference.FileObjectResource): The upload source.
    destination_resource (resource_reference.ObjectResource|UnknownResource):
      The upload destination.
    component_id (int): An id that's not shared by any other component in this
      transfer.

  Returns:
    A resource_reference.UnknownResource representing the component's
    destination.
  """
  destination_url = destination_resource.storage_url
  # Components live in the same bucket/scheme as the final destination but
  # under a transfer-unique temporary name.
  component_url = storage_url.CloudUrl(
      destination_url.scheme,
      destination_url.bucket_name,
      _get_temporary_name(source_resource, component_id))
  return resource_reference.UnknownResource(component_url)
def _complete_destination(self, destination_container, source):
  """Gets a valid copy destination incorporating part of the source's name.

  When given a source file or object and a destination resource that should
  be treated as a container, this function uses the last part of the source's
  name to get an object or file resource representing the copy destination.

  For example: given a source `dir/file` and a destination `gs://bucket/`,
  the destination returned is a resource representing `gs://bucket/file`.
  Check the recursive helper function docstring for details on recursion
  handling.

  Args:
    destination_container (resource_reference.Resource): The destination
      container.
    source (NameExpansionResult): Represents the source resource and the
      expanded parent url in case of recursion.

  Returns:
    The completed destination, a resource_reference.Resource.
  """
  container_url = destination_container.storage_url
  source_url = source.resource.storage_url
  expanded_by_recursion = (
      source_url.versionless_url_string !=
      source.expanded_url.versionless_url_string)
  if expanded_by_recursion:
    # In case of recursion, the expanded_url can be the expanded wildcard URL
    # representing the container, and the source url can be the file/object.
    destination_suffix = self._get_destination_suffix_for_recursion(
        destination_container, source)
  else:
    # Schema might give us incorrect suffix for Windows.
    # TODO(b/169093672) This will not be required if we get rid of file://
    schemaless_url = source_url.versionless_url_string.rpartition(
        source_url.scheme.value + '://')[2]
    destination_suffix = schemaless_url.rpartition(source_url.delimiter)[2]

  return resource_reference.UnknownResource(
      container_url.join(destination_suffix))
def test_execute_copies_file_between_clouds(self, mock_client):
  """Daisy-chain should download the source into a stream and upload it."""
  test_stream = io.BytesIO()
  mock_client.download_object.side_effect = _write_to_stream_side_effect(
      test_stream)
  mock_client.upload_object.side_effect = (
      self._assert_upload_stream_has_correct_data)

  # Patch BytesIO after creating test_stream so the task reuses that stream
  # and the test can observe the data flowing through it.
  with mock.patch.object(io, 'BytesIO') as mock_stream_creator:
    mock_stream_creator.return_value = test_stream
    source = resource_reference.ObjectResource(
        storage_url.storage_url_from_string('gs://b/o.txt'),
        metadata=self.source_metadata)
    destination = resource_reference.UnknownResource(
        storage_url.storage_url_from_string('s3://b/o2.txt'))

    copy_task = daisy_chain_copy_task.DaisyChainCopyTask(source, destination)
    copy_task.execute()

    mock_client.download_object.assert_called_once_with(
        source.bucket, source.name, test_stream)
    mock_client.upload_object.assert_called_once_with(
        test_stream, destination)
def get_unknown_resource(url_string):
  """Parses a URL string and wraps it in an UnknownResource."""
  return resource_reference.UnknownResource(
      storage_url.storage_url_from_string(url_string))