def __iter__(self):
  """Yields file/directory resources for paths matching the wildcard.

  Expands self._path with glob (recursively when the pattern contains
  '**'), additionally globbing for hidden dot-files in the recursive
  case since '**' does not match them.
  """
  use_recursion = '**' in self._path
  visible_matches = glob.iglob(self._path, recursive=use_recursion)
  if use_recursion:
    # '**' does not match names starting with '.', so glob for them
    # explicitly and chain both iterators together.
    hidden_matches = glob.iglob(
        os.path.join(self._path, '**', '.*'), recursive=use_recursion)
  else:
    hidden_matches = []

  for match in itertools.chain(visible_matches, hidden_matches):
    # Skip symlinks that point at directories, and skip all symlinks when
    # the ignore flag is set; otherwise follow them like regular files.
    if os.path.islink(match) and (
        os.path.isdir(match) or self._ignore_symlinks):
      log.warning('Skipping symlink {}'.format(match))
      continue
    # For a pattern like foo/bar/**, glob yields 'foo/bar/' first even when
    # foo/bar does not exist, and also yields intermediate directories.
    # Skip both so results stay consistent with CloudWildcardIterator.
    if self._path.endswith('**') and (
        not os.path.exists(match) or os.path.isdir(match)):
      continue

    url = storage_url.FileUrl(match)
    if os.path.isdir(match):
      resource = resource_reference.FileDirectoryResource(url)
    else:
      resource = resource_reference.FileObjectResource(url)
    yield resource
def test_object_upload_handles_api_error(self):
  """Apitools HTTP errors during upload surface as GcsApiError."""
  object_metadata = self.messages.Object(name='o', bucket='b')
  insert_request = self.messages.StorageObjectsInsertRequest(
      bucket=object_metadata.bucket, object=object_metadata)
  # Make the mocked backend raise instead of returning a response.
  self.apitools_client.objects.Insert.Expect(
      insert_request,
      exception=apitools_exceptions.HttpError(None, None, None))

  resource = resource_reference.FileObjectResource(
      storage_url.storage_url_from_string('gs://b/o'))
  with self.assertRaises(cloud_errors.GcsApiError):
    self.gcs_client.upload_object(mock.mock_open(), resource)
def test_execute_downloads_file(self, mock_client, mock_file_writer):
  """FileDownloadTask streams the object through the file writer."""
  cloud_url = storage_url.storage_url_from_string('gs://b/o1.txt')
  cloud_resource = resource_reference.ObjectResource(cloud_url)
  local_resource = resource_reference.FileObjectResource(
      storage_url.storage_url_from_string('file://o2.txt'))

  file_download_task.FileDownloadTask(
      cloud_resource, local_resource).execute()

  mock_file_writer.assert_called_once_with('o2.txt', create_path=True)
  write_stream = mock_file_writer()
  mock_client.download_object.assert_called_once_with(
      cloud_url.bucket_name, cloud_url.object_name, write_stream)
def test_execute_uploads_file(self, mock_client, mock_stream):
  """FileUploadTask opens the source file and hands it to the client."""
  local_resource = resource_reference.FileObjectResource(
      storage_url.storage_url_from_string('file://o1.txt'))
  cloud_resource = resource_reference.ObjectResource(
      storage_url.storage_url_from_string('gs://b/o2.txt'))

  file_upload_task.FileUploadTask(local_resource, cloud_resource).execute()

  mock_stream.assert_called_once_with('o1.txt')
  # The task uses "with ... as ..." on the stream, so calling mock_stream()
  # below creates a second invocation — that is why assert_called_once must
  # run before this line.
  mock_client.upload_object.assert_called_once_with(
      mock_stream(), cloud_resource)
def __iter__(self):
  """Yields file/directory resources for paths matching the wildcard."""
  use_recursion = '**' in self._path
  for match in glob.iglob(self._path, recursive=use_recursion):
    # For a pattern like foo/bar/**, glob yields 'foo/bar/' first even when
    # foo/bar does not exist, and also yields intermediate directories.
    # Skip both so results stay consistent with CloudWildcardIterator.
    if self._path.endswith('**'):
      if not os.path.exists(match) or os.path.isdir(match):
        continue

    url = storage_url.FileUrl(match)
    if os.path.isdir(match):
      yield resource_reference.FileDirectoryResource(url)
    else:
      yield resource_reference.FileObjectResource(url)
def test_object_upload_predefined_acl_string(self):
  """Predefined ACL strings are translated to the request enum value."""
  object_metadata = self.messages.Object(name='o', bucket='b')
  acl_string = 'authenticatedRead'
  acl_enum = getattr(
      self.messages.StorageObjectsInsertRequest.PredefinedAclValueValuesEnum,
      acl_string)
  expected_request = self.messages.StorageObjectsInsertRequest(
      bucket=object_metadata.bucket,
      object=object_metadata,
      predefinedAcl=acl_enum)
  self.apitools_client.objects.Insert.Expect(
      expected_request, response=object_metadata)

  resource = resource_reference.FileObjectResource(
      storage_url.storage_url_from_string('gs://b/o'))
  self.gcs_client.upload_object(
      mock.mock_open(),
      resource,
      request_config=gcs_api.GcsRequestConfig(
          predefined_acl_string=acl_string))
def test_object_upload_gzip_encoded(self):
  """gzip_encoded in the request config is forwarded to apitools Upload."""
  object_metadata = self.messages.Object(name='o', bucket='b')
  expected_request = self.messages.StorageObjectsInsertRequest(
      bucket=object_metadata.bucket, object=object_metadata)
  self.apitools_client.objects.Insert.Expect(
      expected_request, response=object_metadata)

  source_stream = mock.mock_open()
  resource = resource_reference.FileObjectResource(
      storage_url.storage_url_from_string('gs://b/o'))
  with mock.patch.object(apitools_transfer, 'Upload') as mock_upload:
    self.gcs_client.upload_object(
        source_stream,
        resource,
        request_config=gcs_api.GcsRequestConfig(gzip_encoded=True))
    mock_upload.assert_called_once_with(
        source_stream,
        gcs_api.DEFAULT_CONTENT_TYPE,
        total_size=None,
        auto_transfer=True,
        num_retries=gcs_api.DEFAULT_NUM_RETRIES,
        gzip_encoded=True)
def get_file_object_resource(path):
  """Returns a FileObjectResource wrapping the parsed *path* string."""
  return resource_reference.FileObjectResource(
      storage_url.storage_url_from_string(path))
def test_gets_file_object_resource(self):
  """from_url_string on a local path yields a FileObjectResource."""
  path = 'hi.txt'
  expected = resource_reference.FileObjectResource(
      storage_url.storage_url_from_string(path))
  self.assertEqual(test_resources.from_url_string(path), expected)