def testSplitGCSUrl(self):
    """Tests splitting a GCS URL into a (bucket, object_path) tuple.

    Covers a URL with an object path, a bare bucket, a bucket with a
    trailing slash, and an invalid (non gs://) URL.
    """
    # Full URL: bucket plus object path.
    self.gcs_url = 'gs://bucket_name/some/where'
    uploader_object = uploader.GCSUploader(
        self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager())
    expected_tuple = ('bucket_name', 'some/where')
    self.assertEqual(uploader_object._SplitGCSUrl(), expected_tuple)

    # Bucket only, no trailing slash: object path is empty.
    self.gcs_url = 'gs://bucket_name'
    uploader_object = uploader.GCSUploader(
        self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager())
    expected_tuple = ('bucket_name', '')
    self.assertEqual(uploader_object._SplitGCSUrl(), expected_tuple)

    # Bucket with trailing slash: object path is still empty.
    self.gcs_url = 'gs://bucket_name/'
    uploader_object = uploader.GCSUploader(
        self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager())
    expected_tuple = ('bucket_name', '')
    self.assertEqual(uploader_object._SplitGCSUrl(), expected_tuple)

    # A URL without the 'gs://' scheme must be rejected.
    self.gcs_url = 'invalid'
    uploader_object = uploader.GCSUploader(
        self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager())
    # Fix: assertRaisesRegexp is a deprecated alias (removed in
    # Python 3.12); assertRaisesRegex is the supported name.
    with self.assertRaisesRegex(
        argparse.ArgumentError,
        'Invalid GCS URL \'{0:s}\''.format('invalid')):
        uploader_object._SplitGCSUrl()
def _MakeUploader(self, options):
    """Creates a new Uploader object.

    This instantiates the proper Uploader object to handle the destination
    URL argument.

    Args:
      options (argparse.Namespace): the parsed command-line arguments.

    Returns:
      Uploader: an uploader object, or None when the destination URL is
        not a GCS URL.

    Raises:
      errors.BadConfigOption: if the options are invalid.
    """
    stamp_manager = manager.StampManager()

    # Only 'gs://' destinations are handled; anything else yields None.
    if not options.destination.startswith('gs://'):
        return None

    # A GCS destination requires the parsed keyfile settings.
    if not self._gcs_settings:
        raise errors.BadConfigOption(
            'Please provide a valid GCS json file. '
            'See --gs_keyfile option')

    client_id = self._gcs_settings.get('client_id', None)
    if not client_id:
        raise errors.BadConfigOption(
            'The provided GCS json file lacks a "client_id" key.')

    return uploader.GCSUploader(
        options.destination, options.gs_keyfile, client_id, stamp_manager)
def testMakeRemotePathNoAsset(self):
    """Tests that _MakeRemotePath builds a path from a stamp with no asset."""
    gcs_uploader = uploader.GCSUploader(
        self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager(),
        stamp=FAKE_STAMP_NO_ASSET)
    result = gcs_uploader._MakeRemotePath('remote_file')
    self.assertEqual(
        result,
        'bucket_name/some/where/20171012-135619/fake_uuid/remote_file')
def testFailUploadNoRetry(self, patched_storage, patched_getstream):
    """Tests that a non-retryable upload error propagates to the caller."""
    patched_getstream.return_value = BytesIO(b'fake_content')
    # ForensicateError is not retry-worthy; _UploadStream must re-raise it.
    patched_storage.side_effect = errors.ForensicateError('random_error')
    artifact = base.BaseArtifact('test_artifact')
    gcs_uploader = uploader.GCSUploader(
        'gs://fake_bucket/', 'no_keyfile', 'client_id', FakeStampManager())
    gcs_uploader._boto_configured = True
    with self.assertRaises(errors.ForensicateError):
        gcs_uploader._UploadStream(
            artifact.OpenStream(), 'gs://fake_bucket/remote/path')
def testFailUploadRetryWorthy(self, patched_storage, patched_getstream):
    """Tests that a retry-worthy boto error surfaces as a RetryableError."""
    # Fix: use a bytes stream, consistent with testFailUploadNoRetry —
    # uploads operate on binary content; StringIO would hand back text.
    patched_getstream.return_value = BytesIO(b'fake_content')
    # GSDataError is a transient boto failure; _UploadStream must wrap it.
    patched_storage.side_effect = boto.exception.GSDataError('boom')
    test_artifact = base.BaseArtifact('test_artifact')
    uploader_object = uploader.GCSUploader(
        'gs://fake_bucket/', 'no_keyfile', 'client_id', FakeStampManager())
    uploader_object._boto_configured = True
    with self.assertRaises(errors.RetryableError):
        uploader_object._UploadStream(
            test_artifact.OpenStream(), 'gs://fake_bucket/remote/path')