def testContainsWildcardNoMatches(self, mock_CreateWildcardIterator,
                                  mock_gsutil_api):
  """An empty wildcard expansion must raise CommandException."""
  mock_CreateWildcardIterator.return_value = iter([])
  logger = CreateOrGetGsutilLogger('copy_test')
  with self.assertRaises(CommandException):
    ExpandUrlToSingleBlr('gs://test/hello*/', mock_gsutil_api, 'project_id',
                         False, logger)
def testWithSlashNoMatch(self, mock_gsutil_api):
  """A trailing-slash URL with no listed objects still counts as a container."""
  mock_gsutil_api.ListObjects.return_value = iter([])
  expanded_url, is_container = ExpandUrlToSingleBlr(
      'gs://test/folder/', mock_gsutil_api, 'project_id', False,
      CreateOrGetGsutilLogger('copy_test'))
  self.assertTrue(is_container)
  self.assertEqual(expanded_url, StorageUrlFromString('gs://test/folder/'))
def testContainsWildcardMultipleMatches(self, mock_CreateWildcardIterator,
                                        mock_gsutil_api):
  """A wildcard expanding to multiple objects must raise CommandException."""
  matches = [
      BucketListingObject(StorageUrlFromString('gs://test/helloworld')),
      BucketListingObject(StorageUrlFromString('gs://test/helloworld2')),
  ]
  mock_CreateWildcardIterator.return_value = iter(matches)
  with self.assertRaises(CommandException):
    ExpandUrlToSingleBlr('gs://test/hello*/', mock_gsutil_api, 'project_id',
                         False, CreateOrGetGsutilLogger('copy_test'))
def testTranslateApitoolsResumableUploadException(self):
  """Tests that _TranslateApitoolsResumableUploadException works correctly.

  Verifies the mapping from apitools HTTP/transfer errors to gsutil's
  resumable-upload exception hierarchy: retryable statuses become
  ResumableUploadException, start-over statuses become
  ResumableUploadStartOverException, and fatal conditions become
  ResumableUploadAbortException.
  """
  gsutil_api = GcsJsonApi(GSMockBucketStorageUri,
                          CreateOrGetGsutilLogger('copy_test'),
                          DiscardMessagesQueue())

  def _Translate(exc):
    # Shorthand for the (private) translation helper under test.
    return gsutil_api._TranslateApitoolsResumableUploadException(exc)

  # With SSL certificate validation disabled, a 503 is surfaced as a generic
  # ServiceException rather than a retryable resumable-upload error.
  gsutil_api.http.disable_ssl_certificate_validation = True
  self.assertIsInstance(
      _Translate(apitools_exceptions.HttpError({'status': 503}, None, None)),
      ServiceException)

  # With validation enabled, 503 and 429 are retryable resumable-upload
  # errors.
  gsutil_api.http.disable_ssl_certificate_validation = False
  self.assertIsInstance(
      _Translate(apitools_exceptions.HttpError({'status': 503}, None, None)),
      ResumableUploadException)
  self.assertIsInstance(
      _Translate(apitools_exceptions.HttpError({'status': 429}, None, None)),
      ResumableUploadException)

  # 410 and 404 mean the upload must be restarted from the beginning.
  for status in (410, 404):
    self.assertIsInstance(
        _Translate(
            apitools_exceptions.HttpError({'status': status}, None, None)),
        ResumableUploadStartOverException)

  # 401 (unauthorized) aborts the upload entirely.
  self.assertIsInstance(
      _Translate(apitools_exceptions.HttpError({'status': 401}, None, None)),
      ResumableUploadAbortException)

  # TransferErrors abort; a size-mismatch message additionally gets a hint
  # about the file changing size mid-upload.
  self.assertIsInstance(
      _Translate(apitools_exceptions.TransferError('Aborting transfer')),
      ResumableUploadAbortException)
  translated_exc = _Translate(
      apitools_exceptions.TransferError('additional bytes left in stream'))
  self.assertIsInstance(translated_exc, ResumableUploadAbortException)
  self.assertIn('this can happen if a file changes size',
                translated_exc.reason)
def testNoSlashPrefixSubstringMatch(self, mock_gsutil_api):
  """A prefix that merely starts with the name is not an existing container."""
  prefix_listing = CloudApi.CsObjectOrPrefix(
      'folderone/', CloudApi.CsObjectOrPrefixType.PREFIX)
  mock_gsutil_api.ListObjects.return_value = iter([prefix_listing])
  expanded_url, is_container = ExpandUrlToSingleBlr(
      'gs://test/folder', mock_gsutil_api, 'project_id', False,
      CreateOrGetGsutilLogger('copy_test'))
  self.assertFalse(is_container)
  self.assertEqual(expanded_url, StorageUrlFromString('gs://test/folder'))
def testLocalFileDirectory(self, mock_StorageUrlFromString, mock_gsutil_api):
  """A local directory URL is treated as an existing destination container."""
  directory_url = mock.Mock()
  # NOTE(review): 'isFileUrl' casing differs from 'IsDirectory'; Mock
  # auto-creates attributes so the test passes either way — confirm which
  # method name ExpandUrlToSingleBlr actually calls.
  directory_url.isFileUrl.return_value = True
  directory_url.IsDirectory.return_value = True
  mock_StorageUrlFromString.return_value = directory_url
  expanded_url, is_container = ExpandUrlToSingleBlr(
      '/home/test', mock_gsutil_api, 'project_id', False,
      CreateOrGetGsutilLogger('copy_test'))
  self.assertTrue(is_container)
  self.assertEqual(expanded_url, directory_url)
def testContainsWildcardMatchesObject(self, mock_CreateWildcardIterator,
                                      mock_gsutil_api):
  """A wildcard resolving to exactly one object expands to that object."""
  matched_url = StorageUrlFromString('gs://test/helloworld')
  mock_CreateWildcardIterator.return_value = iter(
      [BucketListingObject(matched_url)])
  expanded_url, is_container = ExpandUrlToSingleBlr(
      'gs://test/hello*/', mock_gsutil_api, 'project_id', False,
      CreateOrGetGsutilLogger('copy_test'))
  self.assertFalse(is_container)
  self.assertEqual(expanded_url, matched_url)
def testTranslateApitoolsResumableUploadExceptionStreamExhausted(self):
  """Test that StreamExhausted error gets handled.

  StreamExhausted is unrecoverable, so it must translate to an abort, and
  the message should point users at stale tracker files as a likely cause.
  """
  gsutil_api = GcsJsonApi(GSMockBucketStorageUri,
                          CreateOrGetGsutilLogger('copy_test'),
                          DiscardMessagesQueue())
  exc = apitools_exceptions.StreamExhausted('Not enough bytes')
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  # assertIsInstance gives a clearer failure message than
  # assertTrue(isinstance(...)).
  self.assertIsInstance(translated_exc, ResumableUploadAbortException)
  self.assertIn(
      'if this issue persists, try deleting the tracker files'
      ' present under ~/.gsutil/tracker-files/', translated_exc.reason)
def testNoSlashFolderPlaceholder(self, mock_gsutil_api):
  """A legacy '_$folder$' placeholder object marks an existing container."""
  placeholder = CloudApi.CsObjectOrPrefix(
      apitools_messages.Object(name='folder_$folder$'),
      CloudApi.CsObjectOrPrefixType.OBJECT)
  mock_gsutil_api.ListObjects.return_value = iter([placeholder])
  expanded_url, is_container = ExpandUrlToSingleBlr(
      'gs://test/folder', mock_gsutil_api, 'project_id', False,
      CreateOrGetGsutilLogger('copy_test'))
  self.assertTrue(is_container)
  self.assertEqual(expanded_url, StorageUrlFromString('gs://test/folder'))
def setUp(self):
  """Creates sample bucket fixtures and resolves the test project number."""
  super(TestAcl, self).setUp()
  self.sample_uri = self.CreateBucket()
  self.sample_url = StorageUrlFromString(str(self.sample_uri))
  self.logger = CreateOrGetGsutilLogger('acl')
  # Argument to acl ch -p must be the project number, not a name; create a
  # throwaway bucket purely to look the number up.
  lookup_bucket_name = self.CreateBucket().bucket_name
  self._project_number = self.json_api.GetBucket(
      lookup_bucket_name, fields=['projectNumber']).projectNumber
  self._project_test_acl = '%s-%s' % (self._project_team,
                                      self._project_number)
def __init__(self, do_parallel):
  """Builds a minimal Command-like object backed by MockCloudApi.

  Args:
    do_parallel: Whether the fake command reports parallel operations
        enabled.
  """
  self.bucket_storage_uri_class = BucketStorageUri
  # gs speaks JSON and s3 speaks XML; the default map mirrors the support
  # map's only entry per provider.
  api_support_map = {'gs': ['JSON'], 's3': ['XML']}
  api_default_map = {'gs': 'JSON', 's3': 'XML'}
  self.gsutil_api_map = cs_api_map.GsutilApiMapFactory.GetApiMap(
      cs_api_map.GsutilApiClassMapFactory, api_support_map, api_default_map)
  self.logger = CreateOrGetGsutilLogger('FakeCommand')
  self.parallel_operations = do_parallel
  self.failure_count = 0
  self.gsutil_api = MockCloudApi()
  self.multiprocessing_is_available = (
      CheckMultiprocessingAvailableAndInit().is_available)
  self.debug = 0
  self.user_project = None
def testMultithreadingDoesNotLogMacOSWarning(self):
  """A thread-only run must not emit the MacOS multiprocessing warning."""
  logger = CreateOrGetGsutilLogger('FakeCommand')
  log_handler = MockLoggingHandler()
  logger.addHandler(log_handler)
  self._TestRecursiveDepthThreeDifferentFunctions(1, 3)
  macos_message = 'If you experience problems with multiprocessing on MacOS'
  self.assertFalse(
      any(info_line.startswith(macos_message)
          for info_line in log_handler.messages['info']))
  logger.removeHandler(log_handler)
def testSequentialApplyDoesNotRecommendParallelismAtEndIfLastSuggestionInView(
    self, mock_get_term_lines):
  """Exactly two parallelism suggestions are logged for a 22-item run."""
  logger = CreateOrGetGsutilLogger('FakeCommand')
  log_handler = MockLoggingHandler()
  logger.addHandler(log_handler)
  self._RunApply(_ReturnOneValue, range(22), process_count=1, thread_count=1)
  suggestion_count = log_handler.messages['info'].count(
      PARALLEL_PROCESSING_MESSAGE)
  self.assertEqual(suggestion_count, 2)
  logger.removeHandler(log_handler)
def testSequentialApplyRecommendsParallelismAfterThreshold(
    self, mock_get_term_lines):
  """A sequential run past the threshold logs the parallelism suggestion."""
  mock_get_term_lines.return_value = 100
  logger = CreateOrGetGsutilLogger('FakeCommand')
  log_handler = MockLoggingHandler()
  logger.addHandler(log_handler)
  self._RunApply(_ReturnOneValue, range(2), process_count=1, thread_count=1)
  self.assertIn(PARALLEL_PROCESSING_MESSAGE, log_handler.messages['info'])
  logger.removeHandler(log_handler)
def _GetTabCompleteLogger(self):
  """Returns the shared gsutil logger used for tab completion."""
  return CreateOrGetGsutilLogger('tab_complete')
def setUp(self):
  """Initializes the tab-completion test fixture and its logger."""
  super(TestTabComplete, self).setUp()
  self.logger = CreateOrGetGsutilLogger('tab_complete')