def testTranslateApitoolsResumableUploadException(self):
  """Tests that _TranslateApitoolsResumableUploadException works correctly."""
  gsutil_api = GcsJsonApi(GSMockBucketStorageUri,
                          CreateGsutilLogger('copy_test'),
                          DiscardMessagesQueue())

  gsutil_api.http.disable_ssl_certificate_validation = True
  exc = apitools_exceptions.HttpError({'status': 503}, None, None)
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  self.assertTrue(isinstance(translated_exc, ServiceException))

  gsutil_api.http.disable_ssl_certificate_validation = False
  exc = apitools_exceptions.HttpError({'status': 503}, None, None)
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  self.assertTrue(isinstance(translated_exc, ResumableUploadException))

  exc = apitools_exceptions.HttpError({'status': 429}, None, None)
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  self.assertTrue(isinstance(translated_exc, ResumableUploadException))

  exc = apitools_exceptions.HttpError({'status': 410}, None, None)
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  self.assertTrue(
      isinstance(translated_exc, ResumableUploadStartOverException))

  exc = apitools_exceptions.HttpError({'status': 404}, None, None)
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  self.assertTrue(
      isinstance(translated_exc, ResumableUploadStartOverException))

  exc = apitools_exceptions.HttpError({'status': 401}, None, None)
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  self.assertTrue(isinstance(translated_exc, ResumableUploadAbortException))

  exc = apitools_exceptions.TransferError('Aborting transfer')
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  self.assertTrue(isinstance(translated_exc, ResumableUploadAbortException))

  exc = apitools_exceptions.TransferError('additional bytes left in stream')
  translated_exc = gsutil_api._TranslateApitoolsResumableUploadException(exc)
  self.assertTrue(isinstance(translated_exc, ResumableUploadAbortException))
  self.assertIn('this can happen if a file changes size',
                translated_exc.reason)
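# Summary of the translation behavior exercised above, as implied by the
# assertions (the mapping itself lives in
# _TranslateApitoolsResumableUploadException):
#
#   503 w/ SSL cert validation disabled -> ServiceException
#   503                                 -> ResumableUploadException
#   429                                 -> ResumableUploadException
#   410 or 404                          -> ResumableUploadStartOverException
#   401                                 -> ResumableUploadAbortException
#   apitools TransferError              -> ResumableUploadAbortException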
def testMultipleConfiguredCreds(self):
  with SetBotoConfigForTest([
      ('Credentials', 'gs_oauth2_refresh_token', 'foo'),
      ('Credentials', 'gs_service_client_id', 'bar'),
      ('Credentials', 'gs_service_key_file', 'baz')]):
    try:
      GcsJsonApi(None, self.logger, DiscardMessagesQueue())
      self.fail('Succeeded with multiple types of configured creds.')
    except CommandException as e:
      msg = str(e)
      self.assertIn('types of configured credentials', msg)
      self.assertIn(CredTypes.OAUTH2_USER_ACCOUNT, msg)
      self.assertIn(CredTypes.OAUTH2_SERVICE_ACCOUNT, msg)
def setUp(self):
  """Creates base configuration for integration tests."""
  super(GsUtilIntegrationTestCase, self).setUp()
  self.bucket_uris = []

  # Set up API version and project ID handler.
  self.api_version = boto.config.get_value('GSUtil', 'default_api_version',
                                           '1')

  # Instantiate a JSON API for use by the current integration test.
  self.json_api = GcsJsonApi(BucketStorageUri, logging.getLogger(),
                             DiscardMessagesQueue(), 'gs')

  if util.RUN_S3_TESTS:
    self.nonexistent_bucket_name = (
        'nonexistentbucket-asf801rj3r9as90mfnnkjxpo02')
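# For reference, the api_version lookup above reads a boto config entry of
# this form (the value shown is illustrative; '1' is the fallback default):
#
#   [GSUtil]
#   default_api_version = 1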
def MakeGsUtilApi(cls, debug=0):
  gsutil_api_map = {
      ApiMapConstants.API_MAP: (
          cls.mock_gsutil_api_class_map_factory.GetClassMap()),
      ApiMapConstants.SUPPORT_MAP: {
          'gs': [ApiSelector.XML, ApiSelector.JSON],
          's3': [ApiSelector.XML]
      },
      ApiMapConstants.DEFAULT_MAP: {
          'gs': ApiSelector.JSON,
          's3': ApiSelector.XML
      }
  }

  return CloudApiDelegator(cls.mock_bucket_storage_uri, gsutil_api_map,
                           cls.logger, DiscardMessagesQueue(), debug=debug)
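# Hypothetical usage sketch: the delegator returned above routes each call
# through the API map, so for 'gs' the DEFAULT_MAP selects the mock JSON API
# class from GetClassMap(). The test name and GetBucket call below are
# illustrative, not taken from the suite:
#
#   def testDelegatesToDefaultJsonApi(self):
#     gsutil_api = self.MakeGsUtilApi()
#     gsutil_api.GetBucket('some-bucket', provider='gs')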
def testExactlyOneInvalid(self):
  with SetBotoConfigForTest([
      ('Credentials', 'gs_oauth2_refresh_token', 'foo'),
      ('Credentials', 'gs_service_client_id', None),
      ('Credentials', 'gs_service_key_file', None)]):
    succeeded = False
    try:
      GcsJsonApi(None, self.logger, DiscardMessagesQueue())
      succeeded = True  # If we self.fail() here, the except below will catch
    except:  # pylint: disable=bare-except
      warning_messages = self.log_handler.messages['warning']
      self.assertEqual(1, len(warning_messages))
      self.assertIn('credentials are invalid', warning_messages[0])
      self.assertIn(CredTypes.OAUTH2_USER_ACCOUNT, warning_messages[0])
    if succeeded:
      self.fail('Succeeded with invalid credentials, one configured.')
def ConfigureCommandArgumentParsers(self, subparsers):
  """Configures argparse arguments and argcomplete completers for commands.

  Args:
    subparsers: argparse object that can be used to add parsers for
        subcommands (called just 'commands' in gsutil)
  """
  # This should match the support map for the "ls" command.
  support_map = {
      'gs': [ApiSelector.XML, ApiSelector.JSON],
      's3': [ApiSelector.XML]
  }
  default_map = {'gs': ApiSelector.JSON, 's3': ApiSelector.XML}
  gsutil_api_map = GsutilApiMapFactory.GetApiMap(
      self.gsutil_api_class_map_factory, support_map, default_map)

  logger = CreateGsutilLogger('tab_complete')
  gsutil_api = CloudApiDelegator(
      self.bucket_storage_uri_class, gsutil_api_map, logger,
      DiscardMessagesQueue(), debug=0)

  for command in set(self.command_map.values()):
    command_parser = subparsers.add_parser(
        command.command_spec.command_name, add_help=False)
    if isinstance(command.command_spec.argparse_arguments, dict):
      subcommand_parsers = command_parser.add_subparsers()
      subcommand_argument_dict = command.command_spec.argparse_arguments
      for subcommand, arguments in subcommand_argument_dict.iteritems():
        subcommand_parser = subcommand_parsers.add_parser(
            subcommand, add_help=False)
        self._ConfigureCommandArgumentParserArguments(
            subcommand_parser, arguments, gsutil_api)
    else:
      self._ConfigureCommandArgumentParserArguments(
          command_parser, command.command_spec.argparse_arguments, gsutil_api)
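# A sketch of how the parser tree built above would typically be hooked up
# to argcomplete for tab completion. This is not part of the function; the
# top-level `main_parser` and the `command_runner` instance are hypothetical
# stand-ins for whatever the caller actually uses:
#
#   import argparse
#   import argcomplete
#
#   main_parser = argparse.ArgumentParser(add_help=False)
#   subparsers = main_parser.add_subparsers()
#   command_runner.ConfigureCommandArgumentParsers(subparsers)
#   argcomplete.autocomplete(main_parser)  # argcomplete's standard entry point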
def _test_rewrite_resume_or_restart(self, initial_dec_key, initial_enc_key,
                                    new_dec_key=None, new_enc_key=None):
  """Tests that the rewrite command restarts if the object's key changed.

  Args:
    initial_dec_key: Initial key the object is encrypted with, used as
        decryption key in the first rewrite call.
    initial_enc_key: Initial encryption key to rewrite the object with,
        used as encryption key in the first rewrite call.
    new_dec_key: Decryption key for the second rewrite call; if specified,
        object will be overwritten with a new encryption key in between the
        first and second rewrite calls, and this key will be used for the
        second rewrite call.
    new_enc_key: Encryption key for the second rewrite call; if specified,
        this key will be used for the second rewrite call, otherwise the
        initial key will be used.

  Returns:
    None
  """
  if self.test_api == ApiSelector.XML:
    return unittest.skip('Rewrite API is only supported in JSON.')
  bucket_uri = self.CreateBucket()
  # maxBytesPerCall must be >= 1 MiB, so create an object > 2 MiB because we
  # need 2 responses from the service: 1 success, 1 failure prior to
  # completion.
  object_uri = self.CreateObject(bucket_uri=bucket_uri,
                                 object_name='foo',
                                 contents=('12' * ONE_MIB) + 'bar',
                                 prefer_json_api=True,
                                 encryption_key=initial_dec_key)
  gsutil_api = GcsJsonApi(BucketStorageUri, logging.getLogger(),
                          DiscardMessagesQueue(), self.default_provider)
  with SetBotoConfigForTest([('GSUtil', 'decryption_key1',
                              initial_dec_key)]):
    src_obj_metadata = gsutil_api.GetObjectMetadata(
        object_uri.bucket_name, object_uri.object_name,
        provider=self.default_provider,
        fields=['bucket', 'contentType', 'etag', 'name'])
  dst_obj_metadata = src_obj_metadata
  tracker_file_name = GetRewriteTrackerFilePath(src_obj_metadata.bucket,
                                                src_obj_metadata.name,
                                                dst_obj_metadata.bucket,
                                                dst_obj_metadata.name,
                                                self.test_api)
  decryption_tuple = CryptoKeyWrapperFromKey(initial_dec_key)
  decryption_tuple2 = CryptoKeyWrapperFromKey(new_dec_key or initial_dec_key)
  encryption_tuple = CryptoKeyWrapperFromKey(initial_enc_key)
  encryption_tuple2 = CryptoKeyWrapperFromKey(new_enc_key or initial_enc_key)
  try:
    try:
      gsutil_api.CopyObject(
          src_obj_metadata, dst_obj_metadata,
          progress_callback=HaltingRewriteCallbackHandler(ONE_MIB * 2).call,
          max_bytes_per_call=ONE_MIB,
          decryption_tuple=decryption_tuple,
          encryption_tuple=encryption_tuple)
      self.fail('Expected RewriteHaltException.')
    except RewriteHaltException:
      pass

    # Tracker file should be left over.
    self.assertTrue(os.path.exists(tracker_file_name))

    if new_dec_key:
      # Recreate the object with a different encryption key.
      self.CreateObject(bucket_uri=bucket_uri,
                        object_name='foo',
                        contents=('12' * ONE_MIB) + 'bar',
                        prefer_json_api=True,
                        encryption_key=new_dec_key,
                        gs_idempotent_generation=urigen(object_uri))

    with SetBotoConfigForTest([('GSUtil', 'decryption_key1',
                                new_dec_key or initial_dec_key)]):
      original_md5 = gsutil_api.GetObjectMetadata(
          src_obj_metadata.bucket, src_obj_metadata.name,
          fields=['customerEncryption', 'md5Hash']).md5Hash

    if new_dec_key or new_enc_key:
      # Keys changed, rewrite should be restarted.
      progress_callback = EnsureRewriteRestartCallbackHandler(ONE_MIB).call
    else:
      # Keys are the same, rewrite should be resumed.
      progress_callback = EnsureRewriteResumeCallbackHandler(ONE_MIB * 2).call

    # Now resume. Callback ensures the appropriate resume/restart behavior.
    gsutil_api.CopyObject(src_obj_metadata, dst_obj_metadata,
                          progress_callback=progress_callback,
                          max_bytes_per_call=ONE_MIB,
                          decryption_tuple=decryption_tuple2,
                          encryption_tuple=encryption_tuple2)

    # Copy completed; tracker file should be deleted.
    self.assertFalse(os.path.exists(tracker_file_name))

    final_enc_key = new_enc_key or initial_enc_key
    with SetBotoConfigForTest([('GSUtil', 'encryption_key',
                                final_enc_key)]):
      self.assertEqual(
          original_md5,
          gsutil_api.GetObjectMetadata(
              dst_obj_metadata.bucket, dst_obj_metadata.name,
              fields=['customerEncryption', 'md5Hash']).md5Hash,
          "Error: Rewritten object's hash doesn't match source object.")
  finally:
    # Clean up if something went wrong.
    DeleteTrackerFile(tracker_file_name)
def MaybeCheckForAndOfferSoftwareUpdate(self, command_name, debug):
  """Checks the last time we checked for an update and offers one if needed.

  An update is offered if the time since the last check exceeds the
  configured threshold and a newer version is available.

  Args:
    command_name: The name of the command being run.
    debug: Debug level to pass in to boto connection (range 0..3).

  Returns:
    True if the user decides to update.
  """
  # Don't try to interact with user if:
  # - gsutil is not connected to a tty (e.g., if being run from cron);
  # - user is running gsutil -q;
  # - user is running the config command (which could otherwise attempt to
  #   check for an update for a user running behind a proxy, who has not yet
  #   configured gsutil to go through the proxy; for such users we need the
  #   first connection attempt to be made by the gsutil config command);
  # - user is running the version command (which gets run when using
  #   gsutil -D, which would prevent users with proxy config problems from
  #   sending us gsutil -D output);
  # - user is running the update command (which could otherwise cause an
  #   additional note that an update is available when the user is already
  #   trying to perform an update);
  # - user specified gs_host (which could be a different, non-production
  #   service instance, in which case credentials won't work for checking
  #   the gsutil tarball);
  # - user is using a Cloud SDK install (which should only be updated via
  #   gcloud components update).
  logger = logging.getLogger()
  gs_host = boto.config.get('Credentials', 'gs_host', None)
  if (not IsRunningInteractively()
      or command_name in ('config', 'update', 'ver', 'version')
      or not logger.isEnabledFor(logging.INFO)
      or gs_host
      or os.environ.get('CLOUDSDK_WRAPPER') == '1'):
    return False

  software_update_check_period = boto.config.getint(
      'GSUtil', 'software_update_check_period', 30)
  # Setting software_update_check_period to 0 means periodic software
  # update checking is disabled.
  if software_update_check_period == 0:
    return False

  cur_ts = int(time.time())
  if not os.path.isfile(LAST_CHECKED_FOR_GSUTIL_UPDATE_TIMESTAMP_FILE):
    # Set last_checked_ts from date of VERSION file, so if the user installed
    # an old copy of gsutil it will get noticed (and an update offered) the
    # first time they try to run it.
    last_checked_ts = GetGsutilVersionModifiedTime()
    with open(LAST_CHECKED_FOR_GSUTIL_UPDATE_TIMESTAMP_FILE, 'w') as f:
      f.write(str(last_checked_ts))
  else:
    try:
      with open(LAST_CHECKED_FOR_GSUTIL_UPDATE_TIMESTAMP_FILE, 'r') as f:
        last_checked_ts = int(f.readline())
    except (TypeError, ValueError):
      return False

  if (cur_ts - last_checked_ts >
      software_update_check_period * SECONDS_PER_DAY):
    # Create a credential-less gsutil API to check for the public
    # update tarball.
    gsutil_api = GcsJsonApi(self.bucket_storage_uri_class, logger,
                            DiscardMessagesQueue(),
                            credentials=NoOpCredentials(), debug=debug)
    cur_ver = LookUpGsutilVersion(gsutil_api, GSUTIL_PUB_TARBALL)
    with open(LAST_CHECKED_FOR_GSUTIL_UPDATE_TIMESTAMP_FILE, 'w') as f:
      f.write(str(cur_ts))
    (g, m) = CompareVersions(cur_ver, gslib.VERSION)
    if m:
      print '\n'.join(textwrap.wrap(
          'A newer version of gsutil (%s) is available than the version '
          'you are running (%s). NOTE: This is a major new version, so it '
          'is strongly recommended that you review the release note '
          'details at %s before updating to this version, especially if '
          'you use gsutil in scripts.'
          % (cur_ver, gslib.VERSION, RELEASE_NOTES_URL)))
      if gslib.IS_PACKAGE_INSTALL:
        return False
      print
      answer = raw_input('Would you like to update [y/N]? ')
      return answer and answer.lower()[0] == 'y'
    elif g:
      print '\n'.join(textwrap.wrap(
          'A newer version of gsutil (%s) is available than the version '
          'you are running (%s). A detailed log of gsutil release changes '
          'is available at %s if you would like to read them before '
          'updating.' % (cur_ver, gslib.VERSION, RELEASE_NOTES_URL)))
      if gslib.IS_PACKAGE_INSTALL:
        return False
      print
      answer = raw_input('Would you like to update [Y/n]? ')
      return not answer or answer.lower()[0] != 'n'
  return False
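# A hedged reading of the (g, m) tuple consumed above, inferred purely from
# its use here rather than from CompareVersions' definition: g appears to be
# True whenever cur_ver is newer than the running version, and m when the
# difference is a major-version bump (which triggers the stronger warning
# and the default-No prompt). Illustrative values only:
#
#   (g, m) = CompareVersions('5.0', '4.34')   # presumably g=True, m=True
#   (g, m) = CompareVersions('4.35', '4.34')  # presumably g=True, m=False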