def test_basic_operations(self):
    """Round-trip credentials through the store: put, get, then delete."""
    store = multiprocess_file_storage.MultiprocessFileStorage(
        self.filename, 'basic')

    # Persist the credentials and read them straight back.
    store.put(_create_test_credentials())
    fetched = store.get()
    self.assertIsNotNone(fetched)
    self.assertEqual('foo', fetched.access_token)

    # Drop the backend's in-memory cache so the next get() must re-read
    # the file, proving the credentials were actually written to disk.
    store._backend._credentials = {}
    fetched = store.get()
    self.assertIsNotNone(fetched)
    self.assertEqual('foo', fetched.access_token)

    # After deletion, the store should report no credentials.
    store.delete()
    self.assertIsNone(store.get())
def child_process_func(
        die_event, ready_event, check_event):  # pragma: NO COVER
    """Child-process body: win the refresh race while the parent waits.

    Signals `ready_event` once the store lock is held, then blocks on
    `check_event` before refreshing, so the parent can observe the lock.
    NOTE(review): relies on `self` and `actual_token` being in the
    enclosing scope.
    """
    child_store = multiprocess_file_storage.MultiprocessFileStorage(
        self.filename, 'multi-process')
    creds = child_store.get()
    self.assertIsNotNone(creds)

    # Make sure this thread gets to refresh first: wrap acquire_lock so
    # the parent is notified the instant the lock is taken, and the
    # refresh pauses until the parent gives the go-ahead.
    real_acquire_lock = child_store.acquire_lock

    def signalling_acquire_lock(*args, **kwargs):
        outcome = real_acquire_lock(*args, **kwargs)
        ready_event.set()
        check_event.wait()
        return outcome

    creds.store.acquire_lock = signalling_acquire_lock

    mock_http = _generate_token_response_http(actual_token)
    creds.refresh(mock_http)
    self.assertEqual(creds.access_token, actual_token)

    # Verify mock http.
    self._verify_refresh_payload(mock_http, creds)
def SetUpJsonCredentialsAndCache(api, logger, credentials=None):
    """Helper to ensure each GCS API client shares the same credentials."""
    api.credentials = (credentials or _CheckAndGetCredentials(logger) or
                       NoOpCredentials())
    # Set credential cache so that we don't have to get a new access token for
    # every call we make. All GCS APIs use the same credentials as the JSON
    # API, so we use its version in the key for caching access tokens.
    credential_store_key = (GetCredentialStoreKey(api.credentials,
                                                  GetGcsJsonApiVersion()))
    api.credentials.set_store(
        multiprocess_file_storage.MultiprocessFileStorage(
            GetCredentialStoreFilename(), credential_store_key))

    # The cached entry for this credential often contains more context than
    # what we can construct from boto config attributes (e.g. for a user
    # credential, the cached version might also contain a RAPT token and
    # expiry info). Prefer the cached credential if present.
    cached_cred = None
    if not isinstance(api.credentials, NoOpCredentials):
        # A NoOpCredentials object doesn't actually have a store attribute.
        cached_cred = api.credentials.store.get()
    # As of gsutil 4.31, we never use the OAuth2Credentials class for
    # credentials directly; rather, we use subclasses (user credentials were
    # the only ones left using it, but they now use
    # Oauth2WithReauthCredentials). If we detect that a cached credential is
    # an instance of OAuth2Credentials and not a subclass of it (as might
    # happen when transitioning to version v4.31+), we don't fetch it from the
    # cache. This results in our new-style credential being refreshed and
    # overwriting the old credential cache entry in our credstore.
    #
    # Use an identity comparison (`is not`) for the exact-type check, per
    # Python idiom; isinstance() would be wrong here because subclasses of
    # OAuth2Credentials must be accepted from the cache.
    if (cached_cred and
            type(cached_cred) is not oauth2client.client.OAuth2Credentials):
        api.credentials = cached_cred
def __init__(self, scope):
    """Set up client-secrets-based OAuth2 storage and flow for `scope`."""
    super(_ClientSecretsAuthenticator, self).__init__(scope)

    self._client_secrets_file = os.path.join(FLAGS.gcloud_credentials_dir,
                                             'client_secrets.json')
    # The secrets file must exist; checking it also confirms that the
    # credentials directory itself is present.
    if not os.path.isfile(self._client_secrets_file):
        raise _AuthenticatorError(self._HelpMessage())

    # File used to cache credentials locally between runs.
    self._credentials_filename = os.path.join(
        FLAGS.gcloud_credentials_dir, 'google_cloud_credentials.dat')

    # The second argument is a "client_id" that is only used for keying
    # credentials in the multistore.
    storage_key = '{}-{}-{}'.format(gcloud_constants.OAUTH2_CLIENT_ID,
                                    gcloud_constants.USER_AGENT,
                                    self._scope)
    self._storage = multiprocess_file_storage.MultiprocessFileStorage(
        self._credentials_filename, storage_key)

    self._flow = oauth2_client.flow_from_clientsecrets(
        self._client_secrets_file, self._scope, message=self._HelpMessage())
def test_multi_process_refresh(self):
    # This will test that two processes attempting to refresh credentials
    # will only refresh once: the child refreshes while holding the lock,
    # and the parent's subsequent refresh is satisfied from the store
    # without issuing a token request.
    store = multiprocess_file_storage.MultiprocessFileStorage(
        self.filename, 'multi-process')
    credentials = _create_test_credentials()
    credentials.set_store(store)
    store.put(credentials)
    # Token the child process will obtain from its (mocked) refresh.
    actual_token = 'b'

    def child_process_func(
            die_event, ready_event, check_event):  # pragma: NO COVER
        store = multiprocess_file_storage.MultiprocessFileStorage(
            self.filename, 'multi-process')
        credentials = store.get()
        self.assertIsNotNone(credentials)

        # Make sure this thread gets to refresh first: signal the parent
        # once the lock is held, then wait for the parent's go-ahead.
        original_acquire_lock = store.acquire_lock

        def replacement_acquire_lock(*args, **kwargs):
            result = original_acquire_lock(*args, **kwargs)
            ready_event.set()
            check_event.wait()
            return result

        credentials.store.acquire_lock = replacement_acquire_lock

        http = _generate_token_response_http(actual_token)
        credentials.refresh(http)
        self.assertEqual(credentials.access_token, actual_token)

        # Verify mock http.
        self._verify_refresh_payload(http, credentials)

    check_event = multiprocessing.Event()
    with scoped_child_process(child_process_func, check_event=check_event):
        # The lock should be currently held by the child process.
        self.assertFalse(
            store._backend._process_lock.acquire(blocking=False))
        check_event.set()

        # Parent refresh: the mocked response deliberately carries a
        # different token so we can tell whose refresh "won".
        http = _generate_token_response_http('not ' + actual_token)
        credentials.refresh(http=http)

        # The child process will refresh first, so we should end up
        # with `actual_token` as the token.
        self.assertEqual(credentials.access_token, actual_token)
        # Make sure the refresh did not make a request.
        self.assertEqual(http.requests, 0)

    # The persisted credential must carry the child's token as well.
    retrieved = store.get()
    self.assertEqual(retrieved.access_token, actual_token)
def test_multiprocess_file_storage_credestore_permissions(self):
    """The credstore file should be created owner-only (mode 0600)."""
    credentials = 'dummy-string'
    storage = multiprocess_file_storage.MultiprocessFileStorage(
        FILENAME, credentials)
    # Acquiring and releasing the lock forces the backing file to be
    # created on disk.
    storage.acquire_lock()
    storage.release_lock()
    self.assertTrue(os.path.exists(FILENAME))

    # File modes are only meaningful on POSIX systems.
    if os.name == 'posix':  # pragma: NO COVER
        mode = os.stat(FILENAME).st_mode
        # Fixed: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual('0o600', oct(stat.S_IMODE(mode)))
def test_single_process_refresh(self):
    """A single-process refresh persists the new token through the store."""
    storage = multiprocess_file_storage.MultiprocessFileStorage(
        self.filename, 'single-process')
    creds = _create_test_credentials()
    creds.set_store(storage)

    mock_http = _generate_token_response_http()
    creds.refresh(mock_http)
    self.assertEqual(creds.access_token, 'new_token')

    # The refreshed token must have been written back to the store.
    self.assertEqual(storage.get().access_token, 'new_token')

    # Verify mocks.
    self._verify_refresh_payload(mock_http, creds)
def test_read_only_file_fail_lock(self):
    """If another process holds the lock, the store degrades to read-only."""
    creds = _create_test_credentials()

    def hold_lock(die_event, ready_event):  # pragma: NO COVER
        # Grab the interprocess lock and keep it until told to die,
        # preventing the parent process from acquiring it.
        lock = fasteners.InterProcessLock(
            '{0}.lock'.format(self.filename))
        with lock:
            ready_event.set()
            die_event.wait()

    with scoped_child_process(hold_lock):
        store = multiprocess_file_storage.MultiprocessFileStorage(
            self.filename, 'fail-lock')
        store.put(creds)
        self.assertTrue(store._backend._read_only)

    # These credentials should still be in the store's memory-only cache.
    self.assertIsNotNone(store.get())
def CredentialsFromFile(path, client_info, oauth2client_args=None):
    """Read credentials from a file."""
    # Build the storage key from client id, user agent, and scope(s);
    # a list of scopes is flattened to a colon-separated string.
    scope_part = client_info['scope']
    if not isinstance(scope_part, six.string_types):
        scope_part = ':'.join(scope_part)
    storage_key = (client_info['client_id'] + client_info['user_agent'] +
                   scope_part)

    if _NEW_FILESTORE:
        credential_store = multiprocess_file_storage.MultiprocessFileStorage(
            path, storage_key)
    else:
        credential_store = multistore_file.get_credential_storage_custom_string_key(  # noqa
            path, storage_key)

    if hasattr(FLAGS, 'auth_local_webserver'):
        FLAGS.auth_local_webserver = False

    credentials = credential_store.get()
    if credentials is None or credentials.invalid:
        print('Generating new OAuth credentials ...')
        # If authorization fails, we want to retry, rather than let this
        # cascade up and get caught elsewhere. If users want out of the
        # retry loop, they can ^C.
        for _ in range(20):
            try:
                flow = oauth2client.client.OAuth2WebServerFlow(**client_info)
                flags = _GetRunFlowFlags(args=oauth2client_args)
                credentials = tools.run_flow(flow, credential_store, flags)
                break
            except (oauth2client.client.FlowExchangeError, SystemExit) as e:
                # Here SystemExit is "no credential at all", and the
                # FlowExchangeError is "invalid" -- usually because
                # you reused a token.
                print('Invalid authorization: %s' % (e, ))
            except httplib2.HttpLib2Error as e:
                print('Communication error: %s' % (e, ))
                raise exceptions.CredentialsError(
                    'Communication error creating credentials: %s' % e)
    return credentials
def _GetCredentialStore(credentials_filename, key_id, scopes):
    """Build the multiprocess credential store for the given key and scopes."""
    resolved_filename = _GetCredentialsFilename(credentials_filename)
    # Storage entries are keyed by "<key_id>#<scopes>".
    return multiprocess_file_storage.MultiprocessFileStorage(
        resolved_filename, '{}#{}'.format(key_id, scopes))