def get_native_app_authorizer(client_id):
    """Build a RefreshTokenAuthorizer for the Globus Transfer API.

    Loads previously saved tokens if available; otherwise runs the
    Native App login flow (requesting refresh tokens) and tries to
    save the result for next time.

    Args:
        client_id: Globus Auth native-app client UUID (string).

    Returns:
        globus_sdk.RefreshTokenAuthorizer bound to the
        'transfer.api.globus.org' tokens.
    """
    tokens = None
    client = NativeClient(client_id=client_id, app_name=APP_NAME)
    try:
        # If we already have tokens, load and use them.
        # NOTE: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception.
        tokens = client.load_tokens(requested_scopes=SCOPES)
    except Exception:
        # Best-effort load; fall through to an interactive login.
        pass

    if not tokens:
        tokens = client.login(requested_scopes=SCOPES, refresh_tokens=True)
        try:
            client.save_tokens(tokens)
        except Exception:
            # Saving is best-effort; a failed save only means the user
            # must log in again on the next run.
            pass

    transfer_tokens = tokens['transfer.api.globus.org']

    auth_client = globus_sdk.NativeAppAuthClient(client_id=client_id)

    return globus_sdk.RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        auth_client,
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])
def main():
    """Run (or resume) a Globus transfer from SOURCE to DESTINATION.

    Loads or acquires tokens, skips the run if the previous transfer
    task is still in a non-terminal state, validates/creates the
    endpoint paths, submits a recursive checksum-synced transfer,
    records the task, and prints a monitoring URL.
    """
    tokens = None
    client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    try:
        # If we already have tokens, load and use them.
        # BUGFIX: was `requested_scope=SCOPES` (singular), which raised a
        # TypeError that the bare `except:` silently swallowed — so cached
        # tokens were never loaded and every run forced a fresh login.
        tokens = client.load_tokens(requested_scopes=SCOPES)
    except Exception:
        pass

    if not tokens:
        # If we need to get tokens, start the Native App authentication
        # process; we need to specify that we want refresh tokens.
        tokens = client.login(requested_scopes=SCOPES, refresh_tokens=True)
        try:
            client.save_tokens(tokens)
        except Exception:
            # Best-effort save; failure only forces a login next run.
            pass

    transfer = setup_transfer_client(tokens['transfer.api.globus.org'])

    try:
        task_data = load_data_from_file(DATA_FILE)['task']
        task = transfer.get_task(task_data['task_id'])
        if task['status'] not in PREVIOUS_TASK_RUN_CASES:
            print('The last transfer status is {}, skipping run...'.format(
                task['status']))
            sys.exit(1)
    except KeyError:
        # Ignore if there is no previous task
        pass

    check_endpoint_path(transfer, SOURCE_ENDPOINT, SOURCE_PATH)
    if CREATE_DESTINATION_FOLDER:
        create_destination_directory(transfer, DESTINATION_ENDPOINT,
                                     DESTINATION_PATH)
    else:
        check_endpoint_path(transfer, DESTINATION_ENDPOINT, DESTINATION_PATH)

    tdata = TransferData(transfer,
                         SOURCE_ENDPOINT,
                         DESTINATION_ENDPOINT,
                         label=TRANSFER_LABEL,
                         sync_level="checksum")
    tdata.add_item(SOURCE_PATH, DESTINATION_PATH, recursive=True)

    task = transfer.submit_transfer(tdata)
    # Remember the task id so the next run can detect an in-flight transfer.
    save_data_to_file(DATA_FILE, 'task', task.data)

    print('Transfer has been started from\n  {}:{}\nto\n  {}:{}'.format(
        SOURCE_ENDPOINT,
        SOURCE_PATH,
        DESTINATION_ENDPOINT,
        DESTINATION_PATH))
    url_string = 'https://globus.org/app/transfer?' + \
        six.moves.urllib.parse.urlencode({
            'origin_id': SOURCE_ENDPOINT,
            'origin_path': SOURCE_PATH,
            'destination_id': DESTINATION_ENDPOINT,
            'destination_path': DESTINATION_PATH
        })
    print('Visit the link below to see the changes:\n{}'.format(url_string))
def tasks():
    """List the tasks for the configured Globus Search index.

    Logs in (or reuses saved tokens), builds a SearchClient from the
    search-API access token, and prints the task list for INDEX.
    """
    native_client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    native_client.login(requested_scopes=SCOPES)
    saved_tokens = native_client.load_tokens(requested_scopes=SCOPES)

    search_token = saved_tokens['search.api.globus.org']['access_token']
    search_client = globus_sdk.SearchClient(
        authorizer=globus_sdk.AccessTokenAuthorizer(search_token))

    print(search_client.get_task_list(INDEX))
    print('Finished')
def test_json_token_storage(mock_tokens, mock_revoke, monkeypatch): cli = NativeClient(client_id=str(uuid.uuid4()), token_storage=JSONTokenStorage()) # Mock actual call to open(). Catch the data 'written' and use it in the # load function. This is a cheap and easy (and hacky) way to test that the # stuff we get read was the same as the stuff written in. monkeypatch.setattr(os.path, 'exists', lambda x: True) mo = mock_open() with patch(BUILTIN_OPEN, mo): cli.save_tokens(mock_tokens) written = ''.join([c[1][0] for c in mo().write.mock_calls]) with patch(BUILTIN_OPEN, mock_open(read_data=written)): tokens = cli.load_tokens() assert tokens == MOCK_TOKEN_SET mock_remove = Mock() with patch('os.remove', mock_remove): cli.logout() assert mock_remove.called
def __init__(self, *args, **kwargs): super(GlobusContentsManager, self).__init__(*args, **kwargs) # TODO: Make this check for tokens in the environment (i.e., JupyterHub) # Then load via Native App. Figure out login. client = NativeClient(client_id=self.client_id, app_name=self.app_name) tokens = client.load_tokens() transfer_access_token = tokens['transfer.api.globus.org'][ 'access_token'] # then use that token to create an AccessTokenAuthorizer transfer_auth = globus_sdk.AccessTokenAuthorizer(transfer_access_token) # finally, use the authorizer to create a TransferClient object self.transfer_client = globus_sdk.TransferClient( authorizer=transfer_auth) self.transfer_client.endpoint_autoactivate(self.globus_remote_endpoint) # TODO: How to handle caching dir? Needs to be writable. On laptops, # tmp dirs may not be accessible by GCP #self._cache_dir = tempfile.TemporaryDirectory() self._cache_dir = '/Users/rpwagner/tmp/jupyter_contents_cache'
def delete(filename):
    """Delete the subject named in an ingest document from INDEX.

    Reads the ingest doc (JSON) from *filename*, logs in to Globus
    Search, and removes the doc's subject from the configured index.
    """
    with open(filename) as doc_file:
        ingest_doc = json.load(doc_file)

    app = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    app.login(requested_scopes=SCOPES)
    creds = app.load_tokens(requested_scopes=SCOPES)
    authorizer = globus_sdk.AccessTokenAuthorizer(
        creds['search.api.globus.org']['access_token'])
    search_client = globus_sdk.SearchClient(authorizer=authorizer)

    subject = ingest_doc['ingest_data']['subject']
    print(subject)
    print('Deleting from "{}"?'.format(
        search_client.get_index(INDEX).data['display_name']))
    # Interactive confirmation is currently disabled:
    #user_input = input('Y/N> ')
    #if user_input in ['yes', 'Y', 'y', 'yarr']:
    result = search_client.delete_subject(INDEX, subject)
    print('Finished')
    print(result)
def listind():
    """Print a fixed-width table of every record in the search index.

    Searches INDEX with a match-all query and prints one row per
    result: title, data type, dataframe type, row/col counts,
    human-readable size, and filename.
    """
    app = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    app.login(requested_scopes=SCOPES)
    creds = app.load_tokens(requested_scopes=SCOPES)
    authorizer = globus_sdk.AccessTokenAuthorizer(
        creds['search.api.globus.org']['access_token'])
    search_client = globus_sdk.SearchClient(authorizer=authorizer)

    results = search_client.search(index_id=INDEX, q='*')

    print('Title Data Dataframe Rows Cols Size Filename')
    # Width.precision specs keep each column fixed-width and truncated.
    row_fmt = '{:21.20}{:11.10}{:10.9}{:7.6}{:7.6}{:7.6}{:.16}'
    for entry in results['gmeta']:
        record = entry['content'][0]
        value, unit = get_size(record['remote_file_manifest']['length'])
        size = str(int(value)) + ' ' + unit
        print(row_fmt.format(
            record['dc']['titles'][0]['title'],
            record['ncipilot']['data_type'],
            record['ncipilot']['dataframe_type'],
            str(record['ncipilot']['numrows']),
            str(record['ncipilot']['numcols']),
            size,
            record['remote_file_manifest']['filename']))
# Alternatively, with no local server, we simply wait for a code. The # more likely case there is the user enters garbage which results in an # invalid grant. no_local_server=True, ) print('Login Successful') except LocalServerError as lse: # There was some problem with the local server, likely the user clicked # "Decline" on the consents page print('Login Unsuccessful: {}'.format(str(lse))) except globus_sdk.exc.AuthAPIError as aapie: # Something went wrong with getting the auth code print('Login Unsuccessful: {}'.format(aapie)) """ Token Expiration """ # Let's start off by manually expiring some tokens client.save_tokens({ 'auth.globus.org': { 'scope': 'openid profile', 'access_token': '<fake_access_token>', 'expires_at_seconds': 0, 'resource_server': 'auth.globus.org', } }) try: client.load_tokens(requested_scopes=['openid', 'profile']) except TokensExpired as te: print('Load Failure, tokens expired for: {}'.format(te))
def get_tokens():
    """Load and return the saved Globus tokens for this app.

    Returns:
        Token dict keyed by resource server, as produced by
        NativeClient.load_tokens().
    """
    return NativeClient(client_id=CLIENT_ID,
                        app_name=APP_NAME).load_tokens()
import os
import json

from fair_research_login import NativeClient

CLIENT_ID = 'e54de045-d346-42ef-9fbc-5d466f4a00c6'
APP_NAME = 'My App'
SCOPES = 'openid email profile urn:globus:auth:scope:transfer.api.globus.org:all urn:globus:auth:scope:search.api.globus.org:all'
CONFIG_FILE = 'tokens-data.json'

tokens = None

# Try to load tokens from the local file (native app config).
client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
try:
    # NOTE: was a bare `except:` below, which also swallowed
    # KeyboardInterrupt/SystemExit; narrowed to Exception.
    tokens = client.load_tokens(requested_scopes=SCOPES)
except Exception:
    # No usable saved tokens; fall through to an interactive login.
    pass

if not tokens:
    # If no tokens, start the Native App authentication process to get them.
    tokens = client.login(requested_scopes=SCOPES, refresh_tokens=False)
    try:
        # Save the tokens for future runs.
        client.save_tokens(tokens)
        # Expose the tokens to child processes via an environment variable.
        os.environ['GLOBUS_DATA'] = json.dumps(tokens, indent=4,
                                               sort_keys=True)
    except Exception:
        # Persisting tokens is best-effort only.
        pass
globus_sdk.auth.token_response.OAuthTokenResponse.by_resource_server No need to check expiration, that's handled by NativeClient. """ with open(self.FILENAME) as fh: return json.load(fh) def clear_tokens(self): """ Delete tokens from where they are stored. Before this method is called, tokens will have been revoked. This is both for cleanup and to ensure inactive tokens are not accidentally loaded in the future. """ os.remove(self.FILENAME) # Provide an instance of your config object to Native Client. The only # restrictions are your client MUST have the three methods above, # or it will throw an AttributeError. app = NativeClient(client_id='7414f0b4-7d05-4bb6-bb00-076fa3f17cf5', token_storage=MyTokenStorage()) # Calls read_tokens() then write_tokens() app.login() # Calls read_tokens() app.load_tokens() # Calls clear_tokens() app.logout()
requested_scopes=['openid', 'profile'], # You can turn off the local server if it cannot be used for some reason no_local_server=False, # You can also turn off automatically opening the Auth URL no_browser=False, # refresh tokens are fully supported, but optional refresh_tokens=True, ) # Calling login() twice will load tokens instead of initiating an oauth flow, # as long as the requested scopes match and the tokens have not expired. assert tokens == client.login(requested_scopes=['openid', 'profile']) # You can also load tokens explicitly. This will also load tokens if you have # done other logins assert tokens == client.load_tokens() # If you want to disregard other saved tokens assert tokens == client.load_tokens(requested_scopes=['openid', 'profile']) # Loading by scope is also supported tokens_by_scope = client.load_tokens_by_scope() assert set(tokens_by_scope.keys()) == {'openid', 'profile'} # Authorizers automatically choose a refresh token authorizer if possible, # and will automatically save new refreshed tokens when they expire. ac_authorizer = client.get_authorizers()['auth.globus.org'] # Also supported ac_authorizer = client.get_authorizers_by_scope()['openid'] # Example client usage: auth_cli = AuthClient(authorizer=ac_authorizer)