def test_create_job(timer_client, start, interval):
    meta = load_response(timer_client.create_job).metadata

    transfer_client = TransferClient()
    transfer_client.get_submission_id = lambda *_0, **_1: {"value": "mock"}
    transfer_data = TransferData(transfer_client, GO_EP1_ID, GO_EP2_ID)

    timer_job = TimerJob.from_transfer_data(transfer_data, start, interval)
    response = timer_client.create_job(timer_job)
    assert response.http_status == 201
    assert response.data["job_id"] == meta["job_id"]

    timer_job = TimerJob.from_transfer_data(dict(transfer_data), start, interval)
    response = timer_client.create_job(timer_job)
    assert response.http_status == 201
    assert response.data["job_id"] == meta["job_id"]

    req_body = json.loads(get_last_request().body)
    if isinstance(start, datetime):
        assert req_body["start"] == start.isoformat()
    else:
        assert req_body["start"] == start
    if isinstance(interval, timedelta):
        assert req_body["interval"] == interval.total_seconds()
    else:
        assert req_body["interval"] == interval
    assert req_body["callback_url"] == slash_join(
        get_service_url("actions"), "/transfer/transfer/run")
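The timer_client fixture is not shown in this snippet; a minimal sketch, assuming it is simply a TimerClient with a no-op authorizer so that globus_sdk._testing's mocked responses can serve the calls (the fixture body is an assumption, not from the original):

import pytest
import globus_sdk

@pytest.fixture
def timer_client():
    # assumption: the transport is mocked by globus_sdk._testing,
    # so a NullAuthorizer is enough here
    return globus_sdk.TimerClient(authorizer=globus_sdk.NullAuthorizer())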
def ls_cc(self, name, id, pw):
    self.print_header(name, id)
    try:
        auth_client = ConfidentialAppAuthClient(client_id=id, client_secret=pw)
        scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
        cc_authorizer = ClientCredentialsAuthorizer(auth_client, scopes)
        transfer_client = TransferClient(authorizer=cc_authorizer)

        results = transfer_client.endpoint_search(filter_scope="my-endpoints")
        endpoint_list = list(results)
        if endpoint_list:
            print("Owned endpoints:")
            for ep in endpoint_list:
                print("{} ({})".format(ep['display_name'], ep['id']))
        else:
            print("(No owned endpoints.)")

        results = transfer_client.endpoint_search(filter_scope="shared-with-me")
        endpoint_list = list(results)
        if endpoint_list:
            print("Shared endpoints:")
            for ep in endpoint_list:
                print("{} ({})".format(ep['display_name'], ep['id']))
        else:
            print("(No shared endpoints.)")
    except AuthAPIError as e:
        print(e)
def login(self):
    '''fetch refresh token, store in dj.config['globus.token']'''
    auth_client = self.auth_client
    print('Please login via: {}'.format(auth_client.oauth2_get_authorize_url()))
    code = input('and enter code:').strip()
    tokens = auth_client.oauth2_exchange_code_for_tokens(code)

    xfer_auth_cfg = tokens.by_resource_server['transfer.api.globus.org']
    xfer_rt = xfer_auth_cfg['refresh_token']
    xfer_at = xfer_auth_cfg['access_token']
    xfer_exp = xfer_auth_cfg['expires_at_seconds']

    xfer_auth = RefreshTokenAuthorizer(xfer_rt, auth_client,
                                       access_token=xfer_at,
                                       expires_at=xfer_exp)
    self.xfer_client = TransferClient(authorizer=xfer_auth)

    custom = dj.config.get('custom', {})
    custom['globus.token'] = xfer_rt
    dj.config['custom'] = custom
def create_shared_endpoint(globus_dict, host_endpoint, host_path,
                           display_name='Globus endpoint',
                           description='description'):
    globus_transfer_token = globus_dict['transfer_token']
    scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"  # unused; kept for reference
    authorizer = globus_sdk.AccessTokenAuthorizer(globus_transfer_token)
    # high-level interface; provides iterators for list responses
    tc = TransferClient(authorizer=authorizer)

    shared_ep_data = {
        "DATA_TYPE": "shared_endpoint",
        "host_endpoint": host_endpoint,
        "host_path": host_path,
        "display_name": display_name,
        # optionally specify additional endpoint fields
        "description": description
    }
    # r = tc.operation_mkdir(host_id, path=share_path)
    # TODO: create the directory directly from here instead of at the local level?
    tc.endpoint_autoactivate(host_endpoint, if_expires_in=3600)  # necessary for real use?
    create_result = tc.create_shared_endpoint(shared_ep_data)  # not the app's endpoint, so should fail
    endpoint_id = create_result['id']
    globus_dict['endpoint_id'] = endpoint_id
    globus_dict['transfer_client'] = tc
    return globus_dict
def refresh(self):
    '''use refresh token to refresh access token'''
    auth_client = self.auth_client
    xfer_auth = RefreshTokenAuthorizer(dj.config['custom']['globus.token'],
                                       auth_client,
                                       access_token=None,
                                       expires_at=None)
    self.xfer_client = TransferClient(authorizer=xfer_auth)
def __init__(self, config: UOCloudSyncConfig):
    confidential_client = ConfidentialAppAuthClient(
        client_id=config.get_client_id(),
        client_secret=config.get_client_secret())
    scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
    cc_authorizer = ClientCredentialsAuthorizer(confidential_client, scopes)
    # create a new client
    self._transfer_client = TransferClient(authorizer=cc_authorizer)
    self._src_endpoint = None
    self._dest_endpoint = None
def getTransferClient(self, check: bool = False):
    if self.transferClient is None:
        authz = self.getAppTransferAuthorizer()
        self.transferClient = TransferClient(authorizer=authz)
        if check:
            # almost a dummy call, as a sanity check
            self.transferClient.task_list(num_results=1)
    return self.transferClient
def bulk_submit_xfer(submitjob, recursive=False):
    cfg = load_config()
    client_id = cfg['globus']['apps'][GLOBUS_AUTH_APP]['client_id']
    auth_client = NativeAppAuthClient(client_id)
    refresh_token = cfg['globus']['apps'][GLOBUS_AUTH_APP]['refresh_token']
    source_endpoint_id = submitjob[0].get('metadata').get('source_globus_endpoint_id')
    destination_endpoint_id = submitjob[0].get('metadata').get('dest_globus_endpoint_id')

    authorizer = RefreshTokenAuthorizer(refresh_token=refresh_token,
                                        auth_client=auth_client)
    tc = TransferClient(authorizer=authorizer)

    # as both endpoints are expected to be Globus Server endpoints,
    # send auto-activate commands for both endpoints
    a = auto_activate_endpoint(tc, source_endpoint_id)
    logging.debug('a: %s' % a)
    if a != 'AlreadyActivated':
        return None
    b = auto_activate_endpoint(tc, destination_endpoint_id)
    logging.debug('b: %s' % b)
    if b != 'AlreadyActivated':
        return None

    # use a timestamp as the task's job_label
    x = datetime.now()
    job_label = x.strftime('%Y%m%d%H%M%S')

    # from Globus: sync_level="checksum" means that before files are
    # transferred, Globus computes checksums on the source and destination
    # files and only transfers files whose checksums differ.
    # verify_checksum=True means that after a file is transferred, Globus
    # computes checksums on both copies to verify the transfer; if they do
    # not match, it redoes the transfer of that file.
    tdata = TransferData(tc,
                         source_endpoint_id,
                         destination_endpoint_id,
                         label=job_label,
                         sync_level="checksum")

    for file in submitjob:
        source_path = file.get('sources')[0]
        dest_path = file.get('destinations')[0]
        filesize = file['metadata']['filesize']
        # TODO: support passing a recursive parameter to Globus
        # md5 = file['metadata']['md5']
        # tdata.add_item(source_path, dest_path, recursive=False, external_checksum=md5)
        tdata.add_item(source_path, dest_path, recursive=False)
        record_counter('daemons.conveyor.transfer_submitter.globus.transfers.submit.filesize',
                       filesize)

    # logging.info('submitting transfer...')
    transfer_result = tc.submit_transfer(tdata)
    # logging.info("task_id =", transfer_result["task_id"])
    return transfer_result["task_id"]
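auto_activate_endpoint is called above but not defined anywhere in this collection; a minimal sketch, assuming it wraps endpoint_autoactivate and surfaces the response code that the caller compares against 'AlreadyActivated':

def auto_activate_endpoint(tc, endpoint_id):
    # assumed helper: request autoactivation and return the result code,
    # e.g. 'AlreadyActivated', 'AutoActivated.CachedCredential',
    # or 'AutoActivationFailed'
    r = tc.endpoint_autoactivate(endpoint_id, if_expires_in=3600)
    return r['code']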
def globus_download_files(client: globus_sdk.TransferClient, endpoint_id: str,
                          files: tuple) -> None:
    """Downloads the listed files from the endpoint, one transfer per file

    Arguments:
        client: the Globus transfer client to use
        endpoint_id: the ID of the endpoint to access
        files: the list of files to fetch
    """
    # Work out which files still need to be fetched locally
    file_transfers = {}
    for one_file in files:
        globus_save_path = os.path.join(LOCAL_SAVE_PATH, os.path.basename(one_file))
        if not os.path.exists(globus_save_path):
            globus_remote_path = one_file
            file_transfers[globus_remote_path] = globus_save_path

    if file_transfers:
        have_exception = False
        cnt = 1
        for remote_path, save_path in file_transfers.items():
            try:
                logging.info("Trying transfer %s: %s", str(cnt), str(remote_path))
                cnt += 1
                transfer_setup = globus_sdk.TransferData(client,
                                                         endpoint_id,
                                                         GLOBUS_LOCAL_ENDPOINT_ID,
                                                         label="Get image file",
                                                         sync_level="checksum")
                transfer_setup.add_item(remote_path, save_path)
                transfer_request = client.submit_transfer(transfer_setup)
                task_result = client.task_wait(transfer_request['task_id'],
                                               timeout=600,
                                               polling_interval=5)
                if not task_result:
                    raise RuntimeError("Transfer did not complete in time: %s" % remote_path)
                if not os.path.exists(save_path):
                    raise RuntimeError("Unable to find downloaded file at: %s" % save_path)
            except RuntimeError as ex:
                have_exception = True
                logging.warning("Failed to get image: %s", str(ex))
        if have_exception:
            raise RuntimeError("Unable to retrieve all files individually")
    del file_transfers
def main():
    current_time = datetime.utcnow().replace(microsecond=0).isoformat()
    last_cleanup_time = datetime.utcnow().replace(microsecond=0) \
        - timedelta(hours=24)
    last_cleanup = last_cleanup_time.isoformat()
    completion_range = last_cleanup + "," + current_time
    print("Cleaning up source endpoint {} \nfor outbound transfers completed "
          "in range {}\n ".format(SOURCE_ENDPOINT_ID, completion_range))

    transfer_token = do_client_authentication(CLIENT_ID, CLIENT_SECRET)
    authorizer = AccessTokenAuthorizer(access_token=transfer_token)
    tc = TransferClient(authorizer=authorizer)

    # auto-activate the source endpoint before managing its tasks
    tc.endpoint_autoactivate(SOURCE_ENDPOINT_ID)

    try:
        task_fields = "task_id,source_endpoint,destination_endpoint," \
                      "source_host_path,owner_string,source_endpoint_id,type"
        tasks = tc.endpoint_manager_task_list(
            filter_status="SUCCEEDED",
            filter_endpoint=SOURCE_ENDPOINT_ID,
            filter_completion_time=completion_range,
            fields=task_fields)
    except TransferAPIError as tapie:
        if tapie.code == 'PermissionDenied':
            print('Permission denied! Give your app permission by going to '
                  '"globus.org/app/endpoints/{}/roles", and under '
                  '"Identity/E-mail" adding "{}@clients.auth.globus.org" as '
                  'an "Access Manager" and "Activity Manager"'.format(
                      SOURCE_ENDPOINT_ID, CLIENT_ID))
            sys.exit(1)
        # Nothing weird *should* happen here, but if so re-raise so the user
        # can deal with it.
        raise

    tasklist = tasks.data
    if not tasklist:
        print("No transfers from {} found in the last 24 hours, "
              "nothing to clean up".format(SOURCE_ENDPOINT_ID))
    else:
        print("{} total transfers found from {} in the last 24 hours, "
              "some may not be of type TRANSFER".format(
                  len(tasklist), SOURCE_ENDPOINT_ID))
        delete_tasks = [task.data for task in tasklist
                        if task_delete_conditions_satisfied(task)]
        for task in delete_tasks:
            files_list, common_dir = select_dir_to_delete(tc, task)
            delete_dir_and_acls(tc, task, files_list, common_dir)
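task_delete_conditions_satisfied, select_dir_to_delete, and delete_dir_and_acls are helpers that are not shown here; a minimal sketch of the predicate, assuming it keeps only plain TRANSFER tasks whose source is the endpoint being cleaned up:

def task_delete_conditions_satisfied(task):
    # assumed filter: skip DELETE tasks and any task that did not read
    # from the managed source endpoint
    return (task['type'] == 'TRANSFER'
            and task['source_endpoint_id'] == SOURCE_ENDPOINT_ID)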
class UOCloudTransferClient:
    def __init__(self, config: UOCloudSyncConfig):
        confidential_client = ConfidentialAppAuthClient(
            client_id=config.get_client_id(),
            client_secret=config.get_client_secret())
        scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
        cc_authorizer = ClientCredentialsAuthorizer(confidential_client, scopes)
        # create a new client
        self._transfer_client = TransferClient(authorizer=cc_authorizer)
        self._src_endpoint = None
        self._dest_endpoint = None

    def get_endpoint_id(self, endpoint_name: str):
        endpoints = self._transfer_client.endpoint_search(
            filter_fulltext=endpoint_name)
        # Just return the first result. Hope it is right!
        for ep in endpoints:
            return ep['id']

    def transfer_data(self, src_endpoint: str, src_path: Union[str, Path, PathLike],
                      dest_endpoint: str, dest_path: Union[str, Path, PathLike]):
        self._src_endpoint = src_endpoint
        self._dest_endpoint = dest_endpoint

        src_endpoint_id = self.get_endpoint_id(src_endpoint)
        if not src_endpoint_id:
            print(f'ERROR: Unable to find source endpoint id for: "{self._src_endpoint}"')
            return

        dest_endpoint_id = self.get_endpoint_id(dest_endpoint)
        if not dest_endpoint_id:
            print(f'ERROR: Unable to find destination endpoint id for: "{self._dest_endpoint}"')
            return

        transfer_data = TransferData(self._transfer_client,
                                     src_endpoint_id,
                                     dest_endpoint_id,
                                     encrypt_data=True)
        transfer_data.add_item(src_path, dest_path, recursive=True)
        try:
            print(f'Submitting a transfer task from {self._src_endpoint}:{src_path} '
                  f'to {self._dest_endpoint}:{dest_path}')
            task = self._transfer_client.submit_transfer(transfer_data)
        except TransferAPIError as e:
            print(str(e))
            sys.exit(1)

        task_id = task['task_id']
        print(f'\tWaiting for transfer to complete with task_id: {task_id}')
        while not self._transfer_client.task_wait(task_id=task_id,
                                                  timeout=3600,
                                                  polling_interval=60):
            print('.', end='')

        print('Transferred files:')
        for info in self._transfer_client.task_successful_transfers(
                task_id=task_id, num_results=None):
            print("\t{} -> {}".format(info["source_path"], info["destination_path"]))
def bulk_submit_xfer(submitjob, recursive=False, logger=logging.log):
    cfg = load_config(logger=logger)
    client_id = cfg['globus']['apps'][GLOBUS_AUTH_APP]['client_id']
    auth_client = NativeAppAuthClient(client_id)
    refresh_token = cfg['globus']['apps'][GLOBUS_AUTH_APP]['refresh_token']
    source_endpoint_id = submitjob[0].get('metadata').get('source_globus_endpoint_id')
    destination_endpoint_id = submitjob[0].get('metadata').get('dest_globus_endpoint_id')

    authorizer = RefreshTokenAuthorizer(refresh_token=refresh_token,
                                        auth_client=auth_client)
    tc = TransferClient(authorizer=authorizer)

    # use a timestamp as the task's job_label
    now = datetime.datetime.now()
    job_label = now.strftime('%Y%m%d%H%M%S')

    # retrieve the globus_task_deadline value to enforce a time window for
    # completing transfers; the default is 2880 minutes (48 hours)
    globus_task_deadline = config_get_int('conveyor', 'globus_task_deadline',
                                          False, 2880)
    deadline = now + datetime.timedelta(minutes=globus_task_deadline)

    # from Globus: sync_level="checksum" means that before files are
    # transferred, Globus computes checksums on the source and destination
    # files and only transfers files whose checksums differ.
    # verify_checksum=True means that after a file is transferred, Globus
    # computes checksums on both copies to verify the transfer; if they do
    # not match, it redoes the transfer of that file.
    tdata = TransferData(tc,
                         source_endpoint_id,
                         destination_endpoint_id,
                         label=job_label,
                         sync_level="checksum",
                         deadline=str(deadline))

    for file in submitjob:
        source_path = file.get('sources')[0]
        dest_path = file.get('destinations')[0]
        filesize = file['metadata']['filesize']
        # TODO: support passing a recursive parameter to Globus
        # md5 = file['metadata']['md5']
        # tdata.add_item(source_path, dest_path, recursive=False, external_checksum=md5)
        tdata.add_item(source_path, dest_path, recursive=False)
        record_counter('daemons.conveyor.transfer_submitter.globus.transfers.submit.filesize',
                       filesize)

    # logging.info('submitting transfer...')
    transfer_result = tc.submit_transfer(tdata)
    logger(logging.INFO, "transfer_result: %s" % transfer_result)
    return transfer_result["task_id"]
def preactivate_globus(self):
    """
    Read the local globus endpoint UUID from ~/.zstash.ini.
    If the ini file does not exist, create an ini file with empty values,
    and try to find the local endpoint UUID based on the FQDN.
    """
    local_endpoint = None
    ini_path = os.path.expanduser("~/.zstash.ini")
    ini = configparser.ConfigParser()
    if ini.read(ini_path):
        if "local" in ini.sections():
            local_endpoint = ini["local"].get("globus_endpoint_uuid")
    else:
        ini["local"] = {"globus_endpoint_uuid": ""}
        try:
            with open(ini_path, "w") as f:
                ini.write(f)
        except Exception as e:
            self.fail(e)
    if not local_endpoint:
        fqdn = socket.getfqdn()
        for pattern in regex_endpoint_map.keys():
            if re.fullmatch(pattern, fqdn):
                local_endpoint = regex_endpoint_map.get(pattern)
                break
    if not local_endpoint:
        # self.fail("{} does not have the local Globus endpoint set".format(ini_path))
        self.skipTest(
            "{} does not have the local Globus endpoint set".format(ini_path))

    native_client = NativeClient(
        client_id="6c1629cf-446c-49e7-af95-323c6412397f",
        app_name="Zstash",
        default_scopes="openid urn:globus:auth:scope:transfer.api.globus.org:all",
    )
    native_client.login(no_local_server=True, refresh_tokens=True)
    transfer_authorizer = native_client.get_authorizers().get(
        "transfer.api.globus.org")
    self.transfer_client = TransferClient(authorizer=transfer_authorizer)

    for ep_id in [hpss_globus_endpoint, local_endpoint]:
        r = self.transfer_client.endpoint_autoactivate(ep_id, if_expires_in=600)
        if r.get("code") == "AutoActivationFailed":
            self.fail(
                "The {} endpoint is not activated or the current activation "
                "expires soon. Please go to "
                "https://app.globus.org/file-manager/collections/{} "
                "and (re)-activate the endpoint.".format(ep_id, ep_id))
async def get_transfer_client(request: Request):
    """Builds a Globus transfer client using the tokens stored in the session"""
    tokens = request.session.get('tokens', False)
    authorizer = AccessTokenAuthorizer(
        tokens['transfer.api.globus.org']['access_token'])
    transfer_client = TransferClient(authorizer=authorizer)
    return transfer_client
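A hypothetical FastAPI route using the helper above (the route path, the response shape, and the presence of SessionMiddleware are assumptions, not from the original):

from fastapi import FastAPI, Request

app = FastAPI()

@app.get("/tasks")
async def list_tasks(request: Request):
    # list the ten most recent transfer tasks for the session's user
    tc = await get_transfer_client(request)
    return [{"task_id": t["task_id"], "status": t["status"]}
            for t in tc.task_list(num_results=10)]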
async def delete_transfer_globus(globus_transfer_id: str,
                                 transfer_client: TransferClient):
    """This function cancels a globus transfer"""
    # transfer_client = await get_transfer_client(request)
    transfer_response = None
    try:
        transfer_result = transfer_client.cancel_task(globus_transfer_id)
        transfer_result_json = json.loads(str(transfer_result))
        transfer_response = {'globus_response': transfer_result_json}
        transfer_response['status'] = 200
        return transfer_response
    except GlobusAPIError as e:
        # Error response from the REST service; check the code and message
        # for details.
        return handle_globus_api_error(e)
    except NetworkError:
        logging.error("Network failure. "
                      "Possibly a firewall or connectivity issue")
        raise
    except GlobusError:
        logging.exception("Totally unexpected GlobusError!")
        raise
def getTransferData():
    cfg = load_config()
    client_id = cfg['globus']['apps']['SDK Tutorial App']['client_id']
    auth_client = NativeAppAuthClient(client_id)
    refresh_token = cfg['globus']['apps']['SDK Tutorial App']['refresh_token']
    source_endpoint_id = cfg['globus']['apps']['SDK Tutorial App']['win10_endpoint_id']
    destination_endpoint_id = cfg['globus']['apps']['SDK Tutorial App']['sdccfed_endpoint_id']
    authorizer = RefreshTokenAuthorizer(refresh_token=refresh_token,
                                        auth_client=auth_client)
    tc = TransferClient(authorizer=authorizer)

    # as both endpoints are expected to be Globus Server endpoints,
    # send auto-activate commands for both endpoints
    auto_activate_endpoint(tc, source_endpoint_id)
    auto_activate_endpoint(tc, destination_endpoint_id)

    # use a timestamp as the task's job_label
    x = datetime.now()
    job_label = x.strftime('%Y%m%d%H%M%S')

    # from Globus: sync_level="checksum" means that before files are
    # transferred, Globus computes checksums on the source and destination
    # files and only transfers files whose checksums differ.
    # verify_checksum=True means that after a file is transferred, Globus
    # computes checksums on both copies to verify the transfer; if they do
    # not match, it redoes the transfer of that file.
    tdata = TransferData(tc,
                         source_endpoint_id,
                         destination_endpoint_id,
                         label=job_label,
                         sync_level="checksum",
                         verify_checksum=True)
    return tdata
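A hypothetical use of getTransferData(), paired with a client built the same way (a get_transfer_client helper appears later in this collection; the paths are placeholders):

tdata = getTransferData()
tdata.add_item('/C/data/sample.dat', '/data/sample.dat')  # placeholder paths
tc = get_transfer_client()
task_id = tc.submit_transfer(tdata)['task_id']
print('submitted transfer, task_id =', task_id)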
def transfer_status(task_id):
    """
    Call Globus to get status/details of the transfer with task_id.

    The target template (transfer_status.jinja2) expects a Transfer API
    'task' object. 'task_id' is passed to the route in the URL as 'task_id'.
    """
    transfer = TransferClient(authorizer=RefreshTokenAuthorizer(
        session['tokens']['transfer.api.globus.org']['refresh_token'],
        load_portal_client()))
    task = transfer.get_task(task_id)
    return render_template('transfer_status.jinja2', task=task)
def globus_get_folders(client: globus_sdk.TransferClient, endpoint_id: str,
                       remote_path: str) -> Optional[tuple]:
    """Returns the sub folders found directly under the remote path

    Arguments:
        client: the Globus transfer client to use
        endpoint_id: the ID of the endpoint to access
        remote_path: the remote path to search
    Return:
        Returns a tuple of the sub folders found, or None if the listing fails
    """
    base_path = os.path.join('/-', remote_path)
    return_paths = []
    try:
        path_contents = client.operation_ls(endpoint_id, path=base_path)
    except globus_sdk.exc.TransferAPIError:
        logging.error("Continuing after TransferAPIError Exception caught for: '%s'",
                      base_path)
        return None

    for one_entry in path_contents:
        if one_entry['type'] == 'dir':
            sub_folder = os.path.join(base_path, one_entry['name'])
            logging.debug("Globus remote sub folder: %s", sub_folder)
            return_paths.append(sub_folder)

    return tuple(return_paths)
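A hypothetical call, assuming a client constructed as in the neighboring snippets (the endpoint ID and path are placeholders):

folders = globus_get_folders(client, 'ENDPOINT_ID', 'raw_data/2020-05-04')
if folders is not None:
    for folder in folders:
        logging.info("found sub folder: %s", folder)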
def getUserTransferClient(self, user):
    username = user['login']
    authz = self.getAuthorizer(user)
    with self.userClientsLock:
        if username not in self.userClients:
            self.userClients[username] = TransferClient(authorizer=authz)
        return self.userClients[username]
def _get_required_data_access_scopes(
    tc: TransferClient,
    collection_ids: Iterable[str],
) -> List[str]:
    data_access_scopes: List[str] = []
    for collection_id in collection_ids:
        collection_id_info = tc.get_endpoint(collection_id)
        if collection_id_info["DATA_TYPE"] == "endpoint":
            gcs_version = collection_id_info.get("gcs_version")
            if gcs_version is None:
                continue
            gcs_version_parts = [int(x) for x in gcs_version.split(".")]
            requires_data_access = all(
                [
                    (gcs_version_parts[0] > 5
                     or (gcs_version_parts[0] == 5 and gcs_version_parts[1] >= 4)),
                    collection_id_info.get("high_assurance", True) is False,
                    collection_id_info.get("host_endpoint", True) is None,
                ]
            )
            if requires_data_access:
                data_access_scopes.append(
                    f"https://auth.globus.org/scopes/{collection_id}/data_access"
                )
    return data_access_scopes
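A hypothetical way to fold the result into a dependent-scope request, using Globus Auth's scope[dependency ...] notation (the collection IDs are placeholders):

base_scope = "urn:globus:auth:scope:transfer.api.globus.org:all"
data_access = _get_required_data_access_scopes(
    tc, ["COLLECTION_ID_1", "COLLECTION_ID_2"])
requested_scope = (f"{base_scope}[{' '.join(data_access)}]"
                   if data_access else base_scope)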
def init():
    tokens = None
    try:
        # if we already have tokens, load and use them
        tokens = load_tokens_from_db()
    except Exception:
        pass

    if not tokens:
        # if we need to get tokens, start the Native App authentication process
        tokens = do_native_app_authentication(config.TRANSFER_CLIENT_ID,
                                              config.REDIRECT_URI,
                                              config.SCOPES)
        try:
            save_tokens_to_db(tokens)
        except Exception:
            pass

    transfer_tokens = tokens['transfer.api.globus.org']
    client = NativeAppAuthClient(client_id=config.TRANSFER_CLIENT_ID)
    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        client,
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'],
        on_refresh=update_tokens_file_on_refresh)

    transfer = TransferClient(authorizer=authorizer)
    prepare_call(transfer)
    return transfer
class GridFTPConnection:
    def __init__(self, endpoint_id, source_id):
        self.transfer_client = TransferClient()
        self.endpoint_id = endpoint_id
        self.source_id = source_id
        self.endpoint = self.transfer_client.get_endpoint(endpoint_id)
        print("Connection successful to:",
              self.endpoint["display_name"] or self.endpoint["canonical_name"])
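A usage sketch with placeholder IDs; note that the bare TransferClient() above relies on a legacy config-file token and would need an explicit authorizer on current globus-sdk releases:

conn = GridFTPConnection('ENDPOINT_ID', 'SOURCE_ID')
for entry in conn.transfer_client.operation_ls(conn.endpoint_id, path='/~/'):
    print(entry['name'])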
def login():
    tok_path = os.path.expanduser('~/.mdf_agent_tokens.json')

    def _read_tokfile():
        tokens = {}
        if os.path.exists(tok_path):
            with open(tok_path) as f:
                tokens = json.load(f)
        return tokens

    def _write_tokfile(new_tokens):
        # We have multiple tokens in our tokens file, but on update we only
        # get the currently updated token, so read current and update with
        # the input tokens
        cur_tokens = _read_tokfile()
        for key in new_tokens:
            cur_tokens[key] = new_tokens[key]
        # deny rwx to Group and World -- don't bother storing the returned old
        # mask value, since we'll never restore it anyway
        # do this on every call to ensure that we're always consistent about it
        os.umask(0o077)
        with open(tok_path, 'w') as f:
            f.write(json.dumps(cur_tokens))

    def _update_tokfile(tokens):
        _write_tokfile(tokens.by_resource_server['transfer.api.globus.org'])

    tokens = _read_tokfile()
    client_id = "1e162bfc-ad52-4014-8844-b82841145fc4"
    native_client = NativeAppAuthClient(client_id, app_name='MDF Agents')

    if not tokens:
        # do the Native App Grant flow
        native_client.oauth2_start_flow(
            requested_scopes='urn:globus:auth:scope:transfer.api.globus.org:all',
            refresh_tokens=True)
        linkprompt = 'Please login to Globus here'
        print('{0}:\n{1}\n{2}\n{1}\n'.format(linkprompt,
                                             '-' * len(linkprompt),
                                             native_client.oauth2_get_authorize_url()),
              flush=True)
        auth_code = input('Enter the resulting Authorization Code here: ').strip()
        tkns = native_client.oauth2_exchange_code_for_tokens(auth_code)
        tokens = tkns.by_resource_server['transfer.api.globus.org']
        _write_tokfile(tokens)

    transfer_tokens = tokens
    transfer_authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        native_client,
        transfer_tokens['access_token'],
        transfer_tokens['expires_at_seconds'],
        on_refresh=_update_tokfile)

    transfer_client = TransferClient(authorizer=transfer_authorizer)
    return transfer_client
def __init__(self):
    # initialize a transfer_client if it has not been done already
    if Globus.transfer_client is None:
        native_client = NativeClient(
            client_id=settings.globus.get("client_id"),
            app_name="Globus Endpoint Performance Dashboard",
            default_scopes=settings.globus.get("scopes"))
        native_client.login(no_local_server=True, refresh_tokens=True)
        transfer_authorizer = native_client.get_authorizers().get(
            "transfer.api.globus.org")
        Globus.transfer_client = TransferClient(authorizer=transfer_authorizer)
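Hypothetical usage of the cached class attribute (the enclosing class is assumed to be named Globus, as the attribute access above suggests):

Globus()  # triggers the one-time login flow if no client exists yet
tc = Globus.transfer_client
for ep in tc.endpoint_search(filter_scope="my-endpoints"):
    print(ep["display_name"], ep["id"])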
def get_transfer_client():
    cfg = load_config()
    # cfg = yaml.safe_load(open("/opt/rucio/lib/rucio/transfertool/config.yml"))
    client_id = cfg['globus']['apps'][GLOBUS_AUTH_APP]['client_id']
    auth_client = NativeAppAuthClient(client_id)
    refresh_token = cfg['globus']['apps'][GLOBUS_AUTH_APP]['refresh_token']
    logging.info('authorizing token...')
    authorizer = RefreshTokenAuthorizer(refresh_token=refresh_token,
                                        auth_client=auth_client)
    logging.info('initializing TransferClient...')
    tc = TransferClient(authorizer=authorizer)
    return tc
def browse(dataset_id=None, endpoint_id=None, endpoint_path=None):
    """
    - Get a list of files for the selected dataset or endpoint ID/path
    - Return a list of files to a browse view

    The target template (browse.jinja2) expects an `endpoint_uri` (if
    available for the endpoint), `target` (either `"dataset"` or
    `"endpoint"`), and `file_list` (list of dictionaries) containing the
    following information about each file in the result:

    {'name': 'file name', 'size': 'file size', 'id': 'file uri/path'}

    If you want to display additional information about each file, you
    must add those keys to the dictionary and modify the browse.jinja2
    template accordingly.
    """
    assert bool(dataset_id) != bool(endpoint_id and endpoint_path)

    if dataset_id:
        try:
            dataset = next(ds for ds in datasets if ds['id'] == dataset_id)
        except StopIteration:
            abort(404)
        endpoint_id = app.config['DATASET_ENDPOINT_ID']
        endpoint_path = app.config['DATASET_ENDPOINT_BASE'] + dataset['path']
    else:
        endpoint_path = '/' + endpoint_path

    transfer = TransferClient(authorizer=RefreshTokenAuthorizer(
        session['tokens']['transfer.api.globus.org']['refresh_token'],
        load_portal_client()))

    try:
        transfer.endpoint_autoactivate(endpoint_id)
        listing = transfer.operation_ls(endpoint_id, path=endpoint_path)
    except TransferAPIError as err:
        flash('Error [{}]: {}'.format(err.code, err.message))
        return redirect(url_for('transfer'))

    file_list = [e for e in listing if e['type'] == 'file']

    ep = transfer.get_endpoint(endpoint_id)
    https_server = ep['https_server']
    endpoint_uri = https_server + endpoint_path if https_server else None
    webapp_xfer = 'https://www.globus.org/app/transfer?' + \
        urlencode(dict(origin_id=endpoint_id, origin_path=endpoint_path))

    return render_template('browse.jinja2',
                           endpoint_uri=endpoint_uri,
                           target="dataset" if dataset_id else "endpoint",
                           description=(dataset['name'] if dataset_id
                                        else ep['display_name']),
                           file_list=file_list,
                           webapp_xfer=webapp_xfer)
def submit_transfer():
    """
    - Take the data returned by the Browse Endpoint helper page and make a
      Globus transfer request.
    - Send the user to the transfer status page with the task id from the
      transfer.
    """
    browse_endpoint_form = request.form
    selected = session['form']['datasets']
    filtered_datasets = [ds for ds in datasets if ds['id'] in selected]

    transfer_tokens = session['tokens']['transfer.api.globus.org']
    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        load_portal_client(),
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])
    transfer = TransferClient(authorizer=authorizer)

    source_endpoint_id = app.config['DATASET_ENDPOINT_ID']
    source_endpoint_base = app.config['DATASET_ENDPOINT_BASE']
    destination_endpoint_id = browse_endpoint_form['endpoint_id']
    destination_folder = browse_endpoint_form.get('folder[0]')

    transfer_data = TransferData(transfer_client=transfer,
                                 source_endpoint=source_endpoint_id,
                                 destination_endpoint=destination_endpoint_id,
                                 label=browse_endpoint_form.get('label'))

    for ds in filtered_datasets:
        source_path = source_endpoint_base + ds['path']
        dest_path = browse_endpoint_form['path']
        if destination_folder:
            dest_path += destination_folder + '/'
        dest_path += ds['name'] + '/'
        transfer_data.add_item(source_path=source_path,
                               destination_path=dest_path,
                               recursive=True)

    transfer.endpoint_autoactivate(source_endpoint_id)
    transfer.endpoint_autoactivate(destination_endpoint_id)
    task_id = transfer.submit_transfer(transfer_data)['task_id']

    flash('Transfer request submitted successfully. Task ID: ' + task_id)
    return redirect(url_for('transfer_status', task_id=task_id))
def status(self):
    token = TokenStore.get_transfer_token(self.user)
    tc = TransferClient(authorizer=AccessTokenAuthorizer(token))
    old = json.loads(self.task_catalog or '{}')
    # only re-query tasks we have not yet seen finish
    tasks = {t: tc.get_task(t).data
             for t in json.loads(self.transfer_task_ids)
             if not old.get(t) or old[t]['status'] == 'ACTIVE'}
    old.update(tasks)
    tasks = old

    transferred = [t['files_transferred'] for t in tasks.values()]
    log.debug(transferred)
    # sum the per-task counts (functools.reduce is assumed at module scope)
    self.files_transferred = reduce(lambda x, y: x + y, transferred)
    log.debug(self.files_transferred)
    self.task_catalog = json.dumps(tasks)
    self.save()

    statuses = [s['status'] for s in tasks.values()]
    if any(filter(lambda stat: stat in ['INACTIVE', 'FAILED'], statuses)):
        return 'FAILED'
    if any(filter(lambda stat: stat == 'ACTIVE', statuses)):
        return 'ACTIVE'
    return 'SUCCEEDED'
def get_transfer_interface(self, auth_client):
    self.log.info("get_transfer_interface")
    if self.transfer_client:
        self.log.info("found transfer_client")
        return self.transfer_client

    self.log.info("did not find transfer_client")
    self.log.info("auth_client")
    self.log.info(auth_client)

    scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
    cc_authorizer = ClientCredentialsAuthorizer(auth_client, scopes)
    # note: the new client is returned without being cached on self
    transfer_client = TransferClient(authorizer=cc_authorizer)
    self.log.info("get_transfer_interface - transfer_client")
    self.log.info(transfer_client)
    return transfer_client
def cleanup():
    user_identity_name = request.form.get('user_identity_name')

    dependent_tokens = get_dependent_tokens(g.req_token)
    transfer_token = dependent_tokens.by_resource_server[
        'transfer.api.globus.org']['access_token']

    dest_ep = app.config['GRAPH_ENDPOINT_ID']
    dest_base = app.config['GRAPH_ENDPOINT_BASE']
    dest_path = '%sGraphs for %s/' % (dest_base, user_identity_name)

    transfer = TransferClient(authorizer=AccessTokenAuthorizer(transfer_token))
    transfer.endpoint_autoactivate(dest_ep)

    try:
        acl = next(acl for acl in transfer.endpoint_acl_list(dest_ep)
                   if dest_path == acl['path'])
    except StopIteration:
        pass
    except TransferAPIError as ex:
        # PermissionDenied can happen if a new Portal client is swapped
        # in and it doesn't have endpoint manager on the dest_ep.
        # The /portal/processed directory has been set to writeable
        # for all users so the delete task will succeed even if an ACL
        # can't be set.
        if ex.code == 'PermissionDenied':
            pass
    else:
        transfer.delete_endpoint_acl_rule(dest_ep, acl['id'])

    delete_request = DeleteData(transfer_client=transfer,
                                endpoint=dest_ep,
                                label="Delete Graphs from the Service Demo",
                                recursive=True)
    delete_request.add_item(dest_path)

    try:
        task = transfer.submit_delete(delete_request)
    except TransferAPIError as ex:
        raise InternalServerError(message=ex.message)
    else:
        return jsonify(dict(task_id=task['task_id']))
def __init__(self): """Initiate an OAuth2() object. Initiate OAuth2 flow with Globus credentaials to obtain access tokens. Refresh the tokens automatically so another login is not required. Examples -------- Create an OAuth2 object: >>> from archeion.models import OAuth2 >>> authorizer = OAuth2() """ self.client = NativeAppAuthClient(CLIENT_ID) self.client.oauth2_start_flow(refresh_tokens=True) logger.info("Opening browser window for Globus Authentication") webbrowser.open_new(self.client.oauth2_get_authorize_url()) get_input = getattr(__builtins__, "raw_input", input) auth_code = get_input( "Please enter the code you get after login here: " ).strip() logger.debug("User has input authentication code") token_response = self.client.oauth2_exchange_code_for_tokens(auth_code) self.access_token = token_response.by_resource_server["auth.globus.org"][ "access_token" ] transfer_response = token_response.by_resource_server["transfer.api.globus.org"] self.transfer_token = transfer_response["access_token"] self.transfer_refresh_token = transfer_response["refresh_token"] self.transfer_expiry_seconds = transfer_response["expires_at_seconds"] authorizer = RefreshTokenAuthorizer( self.transfer_refresh_token, self.client, access_token=self.transfer_token, expires_at=self.transfer_expiry_seconds, ) self.transfer_client = TransferClient( AccessTokenAuthorizer(self.transfer_token) ) self.authorisation_client = AuthClient(authorizer=authorizer)
def doit():
    """
    - Call token introspect
    - Get dependent tokens
    """
    dependent_tokens = get_dependent_tokens(g.req_token)
    # dependent_tokens is a token response object
    # create transfer_token and http_token variables containing
    # the correct token for each resource server
    transfer_token = dependent_tokens.by_resource_server[
        'transfer.api.globus.org']['access_token']
    http_token = dependent_tokens.by_resource_server[
        app.config['GRAPH_ENDPOINT_ID']]['access_token']

    selected_ids = request.form.getlist('datasets')
    selected_year = request.form.get('year')
    user_identity_id = request.form.get('user_identity_id')
    user_identity_name = request.form.get('user_identity_name')

    selected_datasets = [dataset for dataset in datasets
                         if dataset['id'] in selected_ids]

    if not (selected_datasets and selected_year):
        raise BadRequestError()

    transfer = TransferClient(authorizer=AccessTokenAuthorizer(transfer_token))

    source_ep = app.config['DATASET_ENDPOINT_ID']
    source_info = transfer.get_endpoint(source_ep)
    source_https = source_info['https_server']
    source_base = app.config['DATASET_ENDPOINT_BASE']
    source_token = http_token

    dest_ep = app.config['GRAPH_ENDPOINT_ID']
    dest_info = transfer.get_endpoint(dest_ep)
    dest_https = dest_info['https_server']
    dest_base = app.config['GRAPH_ENDPOINT_BASE']
    dest_path = '%sGraphs for %s/' % (dest_base, user_identity_name)
    dest_token = http_token

    if not (source_https and dest_https):
        raise InternalServerError(message='Endpoints must be HTTPS servers')

    svgs = {}
    for dataset in selected_datasets:
        source_path = dataset['path']
        response = requests.get('%s%s%s/%s.csv' % (source_https, source_base,
                                                   source_path, selected_year),
                                headers=dict(Authorization='Bearer ' + source_token),
                                allow_redirects=False)
        response.raise_for_status()
        svgs.update(render_graphs(
            csv_data=response.iter_lines(decode_unicode=True),
            append_titles=' from %s for %s' % (dataset['name'], selected_year),
        ))

    transfer.endpoint_autoactivate(dest_ep)

    try:
        transfer.operation_mkdir(dest_ep, dest_path)
    except TransferAPIError as error:
        if 'MkdirFailed.Exists' not in error.code:
            raise

    try:
        transfer.add_endpoint_acl_rule(
            dest_ep,
            dict(principal=user_identity_id,
                 principal_type='identity',
                 path=dest_path,
                 permissions='r'),
        )
    except TransferAPIError as error:
        # PermissionDenied can happen if a new Portal client is swapped
        # in and it doesn't have endpoint manager on the dest_ep.
        # The /portal/processed directory has been set to read/write
        # for all users so the subsequent operations will succeed.
        if error.code == 'PermissionDenied':
            pass
        elif error.code != 'Exists':
            raise

    for filename, svg in svgs.items():
        requests.put('%s%s%s.svg' % (dest_https, dest_path, filename),
                     data=svg,
                     headers=dict(Authorization='Bearer ' + dest_token),
                     allow_redirects=False).raise_for_status()

    results = {
        'dest_ep': dest_ep,
        'dest_path': dest_path,
        'dest_name': dest_info['display_name'],
        'graph_count': len(svgs) or 0
    }
    return jsonify(results)
def __init__(self, **kwargs):
    TransferClient.__init__(self, **kwargs)
parser = argparse.ArgumentParser(description='kbase share creator')
parser.add_argument('--share-dir', dest='sharedDir',
                    help='Directory to create a share on', required=True)
parser.add_argument('--share-name', dest='shareName',
                    help='globusid to share with', required=True)
args = parser.parse_args()
print(args.sharedDir)
print(args.shareName)

if not os.path.exists(args.sharedDir):
    os.makedirs(args.sharedDir)
os.chmod(args.sharedDir, 0o777)

authToken = ''      # placeholder: fill in before use
transferToken = ''  # placeholder: fill in before use
endpointId = ''     # placeholder: fill in before use
tc = TransferClient(authorizer=AccessTokenAuthorizer(transferToken))
auth = AuthClient(authorizer=AccessTokenAuthorizer(authToken))

identities = auth.get_identities(usernames="*****@*****.**" % args.shareName)
user_identity_id = identities['identities'][0]['id']

try:
    tc.add_endpoint_acl_rule(
        endpointId,
        dict(principal=user_identity_id,
             principal_type='identity',
             path=args.sharedDir,
             permissions='rw'),
    )
except TransferAPIError as error:
    if error.code != 'Exists':
        raise
import os

parser = argparse.ArgumentParser(description='kbase share creator')
parser.add_argument('--share-dir', dest='sharedDir',
                    help='Directory to create a share on', required=True)
parser.add_argument('--share-name', dest='shareName',
                    help='globusid to share with', required=True)
args = parser.parse_args()
print(args.sharedDir)
print(args.shareName)

if not os.path.exists(args.sharedDir):
    os.makedirs(args.sharedDir)
os.chmod(args.sharedDir, 0o777)

tc = TransferClient()  # uses transfer_token from the config file
auth = AuthClient()

identities = auth.get_identities(usernames="*****@*****.**" % args.shareName)
user_identity_id = identities['identities'][0]['id']

try:
    tc.add_endpoint_acl_rule(
        '3aca022a-5e5b-11e6-8309-22000b97daec',
        dict(principal=user_identity_id,
             principal_type='identity',
             path=args.sharedDir,
             permissions='rw'),
    )
except TransferAPIError as error:
    if error.code != 'Exists':
        raise
#!/usr/bin/env python2.7
from __future__ import print_function  # for python 2
from globus_sdk import TransferClient
import globus_sdk
import traceback
import argparse

parser = argparse.ArgumentParser(description='kbase share creator')
parser.add_argument('--share-dir', dest='sharedDir',
                    help='Directory to create a share on')
parser.add_argument('--share-name', dest='shareName',
                    help='name for the share (must be unique among all globus shares)')
args = parser.parse_args()

tc = TransferClient()  # uses transfer_token from the config file

shared_ep_data = {
    "DATA_TYPE": "shared_endpoint",
    "host_endpoint": 'e25a4bda-0636-11e6-a732-22000bf2d559',
    "host_path": args.sharedDir,
    "display_name": args.shareName,
    # optionally specify additional endpoint fields
    "description": "autocreated by kbase tool support"
}

tc.endpoint_autoactivate('e25a4bda-0636-11e6-a732-22000bf2d559', if_expires_in=3600)
create_result = tc.create_shared_endpoint(shared_ep_data)
endpoint_id = create_result["id"]
print("new endpoint id: ", endpoint_id)
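A possible follow-up, mirroring the ACL snippet earlier in this collection: grant a user access on the newly created share (the identity ID is a placeholder):

tc.add_endpoint_acl_rule(
    endpoint_id,
    dict(DATA_TYPE='access',
         principal='USER_IDENTITY_ID',
         principal_type='identity',
         path='/',
         permissions='rw'))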