Example #1
async def get_transfer_client(request: Request):
    """This function forms globus transfer client with authentication present in session token"""
    tokens = request.session.get('tokens', False)
    authorizer = AccessTokenAuthorizer(
        tokens['transfer.api.globus.org']['access_token'])
    transfer_client = TransferClient(authorizer=authorizer)
    return transfer_client
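A minimal usage sketch (not from the original example): a FastAPI-style route that reuses get_transfer_client(), assuming SessionMiddleware has already stored Globus tokens under request.session['tokens'].

from fastapi import FastAPI, Request
from starlette.middleware.sessions import SessionMiddleware

app = FastAPI()
app.add_middleware(SessionMiddleware, secret_key="CHANGE-ME")  # hypothetical setup

@app.get("/endpoints")
async def list_my_endpoints(request: Request):
    # Build a client from the session token and list endpoints owned by the user.
    transfer_client = await get_transfer_client(request)
    return [ep["display_name"]
            for ep in transfer_client.endpoint_search(filter_scope="my-endpoints")]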
Example #2
def init():
    tokens = None
    try:
        # if we already have tokens, load and use them
        tokens = load_tokens_from_db()
    except Exception:
        pass

    if not tokens:
        # if we need to get tokens, start the Native App authentication process
        tokens = do_native_app_authentication(config.TRANSFER_CLIENT_ID,
                                              config.REDIRECT_URI,
                                              config.SCOPES)
        try:
            save_tokens_to_db(tokens)
        except Exception:
            pass

    transfer_tokens = tokens['transfer.api.globus.org']

    client = NativeAppAuthClient(client_id=config.TRANSFER_CLIENT_ID)

    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        client,
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'],
        on_refresh=update_tokens_file_on_refresh)

    transfer = TransferClient(authorizer=authorizer)
    prepare_call(transfer)

    return transfer
Example #3
 def getUserTransferClient(self, user):
     username = user['login']
     authz = self.getAuthorizer(user)
     with self.userClientsLock:
         if username not in self.userClients:
             self.userClients[username] = TransferClient(authz)
         return self.userClients[username]
Example #4
 def ls_cc(self, name, id, pw):
     self.print_header(name, id)
     try:
         auth_client = ConfidentialAppAuthClient(client_id=id,
                                                 client_secret=pw)
         scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
         cc_authorizer = ClientCredentialsAuthorizer(auth_client, scopes)
         transfer_client = TransferClient(authorizer=cc_authorizer)
         results = transfer_client.endpoint_search(
             filter_scope="my-endpoints")
         endpoint_list = list(results)
         if endpoint_list:
             print("Owned endpoints:")
             for ep in endpoint_list:
                 print("{} ({})".format(ep['display_name'], ep['id']))
         else:
             print("(No owned endpoints.)")
         results = transfer_client.endpoint_search(
             filter_scope="shared-with-me")
         endpoint_list = list(results)
         if endpoint_list:
             print("Shared endpoints:")
             for ep in endpoint_list:
                 print("{} ({})".format(ep['display_name'], ep['id']))
         else:
             print("(No shared endpoints.)")
     except AuthAPIError as e:
         print(e)
Example #5
def getTransferData():
    cfg = load_config()
    client_id = cfg['globus']['apps']['SDK Tutorial App']['client_id']
    auth_client = NativeAppAuthClient(client_id)
    refresh_token = cfg['globus']['apps']['SDK Tutorial App']['refresh_token']
    source_endpoint_id = cfg['globus']['apps']['SDK Tutorial App'][
        'win10_endpoint_id']
    destination_endpoint_id = cfg['globus']['apps']['SDK Tutorial App'][
        'sdccfed_endpoint_id']
    authorizer = RefreshTokenAuthorizer(refresh_token=refresh_token,
                                        auth_client=auth_client)
    tc = TransferClient(authorizer=authorizer)
    # as both endpoints are expected to be Globus Server endpoints, send auto-activate commands for both globus endpoints
    auto_activate_endpoint(tc, source_endpoint_id)
    auto_activate_endpoint(tc, destination_endpoint_id)

    # make job_label for task a timestamp
    x = datetime.now()
    job_label = x.strftime('%Y%m%d%H%M%S')

    # From Globus: sync_level="checksum" means that before files are transferred, Globus computes checksums
    # on the source and destination files and only transfers the files whose checksums differ.
    # verify_checksum=True means that after a file is transferred, Globus computes checksums on the source and
    # destination files to verify that the file was transferred correctly; if the checksums do not match, it
    # redoes the transfer of that file.
    tdata = TransferData(tc,
                         source_endpoint_id,
                         destination_endpoint_id,
                         label=job_label,
                         sync_level="checksum",
                         verify_checksum=True)

    return tdata
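A minimal usage sketch (not from the original example), assuming a TransferClient built from the same config, e.g. via the get_transfer_client() helper shown in Example #16:

tc = get_transfer_client()   # hypothetical: same config-driven client as above
tdata = getTransferData()
tdata.add_item('/~/source_dir/', '/~/dest_dir/', recursive=True)  # placeholder paths
transfer_result = tc.submit_transfer(tdata)
print('task_id =', transfer_result['task_id'])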
Example #6
def create_shared_endpoint(globus_dict,
                           host_endpoint,
                           host_path,
                           display_name='Globus endpoint',
                           description='description'):
    globus_transfer_token = globus_dict['transfer_token']
    scopes = "urn:globus:auth:scopes:transfer.api.globus.org:all"
    authorizer = globus_sdk.AccessTokenAuthorizer(globus_transfer_token)
    tc = TransferClient(authorizer=authorizer)
    # high level interface; provides iterators for list responses
    shared_ep_data = {
        "DATA_TYPE": "shared_endpoint",
        "host_endpoint": host_endpoint,
        "host_path": host_path,
        "display_name": display_name,
        # optionally specify additional endpoint fields
        "description": description
    }
    #r = tc.operation_mkdir(host_id, path=share_path) #TODO create the directory directly from here instead of at local level?

    tc.endpoint_autoactivate(host_endpoint,
                             if_expires_in=3600)  #necessary for real use?
    create_result = tc.create_shared_endpoint(
        shared_ep_data)  # not the app's endpoint, so this should fail
    endpoint_id = create_result['id']
    globus_dict['endpoint_id'] = endpoint_id
    globus_dict['transfer_client'] = tc
    return globus_dict
Example #7
def test_create_job(timer_client, start, interval):
    meta = load_response(timer_client.create_job).metadata
    transfer_client = TransferClient()
    transfer_client.get_submission_id = lambda *_0, **_1: {"value": "mock"}
    transfer_data = TransferData(transfer_client, GO_EP1_ID, GO_EP2_ID)
    timer_job = TimerJob.from_transfer_data(transfer_data, start, interval)
    response = timer_client.create_job(timer_job)
    assert response.http_status == 201
    assert response.data["job_id"] == meta["job_id"]
    timer_job = TimerJob.from_transfer_data(dict(transfer_data), start,
                                            interval)
    response = timer_client.create_job(timer_job)
    assert response.http_status == 201
    assert response.data["job_id"] == meta["job_id"]
    req_body = json.loads(get_last_request().body)
    if isinstance(start, datetime):
        assert req_body["start"] == start.isoformat()
    else:
        assert req_body["start"] == start
    if isinstance(interval, timedelta):
        assert req_body["interval"] == interval.total_seconds()
    else:
        assert req_body["interval"] == interval
    assert req_body["callback_url"] == slash_join(get_service_url("actions"),
                                                  "/transfer/transfer/run")
Example #8
    def login(self):
        ''' fetch refresh token, store in dj.config['globus.token'] '''

        auth_client = self.auth_client

        print('Please login via: {}'.format(
            auth_client.oauth2_get_authorize_url()))

        code = input('and enter code:').strip()
        tokens = auth_client.oauth2_exchange_code_for_tokens(code)

        xfer_auth_cfg = tokens.by_resource_server['transfer.api.globus.org']
        xfer_rt = xfer_auth_cfg['refresh_token']
        xfer_at = xfer_auth_cfg['access_token']
        xfer_exp = xfer_auth_cfg['expires_at_seconds']

        xfer_auth = RefreshTokenAuthorizer(xfer_rt,
                                           auth_client,
                                           access_token=xfer_at,
                                           expires_at=xfer_exp)

        self.xfer_client = TransferClient(authorizer=xfer_auth)

        custom = dj.config.get('custom', {})
        custom['globus.token'] = xfer_rt
        dj.config['custom'] = custom
Example #9
def browse(dataset_id=None, endpoint_id=None, endpoint_path=None):
    """
    - Get list of files for the selected dataset or endpoint ID/path
    - Return a list of files to a browse view

    The target template (browse.jinja2) expects an `endpoint_uri` (if
    available for the endpoint), `target` (either `"dataset"`
    or `"endpoint"`), and 'file_list' (list of dictionaries) containing
    the following information about each file in the result:

    {'name': 'file name', 'size': 'file size', 'id': 'file uri/path'}

    If you want to display additional information about each file, you
    must add those keys to the dictionary and modify the browse.jinja2
    template accordingly.
    """

    assert bool(dataset_id) != bool(endpoint_id and endpoint_path)

    if dataset_id:
        try:
            dataset = next(ds for ds in datasets if ds['id'] == dataset_id)
        except StopIteration:
            abort(404)

        endpoint_id = app.config['DATASET_ENDPOINT_ID']
        endpoint_path = app.config['DATASET_ENDPOINT_BASE'] + dataset['path']

    else:
        endpoint_path = '/' + endpoint_path

    transfer = TransferClient(authorizer=RefreshTokenAuthorizer(
        session['tokens']['transfer.api.globus.org']['refresh_token'],
        load_portal_client()))

    try:
        transfer.endpoint_autoactivate(endpoint_id)
        listing = transfer.operation_ls(endpoint_id, path=endpoint_path)
    except TransferAPIError as err:
        flash('Error [{}]: {}'.format(err.code, err.message))
        return redirect(url_for('transfer'))

    file_list = [e for e in listing if e['type'] == 'file']

    ep = transfer.get_endpoint(endpoint_id)

    https_server = ep['https_server']
    endpoint_uri = https_server + endpoint_path if https_server else None
    webapp_xfer = 'https://www.globus.org/app/transfer?' + \
        urlencode(dict(origin_id=endpoint_id, origin_path=endpoint_path))

    return render_template(
        'browse.jinja2',
        endpoint_uri=endpoint_uri,
        target="dataset" if dataset_id else "endpoint",
        description=(dataset['name'] if dataset_id else ep['display_name']),
        file_list=file_list,
        webapp_xfer=webapp_xfer)
Example #10
 def __init__(self, config: UOCloudSyncConfig):
     confidential_client = ConfidentialAppAuthClient(
         client_id=config.get_client_id(), client_secret=config.get_client_secret())
     scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
     cc_authorizer = ClientCredentialsAuthorizer(confidential_client, scopes)
     # create a new client
     self._transfer_client = TransferClient(authorizer=cc_authorizer)
     self._src_endpoint = None
     self._dest_endpoint = None
Example #11
    def refresh(self):
        ''' use refresh token to refresh access token '''
        auth_client = self.auth_client

        xfer_auth = RefreshTokenAuthorizer(
            dj.config['custom']['globus.token'], auth_client,
            access_token=None, expires_at=None)

        self.xfer_client = TransferClient(authorizer=xfer_auth)
Example #12
    def getTransferClient(self, check: bool = False):
        if self.transferClient is None:
            authz = self.getAppTransferAuthorizer()
            self.transferClient = TransferClient(authz)
            if check:
                # almost dummy call as a sanity check
                self.transferClient.task_list(num_results=1)

        return self.transferClient
Example #13
def login():
    tok_path = os.path.expanduser('~/.mdf_agent_tokens.json')

    def _read_tokfile():
        tokens = {}
        if os.path.exists(tok_path):
            with open(tok_path) as f:
                tokens = json.load(f)
        return tokens

    def _write_tokfile(new_tokens):
        # We have multiple tokens in our tokens file, but on update we only
        # get the currently updated token, so read current and update with the
        # input tokens
        cur_tokens = _read_tokfile()
        for key in new_tokens:
            cur_tokens[key] = new_tokens[key]
        # deny rwx to Group and World -- don't bother storing the returned old
        # mask value, since we'll never restore it anyway
        # do this on every call to ensure that we're always consistent about it
        os.umask(0o077)
        with open(tok_path, 'w') as f:
            f.write(json.dumps(cur_tokens))

    def _update_tokfile(tokens):
        _write_tokfile(tokens.by_resource_server['transfer.api.globus.org'])

    tokens = _read_tokfile()
    client_id = "1e162bfc-ad52-4014-8844-b82841145fc4"
    native_client = NativeAppAuthClient(client_id, app_name='MDF Agents')

    if not tokens:
        # and do the Native App Grant flow
        native_client.oauth2_start_flow(
            requested_scopes='urn:globus:auth:scope:transfer.api.globus.org:all',
            refresh_tokens=True)
        linkprompt = 'Please login to Globus here'
        print('{0}:\n{1}\n{2}\n{1}\n'
              .format(linkprompt, '-' * len(linkprompt),
                      native_client.oauth2_get_authorize_url()), flush=True)
        auth_code = input(
            'Enter the resulting Authorization Code here: ').strip()
        tkns = native_client.oauth2_exchange_code_for_tokens(auth_code)
        tokens = tkns.by_resource_server['transfer.api.globus.org']

        _write_tokfile(tokens)

    transfer_tokens = tokens

    transfer_authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'], native_client,
        transfer_tokens['access_token'], transfer_tokens['expires_at_seconds'],
        on_refresh=_update_tokfile)

    transfer_client = TransferClient(authorizer=transfer_authorizer)
    return transfer_client
Example #14
def bulk_submit_xfer(submitjob, recursive=False):
    cfg = load_config()
    client_id = cfg['globus']['apps'][GLOBUS_AUTH_APP]['client_id']
    auth_client = NativeAppAuthClient(client_id)
    refresh_token = cfg['globus']['apps'][GLOBUS_AUTH_APP]['refresh_token']
    source_endpoint_id = submitjob[0].get('metadata').get(
        'source_globus_endpoint_id')
    destination_endpoint_id = submitjob[0].get('metadata').get(
        'dest_globus_endpoint_id')
    authorizer = RefreshTokenAuthorizer(refresh_token=refresh_token,
                                        auth_client=auth_client)
    tc = TransferClient(authorizer=authorizer)
    # as both endpoints are expected to be Globus Server endpoints, send auto-activate commands for both globus endpoints
    a = auto_activate_endpoint(tc, source_endpoint_id)
    logging.debug('a: %s' % a)
    if a != 'AlreadyActivated':
        return None

    b = auto_activate_endpoint(tc, destination_endpoint_id)
    logging.debug('b: %s' % b)
    if b != 'AlreadyActivated':
        return None

    # make job_label for task a timestamp
    x = datetime.now()
    job_label = x.strftime('%Y%m%d%H%M%S')

    # From Globus: sync_level="checksum" means that before files are transferred, Globus computes checksums
    # on the source and destination files and only transfers the files whose checksums differ.
    # verify_checksum=True means that after a file is transferred, Globus computes checksums on the source and
    # destination files to verify that the file was transferred correctly; if the checksums do not match, it
    # redoes the transfer of that file.
    tdata = TransferData(tc,
                         source_endpoint_id,
                         destination_endpoint_id,
                         label=job_label,
                         sync_level="checksum")

    for file in submitjob:
        source_path = file.get('sources')[0]
        dest_path = file.get('destinations')[0]
        filesize = file['metadata']['filesize']
        # TODO: support passing a recursive parameter to Globus
        # md5 = file['metadata']['md5']
        # tdata.add_item(source_path, dest_path, recursive=False, external_checksum=md5)
        tdata.add_item(source_path, dest_path, recursive=False)
        record_counter(
            'daemons.conveyor.transfer_submitter.globus.transfers.submit.filesize',
            filesize)

    # logging.info('submitting transfer...')
    transfer_result = tc.submit_transfer(tdata)
    # logging.info("task_id =", transfer_result["task_id"])

    return transfer_result["task_id"]
Example #15
 def __init__(self):
     # initialize a transfer_client if it has not been done already
     if Globus.transfer_client is None:
         native_client = NativeClient(
             client_id=settings.globus.get("client_id"),
             app_name="Globus Endpoint Performance Dashboard",
             default_scopes=settings.globus.get("scopes"))
         native_client.login(no_local_server=True, refresh_tokens=True)
         transfer_authorizer = native_client.get_authorizers().get(
             "transfer.api.globus.org")
         Globus.transfer_client = TransferClient(transfer_authorizer)
Example #16
def get_transfer_client():
    cfg = load_config()
    # cfg = yaml.safe_load(open("/opt/rucio/lib/rucio/transfertool/config.yml"))
    client_id = cfg['globus']['apps'][GLOBUS_AUTH_APP]['client_id']
    auth_client = NativeAppAuthClient(client_id)
    refresh_token = cfg['globus']['apps'][GLOBUS_AUTH_APP]['refresh_token']
    logging.info('authorizing token...')
    authorizer = RefreshTokenAuthorizer(refresh_token=refresh_token, auth_client=auth_client)
    logging.info('initializing TransferClient...')
    tc = TransferClient(authorizer=authorizer)
    return tc
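A minimal usage sketch (not from the original example), mirroring the task_list(num_results=...) call used in Example #12:

tc = get_transfer_client()
for task in tc.task_list(num_results=5):
    print(task['task_id'], task['status'])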
Example #17
def main():

    current_time = datetime.utcnow().replace(microsecond=0).isoformat()
    last_cleanup_time = datetime.utcnow().replace(microsecond=0)\
        - timedelta(hours=24)
    last_cleanup = last_cleanup_time.isoformat()
    completion_range = last_cleanup + "," + current_time
    print("Cleaning up source endpoint {} \nfor outbound transfers completed "
          "in range {}\n ".format(SOURCE_ENDPOINT_ID, completion_range))

    transfer_token = do_client_authentication(CLIENT_ID, CLIENT_SECRET)

    authorizer = AccessTokenAuthorizer(access_token=transfer_token)
    tc = TransferClient(authorizer=authorizer)

    # print out a directory listing from an endpoint
    tc.endpoint_autoactivate(SOURCE_ENDPOINT_ID)
    try:
        task_fields = "task_id,source_endpoint,destination_endpoint," \
                      "source_host_path,owner_string,source_endpoint_id,type"
        tasks = tc.endpoint_manager_task_list(
            filter_status="SUCCEEDED",
            filter_endpoint=SOURCE_ENDPOINT_ID,
            filter_completion_time=completion_range,
            fields=task_fields)
    except TransferAPIError as tapie:
        if tapie.code == 'PermissionDenied':
            print('Permission denied! Give your app permission by going to '
                  '"globus.org/app/endpoints/{}/roles", and under '
                  '"Identity/E-mail" adding "{}@clients.auth.globus.org" as '
                  'an "Access Manager" and "Activity Manager".'.format(
                      SOURCE_ENDPOINT_ID, CLIENT_ID))
            sys.exit(1)
        # Nothing weird *should* happen here, but if so re-raise so the user
        # can deal with it.
        raise
    tasklist = tasks.data
    if not tasklist:
        print("No transfers from {} found in the last 24 hours, "
              "nothing to clean up".format(SOURCE_ENDPOINT_ID))
    else:
        print("{} total transfers found from {} in the last 24 hours, "
              "some may not be of type TRANSFER".format(
                  len(tasklist), SOURCE_ENDPOINT_ID))
    delete_tasks = [
        task.data for task in tasklist
        if task_delete_conditions_satisfied(task)
    ]
    for task in delete_tasks:
        files_list, common_dir = select_dir_to_delete(tc, task)

        delete_dir_and_acls(tc, task, files_list, common_dir)
Example #18
def submit_transfer():
    """
    - Take the data returned by the Browse Endpoint helper page
      and make a Globus transfer request.
    - Send the user to the transfer status page with the task id
      from the transfer.
    """
    browse_endpoint_form = request.form

    selected = session['form']['datasets']
    filtered_datasets = [ds for ds in datasets if ds['id'] in selected]

    transfer_tokens = session['tokens']['transfer.api.globus.org']

    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        load_portal_client(),
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])

    transfer = TransferClient(authorizer=authorizer)

    source_endpoint_id = app.config['DATASET_ENDPOINT_ID']
    source_endpoint_base = app.config['DATASET_ENDPOINT_BASE']
    destination_endpoint_id = browse_endpoint_form['endpoint_id']
    destination_folder = browse_endpoint_form.get('folder[0]')

    transfer_data = TransferData(transfer_client=transfer,
                                 source_endpoint=source_endpoint_id,
                                 destination_endpoint=destination_endpoint_id,
                                 label=browse_endpoint_form.get('label'))

    for ds in filtered_datasets:
        source_path = source_endpoint_base + ds['path']
        dest_path = browse_endpoint_form['path']

        if destination_folder:
            dest_path += destination_folder + '/'

        dest_path += ds['name'] + '/'

        transfer_data.add_item(source_path=source_path,
                               destination_path=dest_path,
                               recursive=True)

    transfer.endpoint_autoactivate(source_endpoint_id)
    transfer.endpoint_autoactivate(destination_endpoint_id)
    task_id = transfer.submit_transfer(transfer_data)['task_id']

    flash('Transfer request submitted successfully. Task ID: ' + task_id)

    return (redirect(url_for('transfer_status', task_id=task_id)))
Example #19
def bulk_submit_xfer(submitjob, recursive=False, logger=logging.log):
    cfg = load_config(logger=logger)
    client_id = cfg['globus']['apps'][GLOBUS_AUTH_APP]['client_id']
    auth_client = NativeAppAuthClient(client_id)
    refresh_token = cfg['globus']['apps'][GLOBUS_AUTH_APP]['refresh_token']
    source_endpoint_id = submitjob[0].get('metadata').get(
        'source_globus_endpoint_id')
    destination_endpoint_id = submitjob[0].get('metadata').get(
        'dest_globus_endpoint_id')
    authorizer = RefreshTokenAuthorizer(refresh_token=refresh_token,
                                        auth_client=auth_client)
    tc = TransferClient(authorizer=authorizer)

    # make job_label for task a timestamp
    now = datetime.datetime.now()
    job_label = now.strftime('%Y%m%d%H%M%S')

    # retrieve globus_task_deadline value to enforce time window to complete transfers
    # default is 2880 minutes or 48 hours
    globus_task_deadline = config_get_int('conveyor', 'globus_task_deadline',
                                          False, 2880)
    deadline = now + datetime.timedelta(minutes=globus_task_deadline)

    # From Globus: sync_level="checksum" means that before files are transferred, Globus computes checksums
    # on the source and destination files and only transfers the files whose checksums differ.
    # verify_checksum=True means that after a file is transferred, Globus computes checksums on the source and
    # destination files to verify that the file was transferred correctly; if the checksums do not match, it
    # redoes the transfer of that file.
    tdata = TransferData(tc,
                         source_endpoint_id,
                         destination_endpoint_id,
                         label=job_label,
                         sync_level="checksum",
                         deadline=str(deadline))

    for file in submitjob:
        source_path = file.get('sources')[0]
        dest_path = file.get('destinations')[0]
        filesize = file['metadata']['filesize']
        # TODO: support passing a recursive parameter to Globus
        # md5 = file['metadata']['md5']
        # tdata.add_item(source_path, dest_path, recursive=False, external_checksum=md5)
        tdata.add_item(source_path, dest_path, recursive=False)
        record_counter(
            'daemons.conveyor.transfer_submitter.globus.transfers.submit.filesize',
            filesize)

    # logging.info('submitting transfer...')
    transfer_result = tc.submit_transfer(tdata)
    logger(logging.INFO, "transfer_result: %s" % transfer_result)

    return transfer_result["task_id"]
Example #20
    def preactivate_globus(self):
        """
        Read the local globus endpoint UUID from ~/.zstash.ini.
        If the ini file does not exist, create an ini file with empty values,
        and try to find the local endpoint UUID based on the FQDN
        """
        local_endpoint = None
        ini_path = os.path.expanduser("~/.zstash.ini")
        ini = configparser.ConfigParser()
        if ini.read(ini_path):
            if "local" in ini.sections():
                local_endpoint = ini["local"].get("globus_endpoint_uuid")
        else:
            ini["local"] = {"globus_endpoint_uuid": ""}
            try:
                with open(ini_path, "w") as f:
                    ini.write(f)
            except Exception as e:
                self.fail(e)
        if not local_endpoint:
            fqdn = socket.getfqdn()
            for pattern in regex_endpoint_map.keys():
                if re.fullmatch(pattern, fqdn):
                    local_endpoint = regex_endpoint_map.get(pattern)
                    break
        if not local_endpoint:
            # self.fail("{} does not have the local Globus endpoint set".format(ini_path))
            self.skipTest(
                "{} does not have the local Globus endpoint set".format(
                    ini_path))

        native_client = NativeClient(
            client_id="6c1629cf-446c-49e7-af95-323c6412397f",
            app_name="Zstash",
            default_scopes="openid urn:globus:auth:scope:transfer.api.globus.org:all",
        )
        native_client.login(no_local_server=True, refresh_tokens=True)
        transfer_authorizer = native_client.get_authorizers().get(
            "transfer.api.globus.org")
        self.transfer_client = TransferClient(transfer_authorizer)

        for ep_id in [hpss_globus_endpoint, local_endpoint]:
            r = self.transfer_client.endpoint_autoactivate(ep_id,
                                                           if_expires_in=600)
            if r.get("code") == "AutoActivationFailed":
                self.fail(
                    "The {} endpoint is not activated or the current activation expires soon. Please go to https://app.globus.org/file-manager/collections/{} and (re)-activate the endpoint."
                    .format(ep_id, ep_id))
Example #21
    def get_transfer_interface(self, auth_client):
        self.log.info("get_transfer_interface")
        if self.transfer_client:
            self.log.info("found transfer_client")
            return self.transfer_client

        self.log.info("did not found transfer_client")
        self.log.info("auth_client")
        self.log.info(auth_client)

        scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
        cc_authorizer = ClientCredentialsAuthorizer(auth_client, scopes)
        transfer_client = TransferClient(authorizer=cc_authorizer)
        self.log.info("get_transfer_interface - transfer_client")
        self.log.info(transfer_client)
        return transfer_client
Example #22
def transfer_status(task_id):
    """
    Call Globus to get status/details of transfer with
    task_id.

    The target template (transfer_status.jinja2) expects a Transfer API
    'task' object.

    'task_id' is passed to the route in the URL as 'task_id'.
    """
    transfer = TransferClient(authorizer=RefreshTokenAuthorizer(
        session['tokens']['transfer.api.globus.org']['refresh_token'],
        load_portal_client()))
    task = transfer.get_task(task_id)

    return render_template('transfer_status.jinja2', task=task)
Example #23
    def __init__(self):
        """Initiate an OAuth2() object.

        Initiate the OAuth2 flow with Globus credentials to obtain access tokens.
        Refresh the tokens automatically so another login is not required.

        Examples
        --------
        Create an OAuth2 object:
            >>> from archeion.models import OAuth2
            >>> authorizer = OAuth2()

        """
        self.client = NativeAppAuthClient(CLIENT_ID)
        self.client.oauth2_start_flow(refresh_tokens=True)

        logger.info("Opening browser window for Globus Authentication")
        webbrowser.open_new(self.client.oauth2_get_authorize_url())

        get_input = getattr(__builtins__, "raw_input", input)
        auth_code = get_input(
            "Please enter the code you get after login here: "
        ).strip()
        logger.debug("User has input authentication code")
        token_response = self.client.oauth2_exchange_code_for_tokens(auth_code)

        self.access_token = token_response.by_resource_server["auth.globus.org"][
            "access_token"
        ]
        transfer_response = token_response.by_resource_server["transfer.api.globus.org"]
        self.transfer_token = transfer_response["access_token"]
        self.transfer_refresh_token = transfer_response["refresh_token"]
        self.transfer_expiry_seconds = transfer_response["expires_at_seconds"]

        authorizer = RefreshTokenAuthorizer(
            self.transfer_refresh_token,
            self.client,
            access_token=self.transfer_token,
            expires_at=self.transfer_expiry_seconds,
        )
        self.transfer_client = TransferClient(
            AccessTokenAuthorizer(self.transfer_token)
        )
        self.authorisation_client = AuthClient(authorizer=authorizer)
Example #24
def cleanup():
    user_identity_name = request.form.get('user_identity_name')

    dependent_tokens = get_dependent_tokens(g.req_token)

    transfer_token = dependent_tokens.by_resource_server[
        'transfer.api.globus.org']['access_token']

    dest_ep = app.config['GRAPH_ENDPOINT_ID']
    dest_base = app.config['GRAPH_ENDPOINT_BASE']
    dest_path = '%sGraphs for %s/' % (dest_base, user_identity_name)

    transfer = TransferClient(authorizer=AccessTokenAuthorizer(transfer_token))

    transfer.endpoint_autoactivate(dest_ep)

    try:
        acl = next(acl for acl in transfer.endpoint_acl_list(dest_ep)
                   if dest_path == acl['path'])
    except StopIteration:
        pass
    except TransferAPIError as ex:
        # PermissionDenied can happen if a new Portal client is swapped
        # in and it doesn't have endpoint manager on the dest_ep.
        # The /portal/processed directory has been set to be writable
        # for all users so the delete task will succeed even if an ACL
        # can't be set.
        if ex.code == 'PermissionDenied':
            pass
    else:
        transfer.delete_endpoint_acl_rule(dest_ep, acl['id'])

    delete_request = DeleteData(transfer_client=transfer,
                                endpoint=dest_ep,
                                label="Delete Graphs from the Service Demo",
                                recursive=True)
    delete_request.add_item(dest_path)

    try:
        task = transfer.submit_delete(delete_request)
    except TransferAPIError as ex:
        raise InternalServerError(message=ex.message)
    else:
        return jsonify(dict(task_id=task['task_id']))
Example #25
    def __init__(self):
        self.log = logging.getLogger(self.__class__.__name__)
        self.log.debug(" init - started")

        self.client_user = os.environ.get('MC_CONFIDENTIAL_CLIENT_USER')
        self.client_token = os.environ.get('MC_CONFIDENTIAL_CLIENT_PW')

        if (not self.client_user) or (not self.client_token):
            missing = []
            if not self.client_user:
                missing.append('MC_CONFIDENTIAL_CLIENT_USER')
            if not self.client_token:
                missing.append('MC_CONFIDENTIAL_CLIENT_PW')
            message = "Missing environment values: {}".format(
                ", ".join(missing))
            raise EnvironmentError(message)

        self.log.info("Env variables are ok")
        self.log.info("  -- MC_CONFIDENTIAL_CLIENT_USER = {}".format(
            self.client_user))

        auth_client = ConfidentialAppAuthClient(
            client_id=self.client_user, client_secret=self.client_token)
        if not auth_client:
            error = "No Authentication Client"
            self.log.error("Error: " + str(error))
            raise AuthenticationException(error)
        self.log.info("set_transfer_client - auth_client = {}".format(
            auth_client.client_id))
        scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
        cc_authorizer = ClientCredentialsAuthorizer(auth_client, scopes)
        transfer_client = TransferClient(authorizer=cc_authorizer)
        self.log.debug("get_transfer_interface - transfer_client")
        self.log.debug(transfer_client)
        if not transfer_client:
            error = "No transfer interface"
            self.log.error("Error: " + str(error))
            raise AuthenticationException(error)

        self.log.debug(" init - done")
        self._auth_client = auth_client
        self._transfer_client = transfer_client
Example #26
def create_globus_transfer_client(tmpLog, globus_client_id,
                                  globus_refresh_token):
    """
    create Globus Transfer Client and return the transfer client
    """
    # get logger
    tmpLog.info('Creating instance of GlobusTransferClient')
    # start the Native App authentication process
    # use the refresh token to get authorizer
    # create the Globus Transfer Client
    tc = None
    errStat = True
    try:
        client = NativeAppAuthClient(client_id=globus_client_id)
        authorizer = RefreshTokenAuthorizer(refresh_token=globus_refresh_token,
                                            auth_client=client)
        tc = TransferClient(authorizer=authorizer)
    except Exception:
        errStat, errMsg = handle_globus_exception(tmpLog)
    return errStat, tc
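A hypothetical usage sketch (not from the original example): the function returns a (status, client) pair, so callers unpack both values; the client id and refresh token are placeholders.

import logging

tmpLog = logging.getLogger('globus')
err_stat, tc = create_globus_transfer_client(tmpLog, 'CLIENT-ID', 'REFRESH-TOKEN')
if tc is not None:
    print(tc.task_list(num_results=1))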
Example #27
def setup_transfer_client(transfer_tokens):

    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        NativeAppAuthClient(client_id=CLIENT_ID),
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])

    transfer_client = TransferClient(authorizer=authorizer)

    try:
        transfer_client.endpoint_autoactivate(SOURCE_ENDPOINT)
        transfer_client.endpoint_autoactivate(DESTINATION_ENDPOINT)
    except GlobusAPIError as ex:
        if ex.http_status == 401:
            sys.exit('Refresh token has expired. '
                     'Please delete the `tokens` object from '
                     '{} and try again.'.format(DATA_FILE))
        else:
            raise ex
    return transfer_client
Example #28
    def copy_data(self, ori, destiny):
        """
        copy data using globus

        :param ori: path where the data is in the source machine
        :type ori: str

        :param destiny: path where the data will be placed on the destination machine
        :type destiny: str

        :raises Exception: a problem occurred during the transfer
        """

        authorizer = AccessTokenAuthorizer(GlobusManager.TRANSFER_TOKEN)
        tc = TransferClient(authorizer=authorizer)
        res = self.copy_directory(ori, destiny, tc)

        if res == "NOT_A_DIRECTORY":
            res = self.copy_file(ori, destiny, tc)

        if res != "OK":
            raise Exception(res)
Example #29
def setup_transfer_client(transfer_tokens, source_endpoint,
                          destination_endpoint):
    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        NativeAppAuthClient(client_id=CLIENT_ID),
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'],
        on_refresh=update_tokens_file_on_refresh)

    transfer_client = TransferClient(authorizer=authorizer)

    try:
        transfer_client.endpoint_autoactivate(source_endpoint)
        transfer_client.endpoint_autoactivate(destination_endpoint)
    except GlobusAPIError as ex:
        if ex.http_status == 401:
            sys.exit('Refresh token has expired. '
                     'Please delete the `tokens` object from '
                     '{} and try again.'.format(DATA_FILE))
        else:
            raise ex
    return transfer_client
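A minimal invocation sketch (not from the original example); the transfer_tokens mapping is assumed to carry the three fields the RefreshTokenAuthorizer above reads, and the endpoint UUIDs are placeholders:

transfer_tokens = {
    'refresh_token': 'REFRESH-TOKEN',
    'access_token': 'ACCESS-TOKEN',
    'expires_at_seconds': 0,
}
transfer_client = setup_transfer_client(transfer_tokens,
                                        'SOURCE-ENDPOINT-UUID',
                                        'DESTINATION-ENDPOINT-UUID')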
Example #30
    def status(self):
        token = TokenStore.get_transfer_token(self.user)
        tc = TransferClient(authorizer=AccessTokenAuthorizer(token))
        old = json.loads(self.task_catalog or '{}')
        tasks = {
            t: tc.get_task(t).data
            for t in json.loads(self.transfer_task_ids)
            if not old.get(t) or old[t]['status'] == 'ACTIVE'
        }
        old.update(tasks)
        tasks = old

        transferred = [t['files_transferred'] for t in tasks.values()]
        log.debug(transferred)
        self.files_transferred = sum(transferred)
        log.debug(self.files_transferred)
        self.task_catalog = json.dumps(tasks)
        self.save()
        statuses = [s['status'] for s in tasks.values()]
        if any(filter(lambda stat: stat in ['INACTIVE', 'FAILED'], statuses)):
            return 'FAILED'
        if any(filter(lambda stat: stat == 'ACTIVE', statuses)):
            return 'ACTIVE'
        return 'SUCCEEDED'