Example #1
def create_shared_endpoint(globus_dict,
                           host_endpoint,
                           host_path,
                           display_name='Globus endpoint',
                           description='description'):
    globus_transfer_token = globus_dict['transfer_token']
    scopes = "urn:globus:auth:scope:transfer.api.globus.org:all"
    authorizer = globus_sdk.AccessTokenAuthorizer(globus_transfer_token)
    tc = TransferClient(authorizer=authorizer)
    # high level interface; provides iterators for list responses
    shared_ep_data = {
        "DATA_TYPE": "shared_endpoint",
        "host_endpoint": host_endpoint,
        "host_path": host_path,
        "display_name": display_name,
        # optionally specify additional endpoint fields
        "description": description
    }
    #r = tc.operation_mkdir(host_id, path=share_path) #TODO create the directory directly from here instead of at local level?

    tc.endpoint_autoactivate(host_endpoint,
                             if_expires_in=3600)  #necessary for real use?
    create_result = tc.create_shared_endpoint(
        shared_ep_data)  #not the app's end point, so should fail
    endpoint_id = create_result['id']
    globus_dict['endpoint_id'] = endpoint_id
    globus_dict['transfer_client'] = tc
    return globus_dict
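
A minimal usage sketch (not part of the original example): the token, host endpoint UUID, and path below are placeholders for values obtained elsewhere.

# Hypothetical invocation of the helper above; all values are placeholders.
globus_dict = {'transfer_token': '<TRANSFER_ACCESS_TOKEN>'}
globus_dict = create_shared_endpoint(
    globus_dict,
    host_endpoint='00000000-0000-0000-0000-000000000000',  # host collection UUID
    host_path='/~/shared_data/',
    display_name='Project share',
    description='Share created with create_shared_endpoint()')
print('New shared endpoint id:', globus_dict['endpoint_id'])
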
def browse(dataset_id=None, endpoint_id=None, endpoint_path=None):
    """
    - Get list of files for the selected dataset or endpoint ID/path
    - Return a list of files to a browse view

    The target template (browse.jinja2) expects an `endpoint_uri` (if
    available for the endpoint), `target` (either `"dataset"`
    or `"endpoint"`), and 'file_list' (list of dictionaries) containing
    the following information about each file in the result:

    {'name': 'file name', 'size': 'file size', 'id': 'file uri/path'}

    If you want to display additional information about each file, you
    must add those keys to the dictionary and modify the browse.jinja2
    template accordingly.
    """

    assert bool(dataset_id) != bool(endpoint_id and endpoint_path)

    if dataset_id:
        try:
            dataset = next(ds for ds in datasets if ds['id'] == dataset_id)
        except StopIteration:
            abort(404)

        endpoint_id = app.config['DATASET_ENDPOINT_ID']
        endpoint_path = app.config['DATASET_ENDPOINT_BASE'] + dataset['path']

    else:
        endpoint_path = '/' + endpoint_path

    transfer = TransferClient(authorizer=RefreshTokenAuthorizer(
        session['tokens']['transfer.api.globus.org']['refresh_token'],
        load_portal_client()))

    try:
        transfer.endpoint_autoactivate(endpoint_id)
        listing = transfer.operation_ls(endpoint_id, path=endpoint_path)
    except TransferAPIError as err:
        flash('Error [{}]: {}'.format(err.code, err.message))
        return redirect(url_for('transfer'))

    file_list = [e for e in listing if e['type'] == 'file']

    ep = transfer.get_endpoint(endpoint_id)

    https_server = ep['https_server']
    endpoint_uri = https_server + endpoint_path if https_server else None
    webapp_xfer = 'https://www.globus.org/app/transfer?' + \
        urlencode(dict(origin_id=endpoint_id, origin_path=endpoint_path))

    return render_template(
        'browse.jinja2',
        endpoint_uri=endpoint_uri,
        target="dataset" if dataset_id else "endpoint",
        description=(dataset['name'] if dataset_id else ep['display_name']),
        file_list=file_list,
        webapp_xfer=webapp_xfer)
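
A sketch (not from the original code) of how the raw `operation_ls` entries could be reshaped into the exact {'name', 'size', 'id'} dictionaries described in the docstring, if the template needs them:

# Hypothetical reshaping; the original passes the raw entries through as-is.
file_list = [
    {'name': entry['name'],
     'size': entry['size'],
     'id': endpoint_path.rstrip('/') + '/' + entry['name']}
    for entry in listing if entry['type'] == 'file'
]
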
Example #3
def main():

    current_time = datetime.utcnow().replace(microsecond=0).isoformat()
    last_cleanup_time = datetime.utcnow().replace(microsecond=0)\
        - timedelta(hours=24)
    last_cleanup = last_cleanup_time.isoformat()
    completion_range = last_cleanup + "," + current_time
    print("Cleaning up source endpoint {} \nfor outbound transfers completed "
          "in range {}\n ".format(SOURCE_ENDPOINT_ID, completion_range))

    transfer_token = do_client_authentication(CLIENT_ID, CLIENT_SECRET)

    authorizer = AccessTokenAuthorizer(access_token=transfer_token)
    tc = TransferClient(authorizer=authorizer)

    # print out a directory listing from an endpoint
    tc.endpoint_autoactivate(SOURCE_ENDPOINT_ID)
    try:
        task_fields = "task_id,source_endpoint,destination_endpoint," \
                      "source_host_path,owner_string,source_endpoint_id,type"
        tasks = tc.endpoint_manager_task_list(
            filter_status="SUCCEEDED",
            filter_endpoint=SOURCE_ENDPOINT_ID,
            filter_completion_time=completion_range,
            fields=task_fields)
    except TransferAPIError as tapie:
        if tapie.code == 'PermissionDenied':
            print('Permission denied! Give your app permission by going to '
                  '"globus.org/app/endpoints/{}/roles", and under '
                  '"Identity/E-mail" adding "{}@clients.auth.globus.org" as '
                  'an "Access Manager" and "Activity Manager"'.format(
                      SOURCE_ENDPOINT_ID, CLIENT_ID))
            sys.exit(1)
        # Nothing weird *should* happen here, but if so re-raise so the user
        # can deal with it.
        raise
    tasklist = tasks.data
    if not tasklist:
        print("No transfers from {} found in the last 24 hours, "
              "nothing to clean up".format(SOURCE_ENDPOINT_ID))
    else:
        print("{} total transfers found from {} in the last 24 hours, "
              "some may not be of type TRANSFER".format(
                  len(tasklist), SOURCE_ENDPOINT_ID))
    delete_tasks = [
        task.data for task in tasklist
        if task_delete_conditions_satisfied(task)
    ]
    for task in delete_tasks:
        files_list, common_dir = select_dir_to_delete(tc, task)

        delete_dir_and_acls(tc, task, files_list, common_dir)
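
The helper task_delete_conditions_satisfied() is not shown in this example. A plausible sketch, based only on the task fields requested above (the real logic may differ):

def task_delete_conditions_satisfied(task):
    # Hypothetical filter: only clean up TRANSFER tasks whose source is the
    # managed endpoint being cleaned.
    return (task["type"] == "TRANSFER"
            and task["source_endpoint_id"] == SOURCE_ENDPOINT_ID)
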
def submit_transfer():
    """
    - Take the data returned by the Browse Endpoint helper page
      and make a Globus transfer request.
    - Send the user to the transfer status page with the task id
      from the transfer.
    """
    browse_endpoint_form = request.form

    selected = session['form']['datasets']
    filtered_datasets = [ds for ds in datasets if ds['id'] in selected]

    transfer_tokens = session['tokens']['transfer.api.globus.org']

    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        load_portal_client(),
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])

    transfer = TransferClient(authorizer=authorizer)

    source_endpoint_id = app.config['DATASET_ENDPOINT_ID']
    source_endpoint_base = app.config['DATASET_ENDPOINT_BASE']
    destination_endpoint_id = browse_endpoint_form['endpoint_id']
    destination_folder = browse_endpoint_form.get('folder[0]')

    transfer_data = TransferData(transfer_client=transfer,
                                 source_endpoint=source_endpoint_id,
                                 destination_endpoint=destination_endpoint_id,
                                 label=browse_endpoint_form.get('label'))

    for ds in filtered_datasets:
        source_path = source_endpoint_base + ds['path']
        dest_path = browse_endpoint_form['path']

        if destination_folder:
            dest_path += destination_folder + '/'

        dest_path += ds['name'] + '/'

        transfer_data.add_item(source_path=source_path,
                               destination_path=dest_path,
                               recursive=True)

    transfer.endpoint_autoactivate(source_endpoint_id)
    transfer.endpoint_autoactivate(destination_endpoint_id)
    task_id = transfer.submit_transfer(transfer_data)['task_id']

    flash('Transfer request submitted successfully. Task ID: ' + task_id)

    return (redirect(url_for('transfer_status', task_id=task_id)))
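
The 'transfer_status' view targeted by this redirect is not shown. A minimal sketch that rebuilds the transfer client from the session tokens the same way submit_transfer() does (the template name is an assumption):

def transfer_status(task_id):
    transfer_tokens = session['tokens']['transfer.api.globus.org']
    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        load_portal_client(),
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])
    transfer = TransferClient(authorizer=authorizer)
    # Look up the submitted task and hand it to a status template.
    task = transfer.get_task(task_id)
    return render_template('transfer_status.jinja2', task=task)
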
Example #5
def cleanup():
    user_identity_name = request.form.get('user_identity_name')

    dependent_tokens = get_dependent_tokens(g.req_token)

    transfer_token = dependent_tokens.by_resource_server[
        'transfer.api.globus.org']['access_token']

    dest_ep = app.config['GRAPH_ENDPOINT_ID']
    dest_base = app.config['GRAPH_ENDPOINT_BASE']
    dest_path = '%sGraphs for %s/' % (dest_base, user_identity_name)

    transfer = TransferClient(authorizer=AccessTokenAuthorizer(transfer_token))

    transfer.endpoint_autoactivate(dest_ep)

    try:
        acl = next(acl for acl in transfer.endpoint_acl_list(dest_ep)
                   if dest_path == acl['path'])
    except StopIteration:
        pass
    except TransferAPIError as ex:
        # PermissionDenied can happen if a new Portal client is swapped
        # in and it doesn't have endpoint manager on the dest_ep.
        # The /portal/processed directory has been set to be writeable
        # for all users so the delete task will succeed even if an ACL
        # can't be set.
        if ex.code == 'PermissionDenied':
            pass
    else:
        transfer.delete_endpoint_acl_rule(dest_ep, acl['id'])

    delete_request = DeleteData(transfer_client=transfer,
                                endpoint=dest_ep,
                                label="Delete Graphs from the Service Demo",
                                recursive=True)
    delete_request.add_item(dest_path)

    try:
        task = transfer.submit_delete(delete_request)
    except TransferAPIError as ex:
        raise InternalServerError(message=ex.message)
    else:
        return jsonify(dict(task_id=task['task_id']))
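
The helper get_dependent_tokens() used here (and in the doit() examples further down) is not shown. A minimal sketch, assuming the portal is registered as a confidential client; the config key names are placeholders:

import globus_sdk

def get_dependent_tokens(access_token):
    # Hypothetical implementation: exchange the incoming request token for
    # dependent tokens via the Globus Auth dependent-token grant.
    client = globus_sdk.ConfidentialAppAuthClient(
        app.config['PORTAL_CLIENT_ID'], app.config['PORTAL_CLIENT_SECRET'])
    return client.oauth2_get_dependent_tokens(access_token)
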
def setup_transfer_client(transfer_tokens):

    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        NativeAppAuthClient(client_id=CLIENT_ID),
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])

    transfer_client = TransferClient(authorizer=authorizer)

    try:
        transfer_client.endpoint_autoactivate(SOURCE_ENDPOINT)
        transfer_client.endpoint_autoactivate(DESTINATION_ENDPOINT)
    except GlobusAPIError as ex:
        if ex.http_status == 401:
            sys.exit('Refresh token has expired. '
                     'Please delete the `tokens` object from '
                     '{} and try again.'.format(DATA_FILE))
        else:
            raise ex
    return transfer_client
Example #8
def setup_transfer_client(transfer_tokens, source_endpoint,
                          destination_endpoint):
    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        NativeAppAuthClient(client_id=CLIENT_ID),
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'],
        on_refresh=update_tokens_file_on_refresh)

    transfer_client = TransferClient(authorizer=authorizer)

    try:
        transfer_client.endpoint_autoactivate(source_endpoint)
        transfer_client.endpoint_autoactivate(destination_endpoint)
    except GlobusAPIError as ex:
        if ex.http_status == 401:
            sys.exit('Refresh token has expired. '
                     'Please delete the `tokens` object from '
                     '{} and try again.'.format(DATA_FILE))
        else:
            raise ex
    return transfer_client
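
DATA_FILE and the update_tokens_file_on_refresh() callback are referenced above but not shown. A minimal sketch of a typical token-persistence callback (the file name is a placeholder):

import json

DATA_FILE = 'refresh-tokens.json'  # placeholder path

def update_tokens_file_on_refresh(token_response):
    # Called by RefreshTokenAuthorizer whenever a new access token is
    # fetched; persist the refreshed tokens for the next run.
    with open(DATA_FILE, 'w') as f:
        json.dump(token_response.by_resource_server, f)
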
Example #9
def error_if_not_activated(
    transfer_client: TransferClient,
    endpoints: List[str],
    reactivate_if_expires_in=86400,
):
    not_activated = endpoints_not_activated(transfer_client, endpoints)
    still_not_activated = []
    for endpoint in not_activated:
        response = transfer_client.endpoint_autoactivate(
            endpoint, if_expires_in=reactivate_if_expires_in)
        if response.get("code") == "AutoActivationFailed":
            still_not_activated.append(endpoint)
    if still_not_activated:
        click.echo(
            "Error: requested endpoint(s) not activated: "
            f"{', '.join(still_not_activated)}\n"
            "Open in the web app to activate:",
            err=True,
        )
        for endpoint in still_not_activated:
            click.echo(
                f"    https://app.globus.org/file-manager?origin_id={endpoint}",
                err=True,
            )
        sys.exit(1)
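
The helper endpoints_not_activated() is not shown above. One plausible sketch, assuming the same imports as the snippet and that it checks each endpoint document's 'activated' field:

def endpoints_not_activated(transfer_client: TransferClient,
                            endpoints: List[str]) -> List[str]:
    # Hypothetical: return the endpoint IDs whose activation has lapsed.
    return [ep for ep in endpoints
            if not transfer_client.get_endpoint(ep)["activated"]]
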
Example #10
def browse(dataset_id=None, endpoint_id=None, endpoint_path=None):
    """
    - Get list of files for the selected dataset or endpoint ID/path
    - Return a list of files to a browse view

    The target template (browse.jinja2) expects an `endpoint_uri` (if
    available for the endpoint), `target` (either `"dataset"`
    or `"endpoint"`), and 'file_list' (list of dictionaries) containing
    the following information about each file in the result:

    {'name': 'file name', 'size': 'file size', 'id': 'file uri/path'}

    If you want to display additional information about each file, you
    must add those keys to the dictionary and modify the browse.jinja2
    template accordingly.
    """

    if request.method == 'GET':
        assert bool(dataset_id) != bool(endpoint_id and endpoint_path)

        if dataset_id:
            try:
                dataset = next(ds for ds in datasets if ds['id'] == dataset_id)
            except StopIteration:
                abort(404)

            endpoint_id = app.config['DATASET_ENDPOINT_ID']
            endpoint_path = app.config['DATASET_ENDPOINT_BASE'] + dataset['path']

        else:
            endpoint_path = '/' + endpoint_path

        transfer_tokens = session['tokens']['transfer.api.globus.org']

        authorizer = RefreshTokenAuthorizer(
            transfer_tokens['refresh_token'],
            load_portal_client(),
            access_token=transfer_tokens['access_token'],
            expires_at=transfer_tokens['expires_at_seconds'])

        transfer = TransferClient(authorizer=authorizer)

        try:
            transfer.endpoint_autoactivate(endpoint_id)
            listing = transfer.operation_ls(endpoint_id, path=endpoint_path)
        except TransferAPIError as err:
            flash('Error [{}]: {}'.format(err.code, err.message))
            return redirect(url_for('browse'))

        file_list = [e for e in listing if e['type'] == 'file']

        ep = transfer.get_endpoint(endpoint_id)

        https_server = ep['https_server']
        endpoint_uri = https_server + endpoint_path if https_server else None
        webapp_xfer = 'https://app.globus.org/file-manager?' + \
            urlencode(dict(origin_id=endpoint_id, origin_path=endpoint_path))


        #print("endpintURL == " + endpoint_uri)

        return render_template('browse.jinja2', endpoint_uri=endpoint_uri,
                           target="dataset" if dataset_id else "endpoint",
                           description=(dataset['name'] if dataset_id
                                        else ep['display_name']),
                           mypath=(dataset['path'] if dataset_id
                                        else None),
                           myid=(dataset['id'] if dataset_id
                                        else None),
                           file_list=file_list, webapp_xfer=webapp_xfer)

    if request.method == 'POST':
        if not request.form.get('file'):
            flash('Please select at least one file.')
            return redirect(url_for('browse'))

        params = {
            'method': 'POST',
            'action': url_for('submit_transfer', _external=True,
                              _scheme='https'),
            'filelimit': 0,
            'folderlimit': 1
        }

        browse_endpoint = 'https://app.globus.org/file-manager?{}' \
            .format(urlencode(params))

        session['form'] = {
            'dirselect': False,
            'datasets': request.form.getlist('file'),
            'path': request.form.getlist('path'),
            'id': request.form.getlist('id')
        }

        return redirect(browse_endpoint)
Example #11
def authcallback():
    """Handles the interaction with Globus Auth."""

    def test_func(token_data):
        print('this is a test')

    # If we're coming back from Globus Auth in an error state, the error
    # will be in the "error" query string parameter.
    if 'error' in request.args:
        print('error')
        return redirect(url_for('task_server.transfer'))

    # Set up our Globus Auth/OAuth2 state
    redirect_uri = url_for('task_server.authcallback', _external=True) 

    client = globus_sdk.ConfidentialAppAuthClient(
        app.config['TS_CLIENT_ID'], app.config['TS_CLIENT_SECRET'])
    client.oauth2_start_flow(redirect_uri, refresh_tokens=True) #let globus know where to go back to

    # If there's no "code" query string parameter, we're in this route
    # starting a Globus Auth login flow.
    if 'code' not in request.args:
        additional_authorize_params = (
            {'signup': 1} if request.args.get('signup') else {})


        auth_uri = client.oauth2_get_authorize_url(
            additional_params=additional_authorize_params)

        return redirect(auth_uri, code=307)
    else:
        # If we do have a "code" param, we're coming back from Globus Auth
        # and can start the process of exchanging an auth code for a token.
        

        #requests.post('http://localhost:8081/api/messenger', headers={'content-type': 'application/json'},data=json.dumps({'message':'User logged in....'}))

        code = request.args.get('code')
        tokens = client.oauth2_exchange_code_for_tokens(code)
        id_token = tokens.decode_id_token(client)
    
        
        session.update( 
            tokens=tokens.by_resource_server,
            is_authenticated=True,
            name=id_token.get('name', ''),
            email=id_token.get('email', ''),
            institution=id_token.get('institution', ''),
            primary_username=id_token.get('preferred_username'),
            primary_identity=id_token.get('sub'),
        )

        tokens = session['tokens']
        

        stage_in_source = redis_store.get('stage_in_source').decode('utf-8')
        stage_in_destination = redis_store.get('stage_in_destination').decode('utf-8')
        stage_out_destination = redis_store.get('stage_out_destination').decode('utf-8')
        new_token = None

        authorizer = globus_sdk.RefreshTokenAuthorizer(
            tokens['transfer.api.globus.org']['refresh_token'],
            globus_sdk.ConfidentialAppAuthClient(
                app.config['TS_CLIENT_ID'], app.config['TS_CLIENT_SECRET']),
            access_token=tokens['transfer.api.globus.org']['access_token'],
            expires_at=1,  # already expired, so a refresh (and on_refresh) happens on first use
            on_refresh=test_func)

        #authorizer = globus_sdk.AccessTokenAuthorizer(tokens['transfer.api.globus.org']['access_token'])
        tc = TransferClient(authorizer=authorizer)



        a = tc.endpoint_autoactivate(stage_in_source, if_expires_in=3600)
        b = tc.endpoint_autoactivate(stage_in_destination, if_expires_in=3600)
        c = tc.endpoint_autoactivate(stage_out_destination, if_expires_in=3600)
        if a["code"] == "AutoActivationFailed" or b["code"] == "AutoActivationFailed" or c["code"] == "AutoActivationFailed":
            stage_in_source_response = tc.get_endpoint(stage_in_source)
            stage_in_destination_response = tc.get_endpoint(stage_in_destination)
            stage_out_destination_response = tc.get_endpoint(stage_out_destination)

            e1name = stage_in_source_response["display_name"]
            e2name = stage_in_destination_response["display_name"]
            e3name = stage_out_destination_response["display_name"]
            
            return redirect("http://localhost:8080/activate?e1id="+ (stage_in_source if a["code"] == "AutoActivationFailed" else "")+("&e1name="+e1name if a["code"] == "AutoActivationFailed" else "")+"&e2id="+ (stage_in_destination if b["code"] == "AutoActivationFailed" else "")+("&e2name="+e2name if a["code"] == "AutoActivationFailed" else "" )+"&e3id="+(stage_out_destination if c["code"] == "AutoActivationFailed" else "")+"e3id&e3name="+(e3name if c["code"] == "AutoActivationFailed" else ""))
    



        # read from db here 

        sis=redis_store.get('stage_in_source').decode('utf-8')
        sid=redis_store.get('stage_in_destination').decode('utf-8') #...
        sod=redis_store.get('stage_out_destination').decode('utf-8')#those names...........
        siop= redis_store.get('stage_in_source_path').decode('utf-8')
        sidp = redis_store.get('stage_in_destination_path').decode('utf-8')
        sodp = redis_store.get('stage_out_destination_path').decode('utf-8')

        
        return redirect(url_for(
            'task_server.transfer',
            stage_in_source=sis,
            stage_in_dest=sid,
            stage_out_dest=sod,
            stage_in_source_path=siop,
            stage_in_dest_path=sidp,
            stage_out_dest_path=sodp))
Example #12
def doit():
    """
    - Call token introspect
    - Get dependent tokens
    """
    dependent_tokens = get_dependent_tokens(g.req_token)

    # dependent_tokens is a token response object
    # create transfer_token and http_token variables containing
    # the correct token for each resource server
    transfer_token = dependent_tokens.by_resource_server[
        'transfer.api.globus.org']['access_token']
    http_token = dependent_tokens.by_resource_server[
        'tutorial-https-endpoint.globus.org']['access_token']

    selected_ids = request.form.getlist('datasets')
    selected_year = request.form.get('year')
    user_identity_id = request.form.get('user_identity_id')
    user_identity_name = request.form.get('user_identity_name')

    selected_datasets = [
        dataset for dataset in datasets if dataset['id'] in selected_ids
    ]

    if not (selected_datasets and selected_year):
        raise BadRequestError()

    transfer = TransferClient(authorizer=AccessTokenAuthorizer(transfer_token))

    source_ep = app.config['DATASET_ENDPOINT_ID']
    source_info = transfer.get_endpoint(source_ep)
    source_https = source_info['https_server']
    source_base = app.config['DATASET_ENDPOINT_BASE']
    source_token = http_token

    dest_ep = app.config['GRAPH_ENDPOINT_ID']
    dest_info = transfer.get_endpoint(dest_ep)
    dest_https = dest_info['https_server']
    dest_base = app.config['GRAPH_ENDPOINT_BASE']
    dest_path = '%sGraphs for %s/' % (dest_base, user_identity_name)
    dest_token = http_token

    if not (source_https and dest_https):
        raise InternalServerError(message='Endpoints must be HTTPS servers')

    svgs = {}

    for dataset in selected_datasets:
        source_path = dataset['path']
        response = requests.get(
            '%s%s%s/%s.csv' %
            (source_https, source_base, source_path, selected_year),
            headers=dict(Authorization='Bearer ' + source_token),
            allow_redirects=False)
        response.raise_for_status()
        svgs.update(
            render_graphs(
                csv_data=response.iter_lines(decode_unicode=True),
                append_titles=' from %s for %s' %
                (dataset['name'], selected_year),
            ))

    transfer.endpoint_autoactivate(dest_ep)

    try:
        transfer.operation_mkdir(dest_ep, dest_path)
    except TransferAPIError as error:
        if 'MkdirFailed.Exists' not in error.code:
            raise

    try:
        transfer.add_endpoint_acl_rule(
            dest_ep,
            dict(principal=user_identity_id,
                 principal_type='identity',
                 path=dest_path,
                 permissions='r'),
        )
    except TransferAPIError as error:
        # PermissionDenied can happen if a new Portal client is swapped
        # in and it doesn't have endpoint manager on the dest_ep.
        # The /portal/processed directory has been set to read/write
        # for all users so the subsequent operations will succeed.
        if error.code == 'PermissionDenied':
            pass
        elif error.code != 'Exists':
            raise

    for filename, svg in svgs.items():
        requests.put('%s%s%s.svg' % (dest_https, dest_path, filename),
                     data=svg,
                     headers=dict(Authorization='Bearer ' + dest_token),
                     allow_redirects=False).raise_for_status()

    results = {
        'dest_ep': dest_ep,
        'dest_path': dest_path,
        'dest_name': dest_info['display_name'],
        'graph_count': len(svgs) or 0
    }

    return jsonify(results)
Example #13
def globus_transfer(  # noqa: C901
        remote_endpoint,
        remote_path,
        name,
        transfer_type,
        non_blocking=False):
    """
    Read the local globus endpoint UUID from ~/.zstash.ini.
    If the ini file does not exist, create an ini file with empty values,
    and try to find the local endpoint UUID based on the FQDN
    """
    ini_path = os.path.expanduser("~/.zstash.ini")
    ini = configparser.ConfigParser()
    local_endpoint = None
    if ini.read(ini_path):
        if "local" in ini.sections():
            local_endpoint = ini["local"].get("globus_endpoint_uuid")
    else:
        ini["local"] = {"globus_endpoint_uuid": ""}
        try:
            with open(ini_path, "w") as f:
                ini.write(f)
        except Exception as e:
            logger.error(e)
            sys.exit(1)
    if not local_endpoint:
        fqdn = socket.getfqdn()
        for pattern in regex_endpoint_map.keys():
            if re.fullmatch(pattern, fqdn):
                local_endpoint = regex_endpoint_map.get(pattern)
                break
    if not local_endpoint:
        logger.error(
            "{} does not have the local Globus endpoint set".format(ini_path))
        sys.exit(1)

    if remote_endpoint.upper() in hpss_endpoint_map.keys():
        remote_endpoint = hpss_endpoint_map.get(remote_endpoint.upper())

    if transfer_type == "get":
        src_ep = remote_endpoint
        src_path = os.path.join(remote_path, name)
        dst_ep = local_endpoint
        dst_path = os.path.join(os.getcwd(), name)
    else:
        src_ep = local_endpoint
        src_path = os.path.join(os.getcwd(), name)
        dst_ep = remote_endpoint
        dst_path = os.path.join(remote_path, name)

    subdir = os.path.basename(os.path.normpath(remote_path))
    subdir_label = re.sub("[^A-Za-z0-9_ -]", "", subdir)
    filename = name.split(".")[0]
    label = subdir_label + " " + filename

    native_client = NativeClient(
        client_id="6c1629cf-446c-49e7-af95-323c6412397f",
        app_name="Zstash",
        default_scopes=
        "openid urn:globus:auth:scope:transfer.api.globus.org:all",
    )
    native_client.login(no_local_server=True, refresh_tokens=True)
    transfer_authorizer = native_client.get_authorizers().get(
        "transfer.api.globus.org")
    tc = TransferClient(transfer_authorizer)

    for ep_id in [src_ep, dst_ep]:
        r = tc.endpoint_autoactivate(ep_id, if_expires_in=600)
        if r.get("code") == "AutoActivationFailed":
            logger.error(
                "The {} endpoint is not activated or the current activation expires soon. Please go to https://app.globus.org/file-manager/collections/{} and (re)activate the endpoint."
                .format(ep_id, ep_id))
            sys.exit(1)

    td = TransferData(
        tc,
        src_ep,
        dst_ep,
        label=label,
        sync_level="checksum",
        verify_checksum=True,
        preserve_timestamp=True,
        fail_on_quota_errors=True,
    )
    td.add_item(src_path, dst_path)
    try:
        task = tc.submit_transfer(td)
    except TransferAPIError as e:
        if e.code == "NoCredException":
            logger.error(
                "{}. Please go to https://app.globus.org/endpoints and activate the endpoint."
                .format(e.message))
        else:
            logger.error(e)
        sys.exit(1)
    except Exception as e:
        logger.error("Exception: {}".format(e))
        sys.exit(1)

    if non_blocking:
        return

    try:
        task_id = task.get("task_id")
        """
        A Globus transfer job (task) can be in one of three states:
        ACTIVE, SUCCEEDED, or FAILED. The loop below polls the task's status
        from the Globus Transfer service every 20 seconds, with a 20-second
        timeout per call. 'task_wait' returns False if the task is still
        ACTIVE when the timeout expires, and True otherwise.
        """
        while not tc.task_wait(task_id, 20, 20):
            pass
        """
        The Globus transfer job (task) has finished (SUCCEEDED or FAILED).
        Check whether the transfer SUCCEEDED or FAILED.
        """
        task = tc.get_task(task_id)
        if task["status"] == "SUCCEEDED":
            logger.info(
                "Globus transfer {}, from {}{} to {}{} succeeded".format(
                    task_id, src_ep, src_path, dst_ep, dst_path))
        else:
            logger.error("Transfer FAILED")
    except TransferAPIError as e:
        if e.code == "NoCredException":
            logger.error(
                "{}. Please go to https://app.globus.org/endpoints and activate the endpoint."
                .format(e.message))
        else:
            logger.error(e)
        sys.exit(1)
    except Exception as e:
        logger.error("Exception: {}".format(e))
        sys.exit(1)
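
regex_endpoint_map and hpss_endpoint_map are module-level dictionaries that this snippet relies on but does not define. A sketch of their presumed shape; the patterns, site name, and UUIDs are placeholders:

regex_endpoint_map = {
    # FQDN pattern -> local Globus endpoint UUID
    r".*\.example\.gov": "00000000-0000-0000-0000-000000000000",
}
hpss_endpoint_map = {
    # site name -> HPSS Globus endpoint UUID
    "EXAMPLE_SITE": "11111111-1111-1111-1111-111111111111",
}
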
Example #14
def transfer(sp,destination_endpoint_id,one_endpoint):
    tokens = None
    try:
        # if we already have tokens, load and use them
        tokens = load_tokens_from_file(TOKEN_FILE)
    except:
        pass

    if not tokens:
        # if we need to get tokens, start the Native App authentication process
        tokens = do_native_app_authentication(CLIENT_ID, REDIRECT_URI, SCOPES)

        try:
            save_tokens_to_file(TOKEN_FILE, tokens)
        except:
            pass

    transfer_tokens = tokens['transfer.api.globus.org']

    auth_client = NativeAppAuthClient(client_id=CLIENT_ID,environment='sandbox')

    authorizer = RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        auth_client,
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'],
        on_refresh=update_tokens_file_on_refresh)

    #transfer = TransferClient(authorizer=authorizer,environment='sandbox')
    tc = TransferClient(authorizer=authorizer, environment="sandbox")

    ##################---ENDPOINTS---###########################

    source_endpoint_id = '5a2e5704-b028-11e7-bdad-22000bdb2406' #sb vmtb4
    #source_endpoint_id = '55705028-aa15-11e7-bdad-22000bdb2406' #sb yulie7t
    #source_endpoint_id = 'b0b16296-88e7-11e7-a971-22000a92523b' #bare chameleon
    #source_endpoint_id = 'e5762bc2-8466-11e7-a8ed-22000a92523b' #large_chameleon
    #source_endpoint_id = '8b26cc0e-877b-11e7-a949-22000a92523b'#ubuntu-vm
    #source_endpoint_id = 'ad19b012-77cf-11e7-8b98-22000b9923ef'#chameleon
    # source_endpoint_id = raw_input('Input source endpoint UUID: ')
    
    #destination path
    ##############---SOURCE PATH---######################
    #source_path = '/home/parallels/stream_transfer/test_files/'
    #source_path = '/home/parallels/stream_transfer/zero_globus/test_files/'
    source_path = sp
    #source_path ='/home/cc/streaming/zero_globus/test_files/test.txt'
    #source_path = '/home/parallels/stream_transfer/zero_globus/test_files/test.txt'
    #destination path
    destination_path = '/~/'
    #destination_path = '/~/'+ sp.split("/")[-1] #use for one file
    ##if one_endpoint:
    ##    destination_path = '/projects/BrainImagingADSP/yzamora/'
    ##else:
    ##    destination_path = '/projects/BrainImagingADSP/yzamora/'+ sp.split("/")[-1] #use for one file
    #Using my sample UUID from globus tutorial
    #destination_endpoint_id = 'ddb59aef-6d04-11e5-ba46-22000b92c6ec' #globus
    #destination_endpoint_id = '5d1da0fe-3c07-11e7-bcfc-22000b9a448b' #laptop



    #tc.endpoint_autoactivate(source_endpoint_id)
    #tc.endpoint_autoactive(destination_endpoint_id)
    ep1 = tc.get_endpoint(destination_endpoint_id)
    tc.endpoint_autoactivate(destination_endpoint_id)
    #ep1 is setting the activated endpoint to be a variable to work with
    tc.endpoint_autoactivate(source_endpoint_id)

    label = "medium data transfer"
    #tdata = globus_sdk.TransferData(tc, source_endpoint_id, destination_endpoint_id,label=label, sync_level='0')
    tdata = globus_sdk.TransferData(tc, source_endpoint_id, destination_endpoint_id,label=label, perf_cc=3, sync_level=None, verify_checksum=False)
    #tdata = globus_sdk.TransferData(tc, source_endpoint_id, destination_endpoint_id,label=label)
    if one_endpoint:
        tdata.add_item(source_path,destination_path,recursive=True)
    else:
        tdata.add_item(source_path,destination_path,recursive=False)

    submit_result = tc.submit_transfer(tdata)
    print("Task ID:", submit_result["task_id"])
    """
    Checking for time completion using globus calls
    
    """
    #print("Completion time:", submit_result["completion_time"])

    #setup of the transfer, submits as a https post request
    #transfer_data = TransferData(transfer_client=tc,
    #                     source_endpoint=source_endpoint_id,
    #                     destination_endpoint=destination_endpoint_id,
    #                     label='Transfer',
    #                     sync_level='checksum')
    #transfer_data.add_item(source_path=source_path,destination_path=destination_path, recursive=False)
    #task_id=transfer.submit_transfer(transfer_data)['task_id']

    #waiting for file to transfer
    status = tc.get_task(submit_result["task_id"],fields="status")["status"]
    poll_interval = 2
    max_wait = 90
    wait_time = 0
    while status not in ["SUCCEEDED", "FAILED"]:
        if wait_time >= max_wait:
            break
        print("Task not yet complete (status {}), sleeping for {} seconds..."
              .format(status, poll_interval))
        time.sleep(poll_interval)
        wait_time += poll_interval
        status = tc.get_task(submit_result["task_id"], fields="status")["status"]

    if status == "FAILED":
        print("WARNING! File transfer FAILED!")

    #deleting file after transfer
    if status == "SUCCEEDED":
        end_time = datetime.datetime.utcnow()
        start_time = end_time - datetime.timedelta(minutes=200)

    #limit = response objects
    #        data = tc.task_list(filter="type:TRANSFER,DELETE/request_time:%s,%s"
    #        % (start_time, end_time), limit=5)

        #print("File transfer SUCCEEDED, will delete file from local directory now")
        """ r = tc.task_list(num_results=1, filter="type:TRANSFER,DELETE")
Example #15
class TestGlobus(TestZstash):
    def preactivate_globus(self):
        """
        Read the local globus endpoint UUID from ~/.zstash.ini.
        If the ini file does not exist, create an ini file with empty values,
        and try to find the local endpoint UUID based on the FQDN
        """
        local_endpoint = None
        ini_path = os.path.expanduser("~/.zstash.ini")
        ini = configparser.ConfigParser()
        if ini.read(ini_path):
            if "local" in ini.sections():
                local_endpoint = ini["local"].get("globus_endpoint_uuid")
        else:
            ini["local"] = {"globus_endpoint_uuid": ""}
            try:
                with open(ini_path, "w") as f:
                    ini.write(f)
            except Exception as e:
                self.fail(e)
        if not local_endpoint:
            fqdn = socket.getfqdn()
            for pattern in regex_endpoint_map.keys():
                if re.fullmatch(pattern, fqdn):
                    local_endpoint = regex_endpoint_map.get(pattern)
                    break
        if not local_endpoint:
            # self.fail("{} does not have the local Globus endpoint set".format(ini_path))
            self.skipTest(
                "{} does not have the local Globus endpoint set".format(
                    ini_path))

        native_client = NativeClient(
            client_id="6c1629cf-446c-49e7-af95-323c6412397f",
            app_name="Zstash",
            default_scopes=
            "openid urn:globus:auth:scope:transfer.api.globus.org:all",
        )
        native_client.login(no_local_server=True, refresh_tokens=True)
        transfer_authorizer = native_client.get_authorizers().get(
            "transfer.api.globus.org")
        self.transfer_client = TransferClient(transfer_authorizer)

        for ep_id in [hpss_globus_endpoint, local_endpoint]:
            r = self.transfer_client.endpoint_autoactivate(ep_id,
                                                           if_expires_in=600)
            if r.get("code") == "AutoActivationFailed":
                self.fail(
                    "The {} endpoint is not activated or the current activation expires soon. Please go to https://app.globus.org/file-manager/collections/{} and (re)-activate the endpoint."
                    .format(ep_id, ep_id))

    def delete_files_globus(self):
        ep_id = hpss_globus_endpoint
        r = self.transfer_client.endpoint_autoactivate(ep_id, if_expires_in=60)
        if r.get("code") == "AutoActivationFailed":
            self.fail(
                "The {} endpoint is not activated. Please go to https://app.globus.org/file-manager/collections/{} and activate the endpoint."
                .format(ep_id, ep_id))

        ddata = DeleteData(self.transfer_client,
                           hpss_globus_endpoint,
                           recursive=True)
        ddata.add_item("/~/zstash_test/")
        try:
            task = self.transfer_client.submit_delete(ddata)
            task_id = task.get("task_id")
            """
            A Globus transfer job (task) can be in one of three states:
            ACTIVE, SUCCEEDED, or FAILED. The loop below polls the task's
            status from the Globus Transfer service every 5 seconds, with a
            5-second timeout per call. 'task_wait' returns False if the task
            is still ACTIVE when the timeout expires, and True otherwise.
            """
            while not self.transfer_client.task_wait(task_id, 5, 5):
                task = self.transfer_client.get_task(task_id)
                if task.get("is_paused"):
                    break
            """
            The Globus transfer job (task) has finished (SUCCEEDED or FAILED)
            or is still ACTIVE. Check whether the transfer SUCCEEDED or FAILED.
            """
            task = self.transfer_client.get_task(task_id)
            if task["status"] == "SUCCEEDED":
                pass
            elif task.get("status") == "ACTIVE":
                if task.get("is_paused"):
                    pause_info = self.transfer_client.task_pause_info(task_id)
                    paused_rules = pause_info.get("pause_rules")
                    reason = paused_rules[0].get("message")
                    message = "The task was paused. Reason: {}".format(reason)
                    print(message)
                else:
                    message = "The task reached a {} second deadline\n".format(
                        24 * 3600)
                    print(message)
                self.transfer_client.cancel_task(task_id)
            else:
                print("Globus delete FAILED")
        except TransferAPIError as e:
            if e.code == "NoCredException":
                self.fail(
                    "{}. Please go to https://app.globus.org/endpoints and activate the endpoint."
                    .format(e.message))
            else:
                self.fail(e)
        except Exception as e:
            self.fail("{} - exception: {}".format(self, e))

    def tearDown(self):
        """
        Tear down a test. This is run after every test method.

        After the script has failed or completed, remove all created files, even those on the HPSS repo.
        """
        os.chdir(TOP_LEVEL)
        print("Removing test files, both locally and at the HPSS repo")
        # self.cache may appear in any of these directories,
        # but should not appear at the same level as these.
        # Therefore, there is no need to explicitly remove it.
        for d in [self.test_dir, self.backup_dir]:
            if os.path.exists(d):
                shutil.rmtree(d)

        if self.hpss_path and self.hpss_path.lower().startswith("globus:"):
            self.delete_files_globus()

    def helperLsGlobus(self,
                       test_name,
                       hpss_path,
                       cache=None,
                       zstash_path=ZSTASH_PATH):
        """
        Test `zstash ls --hpss=globus://...`.
        """
        self.preactivate_globus()
        self.hpss_path = hpss_path
        if cache:
            # Override default cache
            self.cache = cache
            cache_option = " --cache={}".format(self.cache)
        else:
            cache_option = ""
        use_hpss = self.setupDirs(test_name)
        self.create(use_hpss, zstash_path, cache=self.cache)
        self.assertWorkspace()
        os.chdir(self.test_dir)
        for option in ["", "-v", "-l"]:
            print_starred("Testing zstash ls {}".format(option))
            cmd = "{}zstash ls{} {} --hpss={}".format(zstash_path,
                                                      cache_option, option,
                                                      self.hpss_path)
            output, err = run_cmd(cmd)
            self.check_strings(cmd, output + err, ["file0.txt"], ["ERROR"])
        os.chdir(TOP_LEVEL)

    def testLs(self):
        self.helperLsGlobus("testLsGlobus",
                            f"globus://{hpss_globus_endpoint}/~/zstash_test/")
class GlobusConnection(DecadeFileBase):
    REDIRECT_URI = 'https://auth.globus.org/v2/web/auth-code'
    SCOPES = ('openid email profile '
              'urn:globus:auth:scope:transfer.api.globus.org:all')
    TRANSLIMIT = 25

    def __init__(self, config, tag):
        DecadeFileBase.__init__(self, config, tag)
        self.client = None
        self.active_transfer_count = 0
        self.noao_dirs = []
        self.scantime = 0.
        self.starttime = datetime.datetime.now()
        self.active_transfer_count = 0
        self.number_started = 0
        self.number_successful = 0
        self.number_failed = 0
        self.number_waiting = 0

    def __del__(self):
        self.close()
        logging.shutdown()

    def load_tokens_from_file(self):
        """Load a set of saved tokens."""
        with open(self.config['token_file'], 'r') as _file:
            tokens = json.load(_file)
        return tokens

    def save_tokens_to_file(self, tokens):
        """Save a set of tokens for later use."""
        with open(self.config['token_file'], 'w') as _file:
            json.dump(tokens, _file)

    def update_tokens_file_on_refresh(self, token_response):
        """
        Callback function passed into the RefreshTokenAuthorizer.
        Will be invoked any time a new access token is fetched.
        """
        self.save_tokens_to_file(token_response.by_resource_server)

    def initiate_connection(self):
        """ Initiate the connection
        """
        tokens = None
        try:
            # if we already have tokens, load and use them
            tokens = self.load_tokens_from_file()
        except:
            pass

        if not tokens:
            # if we need to get tokens, start the Native App authentication process
            client = NativeAppAuthClient(
                client_id=self.CLIENT_ID)  # self.config['client_id']
            # pass refresh_tokens=True to request refresh tokens
            client.oauth2_start_flow(
                requested_scopes=self.SCOPES,  # self.config['requested_scopes']
                redirect_uri=self.REDIRECT_URI,  # self.config['redirect_uri']
                refresh_tokens=True)

            url = client.oauth2_get_authorize_url()

            print 'Native App Authorization URL: \n{}'.format(url)

            auth_code = raw_input('Enter the auth code: ').strip()

            token_response = client.oauth2_exchange_code_for_tokens(auth_code)

            # return a set of tokens, organized by resource server name
            tokens = token_response.by_resource_server

            try:
                self.save_tokens_to_file(tokens)
            except:
                pass

        transfer_tokens = tokens['transfer.api.globus.org']

        auth_client = NativeAppAuthClient(client_id=self.config['client_id'])

        authorizer = RefreshTokenAuthorizer(
            transfer_tokens['refresh_token'],
            auth_client,
            #access_token=transfer_tokens['access_token'],
            #expires_at=transfer_tokens['expires_at_seconds'],
            on_refresh=self.update_tokens_file_on_refresh)

        self.client = TransferClient(authorizer=authorizer)

        # print out a directory listing from an endpoint
        try:
            #print 'ACTIVATE'
            #print 'DEST',self.config['dest_ep']
            self.client.endpoint_autoactivate(self.config['dest_ep'])
            ac = self.client.endpoint_get_activation_requirements(
                self.config['dest_ep'])
            #print ac
            self.client.endpoint_autoactivate(self.config['src_ep'])
            ac2 = self.client.endpoint_get_activation_requirements(
                self.config['src_ep'])
            #print ac2
        except GlobusAPIError as ex:
            self.logger.error('Error in endpoint activation %s', str(ex))

            if ex.http_status == 401:
                sys.exit('Refresh token has expired. '
                         'Please delete refresh-tokens.json and try again.')
            else:
                raise ex

    def get_dirs(self):
        """ function
        """
        now = time.time()
        full_dirs = self.client.operation_ls(self.config['src_ep'],
                                             path=self.config['noao_root'])
        self.scantime = time.time() - now
        for entry in full_dirs:
            if entry['type'].lower() == 'dir':
                self.noao_dirs.append(str(entry['name']))
Example #17
#!/usr/bin/env python2.7
from __future__ import print_function # for python 2
from globus_sdk import TransferClient
import globus_sdk
import traceback
import argparse

parser = argparse.ArgumentParser(description='kbase share creator')
parser.add_argument('--share-dir', dest='sharedDir',
                    help='Directory to create a share on')
parser.add_argument('--share-name', dest='shareName',
                    help='name for the share (must be unique among all globus shares)')

args = parser.parse_args()

tc = TransferClient() # uses transfer_token from the config file

shared_ep_data = {
  "DATA_TYPE": "shared_endpoint",
  "host_endpoint": 'e25a4bda-0636-11e6-a732-22000bf2d559',
  "host_path": args.sharedDir,
  "display_name": args.shareName,
  # optionally specify additional endpoint fields
  "description": "autocreated by kbase tool support"
}
tc.endpoint_autoactivate('e25a4bda-0636-11e6-a732-22000bf2d559', if_expires_in=3600)
create_result = tc.create_shared_endpoint(shared_ep_data)
endpoint_id = create_result["id"]
print("new endpoint id: ", endpoint_id)
def doit():
    """
    - Call token introspect
    - Get dependent tokens
    """
    dependent_tokens = get_dependent_tokens(g.req_token)

    # dependent_tokens is a token response object
    # create transfer_token and http_token variables containing
    # the correct token for each resource server
    transfer_token = dependent_tokens.by_resource_server[
        'transfer.api.globus.org']['access_token']
    http_token = dependent_tokens.by_resource_server[
        app.config['GRAPH_ENDPOINT_ID']]['access_token']

    selected_ids = request.form.getlist('datasets')
    selected_year = request.form.get('year')
    user_identity_id = request.form.get('user_identity_id')
    user_identity_name = request.form.get('user_identity_name')

    selected_datasets = [dataset for dataset in datasets
                         if dataset['id'] in selected_ids]

    if not (selected_datasets and selected_year):
        raise BadRequestError()

    transfer = TransferClient(authorizer=AccessTokenAuthorizer(transfer_token))

    source_ep = app.config['DATASET_ENDPOINT_ID']
    source_info = transfer.get_endpoint(source_ep)
    source_https = source_info['https_server']
    source_base = app.config['DATASET_ENDPOINT_BASE']
    source_token = http_token

    dest_ep = app.config['GRAPH_ENDPOINT_ID']
    dest_info = transfer.get_endpoint(dest_ep)
    dest_https = dest_info['https_server']
    dest_base = app.config['GRAPH_ENDPOINT_BASE']
    dest_path = '%sGraphs for %s/' % (dest_base, user_identity_name)
    dest_token = http_token

    if not (source_https and dest_https):
        raise InternalServerError(message='Endpoints must be HTTPS servers')

    svgs = {}

    for dataset in selected_datasets:
        source_path = dataset['path']
        response = requests.get('%s%s%s/%s.csv' % (source_https, source_base,
                                                   source_path, selected_year),
                                headers=dict(
                                    Authorization='Bearer ' + source_token),
                                allow_redirects=False)
        response.raise_for_status()
        svgs.update(render_graphs(
            csv_data=response.iter_lines(decode_unicode=True),
            append_titles=' from %s for %s' % (dataset['name'], selected_year),
        ))

    transfer.endpoint_autoactivate(dest_ep)

    try:
        transfer.operation_mkdir(dest_ep, dest_path)
    except TransferAPIError as error:
        if 'MkdirFailed.Exists' not in error.code:
            raise

    try:
        transfer.add_endpoint_acl_rule(
            dest_ep,
            dict(principal=user_identity_id,
                 principal_type='identity', path=dest_path, permissions='r'),
        )
    except TransferAPIError as error:
        # PermissionDenied can happen if a new Portal client is swapped
        # in and it doesn't have endpoint manager on the dest_ep.
        # The /portal/processed directory has been set to read/write
        # for all users so the subsequent operations will succeed.
        if error.code == 'PermissionDenied':
            pass
        elif error.code != 'Exists':
            raise

    for filename, svg in svgs.items():
        requests.put('%s%s%s.svg' % (dest_https, dest_path, filename),
                     data=svg,
                     headers=dict(Authorization='Bearer ' + dest_token),
                     allow_redirects=False).raise_for_status()

    results = {
        'dest_ep': dest_ep,
        'dest_path': dest_path,
        'dest_name': dest_info['display_name'],
        'graph_count': len(svgs) or 0
    }

    return jsonify(results)
def validate():
    params = request.json
    crawl_id = params["crawl_id"]
    globus_eid = params["globus_eid"]
    transfer_token = params["transfer_token"]
    source_destination = params["source_destination"]
    dataset_info = params["dataset_info"]  # To be implemented later

    client = boto3.client('sqs',
                          aws_access_key_id=os.environ["aws_access"],
                          aws_secret_access_key=os.environ["aws_secret"],
                          region_name='us-east-1')

    try:
        response = client.get_queue_url(
            QueueName=f'validate_{crawl_id}',
            QueueOwnerAWSAccountId=os.environ["aws_account_id"])
    except:  # Add SQS.Client.exceptions.QueueDoesNotExist error
        abort(400, "Invalid crawl ID")

    try:
        authorizer = AccessTokenAuthorizer(transfer_token)
        tc = TransferClient(authorizer=authorizer)
    except:  # Add exception
        abort(400, "Invalid transfer token")

    crawl_queue = response["QueueUrl"]

    date = datetime.datetime.now()
    file_name = date.strftime("%m_%d_%Y-%H_%M_%S") + ".txt"

    try:
        with open(file_name, "w") as f:

            while True:
                sqs_response = client.receive_message(
                    QueueUrl=crawl_queue,
                    MaxNumberOfMessages=1,  # To be toggled
                    WaitTimeSeconds=1)

                if "Messages" not in sqs_response:
                    # xtract_status = requests.get(f"{eb_url}/get_extract_status", json={"crawl_id": crawl_id})
                    # print("HERE")
                    # print(xtract_status.content)
                    # xtract_content = json.loads(xtract_status.content)
                    # # print(xtract_content)
                    #
                    # if xtract_content["IDLE"] == 0 and xtract_content["PENDING"] == 0:
                    break

                del_list = []

                for message in sqs_response["Messages"]:
                    message_body = message["Body"]

                    # PROCESS MESSAGE_BODY
                    f.write(message_body)
                    # print(message_body)

                    del_list.append({
                        'ReceiptHandle': message["ReceiptHandle"],
                        'Id': message["MessageId"]
                    })

                if len(del_list) > 0:
                    client.delete_message_batch(QueueUrl=crawl_queue,
                                                Entries=del_list)

        tdata = TransferData(
            tc,
            "5ecf6444-affc-11e9-98d4-0a63aa6b37da",  #TODO: Add source endpoint
            globus_eid,
            label=f"{crawl_id}")
        tdata.add_item(os.path.abspath(file_name),
                       os.path.join(source_destination, file_name))

        tc.endpoint_autoactivate(
            "5ecf6444-affc-11e9-98d4-0a63aa6b37da")  #TODO: Add source endpoint
        tc.endpoint_autoactivate(globus_eid)
        submit_result = tc.submit_transfer(tdata)

        while True:
            result = tc.get_task(submit_result['task_id'])
            if result.data["status"] == "SUCCEEDED":
                break
            elif result.data["status"] == "FAILED":
                raise RuntimeError  # TODO: Change this
            else:
                time.sleep(0.5)

    except Exception as e:
        print(e)
        abort(400, "Failed to validate")
    finally:
        os.remove(file_name)

    return "[200] Submitted"