Example #1
def globus_native_auth_login():
    client = NativeClient(client_id='7414f0b4-7d05-4bb6-bb00-076fa3f17cf5')
    tokens = client.login(requested_scopes=[
        'https://auth.globus.org/scopes/56ceac29-e98a-440a-a594-b41e7a084b62/all',
        'urn:globus:auth:scope:transfer.api.globus.org:all',
        "https://auth.globus.org/scopes/facd7ccc-c5f4-42aa-916b-a0e270e2c2a9/all",
        "urn:globus:auth:scope:data.materialsdatafacility.org:all", 'email',
        'openid', 'urn:globus:auth:scope:search.api.globus.org:all'
    ],
                          no_local_server=True,
                          no_browser=True)

    print(tokens)

    auth_token = tokens["petrel_https_server"]['access_token']
    transfer_token = tokens['transfer.api.globus.org']['access_token']
    mdf_token = tokens["data.materialsdatafacility.org"]['access_token']
    funcx_token = tokens['funcx_service']['access_token']
    search_token = tokens['search.api.globus.org']['access_token']
    openid_token = tokens['auth.globus.org']['access_token']

    headers = {
        'Authorization': f"Bearer {funcx_token}",
        'Transfer': transfer_token,
        'FuncX': funcx_token,
        'Petrel': mdf_token,
        'Search': search_token,
        'Openid': openid_token
    }
    print(f"Headers: {headers}")
    return headers
Example #2
    def __init__(self, endpoints, sync_level='exists', log_level='INFO'):

        transfer_scope = 'urn:globus:auth:scope:transfer.api.globus.org:all'
        native_client = NativeClient(client_id=CLIENT_ID,
                                     app_name="FuncX Continuum Scheduler",
                                     token_storage=JSONTokenStorage(TOKEN_LOC))
        native_client.login(requested_scopes=[transfer_scope],
                            no_browser=True,
                            no_local_server=True,
                            refresh_tokens=True)
        all_authorizers = native_client.get_authorizers_by_scope(
            requested_scopes=[transfer_scope])
        transfer_authorizer = all_authorizers[transfer_scope]
        self.transfer_client = globus_sdk.TransferClient(transfer_authorizer)

        self.endpoints = endpoints
        self.sync_level = sync_level
        logger.setLevel(log_level)

        # Track pending transfers
        self._next = 0
        self.active_transfers = {}
        self.completed_transfers = {}
        self.transfer_ids = {}

        # Initialize thread to wait on transfers
        self._polling_interval = 1
        self._tracker = Thread(target=self._track_transfers)
        self._tracker.daemon = True
        self._tracker.start()
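
The tracker thread's target, `_track_transfers`, is not shown above. A hypothetical sketch of such a polling loop (an assumption, not the project's actual method) that checks each active task with `TransferClient.get_task` and moves finished ones into `completed_transfers`:

    def _track_transfers(self):
        """Poll Globus Transfer for active tasks and record finished ones."""
        import time  # local import keeps this sketch self-contained

        while True:
            for task_id in list(self.active_transfers):
                task = self.transfer_client.get_task(task_id)
                if task['status'] in ('SUCCEEDED', 'FAILED'):
                    # Move the bookkeeping entry once the task reaches a terminal state.
                    self.completed_transfers[task_id] = self.active_transfers.pop(task_id)
            time.sleep(self._polling_interval)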
Example #3
def spawn_tokens(client_id=CLIENT_ID, req_scopes=SCOPES, name=APP_NAME):
    """
    Checks if Globus tokens already exist and spawns them if they don't.
    Returns an instance of 'NativeClient'.
    """

    tokens = os.getenv('GLOBUS_DATA')
    # try to load tokens from local file (native app config)
    client = NativeClient(client_id=client_id, app_name=name)

    # try:
    #     tokens = client.load_tokens(requested_scopes=req_scopes)
    # except:
    #     pass

    if not tokens:
        # if no tokens, need to start Native App authentication process to get tokens
        tokens = client.login(requested_scopes=req_scopes, refresh_tokens=True)

        try:
            # save the tokens
            client.save_tokens(tokens)

            # create environment variable
            os.environ['GLOBUS_DATA'] = json.dumps(tokens,
                                                   indent=4,
                                                   sort_keys=True)
        except:
            pass

    return client
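
A brief usage sketch (assuming `spawn_tokens` returns the NativeClient as its docstring states, and that SCOPES is defined as in the surrounding module):

client = spawn_tokens()
# Tokens are now cached by fair_research_login and mirrored into GLOBUS_DATA,
# so later calls can load them without triggering a new login flow.
tokens = client.load_tokens(requested_scopes=SCOPES)
print(list(tokens))  # resource servers for which access tokens are available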
Example #4
def live_client_destructive():
    storage = ConfigParserTokenStorage(filename='integ_testing_destruct.cfg')
    client = NativeClient(client_id='7414f0b4-7d05-4bb6-bb00-076fa3f17cf5',
                          token_storage=storage,
                          default_scopes=['openid'])
    yield client
    client.logout()
    os.unlink('integ_testing_destruct.cfg')
Example #5
def get_headers():

    client = NativeClient(client_id='7414f0b4-7d05-4bb6-bb00-076fa3f17cf5')
    tokens = client.login(requested_scopes=[
        'https://auth.globus.org/scopes/56ceac29-e98a-440a-a594-b41e7a084b62/all'
    ])
    auth_token = tokens["petrel_https_server"]['access_token']
    headers = {'Authorization': f'Bearer {auth_token}'}

    return headers
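
A usage sketch for the snippet above; the URL is a made-up placeholder for a file served by the Petrel HTTPS server that the requested scope protects:

import requests

headers = get_headers()
url = 'https://example.petrel.host/public/dataset/file.txt'  # hypothetical path
response = requests.get(url, headers=headers)
response.raise_for_status()
print('Downloaded {} bytes'.format(len(response.content)))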
Example #6
    def __init__(self,
                 http_timeout=None,
                 funcx_home=os.path.join('~', '.funcx'),
                 force_login=False,
                 fx_authorizer=None,
                 funcx_service_address='https://dev.funcx.org/api/v1',
                 **kwargs):
        """ Initialize the client

        Parameters
        ----------
        http_timeout: int
        Timeout for any call to service in seconds.
        Default is no timeout

        force_login: bool
        Whether to force a login to get new credentials.

        fx_authorizer : :class:`GlobusAuthorizer <globus_sdk.authorizers.base.GlobusAuthorizer>`
        A custom authorizer instance to communicate with funcX.
        Default: ``None``, will be created.

        funcx_service_address: str
        The address of the funcX web service to communicate with.
        Default: https://dev.funcx.org/api/v1

        Keyword arguments are the same as for BaseClient.
        """
        self.ep_registration_path = 'register_endpoint_2'
        self.funcx_home = os.path.expanduser(funcx_home)

        native_client = NativeClient(client_id=self.CLIENT_ID)

        fx_scope = "https://auth.globus.org/scopes/facd7ccc-c5f4-42aa-916b-a0e270e2c2a9/all"

        if not fx_authorizer:
            native_client.login(
                requested_scopes=[fx_scope],
                no_local_server=kwargs.get("no_local_server", True),
                no_browser=kwargs.get("no_browser", True),
                refresh_tokens=kwargs.get("refresh_tokens", True),
                force=force_login)

            all_authorizers = native_client.get_authorizers_by_scope(
                requested_scopes=[fx_scope])
            fx_authorizer = all_authorizers[fx_scope]

        super(FuncXClient, self).__init__("funcX",
                                          environment='funcx',
                                          authorizer=fx_authorizer,
                                          http_timeout=http_timeout,
                                          base_url=funcx_service_address,
                                          **kwargs)
        self.fx_serializer = FuncXSerializer()
Example #7
def tasks():

    client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    client.login(requested_scopes=SCOPES)

    tokens = client.load_tokens(requested_scopes=SCOPES)
    auther = globus_sdk.AccessTokenAuthorizer(
        tokens['search.api.globus.org']['access_token'])
    sc = globus_sdk.SearchClient(authorizer=auther)
    print(sc.get_task_list(INDEX))
    print('Finished')
Example #8
    def __init__(self, *args, **kwargs):
        super(GlobusContentsManager, self).__init__(*args, **kwargs)
        # TODO: Make this check for tokens in the environment (i.e., JupyterHub)
        # Then load via Native App. Figure out login.
        client = NativeClient(client_id=self.client_id, app_name=self.app_name)
        tokens = client.load_tokens()
        transfer_access_token = tokens['transfer.api.globus.org'][
            'access_token']

        # then use that token to create an AccessTokenAuthorizer
        transfer_auth = globus_sdk.AccessTokenAuthorizer(transfer_access_token)
        # finally, use the authorizer to create a TransferClient object
        self.transfer_client = globus_sdk.TransferClient(
            authorizer=transfer_auth)
        self.transfer_client.endpoint_autoactivate(self.globus_remote_endpoint)
        # TODO: How to handle caching dir? Needs to be writable. On laptops,
        # tmp dirs may not be accessible by GCP
        #self._cache_dir = tempfile.TemporaryDirectory()
        self._cache_dir = '/Users/rpwagner/tmp/jupyter_contents_cache'
Example #9
def get_authorizer_for_scope(scope: str,
                             client_id: str = CLIENT_ID
                             ) -> AccessTokenAuthorizer:
    client = NativeClient(
        client_id=client_id,
        app_name="globus-automate CLI",
        token_storage=MultiScopeTokenStorage(scope),
        default_scopes=[scope],
    )
    try:
        client.login(
            requested_scopes=[scope],
            refresh_tokens=True,
        )
    except (LocalServerError, AuthAPIError, AuthFailure) as e:
        print(f"Login Unsuccessful: {str(e)}")
        raise SystemExit

    authorizers = client.get_authorizers_by_scope(requested_scopes=[scope])
    return authorizers[scope]
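
For example, the helper can supply an authorizer for Globus Search (a sketch; the scope string is the standard Search scope used elsewhere on this page):

import globus_sdk

search_scope = 'urn:globus:auth:scope:search.api.globus.org:all'
search_authorizer = get_authorizer_for_scope(search_scope)
search_client = globus_sdk.SearchClient(authorizer=search_authorizer)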
Example #10
def get_native_app_authorizer(client_id):
    tokens = None
    client = NativeClient(client_id=client_id, app_name=APP_NAME)
    try:
        # if we already have tokens, load and use them
        tokens = client.load_tokens(requested_scopes=SCOPES)
    except:
        pass

    if not tokens:
        tokens = client.login(requested_scopes=SCOPES, refresh_tokens=True)
        try:
            client.save_tokens(tokens)
        except:
            pass

    transfer_tokens = tokens['transfer.api.globus.org']

    auth_client = globus_sdk.NativeAppAuthClient(client_id=client_id)

    return globus_sdk.RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        auth_client,
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])
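
A usage sketch, assuming CLIENT_ID is defined as in the other examples and SCOPES includes the Transfer scope:

import globus_sdk

authorizer = get_native_app_authorizer(CLIENT_ID)
tc = globus_sdk.TransferClient(authorizer=authorizer)
# List endpoints the logged-in user administers.
for ep in tc.endpoint_search(filter_scope='my-endpoints'):
    print(ep['display_name'], ep['id'])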
Example #11
def delete(filename):

    with open(filename) as f:
        ingest_doc = json.loads(f.read())

    client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    client.login(requested_scopes=SCOPES)

    tokens = client.load_tokens(requested_scopes=SCOPES)
    auther = globus_sdk.AccessTokenAuthorizer(
        tokens['search.api.globus.org']['access_token'])
    sc = globus_sdk.SearchClient(authorizer=auther)

    subject = ingest_doc['ingest_data']['subject']
    print(subject)
    print('Deleting from "{}"?'.format(
        sc.get_index(INDEX).data['display_name']))
    #user_input = input('Y/N> ')
    #if user_input in ['yes', 'Y', 'y', 'yarr']:
    result = sc.delete_subject(INDEX, subject)
    print('Finished')
    print(result)
Example #12
def logout(app_name=None, client_id=None):
    """Revoke and delete all saved tokens for the app.

    Arguments:
        app_name (str): Name of the app/script/client.
                **Default**: ``'UNKNOWN_APP'``.
        client_id (str): The ID of the client.
                **Default**: The MDF Native Clients ID.
    """
    if not app_name:
        app_name = DEFAULT_APP_NAME
    if not client_id:
        client_id = DEFAULT_CLIENT_ID
    NativeClient(app_name=app_name, client_id=client_id).logout()
Example #13
def main():
    tokens = None
    client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    try:
        # if we already have tokens, load and use them
        tokens = client.load_tokens(requested_scopes=SCOPES)
    except:
        pass

    if not tokens:
        # if we need to get tokens, start the Native App authentication process
        # need to specify that we want refresh tokens
        tokens = client.login(requested_scopes=SCOPES, refresh_tokens=True)
        try:
            client.save_tokens(tokens)
        except:
            pass

    transfer = setup_transfer_client(tokens['transfer.api.globus.org'])

    try:
        task_data = load_data_from_file(DATA_FILE)['task']
        task = transfer.get_task(task_data['task_id'])
        if task['status'] not in PREVIOUS_TASK_RUN_CASES:
            print('The last transfer status is {}, skipping run...'.format(
                task['status']))
            sys.exit(1)
    except KeyError:
        # Ignore if there is no previous task
        pass

    check_endpoint_path(transfer, SOURCE_ENDPOINT, SOURCE_PATH)
    if CREATE_DESTINATION_FOLDER:
        create_destination_directory(transfer, DESTINATION_ENDPOINT,
                                     DESTINATION_PATH)
    else:
        check_endpoint_path(transfer, DESTINATION_ENDPOINT, DESTINATION_PATH)

    tdata = TransferData(transfer,
                         SOURCE_ENDPOINT,
                         DESTINATION_ENDPOINT,
                         label=TRANSFER_LABEL,
                         sync_level="checksum")
    tdata.add_item(SOURCE_PATH, DESTINATION_PATH, recursive=True)

    task = transfer.submit_transfer(tdata)
    save_data_to_file(DATA_FILE, 'task', task.data)
    print('Transfer has been started from\n  {}:{}\nto\n  {}:{}'.format(
        SOURCE_ENDPOINT, SOURCE_PATH, DESTINATION_ENDPOINT, DESTINATION_PATH))
    url_string = 'https://globus.org/app/transfer?' + \
        six.moves.urllib.parse.urlencode({
            'origin_id': SOURCE_ENDPOINT,
            'origin_path': SOURCE_PATH,
            'destination_id': DESTINATION_ENDPOINT,
            'destination_path': DESTINATION_PATH
        })
    print('Visit the link below to see the changes:\n{}'.format(url_string))
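
The script depends on helpers such as `setup_transfer_client` that are not shown. A plausible sketch of that helper, reusing the RefreshTokenAuthorizer pattern shown earlier on this page (an assumption, not the original code; it presumes `globus_sdk` is imported and CLIENT_ID is defined in the script):

def setup_transfer_client(transfer_tokens):
    # Build a TransferClient whose access tokens refresh automatically.
    auth_client = globus_sdk.NativeAppAuthClient(client_id=CLIENT_ID)
    authorizer = globus_sdk.RefreshTokenAuthorizer(
        transfer_tokens['refresh_token'],
        auth_client,
        access_token=transfer_tokens['access_token'],
        expires_at=transfer_tokens['expires_at_seconds'])
    return globus_sdk.TransferClient(authorizer=authorizer)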
Example #14
def listind():
    client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    client.login(requested_scopes=SCOPES)

    tokens = client.load_tokens(requested_scopes=SCOPES)
    auther = globus_sdk.AccessTokenAuthorizer(
        tokens['search.api.globus.org']['access_token'])
    sc = globus_sdk.SearchClient(authorizer=auther)

    search_results = sc.search(index_id=INDEX, q='*')

    header = 'Title                Data       Dataframe Rows   Cols   Size   Filename'
    print(header)
    for i in search_results['gmeta']:
        j = i['content'][0]
        s, h = get_size(j['remote_file_manifest']['length'])
        size = str(int(s)) + ' ' + h
        print('{:21.20}'.format(j['dc']['titles'][0]['title']) +
              '{:11.10}'.format(j['ncipilot']['data_type']) +
              '{:10.9}'.format(j['ncipilot']['dataframe_type']) +
              '{:7.6}'.format(str(j['ncipilot']['numrows'])) +
              '{:7.6}'.format(str(j['ncipilot']['numcols'])) +
              '{:7.6}'.format(size) +
              '{:.16}'.format(j['remote_file_manifest']['filename']))
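
`get_size` is not defined in this snippet; a minimal sketch of a helper matching its usage (returning a numeric value and a unit string) might be:

def get_size(num_bytes):
    # Convert a byte count into (value, unit), e.g. 2048 -> (2.0, 'KB').
    for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
        if num_bytes < 1024 or unit == 'TB':
            return num_bytes, unit
        num_bytes /= 1024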
Example #15
def test_json_token_storage(mock_tokens, mock_revoke, monkeypatch):
    cli = NativeClient(client_id=str(uuid.uuid4()),
                       token_storage=JSONTokenStorage())
    # Mock actual call to open(). Catch the data 'written' and use it in the
    # load function. This is a cheap and easy (and hacky) way to test that the
    # stuff we get read was the same as the stuff written in.
    monkeypatch.setattr(os.path, 'exists', lambda x: True)
    mo = mock_open()
    with patch(BUILTIN_OPEN, mo):
        cli.save_tokens(mock_tokens)
        written = ''.join([c[1][0] for c in mo().write.mock_calls])
    with patch(BUILTIN_OPEN, mock_open(read_data=written)):
        tokens = cli.load_tokens()
        assert tokens == MOCK_TOKEN_SET
        mock_remove = Mock()
        with patch('os.remove', mock_remove):
            cli.logout()
            assert mock_remove.called
Example #16
def login(services, make_clients=True, clear_old_tokens=False, **kwargs):
    """Log in to Globus services.

    Arguments:
        services (list of str): The service names or scopes to authenticate to.
        make_clients (bool): If ``True``, will make and return appropriate clients with
                generated tokens. If ``False``, will only return authorizers.
                **Default**: ``True``.
        clear_old_tokens (bool): Force a login flow, even if loaded tokens are valid.
                Same effect as ``force``. If one of these is ``True``, the effect triggers.
                **Default**: ``False``.

    Keyword Arguments:
        app_name (str): Name of the app/script/client. Used for the named grant during consent,
                and the local server browser page by default.
                **Default**: ``'UNKNOWN_APP'``.
        client_id (str): The ID of the client registered with Globus at
                https://developers.globus.org
                **Default**: The MDF Native Clients ID.
        no_local_server (bool): Disable spinning up a local server to automatically
                copy-paste the auth code. THIS IS REQUIRED if you are on a remote server.
                When used locally with no_local_server=False, the domain is localhost with
                a randomly chosen open port number.
                **Default**: ``False``.
        no_browser (bool): Do not automatically open the browser for the Globus Auth URL.
                Display the URL instead and let the user navigate to that location manually.
                **Default**: ``False``.
        refresh_tokens (bool): Use Globus Refresh Tokens to extend login time.
                **Default**: ``True``.
        force (bool): Force a login flow, even if loaded tokens are valid.
                Same effect as ``clear_old_tokens``. If one of these is ``True``, the effect
                triggers. **Default**: ``False``.

    Returns:
        dict: The clients and authorizers requested, indexed by service name.
                For example, if ``login()`` is told to auth with ``'search'``
                then the search client will be in the ``'search'`` field.
    """
    if isinstance(services, str):
        services = [services]
    # Set up arg defaults
    app_name = kwargs.get("app_name") or DEFAULT_APP_NAME
    client_id = kwargs.get("client_id") or DEFAULT_CLIENT_ID

    native_client = NativeClient(client_id=client_id, app_name=app_name)

    # Translate known services into scopes, existing scopes are cleaned
    servs = []
    for serv in services:
        if isinstance(serv, str):
            servs += serv.lower().strip().split(" ")
        else:
            servs += list(serv)
    scopes = [KNOWN_SCOPES.get(sc, sc) for sc in servs]

    native_client.login(requested_scopes=scopes,
                        no_local_server=kwargs.get("no_local_server", False),
                        no_browser=kwargs.get("no_browser", False),
                        refresh_tokens=kwargs.get("refresh_tokens", True),
                        force=clear_old_tokens or kwargs.get("force", False))

    all_authorizers = native_client.get_authorizers_by_scope(
        requested_scopes=scopes)
    returnables = {}
    # Process authorizers (rename keys to originals, make clients)
    for scope, auth in all_authorizers.items():
        # User specified known_scope name and not scope directly
        if scope not in servs:
            try:
                key = [k for k, v in KNOWN_SCOPES.items() if scope == v][0]
            except IndexError:  # Not a known scope(?), fallback to scope as key
                key = scope
        # User specified scope directly
        else:
            key = scope

        # User wants clients and client supported
        if make_clients and scope in KNOWN_CLIENTS.keys():
            returnables[key] = KNOWN_CLIENTS[scope](authorizer=auth,
                                                    http_timeout=STD_TIMEOUT)
        # Returning authorizer only
        else:
            returnables[key] = auth

    return returnables
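
A usage sketch for `login()`, assuming 'transfer' and 'search' are keys in KNOWN_SCOPES (as the MDF helpers define them):

clients = login(['transfer', 'search'], no_local_server=True, no_browser=True)
transfer_client = clients['transfer']  # a globus_sdk.TransferClient
search_client = clients['search']      # a globus_sdk.SearchClient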
Example #17
class FuncXClient(throttling.ThrottledBaseClient):
    """Main class for interacting with the funcX service

    Holds helper operations for performing common tasks with the funcX service.
    """

    TOKEN_DIR = os.path.expanduser("~/.funcx/credentials")
    TOKEN_FILENAME = 'funcx_sdk_tokens.json'
    CLIENT_ID = '4cf29807-cf21-49ec-9443-ff9a3fb9f81c'

    def __init__(self,
                 http_timeout=None,
                 funcx_home=os.path.join('~', '.funcx'),
                 force_login=False,
                 fx_authorizer=None,
                 funcx_service_address='https://funcx.org/api/v1',
                 **kwargs):
        """ Initialize the client

        Parameters
        ----------
        http_timeout: int
        Timeout for any call to service in seconds.
        Default is no timeout

        force_login: bool
        Whether to force a login to get new credentials.

        fx_authorizer : :class:`GlobusAuthorizer <globus_sdk.authorizers.base.GlobusAuthorizer>`
        A custom authorizer instance to communicate with funcX.
        Default: ``None``, will be created.

        funcx_service_address: str
        The address of the funcX web service to communicate with.
        Default: https://funcx.org/api/v1

        Keyword arguments are the same as for BaseClient.
        """
        self.ep_registration_path = 'register_endpoint_2'
        self.funcx_home = os.path.expanduser(funcx_home)

        if not os.path.exists(self.TOKEN_DIR):
            os.makedirs(self.TOKEN_DIR)

        tokens_filename = os.path.join(self.TOKEN_DIR, self.TOKEN_FILENAME)
        self.native_client = NativeClient(
            client_id=self.CLIENT_ID,
            app_name="FuncX SDK",
            token_storage=JSONTokenStorage(tokens_filename))

        fx_scope = "https://auth.globus.org/scopes/facd7ccc-c5f4-42aa-916b-a0e270e2c2a9/all"

        if not fx_authorizer:
            self.native_client.login(
                requested_scopes=[fx_scope],
                no_local_server=kwargs.get("no_local_server", True),
                no_browser=kwargs.get("no_browser", True),
                refresh_tokens=kwargs.get("refresh_tokens", True),
                force=force_login)

            all_authorizers = self.native_client.get_authorizers_by_scope(
                requested_scopes=[fx_scope])
            fx_authorizer = all_authorizers[fx_scope]

        super(FuncXClient, self).__init__("funcX",
                                          environment='funcx',
                                          authorizer=fx_authorizer,
                                          http_timeout=http_timeout,
                                          base_url=funcx_service_address,
                                          **kwargs)
        self.fx_serializer = FuncXSerializer()

    def logout(self):
        """Remove credentials from your local system
        """
        self.native_client.logout()

    def get_task_status(self, task_id):
        """Get the status of a funcX task.

        Parameters
        ----------
        task_id : str
            UUID of the task

        Returns
        -------
        dict
            Status block containing "status" key.
        """

        r = self.get("{task_id}/status".format(task_id=task_id))
        return json.loads(r.text)

    def get_result(self, task_id):
        """ Get the result of a funcX task

        Parameters
        ----------
        task_id: str
            UUID of the task

        Returns
        -------
        Result obj: If task completed

        Raises
        ------
        Exception obj: Exception due to which the task failed
        """

        r = self.get("{task_id}/status".format(task_id=task_id))

        logger.info(f"Got from globus : {r}")
        r_dict = json.loads(r.text)

        if 'result' in r_dict:
            try:
                r_obj = self.fx_serializer.deserialize(r_dict['result'])
            except Exception:
                raise Exception(
                    "Failure during deserialization of the result object")
            else:
                return r_obj

        elif 'exception' in r_dict:
            try:
                r_exception = self.fx_serializer.deserialize(
                    r_dict['exception'])
                logger.info(f"Exception : {r_exception}")
            except Exception:
                raise Exception(
                    "Failure during deserialization of the Task's exception object"
                )
            else:
                r_exception.reraise()

        else:
            raise Exception("Task pending")

    def run(self,
            *args,
            endpoint_id=None,
            function_id=None,
            asynchronous=False,
            **kwargs):
        """Initiate an invocation

        Parameters
        ----------
        *args : Any
            Args as specified by the function signature
        endpoint_id : uuid str
            Endpoint UUID string. Required
        function_id : uuid str
            Function UUID string. Required
        asynchronous : bool
            Whether or not to run the function asynchronously

        Returns
        -------
        task_id : str
        UUID string that identifies the task
        """
        servable_path = 'submit'
        assert endpoint_id is not None, "endpoint_id keyword argument must be set"
        assert function_id is not None, "function_id keyword argument must be set"

        ser_args = self.fx_serializer.serialize(args)
        ser_kwargs = self.fx_serializer.serialize(kwargs)
        payload = self.fx_serializer.pack_buffers([ser_args, ser_kwargs])

        data = {
            'endpoint': endpoint_id,
            'func': function_id,
            'payload': payload,
            'is_async': asynchronous
        }

        # Send the data to funcX
        r = self.post(servable_path, json_body=data)
        if r.http_status != 200:
            raise Exception(r)

        if 'task_uuid' not in r:
            raise MalformedResponse(r)
        """
        Create a future to deal with the result
        funcx_future = FuncXFuture(self, task_id, async_poll)

        if not asynchronous:
            return funcx_future.result()

        # Return the result
        return funcx_future
        """
        return r['task_uuid']

    def register_endpoint(self, name, endpoint_uuid, description=None):
        """Register an endpoint with the funcX service.

        Parameters
        ----------
        name : str
            Name of the endpoint
        endpoint_uuid : str
                The uuid of the endpoint
        description : str
            Description of the endpoint

        Returns
        -------
        A dict
            {'endpoint_id' : <>,
             'address' : <>,
             'client_ports': <>}
        """
        data = {
            "endpoint_name": name,
            "endpoint_uuid": endpoint_uuid,
            "description": description
        }

        r = self.post(self.ep_registration_path, json_body=data)
        if r.http_status != 200:
            raise Exception(r)

        # Return the result
        return r.data

    def get_containers(self, name, description=None):
        """Register a DLHub endpoint with the funcX service and get the containers to launch.

        Parameters
        ----------
        name : str
            Name of the endpoint
        description : str
            Description of the endpoint

        Returns
        -------
        int
            The port to connect to and a list of containers
        """
        registration_path = 'get_containers'

        data = {"endpoint_name": name, "description": description}

        r = self.post(registration_path, json_body=data)
        if r.http_status != 200:
            raise Exception(r)

        # Return the result
        return r.data['endpoint_uuid'], r.data['endpoint_containers']

    def get_container(self, container_uuid, container_type):
        """Get the details of a container for staging it locally.

        Parameters
        ----------
        container_uuid : str
            UUID of the container in question
        container_type : str
            The type of containers that will be used (Singularity, Shifter, Docker)

        Returns
        -------
        dict
            The details of the containers to deploy
        """
        container_path = f'containers/{container_uuid}/{container_type}'

        r = self.get(container_path)
        if r.http_status != 200:
            raise Exception(r)

        # Return the result
        return r.data['container']

    def register_function(self,
                          function,
                          function_name=None,
                          container_uuid=None,
                          description=None):
        """Register a function code with the funcX service.

        Parameters
        ----------
        function : Python Function
            The function to be registered for remote execution

        function_name : str
            The entry point (function name) of the function. Default: None

        container_uuid : str
            Container UUID from registration with funcX

        description : str
            Description of the file

        Returns
        -------
        function uuid : str
            UUID identifier for the registered function
        """
        registration_path = 'register_function'

        serialized_fn = self.fx_serializer.serialize(function)
        packed_code = self.fx_serializer.pack_buffers([serialized_fn])

        data = {
            "function_name": function.__name__,
            "function_code": packed_code,
            "container_uuid": container_uuid,
            "entry_point":
            function_name if function_name else function.__name__,
            "description": description
        }

        logger.info("Registering function : {}".format(data))

        r = self.post(registration_path, json_body=data)
        if r.http_status != 200:
            raise Exception(r)

        # Return the result
        return r.data['function_uuid']

    def register_container(self,
                           location,
                           container_type,
                           name='',
                           description=''):
        """Register a container with the funcX service.

        Parameters
        ----------
        location : str
            The location of the container (e.g., its docker url). Required
        container_type : str
            The type of containers that will be used (Singularity, Shifter, Docker). Required

        name : str
            A name for the container. Default = ''
        description : str
            A description to associate with the container. Default = ''

        Returns
        -------
        str
            The id of the container
        """
        container_path = 'containers'

        payload = {
            'name': name,
            'location': location,
            'description': description,
            'type': container_type
        }

        r = self.post(container_path, json_body=payload)
        if r.http_status != 200:
            raise Exception(r)

        # Return the result
        return r.data['container_id']
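
A sketch of how this client is typically driven end to end; the endpoint UUID is a placeholder and the polling loop is illustrative only:

import time

fxc = FuncXClient()

def double(x):
    return 2 * x

func_uuid = fxc.register_function(double, description='double a number')
task_id = fxc.run(21, endpoint_id='<your-endpoint-uuid>', function_id=func_uuid)

# get_result raises an exception while the task is still pending, so poll.
while True:
    try:
        print(fxc.get_result(task_id))  # 42
        break
    except Exception as exc:
        if 'pending' not in str(exc).lower():
            raise
        time.sleep(2)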
Example #18
class FuncXClient(throttling.ThrottledBaseClient):
    """Main class for interacting with the funcX service

    Holds helper operations for performing common tasks with the funcX service.
    """

    TOKEN_DIR = os.path.expanduser("~/.funcx/credentials")
    TOKEN_FILENAME = 'funcx_sdk_tokens.json'
    CLIENT_ID = '4cf29807-cf21-49ec-9443-ff9a3fb9f81c'

    def __init__(self,
                 http_timeout=None,
                 funcx_home=os.path.join('~', '.funcx'),
                 force_login=False,
                 fx_authorizer=None,
                 funcx_service_address='https://api.funcx.org/v1',
                 **kwargs):
        """ Initialize the client

        Parameters
        ----------
        http_timeout: int
        Timeout for any call to service in seconds.
        Default is no timeout

        force_login: bool
        Whether to force a login to get new credentials.

        fx_authorizer : :class:`GlobusAuthorizer <globus_sdk.authorizers.base.GlobusAuthorizer>`
        A custom authorizer instance to communicate with funcX.
        Default: ``None``, will be created.

        funcx_service_address: str
        The address of the funcX web service to communicate with.
        Default: https://api.funcx.org/v1

        Keyword arguments are the same as for BaseClient.
        """
        self.func_table = {}
        self.ep_registration_path = 'register_endpoint_2'
        self.funcx_home = os.path.expanduser(funcx_home)

        if not os.path.exists(self.TOKEN_DIR):
            os.makedirs(self.TOKEN_DIR)

        tokens_filename = os.path.join(self.TOKEN_DIR, self.TOKEN_FILENAME)
        self.native_client = NativeClient(
            client_id=self.CLIENT_ID,
            app_name="FuncX SDK",
            token_storage=JSONTokenStorage(tokens_filename))

        # TODO: if fx_authorizer is given, we still need to get an authorizer for Search
        fx_scope = "https://auth.globus.org/scopes/facd7ccc-c5f4-42aa-916b-a0e270e2c2a9/all"
        search_scope = "urn:globus:auth:scope:search.api.globus.org:all"
        scopes = [fx_scope, search_scope, "openid"]

        search_authorizer = None

        if not fx_authorizer:
            self.native_client.login(
                requested_scopes=scopes,
                no_local_server=kwargs.get("no_local_server", True),
                no_browser=kwargs.get("no_browser", True),
                refresh_tokens=kwargs.get("refresh_tokens", True),
                force=force_login)

            all_authorizers = self.native_client.get_authorizers_by_scope(
                requested_scopes=scopes)
            fx_authorizer = all_authorizers[fx_scope]
            search_authorizer = all_authorizers[search_scope]
            openid_authorizer = all_authorizers["openid"]

        super(FuncXClient, self).__init__("funcX",
                                          environment='funcx',
                                          authorizer=fx_authorizer,
                                          http_timeout=http_timeout,
                                          base_url=funcx_service_address,
                                          **kwargs)
        self.fx_serializer = FuncXSerializer()

        authclient = AuthClient(authorizer=openid_authorizer)
        user_info = authclient.oauth2_userinfo()
        self.searcher = SearchHelper(authorizer=search_authorizer,
                                     owner_uuid=user_info['sub'])
        self.funcx_service_address = funcx_service_address

    def version_check(self):
        """Check this client version meets the service's minimum supported version.
        """
        resp = self.get("version", params={"service": "all"})
        versions = resp.data
        if "min_ep_version" not in versions:
            raise VersionMismatch(
                "Failed to retrieve version information from funcX service.")

        min_ep_version = versions['min_ep_version']

        if ENDPOINT_VERSION is None:
            raise VersionMismatch(
                "You do not have the funcx endpoint installed.  You can use 'pip install funcx-endpoint'."
            )
        if ENDPOINT_VERSION < min_ep_version:
            raise VersionMismatch(
                f"Your version={ENDPOINT_VERSION} is lower than the "
                f"minimum version for an endpoint: {min_ep_version}.  Please update."
            )

    def logout(self):
        """Remove credentials from your local system
        """
        self.native_client.logout()

    def update_table(self, return_msg, task_id):
        """ Parses the return message from the service and updates the internal func_tables

        Parameters
        ----------

        return_msg : str
           Return message received from the funcx service
        task_id : str
           task id string
        """
        if isinstance(return_msg, str):
            r_dict = json.loads(return_msg)
        else:
            r_dict = return_msg

        status = {'pending': True}

        if 'result' in r_dict:
            try:
                r_obj = self.fx_serializer.deserialize(r_dict['result'])
                completion_t = r_dict['completion_t']
            except Exception:
                raise SerializationError("Result Object Deserialization")
            else:
                status.update({
                    'pending': False,
                    'result': r_obj,
                    'completion_t': completion_t
                })
                self.func_table[task_id] = status

        elif 'exception' in r_dict:
            try:
                r_exception = self.fx_serializer.deserialize(
                    r_dict['exception'])
                completion_t = r_dict['completion_t']
                logger.info(f"Exception : {r_exception}")
            except Exception:
                raise SerializationError(
                    "Task's exception object deserialization")
            else:
                status.update({
                    'pending': False,
                    'exception': r_exception,
                    'completion_t': completion_t
                })
                self.func_table[task_id] = status
        return status

    def get_task(self, task_id):
        """Get a funcX task.

        Parameters
        ----------
        task_id : str
            UUID of the task

        Returns
        -------
        dict
            Task block containing "status" key.
        """
        if task_id in self.func_table:
            return self.func_table[task_id]

        r = self.get("tasks/{task_id}".format(task_id=task_id))
        logger.debug("Response string : {}".format(r))
        try:
            rets = self.update_table(r.text, task_id)
        except Exception as e:
            raise e
        return rets

    def get_result(self, task_id):
        """ Get the result of a funcX task

        Parameters
        ----------
        task_id: str
            UUID of the task

        Returns
        -------
        Result obj: If task completed

        Raises
        ------
        Exception obj: Exception due to which the task failed
        """
        task = self.get_task(task_id)
        if task['pending'] is True:
            raise Exception("Task pending")
        else:
            if 'result' in task:
                return task['result']
            else:
                logger.warning("We have an exception : {}".format(
                    task['exception']))
                task['exception'].reraise()

    def get_batch_status(self, task_id_list):
        """ Request status for a batch of task_ids
        """
        assert isinstance(task_id_list,
                          list), "get_batch_status expects a list of task ids"

        pending_task_ids = [
            t for t in task_id_list if t not in self.func_table
        ]

        results = {}

        if pending_task_ids:
            payload = {'task_ids': pending_task_ids}
            r = self.post("/batch_status", json_body=payload)
            logger.debug("Response string : {}".format(r))

        pending_task_ids = set(pending_task_ids)

        for task_id in task_id_list:
            if task_id in pending_task_ids:
                try:
                    data = r['results'][task_id]
                    rets = self.update_table(data, task_id)
                    results[task_id] = rets
                except KeyError:
                    logger.debug(
                        "Task {} info was not available in the batch status"
                        .format(task_id))
                except Exception:
                    logger.exception(
                        "Failure while unpacking results from get_batch_status")
            else:
                results[task_id] = self.func_table[task_id]

        return results

    def get_batch_result(self, task_id_list):
        """ Request results for a batch of task_ids
        """
        pass

    def run(self, *args, endpoint_id=None, function_id=None, **kwargs):
        """Initiate an invocation

        Parameters
        ----------
        *args : Any
            Args as specified by the function signature
        endpoint_id : uuid str
            Endpoint UUID string. Required
        function_id : uuid str
            Function UUID string. Required

        Returns
        -------
        task_id : str
        UUID string that identifies the task
        """
        assert endpoint_id is not None, "endpoint_id keyword argument must be set"
        assert function_id is not None, "function_id keyword argument must be set"

        batch = self.create_batch()
        batch.add(*args,
                  endpoint_id=endpoint_id,
                  function_id=function_id,
                  **kwargs)
        r = self.batch_run(batch)
        """
        Create a future to deal with the result
        funcx_future = FuncXFuture(self, task_id, async_poll)

        if not asynchronous:
            return funcx_future.result()

        # Return the result
        return funcx_future
        """

        return r[0]

    def create_batch(self):
        """
        Create a Batch instance to handle batch submission in funcX

        Parameters
        ----------

        Returns
        -------
        Batch instance
            Status block containing "status" key.
        """
        batch = Batch()
        return batch

    def batch_run(self, batch):
        """Initiate a batch of tasks to funcX

        Parameters
        ----------
        batch: a Batch object

        Returns
        -------
        task_ids : a list of UUID strings that identify the tasks
        """
        servable_path = 'submit'
        assert isinstance(batch, Batch), "Requires a Batch object as input"
        assert len(batch.tasks) > 0, "Requires a non-empty batch"

        data = batch.prepare()

        # Send the data to funcX
        r = self.post(servable_path, json_body=data)
        if r.http_status != 200:
            raise HTTPError(r)
        if r.get("status", "Failure") == "Failure":
            raise MalformedResponse("FuncX Request failed: {}".format(
                r.get("reason", "Unknown")))
        return r['task_uuids']

    def map_run(self,
                *args,
                endpoint_id=None,
                function_id=None,
                asynchronous=False,
                **kwargs):
        """Initiate an invocation

        Parameters
        ----------
        *args : Any
            Args as specified by the function signature
        endpoint_id : uuid str
            Endpoint UUID string. Required
        function_id : uuid str
            Function UUID string. Required
        asynchronous : bool
            Whether or not to run the function asynchronously

        Returns
        -------
        task_id : str
        UUID string that identifies the task
        """
        servable_path = 'submit_batch'
        assert endpoint_id is not None, "endpoint_id keyword argument must be set"
        assert function_id is not None, "function_id keyword argument must be set"

        ser_kwargs = self.fx_serializer.serialize(kwargs)

        batch_payload = []
        iterator = args[0]
        for arg in iterator:
            ser_args = self.fx_serializer.serialize((arg, ))
            payload = self.fx_serializer.pack_buffers([ser_args, ser_kwargs])
            batch_payload.append(payload)

        data = {
            'endpoints': [endpoint_id],
            'func': function_id,
            'payload': batch_payload,
            'is_async': asynchronous
        }

        # Send the data to funcX
        r = self.post(servable_path, json_body=data)
        if r.http_status != 200:
            raise Exception(r)

        if r.get("status", "Failure") == "Failure":
            raise MalformedResponse("FuncX Request failed: {}".format(
                r.get("reason", "Unknown")))
        return r['task_uuids']

    def register_endpoint(self,
                          name,
                          endpoint_uuid,
                          metadata=None,
                          endpoint_version=None):
        """Register an endpoint with the funcX service.

        Parameters
        ----------
        name : str
            Name of the endpoint
        endpoint_uuid : str
                The uuid of the endpoint
        metadata : dict
            endpoint metadata, see default_config example
        endpoint_version: str
            Version string to be passed to the webService as a compatibility check

        Returns
        -------
        A dict
            {'endpoint_id' : <>,
             'address' : <>,
             'client_ports': <>}
        """
        self.version_check()

        data = {
            "endpoint_name": name,
            "endpoint_uuid": endpoint_uuid,
            "version": endpoint_version
        }
        if metadata:
            data['meta'] = metadata

        r = self.post(self.ep_registration_path, json_body=data)
        if r.http_status != 200:
            raise HTTPError(r)

        # Return the result
        return r.data

    def get_containers(self, name, description=None):
        """Register a DLHub endpoint with the funcX service and get the containers to launch.

        Parameters
        ----------
        name : str
            Name of the endpoint
        description : str
            Description of the endpoint

        Returns
        -------
        int
            The port to connect to and a list of containers
        """
        registration_path = 'get_containers'

        data = {"endpoint_name": name, "description": description}

        r = self.post(registration_path, json_body=data)
        if r.http_status != 200:
            raise HTTPError(r)

        # Return the result
        return r.data['endpoint_uuid'], r.data['endpoint_containers']

    def get_container(self, container_uuid, container_type):
        """Get the details of a container for staging it locally.

        Parameters
        ----------
        container_uuid : str
            UUID of the container in question
        container_type : str
            The type of containers that will be used (Singularity, Shifter, Docker)

        Returns
        -------
        dict
            The details of the containers to deploy
        """
        container_path = f'containers/{container_uuid}/{container_type}'

        r = self.get(container_path)
        if r.http_status != 200:
            raise HTTPError(r)

        # Return the result
        return r.data['container']

    def get_endpoint_status(self, endpoint_uuid):
        """Get the status reports for an endpoint.

        Parameters
        ----------
        endpoint_uuid : str
            UUID of the endpoint in question

        Returns
        -------
        dict
            The details of the endpoint's stats
        """
        stats_path = f'endpoints/{endpoint_uuid}/status'

        r = self.get(stats_path)
        if r.http_status != 200:
            raise HTTPError(r)

        # Return the result
        return r.data

    def register_function(self,
                          function,
                          function_name=None,
                          container_uuid=None,
                          description=None,
                          public=False,
                          group=None,
                          searchable=True):
        """Register a function code with the funcX service.

        Parameters
        ----------
        function : Python Function
            The function to be registered for remote execution
        function_name : str
            The entry point (function name) of the function. Default: None
        container_uuid : str
            Container UUID from registration with funcX
        description : str
            Description of the file
        public : bool
            Whether or not the function is publicly accessible. Default = False
        group : str
            A globus group uuid to share this function with
        searchable : bool
            If true, the function will be indexed into globus search with the appropriate permissions

        Returns
        -------
        function uuid : str
            UUID identifier for the registered function
        """
        registration_path = 'register_function'

        source_code = ""
        try:
            source_code = getsource(function)
        except OSError:
            logger.error(
                "Failed to find source code during function registration.")

        serialized_fn = self.fx_serializer.serialize(function)
        packed_code = self.fx_serializer.pack_buffers([serialized_fn])

        data = {
            "function_name": function.__name__,
            "function_code": packed_code,
            "function_source": source_code,
            "container_uuid": container_uuid,
            "entry_point":
            function_name if function_name else function.__name__,
            "description": description,
            "public": public,
            "group": group,
            "searchable": searchable
        }

        logger.info("Registering function : {}".format(data))

        r = self.post(registration_path, json_body=data)
        if r.http_status != 200:
            raise HTTPError(r)

        func_uuid = r.data['function_uuid']

        # Return the result
        return func_uuid

    def update_function(self, func_uuid, function):
        pass

    def search_function(self, q, offset=0, limit=10, advanced=False):
        """Search for function via the funcX service

        Parameters
        ----------
        q : str
            free-form query string
        offset : int
            offset into total results
        limit : int
            max number of results to return
        advanced : bool
            allows elastic-search like syntax in query string

        Returns
        -------
        FunctionSearchResults
        """
        return self.searcher.search_function(q,
                                             offset=offset,
                                             limit=limit,
                                             advanced=advanced)

    def search_endpoint(self, q, scope='all', owner_id=None):
        """

        Parameters
        ----------
        q
        scope : str
            Can be one of {'all', 'my-endpoints', 'shared-with-me'}
        owner_id
            should be urn like f"urn:globus:auth:identity:{owner_uuid}"

        Returns
        -------

        """
        return self.searcher.search_endpoint(q, scope=scope, owner_id=owner_id)

    def register_container(self,
                           location,
                           container_type,
                           name='',
                           description=''):
        """Register a container with the funcX service.

        Parameters
        ----------
        location : str
            The location of the container (e.g., its docker url). Required
        container_type : str
            The type of containers that will be used (Singularity, Shifter, Docker). Required

        name : str
            A name for the container. Default = ''
        description : str
            A description to associate with the container. Default = ''

        Returns
        -------
        str
            The id of the container
        """
        container_path = 'containers'

        payload = {
            'name': name,
            'location': location,
            'description': description,
            'type': container_type
        }

        r = self.post(container_path, json_body=payload)
        if r.http_status != 200:
            raise HTTPError(r)

        # Return the result
        return r.data['container_id']

    def add_to_whitelist(self, endpoint_id, function_ids):
        """Adds the function to the endpoint's whitelist

        Parameters
        ----------
        endpoint_id : str
            The uuid of the endpoint
        function_ids : list
            A list of function id's to be whitelisted

        Returns
        -------
        json
            The response of the request
        """
        req_path = f'endpoints/{endpoint_id}/whitelist'

        if not isinstance(function_ids, list):
            function_ids = [function_ids]

        payload = {'func': function_ids}

        r = self.post(req_path, json_body=payload)
        if r.http_status != 200:
            raise HTTPError(r)

        # Return the result
        return r

    def get_whitelist(self, endpoint_id):
        """List the endpoint's whitelist

        Parameters
        ----------
        endpoint_id : str
            The uuid of the endpoint

        Returns
        -------
        json
            The response of the request
        """
        req_path = f'endpoints/{endpoint_id}/whitelist'

        r = self.get(req_path)
        if r.http_status != 200:
            raise HTTPError(r)

        # Return the result
        return r

    def delete_from_whitelist(self, endpoint_id, function_ids):
        """List the endpoint's whitelist

        Parameters
        ----------
        endpoint_id : str
            The uuid of the endpoint
        function_ids : list
            A list of function id's to be whitelisted

        Returns
        -------
        json
            The response of the request
        """
        if not isinstance(function_ids, list):
            function_ids = [function_ids]
        res = []
        for fid in function_ids:
            req_path = f'endpoints/{endpoint_id}/whitelist/{fid}'

            r = self.delete(req_path)
            if r.http_status != 200:
                raise HTTPError(r)
            res.append(r)

        # Return the result
        return res
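
A batch-submission sketch for this version of the client; endpoint and function UUIDs are placeholders, and `Batch` is assumed to be importable as in the original module:

fxc = FuncXClient()

batch = fxc.create_batch()
for x in range(4):
    batch.add(x,
              endpoint_id='<your-endpoint-uuid>',
              function_id='<registered-function-uuid>')
task_ids = fxc.batch_run(batch)

# Each entry maps a task id to a status dict with 'pending' and, once done, 'result'.
for tid, status in fxc.get_batch_status(task_ids).items():
    print(tid, status.get('result', 'pending'))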
Example #19
    def __init__(self,
                 http_timeout=None,
                 funcx_home=os.path.join('~', '.funcx'),
                 force_login=False,
                 fx_authorizer=None,
                 funcx_service_address='https://api.funcx.org/v1',
                 **kwargs):
        """ Initialize the client

        Parameters
        ----------
        http_timeout: int
        Timeout for any call to service in seconds.
        Default is no timeout

        force_login: bool
        Whether to force a login to get new credentials.

        fx_authorizer : :class:`GlobusAuthorizer <globus_sdk.authorizers.base.GlobusAuthorizer>`
        A custom authorizer instance to communicate with funcX.
        Default: ``None``, will be created.

        funcx_service_address: str
        The address of the funcX web service to communicate with.
        Default: https://api.funcx.org/v1

        Keyword arguments are the same as for BaseClient.
        """
        self.func_table = {}
        self.ep_registration_path = 'register_endpoint_2'
        self.funcx_home = os.path.expanduser(funcx_home)

        if not os.path.exists(self.TOKEN_DIR):
            os.makedirs(self.TOKEN_DIR)

        tokens_filename = os.path.join(self.TOKEN_DIR, self.TOKEN_FILENAME)
        self.native_client = NativeClient(
            client_id=self.CLIENT_ID,
            app_name="FuncX SDK",
            token_storage=JSONTokenStorage(tokens_filename))

        # TODO: if fx_authorizer is given, we still need to get an authorizer for Search
        fx_scope = "https://auth.globus.org/scopes/facd7ccc-c5f4-42aa-916b-a0e270e2c2a9/all"
        search_scope = "urn:globus:auth:scope:search.api.globus.org:all"
        scopes = [fx_scope, search_scope, "openid"]

        search_authorizer = None

        if not fx_authorizer:
            self.native_client.login(
                requested_scopes=scopes,
                no_local_server=kwargs.get("no_local_server", True),
                no_browser=kwargs.get("no_browser", True),
                refresh_tokens=kwargs.get("refresh_tokens", True),
                force=force_login)

            all_authorizers = self.native_client.get_authorizers_by_scope(
                requested_scopes=scopes)
            fx_authorizer = all_authorizers[fx_scope]
            search_authorizer = all_authorizers[search_scope]
            openid_authorizer = all_authorizers["openid"]

        super(FuncXClient, self).__init__("funcX",
                                          environment='funcx',
                                          authorizer=fx_authorizer,
                                          http_timeout=http_timeout,
                                          base_url=funcx_service_address,
                                          **kwargs)
        self.fx_serializer = FuncXSerializer()

        authclient = AuthClient(authorizer=openid_authorizer)
        user_info = authclient.oauth2_userinfo()
        self.searcher = SearchHelper(authorizer=search_authorizer,
                                     owner_uuid=user_info['sub'])
        self.funcx_service_address = funcx_service_address
Example #20
import os
import json
from fair_research_login import NativeClient

CLIENT_ID = 'e54de045-d346-42ef-9fbc-5d466f4a00c6'
APP_NAME = 'My App'
SCOPES = 'openid email profile urn:globus:auth:scope:transfer.api.globus.org:all urn:globus:auth:scope:search.api.globus.org:all'
CONFIG_FILE = 'tokens-data.json'

tokens = None
# try to load tokens from local file (native app config)
client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
try:
    tokens = client.load_tokens(requested_scopes=SCOPES)
except Exception:
    pass

if not tokens:
    # if no tokens, need to start Native App authentication process to get tokens
    tokens = client.login(requested_scopes=SCOPES, refresh_tokens=False)

    try:
        # save the tokens
        client.save_tokens(tokens)

        # create environment variable
        os.environ['GLOBUS_DATA'] = json.dumps(tokens,
                                               indent=4,
                                               sort_keys=True)
    except Exception:
        pass
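
Since the script above stashes the whole token bundle in the GLOBUS_DATA environment variable as JSON, a later step in the same process could recover individual tokens like this (a sketch; the resource-server keys depend on the scopes that were requested):

import json
import os

# Recover the token bundle stored above; keys are resource servers.
saved = json.loads(os.environ.get('GLOBUS_DATA', '{}'))
transfer = saved.get('transfer.api.globus.org', {})
print(transfer.get('access_token'))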
"""
Here are some edge cases you may have to deal with in more complex scripts.
"""
import globus_sdk
from fair_research_login import NativeClient, TokensExpired
from fair_research_login.exc import LocalServerError

# Register a Native App for a client_id at https://developers.globus.org
client = NativeClient(client_id='7414f0b4-7d05-4bb6-bb00-076fa3f17cf5')
"""
Overwriting old live tokens results in revocation
"""

# Native Client revokes tokens when they're overwritten. It's generally bad to
# depend on an old authorizer after a new login.
# The following example will raise an error:

client.login(requested_scopes=['openid', 'profile'])
auth = client.get_authorizers()['auth.globus.org']
# Requesting a token with new scopes will revoke the old token
client.login(requested_scopes=['openid', 'profile', 'email'])
# Using the old authorizer will result in an error
globus_sdk.AuthClient(authorizer=auth).oauth2_userinfo()
"""
Handling when the user does not consent
"""

try:
    client.login(
        requested_scopes=['openid', 'profile'],
        # Using the local server will essentially copy the auth code returned
        # by Globus back into this script automatically, so a denied consent
        # surfaces here as an exception rather than a hung prompt.
        no_local_server=False,
    )
except LocalServerError as lse:
    print(f'Login failed: {lse}')
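
A safe way around the revocation pitfall demonstrated earlier in this example is to re-fetch authorizers after every login instead of holding on to old ones; a brief sketch using the same client:

# Re-login with the new scope set, then fetch a fresh authorizer before use.
client.login(requested_scopes=['openid', 'profile', 'email'])
auth = client.get_authorizers()['auth.globus.org']
print(globus_sdk.AuthClient(authorizer=auth).oauth2_userinfo()['sub'])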
Example #22
"""
The most basic usage automatically saves and loads tokens, and provides
a local server for logging in users.
"""
from globus_sdk import AuthClient
from fair_research_login import NativeClient

# Register a Native App for a client_id at https://developers.globus.org
client = NativeClient(client_id='7414f0b4-7d05-4bb6-bb00-076fa3f17cf5')

# Automatically saves tokens in ~/.globus-native-apps.cfg
tokens = client.login(
    # Request any scopes you want to use here.
    requested_scopes=['openid', 'profile'],
    # You can turn off the local server if it cannot be used for some reason
    no_local_server=False,
    # You can also turn off automatically opening the Auth URL
    no_browser=False,
    # refresh tokens are fully supported, but optional
    refresh_tokens=True,
)

# Authorizers automatically choose a refresh token authorizer if possible,
# and will automatically save new refreshed tokens when they expire.
ac_authorizer = client.get_authorizers()['auth.globus.org']

# Example client usage:
auth_cli = AuthClient(authorizer=ac_authorizer)
user_info = auth_cli.oauth2_userinfo()
print('Hello {}! How are you today?'.format(user_info['name']))
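
On a later run with the same scopes, login() should find the tokens cached in ~/.globus-native-apps.cfg and return them without prompting, so scripts can call it unconditionally; for example:

# Second run: valid cached tokens are loaded instead of starting a new flow.
tokens = client.login(requested_scopes=['openid', 'profile'])
print(sorted(tokens))   # typically just the 'auth.globus.org' resource server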
Example #23
def get_tokens():
    """
    Retrieves the Globus tokens.
    """
    client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    return client.load_tokens()
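
load_tokens() raises if nothing has been saved for this app yet, so callers of the helper above typically guard it. A sketch, where the fallback scope list is an assumption:

# Hedged usage: fall back to an interactive login if nothing is saved yet.
try:
    tokens = get_tokens()
except Exception:
    client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
    tokens = client.login(requested_scopes=['openid'])
print(sorted(tokens))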
Example #24
def live_client():
    storage = ConfigParserTokenStorage(filename='integ_testing_tokens.cfg')
    client = NativeClient(client_id='7414f0b4-7d05-4bb6-bb00-076fa3f17cf5',
                          token_storage=storage,
                          default_scopes=['openid'])
    return client
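
The fixture above just swaps in a config-file token store, so from a test it behaves like any other NativeClient. A sketch, assuming the ConfigParserTokenStorage import the snippet elides and that the integration test may open a login flow:

# default_scopes=['openid'] means login() needs no explicit scope list here,
# and the resulting tokens land in integ_testing_tokens.cfg.
cli = live_client()
cli.login(no_local_server=True, no_browser=True)
print(list(cli.load_tokens()))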
Example #25
#!/usr/bin/env python
import os
import json
from fair_research_login import NativeClient

CLIENT_ID = '7414f0b4-7d05-4bb6-bb00-076fa3f17cf5'
APP_NAME = 'My App'
SCOPES = 'openid email profile urn:globus:auth:scope:transfer.api.globus.org:all urn:globus:auth:scope:search.api.globus.org:all'
CONFIG_FILE = 'tokens-data.json'

tokens = None
# try to load tokens from local file (native app config)
client = NativeClient(client_id=CLIENT_ID, app_name=APP_NAME)
tokens = client.login(requested_scopes=SCOPES, refresh_tokens=True)
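
CONFIG_FILE is declared above but the snippet ends before it is used; one plausible continuation (an assumption, not part of the original) is to persist the token bundle to that file:

# Hypothetical continuation: write the tokens to the JSON file declared above.
with open(CONFIG_FILE, 'w') as f:
    json.dump(tokens, f, indent=4, sort_keys=True)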
Example #26
    }, {
        'url':
        'globus://ddb59aef-6d04-11e5-ba46-22000b92c6ec/share/godata/file2.txt',
        'filename':
        'file2.txt',
        'length':
        4,
        'sha256':
        'dd8ed44a83ff94d557f9fd0412ed5a8cbca69ea04922d88c01184a07300a5a',
    }, {
        'url':
        'globus://ddb59aef-6d04-11e5-ba46-22000b92c6ec/share/godata/file3.txt',
        'filename':
        'file3.txt',
        'length':
        6,
        'sha256':
        'f6936912184481f5edd4c304ce27c5a1a827804fc7f329f43d273b8621870776',
    }]
}

client = NativeClient(client_id='e6c75d97-532a-4c88-b031-8584a319fa3e')
client.login(requested_scopes=scope)
headers = {
    'Authorization':
    f'Bearer {client.load_tokens_by_scope(requested_scopes=scope)[scope]["access_token"]}'
}
r = requests.post(url, headers=headers, json=manifest)
r.raise_for_status()
print(f'Created manifest: {r.json()["id"]}')
Example #27
    def __init__(
        self,
        http_timeout=None,
        funcx_home=_FUNCX_HOME,
        force_login=False,
        fx_authorizer=None,
        search_authorizer=None,
        openid_authorizer=None,
        funcx_service_address=None,
        check_endpoint_version=False,
        asynchronous=False,
        loop=None,
        results_ws_uri=None,
        use_offprocess_checker=True,
        environment=None,
        **kwargs,
    ):
        """
        Initialize the client

        Parameters
        ----------
        http_timeout: int
            Timeout for any call to service in seconds.
            Default is no timeout

        force_login: bool
            Whether to force a login to get new credentials.

        fx_authorizer:class:`GlobusAuthorizer \
            <globus_sdk.authorizers.base.GlobusAuthorizer>`:
            A custom authorizer instance to communicate with funcX.
            Default: ``None``, will be created.

        search_authorizer:class:`GlobusAuthorizer \
            <globus_sdk.authorizers.base.GlobusAuthorizer>`:
            A custom authorizer instance to communicate with Globus Search.
            Default: ``None``, will be created.

        openid_authorizer:class:`GlobusAuthorizer \
            <globus_sdk.authorizers.base.GlobusAuthorizer>`:
            A custom authorizer instance to communicate with OpenID.
            Default: ``None``, will be created.

        funcx_service_address: str
            For internal use only. The address of the web service.

        results_ws_uri: str
            For internal use only. The address of the websocket service.

        environment: str
            For internal use only. The name of the environment to use.

        asynchronous: bool
            Should the API use asynchronous interactions with the web service?
            Currently only impacts the run method.
            Default: False

        loop: AbstractEventLoop
            If asynchronous mode is requested, then you can provide an optional
            event loop instance. If None, then we will access
            asyncio.get_event_loop()
            Default: None

        use_offprocess_checker: bool
            Use this option to disable the offprocess_checker in the
            FuncXSerializer used by the client.
            Default: True

        Keyword arguments are the same as for BaseClient.

        """
        # resolve URLs if not set
        if funcx_service_address is None:
            funcx_service_address = get_web_service_url(environment)
        if results_ws_uri is None:
            results_ws_uri = get_web_socket_url(environment)

        self.func_table = {}
        self.use_offprocess_checker = use_offprocess_checker
        self.funcx_home = os.path.expanduser(funcx_home)
        self.session_task_group_id = str(uuid.uuid4())

        if not os.path.exists(self.TOKEN_DIR):
            os.makedirs(self.TOKEN_DIR)

        tokens_filename = os.path.join(self.TOKEN_DIR, self.TOKEN_FILENAME)
        self.native_client = NativeClient(
            client_id=self.FUNCX_SDK_CLIENT_ID,
            app_name="FuncX SDK",
            token_storage=JSONTokenStorage(tokens_filename),
        )

        # TODO: if fx_authorizer is given, we still need to get an authorizer for Search
        search_scope = "urn:globus:auth:scope:search.api.globus.org:all"
        scopes = [self.FUNCX_SCOPE, search_scope, "openid"]

        if not fx_authorizer or not search_authorizer or not openid_authorizer:
            self.native_client.login(
                requested_scopes=scopes,
                no_local_server=kwargs.get("no_local_server", True),
                no_browser=kwargs.get("no_browser", True),
                refresh_tokens=kwargs.get("refresh_tokens", True),
                force=force_login,
            )

            all_authorizers = self.native_client.get_authorizers_by_scope(
                requested_scopes=scopes
            )
            fx_authorizer = all_authorizers[self.FUNCX_SCOPE]
            search_authorizer = all_authorizers[search_scope]
            openid_authorizer = all_authorizers["openid"]

        self.web_client = FuncxWebClient(
            base_url=funcx_service_address, authorizer=fx_authorizer
        )
        self.fx_serializer = FuncXSerializer(
            use_offprocess_checker=self.use_offprocess_checker
        )

        authclient = AuthClient(authorizer=openid_authorizer)
        user_info = authclient.oauth2_userinfo()
        self.searcher = SearchHelper(
            authorizer=search_authorizer, owner_uuid=user_info["sub"]
        )
        self.funcx_service_address = funcx_service_address
        self.check_endpoint_version = check_endpoint_version

        self.version_check()

        self.results_ws_uri = results_ws_uri
        self.asynchronous = asynchronous
        if asynchronous:
            self.loop = loop if loop else asyncio.get_event_loop()

            # Start up an asynchronous polling loop in the background
            self.ws_polling_task = WebSocketPollingTask(
                self,
                self.loop,
                init_task_group_id=self.session_task_group_id,
                results_ws_uri=self.results_ws_uri,
            )
        else:
            self.loop = None
Example #28
def get_transfer_client():
    nc = NativeClient(client_id='7414f0b4-7d05-4bb6-bb00-076fa3f17cf5')
    nc.login(
        requested_scopes=['urn:globus:auth:scope:transfer.api.globus.org:all'])
    auth = nc.get_authorizers()['transfer.api.globus.org']
    return globus_sdk.TransferClient(authorizer=auth)
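
Hedged usage of the helper above: list a directory on Globus Tutorial Endpoint 1, whose UUID and sample path also appear in Example #26 (treat both as an illustration):

# List the public sample data on the tutorial endpoint.
tc = get_transfer_client()
tutorial_endpoint = 'ddb59aef-6d04-11e5-ba46-22000b92c6ec'
for entry in tc.operation_ls(tutorial_endpoint, path='/share/godata/'):
    print(entry['name'], entry['size'])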
Example #29
# Be careful with enabling refresh tokens, they can leave a long-lived
# mechanism to retrieve access on your system
REFRESH_TOKENS = False

# Set this to True if you're running this on a remote
# system via SSH. The login URL will be shown for you to open in a local
# browser; paste the resulting auth code back into this terminal to finish.
HEADLESS = False

if __name__ == '__main__':
    if len(sys.argv) != 2:
        print('Usage: ./gettoken.py <scope>')
        print('Example: ./gettoken.py openid')
        sys.exit(1)

    scope = sys.argv[1]
    cli = NativeClient(client_id=CLIENT_ID,
                       default_scopes=[scope])
    try:
        tokens = cli.load_tokens_by_scope(requested_scopes=[scope])
    except Exception:
        no_local_server = False
        no_browser = False
        if HEADLESS:
            no_local_server = True
            no_browser = True
        cli.login(requested_scopes=[scope],
                  refresh_tokens=REFRESH_TOKENS,
                  no_local_server=no_local_server,
                  no_browser=no_browser)
        tokens = cli.load_tokens_by_scope(requested_scopes=[scope])

    print(tokens[scope]['access_token'])
Example #30
class FuncXClient:
    """Main class for interacting with the funcX service

    Holds helper operations for performing common tasks with the funcX service.
    """

    TOKEN_DIR = os.path.expanduser("~/.funcx/credentials")
    TOKEN_FILENAME = "funcx_sdk_tokens.json"
    FUNCX_SDK_CLIENT_ID = os.environ.get(
        "FUNCX_SDK_CLIENT_ID", "4cf29807-cf21-49ec-9443-ff9a3fb9f81c"
    )
    FUNCX_SCOPE = os.environ.get(
        "FUNCX_SCOPE",
        "https://auth.globus.org/scopes/facd7ccc-c5f4-42aa-916b-a0e270e2c2a9/all",
    )

    def __init__(
        self,
        http_timeout=None,
        funcx_home=_FUNCX_HOME,
        force_login=False,
        fx_authorizer=None,
        search_authorizer=None,
        openid_authorizer=None,
        funcx_service_address=None,
        check_endpoint_version=False,
        asynchronous=False,
        loop=None,
        results_ws_uri=None,
        use_offprocess_checker=True,
        environment=None,
        **kwargs,
    ):
        """
        Initialize the client

        Parameters
        ----------
        http_timeout: int
            Timeout for any call to service in seconds.
            Default is no timeout

        force_login: bool
            Whether to force a login to get new credentials.

        fx_authorizer:class:`GlobusAuthorizer \
            <globus_sdk.authorizers.base.GlobusAuthorizer>`:
            A custom authorizer instance to communicate with funcX.
            Default: ``None``, will be created.

        search_authorizer:class:`GlobusAuthorizer \
            <globus_sdk.authorizers.base.GlobusAuthorizer>`:
            A custom authorizer instance to communicate with Globus Search.
            Default: ``None``, will be created.

        openid_authorizer:class:`GlobusAuthorizer \
            <globus_sdk.authorizers.base.GlobusAuthorizer>`:
            A custom authorizer instance to communicate with OpenID.
            Default: ``None``, will be created.

        funcx_service_address: str
            For internal use only. The address of the web service.

        results_ws_uri: str
            For internal use only. The address of the websocket service.

        environment: str
            For internal use only. The name of the environment to use.

        asynchronous: bool
            Should the API use asynchronous interactions with the web service?
            Currently only impacts the run method.
            Default: False

        loop: AbstractEventLoop
            If asynchronous mode is requested, then you can provide an optional
            event loop instance. If None, then we will access
            asyncio.get_event_loop()
            Default: None

        use_offprocess_checker: bool
            Use this option to disable the offprocess_checker in the
            FuncXSerializer used by the client.
            Default: True

        Keyword arguments are the same as for BaseClient.

        """
        # resolve URLs if not set
        if funcx_service_address is None:
            funcx_service_address = get_web_service_url(environment)
        if results_ws_uri is None:
            results_ws_uri = get_web_socket_url(environment)

        self.func_table = {}
        self.use_offprocess_checker = use_offprocess_checker
        self.funcx_home = os.path.expanduser(funcx_home)
        self.session_task_group_id = str(uuid.uuid4())

        if not os.path.exists(self.TOKEN_DIR):
            os.makedirs(self.TOKEN_DIR)

        tokens_filename = os.path.join(self.TOKEN_DIR, self.TOKEN_FILENAME)
        self.native_client = NativeClient(
            client_id=self.FUNCX_SDK_CLIENT_ID,
            app_name="FuncX SDK",
            token_storage=JSONTokenStorage(tokens_filename),
        )

        # TODO: if fx_authorizer is given, we still need to get an authorizer for Search
        search_scope = "urn:globus:auth:scope:search.api.globus.org:all"
        scopes = [self.FUNCX_SCOPE, search_scope, "openid"]

        if not fx_authorizer or not search_authorizer or not openid_authorizer:
            self.native_client.login(
                requested_scopes=scopes,
                no_local_server=kwargs.get("no_local_server", True),
                no_browser=kwargs.get("no_browser", True),
                refresh_tokens=kwargs.get("refresh_tokens", True),
                force=force_login,
            )

            all_authorizers = self.native_client.get_authorizers_by_scope(
                requested_scopes=scopes
            )
            fx_authorizer = all_authorizers[self.FUNCX_SCOPE]
            search_authorizer = all_authorizers[search_scope]
            openid_authorizer = all_authorizers["openid"]

        self.web_client = FuncxWebClient(
            base_url=funcx_service_address, authorizer=fx_authorizer
        )
        self.fx_serializer = FuncXSerializer(
            use_offprocess_checker=self.use_offprocess_checker
        )

        authclient = AuthClient(authorizer=openid_authorizer)
        user_info = authclient.oauth2_userinfo()
        self.searcher = SearchHelper(
            authorizer=search_authorizer, owner_uuid=user_info["sub"]
        )
        self.funcx_service_address = funcx_service_address
        self.check_endpoint_version = check_endpoint_version

        self.version_check()

        self.results_ws_uri = results_ws_uri
        self.asynchronous = asynchronous
        if asynchronous:
            self.loop = loop if loop else asyncio.get_event_loop()

            # Start up an asynchronous polling loop in the background
            self.ws_polling_task = WebSocketPollingTask(
                self,
                self.loop,
                init_task_group_id=self.session_task_group_id,
                results_ws_uri=self.results_ws_uri,
            )
        else:
            self.loop = None

    def version_check(self):
        """Check this client version meets the service's minimum supported version."""
        resp = self.web_client.get_version()
        versions = resp.data
        if "min_ep_version" not in versions:
            raise VersionMismatch(
                "Failed to retrieve version information from funcX service."
            )

        min_ep_version = versions["min_ep_version"]
        min_sdk_version = versions["min_sdk_version"]

        if self.check_endpoint_version:
            if ENDPOINT_VERSION is None:
                raise VersionMismatch(
                    "You do not have the funcx endpoint installed.  "
                    "You can use 'pip install funcx-endpoint'."
                )
            if LooseVersion(ENDPOINT_VERSION) < LooseVersion(min_ep_version):
                raise VersionMismatch(
                    f"Your version={ENDPOINT_VERSION} is lower than the "
                    f"minimum version for an endpoint: {min_ep_version}.  "
                    "Please update. "
                    f"pip install funcx-endpoint>={min_ep_version}"
                )
        else:
            if LooseVersion(SDK_VERSION) < LooseVersion(min_sdk_version):
                raise VersionMismatch(
                    f"Your version={SDK_VERSION} is lower than the "
                    f"minimum version for funcx SDK: {min_sdk_version}.  "
                    "Please update. "
                    f"pip install funcx>={min_sdk_version}"
                )

    def logout(self):
        """Remove credentials from your local system"""
        self.native_client.logout()

    def update_table(self, return_msg, task_id):
        """Parses the return message from the service and updates the internal func_table

        Parameters
        ----------

        return_msg : str
           Return message received from the funcx service
        task_id : str
           task id string
        """
        if isinstance(return_msg, str):
            r_dict = json.loads(return_msg)
        else:
            r_dict = return_msg

        r_status = r_dict.get("status", "unknown")
        status = {"pending": True, "status": r_status}

        if "result" in r_dict:
            try:
                r_obj = self.fx_serializer.deserialize(r_dict["result"])
                completion_t = r_dict["completion_t"]
            except Exception:
                raise SerializationError("Result Object Deserialization")
            else:
                status.update(
                    {"pending": False, "result": r_obj, "completion_t": completion_t}
                )
                self.func_table[task_id] = status

        elif "exception" in r_dict:
            try:
                r_exception = self.fx_serializer.deserialize(r_dict["exception"])
                completion_t = r_dict["completion_t"]
                logger.info(f"Exception : {r_exception}")
            except Exception:
                raise SerializationError("Task's exception object deserialization")
            else:
                status.update(
                    {
                        "pending": False,
                        "exception": r_exception,
                        "completion_t": completion_t,
                    }
                )
                self.func_table[task_id] = status
        return status

    def get_task(self, task_id):
        """Get a funcX task.

        Parameters
        ----------
        task_id : str
            UUID of the task

        Returns
        -------
        dict
            Task block containing "status" key.
        """
        if task_id in self.func_table:
            return self.func_table[task_id]

        r = self.web_client.get_task(task_id)
        logger.debug(f"Response string : {r}")
        try:
            rets = self.update_table(r.text, task_id)
        except Exception as e:
            raise e
        return rets

    def get_result(self, task_id):
        """Get the result of a funcX task

        Parameters
        ----------
        task_id: str
            UUID of the task

        Returns
        -------
        Result obj: If task completed

        Raises
        ------
        Exception obj: Exception due to which the task failed
        """
        task = self.get_task(task_id)
        if task["pending"] is True:
            raise TaskPending(task["status"])
        else:
            if "result" in task:
                return task["result"]
            else:
                logger.warning("We have an exception : {}".format(task["exception"]))
                task["exception"].reraise()

    def get_batch_result(self, task_id_list):
        """Request status for a batch of task_ids"""
        assert isinstance(
            task_id_list, list
        ), "get_batch_result expects a list of task ids"

        pending_task_ids = [t for t in task_id_list if t not in self.func_table]

        results = {}

        if pending_task_ids:
            r = self.web_client.get_batch_status(pending_task_ids)
            logger.debug(f"Response string : {r}")

        pending_task_ids = set(pending_task_ids)

        for task_id in task_id_list:
            if task_id in pending_task_ids:
                try:
                    data = r["results"][task_id]
                    rets = self.update_table(data, task_id)
                    results[task_id] = rets
                except KeyError:
                    logger.debug("Task {} info was not available in the batch status")
                except Exception:
                    logger.exception(
                        "Failure while unpacking results fom get_batch_result"
                    )
            else:
                results[task_id] = self.func_table[task_id]

        return results

    def run(self, *args, endpoint_id=None, function_id=None, **kwargs):
        """Initiate an invocation

        Parameters
        ----------
        *args : Any
            Args as specified by the function signature
        endpoint_id : uuid str
            Endpoint UUID string. Required
        function_id : uuid str
            Function UUID string. Required
        asynchronous : bool
            Whether or not to run the function asynchronously

        Returns
        -------
        task_id : str
            UUID string that identifies the task if asynchronous is False

        funcX Task: asyncio.Task
            A future that will eventually resolve into the function's result
            if asynchronous is True
        """
        assert endpoint_id is not None, "endpoint_id key-word argument must be set"
        assert function_id is not None, "function_id key-word argument must be set"

        batch = self.create_batch()
        batch.add(*args, endpoint_id=endpoint_id, function_id=function_id, **kwargs)
        r = self.batch_run(batch)

        return r[0]

    def create_batch(self, task_group_id=None):
        """
        Create a Batch instance to handle batch submission in funcX

        Parameters
        ----------

        task_group_id : str
            Override the session wide session_task_group_id with a different
            task_group_id for this batch.
            If task_group_id is not specified, it will default to using the client's
            session_task_group_id

        Returns
        -------
        Batch instance
            Status block containing "status" key.
        """
        if not task_group_id:
            task_group_id = self.session_task_group_id

        batch = Batch(task_group_id=task_group_id)
        return batch

    def batch_run(self, batch):
        """Initiate a batch of tasks to funcX

        Parameters
        ----------
        batch: a Batch object

        Returns
        -------
        task_ids : a list of UUID strings that identify the tasks
        """
        assert isinstance(batch, Batch), "Requires a Batch object as input"
        assert len(batch.tasks) > 0, "Requires a non-empty batch"

        data = batch.prepare()

        # Send the data to funcX
        r = self.web_client.submit(data)

        task_uuids = []
        for result in r["results"]:
            task_id = result["task_uuid"]
            task_uuids.append(task_id)
            if result["http_status_code"] != 200:
                # this method of handling errors for a batch response is not
                # ideal, as it will raise any error in the multi-response,
                # but it will do until batch_run is deprecated in favor of the Executor
                handle_response_errors(result)

        if self.asynchronous:
            task_group_id = r["task_group_id"]
            asyncio_tasks = []
            for task_id in task_uuids:
                funcx_task = FuncXTask(task_id)
                asyncio_task = self.loop.create_task(funcx_task.get_result())
                asyncio_tasks.append(asyncio_task)

                self.ws_polling_task.add_task(funcx_task)
            self.ws_polling_task.put_task_group_id(task_group_id)
            return asyncio_tasks

        return task_uuids

    def map_run(
        self, *args, endpoint_id=None, function_id=None, asynchronous=False, **kwargs
    ):
        """Initiate an invocation

        Parameters
        ----------
        *args : Any
            Args as specified by the function signature
        endpoint_id : uuid str
            Endpoint UUID string. Required
        function_id : uuid str
            Function UUID string. Required
        asynchronous : bool
            Whether or not to run the function asynchronously

        Returns
        -------
        task_ids : list of str
            UUID strings that identify the submitted tasks
        """
        assert endpoint_id is not None, "endpoint_id key-word argument must be set"
        assert function_id is not None, "function_id key-word argument must be set"

        ser_kwargs = self.fx_serializer.serialize(kwargs)

        batch_payload = []
        iterator = args[0]
        for arg in iterator:
            ser_args = self.fx_serializer.serialize((arg,))
            payload = self.fx_serializer.pack_buffers([ser_args, ser_kwargs])
            batch_payload.append(payload)

        data = {
            "endpoints": [endpoint_id],
            "func": function_id,
            "payload": batch_payload,
            "is_async": asynchronous,
        }

        # Send the data to funcX
        r = self.web_client.submit_batch(data)
        return r["task_uuids"]

    def register_endpoint(
        self, name, endpoint_uuid, metadata=None, endpoint_version=None
    ):
        """Register an endpoint with the funcX service.

        Parameters
        ----------
        name : str
            Name of the endpoint
        endpoint_uuid : str
            The uuid of the endpoint
        metadata : dict
            endpoint metadata, see default_config example
        endpoint_version: str
            Version string to be passed to the webService as a compatibility check

        Returns
        -------
        A dict
            {'endpoint_id' : <>,
             'address' : <>,
             'client_ports': <>}
        """
        self.version_check()

        r = self.web_client.register_endpoint(
            endpoint_name=name,
            endpoint_id=endpoint_uuid,
            metadata=metadata,
            endpoint_version=endpoint_version,
        )
        return r.data

    def get_containers(self, name, description=None):
        """Register a DLHub endpoint with the funcX service and get the containers to launch.

        Parameters
        ----------
        name : str
            Name of the endpoint
        description : str
            Description of the endpoint

        Returns
        -------
        tuple
            The endpoint uuid and a list of containers to launch
        """
        data = {"endpoint_name": name, "description": description}

        r = self.web_client.post("get_containers", data=data)
        return r.data["endpoint_uuid"], r.data["endpoint_containers"]

    def get_container(self, container_uuid, container_type):
        """Get the details of a container for staging it locally.

        Parameters
        ----------
        container_uuid : str
            UUID of the container in question
        container_type : str
            The type of containers that will be used (Singularity, Shifter, Docker)

        Returns
        -------
        dict
            The details of the containers to deploy
        """
        self.version_check()

        r = self.web_client.get(f"containers/{container_uuid}/{container_type}")
        return r.data["container"]

    def get_endpoint_status(self, endpoint_uuid):
        """Get the status reports for an endpoint.

        Parameters
        ----------
        endpoint_uuid : str
            UUID of the endpoint in question

        Returns
        -------
        dict
            The details of the endpoint's stats
        """
        r = self.web_client.get_endpoint_status(endpoint_uuid)
        return r.data

    def register_function(
        self,
        function,
        function_name=None,
        container_uuid=None,
        description=None,
        public=False,
        group=None,
        searchable=True,
    ):
        """Register a function code with the funcX service.

        Parameters
        ----------
        function : Python Function
            The function to be registered for remote execution
        function_name : str
            The entry point (function name) of the function. Default: None
        container_uuid : str
            Container UUID from registration with funcX
        description : str
            Description of the function
        public : bool
            Whether or not the function is publicly accessible. Default = False
        group : str
            A globus group uuid to share this function with
        searchable : bool
            If true, the function will be indexed into globus search with the
            appropriate permissions

        Returns
        -------
        function uuid : str
            UUID identifier for the registered function
        """
        data = FunctionRegistrationData(
            function=function,
            failover_source="",
            container_uuid=container_uuid,
            entry_point=function_name,
            description=description,
            public=public,
            group=group,
            searchable=searchable,
            serializer=self.fx_serializer,
        )
        logger.info(f"Registering function : {data}")
        r = self.web_client.register_function(data)
        return r.data["function_uuid"]

    def search_function(self, q, offset=0, limit=10, advanced=False):
        """Search for function via the funcX service

        Parameters
        ----------
        q : str
            free-form query string
        offset : int
            offset into total results
        limit : int
            max number of results to return
        advanced : bool
            allows elastic-search like syntax in query string

        Returns
        -------
        FunctionSearchResults
        """
        return self.searcher.search_function(
            q, offset=offset, limit=limit, advanced=advanced
        )

    def search_endpoint(self, q, scope="all", owner_id=None):
        """

        Parameters
        ----------
        q
        scope : str
            Can be one of {'all', 'my-endpoints', 'shared-with-me'}
        owner_id
            should be urn like f"urn:globus:auth:identity:{owner_uuid}"

        Returns
        -------

        """
        return self.searcher.search_endpoint(q, scope=scope, owner_id=owner_id)

    def register_container(self, location, container_type, name="", description=""):
        """Register a container with the funcX service.

        Parameters
        ----------
        location : str
            The location of the container (e.g., its docker url). Required
        container_type : str
            The type of containers that will be used (Singularity, Shifter, Docker).
            Required

        name : str
            A name for the container. Default = ''
        description : str
            A description to associate with the container. Default = ''

        Returns
        -------
        str
            The id of the container
        """
        payload = {
            "name": name,
            "location": location,
            "description": description,
            "type": container_type,
        }

        r = self.web_client.post("containers", data=payload)
        return r.data["container_id"]

    def add_to_whitelist(self, endpoint_id, function_ids):
        """Adds the function to the endpoint's whitelist

        Parameters
        ----------
        endpoint_id : str
            The uuid of the endpoint
        function_ids : list
            A list of function ids to be whitelisted

        Returns
        -------
        json
            The response of the request
        """
        return self.web_client.whitelist_add(endpoint_id, function_ids)

    def get_whitelist(self, endpoint_id):
        """List the endpoint's whitelist

        Parameters
        ----------
        endpoint_id : str
            The uuid of the endpoint

        Returns
        -------
        json
            The response of the request
        """
        return self.web_client.get_whitelist(endpoint_id)

    def delete_from_whitelist(self, endpoint_id, function_ids):
        """List the endpoint's whitelist

        Parameters
        ----------
        endpoint_id : str
            The uuid of the endpoint
        function_ids : list
            A list of function ids to be removed from the whitelist

        Returns
        -------
        json
            The response of the request
        """
        if not isinstance(function_ids, list):
            function_ids = [function_ids]
        res = []
        for fid in function_ids:
            res.append(self.web_client.whitelist_remove(endpoint_id, fid))
        return res