Example No. 1
def check_batch_status(task_ids):
    """
    Check the status of an entire batch of tasks. Return True only if ALL of them are complete.

    Parameters
    ----------
    task_ids : [str]
        The task ids to check

    Returns
    -------
    bool
        True if every task in the batch has a result
    """

    rc = get_redis_client()

    try:
        for task_id in task_ids:
            app.logger.debug(f"Checking task id for: task_{task_id}")
            result_obj = rc.hget(f"task_{task_id}", 'result')
            app.logger.debug(f"Batch Result_obj : {result_obj}")
            if not result_obj:
                return False
    except Exception as e:
        app.logger.error(f"Failed to check batch status: {e}")
        return False
    return True
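The helper above only inspects the 'result' field of the task_{task_id} hash. Below is a minimal sketch of that key layout, using a plain redis-py client in place of get_redis_client() (the Flask app and that helper belong to the surrounding service and are not reproduced here); the task ids are placeholders.

import json
import redis

# Plain redis-py client standing in for get_redis_client()
rc = redis.StrictRedis(host="localhost", port=6379, decode_responses=True)

# A finished task: its serialized result sits under the 'result' field
rc.hset("task_abc123", "result", json.dumps({"status": "SUCCESS", "result": "42"}))

# A pending task has no 'result' field yet, so hget returns None and
# check_batch_status would report the batch as incomplete.
print(rc.hget("task_abc123", "result"))   # the JSON string written above
print(rc.hget("task_missing", "result"))  # None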
Example No. 2
def result(user_name, task_id):
    """Check the status of a task.

    Parameters
    ----------
    user_name : str
        The primary identity of the user
    task_id : str
        The task uuid to look up

    Returns
    -------
    json
        The status of the task
    """

    if not user_name:
        abort(400, description="Could not find user. You must be "
                               "logged in to perform this function.")

    try:
        # Get a redis client
        rc = get_redis_client()

        details = {}

        # Get the task from redis
        try:
            result_obj = rc.hget(f"task_{task_id}", 'result')
            app.logger.debug(f"ResulOBt_obj : {result_obj}")
            if result_obj:
                task = json.loads(result_obj)
            else:
                task = {'status': 'PENDING'}
        except Exception as e:
            app.logger.error(f"Failed to fetch results for {task_id} due to {e}")
            task = {'status': 'FAILED', 'reason': 'Unknown task id'}

        res = {'task_id': task_id}
        if 'status' in task:
            res['status'] = task['status']

        if 'result' in task:
            details['result'] = task['result']
        if 'reason' in task:
            details['reason'] = task['reason']

        if details:
            res.update({'details': details})

        app.logger.debug("Status Response: {}".format(str(res)))
        return jsonify(res)

    except Exception as e:
        app.logger.error(e)
        return jsonify({'status': 'Failed',
                        'reason': 'InternalError: {}'.format(e)})
Example No. 3
def status_and_result(user_name, task_id):
    """Check the status of a task.  Return result if available.

    If the query param deserialize=True is passed, then we deserialize the result object.

    Parameters
    ----------
    user_name : str
        The primary identity of the user
    task_id : str
        The task uuid to look up

    Returns
    -------
    json
        The status of the task
    """
    rc = get_redis_client()

    if not Task.exists(rc, task_id):
        abort(400, "task_id not found")

    task = Task.from_id(rc, task_id)
    task_status = task.status
    task_result = task.result
    task_exception = task.exception
    task_completion_t = task.completion_time
    if task_result or task_exception:
        task.delete()

    # Query params arrive as strings, so compare explicitly rather than relying
    # on truthiness (any non-empty value would otherwise count as True).
    deserialize = request.args.get("deserialize", "False").lower() == "true"
    if deserialize and task_result:
        task_result = deserialize_result(task_result)

    # TODO: change client to have better naming conventions
    # these fields like 'status' should be changed to 'task_status', because 'status' is normally
    # used for HTTP codes.
    response = {
        'task_id': task_id,
        'status': task_status,
        'result': task_result,
        'completion_t': task_completion_t,
        'exception': task_exception
    }

    # Note: for backwards compatibility. Since we cannot include a None result
    # alongside a non-complete status, we omit the result field when the task
    # is not complete.
    if task_result is None:
        del response['result']

    if task_exception is None:
        del response['exception']

    return jsonify(response)
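A hedged client-side sketch of passing the deserialize flag; the host and route path below are placeholders, since the URL mapping for this view is not shown in these examples.

import requests

base = "http://localhost:5000"   # placeholder deployment URL
task_id = "abc123"               # placeholder task uuid

# Without the flag, the serialized result is returned as-is.
raw = requests.get(f"{base}/tasks/{task_id}").json()

# With deserialize=true, the service runs deserialize_result() before replying.
plain = requests.get(f"{base}/tasks/{task_id}", params={"deserialize": "True"}).json()
print(plain.get("status"), plain.get("result"))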
Example No. 4
def release(user_name, task_id):
    """
    Release the task. This does nothing as we already released the task.
    """

    automate_response = {
        "details": None,
        "status": "SUCCEEDED",
        "action_id": task_id,
        "release_after": 'P30D'
    }

    rc = get_redis_client()

    task_results = None
    # check if it is a batch:
    try:
        task_ids = rc.hget(f"batch_{task_id}", "batch")
        app.logger.info(f"batch task_ids: {task_ids}")

        if task_ids:
            task_ids = json.loads(task_ids)
            # Check the status on all the tasks.
            batch_done = check_batch_status(task_ids)
            if batch_done:
                # Get all of their results
                task_results = []
                for tid in task_ids:
                    task = get_task_result(tid)
                    task['task_id'] = tid
                    task_results.append(task)

                # If it is done, return it all
                automate_response['details'] = task_results
                # They all have a success status
                automate_response['status'] = task['status']
        else:
            # it is not a batch, get the single task result
            task = get_task_result(task_id)
            task['task_id'] = task_id

            automate_response['details'] = task
            automate_response['status'] = task['status']
    except Exception as e:
        app.logger.error(e)
        return jsonify({
            'status': 'Failed',
            'reason': 'InternalError: {}'.format(e)
        })

    return json.dumps(automate_response)
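Both release() above and the batch branch of the Automate status() view read the member task ids from the batch_<action_id> hash. A minimal sketch of that key follows, assuming a local Redis; it mirrors the hset call that run() performs when it registers a batch, with placeholder ids.

import json
import redis

rc = redis.StrictRedis(host="localhost", port=6379, decode_responses=True)

action_id = "batch-uuid"                       # placeholder
member_tasks = ["task-uuid-1", "task-uuid-2"]  # placeholders

# run() stores the member task ids as a JSON list under the 'batch' field.
rc.hset(f"batch_{action_id}", "batch", json.dumps(member_tasks))

# release()/status() read the list back before polling each task.
task_ids = json.loads(rc.hget(f"batch_{action_id}", "batch"))
print(task_ids)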
Example No. 5
def get_tasks_from_redis(task_ids):

    all_tasks = {}
    try:
        # Get a redis client
        rc = get_redis_client()
        for task_id in task_ids:

            # Get the task from redis
            try:
                result_obj = rc.hget(f"task_{task_id}", 'result')
                if result_obj:
                    task = json.loads(result_obj)
                    all_tasks[task_id] = task
                    all_tasks[task_id]['task_id'] = task_id
                else:
                    task = {'status': 'PENDING'}
                    all_tasks[task_id] = task
            except Exception as e:
                app.logger.error(
                    f"Failed to fetch results for {task_id} due to {e}")
                task = {'status': 'FAILED', 'reason': 'Unknown task id'}
                all_tasks[task_id] = task
            else:
                if result_obj:
                    # Task complete, attempt flush
                    try:
                        rc.delete(f"task_{task_id}")
                    except Exception as e:
                        app.logger.warning(
                            f"Failed to delete Task:{task_id} due to {e}. Ignoring..."
                        )

        return all_tasks

    except Exception as e:
        app.logger.error(e)
        return {
            'status': 'Failed',
            'reason': 'InternalError: {}'.format(e),
            'partial': all_tasks
        }
Example No. 6
def status(user_name, task_id):
    """Check the status of a task.

    Parameters
    ----------
    user_name : str
        The primary identity of the user
    task_id : str
        The task uuid to look up

    Returns
    -------
    json
        'status' : task status
    """
    rc = get_redis_client()

    if not Task.exists(rc, task_id):
        abort(400, "task_id not found")
    task = Task.from_id(rc, task_id)

    return jsonify({
        'status': task.status
    })
Example No. 7
def get_tasks_from_redis(task_ids):
    all_tasks = {}

    rc = get_redis_client()
    for task_id in task_ids:
        # Get the task from redis
        if not Task.exists(rc, task_id):
            all_tasks[task_id] = {
                'status': 'failed',
                'reason': 'unknown task id'
            }
            continue

        task = Task.from_id(rc, task_id)
        task_status = task.status
        task_result = task.result
        task_exception = task.exception
        task_completion_t = task.completion_time
        if task_result or task_exception:
            task.delete()

        all_tasks[task_id] = {
            'task_id': task_id,
            'status': task_status,
            'result': task_result,
            'completion_t': task_completion_t,
            'exception': task_exception
        }

        # Note: for backwards compatibility. Since we cannot include a None result
        # alongside a non-complete status, we omit the result field when the task
        # is not complete.
        if task_result is None:
            del all_tasks[task_id]['result']

        # Similarly, omit the exception field when the task has not raised one.
        if task_exception is None:
            del all_tasks[task_id]['exception']
    return all_tasks
Example No. 8
def get_task_result(task_id, delete=True):
    """Check the status of a task. Return result if available.

    If a result is available, it is deserialized before being returned.

    Parameters
    ----------
    task_id : str
        The task uuid to look up
    delete : bool
        Whether or not to remove the task from the database

    Returns
    -------
    json
        The task as a dict
    """
    rc = get_redis_client()

    if not Task.exists(rc, task_id):
        abort(400, "task_id not found")

    task_dict = {}

    task = Task.from_id(rc, task_id)
    task_dict['status'] = convert_automate_status(task.status)
    task_dict['result'] = task.result
    task_dict['exception'] = task.exception
    task_dict['completion_t'] = task.completion_time
    if (task_dict['result'] or task_dict['exception']) and delete:
        task.delete()

    if task_dict['result']:
        task_dict['result'] = deserialize_result(task_dict['result'])

    return task_dict
Example No. 9
def get_ep_stats(user_name, endpoint_id):
    """Retrieve the status updates from an endpoint.

    Parameters
    ----------
    user_name : str
        The primary identity of the user
    endpoint_id : str
        The endpoint uuid to look up

    Returns
    -------
    json
        The status of the endpoint
    """
    alive_threshold = 2 * 60  # time in seconds since last heartbeat to be counted as alive
    last = 10

    if not user_name:
        abort(400, description="Could not find user. You must be "
                               "logged in to perform this function.")

    try:
        user_id = resolve_user(user_name)
    except Exception:
        app.logger.error("Failed to resolve user_name to user_id")
        return jsonify({'status': 'Failed',
                        'reason': 'Failed to resolve user_name:{}'.format(user_name)})

    # Extract the token for endpoint verification
    token_str = request.headers.get('Authorization')
    token = str.replace(str(token_str), 'Bearer ', '')

    if not authorize_endpoint(user_id, endpoint_id, None, token):
        return jsonify({'status': 'Failed',
                        'reason': f'Unauthorized access to endpoint: {endpoint_id}'})

    # TODO add rc to g.
    rc = get_redis_client()

    status = {'status': 'offline', 'logs': []}
    try:
        end = min(rc.llen(f'ep_status_{endpoint_id}'), last)
        app.logger.debug(f"Total len : {end}")
        # LRANGE's end index is inclusive, so stop at end - 1 to fetch at most `last` items
        items = rc.lrange(f'ep_status_{endpoint_id}', 0, end - 1)
        if items:
            for i in items:
                status['logs'].append(json.loads(i))

            # timestamp is created using time.time(), which returns seconds since epoch UTC
            logs = status['logs']  # should have been json loaded already
            newest_timestamp = logs[0]['timestamp']
            now = time.time()
            if now - newest_timestamp < alive_threshold:
                status['status'] = 'online'

    except Exception as e:
        app.logger.error("Unable to retrieve ")
        status = {'status': 'Failed',
                  'reason': f'Unable to retrieve endpoint stats: {endpoint_id}. {e}'}

    return jsonify(status)
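The liveness check above expects the newest heartbeat at index 0 of the ep_status_<endpoint_id> list, each entry being a JSON blob whose 'timestamp' comes from time.time(). A minimal sketch of that layout, assuming heartbeats are pushed with LPUSH (the producer side is not shown in these examples); the endpoint id is a placeholder.

import json
import time
import redis

rc = redis.StrictRedis(host="localhost", port=6379, decode_responses=True)
endpoint_id = "ep-1234"  # placeholder uuid

# LPUSH keeps the most recent heartbeat at index 0, which is what
# get_ep_stats reads to decide between 'online' and 'offline'.
rc.lpush(f"ep_status_{endpoint_id}", json.dumps({"timestamp": time.time()}))

newest = json.loads(rc.lrange(f"ep_status_{endpoint_id}", 0, 0)[0])
alive = (time.time() - newest["timestamp"]) < 2 * 60
print("online" if alive else "offline")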
Example No. 10
def auth_and_launch(user_id, function_uuid, endpoints, input_data, app, token, serialize=None):
    """ Here we do basic authz for (user, fn, endpoint(s)) and launch the functions

    Parameters
    ==========

    user_id : str
       user id
    function_uuid : str
       uuid string for functions
    endpoints : [str]
       endpoint_uuid as list
    input_data: [string_buffers]
       input_data as a list in case many function launches are to be made
    app : app object
    token : globus token
    serialize : bool
        Whether or not to serialize the input using the serialization service. This is used
        when the input is not already serialized by the SDK.

    Returns:
       json object
    """
    # Check if the user is allowed to access the function
    if not authorize_function(user_id, function_uuid, token):
        return {'status': 'Failed',
                'reason': f'Unauthorized access to function: {function_uuid}'}

    try:
        fn_code, fn_entry, container_uuid = resolve_function(user_id, function_uuid)
    except Exception as e:
        return {'status': 'Failed',
                'reason': f'Function UUID:{function_uuid} could not be resolved. {e}'}

    # Make sure the user is allowed to use the function on this endpoint
    for ep in endpoints:
        if not authorize_endpoint(user_id, ep, function_uuid, token):
            return {'status': 'Failed',
                    'reason': f'Unauthorized access to endpoint: {ep}'}

    app.logger.debug(f"Got function container_uuid :{container_uuid}")

    # We should replace this with container_hdr = ";ctnr={container_uuid}"
    if not container_uuid:
        container_uuid = 'RAW'

    # We should replace this with serialize_hdr = ";srlz={container_uuid}"
    # TODO: this is deprecated.
    serializer = "ANY"

    # TODO: Store redis connections in g
    rc = get_redis_client()

    if isinstance(input_data, list):
        input_data_items = input_data
    else:
        input_data_items = [input_data]

    task_ids = []

    db_logger = get_db_logger()
    ep_queue = {}
    for ep in endpoints:
        redis_task_queue = EndpointQueue(
            ep,
            hostname=app.config['REDIS_HOST'],
            port=app.config['REDIS_PORT']
        )
        redis_task_queue.connect()
        ep_queue[ep] = redis_task_queue

    for input_data in input_data_items:
        if serialize:
            res = serialize_inputs(input_data)
            if res:
                input_data = res

        # At this point the packed function body and the args are strings that can be concatenated
        payload = fn_code + input_data
        task_id = str(uuid.uuid4())
        task = Task(rc, task_id, container_uuid, serializer, payload)

        for ep in endpoints:
            ep_queue[ep].enqueue(task)
            app.logger.debug(f"Task:{task_id} placed on queue for endpoint:{ep}")

            # TODO: creating these connections each will be slow.
            # increment the counter
            rc.incr('funcx_invocation_counter')
            # add an invocation to the database
            # log_invocation(user_id, task_id, function_uuid, ep)
            db_logger.log(user_id, task_id, function_uuid, ep, deferred=True)

        task_ids.append(task_id)
    db_logger.commit()

    return {'status': 'Success',
            'task_uuids': task_ids}
Example No. 11
def run(user_name):
    """Puts a job in Redis and returns an id

    Parameters
    ----------
    user_name : str
        The primary identity of the user
    Returns
    -------
    json
        The task document
    """

    app.logger.debug(f"Automate submit invoked by user:{user_name}")

    if not user_name:
        abort(400,
              description="Could not find user. You must be "
              "logged in to perform this function.")
    try:
        user_id = resolve_user(user_name)
    except Exception:
        app.logger.error("Failed to resolve user_name to user_id")
        return jsonify({
            'status':
            'Failed',
            'reason':
            'Failed to resolve user_name:{}'.format(user_name)
        })

    # Extract the token for endpoint verification
    token_str = request.headers.get('Authorization')
    token = str.replace(str(token_str), 'Bearer ', '')

    # Parse out the function info
    tasks = []
    try:
        post_req = request.json['body']
        if 'tasks' in post_req:
            tasks = post_req.get('tasks', [])
        else:
            # Check if the old client was used and create a new task
            function_uuid = post_req.get('func', None)
            endpoint = post_req.get('endpoint', None)
            input_data = post_req.get('payload', None)
            tasks.append({
                'func': function_uuid,
                'endpoint': endpoint,
                'payload': input_data
            })

        # Sets serialize to True by default
        serialize = post_req.get('serialize', True)
    except KeyError as e:
        return jsonify({
            'status': 'Failed',
            'reason': "Missing Key {}".format(str(e))
        })
    except Exception as e:
        return jsonify({
            'status':
            'Failed',
            'reason':
            'Request Malformed. Missing critical information: {}'.format(
                str(e))
        })

    results = {'status': 'Success', 'task_uuids': []}
    app.logger.info(f'tasks to submit: {tasks}')
    for task in tasks:
        res = auth_and_launch(user_id,
                              task['func'], [task['endpoint']],
                              task['payload'],
                              app,
                              token,
                              serialize=serialize)
        if res.get('status', 'Failed') != 'Success':
            return res
        else:
            results['task_uuids'].extend(res['task_uuids'])

    # if the batch size is just one, we can return it as the action id
    if len(results['task_uuids']) == 1:
        action_id = results['task_uuids'][0]
    else:
        # Otherwise we need to create an action id for the batch
        action_id = str(uuid.uuid4())
        # Now store the list of ids in redis with this batch id
        rc = get_redis_client()
        rc.hset(f'batch_{action_id}', 'batch',
                json.dumps(results['task_uuids']))

    automate_response = {
        "status": 'ACTIVE',
        "action_id": action_id,
        "details": None,
        "release_after": 'P30D',
        "start_time": str(datetime.datetime.utcnow())
    }
    app.logger.debug(f"Automate response: {automate_response}")
    return jsonify(automate_response)
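For clarity, the request body that run() parses is nested under a 'body' key and can take either a 'tasks' list or the older single-task form. A sketch of both shapes follows; every value is a placeholder.

# Batch form: each entry names a function, an endpoint, and its payload.
batch_request = {
    "body": {
        "tasks": [
            {"func": "<function-uuid>", "endpoint": "<endpoint-uuid>", "payload": "<packed-args>"},
            {"func": "<function-uuid>", "endpoint": "<endpoint-uuid>", "payload": "<packed-args>"},
        ],
        "serialize": True,  # optional; defaults to True when omitted
    }
}

# Legacy single-task form: run() wraps this into a one-element tasks list.
legacy_request = {
    "body": {"func": "<function-uuid>", "endpoint": "<endpoint-uuid>", "payload": "<packed-args>"}
}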
Example No. 12
def status(user_name, task_id):
    """Check the status of a task.

    Parameters
    ----------
    user_name : str
        The primary identity of the user
    task_id : str
        The task uuid to look up

    Returns
    -------
    json
        The status of the task
    """

    if not user_name:
        abort(400,
              description="Could not find user. You must be "
              "logged in to perform this function.")

    automate_response = {
        "details": None,
        "status": "ACTIVE",
        "action_id": task_id,
        "release_after": 'P30D'
    }

    rc = get_redis_client()

    task_results = None
    # check if it is a batch:
    try:
        task_ids = rc.hget(f"batch_{task_id}", "batch")
        app.logger.info(f"batch task_ids: {task_ids}")

        if task_ids:
            task_ids = json.loads(task_ids)
            # Check the status on all the tasks.
            batch_done = check_batch_status(task_ids)
            if batch_done:
                # Get all of their results
                task_results = []
                for tid in task_ids:
                    task = get_task_result(tid, delete=False)
                    task['task_id'] = tid
                    task_results.append(task)

                # If it is done, return it all
                automate_response['details'] = task_results
                # They all have a success status
                automate_response['status'] = task['status']
        else:
            # it is not a batch, get the single task result
            task = get_task_result(task_id, delete=False)
            task['task_id'] = task_id

            automate_response['details'] = task
            automate_response['status'] = task['status']
    except Exception as e:
        app.logger.error(e)
        return jsonify({
            'status': 'Failed',
            'reason': 'InternalError: {}'.format(e)
        })

    return json.dumps(automate_response)
Example No. 13
def get_ep_stats(user_name, endpoint_id):
    """Retrieve the status updates from an endpoint.

    Parameters
    ----------
    user_name : str
        The primary identity of the user
    endpoint_id : str
        The endpoint uuid to look up

    Returns
    -------
    json
        The status of the endpoint
    """

    last = 10

    if not user_name:
        abort(400,
              description="Could not find user. You must be "
              "logged in to perform this function.")

    try:
        user_id = resolve_user(user_name)
    except Exception:
        app.logger.error("Failed to resolve user_name to user_id")
        return jsonify({
            'status':
            'Failed',
            'reason':
            'Failed to resolve user_name:{}'.format(user_name)
        })

    # Extract the token for endpoint verification
    token_str = request.headers.get('Authorization')
    token = str.replace(str(token_str), 'Bearer ', '')

    if not authorize_endpoint(user_id, endpoint_id, None, token):
        return jsonify({
            'status':
            'Failed',
            'reason':
            f'Unauthorized access to endpoint: {endpoint_id}'
        })

    # TODO add rc to g.
    rc = get_redis_client()

    stats = []
    try:
        end = min(rc.llen(f'ep_status_{endpoint_id}'), last)
        app.logger.debug(f"Total len : {end}")
        # LRANGE's end index is inclusive, so stop at end - 1 to fetch at most `last` items
        items = rc.lrange(f'ep_status_{endpoint_id}', 0, end - 1)
        if items:
            for i in items:
                stats.append(json.loads(i))
    except Exception as e:
        stats = {
            'status': 'Failed',
            'reason': f'Unable to retrieve endpoint stats: {endpoint_id}. {e}'
        }

    return jsonify(stats)
Example No. 14
def get_task(user_name, task_id):
    """Get a task.

    Parameters
    ----------
    user_name : str
        The primary identity of the user
    task_id : str
        The task uuid to look up

    Returns
    -------
    json
        The status of the task
    """

    if not user_name:
        abort(400,
              description="Could not find user. You must be "
              "logged in to perform this function.")

    try:
        # Get a redis client
        rc = get_redis_client()

        # Get the task from redis
        try:
            result_obj = rc.hget(f"task_{task_id}", 'result')
            app.logger.debug(f"Result_obj : {result_obj}")
            if result_obj:
                task = json.loads(result_obj)
                if 'status' not in task:
                    task['status'] = 'COMPLETED'
            else:
                task = {'status': 'PENDING'}
        except Exception as e:
            app.logger.error(
                f"Failed to fetch results for {task_id} due to {e}")
            task = {'status': 'FAILED', 'reason': 'Unknown task id'}
        else:
            if result_obj:
                # Task complete, attempt flush
                try:
                    rc.delete(f"task_{task_id}")
                except Exception as e:
                    app.logger.warning(
                        f"Failed to delete Task:{task_id} due to {e}. Ignoring..."
                    )

        task['task_id'] = task_id

        app.logger.debug("Status Response: {}".format(str(task['status'])))
        return jsonify(task)

    except Exception as e:
        app.logger.error(e)
        return jsonify({
            'status': 'FAILED',
            'reason': 'InternalError: {}'.format(e)
        })
Example No. 15
def auth_and_launch(user_id,
                    function_uuid,
                    endpoints,
                    input_data,
                    app,
                    token,
                    serializer=None):
    """ Here we do basic authz for (user, fn, endpoint(s)) and launch the functions

    Parameters
    ==========

    user_id : str
       user id
    function_uuid : str
       uuid string for functions
    endpoints : [str]
       endpoint_uuid as list
    input_data: [string_buffers]
       input_data as a list in case many function launches are to be made
    app : app object
    token : globus token

    Returns:
       json object
    """
    # Check if the user is allowed to access the function
    if not authorize_function(user_id, function_uuid, token):
        return jsonify({
            'status':
            'Failed',
            'reason':
            f'Unauthorized access to function: {function_uuid}'
        })

    try:
        fn_code, fn_entry, container_uuid = resolve_function(
            user_id, function_uuid)
    except Exception as e:
        return jsonify({
            'status':
            'Failed',
            'reason':
            f'Function UUID:{function_uuid} could not be resolved. {e}'
        })

    # Make sure the user is allowed to use the function on this endpoint
    for ep in endpoints:
        if not authorize_endpoint(user_id, ep, function_uuid, token):
            return jsonify({
                'status': 'Failed',
                'reason': f'Unauthorized access to endpoint: {ep}'
            })

    app.logger.debug("Got function container_uuid :{}".format(container_uuid))

    # We should replace this with container_hdr = ";ctnr={container_uuid}"
    if not container_uuid:
        container_uuid = 'RAW'

    # We should replace this with serialize_hdr = ";srlz={container_uuid}"
    if not serializer:
        serializer = "ANY"

    # TODO: Store redis connections in g
    rc = get_redis_client()

    if isinstance(input_data, list):
        input_data_items = input_data
    else:
        input_data_items = [input_data]

    task_ids = []

    db_logger = get_db_logger()
    ep_queue = {}
    for ep in endpoints:
        redis_task_queue = RedisQueue(f"task_{ep}",
                                      hostname=app.config['REDIS_HOST'],
                                      port=app.config['REDIS_PORT'])
        redis_task_queue.connect()
        ep_queue[ep] = redis_task_queue

    for input_data in input_data_items:
        # Yadu : Remove timers
        timer_s = time.time()
        # At this point the packed function body and the args are strings that can be concatenated
        payload = fn_code + input_data
        task_id = str(uuid.uuid4())
        task_header = f"{task_id};{container_uuid};{serializer}"

        for ep in endpoints:
            ep_queue[ep].put(task_header, 'task', payload)
            app.logger.debug(f"Task:{task_id} forwarded to Endpoint:{ep}")

            # TODO: creating these connections each will be slow.
            # increment the counter
            rc.incr('funcx_invocation_counter')
            # add an invocation to the database
            # log_invocation(user_id, task_id, function_uuid, ep)
            db_logger.log(user_id, task_id, function_uuid, ep, deferred=True)

        task_ids.append(task_id)
        app.logger.debug("Pushed task {} in {}ms".format(
            task_id, 1000 * (time.time() - timer_s)))
        # YADU : Remove timers
    t = time.time()
    db_logger.commit()
    app.logger.debug("db logs committed in {}ms".format(
        1000 * (time.time() - t)))

    return jsonify({'status': 'Success', 'task_uuids': task_ids})
Example No. 16
def submit(user_name):
    """Puts the task request into Redis and returns a task UUID
    Parameters
    ----------
    user_name : str
    The primary identity of the user

    POST payload
    ------------
    {
    }
    Returns
    -------
    json
        The task document
    """
    app.logger.debug(f"Submit invoked by user:{user_name}")

    if not user_name:
        abort(400,
              description="Could not find user. You must be "
              "logged in to perform this function.")
    try:
        user_id = resolve_user(user_name)
    except Exception:
        app.logger.error("Failed to resolve user_name to user_id")
        return jsonify({
            'status':
            'Failed',
            'reason':
            'Failed to resolve user_name:{}'.format(user_name)
        })

    # Extract the token for endpoint verification
    token_str = request.headers.get('Authorization')
    token = str.replace(str(token_str), 'Bearer ', '')

    # Parse out the function info
    try:
        post_req = request.json
        endpoint = post_req['endpoint']
        function_uuid = post_req['func']
        input_data = post_req['payload']
        serializer = None
        if 'serializer' in post_req:
            serializer = post_req['serializer']
    except KeyError as e:
        return jsonify({
            'status': 'Failed',
            'reason': "Missing Key {}".format(str(e))
        })
    except Exception as e:
        return jsonify({
            'status':
            'Failed',
            'reason':
            'Request Malformed. Missing critical information: {}'.format(
                str(e))
        })

    # Check if the user is allowed to access the function
    if not authorize_function(user_id, function_uuid, token):
        return jsonify({
            'status':
            'Failed',
            'reason':
            f'Unauthorized access to function: {function_uuid}'
        })

    try:
        fn_code, fn_entry, container_uuid = resolve_function(
            user_id, function_uuid)
    except Exception as e:
        return jsonify({
            'status':
            'Failed',
            'reason':
            f'Function UUID:{function_uuid} could not be resolved. {e}'
        })

    if isinstance(endpoint, str):
        endpoint = [endpoint]

    # Make sure the user is allowed to use the function on this endpoint
    for ep in endpoint:
        if not authorize_endpoint(user_id, ep, function_uuid, token):
            return jsonify({
                'status': 'Failed',
                'reason': f'Unauthorized access to endpoint: {ep}'
            })

    task_id = str(uuid.uuid4())

    app.logger.debug("Got function container_uuid :{}".format(container_uuid))

    # At this point the packed function body and the args are strings that can be concatenated
    payload = fn_code + input_data
    app.logger.debug("Payload : {}".format(payload))

    if not container_uuid:
        container_uuid = 'RAW'

    if not serializer:
        serializer = "ANY"

    task_header = f"{task_id};{container_uuid};{serializer}"

    # TODO: Store redis connections in g
    rc = get_redis_client()

    for ep in endpoint:
        redis_task_queue = RedisQueue(f"task_{ep}",
                                      hostname=app.config['REDIS_HOST'],
                                      port=app.config['REDIS_PORT'])
        redis_task_queue.connect()

        redis_task_queue.put(task_header, 'task', payload)
        app.logger.debug(f"Task:{task_id} forwarded to Endpoint:{ep}")
        app.logger.debug("Redis Queue : {}".format(redis_task_queue))

        # TODO: creating these connections each will be slow.
        # increment the counter
        rc.incr('funcx_invocation_counter')
        # add an invocation to the database
        log_invocation(user_id, task_id, function_uuid, ep)

    return jsonify({'status': 'Success', 'task_uuid': task_id})
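A hedged client sketch of calling submit(); the host and route path are placeholders, and the function body and arguments are assumed to already be packed into strings the handler can concatenate.

import requests

resp = requests.post(
    "http://localhost:5000/submit",            # placeholder deployment URL and route
    headers={"Authorization": "Bearer <globus-token>"},
    json={
        "endpoint": "<endpoint-uuid>",         # a single uuid or a list of uuids
        "func": "<function-uuid>",
        "payload": "<packed-args>",
        "serializer": None,                    # optional; the service falls back to "ANY"
    },
)
print(resp.json())  # {'status': 'Success', 'task_uuid': '...'} on success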