Example 1
def get_postprocessing_stack(username, stack_id):
    """
    Fetches the Postprocessing Stack with the given ID and username from the postprocessing server

    :param stack_id: The ID of the stack to fetch

    :param username: The username of the user who owns this stack

    :raises NotFoundError: if the requested postprocessing stack is not found

    :raises UnavailableError: if the Postprocessing service is not reachable

    :return: Instance of :class:`server.gen.phenopipe_r_pb2.PostprocessingStack`
    """
    r_stub = phenopipe_r_pb2_grpc.PhenopipeRStub(get_r_channel())
    try:
        response = r_stub.GetPostprocessingStack(
            phenopipe_r_pb2.GetPostprocessingStackRequest(stack_id=stack_id,
                                                          author=username))

        return response.stack
    except grpc.RpcError as e:
        if e.code() == grpc.StatusCode.NOT_FOUND:
            raise NotFoundError(e.details(), "PostprocessingStack")
        elif e.code() == grpc.StatusCode.UNAVAILABLE:
            raise UnavailableError("Postprocessing Service")
        raise  # TODO other error options?
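# Usage sketch (illustrative addition, not part of the original example): assuming
# get_postprocessing_stack and the NotFoundError/UnavailableError classes are
# importable from the surrounding project modules, a caller could translate the
# domain errors into user-facing messages like this. 'describe_stack' is a
# hypothetical helper name introduced only for this sketch.
def describe_stack(username, stack_id):
    try:
        stack = get_postprocessing_stack(username, stack_id)
        return '{}: {}'.format(stack.name, stack.description)
    except NotFoundError:
        return 'Postprocessing stack {} not found'.format(stack_id)
    except UnavailableError:
        return 'Postprocessing service is currently unreachable'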
def upload_stack(stack):
    """
    Uploads the given Postprocessing Stack to the Postprocessing server via GRPC

    :param stack: Instance of :class:`server.gen.phenopipe_r_pb2.PostprocessingStack`

    :return: The ID of the uploaded stack
    """
    r_stub = phenopipe_r_pb2_grpc.PhenopipeRStub(get_r_channel())
    try:
        scripts = []
        for index, script in enumerate(stack.scripts):
            scripts.append(phenopipe_r_pb2.PostprocessingScript(name=script.name, description=script.description,
                                                                index=index, file=script.file))
        stack = phenopipe_r_pb2.PostprocessingStack(name=stack.name, description=stack.description,
                                                    author=stack.author, scripts=scripts)
        response = r_stub.UploadPostprocessingStack(
            phenopipe_r_pb2.UploadPostprocessingStackRequest(stack=stack)
        )
        stack_id = response.stack_id
        print(stack_id)

        return stack_id
    except grpc.RpcError as e:
        if e.code() == grpc.StatusCode.UNAVAILABLE:
            raise UnavailableError("Postprocessing Service")
        elif e.code() == grpc.StatusCode.ALREADY_EXISTS:
            raise PostprocessingStackAlreadyExistsError(e.details(), e.initial_metadata()[0][1],
                                                        e.initial_metadata()[1][1])
        raise  # re-raise unexpected gRPC errors instead of silently returning None
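# Usage sketch (illustrative addition, not part of the original example): upload_stack
# only reads the name/description/author/scripts attributes of its argument, so a
# caller could pass simple namedtuple-based objects built from user input and handle
# the duplicate case. 'Script', 'Stack' and 'upload_new_stack' are hypothetical names
# introduced only for this sketch.
from collections import namedtuple

Script = namedtuple('Script', ['name', 'description', 'file'])
Stack = namedtuple('Stack', ['name', 'description', 'author', 'scripts'])

def upload_new_stack(name, description, author, script_dicts):
    stack = Stack(name=name, description=description, author=author,
                  scripts=[Script(name=s['name'],
                                  description=s.get('description', ''),
                                  file=s['file'])
                           for s in script_dicts])
    try:
        return upload_stack(stack)
    except PostprocessingStackAlreadyExistsError:
        return None  # the stack already exists on the server
    except UnavailableError:
        return None  # postprocessing service unreachable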
Example 3
def get_postprocessing_stacks(username):
    """
    Fetches all available Postprocessing Stacks from the postprocessing server

    :raises UnavailableError: if the Postprocessing service is not reachable

    :return: List of instances of :class:`server.gen.phenopipe_r_pb2.PostprocessingStack`
    """
    r_stub = phenopipe_r_pb2_grpc.PhenopipeRStub(get_r_channel())
    try:
        response = r_stub.GetPostprocessingStacks(
            phenopipe_r_pb2.GetPostprocessingStacksRequest(author=username))
        stacks = response.stacks
        return stacks
    except grpc.RpcError as e:
        if e.code() == grpc.StatusCode.UNAVAILABLE:
            raise UnavailableError("Postprocessing Service")
        raise  # TODO other error options?
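# Usage sketch (illustrative addition, not part of the original example): the return
# value is the repeated 'stacks' field of the gRPC response, so it can be iterated
# directly. 'list_stack_names' is a hypothetical helper name introduced for this sketch.
def list_stack_names(username):
    try:
        return [stack.name for stack in get_postprocessing_stacks(username)]
    except UnavailableError:
        return []  # treat an unreachable service as "no stacks available"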
Example 4
def invoke_r_postprocess(experiment_name, postprocess_id, analysis_id,
                         excluded_plant_names, path_to_report,
                         postprocessing_stack_id, postprocessing_stack_name,
                         username, task_key):
    """
    This Methods represents an RQ Job workload. It should be enqueued into the RQ Postprocessing Queue and processed by an according worker

    Handles the invokation of a postprocess on the postprocessing server and fetches the result information afterwards.
    The received information is then entered into the database accordingly

    :param experiment_name: The name of the experiment this postprocess belongs to
    :param postprocess_id: The ID of this postprocess
    :param analysis_id: The ID of the analysis to be postprocessed
    :param excluded_plant_names: The full names (Used as ID by IAP) of all excluded plants
    :param path_to_report: The path to the export path of the analysis. (The folder where the report file of IAP is saved)
    :param postprocessing_stack_id: The ID of the stack which should be used for postprocessing
    :param username: The username of the user issuing this request

    :return: A dict containing the 'path_to_results', the used 'postprocessing_stack_id' and a timestamps 'started_at' and 'finished_at'
         (All nested inside the 'response' key)
    """
    print('EXECUTE POSTPROCESS')
    job = get_current_job()
    log_store = get_log_store()
    task = PostprocessingTask.from_key(get_redis_connection(), task_key)
    log_store.put(job.id, 'Started Postprocessing Job', 0)
    task.update_message('Started Postprocessing Job')
    channel = get_grpc_channel()
    r_stub = phenopipe_r_pb2_grpc.PhenopipeRStub(channel)
    pipe_stub = phenopipe_pb2_grpc.PhenopipeStub(channel)
    session = get_session()
    # TODO get parameters from postprocess object instead of via function parameters
    postprocess = session.query(PostprocessModel).get(postprocess_id)
    try:
        started_at = datetime.utcnow()
        postprocess.started_at = started_at
        session.commit()
        # TODO pass control group in

        meta = phenopipe_r_pb2.PostprocessingMetadata(
            experiment_name=experiment_name,
            control_treatment_name=postprocess.control_group.treatment)
        response = r_stub.PostprocessAnalysis(
            phenopipe_r_pb2.PostprocessRequest(
                path_to_report=path_to_report,
                postprocess_stack_id=postprocessing_stack_id,
                snapshot_hash=postprocess.snapshot_hash,
                meta=meta,
                excluded_plant_identifiers=excluded_plant_names))
        task.update_message('Started Postprocessing Stack "{}"'.format(
            postprocessing_stack_name))
        log_store.put(
            job.id, 'Started Postprocessing Stack "{}"'.format(
                postprocessing_stack_name), 0)
        remote_job_id = response.job_id
        request = phenopipe_pb2.WatchJobRequest(job_id=remote_job_id)
        status = pipe_stub.WatchJob(request)
        for msg in status:
            log_store.put(job.id, msg.message.decode('string-escape'),
                          msg.progress)

        response = r_stub.FetchPostprocessingResult(
            phenopipe_pb2.FetchJobResultRequest(job_id=remote_job_id))
        finished_at = datetime.utcnow()
        postprocess.finished_at = finished_at
        task.update_message('Finished Postprocessing Job')
        log_store.put(job.id, 'Finished Postprocessing Job', 100)
        postprocess.result_path = response.path_to_results
        session.commit()
        return create_return_object(
            JobType.r_postprocess, analysis_id, {
                'path_to_results': response.path_to_results,
                'postprocess_stack_id': postprocessing_stack_id,
                'started_at': started_at,
                'finished_at': finished_at
            })
    except grpc.RpcError as e:
        session.delete(session.query(PostprocessModel).get(postprocess.id))
        session.commit()
        log_store.put(job.id, e.details(), 0)
        task.update_message('Postprocessing Job Failed')
        print(e.details())
        raise
    except DBAPIError as err:
        # TODO handle this
        print(str(err))
        session.rollback()
        log_store.put(job.id, str(err), 0)  # log the database error itself; there is no gRPC error object here
        task.update_message('Postprocessing Job Failed')
        raise
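# Usage sketch (illustrative addition, not part of the original example):
# invoke_r_postprocess is meant to be enqueued as an RQ job rather than called
# directly. Assuming a Redis connection and a queue named 'postprocessing' (both
# hypothetical here), enqueueing could look like this; the keyword arguments mirror
# the function signature above. 'enqueue_postprocess' is a hypothetical helper name.
from redis import Redis
from rq import Queue

def enqueue_postprocess(params, redis_connection=None):
    queue = Queue('postprocessing', connection=redis_connection or Redis())
    return queue.enqueue(invoke_r_postprocess,
                         experiment_name=params['experiment_name'],
                         postprocess_id=params['postprocess_id'],
                         analysis_id=params['analysis_id'],
                         excluded_plant_names=params['excluded_plant_names'],
                         path_to_report=params['path_to_report'],
                         postprocessing_stack_id=params['postprocessing_stack_id'],
                         postprocessing_stack_name=params['postprocessing_stack_name'],
                         username=params['username'],
                         task_key=params['task_key'])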