def status_get():
    """Displays the latest mapping statistics"""

    cached = service_status.get_status()
    if cached is not None:
        return cached

    # No cached status: compute it now. This doubles as a DB connectivity
    # check during the status probe.
    with db.DBConn() as conn:
        try:
            project_count = db.query_db(conn, '''
                            SELECT COUNT(*) FROM projects
                            ''', one=True)['count']
        except psycopg2.errors.UndefinedTable:
            safe_fail_request(500, "DB uninitialized")

        rate = db.get_latest_rate(conn)

    status = {
        'status': 'ok',
        'project_count': project_count,
        'rate': rate
    }
    service_status.set_status(status)
    return status
def _delete_run(run_id, log):
    """Flag a run as deleted and purge its DB rows; return its similarity file path."""
    # Note: callers may retry on DB integrity errors (e.g. when a worker is
    # writing to the same row), backing off exponentially up to 60 seconds.
    set_run_state_deleted(run_id)
    with db.DBConn() as connection:
        log.debug("Retrieving run details from database")
        result_file = get_similarity_file_for_run(connection, run_id)
        delete_run_data(connection, run_id)
    return result_file
def get(project_id, run_id):
    """Return the serialized description of a single run."""
    log = logger.bind(pid=project_id, rid=run_id)
    log.info("request description of a run")

    # Authorization happens before we touch the database.
    authorize_run_detail(project_id, run_id)
    log.debug("request for run description authorized")

    with db.DBConn() as connection:
        log.debug("Retrieving run description from database")
        run = db.get_run(connection, run_id)

    return RunDescription().dump(run)
def authorize_external_upload(project_id):
    """Issue temporary object-store credentials scoped to one data provider's upload path.

    Validates the caller's upload token, assumes a restricted role against the
    upload object store, and returns the temporary credentials together with
    endpoint/bucket/path details as a (body, 201) tuple.

    Fails with a 500 if the object-store upload feature is disabled.
    """
    if not config.UPLOAD_OBJECT_STORE_ENABLED:
        safe_fail_request(
            500,
            message=
            "Retrieving temporary object store credentials feature disabled",
            title="Feature Disabled")

    headers = request.headers

    log, parent_span = bind_log_and_span(project_id)

    log.debug("Authorizing external upload")
    token = precheck_upload_token(project_id, headers, parent_span)
    # Fix: this message has no placeholders, so a plain string literal
    # suffices (was a pointless f-string).
    log.debug("Update token is valid")
    with db.DBConn() as conn:
        dp_id = db.get_dataprovider_id(conn, token)
        log = log.bind(dpid=dp_id)

    with opentracing.tracer.start_span('assume-role-request',
                                       child_of=parent_span):
        client = connect_to_upload_object_store()
        client.set_app_info("anonlink", "development version")

        bucket_name = config.UPLOAD_OBJECT_STORE_BUCKET
        path = object_store_upload_path(project_id, dp_id)
        log.info(
            f"Retrieving temporary object store credentials for path: '{bucket_name}/{path}'"
        )

        # The policy restricts the temporary credentials to this provider's path.
        credentials_provider = AssumeRoleProvider(
            client,
            Policy=_get_upload_policy(bucket_name, path=path),
            DurationSeconds=config.UPLOAD_OBJECT_STORE_STS_DURATION)
        credential_values = Credentials(provider=credentials_provider).get()
        # NOTE(review): reaches into the provider's private attributes to get
        # the expiry time; fragile across library upgrades -- confirm no
        # public accessor exists.
        expiry = credentials_provider._expiry._expiration

        log.info("Retrieved temporary credentials")

    credentials_json = ObjectStoreCredentials().dump(credential_values)
    log.debug("Temp credentials", **credentials_json)

    # Convert datetime to ISO 8601 string (%z renders empty if expiry is a
    # naive datetime -- TODO confirm the provider returns an aware datetime).
    credentials_json["Expiration"] = expiry.strftime('%Y-%m-%dT%H:%M:%S.%f%z')

    return {
        "credentials": credentials_json,
        "upload": {
            "endpoint": config.UPLOAD_OBJECT_STORE_SERVER,
            "secure": config.UPLOAD_OBJECT_STORE_SECURE,
            "bucket": bucket_name,
            "path": path
        }
    }, 201
# Esempio n. 5
# 0
def get(project_id):
    """List all runs belonging to the given project."""
    log, parent_span = bind_log_and_span(project_id)
    log.info("Listing runs for project")

    authorize_run_listing(project_id)

    log.info("Authorized request to list runs")
    with db.DBConn() as connection:
        project_runs = get_runs(connection, project_id)

    return RunListItem(many=True).dump(project_runs)
def get_total_number_of_comparisons(project_id):
    """Return the total number of comparisons for a project, caching in Redis.

    Reads the cached hash field first; on a miss, computes the value from the
    database and writes it back to Redis with a 1 hour expiry.
    """
    r = connect_to_redis(read_only=True)
    key = _get_project_hash_key(project_id)
    res = r.hget(key, 'total_comparisons')
    # hget returns None if missing key/name, and bytes if present
    if res:
        return _convert_redis_result_to_int(res)

    # Cache miss: calculate the number of comparisons from the database.
    with db.DBConn() as conn:
        total_comparisons = db.get_total_comparisons_for_project(
            conn, project_id)
    # Get a writable connection to redis and cache the result for an hour.
    # Fix: hset's return value (count of newly created fields) was assigned
    # to an unused local; it is not needed.
    r = connect_to_redis()
    r.hset(key, 'total_comparisons', total_comparisons)
    r.expire(key, 60 * 60)
    return total_comparisons
# Esempio n. 7
# 0
def post(project_id, run):
    """Create a new run for a project and trigger the executable-runs check."""
    log, span = bind_log_and_span(project_id)
    log.debug("Processing request to add a new run", run=run)

    # Check the resource exists
    abort_if_project_doesnt_exist(project_id)

    # Check the caller has a valid results token. Yes it should be renamed.
    abort_if_invalid_results_token(project_id,
                                   request.headers.get('Authorization'))

    abort_if_project_in_error_state(project_id)

    new_run = Run.from_json(run, project_id)

    log.debug("Saving run")
    with db.DBConn() as connection:
        new_run.save(connection)

    check_for_executable_runs.delay(project_id, serialize_span(span))
    return RunDescription().dump(new_run), 201
# Esempio n. 8
# 0
def post(project_id, run):
    """Create a run; if every party has uploaded, queue it for execution."""
    log = logger.bind(pid=project_id)
    log.debug("Processing request to add a new run", run=run)

    # Check the resource exists
    abort_if_project_doesnt_exist(project_id)

    # Check the caller has a valid results token. Yes it should be renamed.
    abort_if_invalid_results_token(project_id,
                                   request.headers.get('Authorization'))

    abort_if_project_in_error_state(project_id)

    run_model = Run.from_json(run, project_id)

    log.debug("Saving run")
    with db.DBConn() as conn:
        run_model.save(conn)
        project = db.get_project(conn, project_id)
        uploaded = db.get_number_parties_uploaded(conn, project_id)
        ready_to_run = uploaded == project['parties']
        log.debug(
            "Expecting {} parties to upload data. Have received {}".format(
                project['parties'], uploaded))
        if ready_to_run:
            log.info(
                "Scheduling task to carry out all runs for project {} now".
                format(project_id))
            update_run_mark_queued(conn, run_model.run_id)
        else:
            log.info("Task queued but won't start until CLKs are all uploaded")

    # Kick off the worker task outside the DB transaction.
    if ready_to_run:
        span = g.flask_tracer.get_span()
        span.set_tag("run_id", run_model.run_id)
        span.set_tag("project_id", run_model.project_id)
        check_for_executable_runs.delay(project_id, serialize_span(span))
    return RunDescription().dump(run_model), 201
# Esempio n. 9
# 0
def get(project_id, run_id):
    """Return the result of a completed run.

    Responds with the run result when the run state is 'completed';
    fails with 500 if the run errored, 404 if it is not yet complete.
    """
    log, parent_span = bind_log_and_span(project_id, run_id)
    log.info("Checking for results of run.")

    # Fix: the three `as span` bindings below were never used; dropped.
    with opentracing.tracer.start_span('check-auth', child_of=parent_span):
        # Check the project and run resources exist
        abort_if_run_doesnt_exist(project_id, run_id)
        # Check the caller has a valid results token.
        # NOTE(review): the token is only read here and passed through to
        # get_result -- confirm validation actually happens downstream.
        token = request.headers.get('Authorization')
        log.info("request to access run result authorized")
    with db.DBConn() as conn:
        with opentracing.tracer.start_span('get-run-state', child_of=parent_span):
            state = db.get_run_state(conn, run_id)
            log.info("run state is '{}'".format(state))

        # Check that the run is not in a terminal state, otherwise 404
        if state == 'completed':
            with opentracing.tracer.start_span('get-run-result', child_of=parent_span):
                return get_result(conn, project_id, run_id, token)
        elif state == 'error':
            safe_fail_request(500, message='Error during computation of run')
        else:
            safe_fail_request(404, message='run is not complete')