def test_insert_project(self):
    before = datetime.datetime.now()
    project, _ = self._create_project()
    assert len(project.result_token) == 48

    # Check we can fetch the inserted project back from the database.
    conn, cur = _get_conn_and_cursor()
    project_response = get_project(conn, project.project_id)
    assert 'time_added' in project_response
    assert project_response['time_added'] - before >= datetime.timedelta(seconds=0)
    assert not project_response['marked_for_deletion']
    assert not project_response['uses_blocking']
    assert project_response['parties'] == 2
    assert project_response['notes'] == ''
    assert project_response['name'] == ''
    assert project_response['result_type'] == 'groups'
    assert project_response['schema'] == {}
    assert project_response['encoding_size'] is None
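# For reference, a minimal sketch of the row shape the assertions above expect
# get_project to return for a freshly inserted project. The name and values are
# illustrative, inferred from this test; it is not a fixture used elsewhere.
EXPECTED_NEW_PROJECT_ROW = {
    'marked_for_deletion': False,
    'uses_blocking': False,
    'parties': 2,
    'notes': '',
    'name': '',
    'result_type': 'groups',
    'schema': {},
    'encoding_size': None,
    # 'time_added' is set by the database on insert, so the test only checks
    # that it is present and not earlier than the time the test started.
}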
def authorise_get_request(project_id):
    if request.headers is None or 'Authorization' not in request.headers:
        safe_fail_request(401, message="Authentication token required")
    auth_header = request.headers.get('Authorization')
    dp_id = None
    # Check the resource exists
    abort_if_project_doesnt_exist(project_id)
    with DBConn() as dbinstance:
        project_object = db.get_project(dbinstance, project_id)
    logger.info("Checking credentials")
    if project_object['result_type'] in {'mapping', 'similarity_scores'}:
        # Check the caller has a valid results token if we are including results
        abort_if_invalid_results_token(project_id, auth_header)
    elif project_object['result_type'] == 'permutations':
        dp_id = get_authorization_token_type_or_abort(project_id, auth_header)
    else:
        # Any other result type is unexpected here.
        safe_fail_request(500, "Unknown error")
    return dp_id, project_object
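# A minimal client-side sketch of what authorise_get_request enforces. The
# base URL is a hypothetical deployment address and results_token is the
# token returned at project creation; neither is defined in this module.
def _example_authorised_get(project_id, results_token):
    import requests

    base_url = 'http://localhost:8851/api/v1'  # hypothetical address
    # Without an Authorization header the service aborts with 401.
    anonymous = requests.get(f'{base_url}/projects/{project_id}')
    assert anonymous.status_code == 401
    # With the project's results token the request is authorised.
    authorised = requests.get(
        f'{base_url}/projects/{project_id}',
        headers={'Authorization': results_token})
    return authorised.json()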
def post(project_id, run):
    log = logger.bind(pid=project_id)
    log.debug("Processing request to add a new run", run=run)
    # Check the resource exists
    abort_if_project_doesnt_exist(project_id)
    # Check the caller has a valid results token. Yes it should be renamed.
    abort_if_invalid_results_token(project_id, request.headers.get('Authorization'))
    abort_if_project_in_error_state(project_id)

    run_model = Run.from_json(run, project_id)

    log.debug("Saving run")
    with db.DBConn() as db_conn:
        run_model.save(db_conn)
        project_object = db.get_project(db_conn, project_id)
        parties_contributed = db.get_number_parties_uploaded(db_conn, project_id)
        ready_to_run = parties_contributed == project_object['parties']
        log.debug("Expecting {} parties to upload data. Have received {}".format(
            project_object['parties'], parties_contributed))
        if ready_to_run:
            log.info("Scheduling task to carry out all runs for project {} now".format(project_id))
            update_run_mark_queued(db_conn, run_model.run_id)
        else:
            log.info("Task queued but won't start until CLKs are all uploaded")

    if ready_to_run:
        span = g.flask_tracer.get_span()
        span.set_tag("run_id", run_model.run_id)
        span.set_tag("project_id", run_model.project_id)
        check_for_executable_runs.delay(project_id, serialize_span(span))
    return RunDescription().dump(run_model), 201
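# Client-side sketch of creating a run through this handler. The URL and the
# fields in the run body are assumptions for illustration; Run.from_json
# defines what is actually accepted.
def _example_post_run(project_id, results_token):
    import requests

    base_url = 'http://localhost:8851/api/v1'  # hypothetical address
    response = requests.post(
        f'{base_url}/projects/{project_id}/runs',
        headers={'Authorization': results_token},
        json={'threshold': 0.9, 'notes': 'example run'})  # assumed fields
    assert response.status_code == 201  # matches the return above
    return response.json()  # the serialized RunDescription for the new run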
def project_get(project_id):
    """
    This endpoint describes a Project.
    """
    log = logger.bind(pid=project_id)
    log.info("Getting detail for a project")
    abort_if_project_doesnt_exist(project_id)
    authorise_get_request(project_id)
    with DBConn() as db_conn:
        project_object = db.get_project(db_conn, project_id)
        # Expose the number of data providers who have uploaded clks
        parties_contributed = db.get_number_parties_uploaded(db_conn, project_id)
        num_parties_with_error = db.get_encoding_error_count(db_conn, project_id)
    log.info(f"{parties_contributed} parties have contributed hashes")
    project_object['parties_contributed'] = parties_contributed
    if num_parties_with_error > 0:
        log.warning(f"There are {num_parties_with_error} parties in error state")
    project_object['error'] = num_parties_with_error > 0
    return ProjectDescription().dump(project_object)
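# Illustrative shape of the body this handler returns. 'parties_contributed'
# and 'error' are the keys added above; the remaining keys are inferred from
# the stored project row and may not be exhaustive, and the values shown are
# placeholders rather than real output.
EXAMPLE_PROJECT_DESCRIPTION = {
    'project_id': 'an-opaque-project-id',  # placeholder value
    'name': '',
    'notes': '',
    'parties': 2,
    'parties_contributed': 1,
    'result_type': 'groups',
    'error': False,
}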