def process(self, instruction: Instruction, data: any, callback: LoggingCallback = default_logger):
    """Execute every FHIR query in the CNF structure, reporting progress per query.

    :param instruction: instruction whose state is advanced to FHIREXECUTION
    :param data: CNF as an iterable of disjunctions, each an iterable of FHIR queries
    :param callback: receives a progress event after each executed query
    :return: list of per-disjunction lists holding the paged query results
    """
    # Mark the instruction as executing and report 0/N before the first query runs.
    instruction.state = ExecutionState.FHIREXECUTION
    query_count: int = count(data)
    executed_counter: int = 0
    callback.log_progress_event(
        instruction, information=f"{executed_counter}/{query_count}")

    # Walk the CNF: execute each query and collect results per disjunction.
    results = []
    for disjunction in data:
        disjunction_results = []
        for fhir_query in disjunction:
            # Run the query, keep its paged result, then report progress.
            disjunction_results.append(execute_fhir_query(fhir_query))
            executed_counter += 1
            callback.log_progress_event(
                instruction, information=f"{executed_counter}/{query_count}")
        results.append(disjunction_results)
    return results
def create_query_sync():
    """
    Submit a query and execute it synchronously

    :return: the query result produced by run_codex_query
    """
    # Derive the query syntax and desired response type from the HTTP headers.
    query_syntax = content_type_to_query_syntax(request.headers["Content-Type"])
    response_type = accept_to_response_type(request.headers["Accept"])
    query_input: str = request.data.decode("iso-8859-1")

    # Build the instruction and execute it in-line (no queueing).
    instruction: Instruction = Instruction(query_input,
                                           str(uuid4()),
                                           time.time_ns(),
                                           query_syntax=query_syntax,
                                           response_type=response_type)
    return run_codex_query(instruction)
def create_query():
    """
    Submit a query for asynchronous execution

    :return: 202 response whose Location header points to the result/progress resource
    """
    # Derive the query syntax and desired response type from the HTTP headers.
    query_syntax = content_type_to_query_syntax(request.headers["Content-Type"])
    response_type = accept_to_response_type(request.headers["Accept"])
    i2b2_request: str = request.data.decode("UTF-8")

    # Build the instruction that will be queued for the worker.
    instruction: Instruction = Instruction(i2b2_request,
                                           str(uuid4()),
                                           time.time_ns(),
                                           query_syntax=query_syntax,
                                           response_type=response_type)

    # Persist an execution flag; mode "x" fails if the flag already exists.
    with open(instruction.file_path(), "x") as flag:
        flag.write(instruction_encoder.encode(instruction))

    # Hand the instruction to the execution queue.
    instruction_queue.put(instruction)

    # 202 Accepted with the polling location.
    response = app.make_response("")
    response.status_code = 202
    response.headers["Location"] = f"/query/{str(instruction.request_id)}"
    return response
def create_query_translate():
    """
    Submit a query for translation

    :return: translated query as string
    """
    # Derive the query syntax and desired response type from the HTTP headers.
    query_syntax = content_type_to_query_syntax(request.headers["Content-Type"])
    response_type = accept_to_response_type(request.headers["Accept"])
    query_input: str = request.data.decode("iso-8859-1")

    # Build the instruction and translate it in-line (no queueing).
    instruction: Instruction = Instruction(query_input,
                                           str(uuid4()),
                                           time.time_ns(),
                                           query_syntax=query_syntax,
                                           response_type=response_type)
    return run_translate_query(instruction)
def process(self, instruction: Instruction, data: any, callback: LoggingCallback = default_logger):
    """Serialize the aggregated result data to a JSON string.

    :param instruction: instruction whose state is advanced to RESULTBUILDING
    :param data: result data to serialize
    :param callback: receives the progress event for this stage
    :return: JSON string representation of *data*
    """
    instruction.state = ExecutionState.RESULTBUILDING
    # Fix: use the supplied callback — the original always called default_logger,
    # silently ignoring a caller-provided callback (inconsistent with the
    # execution stage, which does honor it).
    callback.log_progress_event(
        instruction, information=instruction.response_type.name)
    return json.dumps(data)
def parse_input(instruction: Instruction) -> List[List[List[dict]]]:
    """Parse the instruction's raw request data into the intermediate CNF representation.

    :param instruction: instruction whose state is advanced to PARSING
    :return: intermediate query representation as nested lists of dicts
    """
    instruction.state = ExecutionState.PARSING
    default_logger.log_progress_event(
        instruction, information=instruction.query_syntax.name)
    # Dispatch to the parser registered for this instruction's query syntax.
    parser = syntax_parser_map[instruction.query_syntax]
    return parser(instruction.request_data)
def process(self, instruction: Instruction, data: any, callback: LoggingCallback = default_logger):
    """Build the FHIR CNF queries from the intermediate representation.

    :param instruction: instruction whose state is advanced to QUERYBUILDING
    :param data: intermediate query representation
    :param callback: receives the progress event for this stage
    :return: generated FHIR CNF
    """
    instruction.state = ExecutionState.QUERYBUILDING
    # Fix: use the supplied callback — the original always called default_logger,
    # silently ignoring a caller-provided callback.
    callback.log_progress_event(instruction)
    return generate_fhir_cnf(data)
def process(self, instruction: Instruction, data: any, callback: LoggingCallback = default_logger):
    """Build the XML response from the result set and stamp it with timing info.

    :param instruction: instruction whose state is advanced to RESULTBUILDING
    :param data: result set to render
    :param callback: receives the progress event for this stage
    :return: XML response serialized as a UTF-8 string
    """
    instruction.state = ExecutionState.RESULTBUILDING
    # Fix: use the supplied callback — the original always called default_logger,
    # silently ignoring a caller-provided callback.
    callback.log_progress_event(
        instruction, information=instruction.response_type.name)
    # (dropped the redundant `result_set = data` alias)
    x_response = build_response(data)
    insert_timestamps(x_response, instruction)
    return Etree.tostring(x_response).decode("UTF-8")
def test_run_codex_query(self):
    """Run every CODEX test case and write failing inclusion criteria to a CSV report.

    Fixes over the original:
    - `test_name.strip(".json")` discarded its result (str is immutable) and
      `strip` removes a character set, not a suffix; use os.path.splitext instead.
    - `test_name` could be unbound (NameError) in the `except TypeError` branch;
      it is now computed before the try block for every file.
    - the inner `with open(...) as f` shadowed the outer CSV file handle `f`;
      distinct names are used now.
    """
    content_type = "codex/json"
    query_syntax = content_type_to_query_syntax(content_type)
    accept = "internal/json"
    response_type = accept_to_response_type(accept)
    path = "test/testCases_copy"
    extension = "json"
    testCase_content_list = []
    instructionSet = []
    response_list = []
    with open("test/testResults" + '.csv', 'w', newline='') as report_file:
        sheet = csv.writer(report_file)
        for filename in os.listdir(path):
            if not filename.endswith(extension):
                continue
            with open(os.path.join(path, filename)) as case_file:
                query_input = case_file.read()
            testCase_content_list.append(query_input)
            queue_insertion_time: int = time.time_ns()
            uuid: UUID = uuid4()
            singleInstruction = Instruction(query_input, str(uuid),
                                            queue_insertion_time,
                                            query_syntax=query_syntax,
                                            response_type=response_type)
            instructionSet.append(singleInstruction)
            # Report name without the ".json" extension; computed up-front so the
            # except branch never sees an unbound or stale value.
            test_name = os.path.splitext(filename)[0]
            try:
                result = run_codex_query(singleInstruction)
                if result != "1":
                    query_json = json.loads(query_input)
                    for inclusionCriteria in query_json["inclusionCriteria"][0]:
                        sheet.writerow([test_name,
                                        inclusionCriteria["termCode"]["system"],
                                        inclusionCriteria["termCode"]["code"]])
                    response_list.append(result)
            except TypeError:
                query_json = json.loads(query_input)
                for inclusionCriteria in query_json["inclusionCriteria"][0]:
                    sheet.writerow([test_name,
                                    inclusionCriteria["termCode"]["system"],
                                    inclusionCriteria["termCode"]["code"]])
                print(query_input)
                print("This one")
def test_run_codex_query(self):
    """Execute every CODEX test case under test/testCases_copy through run_codex_query."""
    content_type = "codex/json"
    query_syntax = content_type_to_query_syntax(content_type)
    accept = "internal/json"
    response_type = accept_to_response_type(accept)
    path = "test/testCases_copy"
    extension = "json"
    testCase_content_list = []
    instructionSet = []
    response_list = []
    for filename in os.listdir(path):
        if not filename.endswith(extension):
            continue
        with open(os.path.join(path, filename)) as f:
            query_input = f.read()
        testCase_content_list.append(query_input)
        queue_insertion_time: int = time.time_ns()
        uuid: UUID = uuid4()
        singleInstruction = Instruction(
            query_input, str(uuid), queue_insertion_time,
            query_syntax=query_syntax,
            response_type=response_type)
        instructionSet.append(singleInstruction)
        try:
            response_list.append(run_codex_query(singleInstruction))
        except TypeError:
            # Dump the offending query and abort the whole run.
            print(query_input)
            print("This one")
            exit()
def handle_i2b2_query():
    """
    Synchronous execution API (legacy) takes an I2B2 Query Definition in the
    body and executes it.

    :return: the number of matching patients found
    """
    print("handling query")
    # Execute the query and measure wall-clock time around it.
    start_time = time.time()
    i2b2_request = request.data.decode("UTF-8")
    try:
        instruction = Instruction(i2b2_request, str(uuid4()), time.time_ns())
        response = build_response(run(instruction))
    except RequestException:
        return "Connection error with upstream FHIR server", 504
    end_time = time.time()

    # Prepend timing elements; inserting each at index 0 yields the final
    # order delta, start_time, end_time (same as the original).
    for tag, value in (("end_time", end_time),
                       ("start_time", start_time),
                       ("delta", end_time - start_time)):
        element = Etree.Element(tag)
        element.attrib["value"] = str(value)
        response.insert(0, element)

    return str(Etree.tostring(response).decode("UTF-8"))
        help="path to the file containing the i2b2 to FHIR mappings")
# CLI option: syntax of the input query (defaults to I2B2)
parser.add_argument(
    "--querySyntax",
    type=str,
    choices=[e.name for e in QuerySyntax],
    help="detail which syntax the query is in, default is I2B2",
    default="I2B2",
    dest="query_syntax")
# CLI option: what kind of result the user wants back (defaults to RESULT)
parser.add_argument(
    "--responseType",
    type=str,
    choices=[e.name for e in ResponseType],
    default="RESULT",
    help="detail what result the user wants to process, default is result",
    dest="response_type")
args = parser.parse_args()

# Create instruction from the query file; the enum members are looked up by
# the names validated by argparse `choices` above.
try:
    with open(args.query_file, 'r') as file:
        ins = Instruction(file.read(), "local_request", time.time_ns(),
                          query_syntax=QuerySyntax[args.query_syntax],
                          response_type=ResponseType[args.response_type])
except IOError:
    # Abort with a non-zero exit code if the query file cannot be read.
    print("Error reading the query file")
    exit(-1)

# Run the Script
run(ins)
def process(self, instruction: Instruction, data: any, callback: LoggingCallback = default_logger):
    """Aggregate the FHIR query results into a result set.

    :param instruction: instruction whose state is advanced to AGGREGATING
    :param data: FHIR query results to aggregate
    :param callback: receives the progress event for this stage
    :return: the aggregated result set
    """
    instruction.state = ExecutionState.AGGREGATING
    # Fix: use the supplied callback — the original always called default_logger,
    # silently ignoring a caller-provided callback.
    callback.log_progress_event(instruction)
    return build_result_set_from_query_results(data)