def pipeline():
    """POST a pipeline job (JSON) to run on the Luigi pipeline.

    Expects a JSON pipeline config in the request body. On success returns
    a JSON document with the new pipeline/job ids plus monitoring and result
    endpoints; on failure returns an error message with HTTP status 400.
    """
    if not request.data:
        return 'POST a JSON pipeline config to execute or an id to GET. Body should be pipeline JSON'
    try:
        init()
        p_cfg = PipelineConfig.from_dict(request.get_json())
        p_id = insert_pipeline_config(p_cfg, util.conn_string)
        if p_id == -1:
            # Fixed: the original returned '{ "success", false }', which is
            # not valid JSON (comma where a colon belongs).
            return '{"success": false}'
        job_id = jobs.create_new_job(
            jobs.NlpJob(job_id=-1,
                        name=p_cfg.name,
                        description=p_cfg.description,
                        owner=p_cfg.owner,
                        status=jobs.STARTED,
                        date_ended=None,
                        phenotype_id=-1,
                        pipeline_id=p_id,
                        date_started=datetime.now(),
                        job_type='PIPELINE'),
            util.conn_string)
        luigi_runner.run_pipeline(p_cfg.config_type, str(p_id), job_id,
                                  p_cfg.owner)
        output = dict()
        output["pipeline_id"] = str(p_id)
        output["job_id"] = str(job_id)
        output["luigi_task_monitoring"] = (
            "%s/static/visualiser/index.html#search__search=job=%s" % (
                util.luigi_url, str(job_id)))
        output["status_endpoint"] = "%s/status/%s" % (
            util.main_url, str(job_id))
        output["results_endpoint"] = "%s/job_results/%s/%s" % (
            util.main_url, str(job_id), 'pipeline')
        return json.dumps(output, indent=4)
    except Exception as ex:
        # Boundary handler: surface the failure to the API caller as a 400.
        return 'Failed to load and insert pipeline. ' + str(ex), 400
def pipeline():
    """POST a pipeline job (JSON) to run on the Luigi pipeline.

    Expects a JSON pipeline config in the request body. On success returns
    a JSON document with the new pipeline/job ids plus monitoring and result
    endpoints; on failure returns an error message with HTTP status 400.
    """
    if not request.data:
        return 'POST a JSON pipeline config to execute or an id to GET. Body should be pipeline JSON'
    try:
        init()
        p_cfg = PipelineConfig.from_dict(request.get_json())
        p_id = insert_pipeline_config(p_cfg, util.conn_string)
        if p_id == -1:
            # Fixed: the original returned '{ "success", false }', which is
            # not valid JSON (comma where a colon belongs).
            return '{"success": false}'
        job_id = jobs.create_new_job(
            jobs.NlpJob(job_id=-1,
                        name=p_cfg.name,
                        description=p_cfg.description,
                        owner=p_cfg.owner,
                        status=jobs.STARTED,
                        date_ended=None,
                        phenotype_id=-1,
                        pipeline_id=p_id,
                        date_started=datetime.now(),
                        job_type='PIPELINE'),
            util.conn_string)
        luigi_runner.run_pipeline(p_cfg.config_type, str(p_id), job_id,
                                  p_cfg.owner)
        output = dict()
        output["pipeline_id"] = str(p_id)
        output["job_id"] = str(job_id)
        output["luigi_task_monitoring"] = (
            "%s/static/visualiser/index.html#search__search=job=%s" % (
                util.luigi_url, str(job_id)))
        output["status_endpoint"] = "%s/status/%s" % (
            util.main_url, str(job_id))
        output["results_endpoint"] = "%s/job_results/%s/%s" % (
            util.main_url, str(job_id), 'pipeline')
        return json.dumps(output, indent=4)
    except Exception as ex:
        # Boundary handler: surface the failure to the API caller as a 400.
        return 'Failed to load and insert pipeline. ' + str(ex), 400
def measurement_finder():
    """Extract measurements from POSTed text.

    See samples/sample_measurement_finder.json for the request shape.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with terms and text"
    init()
    model = NLPModel.from_dict(request.get_json())
    found = run_measurement_finder_full(model.text, model.terms)
    return json.dumps([m.__dict__ for m in found], indent=4)
def tnm_stage():
    """Extract TNM cancer stage from POSTed text.

    See samples/sample_tnm_stage.json for the request shape.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object text"
    init()
    model = NLPModel.from_dict(request.get_json())
    staging = run_tnm_stager_full(model.text)
    return json.dumps(staging, indent=4)
def pos_tagger():
    """Return part-of-speech tags for POSTed text.

    See samples/sample_pos_tag_text.json for the request shape.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with text"
    init()
    model = NLPModel.from_dict(request.get_json())
    tagged = get_tags(model.text)
    return json.dumps([t.__dict__ for t in tagged], indent=4)
def pos_tagger():
    """POS-tag the text of a POSTed NLP model.

    See samples/sample_pos_tag_text.json for an example request body.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with text"
    init()
    payload = NLPModel.from_dict(request.get_json())
    tag_list = get_tags(payload.text)
    return json.dumps([tag.__dict__ for tag in tag_list], indent=4)
def measurement_finder():
    """Run the measurement finder over a POSTed NLP model.

    See samples/sample_measurement_finder.json for an example request body.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with terms and text"
    init()
    payload = NLPModel.from_dict(request.get_json())
    measurements = run_measurement_finder_full(payload.text, payload.terms)
    return json.dumps([item.__dict__ for item in measurements], indent=4)
def tnm_stage():
    """Run the TNM cancer stager over a POSTed NLP model.

    See samples/sample_tnm_stage.json for an example request body.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object text"
    init()
    payload = NLPModel.from_dict(request.get_json())
    stage_result = run_tnm_stager_full(payload.text)
    return json.dumps(stage_result, indent=4)
def named_entity_recognition():
    """Extract standard named entities from POSTed text.

    See samples/sample_ner.json for the request shape.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with text"
    init()
    model = NLPModel.from_dict(request.get_json())
    entities = get_standard_entities(model.text)
    return json.dumps([e.__dict__ for e in entities], indent=4)
def named_entity_recognition():
    """Run standard NER over a POSTed NLP model.

    See samples/sample_ner.json for an example request body.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with text"
    init()
    payload = NLPModel.from_dict(request.get_json())
    ner_hits = get_standard_entities(payload.text)
    return json.dumps([hit.__dict__ for hit in ner_hits], indent=4)
def value_extractor():
    """Extract values such as BP, LVEF, Vital Signs etc. from POSTed text.

    See samples/sample_value_extractor.json for the request shape.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with terms and text"
    init()
    model = NLPModel.from_dict(request.get_json())
    extracted = run_value_extractor_full(model.terms, model.text,
                                         model.min_value, model.max_value,
                                         model.case_sensitive)
    return json.dumps([v.__dict__ for v in extracted], indent=4)
def term_finder():
    """Extract terms, context, negex, and sections from POSTed text.

    See samples/sample_term_finder.json for the request shape.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with terms and text"
    init()
    model = NLPModel.from_dict(request.get_json())
    matches = TermFinder(model.terms).get_term_full_text_matches(model.text)
    return json.dumps([m.__dict__ for m in matches], indent=4)
def value_extractor():
    """Run the value extractor (BP, LVEF, vital signs, ...) on POSTed text.

    See samples/sample_value_extractor.json for an example request body.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with terms and text"
    init()
    payload = NLPModel.from_dict(request.get_json())
    values = run_value_extractor_full(
        payload.terms, payload.text, payload.min_value, payload.max_value,
        is_case_sensitive_text=payload.case_sensitive)
    return json.dumps([val.__dict__ for val in values], indent=4)
def term_finder():
    """Run the term finder (terms, context, negex, sections) on POSTed text.

    See samples/sample_term_finder.json for an example request body.
    """
    if request.method != 'POST' or not request.data:
        return "Please POST a valid JSON object with terms and text"
    init()
    payload = NLPModel.from_dict(request.get_json())
    finder = TermFinder(payload.terms)
    hits = finder.get_term_full_text_matches(payload.text)
    return json.dumps([hit.__dict__ for hit in hits], indent=4)
def post_phenotype(p_cfg: PhenotypeModel, raw_nlpql: str = '',
                   background=False, tuple_def_docs=None):
    """Insert a phenotype and launch its Luigi run.

    Validates the phenotype model, stores it, registers a new NLP job,
    optionally persists tuple definition docs to Mongo, then starts the
    phenotype pipelines. Returns a dict of job metadata and result
    endpoints, the failed validation result, or an error dict.
    """
    validated = phenotype_helper.validate_phenotype(p_cfg)
    if not validated['success']:
        return validated

    init()
    if len(raw_nlpql) > 0:
        p_cfg.nlpql = raw_nlpql

    p_id = insert_phenotype_model(p_cfg, util.conn_string)
    if p_id == -1:
        return {"success": False, "error": "Failed to insert phenotype"}

    new_job = jobs.NlpJob(job_id=-1,
                          name=p_cfg.name,
                          description=p_cfg.description,
                          owner=p_cfg.owner,
                          status=jobs.STARTED,
                          date_ended=None,
                          phenotype_id=p_id,
                          pipeline_id=-1,
                          date_started=datetime.now(),
                          job_type='PHENOTYPE')
    job_id = jobs.create_new_job(new_job, util.conn_string)

    if tuple_def_docs is not None and len(tuple_def_docs) > 0:
        # Persist tuple definition docs alongside the phenotype results.
        client = util.mongo_client()
        collection = client[util.mongo_db]['phenotype_results']
        inserted = tuple_processor.insert_tuple_def_docs(
            collection, tuple_def_docs, job_id)
        if inserted:
            log('inserted {0} tuple definition docs for job_id {1}'.format(
                len(tuple_def_docs), job_id))
        else:
            log('failed to insert {0} tuple definition docs for job_id {1}'.format(
                len(tuple_def_docs), job_id))

    pipeline_ids = luigi_runner.run_phenotype(p_cfg, p_id, job_id,
                                              background=background)

    output = {
        "job_id": str(job_id),
        "phenotype_id": str(p_id),
        'phenotype_config': "%s/phenotype_id/%s" % (util.main_url, str(p_id)),
        'pipeline_ids': pipeline_ids,
        'pipeline_configs': ["%s/pipeline_id/%s" % (util.main_url, str(pid))
                             for pid in pipeline_ids],
        "status_endpoint": "%s/status/%s" % (util.main_url, str(job_id)),
        "luigi_task_monitoring":
            "%s/static/visualiser/index.html#search__search=job=%s" % (
                util.luigi_url, str(job_id)),
        "intermediate_results_csv": "%s/job_results/%s/%s" % (
            util.main_url, str(job_id), 'phenotype_intermediate'),
        "main_results_csv": "%s/job_results/%s/%s" % (
            util.main_url, str(job_id), 'phenotype'),
    }
    return output
def post_phenotype(p_cfg: PhenotypeModel, raw_nlpql: str = ''):
    """Insert a phenotype and start its Luigi run.

    Validates the phenotype model, stores it, registers a new NLP job, and
    kicks off the phenotype pipelines. Returns a dict of job metadata and
    result endpoints, the failed validation result, or an error dict.
    """
    validated = phenotype_helper.validate_phenotype(p_cfg)
    if not validated['success']:
        return validated

    init()
    if len(raw_nlpql) > 0:
        p_cfg.nlpql = raw_nlpql

    p_id = insert_phenotype_model(p_cfg, util.conn_string)
    if p_id == -1:
        return {"success": False, "error": "Failed to insert phenotype"}

    new_job = jobs.NlpJob(job_id=-1,
                          name=p_cfg.name,
                          description=p_cfg.description,
                          owner=p_cfg.owner,
                          status=jobs.STARTED,
                          date_ended=None,
                          phenotype_id=p_id,
                          pipeline_id=-1,
                          date_started=datetime.now(),
                          job_type='PHENOTYPE')
    job_id = jobs.create_new_job(new_job, util.conn_string)

    pipeline_ids = luigi_runner.run_phenotype(p_cfg, p_id, job_id)

    output = {
        "job_id": str(job_id),
        "phenotype_id": str(p_id),
        'phenotype_config': "%s/phenotype_id/%s" % (util.main_url, str(p_id)),
        'pipeline_ids': pipeline_ids,
        'pipeline_configs': ["%s/pipeline_id/%s" % (util.main_url, str(pid))
                             for pid in pipeline_ids],
        "status_endpoint": "%s/status/%s" % (util.main_url, str(job_id)),
        "results_viewer": "%s?job=%s" % (util.results_viewer_url, str(job_id)),
        "luigi_task_monitoring":
            "%s/static/visualiser/index.html#search__search=job=%s" % (
                util.luigi_url, str(job_id)),
        "intermediate_results_csv": "%s/job_results/%s/%s" % (
            util.main_url, str(job_id), 'phenotype_intermediate'),
        "main_results_csv": "%s/job_results/%s/%s" % (
            util.main_url, str(job_id), 'phenotype'),
    }
    return output
def post_phenotype(p_cfg: PhenotypeModel, raw_nlpql: str = ''):
    """Validate, store, and run a phenotype.

    On success returns a dict of job metadata and result endpoints.
    Returns the validation result when validation fails, or an error dict
    when the phenotype cannot be inserted.
    """
    validation = phenotype_helper.validate_phenotype(p_cfg)
    if not validation['success']:
        return validation

    init()
    if len(raw_nlpql) > 0:
        p_cfg.nlpql = raw_nlpql

    phenotype_id = insert_phenotype_model(p_cfg, util.conn_string)
    if phenotype_id == -1:
        return {"success": False, "error": "Failed to insert phenotype"}

    job_id = jobs.create_new_job(
        jobs.NlpJob(job_id=-1,
                    name=p_cfg.name,
                    description=p_cfg.description,
                    owner=p_cfg.owner,
                    status=jobs.STARTED,
                    date_ended=None,
                    phenotype_id=phenotype_id,
                    pipeline_id=-1,
                    date_started=datetime.now(),
                    job_type='PHENOTYPE'),
        util.conn_string)

    pipeline_ids = luigi_runner.run_phenotype(p_cfg, phenotype_id, job_id)
    config_urls = []
    for pid in pipeline_ids:
        config_urls.append("%s/pipeline_id/%s" % (util.main_url, str(pid)))

    response = dict()
    response["job_id"] = str(job_id)
    response["phenotype_id"] = str(phenotype_id)
    response['phenotype_config'] = "%s/phenotype_id/%s" % (
        util.main_url, str(phenotype_id))
    response['pipeline_ids'] = pipeline_ids
    response['pipeline_configs'] = config_urls
    response["status_endpoint"] = "%s/status/%s" % (
        util.main_url, str(job_id))
    response["results_viewer"] = "%s?job=%s" % (
        util.results_viewer_url, str(job_id))
    response["luigi_task_monitoring"] = (
        "%s/static/visualiser/index.html#search__search=job=%s" % (
            util.luigi_url, str(job_id)))
    response["intermediate_results_csv"] = "%s/job_results/%s/%s" % (
        util.main_url, str(job_id), 'phenotype_intermediate')
    response["main_results_csv"] = "%s/job_results/%s/%s" % (
        util.main_url, str(job_id), 'phenotype')
    return response