def post(self): """Add project This method allows adding a project connected to a user in the NGSOnto database. Requires the project identifier. Returns ------- code: 201 if added. """ id = current_user.id args = project_post_parser.parse_args() newstudyid = args.study_id UserURI = dbconAg.createURI(namespace=localNSpace, localname="users/" + str(id)) studyBelong2 = dbconAg.createURI(namespace=obo, localname="NGS_0000015") studyURI = dbconAg.createURI(namespace=localNSpace + "projects/", localname=str(newstudyid)) studyType = dbconAg.createURI(namespace=obo, localname="OBI_0000066") dbconAg.add(studyURI, RDF.TYPE, studyType) dbconAg.add(studyURI, studyBelong2, UserURI) return 201
def post(self, id): """Add NGSOnto pipeline This method allows adding a new pipeline to a project. Parameters ---------- id: str project identifier Returns ------- """ args = pipeline_post_parser.parse_args() newpipelineid = args.pipeline_id # need new pipeline ID pipelineURI = dbconAg.createURI(namespace=localNSpace + "projects/", localname=str(id) + "/pipelines/" + str(newpipelineid)) studyURI = dbconAg.createURI(namespace=localNSpace + "projects/", localname=str(id)) hasPart = dbconAg.createURI(namespace=obo, localname="BFO_0000051") pipelineType = dbconAg.createURI(namespace=obo, localname="OBI_0000471") dbconAg.add(pipelineURI, RDF.TYPE, pipelineType) dbconAg.add(studyURI, hasPart, pipelineURI) return 201
def put(self, id, id2, id3):
    """Change a specific output (DEPRECATED)

    Parameters
    ----------
    id: str
        project identifier
    id2: str
        pipeline identifier
    id3: str
        process identifier

    Returns
    -------
    code: 202 if changed, 404 otherwise.
    """
    args = process_put_output_parser.parse_args()

    output_prop_to_type = {
        "run_info": "NGS_0000092",
        "run_output": "NGS_0000093",
        "run_stats": "NGS_0000094",
        "log_file": "NGS_0000096",
        "status": "NGS_0000097"
    }

    try:
        processURI = dbconAg.createURI(namespace=localNSpace + "projects/",
                                       localname=str(id) + "/pipelines/" +
                                       str(id2) + "/processes/" + str(id3))

        # get output URI from process
        hasOutput = dbconAg.createURI(namespace=obo, localname="RO_0002234")
        statements = dbconAg.getStatements(processURI, hasOutput, None)
        outputURI = parseAgraphStatementsRes(statements)
        statements.close()
        outputURI = dbconAg.createURI(outputURI[0]['obj'])

        runInfo = dbconAg.createLiteral((args.property),
                                        datatype=XMLSchema.STRING)
        runInfoProp = dbconAg.createURI(
            namespace=obo, localname=output_prop_to_type[args.property])

        dbconAg.remove(outputURI, runInfoProp, None)

        # add outputs paths to process
        stmt1 = dbconAg.createStatement(outputURI, runInfoProp, runInfo)

        # send to allegro
        dbconAg.add(stmt1)

        return 202

    except Exception as e:
        print e
        return 404
def post(self, id, id2): """Add job identifier to process This method allows adding a job identifier to a process. It requires the processes ids and the job identifiers. Parameters ---------- id: str project identifier id2: str pipeline identifier Returns ------- dict: status and job identifiers """ args = parser_jobid.parse_args() tasks = args.task_ids.split(',') processes = args.processes_ids.split(',') countadded = 0 for index in range(0, len(processes)): try: processURI = dbconAg.createURI( namespace=localNSpace + "projects/", localname=str(id) + "/pipelines/" + str(id2) + "/processes/" + str(processes[index])) indexProp = dbconAg.createURI(namespace=obo, localname="NGS_0000089") indexInt = dbconAg.createLiteral(tasks[index], datatype=XMLSchema.STRING) # add jobID to process dbconAg.remove(processURI, indexProp, None) stmt1 = dbconAg.createStatement(processURI, indexProp, indexInt) # send to allegro dbconAg.add(stmt1) countadded += 1 except Exception: print 'error mapping process' if countadded == len(tasks): return {'status': 202, 'tasks': tasks} else: return 404
def set_unique_prop_output(project_id, pipeline_id, process_id, property_type,
                           property_value):
    """Set output properties of a process in the NGSOnto database.

    property_type and property_value are comma-separated and aligned by
    position. Existing values are replaced, except for log_file and
    warnings, which are added without removing previous values.
    """
    output_prop_to_type = {
        "run_info": "NGS_0000092",
        "run_output": "NGS_0000093",
        "warnings": "NGS_0000094",
        "log_file": "NGS_0000096",
        "status": "NGS_0000097"
    }

    property_types = property_type.split(",")
    property_values = property_value.split(",")

    try:
        for p, v in zip(property_types, property_values):
            # Agraph
            processURI = dbconAg.createURI(
                namespace=localNSpace + "projects/",
                localname=str(project_id) + "/pipelines/" + str(
                    pipeline_id) + "/processes/" + str(process_id))

            # get output URI from process
            hasOutput = dbconAg.createURI(namespace=obo,
                                          localname="RO_0002234")
            statements = dbconAg.getStatements(processURI, hasOutput, None)
            outputURI = parseAgraphStatementsRes(statements)
            statements.close()
            outputURI = dbconAg.createURI(outputURI[0]['obj'])

            runInfo = dbconAg.createLiteral((v), datatype=XMLSchema.STRING)
            runInfoProp = dbconAg.createURI(
                namespace=obo, localname=output_prop_to_type[p])

            if p != "log_file" and p != "warnings":
                dbconAg.remove(outputURI, runInfoProp, None)

            # add outputs paths to process
            stmt1 = dbconAg.createStatement(outputURI, runInfoProp, runInfo)

            # send to allegro
            dbconAg.add(stmt1)

    except Exception as e:
        print e
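# Example use of the helper above: several output properties can be set in one
# call by passing comma-separated, position-aligned property names and values.
# All identifiers and paths below are made-up illustrative values.
set_unique_prop_output(
    project_id=1,
    pipeline_id=2,
    process_id=3,
    property_type="status,log_file",
    property_value="running,/data/jobs/3/run.log"
)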
def post(self): """Add protocol to workflow This method adds protocols to workflows. It requires the protocols identifiers and the id of the workflow. Returns ------- code: 201 if successfully added. """ args = workflow_post_parser.parse_args() protocol_ids = args.protocol_ids.split(',') workflow_id = args.workflow_id for p_id in protocol_ids: protocolURI = dbconAg.createURI(namespace=localNSpace, localname="protocols/" + str(p_id)) hasStep = dbconAg.createURI(namespace=obo, localname="NGS_0000078") workflowURI = dbconAg.createURI(namespace=localNSpace, localname="workflows/" + str(workflow_id)) statements = dbconAg.getStatements(workflowURI, hasStep, None) jsonResult = parseAgraphStatementsRes(statements) statements.close() numberOfProtocols = len(jsonResult) protocolStepType = dbconAg.createURI(namespace=obo, localname="NGS_0000075") protocStepUri = dbconAg.createURI(namespace=localNSpace, localname="workflows/" + str(workflow_id) + "/step/" + str(numberOfProtocols + 1)) indexProp = dbconAg.createURI(namespace=obo, localname="NGS_0000081") indexInt = dbconAg.createLiteral((numberOfProtocols + 1), datatype=XMLSchema.INT) hasProtocolRel = dbconAg.createURI(namespace=obo, localname="NGS_0000077") # add step + index dbconAg.add(protocStepUri, RDF.TYPE, protocolStepType) stmt1 = dbconAg.createStatement(protocStepUri, indexProp, indexInt) dbconAg.add(stmt1) # link workflow to step dbconAg.add(workflowURI, hasStep, protocStepUri) # add protocol + link to step dbconAg.add(protocStepUri, hasProtocolRel, protocolURI) return 201
def post(self): """Add new strain This method adds a new strain to the NGSOnto database. Requires the strain identifier Returns ------- code: 201 if added successfully. """ args = project_post_parser.parse_args() newstrainid = args.strain_id # Agraph strainURI = dbconAg.createURI(namespace=localNSpace + "strains/", localname=str(newstrainid)) strainType = dbconAg.createURI(namespace=obo, localname="OBI_0000747") dbconAg.add(strainURI, RDF.TYPE, strainType) return 201
def user_registered_handler(app, user, confirm_token):
    """Create the upload folder for a newly registered user, assign the
    default role and register the user in the NGSOnto database."""

    if not os.path.exists(
            os.path.join(app.config['UPLOAD_FOLDER'],
                         str(user.email) + '_' + str(user.id))):
        os.makedirs(
            os.path.join(app.config['UPLOAD_FOLDER'],
                         str(user.email) + '_' + str(user.id)))

    default_role = user_datastore.find_role('end-user')
    user_datastore.add_role_to_user(user, default_role)
    db.session.commit()

    id = user.id

    ############# Add user to NGS_onto ########################
    UserURI = dbconAg.createURI(namespace=localNSpace,
                                localname="users/" + str(id))
    userType = dbconAg.createURI(namespace=dcterms, localname="Agent")
    dbconAg.add(UserURI, RDF.TYPE, userType)
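# Sketch of how this handler is typically wired up, assuming the project uses
# Flask-Security's user_registered signal (which sends the app as sender and
# user/confirm_token as keyword arguments, matching the signature above).
# The name of the Flask app object is an assumption.
from flask_security.signals import user_registered

user_registered.connect(user_registered_handler, app)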
def post(self): """Add protocols This methods allows adding protocols to the NGSOnto database. It requires the protocol identifier and the uri of the type. Returns ------- code: 201 if successfully added. """ args = project_post_parser.parse_args() ProtocolId = args.protocol_id protoclTypeURI = args.type_uri protocolURI = dbconAg.createURI(namespace=localNSpace, localname="protocols/" + str(ProtocolId)) protocolTypeURI = dbconAg.createURI(protoclTypeURI) dbconAg.add(protocolURI, RDF.TYPE, protocolTypeURI) return 201
def post(self, id, id2, id3): """Add output to process This method allows adding logs, run information, status and statistics to the NGSOnto entry. Parameters ---------- id: str project identifier id2: str pipeline identifier id3: str process identifier Returns ------- code: status code of the request """ args = process_post_output_parser.parse_args() try: processURI = dbconAg.createURI(namespace=localNSpace + "projects/", localname=str(id) + "/pipelines/" + str(id2) + "/processes/" + str(id3)) # get output URI from process hasOutput = dbconAg.createURI(namespace=obo, localname="RO_0002234") statements = dbconAg.getStatements(processURI, hasOutput, None) outputURI = parseAgraphStatementsRes(statements) statements.close() outputURI = dbconAg.createURI(outputURI[0]['obj']) runInfo = dbconAg.createLiteral((args.run_info), datatype=XMLSchema.STRING) runInfoProp = dbconAg.createURI(namespace=obo, localname="NGS_0000092") runStats = dbconAg.createLiteral((args.output), datatype=XMLSchema.STRING) runStatsProp = dbconAg.createURI(namespace=obo, localname="NGS_0000093") runFile = dbconAg.createLiteral((args.run_stats), datatype=XMLSchema.STRING) runFileProp = dbconAg.createURI(namespace=obo, localname="NGS_0000094") runStatus = dbconAg.createLiteral((args.status), datatype=XMLSchema.STRING) runStatusProp = dbconAg.createURI(namespace=obo, localname="NGS_0000097") dbconAg.remove(outputURI, runInfoProp, None) dbconAg.remove(outputURI, runStatsProp, None) dbconAg.remove(outputURI, runFileProp, None) dbconAg.remove(outputURI, runStatusProp, None) # add outputs paths to process stmt1 = dbconAg.createStatement(outputURI, runInfoProp, runInfo) stmt2 = dbconAg.createStatement(outputURI, runStatsProp, runStats) stmt3 = dbconAg.createStatement(outputURI, runFileProp, runFile) stmt4 = dbconAg.createStatement(processURI, runStatusProp, runStatus) # send to allegro dbconAg.add(stmt1) dbconAg.add(stmt2) dbconAg.add(stmt3) dbconAg.add(stmt4) return 202 except Exception as e: print e return 404
def post(self, id, id2): """Add processes to pipeline This method allows adding processes to a pipeline by linking the protocol ids with the processes. It requires the project id, pipeline id, protocol ids and strain identifier. Parameters ---------- id: str project identifier id2: str pipeline identifier Returns ------- list: list of processes identifiers """ args = process_post_parser.parse_args() pipelineStr = localNSpace + "projects/" + str( id) + "/pipelines/" + str(id2) # get number of processes already mapped on the pipeline hasPart = dbconAg.createURI(namespace=obo, localname="BFO_0000051") pipelineURI = dbconAg.createURI(pipelineStr) statements = dbconAg.getStatements(pipelineURI, hasPart, None) jsonResult = parseAgraphStatementsRes(statements) statements.close() numberOfProcesses = len(jsonResult) print "Request 1", str(id2) # get all ordered workflows from pipeline queryString = "SELECT (str(?proc) " \ "as ?StrProc) (str(?index) as ?StrIndex)" \ " WHERE{<"+pipelineStr+"> obo:BFO_0000051 ?proc." \ " ?proc obo:NGS_0000081 ?index.}" tupleQuery = dbconAg.prepareTupleQuery(QueryLanguage.SPARQL, queryString) result = tupleQuery.evaluate() procJsonResult = parseAgraphQueryRes(result, ["StrProc", "StrIndex"]) result.close() numberOfProcesses = len(procJsonResult) print "Request 2", str(id2) # get all ordered workflows from pipeline queryString = "SELECT ?execStep ?stepIndex" \ " ?workflowURI ?execStep " \ "WHERE {<"+pipelineStr+"> obo:NGS_0000076 ?execStep." \ " ?execStep obo:NGS_0000079" \ " ?workflowURI; obo:NGS_0000081" \ " ?stepIndex3} ORDER BY" \ " ASC(?stepIndex)" tupleQuery = dbconAg.prepareTupleQuery(QueryLanguage.SPARQL, queryString) result = tupleQuery.evaluate() jsonResult = parseAgraphQueryRes( result, ["stepIndex", "workflowURI", "execStep"]) result.close() print "Request 3", str(id2) # get all protocols per workflow listOrderedProtocolsURI = [] listOrderedProcessTypes = [] listOrderedMessageTypes = [] for result in jsonResult: workflowURI = result["workflowURI"] queryString = "SELECT ?protocStep ?stepIndex" \ " ?protocolURI ?type " \ "WHERE {"+workflowURI+\ " obo:NGS_0000078 ?protocStep. ?protocStep" \ " obo:NGS_0000077 ?protocolURI; obo:NGS_0000081" \ " ?stepIndex. ?protocolURI a ?type. 
?type rdfs:label"\ " ?typelabel.} ORDER BY ASC(?stepIndex)" tupleQuery = dbconAg.prepareTupleQuery(QueryLanguage.SPARQL, queryString) result3 = tupleQuery.evaluate() jsonResult2 = parseAgraphQueryRes( result3, ["stepIndex", "protocolURI", "type"]) result3.close() for results in jsonResult2: for k, v in protocolsTypes.items(): if v in results["type"]: listOrderedProtocolsURI.append(results["protocolURI"]) listOrderedProcessTypes.append(processTypes[k]) listOrderedMessageTypes.append(processMessages[k]) print "Request 4 all protocols", str(id2) # Starts at 500 in case does not exists messageid = 500 # TEST query string queryString = """SELECT ?index {?message rdf:type/rdfs:subClassOf* obo:NGS_0000061; obo:NGS_0000081 ?index} order by desc(?index) limit 1""" tupleQuery = dbconAg.prepareTupleQuery(QueryLanguage.SPARQL, queryString) result = tupleQuery.evaluate() for bindingSet in result: messageid = int(str(bindingSet[0]).split('"')[1]) print "Request 5", str(id2) result.close() if args.strain_id != "null": strainid = args.strain_id rpipid = args.real_pipeline_id ppipid = rpipid ppropid = id pprocid = 0 else: ppipid = args.parent_pipeline_id ppropid = args.parent_project_id pprocid = args.parent_process_id rpipid = args.real_pipeline_id if ppipid == rpipid: for proc_json in procJsonResult: if int(proc_json["StrIndex"].replace('"', '')) > int(pprocid): todelUri = dbconAg.createURI( "<" + proc_json["StrProc"].replace('"', "") + ">") hasOutputRel = dbconAg.createURI(namespace=obo, localname="RO_0002234") statements = dbconAg.getStatements(todelUri, hasOutputRel, None) jsonResult = parseAgraphStatementsRes(statements) statements.close() todelUri2 = jsonResult[0]["obj"] todelUri2 = dbconAg.createURI(todelUri2) dbconAg.remove(todelUri2, None, None) dbconAg.remove(todelUri, None, None) dbconAg.remove(None, None, todelUri) statements = dbconAg.getStatements(todelUri, None, None) jsonResult = parseAgraphStatementsRes(statements) statements.close() numberOfProcesses -= 1 print "Request 6", str(id2) try: addedProcesses = numberOfProcesses hasOutputRel = dbconAg.createURI(namespace=obo, localname="RO_0002234") hasInputRel = dbconAg.createURI(namespace=obo, localname="RO_0002233") index = dbconAg.createURI(namespace=obo, localname="NGS_0000081") isRunOfProtocl = dbconAg.createURI(namespace=obo, localname="NGS_0000091") # prev process to link (strain URI most of times) if args.strain_id != "null": prevMessageURI = dbconAg.createURI( namespace=localNSpace, localname="strains/strain_" + str(strainid)) strainTypeURI = dbconAg.createURI( 'http://rdf.ebi.ac.uk/terms/biosd/Sample') dbconAg.add(prevMessageURI, RDF.TYPE, strainTypeURI) processes_ids = [] processid = addedProcesses # Case new run while addedProcesses < len(listOrderedProcessTypes): processid += 1 messageid += 1 processURI = dbconAg.createURI( namespace=localNSpace + "projects/", localname=str(id) + "/pipelines/" + str(id2) + "/processes/" + str(processid)) messageURI = dbconAg.createURI( namespace=localNSpace + "projects/", localname=str(id) + "/pipelines/" + str(id2) + "/messages/" + str(messageid)) processTypeURI = dbconAg.createURI( listOrderedProcessTypes[addedProcesses]) messageTypeURI = dbconAg.createURI( listOrderedMessageTypes[addedProcesses]) protocolTypeURI = dbconAg.createURI( listOrderedProtocolsURI[addedProcesses]) indexProp = dbconAg.createURI(namespace=obo, localname="NGS_0000081") indexInt = dbconAg.createLiteral((addedProcesses + 1), datatype=XMLSchema.INT) messageindexInt = dbconAg.createLiteral((messageid), datatype=XMLSchema.INT) 
# get specific process input type and uri queryString = """SELECT (STR(?out) as ?messageURI) WHERE {<"""+localNSpace+"projects/"+str(id)+"/pipelines/"+\ str(rpipid)+"""> obo:BFO_0000051 ?proc. ?proc obo:NGS_0000081 ?index; obo:RO_0002234 ?out} order by desc(?out)""" print queryString tupleQuery = dbconAg.prepareTupleQuery(QueryLanguage.SPARQL, queryString) result5 = tupleQuery.evaluate() jsonResult2 = parseAgraphQueryRes(result5, ["messageURI"]) result5.close() for results in jsonResult2: prevMessageURI = dbconAg.createURI( results["messageURI"].replace('"', '')) break # add process and link to pipeline dbconAg.add(processURI, RDF.TYPE, processTypeURI) dbconAg.add(pipelineURI, hasPart, processURI) stmt1 = dbconAg.createStatement(processURI, indexProp, indexInt) dbconAg.add(stmt1) # create output and input/output link messages to process dbconAg.add(messageURI, RDF.TYPE, messageTypeURI) dbconAg.add(messageURI, index, messageindexInt) dbconAg.add(processURI, hasOutputRel, messageURI) dbconAg.add(processURI, isRunOfProtocl, protocolTypeURI) dbconAg.add(processURI, hasInputRel, prevMessageURI) # prevMessageURI=messageURI addedProcesses += 1 processes_ids.append(processid) print "Request 7", str(id2) return processes_ids except Exception as e: print e return 404
def post(self, id, id1):
    """Add workflow

    This method allows adding a workflow to a pipeline. Requires workflow
    id and its location in the pipeline.

    Parameters
    ----------
    id: str
        project identifier
    id1: str
        pipeline identifier

    Returns
    -------
    code: 201 if successfully added.
    """
    # Agraph
    args = pipeline_post_parser.parse_args()

    wkflid = args.workflow_id
    prtjctid = id
    pplid = id1
    step = args.step

    wkflid = wkflid.split(',')
    step = step.split(',')

    # check if workflow is on pipeline
    pipelineStr = localNSpace + "projects/" + str(prtjctid) + \
        "/pipelines/" + str(pplid)

    queryString = "SELECT ?execStep (STR(?intstep) as ?step) WHERE {<" + \
                  pipelineStr + "> obo:NGS_0000076 ?execStep." \
                  " ?execStep obo:NGS_0000081 ?intstep.}"

    tupleQuery = dbconAg.prepareTupleQuery(QueryLanguage.SPARQL, queryString)
    result = tupleQuery.evaluate()
    jsonResult = parseAgraphQueryRes(result, ["execStep", "step"])
    result.close()

    for result in jsonResult:
        aux1 = result["execStep"]
        aux2 = result["step"]
        step_converted = map(int, step)
        if int(aux2.replace('"', '')) in step_converted \
                or int(aux2.replace('"', '')) > max(step_converted):
            toremove = dbconAg.createURI(aux1)
            dbconAg.remove(None, None, toremove)
            dbconAg.remove(toremove, None, None)

    counter = -1

    for i in wkflid:
        counter += 1

        # add new workflow
        exStepType = dbconAg.createURI(namespace=obo,
                                       localname="NGS_0000074")
        workflowURI = dbconAg.createURI(namespace=localNSpace,
                                        localname="workflows/" + str(i))
        executeRel = dbconAg.createURI(namespace=obo,
                                       localname="NGS_0000076")
        pipelineURI = dbconAg.createURI(
            namespace=localNSpace + "projects/",
            localname=str(prtjctid) + "/pipelines/" + str(pplid))
        exStepURI = dbconAg.createURI(namespace=localNSpace + "projects/",
                                      localname=str(prtjctid) +
                                      "/pipelines/" + str(pplid) +
                                      "/step/" + str(step[counter]))
        indexInt = dbconAg.createLiteral((step[counter]),
                                         datatype=XMLSchema.INT)
        indexProp = dbconAg.createURI(namespace=obo, localname="NGS_0000081")
        hasWorkflRel = dbconAg.createURI(namespace=obo,
                                         localname="NGS_0000079")

        dbconAg.add(exStepURI, RDF.TYPE, exStepType)
        stmt1 = dbconAg.createStatement(exStepURI, indexProp, indexInt)
        dbconAg.add(stmt1)

        # link pipeline to step
        dbconAg.add(pipelineURI, executeRel, exStepURI)

        # add workflow + link to step
        workflowType = dbconAg.createURI(namespace=obo,
                                         localname="OBI_0500000")
        dbconAg.add(workflowURI, RDF.TYPE, workflowType)
        dbconAg.add(exStepURI, hasWorkflRel, workflowURI)

    return 201
def set_process_output(project_id, pipeline_id, process_id, run_info,
                       run_stats, output, log_file, status):
    """Store the run information, statistics, output, log file and status of
    a process in the NGSOnto database. Existing values are replaced, except
    for the log file, which is added without removing previous entries."""
    try:
        # Agraph
        processURI = dbconAg.createURI(
            namespace=localNSpace + "projects/",
            localname=str(project_id) + "/pipelines/" + str(
                pipeline_id) + "/processes/" + str(process_id))

        # get output URI from process
        hasOutput = dbconAg.createURI(namespace=obo, localname="RO_0002234")
        statements = dbconAg.getStatements(processURI, hasOutput, None)
        outputURI = parseAgraphStatementsRes(statements)
        statements.close()
        outputURI = dbconAg.createURI(outputURI[0]['obj'])

        runInfo = dbconAg.createLiteral((run_info),
                                        datatype=XMLSchema.STRING)
        runInfoProp = dbconAg.createURI(namespace=obo,
                                        localname="NGS_0000092")
        runStats = dbconAg.createLiteral((run_stats),
                                         datatype=XMLSchema.STRING)
        runStatsProp = dbconAg.createURI(namespace=obo,
                                         localname="NGS_0000093")
        runFile = dbconAg.createLiteral((output), datatype=XMLSchema.STRING)
        runFileProp = dbconAg.createURI(namespace=obo,
                                        localname="NGS_0000094")
        logFile = dbconAg.createLiteral((log_file),
                                        datatype=XMLSchema.STRING)
        logFileProp = dbconAg.createURI(namespace=obo,
                                        localname="NGS_0000096")
        runStatus = dbconAg.createLiteral((status),
                                          datatype=XMLSchema.STRING)
        runStatusProp = dbconAg.createURI(namespace=obo,
                                          localname="NGS_0000097")

        dbconAg.remove(outputURI, runInfoProp, None)
        dbconAg.remove(outputURI, runStatsProp, None)
        dbconAg.remove(outputURI, runFileProp, None)
        dbconAg.remove(outputURI, runStatusProp, None)

        # add outputs paths to process
        stmt1 = dbconAg.createStatement(outputURI, runInfoProp, runInfo)
        stmt2 = dbconAg.createStatement(outputURI, runStatsProp, runStats)
        stmt3 = dbconAg.createStatement(outputURI, runFileProp, runFile)
        stmt4 = dbconAg.createStatement(outputURI, logFileProp, logFile)
        stmt5 = dbconAg.createStatement(outputURI, runStatusProp, runStatus)

        # send to allegro
        dbconAg.add(stmt1)
        dbconAg.add(stmt2)
        dbconAg.add(stmt3)
        dbconAg.add(stmt4)
        dbconAg.add(stmt5)

    except Exception as e:
        print "ERROR", e
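# Example call for the helper above, writing all run outputs for one process
# in a single shot; every identifier and path below is an illustrative value.
set_process_output(
    project_id=1,
    pipeline_id=2,
    process_id=3,
    run_info="/data/jobs/3/run_info.json",
    run_stats="/data/jobs/3/run_stats.json",
    output="/data/jobs/3/output.json",
    log_file="/data/jobs/3/run.log",
    status="completed"
)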