def upload_metadata_source_post(request):
    """Validate and insert uploaded source metadata (plus HLA typings).

    ``request.params["sources"]`` must hold a Python-literal list of source
    dicts.  If any source already exists in the DB, the whole upload is
    aborted with HTTP 500; otherwise each source (and any HLA typing it
    carries) is inserted.

    :param request: Pyramid request with a ``sources`` parameter.
    :return: empty dict on success, or a plain-text 500 ``Response``.
    """
    source_upload = ast.literal_eval(request.params["sources"])

    # Pass 1: abort the whole upload if any source is already present.
    for source in source_upload:
        try:
            query = DBSession.query(Source.source_id) \
                .filter(Source.patient_id == source['patient_id']) \
                .filter(Source.organ == source['organ']) \
                .filter(Source.organism == source['organism']) \
                .filter(Source.histology == source['histology']) \
                .filter(Source.dignity == source['dignity']) \
                .filter(Source.location == source['location']) \
                .filter(Source.treatment == source['treatment']) \
                .filter(Source.metastatis == source['metastatis']) \
                .filter(Source.celltype == source['celltype']) \
                .filter(Source.comment == source['comment']) \
                .filter(Source.person == source['person']) \
                .filter(Source.prep_date == source['prep_date'])
            test_source = query.all()
        except DBAPIError:
            return Response(conn_err_msg,
                            content_type='text/plain',
                            status_int=500)
        if len(test_source) > 0:
            # BUG FIX: `source` is a dict; `str + dict` raised TypeError.
            log_writer("source_metadata_complete",
                       strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                       str(source))
            log_writer("source_metadata_complete",
                       strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                       "The source is already in the Database. "
                       "Aborted whole upload!")
            # NOTE(review): original literal was broken across a line;
            # normalized to match the log message above.
            return Response("The source is already in the Database. "
                            "Aborted whole upload!",
                            content_type='text/plain',
                            status_int=500)

    # Pass 2: insert each source.
    for source in source_upload:
        # ---- Source row -------------------------------------------------
        try:
            sample_id = "_".join([source["patient_id"], source["organ"],
                                  source['dignity'], source['histology'],
                                  source['celltype'], source['location'],
                                  source['treatment'], source['prep_date']])
            source_insert = Source(patient_id=source['patient_id'],
                                   organ=source['organ'],
                                   organism=source['organism'],
                                   histology=source['histology'],
                                   dignity=source['dignity'],
                                   location=source['location'],
                                   treatment=source['treatment'],
                                   metastatis=source['metastatis'],
                                   celltype=source['celltype'],
                                   comment=source['comment'],
                                   prep_date=source['prep_date'],
                                   person=source['person'],
                                   sample_id=sample_id)
        except DBAPIError:
            return Response(conn_err_msg + "\n Insert into Source failed!",
                            content_type='text/plain',
                            status_int=500)

        # BUG FIX: `source['typing'] is not ""` compared identity, not
        # equality (SyntaxWarning on CPython >= 3.8).
        if source['typing'] != "":
            # ---- hla_types ---------------------------------------------
            hla_alleles = source['typing'].split(";")
            for hla_typing in hla_alleles:
                hla_typing_split = hla_typing.strip().split(":")
                # Register every resolution prefix of the allele
                # (e.g. "A*02", "A*02:01", ...).
                for i in range(len(hla_typing_split)):
                    sub_type = ":".join(hla_typing_split[0:i + 1])
                    try:
                        query = DBSession.query(
                            HlaType.hla_types_id).filter(
                                HlaType.hla_string == sub_type)
                        hla_types_id = query.all()
                    except DBAPIError:
                        return Response(conn_err_msg,
                                        content_type='text/plain',
                                        status_int=500)
                    if len(hla_types_id) == 0:
                        # Unknown HLA type: create it on the fly.
                        try:
                            hla_type = HlaType(
                                hla_string=sub_type,
                                digits=hla_digits_extractor(sub_type))
                            DBSession.add(hla_type)
                            DBSession.flush()
                            hla_types_id = hla_type.hla_types_id
                        except DBAPIError:
                            return Response(
                                conn_err_msg +
                                "\n Insert into Hla-Types failed!",
                                content_type='text/plain',
                                status_int=500)
                    else:
                        hla_types_id = hla_types_id[0]
                        hla_type = DBSession.query(HlaType).filter(
                            HlaType.hla_string == sub_type).all()[0]
                    # ---- hla_map (association) -------------------------
                    try:
                        # BUG FIX: a Source instance has no .append();
                        # map the HLA type through the relationship
                        # collection, as update_metadata_source_post does.
                        source_insert.hlatypes.append(hla_type)
                    except DBAPIError:
                        return Response(
                            conn_err_msg + "\n Insert into Hla-Map failed!",
                            content_type='text/plain',
                            status_int=500)
            try:
                log_writer("source_metadata",
                           strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                           str(source))
                log_writer("source_metadata_complete",
                           strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                           str(source))
                DBSession.add(source_insert)
                DBSession.flush()
            except DBAPIError:
                return Response(conn_err_msg +
                                "\n Insert into Source failed!",
                                content_type='text/plain',
                                status_int=500)
        else:
            log_writer("source_metadata",
                       strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                       str(source))
            log_writer("source_metadata_complete",
                       strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                       str(source))
            DBSession.add(source_insert)
            DBSession.flush()
    # (dead `hla_lookup_id = "NULL"` assignment removed)
    return dict()
# NOTE(review): this is a duplicate re-definition of
# upload_metadata_source_post; at import time it shadows the earlier,
# identical definition.  Consider deleting one of the two copies.
def upload_metadata_source_post(request):
    """Validate and insert uploaded source metadata (plus HLA typings).

    ``request.params["sources"]`` must hold a Python-literal list of source
    dicts.  If any source already exists in the DB, the whole upload is
    aborted with HTTP 500; otherwise each source (and any HLA typing it
    carries) is inserted.

    :param request: Pyramid request with a ``sources`` parameter.
    :return: empty dict on success, or a plain-text 500 ``Response``.
    """
    source_upload = ast.literal_eval(request.params["sources"])

    # Pass 1: abort the whole upload if any source is already present.
    for source in source_upload:
        try:
            query = DBSession.query(Source.source_id) \
                .filter(Source.patient_id == source['patient_id']) \
                .filter(Source.organ == source['organ']) \
                .filter(Source.organism == source['organism']) \
                .filter(Source.histology == source['histology']) \
                .filter(Source.dignity == source['dignity']) \
                .filter(Source.location == source['location']) \
                .filter(Source.treatment == source['treatment']) \
                .filter(Source.metastatis == source['metastatis']) \
                .filter(Source.celltype == source['celltype']) \
                .filter(Source.comment == source['comment']) \
                .filter(Source.person == source['person']) \
                .filter(Source.prep_date == source['prep_date'])
            test_source = query.all()
        except DBAPIError:
            return Response(conn_err_msg,
                            content_type='text/plain',
                            status_int=500)
        if len(test_source) > 0:
            # BUG FIX: `source` is a dict; `str + dict` raised TypeError.
            log_writer("source_metadata_complete",
                       strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                       str(source))
            log_writer("source_metadata_complete",
                       strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                       "The source is already in the Database. "
                       "Aborted whole upload!")
            # NOTE(review): original literal was broken across a line;
            # normalized to match the log message above.
            return Response("The source is already in the Database. "
                            "Aborted whole upload!",
                            content_type='text/plain',
                            status_int=500)

    # Pass 2: insert each source.
    for source in source_upload:
        # ---- Source row -------------------------------------------------
        try:
            sample_id = "_".join([source["patient_id"], source["organ"],
                                  source['dignity'], source['histology'],
                                  source['celltype'], source['location'],
                                  source['treatment'], source['prep_date']])
            source_insert = Source(patient_id=source['patient_id'],
                                   organ=source['organ'],
                                   organism=source['organism'],
                                   histology=source['histology'],
                                   dignity=source['dignity'],
                                   location=source['location'],
                                   treatment=source['treatment'],
                                   metastatis=source['metastatis'],
                                   celltype=source['celltype'],
                                   comment=source['comment'],
                                   prep_date=source['prep_date'],
                                   person=source['person'],
                                   sample_id=sample_id)
        except DBAPIError:
            return Response(conn_err_msg + "\n Insert into Source failed!",
                            content_type='text/plain',
                            status_int=500)

        # BUG FIX: `source['typing'] is not ""` compared identity, not
        # equality (SyntaxWarning on CPython >= 3.8).
        if source['typing'] != "":
            # ---- hla_types ---------------------------------------------
            hla_alleles = source['typing'].split(";")
            for hla_typing in hla_alleles:
                hla_typing_split = hla_typing.strip().split(":")
                # Register every resolution prefix of the allele
                # (e.g. "A*02", "A*02:01", ...).
                for i in range(len(hla_typing_split)):
                    sub_type = ":".join(hla_typing_split[0:i + 1])
                    try:
                        query = DBSession.query(
                            HlaType.hla_types_id).filter(
                                HlaType.hla_string == sub_type)
                        hla_types_id = query.all()
                    except DBAPIError:
                        return Response(conn_err_msg,
                                        content_type='text/plain',
                                        status_int=500)
                    if len(hla_types_id) == 0:
                        # Unknown HLA type: create it on the fly.
                        try:
                            hla_type = HlaType(
                                hla_string=sub_type,
                                digits=hla_digits_extractor(sub_type))
                            DBSession.add(hla_type)
                            DBSession.flush()
                            hla_types_id = hla_type.hla_types_id
                        except DBAPIError:
                            return Response(
                                conn_err_msg +
                                "\n Insert into Hla-Types failed!",
                                content_type='text/plain',
                                status_int=500)
                    else:
                        hla_types_id = hla_types_id[0]
                        hla_type = DBSession.query(HlaType).filter(
                            HlaType.hla_string == sub_type).all()[0]
                    # ---- hla_map (association) -------------------------
                    try:
                        # BUG FIX: a Source instance has no .append();
                        # map the HLA type through the relationship
                        # collection, as update_metadata_source_post does.
                        source_insert.hlatypes.append(hla_type)
                    except DBAPIError:
                        return Response(
                            conn_err_msg + "\n Insert into Hla-Map failed!",
                            content_type='text/plain',
                            status_int=500)
            try:
                log_writer("source_metadata",
                           strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                           str(source))
                log_writer("source_metadata_complete",
                           strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                           str(source))
                DBSession.add(source_insert)
                DBSession.flush()
            except DBAPIError:
                return Response(conn_err_msg +
                                "\n Insert into Source failed!",
                                content_type='text/plain',
                                status_int=500)
        else:
            log_writer("source_metadata",
                       strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                       str(source))
            log_writer("source_metadata_complete",
                       strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                       str(source))
            DBSession.add(source_insert)
            DBSession.flush()
    # (dead `hla_lookup_id = "NULL"` assignment removed)
    return dict()
def update_metadata_source_post(request):
    """Update an existing Source row and rebuild its HLA mappings.

    ``request.params["sources"]`` must hold a Python-literal dict with a
    ``source_id`` plus the fields to change; an empty string for a field
    means "leave it unchanged".

    :param request: Pyramid request with a ``sources`` parameter.
    :return: redirect to the source's update page on success, or a
        plain-text 500 ``Response`` on failure.
    """
    source = ast.literal_eval(request.params["sources"])
    try:
        # BUG FIX: `source` is a dict; `str + dict` raised TypeError.
        log_writer("source_update_complete",
                   strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                   str(source))
        source_update = DBSession.query(Source).join(t_hla_map) \
            .join(HlaType) \
            .filter(Source.source_id == source["source_id"]).all()
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed
        # SystemExit/KeyboardInterrupt).
        log_writer("source_update_complete",
                   strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                   " Source update failed!")
        return Response(conn_err_msg + " \n Source update failed",
                        content_type='text/plain',
                        status_int=500)
    if len(source_update) > 0:
        record = source_update[0]
        # Empty string means "keep the current value".
        if source['patient_id'] != "":
            record.patient_id = source['patient_id']
        if source['organ'] != "":
            record.organ = source['organ']
        if source['organism'] != "":
            # BUG FIX: attribute was misspelled `orgnaism`, so organism
            # updates silently never reached the database.
            record.organism = source['organism']
        if source['comment'] != "":
            record.comment = source['comment']
        if source['histology'] != "":
            record.histology = source['histology']
        if source['dignity'] != "":
            record.dignity = source['dignity']
        if source['celltype'] != "":
            record.celltype = source['celltype']
        if source['person'] != "":
            record.person = source['person']
        if source['location'] != "":
            record.location = source['location']
        if source['metastatis'] != "":
            record.metastatis = source['metastatis']
        if source['treatment'] != "":
            record.treatment = source['treatment']
        if source['prep_date'] != "":
            record.prep_date = source['prep_date']
        # Rebuild the derived sample_id from the (possibly updated) fields.
        record.sample_id = "_".join([record.patient_id, record.organ,
                                     record.dignity, record.histology,
                                     record.celltype, record.location,
                                     record.treatment, record.prep_date])
        if source['typing'] != "":
            # Drop all existing HLA mappings and recreate them.
            record.hlatypes[:] = []
            hla_split = source['typing'].split(";")
            for hla_typing in hla_split:
                hla_typing_split = hla_typing.strip().split(":")
                # Register every resolution prefix of the allele.
                for i in range(len(hla_typing_split)):
                    sub_type = ":".join(hla_typing_split[0:i + 1])
                    try:
                        query = DBSession.query(
                            HlaType.hla_types_id).filter(
                                HlaType.hla_string == sub_type)
                        hla_types_id = query.all()
                    except DBAPIError:
                        return Response(conn_err_msg,
                                        content_type='text/plain',
                                        status_int=500)
                    if len(hla_types_id) == 0:
                        # Unknown HLA type: create it on the fly.
                        try:
                            hla_type = HlaType(
                                hla_string=sub_type,
                                digits=hla_digits_extractor(sub_type))
                            DBSession.add(hla_type)
                            DBSession.flush()
                            hla_types_id = hla_type.hla_types_id
                        except DBAPIError:
                            return Response(
                                conn_err_msg +
                                "\n Insert into Hla-Types failed!",
                                content_type='text/plain',
                                status_int=500)
                    else:
                        hla_types_id = hla_types_id[0]
                        hla_type = DBSession.query(HlaType).filter(
                            HlaType.hla_string == sub_type).all()[0]
                    try:
                        # Add the HLA type to the mapping table.
                        record.hlatypes.append(hla_type)
                    except DBAPIError:
                        return Response(
                            conn_err_msg + "\n Insert into Hla-Map failed!",
                            content_type='text/plain',
                            status_int=500)
    try:
        transaction.commit()
        DBSession.flush()
        log_writer("source_update",
                   strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                   str(source))
    except Exception:
        log_writer("source_update_complete",
                   strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                   "Source update failed!")
        DBSession.rollback()
        return Response("Source update failed!",
                        content_type='text/plain',
                        status_int=500)
    return HTTPFound(location="/update_metadata_source?id=%s" %
                     source["source_id"])
# NOTE(review): this is a duplicate re-definition of
# update_metadata_source_post; at import time it shadows the earlier,
# identical definition.  Consider deleting one of the two copies.
def update_metadata_source_post(request):
    """Update an existing Source row and rebuild its HLA mappings.

    ``request.params["sources"]`` must hold a Python-literal dict with a
    ``source_id`` plus the fields to change; an empty string for a field
    means "leave it unchanged".

    :param request: Pyramid request with a ``sources`` parameter.
    :return: redirect to the source's update page on success, or a
        plain-text 500 ``Response`` on failure.
    """
    source = ast.literal_eval(request.params["sources"])
    try:
        # BUG FIX: `source` is a dict; `str + dict` raised TypeError.
        log_writer("source_update_complete",
                   strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                   str(source))
        source_update = DBSession.query(Source).join(t_hla_map) \
            .join(HlaType) \
            .filter(Source.source_id == source["source_id"]).all()
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed
        # SystemExit/KeyboardInterrupt).
        log_writer("source_update_complete",
                   strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                   " Source update failed!")
        return Response(conn_err_msg + " \n Source update failed",
                        content_type='text/plain',
                        status_int=500)
    if len(source_update) > 0:
        record = source_update[0]
        # Empty string means "keep the current value".
        if source['patient_id'] != "":
            record.patient_id = source['patient_id']
        if source['organ'] != "":
            record.organ = source['organ']
        if source['organism'] != "":
            # BUG FIX: attribute was misspelled `orgnaism`, so organism
            # updates silently never reached the database.
            record.organism = source['organism']
        if source['comment'] != "":
            record.comment = source['comment']
        if source['histology'] != "":
            record.histology = source['histology']
        if source['dignity'] != "":
            record.dignity = source['dignity']
        if source['celltype'] != "":
            record.celltype = source['celltype']
        if source['person'] != "":
            record.person = source['person']
        if source['location'] != "":
            record.location = source['location']
        if source['metastatis'] != "":
            record.metastatis = source['metastatis']
        if source['treatment'] != "":
            record.treatment = source['treatment']
        if source['prep_date'] != "":
            record.prep_date = source['prep_date']
        # Rebuild the derived sample_id from the (possibly updated) fields.
        record.sample_id = "_".join([record.patient_id, record.organ,
                                     record.dignity, record.histology,
                                     record.celltype, record.location,
                                     record.treatment, record.prep_date])
        if source['typing'] != "":
            # Drop all existing HLA mappings and recreate them.
            record.hlatypes[:] = []
            hla_split = source['typing'].split(";")
            for hla_typing in hla_split:
                hla_typing_split = hla_typing.strip().split(":")
                # Register every resolution prefix of the allele.
                for i in range(len(hla_typing_split)):
                    sub_type = ":".join(hla_typing_split[0:i + 1])
                    try:
                        query = DBSession.query(
                            HlaType.hla_types_id).filter(
                                HlaType.hla_string == sub_type)
                        hla_types_id = query.all()
                    except DBAPIError:
                        return Response(conn_err_msg,
                                        content_type='text/plain',
                                        status_int=500)
                    if len(hla_types_id) == 0:
                        # Unknown HLA type: create it on the fly.
                        try:
                            hla_type = HlaType(
                                hla_string=sub_type,
                                digits=hla_digits_extractor(sub_type))
                            DBSession.add(hla_type)
                            DBSession.flush()
                            hla_types_id = hla_type.hla_types_id
                        except DBAPIError:
                            return Response(
                                conn_err_msg +
                                "\n Insert into Hla-Types failed!",
                                content_type='text/plain',
                                status_int=500)
                    else:
                        hla_types_id = hla_types_id[0]
                        hla_type = DBSession.query(HlaType).filter(
                            HlaType.hla_string == sub_type).all()[0]
                    try:
                        # Add the HLA type to the mapping table.
                        record.hlatypes.append(hla_type)
                    except DBAPIError:
                        return Response(
                            conn_err_msg + "\n Insert into Hla-Map failed!",
                            content_type='text/plain',
                            status_int=500)
    try:
        transaction.commit()
        DBSession.flush()
        log_writer("source_update",
                   strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                   str(source))
    except Exception:
        log_writer("source_update_complete",
                   strftime("%Y.%m.%d %H:%M:%S", gmtime()) + "\t" +
                   "Source update failed!")
        DBSession.rollback()
        return Response("Source update failed!",
                        content_type='text/plain',
                        status_int=500)
    return HTTPFound(location="/update_metadata_source?id=%s" %
                     source["source_id"])