def test_addMIRIAMinchiKey(self):
    """addMIRIAMinchiKey annotates species with InChIKeys in both fixture models."""
    # (file, species id, expected first InChIKey in the MIRIAM annotation)
    cases = [
        ('rpsbml.xml', 'MNXM100__64__MNXC3', 'GVVPGTZRZFNKDS-JXMROGBWSA-K'),
        ('gem.xml', 'M_2pg_c', 'GXIURPTVHJPJLF-UWTATZPHSA-K'),
    ]
    for filename, species_id, expected_key in cases:
        model = rpSBML('test', path=os.path.join('data', 'rpsbml', filename))
        self.assertTrue(model.addMIRIAMinchiKey())
        annotation = model.model.getSpecies(species_id).getAnnotation()
        inchikeys = model.readMIRIAMAnnotation(annotation)['inchikey']
        self.assertEqual(inchikeys[0], expected_key)
def test_createMultiFluxObj(self):
    """createMultiFluxObj creates a new flux objective, or returns an existing one by id."""
    # create a new flux objective and check the serialized model checksum
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        rpsbml = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
        flux_obj = rpsbml.createMultiFluxObj('test', ['RP1'], [1.0])
        self.assertEqual(flux_obj.getId(), 'test')
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        rpsbml.writeSBML(out_path)
        # close the file deterministically instead of leaking the handle
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             'c4647ea976dbf1eebbf0e7400af4b65d')
        # recover an already existing flux objective
        rpsbml = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
        flux_obj = rpsbml.createMultiFluxObj('obj_fraction', ['RP1'], [1.0])
        self.assertEqual(flux_obj.getId(), 'obj_fraction')
def test_createGroup(self):
    """createGroup creates a new SBML group, or returns an existing one by id."""
    # create a new group and check the serialized model checksum
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        rpsbml = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
        gro = rpsbml.createGroup('test')
        self.assertEqual(gro.getId(), 'test')
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        rpsbml.writeSBML(out_path)
        # close the file deterministically instead of leaking the handle
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             '19c61349df77d97118210a359710dbbf')
        # recover an already existing group
        rpsbml = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
        gro = rpsbml.createGroup('rp_pathway', 'MNXC3')
        self.assertEqual(gro.getId(), 'rp_pathway')
def test_createSpecies(self):
    """createSpecies creates a new species, or returns an existing one by id."""
    # create a new species and check the serialized model checksum
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        rpsbml = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
        spe = rpsbml.createSpecies('test', 'MNXC3')
        self.assertEqual(spe.getId(), 'test')
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        rpsbml.writeSBML(out_path)
        # close the file deterministically instead of leaking the handle
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             '8222edc93f5c1430231d7e16afbf3079')
        # recover an already existing species
        rpsbml = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
        spe = rpsbml.createSpecies('MNXM6', 'MNXC3')
        self.assertEqual(spe.getId(), 'MNXM6')
def test_createCompartment(self):
    """createCompartment works both without and with a recognised compartment xref."""
    # compartment id with no cross-reference
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        new = rpSBML('test')
        new.createModel('test_name', 'test_id')
        new.createCompartment('test')
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        new.writeSBML(out_path)
        # close the file deterministically instead of leaking the handle
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             '87fd0a156f470799bdcf6295decb5faa')
    # compartment id with a MetaNetX cross-reference
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        new = rpSBML('test')
        new.createModel('test_name', 'test_id')
        new.createCompartment('MNXC3')
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        new.writeSBML(out_path)
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             'c27be1b46e4c0f10aaf86362364749e7')
def test_findCreateObjective(self):
    """findCreateObjective returns an existing objective id, or creates a new one."""
    # find path: the objective already exists in the class fixture
    found = self.rpsbml.findCreateObjective(['RP1_sink'], [1.0])
    self.assertEqual(found, 'obj_RP1_sink')
    # create path: the objective does not exist yet in a fresh model
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        fresh = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
        created = fresh.findCreateObjective(['RP2'], [1.0])
        self.assertEqual(created, 'obj_RP2')
def test_createReturnFluxParameter(self):
    """createReturnFluxParameter builds a flux-bound parameter with a value-derived id."""
    model = rpSBML('test')
    model.createModel('test_name', 'test_id')
    parameter = model.createReturnFluxParameter(8888.0)
    # the id encodes the numeric value: 8888.0 -> 'B_8888_0'
    self.assertEqual(parameter.id, 'B_8888_0')
    self.assertEqual(parameter.value, 8888.0)
def test_runCollection(self):
    """rpFBA.runCollection yields one model annotated with the expected FBA objective values."""
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        out_col = os.path.join(tmp_output_folder, 'test.rpcol')
        rpFBA.runCollection(os.path.join('data', 'rpfba', 'test.rpcol'),
                            os.path.join('data', 'rpfba', 'gem.xml'),
                            out_col,
                            num_workers=1)
        results_dir = os.path.join(tmp_output_folder, 'results')
        os.mkdir(results_dir)
        # close the archive deterministically instead of leaking the handle
        with tarfile.open(out_col, mode='r') as tar:
            tar.extractall(results_dir)
        # glob once instead of scanning the directory twice
        models = glob.glob(
            os.path.join(results_dir, 'rpsbml_collection', 'models', '*'))
        self.assertEqual(len(models), 1)
        asdict = rpSBML(path=models[0]).asDict()
        brsynth = asdict['pathway']['brsynth']
        self.assertAlmostEqual(brsynth['fba_obj_biomass_restricted']['value'],
                               0.6577479108178588)
        self.assertAlmostEqual(brsynth['fba_obj_fraction']['value'],
                               0.9438866396863238)
        self.assertAlmostEqual(brsynth['fba_obj_biomass']['value'],
                               0.8769972144238116)
def rpPipeline():
    """Run the full pipeline (reader -> equilibrator -> FBA -> selenzyme -> global
    score) on the uploaded files and send back the resulting rpcollection archive.

    Expects multipart files: 'params' (JSON), 'rp2_file', 'rp2paths_compounds_file',
    'rp2paths_pathways_file', 'gem_file'.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        params = json.load(request.files['params'])
        # persist each upload so the pipeline steps can read it from disk
        rp2_file = os.path.join(tmpdir, 'rp2_file.csv')
        with open(rp2_file, 'wb') as fo:
            fo.write(request.files['rp2_file'].read())
        rp2paths_compounds_file = os.path.join(tmpdir, 'rp2paths_compounds_file.csv')
        with open(rp2paths_compounds_file, 'wb') as fo:
            fo.write(request.files['rp2paths_compounds_file'].read())
        rp2paths_pathways_file = os.path.join(tmpdir, 'rp2paths_pathways_file')
        with open(rp2paths_pathways_file, 'wb') as fo:
            fo.write(request.files['rp2paths_pathways_file'].read())
        gem_file = os.path.join(tmpdir, 'gem_file.sbml')
        with open(gem_file, 'wb') as fo:
            fo.write(request.files['gem_file'].read())
        rpcollection_file = os.path.join(tmpdir, 'rpcollection.tar.xz')
        # NOTE(review): the *_status return values are not checked here; see the
        # rpPipelineService endpoint for the error-handling variant — confirm
        # whether failures should also abort this endpoint.
        rpre_status = rpReader.rp2ToCollection(rp2_file,
                                               rp2paths_compounds_file,
                                               rp2paths_pathways_file,
                                               rpcollection_file,
                                               rpcache=GLOBAL_RPCACHE)
        rpeq_status = rpEquilibrator.runCollection(
            rpcollection_file, rpcollection_file,
            ph=float(params['ph']),
            ionic_strength=float(params['ionic_strength']),
            temp_k=float(params['temp_k']),
            rpcache=GLOBAL_RPCACHE)
        rpfba_status = rpFBA.runCollection(rpcollection_file,
                                           gem_file,
                                           rpcollection_file,
                                           num_workers=params['num_workers'],
                                           keep_merged=params['keep_merged'],
                                           del_sp_pro=params['del_sp_pro'],
                                           del_sp_react=params['del_sp_react'],
                                           rpcache=GLOBAL_RPCACHE)
        # identity comparison with None (PEP 8), not '== None'
        if params['taxo_id'] is None:
            # if you cannot find the annotation then try to recover it from the GEM file
            rpsbml_gem = rpSBML(model_name='tmp', path=gem_file)
            params['taxo_id'] = rpsbml_gem.readTaxonomy()
        rpsel_status = rpSelenzyme.runCollection(
            rpcollection_file, params['taxo_id'], rpcollection_file,
            uniprot_aa_length=SELENZYME_UNIPROT_AA_LENGTH,
            data_dir=SELENZYNE_DATA_DIR,
            pc=SELENZYME_PC,
            rpcache=GLOBAL_RPCACHE)
        rpglo_status = rpGlobalScore.runCollection(rpcollection_file,
                                                   rpcollection_file,
                                                   rpcache=GLOBAL_RPCACHE)
        # BUG FIX: rpcollection_file is a filesystem path (str), not a file
        # object, so the previous rpcollection_file.seek(0) raised
        # AttributeError on every request. send_file accepts a path directly.
        return send_file(rpcollection_file, as_attachment=True,
                         attachment_filename='rpcollection.tar.xz',
                         mimetype='application/x-tar')
def test_createUnitDefinition(self):
    """createUnitDefinition/createUnit build the mmol_per_gDW_per_hr unit definition."""
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        new = rpSBML('test')
        new.createModel('test_name', 'test_id')
        unit_def = new.createUnitDefinition('mmol_per_gDW_per_hr')
        # mmol (mole, scale -3), per gram, per hour (second with multiplier 3600)
        new.createUnit(unit_def, libsbml.UNIT_KIND_MOLE, 1, -3, 1)
        new.createUnit(unit_def, libsbml.UNIT_KIND_GRAM, 1, 0, 1)
        new.createUnit(unit_def, libsbml.UNIT_KIND_SECOND, 1, 0, 3600)
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        new.writeSBML(out_path)
        # close the file deterministically instead of leaking the handle
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             '7e98e6667c2de2fa9078795121203c99')
def test_mergeSBMLFiles(self):
    """Merging the rpSBML pathway into the GEM rewires RP1 onto the GEM species ids."""
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        merged_path = os.path.join(tmp_output_folder, 'merged.xml')
        rpMerge.mergeSBMLFiles(os.path.join('data', 'rpmerge', 'rpsbml.xml'),
                               os.path.join('data', 'rpmerge', 'gem.xml'),
                               merged_path)
        merged = rpSBML(path=merged_path)
        reaction = merged.model.getReaction('RP1')
        self.assertEqual(reaction.getId(), 'RP1')
        # single reactant, mapped onto the GEM species
        reactants = [ref.species for ref in reaction.getListOfReactants()]
        self.assertTrue(len(reactants) == 1)
        self.assertEqual(reactants[0], 'M_grdp_c')
        # single product, kept from the heterologous pathway
        products = [ref.species for ref in reaction.getListOfProducts()]
        self.assertTrue(len(products) == 1)
        self.assertEqual(products[0], 'TARGET_0000000001__64__MNXC3')
def test_createReaction(self):
    """createReaction adds a reaction from a step dict and serializes deterministically."""
    #TODO: add detection
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        rpsbml = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
        # minimal step: one reactant, no products, all metadata left empty
        step = {'rule_id': None,
                'left': {'MNXM89557': 1},
                'right': {},
                'step': None,
                'sub_step': None,
                'path_id': None,
                'transformation_id': None,
                'rule_score': None,
                'rule_ori_reac': None}
        rpsbml.createReaction('test', 999999.0, 0.0, step, 'MNXC3',
                              xref={'ec': ['1.1.1.1']})
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        rpsbml.writeSBML(out_path)
        # close the file deterministically instead of leaking the handle
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             '6c832eb02bb3f1849d2e0b754e44e748')
def test_runCollection(self):
    """rpEquilibrator.runCollection yields one model annotated with the expected dfG."""
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        out_col = os.path.join(tmp_output_folder, 'test.rpcol')
        rpEquilibrator.runCollection(
            os.path.join('data', 'rpequilibrator', 'test.rpcol'),
            out_col)
        results_dir = os.path.join(tmp_output_folder, 'results')
        os.mkdir(results_dir)
        # close the archive deterministically instead of leaking the handle
        with tarfile.open(out_col, mode='r') as tar:
            tar.extractall(results_dir)
        # glob once instead of scanning the directory twice
        models = glob.glob(
            os.path.join(results_dir, 'rpsbml_collection', 'models', '*'))
        self.assertEqual(len(models), 1)
        asdict = rpSBML(path=models[0]).asDict()
        self.assertAlmostEqual(asdict['pathway']['brsynth']['dfG_prime_o']['value'],
                               1784.7384959433493)
def setUpClass(self):
    """Load the rpmerge fixtures shared by every test in this class."""
    # NOTE(review): parameter is named 'self' although setUpClass is normally a
    # classmethod taking 'cls' — presumably decorated outside this view; confirm.
    fixtures = os.path.join('data', 'rpmerge')
    self.gem = rpSBML(path=os.path.join(fixtures, 'gem.xml'))
    self.rpsbml = rpSBML(path=os.path.join(fixtures, 'rpsbml.xml'))
    self.rpmerge = rpMerge(True, path=os.path.join(fixtures, 'gem.xml'))
def setUpClass(self):
    """Load the rpsbml fixtures and the expected-data JSON shared by the tests."""
    # NOTE(review): parameter is named 'self' although setUpClass is normally a
    # classmethod taking 'cls' — presumably decorated outside this view; confirm.
    # hoist the fixture directory: it was recomputed three times before
    data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            'data', 'rpsbml')
    self.rpsbml = rpSBML('test', path=os.path.join(data_dir, 'rpsbml.xml'))
    self.gem = rpSBML('gem', path=os.path.join(data_dir, 'gem.xml'))
    # close the handle instead of leaking it via json.load(open(...))
    with open(os.path.join(data_dir, 'data.json'), 'r') as fh:
        self.data = json.load(fh)
    self.maxDiff = None  # to be able to compare large dict
def rpPipelineService():
    """Validated pipeline endpoint: save uploads, parse parameters with defaults,
    then run reader -> equilibrator -> FBA -> selenzyme -> global score, aborting
    with a 400 Response on the first failing step.

    Expects multipart files: 'data' (JSON parameters), 'rp2_file',
    'rp2paths_compounds_file', 'rp2paths_pathways_file', 'gem_file'.
    """
    with tempfile.TemporaryDirectory() as tmpdir:

        def fail(message, level='error'):
            # log *message* and wrap it in a 400 Response
            getattr(app.logger, level)(message)
            return Response(message, status=400)

        def param(params, key, default, cast):
            # cast params[key]; fall back to *default* when missing or malformed
            try:
                return cast(params[key])
            except KeyError:
                app.logger.info(
                    'No %s passed. Setting to default to %s' % (key, default))
                return default
            except ValueError:
                app.logger.warning(
                    '%s is not recognised. Setting to default %s' % (key, default))
                return default

        ########### file in ################
        rp2_file = os.path.join(tmpdir, 'rp2_file.csv')
        rp2paths_compounds_file = os.path.join(tmpdir, 'rp2paths_compounds_file.csv')
        rp2paths_pathways_file = os.path.join(tmpdir, 'rp2paths_pathways_file')
        gem_file = os.path.join(tmpdir, 'gem_file.sbml')
        try:
            for path, field in ((rp2_file, 'rp2_file'),
                                (rp2paths_compounds_file, 'rp2paths_compounds_file'),
                                (rp2paths_pathways_file, 'rp2paths_pathways_file'),
                                (gem_file, 'gem_file')):
                with open(path, 'wb') as fo:
                    fo.write(request.files[field].read())
        except KeyError as e:
            return fail('A required file is missing: ' + str(e))
        ############ parameters ##########
        try:
            params = json.load(request.files['data'])
        except ValueError as e:
            return fail('One or more parameters are malformed: ' + str(e))
        except KeyError as e:
            return fail('One or more of the parameters are missing: ' + str(e))
        ph = param(params, 'ph', 7.5, float)
        temp_k = param(params, 'temp_k', 298.15, float)
        ionic_strength = param(params, 'ionic_strength', 200.0, float)
        num_workers = param(params, 'num_workers', 1, int)
        # NOTE(review): bool('false') is True — string-typed booleans would be
        # misread; confirm clients send genuine JSON booleans.
        keep_merged = param(params, 'keep_merged', True, bool)
        del_sp_pro = param(params, 'del_sp_pro', False, bool)
        del_sp_react = param(params, 'del_sp_react', False, bool)
        # default None triggers recovery of the taxonomy id from the GEM SBML
        taxo_id = param(params, 'taxo_id', None, int)
        ############ pipeline ##########
        rpcollection_file = os.path.join(tmpdir, 'rpcollection.tar.xz')
        if not rpReader.rp2ToCollection(rp2_file,
                                        rp2paths_compounds_file,
                                        rp2paths_pathways_file,
                                        rpcollection_file,
                                        rpcache=GLOBAL_RPCACHE):
            return fail('rpReader has encountered a problem.... '
                        'please investigate futher')
        if not rpEquilibrator.runCollection(rpcollection_file,
                                            rpcollection_file,
                                            ph=ph,
                                            ionic_strength=ionic_strength,
                                            temp_k=temp_k,
                                            rpcache=GLOBAL_RPCACHE):
            return fail('rpEquilibrator has encountered a problem.... '
                        'please investigate futher')
        if not rpFBA.runCollection(rpcollection_file,
                                   gem_file,
                                   rpcollection_file,
                                   num_workers=num_workers,
                                   keep_merged=keep_merged,
                                   del_sp_pro=del_sp_pro,
                                   del_sp_react=del_sp_react,
                                   rpcache=GLOBAL_RPCACHE):
            return fail('rpFBA has encountered a problem.... '
                        'please investigate futher')
        # identity comparison with None (PEP 8), not '== None'
        if taxo_id is None:
            # if you cannot find the annotation then try to recover it from the GEM file
            rpsbml_gem = rpSBML(model_name='tmp', path=gem_file)
            taxo_id = rpsbml_gem.readTaxonomy()
            logging.info('The taxonomy_id is ' + str(taxo_id))
            try:
                taxo_id = taxo_id[0]
            except IndexError:
                return fail('Could not retreive the taxonomy id and none was passed')
        if not rpSelenzyme.runCollection(rpcollection_file,
                                         taxo_id,
                                         rpcollection_file,
                                         is_cleanup=False,
                                         uniprot_aa_length=SELENZYME_UNIPROT_AA_LENGTH,
                                         data_dir=SELENZYNE_DATA_DIR,
                                         pc=SELENZYME_PC,
                                         rpcache=GLOBAL_RPCACHE):
            return fail('rpSelenzyme has encountered a problem.... '
                        'please investigate futher')
        if not rpGlobalScore.runCollection(rpcollection_file,
                                           rpcollection_file,
                                           rpcache=GLOBAL_RPCACHE):
            return fail('rpGlobalScore has encountered a problem.... '
                        'please investigate futher')
        # rpcollection_file is a path, so no seek() is needed before sending
        return send_file(rpcollection_file, as_attachment=True,
                         attachment_filename='rpcollection.tar.xz',
                         mimetype='application/x-tar')
def test_genericModel(self):
    """genericModel builds a model with compartment and units in one call."""
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        new = rpSBML('test')
        new.genericModel('test_name', 'test_id', 'MNXC3')
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        new.writeSBML(out_path)
        # close the file deterministically instead of leaking the handle
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             'f1de251a278b40ef0448579e308edf9d')
def test_createModel(self):
    """createModel produces a deterministic, empty SBML model."""
    with tempfile.TemporaryDirectory() as tmp_output_folder:
        new = rpSBML('test')
        new.createModel('test_name', 'test_id')
        out_path = os.path.join(tmp_output_folder, 'test.xml')
        new.writeSBML(out_path)
        # close the file deterministically instead of leaking the handle
        with open(out_path, 'rb') as fh:
            self.assertEqual(hashlib.md5(fh.read()).hexdigest(),
                             '79b9063da98b5bd8286afab444f52227')
def test_updateBRSynthPathway(self):
    """updateBRSynthPathway writes dict values back into the pathway annotations."""
    rpsbml = rpSBML('test', path=os.path.join('data', 'rpsbml', 'rpsbml.xml'))
    snapshot = rpsbml.asDict()
    # inject a synthetic entry and push it back into the model
    snapshot['pathway']['brsynth']['test'] = {'value': 99}
    rpsbml.updateBRSynthPathway(snapshot)
    # re-reading the model must reflect the injected value
    round_tripped = rpsbml.asDict()['pathway']['brsynth']['test']['value']
    self.assertEqual(round_tripped, 99)