def export_dataStage02IsotopomerFluxMap_js(self,analysis_id_I,simulation_id_I = None,data_dir_I="tmp",model_id_I='iJO1366'):
    '''Export the fitted net flux map for viewing with Escher.

    Input:
    analysis_id_I = string, analysis id used to look up the simulation
    simulation_id_I = string, optional explicit simulation id; when omitted,
        the first simulation found for the analysis is used
    data_dir_I = string, 'tmp' writes ddt_data.js to the visualization tmp
        directory; 'data_json' returns the js objects instead of writing
    model_id_I = string, model whose Escher map rows are exported
        (default 'iJO1366', the previously hard-coded value)

    Raises:
    ValueError if data_dir_I is not 'tmp' or 'data_json'
        (previously this fell through to a NameError on filename_str)
    '''
    MFAmethods = MFA_methods();
    # Resolve the simulation id: an explicit argument wins, otherwise look
    # it up from the analysis.
    if not simulation_id_I:
        simulation_ids = self.get_simulationID_analysisID_dataStage02IsotopomerAnalysis(analysis_id_I);
        if not simulation_ids:
            # BUG FIX: previously execution continued with simulation_id_I=None
            # and queried the fitted fluxes with a None id; bail out instead.
            print('No simulation found for the analysis_id ' + analysis_id_I);
            return;
        if len(simulation_ids)>1:
            print('More than 1 simulation found for the analysis_id ' + analysis_id_I);
        simulation_id_I = simulation_ids[0];
    # Collect the flux rows to display; rows with |flux| >= 10.0 are
    # excluded (presumably to keep the Escher color scale readable — the
    # threshold was hard-coded in the original).
    flux = [];
    flux_tmp = self.get_rows_simulationID_dataStage02IsotopomerfittedNetFluxes(simulation_id_I);
    for i,row in enumerate(flux_tmp):
        observable = MFAmethods.check_observableNetFlux(row['flux'],row['flux_lb'],row['flux_ub']);
        if not row['flux'] is None and row['flux']!=0.0 and np.abs(row['flux']) < 10.0:
            flux_tmp[i]['simulation_dateAndTime'] = self.convert_datetime2string(row['simulation_dateAndTime']);
            flux_tmp[i]['observable'] = 'Yes' if observable else 'No';
            flux.append(flux_tmp[i]);
        elif row['flux']==0.0 and np.abs(np.mean([row['flux_lb'],row['flux_ub']]))<10.0:
            # zero point estimate: keep the row when the midpoint of the
            # bounds is also within the display threshold
            flux_tmp[i]['simulation_dateAndTime'] = self.convert_datetime2string(row['simulation_dateAndTime']);
            flux_tmp[i]['observable'] = 'Yes' if observable else 'No';
            flux.append(flux_tmp[i]);
    # Get the Escher map rows for the model (renamed from `map`, which
    # shadowed the builtin).
    eschermaps = self.get_rows_modelID_modelsEschermaps(model_id_I);
    # chart parameters for the ddt (data-driven-templates) container
    data1_keys = ['simulation_id','rxn_id','simulation_dateAndTime','flux_units','observable'];
    data1_nestkeys = ['simulation_id'];
    data1_keymap = {'values':'flux','key':'rxn_id'};
    data2_keys = ['model_id','eschermap_id'];
    data2_nestkeys = ['model_id'];
    data2_keymap = {'data':'eschermap_json'};
    # make the data object: index 0 = reaction data, index 1 = map data
    dataobject_O = [{"data":flux,"datakeys":data1_keys,"datanestkeys":data1_nestkeys},
                    {"data":eschermaps,"datakeys":data2_keys,"datanestkeys":data2_nestkeys}];
    # make the tile parameter objects (two filter menus, the Escher map
    # tile, and a table of the flux rows)
    formtileparameters1_O = {'tileheader':'Filter menu','tiletype':'html','tileid':"filtermenu1",'rowid':"row1",'colid':"col1",
                             'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-6"};
    formparameters1_O = {'htmlid':'filtermenuform1',"htmltype":'form_01',"formsubmitbuttonidtext":{'id':'submit1','text':'submit'},"formresetbuttonidtext":{'id':'reset1','text':'reset'},"formupdatebuttonidtext":{'id':'update1','text':'update'}};
    formtileparameters1_O.update(formparameters1_O);
    formtileparameters2_O = {'tileheader':'Filter menu','tiletype':'html','tileid':"filtermenu2",'rowid':"row1",'colid':"col2",
                             'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-6"};
    formparameters2_O = {'htmlid':'filtermenuform2',"htmltype":'form_01',"formsubmitbuttonidtext":{'id':'submit2','text':'submit'},"formresetbuttonidtext":{'id':'reset2','text':'reset'},"formupdatebuttonidtext":{'id':'update2','text':'update'}};
    formtileparameters2_O.update(formparameters2_O);
    htmlparameters_O = {"htmlkeymap":[data1_keymap,data2_keymap],
                        'htmltype':'escher_01','htmlid':'html1',
                        'escherdataindex':{"reactiondata":0,"mapdata":1},
                        'escherembeddedcss':None,
                        'escheroptions':None};
    htmltileparameters_O = {'tileheader':'Escher map','tiletype':'html','tileid':"tile1",'rowid':"row2",'colid':"col1",
                            'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
    htmltileparameters_O.update(htmlparameters_O);
    tableparameters_O = {"tabletype":'responsivetable_01',
                         'tableid':'table1',
                         "tablefilters":None,
                         "tableclass":"table table-condensed table-hover",
                         'tableformtileid':'filtermenu1','tableresetbuttonid':'reset1','tablesubmitbuttonid':'submit1'};
    tabletileparameters_O = {'tileheader':'Flux precision','tiletype':'table','tileid':"tile2",'rowid':"row3",'colid':"col1",
                             'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
    tabletileparameters_O.update(tableparameters_O);
    parametersobject_O = [formtileparameters1_O,formtileparameters2_O,htmltileparameters_O,tabletileparameters_O];
    tile2datamap_O = {"filtermenu1":[0],"filtermenu2":[1],"tile1":[0,1],"tile2":[0]};
    filtermenuobject_O = [{"filtermenuid":"filtermenu1","filtermenuhtmlid":"filtermenuform1",
                           "filtermenusubmitbuttonid":"submit1","filtermenuresetbuttonid":"reset1",
                           "filtermenuupdatebuttonid":"update1"},
                          {"filtermenuid":"filtermenu2","filtermenuhtmlid":"filtermenuform2",
                           "filtermenusubmitbuttonid":"submit2","filtermenuresetbuttonid":"reset2",
                           "filtermenuupdatebuttonid":"update2"}];
    # dump the data to a js file (or return the js objects directly)
    ddtutilities = ddt_container(parameters_I = parametersobject_O,data_I = dataobject_O,tile2datamap_I = tile2datamap_O,filtermenu_I = filtermenuobject_O);
    if data_dir_I=='data_json':
        return ddtutilities.get_allObjects_js();
    if data_dir_I=='tmp':
        filename_str = self.settings['visualization_data'] + '/tmp/ddt_data.js'
    else:
        # BUG FIX: any other value previously raised NameError on
        # filename_str at the open() below; fail with a clear message.
        raise ValueError('unsupported data_dir_I: ' + str(data_dir_I));
    with open(filename_str,'w') as file:
        file.write(ddtutilities.get_allObjects());
def execute_findNetFluxSignificantDifferences(self,analysis_id_I,
        criteria_I = 'flux_lb/flux_ub',
        simulation_ids_I=None,simulation_dateAndTimes_I = None,
        rxn_ids_I = None,flux_units_I = None,
        control_simulation_id_I=None,
        control_simulation_dateAndTime_I=None,
        redundancy_I=False,
        observable_only_I=False):
    """Find fluxes that are significantly different between simulations.

    Input:
    analysis_id_I = string
    criteria_I = string,
        'flux_lb/flux_ub': use flux_lb and flux_ub to determine significance (default)
        'flux_mean/flux_stdev': use the flux_mean and flux_stdev to determine significance
    simulation_ids_I / simulation_dateAndTimes_I = parallel lists restricting
        the simulations compared (both must be given to take effect)
    rxn_ids_I / flux_units_I = optional lists restricting the comparison
    control_simulation_id_I = string, simulation_id to compare all other simulation_ids to
    control_simulation_dateAndTime_I = string, simulation_dateAndTime of the control
    redundancy_I = boolean, if true, all pairs will be compared; if false
        (default), only unique comparisons will be made
    observable_only_I = boolean, if true, only observable fluxes will be
        compared; if false (default), observable and unobservable fluxes
        will be compared

    Results are written with add_data_stage02_isotopomer_fittedNetFluxDifferences.
    """
    # BUG FIX: defaults changed from mutable [] to None (shared-default
    # pitfall); behavior for callers passing lists or omitting them is
    # unchanged.
    mfamethods = MFA_methods();
    data_O = [];
    print('executing findNetFluxSignificantDifferences...')
    # get the simulation_ids and simulation dateAndTimes to compare
    if simulation_ids_I and simulation_dateAndTimes_I:
        # BUG FIX: copy the caller's lists — the control-reordering block
        # below mutates them with pop/insert, which previously corrupted
        # the caller's arguments in place.
        simulation_ids = list(simulation_ids_I);
        simulation_dateAndTimes = list(simulation_dateAndTimes_I);
    else:
        simulation_ids = [];
        simulation_dateAndTimes = [];
        # get the unique simulation ids for the analysis
        simulation_ids_unique = self.get_simulationID_analysisID_dataStage02IsotopomerAnalysis(analysis_id_I);
        for simulation_id in simulation_ids_unique:
            # expand each simulation id once per dateAndTime so the two
            # lists stay index-aligned
            simulation_dateAndTimes_tmp = self.get_simulationDateAndTimes_simulationID_dataStage02IsotopomerfittedNetFluxes(simulation_id);
            simulation_ids_tmp = [simulation_id for x in simulation_dateAndTimes_tmp];
            simulation_dateAndTimes.extend(simulation_dateAndTimes_tmp)
            simulation_ids.extend(simulation_ids_tmp)
    if control_simulation_id_I and control_simulation_dateAndTime_I:
        # move the control simulation to the front so it becomes the single
        # simulation_id_1 in the loop below
        index = simulation_ids.index(control_simulation_id_I);
        value = simulation_ids.pop(index);
        simulation_ids.insert(0, value);
        control_simulation_dateAndTime_I = self.convert_string2datetime(control_simulation_dateAndTime_I);
        index = simulation_dateAndTimes.index(control_simulation_dateAndTime_I);
        value = simulation_dateAndTimes.pop(index)
        simulation_dateAndTimes.insert(0, value);
    for simulation_cnt_1, simulation_id_1 in enumerate(simulation_ids):
        print("calculating netFluxDifferences for simulation_id " + simulation_id_1);
        # with a control, only the control (index 0) acts as simulation_id_1
        if control_simulation_id_I and control_simulation_dateAndTime_I and simulation_cnt_1>0: break;
        # prevents redundancy: the last simulation has no partners left
        if simulation_cnt_1+1 >= len(simulation_ids): break;
        # get the units
        if flux_units_I:
            flux_units = flux_units_I;
        else:
            flux_units = self.get_fluxUnits_simulationIDAndSimulationDateAndTime_dataStage02IsotopomerfittedNetFluxes(simulation_id_1,simulation_dateAndTimes[simulation_cnt_1])
        for flux_unit in flux_units:
            print("calculating netFluxDifferences for flux_units " + flux_unit);
            # get the rxn_ids
            if rxn_ids_I:
                rxn_ids = rxn_ids_I;
            else:
                rxn_ids = self.get_rxnIDs_simulationIDAndSimulationDateAndTimeAndFluxUnits_dataStage02IsotopomerfittedNetFluxes(simulation_id_1,simulation_dateAndTimes[simulation_cnt_1],flux_unit);
            for rxn_id in rxn_ids:
                # BUG FIX: corrected log-message typo "netFluxDifferes"
                print("calculating netFluxDifferences for rxn_id " + rxn_id);
                # get simulation_id_1 flux data
                flux_1,flux_stdev_1,flux_lb_1,flux_ub_1,flux_units_1=self.get_flux_simulationIDAndSimulationDateAndTimeAndFluxUnitsAndRxnID_dataStage02IsotopomerfittedNetFluxes(simulation_id_1,simulation_dateAndTimes[simulation_cnt_1],flux_unit,rxn_id);
                if not mfamethods.check_criteria(flux_1,flux_stdev_1,flux_lb_1,flux_ub_1, criteria_I): continue;
                # choose the comparison partners: everything (redundant) or
                # only the simulations after simulation_id_1 (unique pairs)
                if redundancy_I:
                    list_2 = simulation_ids;
                else:
                    list_2 = simulation_ids[simulation_cnt_1+1:];
                if observable_only_I:
                    observable_1 = mfamethods.check_observableNetFlux(flux_1,flux_lb_1,flux_ub_1)
                    if not observable_1: continue;
                for cnt,simulation_id_2 in enumerate(list_2):
                    # map the partner index back into the full lists
                    if redundancy_I:
                        simulation_cnt_2 = cnt;
                    else:
                        simulation_cnt_2 = simulation_cnt_1+cnt+1;
                    # never compare a simulation with itself
                    if simulation_cnt_2 == simulation_cnt_1: continue;
                    # get simulation_id_2 flux data
                    flux_2,flux_stdev_2,flux_lb_2,flux_ub_2,flux_units_2=self.get_flux_simulationIDAndSimulationDateAndTimeAndFluxUnitsAndRxnID_dataStage02IsotopomerfittedNetFluxes(simulation_id_2,simulation_dateAndTimes[simulation_cnt_2],flux_unit,rxn_id);
                    if not mfamethods.check_criteria(flux_2,flux_stdev_2,flux_lb_2,flux_ub_2, criteria_I): continue;
                    if observable_only_I:
                        observable_2 = mfamethods.check_observableNetFlux(flux_2,flux_lb_2,flux_ub_2);
                        if not observable_2: continue;
                    flux_diff,flux_distance,fold_change,significant = mfamethods.calculate_fluxDifference(
                        flux_1,flux_stdev_1,flux_lb_1,flux_ub_1,flux_units_1,
                        flux_2,flux_stdev_2,flux_lb_2,flux_ub_2,flux_units_2,
                        criteria_I = criteria_I);
                    # record the data
                    data_O.append({
                        'analysis_id':analysis_id_I,
                        'simulation_id_1':simulation_id_1,
                        'simulation_dateAndTime_1':simulation_dateAndTimes[simulation_cnt_1],
                        'simulation_id_2':simulation_id_2,
                        'simulation_dateAndTime_2':simulation_dateAndTimes[simulation_cnt_2],
                        'rxn_id':rxn_id,
                        'flux_difference':flux_diff,
                        'significant':significant,
                        'significant_criteria':criteria_I,
                        'flux_units':flux_unit,
                        'fold_change_geo':fold_change,
                        'flux_distance':flux_distance,
                        'used_':True,
                        'comment_':None});
    # add data to the database
    self.add_data_stage02_isotopomer_fittedNetFluxDifferences(data_O);
def export_dataStage02IsotopomerFittedNetFluxes_js(self,analysis_id_I = None, simulation_ids_I = [], bullet_chart_I = True, data_dir_I="tmp"):
    '''Plot the flux precision for a given set of simulations and a given set of reactions.

    Input:
    analysis_id_I = string, analysis id (used when simulation_ids_I is empty)
    Optional Input:
    simulation_ids_I = [] of strings, simulation_ids in a specific order
    bullet_chart_I = True: show the flux estimation +/- StDev
                     False: show the 95% confidence intervals and flux estimation +/- StDev
    data_dir_I = string, 'tmp' writes ddt_data.js to the visualization tmp
        directory; 'data_json' returns the js objects instead of writing

    Raises:
    ValueError if data_dir_I is not 'tmp' or 'data_json'
        (previously this fell through to a NameError on filename_str)
    '''
    MFAmethods = MFA_methods();
    # resolve the simulations: an explicit ordered list wins, otherwise
    # look them up from the analysis
    if simulation_ids_I:
        simulation_ids = simulation_ids_I;
    else:
        simulation_ids = self.get_simulationID_analysisID_dataStage02IsotopomerAnalysis(analysis_id_I);
    data_O =[];
    for simulation_id in simulation_ids:
        # get the fitted net flux rows for each simulation
        flux_data = self.get_rows_simulationID_dataStage02IsotopomerfittedNetFluxes(simulation_id);
        for i,row in enumerate(flux_data):
            observable = MFAmethods.check_observableNetFlux(row['flux'],row['flux_lb'],row['flux_ub']);
            # NOTE(review): the original also had an `elif row['flux']==0.0`
            # branch, but it was unreachable — this branch already handles
            # every non-None flux (including 0.0), and None==0.0 is False —
            # so it was removed along with a large block of commented-out
            # variants of this loop.
            if not row['flux'] is None:
                row['simulation_dateAndTime'] = self.convert_datetime2string(row['simulation_dateAndTime']);
                # +/- one standard deviation band for the bullet chart
                row['flux_lb_stdev'] = row['flux'] - row['flux_stdev'];
                row['flux_ub_stdev'] = row['flux'] + row['flux_stdev'];
                # midpoint of the confidence bounds
                row['flux_mean'] = np.mean([row['flux_lb'],row['flux_ub']]);
                row['observable'] = 'Yes' if observable else 'No';
                data_O.append(row);
    # chart parameters for the ddt (data-driven-templates) container
    data1_keys = ['simulation_id','rxn_id','simulation_dateAndTime','flux_units','observable'];
    data1_nestkeys = ['rxn_id'];
    if bullet_chart_I:
        # bullet chart: mean flux with the stdev band as bounds
        data1_keymap = {'xdata':'rxn_id',
                        'ydatamean':'flux',
                        'ydatalb':'flux_lb_stdev',
                        'ydataub':'flux_ub_stdev',
                        'serieslabel':'simulation_id',
                        'featureslabel':'rxn_id'};
    else:
        # box-and-whiskers: confidence bounds as whiskers, stdev band as
        # the interquartile box, flux as mean/median
        data1_keymap = {'xdata':'rxn_id',
                        'ydatamean':'flux',
                        'ydatalb':'flux_lb',
                        'ydataub':'flux_ub',
                        'ydataiq1':'flux_lb_stdev',
                        'ydataiq3':'flux_ub_stdev',
                        'ydatamedian':'flux',
                        'serieslabel':'simulation_id',
                        'featureslabel':'rxn_id'};
    # make the data object
    dataobject_O = [{"data":data_O,"datakeys":data1_keys,"datanestkeys":data1_nestkeys}];
    # make the tile parameter objects (filter menu, svg chart, table)
    formtileparameters_O = {'tileheader':'Filter menu','tiletype':'html','tileid':"filtermenu1",'rowid':"row1",'colid':"col1",
                            'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
    formparameters_O = {'htmlid':'filtermenuform1',"htmltype":'form_01',"formsubmitbuttonidtext":{'id':'submit1','text':'submit'},"formresetbuttonidtext":{'id':'reset1','text':'reset'},"formupdatebuttonidtext":{'id':'update1','text':'update'}};
    formtileparameters_O.update(formparameters_O);
    svgparameters_O = {"svgtype":'boxandwhiskersplot2d_02',"svgkeymap":[data1_keymap,data1_keymap],
                       'svgid':'svg1',
                       "svgmargin":{ 'top': 50, 'right': 350, 'bottom': 50, 'left': 50 },
                       "svgwidth":750,"svgheight":350,
                       "svgx1axislabel":"rxn_id","svgy1axislabel":"flux",
                       'svgformtileid':'filtermenu1','svgresetbuttonid':'reset1','svgsubmitbuttonid':'submit1'};
    svgtileparameters_O = {'tileheader':'Flux precision','tiletype':'svg','tileid':"tile2",'rowid':"row1",'colid':"col1",
                           'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
    svgtileparameters_O.update(svgparameters_O);
    tableparameters_O = {"tabletype":'responsivetable_01',
                         'tableid':'table1',
                         "tablefilters":None,
                         "tableclass":"table table-condensed table-hover",
                         'tableformtileid':'filtermenu1','tableresetbuttonid':'reset1','tablesubmitbuttonid':'submit1'};
    tabletileparameters_O = {'tileheader':'Flux precision','tiletype':'table','tileid':"tile3",'rowid':"row1",'colid':"col1",
                             'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
    tabletileparameters_O.update(tableparameters_O);
    parametersobject_O = [formtileparameters_O,svgtileparameters_O,tabletileparameters_O];
    tile2datamap_O = {"filtermenu1":[0],"tile2":[0],"tile3":[0]};
    # dump the data to a js file (or return the js objects directly)
    ddtutilities = ddt_container(parameters_I = parametersobject_O,data_I = dataobject_O,tile2datamap_I = tile2datamap_O,filtermenu_I = None);
    if data_dir_I=='data_json':
        return ddtutilities.get_allObjects_js();
    if data_dir_I=='tmp':
        filename_str = self.settings['visualization_data'] + '/tmp/ddt_data.js'
    else:
        # BUG FIX: any other value previously raised NameError on
        # filename_str at the open() below; fail with a clear message.
        raise ValueError('unsupported data_dir_I: ' + str(data_dir_I));
    with open(filename_str,'w') as file:
        file.write(ddtutilities.get_allObjects());