Code example #1
File: test_variability.py Project: opencobra/cobrapy
def test_fva_infeasible(model):
    """Test FVA infeasibility."""
    infeasible_model = model.copy()
    infeasible_model.reactions.get_by_id("EX_glc__D_e").lower_bound = 0
    # ensure that an infeasible model does not run FVA
    with pytest.raises(Infeasible):
        flux_variability_analysis(infeasible_model)
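The cobrapy test snippets in this collection rely on a handful of module-level imports; a minimal sketch of those imports is shown below (the exact import paths, in particular for Infeasible, are an assumption based on current cobrapy).

import numpy as np
import pytest

from cobra.exceptions import Infeasible  # assumed location of the Infeasible error
from cobra.flux_analysis import flux_variability_analysis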
Code example #2
File: test_variability.py Project: cgaray/cobrapy
def test_pfba_flux_variability(
    model: Model,
    pfba_fva_results: pd.DataFrame,
    fva_results: pd.DataFrame,
    all_solvers: List[str],
) -> None:
    """Test FVA using pFBA."""
    model.solver = all_solvers
    with pytest.warns(UserWarning):
        flux_variability_analysis(
            model,
            pfba_factor=0.1,
            reaction_list=model.reactions[1::3],
            processes=1,
        )
    fva_out = flux_variability_analysis(model,
                                        pfba_factor=1.1,
                                        reaction_list=model.reactions,
                                        processes=1)
    fva_out.sort_index(inplace=True)
    assert np.allclose(fva_out, pfba_fva_results)
    abs_fva_out = fva_out.dropna().abs()
    abs_fva_results = fva_results.dropna().abs()
    comparison = np.isclose(abs_fva_out,
                            abs_fva_results) | (abs_fva_out < abs_fva_results)
    assert comparison["minimum"].all()
    assert comparison["maximum"].all()
Code example #3
File: model_data__.py Project: mmoskon/Grohar
    def run_FVA(self, r):
        print("Running FVA for reaction", r)
        if self.perturbed_model:
            b = fva.flux_variability_analysis(self.perturbed_model, [r])[r]
        else:
            b = fva.flux_variability_analysis(self.model, [r])[r]

        return [b['minimum'], b['maximum']]
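With cobrapy versions in which flux_variability_analysis returns a pandas DataFrame (as in most examples in this collection), the same lookup can be sketched as follows; run_fva_dataframe is a hypothetical name and the snippet assumes a cobrapy Model instance.

from cobra.flux_analysis import flux_variability_analysis

def run_fva_dataframe(model, reaction_id):
    # The result is a DataFrame indexed by reaction id with "minimum" and "maximum" columns.
    fva = flux_variability_analysis(model, reaction_list=[reaction_id])
    return [fva.at[reaction_id, "minimum"], fva.at[reaction_id, "maximum"]]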
Code example #4
def test_flux_variability_loopless(model, all_solvers):
    """Test loopless FVA."""
    model.solver = all_solvers
    loop_reactions = [model.reactions.get_by_id(rid) for rid in ("FRD7", "SUCDi")]
    fva_normal = flux_variability_analysis(model, reaction_list=loop_reactions)
    fva_loopless = flux_variability_analysis(
        model, reaction_list=loop_reactions, loopless=True
    )

    assert not np.allclose(fva_normal["maximum"], fva_normal["minimum"])
    assert np.allclose(fva_loopless["maximum"], fva_loopless["minimum"])
Code example #5
File: test_variability.py Project: opencobra/cobrapy
def test_flux_variability_loopless(model, all_solvers):
    """Test loopless FVA."""
    model.solver = all_solvers
    loop_reactions = [model.reactions.get_by_id(rid)
                      for rid in ("FRD7", "SUCDi")]
    fva_normal = flux_variability_analysis(
        model, reaction_list=loop_reactions)
    fva_loopless = flux_variability_analysis(
        model, reaction_list=loop_reactions, loopless=True)

    assert not np.allclose(fva_normal["maximum"],
                           fva_normal["minimum"])
    assert np.allclose(fva_loopless["maximum"],
                       fva_loopless["minimum"])
Code example #6
File: write_fva.py Project: pcm32/iso2flux
def write_fva(model,fn="reaction_fluxes.xlsx",fraction=1,remove0=True,change_threshold=0.001,mode="full",lp_tolerance_feasibility=1e-6,flux_precision=1e-3):
    precision=max(int(-1*(math.log10(flux_precision))),6)
    fva=flux_variability_analysis(model,fraction_of_optimum=fraction,tolerance_feasibility=lp_tolerance_feasibility)
    if mode=="full":
       row=["ID","Name","Stoichiometry","Minimum","Maximum"]
    else:
       row=["ID","Name","Flux"]
    sheet_row_data_dict={"fva":[row]}   
    for x in sorted(fva,key=lambda v: v.upper()):  
         if "_RATIO" in x:
            continue
         if abs(fva[x]["maximum"])>0.000001 or abs(fva[x]["minimum"])>0.000001 or remove0==False:
            row=[]
            reaction=model.reactions.get_by_id(x)
            row.append(reaction.id)
            row.append(reaction.name)
            minimum=fva[x]["minimum"]
            maximum=fva[x]["maximum"]
            if mode=="full":
               row.append(reaction.reaction)
               row.append(round(minimum,precision))
               row.append(round(maximum,precision))
            else:
               if (maximum-minimum)>change_threshold:
                  string=("%s < "+reaction.id+" <%s ")%( round(minimum,precision),round(maximum,precision))
               else:
                  string=str(round(maximum,precision)) 
               row.append(string)
            sheet_row_data_dict["fva"].append(row)
            #f.write(reaction.name+";"+reaction.subsystem+" ;"+reaction.reaction+";"+str(fva[x]["minimum"])+";"+str(fva[x]["maximum"])+"\n")
    print 1
    write_spreadsheet(file_name=fn,sheet_row_data_dict=sheet_row_data_dict,force_extension=True)
Code example #7
File: test_variability.py Project: opencobra/cobrapy
def test_flux_variability(model, fva_results, all_solvers):
    """Test FVA."""
    model.solver = all_solvers
    fva_out = flux_variability_analysis(model, reaction_list=model.reactions,
                                        processes=1)
    fva_out.sort_index(inplace=True)
    assert np.allclose(fva_out, fva_results)
Code example #8
File: test_variability.py Project: opencobra/cobrapy
def test_fva_minimization(model):
    """Test minimization using FVA."""
    model.objective = model.reactions.EX_glc__D_e
    model.objective_direction = 'min'
    solution = flux_variability_analysis(model, fraction_of_optimum=.95)
    assert solution.at['EX_glc__D_e', 'minimum'] == -10.0
    assert solution.at['EX_glc__D_e', 'maximum'] == -9.5
Code example #9
def test_flux_variability(model, fva_results, all_solvers):
    """Test FVA."""
    model.solver = all_solvers
    fva_out = flux_variability_analysis(model, reaction_list=model.reactions)
    for name, result in iteritems(fva_out.T):
        for k, v in iteritems(result):
            assert abs(fva_results[k][name] - v) < 0.00001
Code example #10
File: variability.py Project: jonm4024/cobrapy
def find_blocked_reactions(cobra_model, the_reactions=None, allow_loops=True,
                            solver='glpk', the_problem='return',
                           tolerance_optimality=1e-9,
                           open_exchanges=False, **kwargs):
    """Finds reactions that cannot carry a flux with the current
    exchange reaction settings for cobra_model, using flux variability
    analysis.
    
    """
    print 'This needs to be updated to deal with external boundaries'
    cobra_model = cobra_model.copy()
    blocked_reactions = []
    if not the_reactions:
        the_reactions = cobra_model.reactions
    if open_exchanges:
        print 'DEPRECATED: Move to using the Reaction.boundary attribute'
        exchange_reactions = [x for x in cobra_model.reactions
                              if x.startswith('EX')]
        for the_reaction in exchange_reactions:
            if the_reaction.lower_bound >= 0:
                the_reaction.lower_bound = -1000
            if the_reaction.upper_bound >= 0:
                the_reaction.upper_bound = 1000
    flux_span_dict = flux_variability_analysis(cobra_model,
                                               fraction_of_optimum = 0.,
                                               the_reactions = the_reactions,
                                               allow_loops = allow_loops,
                                               solver = solver,
                                               the_problem = the_problem,
                                               tolerance_optimality = tolerance_optimality,
                                               **kwargs)
    blocked_reactions = [k for k, v in flux_span_dict.items()\
                          if max(map(abs,v.values())) < tolerance_optimality]
    return(blocked_reactions)
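For comparison, the same blocked-reaction check can be sketched directly on top of the current DataFrame-returning API; the function name and the tolerance default are assumptions, not cobrapy's own find_blocked_reactions implementation.

from cobra.flux_analysis import flux_variability_analysis

def find_blocked_reactions_fva(model, tolerance=1e-9):
    # A reaction counts as blocked when both FVA bounds are numerically zero
    # even without any requirement on the objective (fraction_of_optimum=0).
    fva = flux_variability_analysis(model, fraction_of_optimum=0.0)
    return [
        rxn_id
        for rxn_id in fva.index
        if abs(fva.at[rxn_id, "minimum"]) < tolerance
        and abs(fva.at[rxn_id, "maximum"]) < tolerance
    ]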
Code example #11
File: variability.py Project: jonm4024/cobrapy
def flux_variability_analysis_wrapper(keywords):
    """Provides an interface to call flux_variability_analysis from ppmap
    
    """
    try:
        from cPickle import dump
    except:
        from pickle import dump
    import sys
    #Need to do a top level import because of how the parallel processes are called
    sys.path.insert(0, "../..")
    from cobra.flux_analysis.variability import flux_variability_analysis
    sys.path.pop(0)
    results_dict = {}
    new_objective = keywords.pop('new_objective')
    output_directory = None
    if 'output_directory' in keywords:
        output_directory = keywords.pop('output_directory')
    if not hasattr(new_objective, '__iter__'):
        new_objective = [new_objective]
    for the_objective in new_objective:
        the_result = results_dict[the_objective] = flux_variability_analysis(**keywords)
        if output_directory:
            with open('%s%s.pickle'%(output_directory,
                                     the_objective), 'w') as out_file:
                dump(the_result, out_file)
    if len(new_objective) == 1:
        return the_result
    else:
        return results_dict
Code example #12
File: test_variability.py Project: cgaray/cobrapy
def test_parallel_flux_variability(model: Model, fva_results: pd.DataFrame,
                                   all_solvers: List[str]) -> None:
    """Test parallel FVA."""
    model.solver = all_solvers
    fva_out = flux_variability_analysis(model, processes=2)
    fva_out.sort_index(inplace=True)
    assert np.allclose(fva_out, fva_results)
Code example #13
File: test_variability.py Project: cgaray/cobrapy
def test_fva_minimization(model: Model) -> None:
    """Test minimization using FVA."""
    model.objective = model.reactions.EX_glc__D_e
    model.objective_direction = "min"
    solution = flux_variability_analysis(model, fraction_of_optimum=0.95)
    assert solution.at["EX_glc__D_e", "minimum"] == -10.0
    assert solution.at["EX_glc__D_e", "maximum"] == -9.5
Code example #14
def test_flux_variability(model, fva_results, all_solvers):
    """Test FVA."""
    model.solver = all_solvers
    fva_out = flux_variability_analysis(model,
                                        reaction_list=model.reactions,
                                        processes=1)
    fva_out.sort_index(inplace=True)
    assert np.allclose(fva_out, fva_results)
Code example #15
File: test_variability.py Project: opencobra/cobrapy
def test_loopless_pfba_fva(model):
    loop_reactions = [model.reactions.get_by_id(rid)
                      for rid in ("FRD7", "SUCDi")]
    fva_loopless = flux_variability_analysis(
        model, pfba_factor=1.1, reaction_list=loop_reactions,
        loopless=True, processes=1)
    assert np.allclose(fva_loopless["maximum"],
                       fva_loopless["minimum"])
Code example #16
File: model_data.py Project: mmoskon/Grohar
    def run_FVA(self, r_id):
        print("Running FVA for reaction", r_id)
        if cobra.__version__ >= '0.9.1':
            r = self.model.reactions[self.reaction_position[r_id]]
        else:
            r = r_id

        if self.perturbed_model:
            b = fva.flux_variability_analysis(self.perturbed_model, [r])
        else:
            b = fva.flux_variability_analysis(self.model, [r])

        if cobra.__version__ >= '0.9.1':
            b = b.to_dict()
            return [b['minimum'][r_id], b['maximum'][r_id]]
        else:
            return [b[r_id]['minimum'], b[r_id]['maximum']]
Code example #17
File: flux_analysis.py Project: bbarker/cobrapy
    def test_flux_variability(self):
        fva_results = {
            '5DGLCNtex': {'minimum': 0.0, 'maximum': 0.0},
            'ABTA': {'minimum': 0.0, 'maximum': 0.0},
            '5DOAN': {'minimum': 0.0, 'maximum': 0.0},
            'A5PISO': {'minimum': 0.00692, 'maximum': 0.00692},
            'AACPS1': {'minimum': 0.0, 'maximum': 0.0},
            'AACPS2': {'minimum': 0.0, 'maximum': 0.0},
            'ACALDtex': {'minimum': 0.0, 'maximum': 0.0},
            'AACPS3': {'minimum': 0.0, 'maximum': 0.0},
            'AACPS4': {'minimum': 0.0, 'maximum': 0.0},
            'ABUTD': {'minimum': 0.0, 'maximum': 0.0},
            'AACPS5': {'minimum': 0.0, 'maximum': 0.0},
            'AACPS6': {'minimum': 0.0, 'maximum': 0.0},
            'AACPS7': {'minimum': 0.0, 'maximum': 0.0},
            'AACPS8': {'minimum': 0.0, 'maximum': 0.0},
            'AACPS9': {'minimum': 0.0, 'maximum': 0.0},
            'AACTOOR': {'minimum': 0.0, 'maximum': 0.0},
            'ABUTt2pp': {'minimum': 0.0, 'maximum': 0.0},
            '3OAS140': {'minimum': 0.50419, 'maximum': 0.50419},
            '3OAS141': {'minimum': 0.03748, 'maximum': 0.03748},
            '3OAS160': {'minimum': 0.41769, 'maximum': 0.41769},
            '3OAS161': {'minimum': 0.03748, 'maximum': 0.03748},
            '3OAS180': {'minimum': 0.01071, 'maximum': 0.01071},
            '3OAS181': {'minimum': 0.01606, 'maximum': 0.01606},
            'ABUTtex': {'minimum': 0.0, 'maximum': 0.0},
            '3OAS60': {'minimum': 0.54399, 'maximum': 0.54399},
            '3OAS80': {'minimum': 0.54399, 'maximum': 0.54399},
            'AAMYL': {'minimum': 0.0, 'maximum': 0.0},
            '3PEPTabcpp': {'minimum': 0.0, 'maximum': 0.0},
            '3PEPTtex': {'minimum': 0.0, 'maximum': 0.0},
            '3UMPtex': {'minimum': 0.0, 'maximum': 0.0},
            '4HOXPACDtex': {'minimum': 0.0, 'maximum': 0.0},
            'ACACtex': {'minimum': 0.0, 'maximum': 0.0},
            '4PCP': {'minimum': 0.0, 'maximum': 0.0},
            '4PCPpp': {'minimum': 0.0, 'maximum': 0.0},
            'AAMYLpp': {'minimum': 0.0, 'maximum': 0.0},
            '4PEPTabcpp': {'minimum': 0.0, 'maximum': 0.0},
            '4PEPTtex': {'minimum': 0.0, 'maximum': 0.0},
            '5DGLCNR': {'minimum': 0.0, 'maximum': 0.0},
            '5DGLCNt2rpp': {'minimum': 0.0, 'maximum': 0.0},
            'ACALD': {'minimum': 3.35702, 'maximum': 7.49572}}

        infeasible_model = create_test_model()
        infeasible_model.reactions.get_by_id("EX_glyc_e").lower_bound = 0
        for solver in solver_dict:
            cobra_model = create_test_model()
            initialize_growth_medium(cobra_model, 'LB')
            fva_out = flux_variability_analysis(
                cobra_model, solver=solver,
                reaction_list=cobra_model.reactions[100:140:2])
            for the_reaction, the_range in iteritems(fva_out):
                for k, v in iteritems(the_range):
                    self.assertAlmostEqual(fva_results[the_reaction][k], v,
                                           places=5)
            # ensure that an infeasible model does not run FVA
            self.assertRaises(ValueError, flux_variability_analysis,
                              infeasible_model, solver=solver)
Code example #18
def test_loopless_pfba_fva(model):
    loop_reactions = [
        model.reactions.get_by_id(rid) for rid in ("FRD7", "SUCDi")
    ]
    fva_loopless = flux_variability_analysis(model,
                                             pfba_factor=1.1,
                                             reaction_list=loop_reactions,
                                             loopless=True,
                                             processes=1)
    assert np.allclose(fva_loopless["maximum"], fva_loopless["minimum"])
Code example #19
File: test_variability.py Project: opencobra/cobrapy
def test_pfba_flux_variability(model, pfba_fva_results,
                               fva_results, all_solvers):
    """Test FVA using pFBA."""
    model.solver = all_solvers
    with pytest.warns(UserWarning):
        flux_variability_analysis(
            model, pfba_factor=0.1, reaction_list=model.reactions[1::3],
            processes=1)
    fva_out = flux_variability_analysis(
        model, pfba_factor=1.1, reaction_list=model.reactions,
        processes=1)
    fva_out.sort_index(inplace=True)
    assert np.allclose(fva_out, pfba_fva_results)
    abs_fva_out = fva_out.dropna().abs()
    abs_fva_results = fva_results.dropna().abs()
    comparison = np.isclose(abs_fva_out, abs_fva_results) | (
            abs_fva_out < abs_fva_results)
    assert comparison["minimum"].all()
    assert comparison["maximum"].all()
Code example #20
def removeBlockedReactionsLoopless(GEM):
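    # Remove every reaction whose loopless FVA range collapses to [0, 0] (rounded to 8 decimals), i.e. a reaction that can carry no flux.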
    fva = flux_variability_analysis(GEM,
                                    reaction_list=None,
                                    fraction_of_optimum=0,
                                    processes=2,
                                    loopless=True).round(decimals=8)
    for rxn_id in fva.index:
        v_min, v_max = fva.loc[rxn_id].minimum, fva.loc[rxn_id].maximum
        if v_min == 0 and v_max == 0:
            GEM.reactions.get_by_id(rxn_id).remove_from_model(
                remove_orphans=True)
Code example #21
File: minimal_flux.py Project: pcm32/iso2flux
def create_minimal_fux_model(metabolic_model,fraction_of_optimum_objective=0.8, fraction_of_flux_minimum=2,boundaries_precision=0.001,label_model=None,metabolite_list_file_name=None):
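    # Work on an irreversible copy of the model: pin the objective near its FVA optimum, add a total-flux reporter reaction, and cap total flux at fraction_of_flux_minimum times the minimal total flux.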
    model=copy.deepcopy(metabolic_model)
    if len(model.objective)>1:
      raise ValueError('Error:Only one objective supported')
    original_objective=copy.deepcopy(model.objective)
    for reaction in model.objective:
         fva=flux_variability_analysis(model,reaction_list=[reaction.id], fraction_of_optimum=fraction_of_optimum_objective)
         if reaction.objective_coefficient>0:
            reaction.lower_bound=min(fva[reaction.id]["maximum"],reaction.upper_bound)
            reaction.upper_bound=min(fva[reaction.id]["maximum"],reaction.upper_bound)
         elif reaction.objective_coefficient<0:
            reaction.lower_bound=max(fva[reaction.id]["minimum"],reaction.lower_bound)
            reaction.upper_bound=max(fva[reaction.id]["minimum"],reaction.lower_bound)
         reaction.lower_bound-=boundaries_precision/2.0
         reaction.upper_bound+=boundaries_precision/2.0
         reaction.objective_coefficient=0
    reversible_reactions=[]
    #convert_to_irreversible(model)
    for reaction in model.reactions:
        if reaction.lower_bound<0:
           reversible_reactions.append(reaction.id)  
    convert_to_irreversible_with_indicators(model,reversible_reactions,metabolite_list=[], mutually_exclusive_directionality_constraint = True)
    if metabolite_list_file_name!=None and metabolite_list_file_name!="":
       metabolite_id_list=read_metabolomics_data(model,metabolite_list_fname=metabolite_list_file_name)
    else:
       metabolite_id_list=[]
    add_turnover_metabolites(model, metabolite_id_list=metabolite_id_list, epsilon=1e-6,label_model=label_model)
    flux_reporter = Metabolite('flux_reporter_0',formula='',name='',compartment='0')
    reporter_coef=1
    for reaction in model.reactions:
        if "IRRMILP_" in reaction.id or "RATIO_" in reaction.id or "RGROUP_" in reaction.id or "TMS_" in reaction.id:
            continue
        reaction.add_metabolites({flux_reporter:reporter_coef})        
    total_flux_reaction = Reaction('total_flux')
    total_flux_reaction.name = 'total_flux'
    total_flux_reaction.subsystem = 'misc'
    total_flux_reaction.lower_bound = 0  
    total_flux_reaction.upper_bound = 1000*reporter_coef*len(model.reactions)
    total_flux_reaction.objective_coefficient=-1  
    total_flux_reaction.add_metabolites({flux_reporter:-1}) 
    model.add_reaction(total_flux_reaction)
    minimal_flux=-1*model.optimize().f
    total_flux_reaction.upper_bound = minimal_flux*fraction_of_flux_minimum
    total_flux_reaction.objective_coefficient=0.0
    for objective in original_objective:
        reaction=model.reactions.get_by_id(objective.id)
        reaction.lower_bound=(fva[reaction.id]["minimum"]-boundaries_precision/2.0)
        reaction.upper_bound=(fva[reaction.id]["maximum"]+boundaries_precision/2.0)
    milp_reactions=model.reactions.query("IRRMILP_")
    non_milp_reactions=[]
    for reaction in  model.reactions:
      if reaction not in milp_reactions:
         non_milp_reactions.append(reaction)
    return model,minimal_flux,non_milp_reactions 
Code example #22
    def FVA(self, obj_frac=0.9, reactions=None, constraints=None, loopless=False, internal=None, solver=None,
            format='dict'):
        """ Flux Variability Analysis (FVA).

        :param model: An instance of a constraint-based model.
        :param float obj_frac: The minimum fraction of the maximum growth rate (default 0.9). Requires that the \
            objective value is at least the fraction times the maximum objective value. A value of 0.85, for instance, \
            means that the objective has to reach at least 85% of its maximum.
        :param list reactions: List of reactions to analyze (default: all).
        :param dict constraints: Additional constraints (optional).
        :param boolean loopless: Run looplessFBA internally (very slow) (default: false).
        :param list internal: List of internal reactions for looplessFBA (optional).
        :param solver: A pre-instantiated solver instance (optional).
        :param format: The return format: 'dict' returns a dictionary, 'df' returns a data frame.
        :returns: A dictionary of flux variation ranges, or a pandas DataFrame when format='df'.

        """
        from cobra.flux_analysis.variability import flux_variability_analysis

        simul_constraints = {}
        if self.environmental_conditions:
            simul_constraints.update(self.environmental_conditions)
        if constraints:
            simul_constraints.update(constraints)

        with self.model as model:

            if simul_constraints:
                for rxn in list(simul_constraints.keys()):
                    reac = model.reactions.get_by_id(rxn)
                    if isinstance(simul_constraints.get(rxn), tuple):
                        reac.bounds = (simul_constraints.get(
                            rxn)[0], simul_constraints.get(rxn)[1])
                    else:
                        reac.bounds = (simul_constraints.get(
                            rxn), simul_constraints.get(rxn))

            df = flux_variability_analysis(
                model, reaction_list=reactions, loopless=loopless, fraction_of_optimum=obj_frac)

        variability = {}
        for r_id in reactions:
            variability[r_id] = [
                float(df.loc[r_id][0]), float(df.loc[r_id][1])]

        if format == 'df':
            import pandas as pd
            e = variability.items()
            f = [[a, b, c] for a, [b, c] in e]
            df = pd.DataFrame(f, columns=['Reaction ID', 'Minimum', 'Maximum'])
            return df
        else:
            return variability
Code example #23
def FVA(model, reaclist=None, subopt=1.0, IncZeroes=True, VaryOnly=False,
        AsMtx=False, tol=1e-10, PrintStatus=False, cobra=False, processes=None):
    state = model.GetState()
    model.Solve(PrintStatus=PrintStatus)
    if model.solution.status != 'optimal' or math.isnan(model.solution.f):
        statusmsg = model.solution.status
        model.SetState(state)
        print "no optimal solution, problem "+statusmsg
    else:
        if reaclist == None:
            reaclist = model.Reactions()
        elif (type(reaclist) in types.StringTypes) or isinstance(
                                                reaclist, Reaction):
            reaclist = [reaclist]
        if not cobra:
            model.DelSumReacsConstraint("FVA_objective")
            model.SetObjAsConstraint(name="FVA_objective", subopt=subopt)
            rv = fva(bounds=model.bounds)
            pool = multiprocessing.Pool(processes=processes)
            results = [pool.apply_async(FluxRange, args=(model, reac, tol,
                        False, True)) for reac in reaclist]
            pool.close()
            pool.join()
            for x in results:
                rv[x.get().keys()[0]] = x.get().values()[0]
#            for reac in reaclist:
#                rv[str(reac)] = pool.apply_async(FluxRange, args=(model, reac, tol,
#                                                                False))
#            for reac in reaclist:
#                lo,hi = model.FluxRange(reac,tol=tol,resetstate=False)
#                if IncZeroes or abs(lo) > tol or abs(hi) > 0.0:
#                    rv[str(reac)] = (lo,hi)
            model.DelSumReacsConstraint("FVA_objective")
        else:
            fvadict = variability.flux_variability_analysis(
                cobra_model=model, reaction_list=reaclist,
                fraction_of_optimum=subopt, solver=model.solver,
                objective_sense=model.objective_direction)
            rv = fva(bounds=model.bounds)
            for reac in fvadict:
                lo = fvadict[reac]["minimum"] if abs(
                        fvadict[reac]["minimum"]) > tol else 0.0
                hi = fvadict[reac]["maximum"] if abs(
                        fvadict[reac]["maximum"]) > tol else 0.0
                rv[reac] = (lo,hi)
        if VaryOnly:
            rv = rv.Variable()
        if AsMtx:
            rv = rv.AsMtx()
        model.SetState(state)
        return rv
Code example #24
File: flux_analysis.py Project: Ajami712/cobrapy
    def test_flux_variability(self):
        fva_results = {
            '5DGLCNtex': {'minimum': -1.9748300208638403e-05, 'maximum': 0.0},
            'ABTA': {'minimum': 0.0, 'maximum': 0.00014811225541408996},
            '5DOAN': {'minimum': 0.0, 'maximum': 3.2227507421302166e-06},
            'A5PISO': {'minimum': 0.006920856282000001, 'maximum': 0.006922717378372606},
            'AACPS1': {'minimum': 0.0, 'maximum': 3.7028063376249126e-05},
            'AACPS2': {'minimum': 0.0, 'maximum': 3.7028063733878864e-05},
            'ACALDtex': {'minimum': -0.00011848980305159615, 'maximum': 0.0},
            'AACPS3': {'minimum': 0.0, 'maximum': 3.702806337623859e-05},
            'AACPS4': {'minimum': 0.0, 'maximum': 3.702806373387888e-05},
            'ABUTD': {'minimum': 0.0, 'maximum': 0.00014811225541406058},
            'AACPS5': {'minimum': 0.0, 'maximum': 2.8211857518389774e-05},
            'AACPS6': {'minimum': 0.0, 'maximum': 2.821185753295664e-05},
            'AACPS7': {'minimum': 0.0, 'maximum': 3.702806368868028e-05},
            'AACPS8': {'minimum': 0.0, 'maximum': 3.702806338788376e-05},
            'AACPS9': {'minimum': 0.0, 'maximum': 3.702806309933293e-05},
            'AACTOOR': {'minimum': 0.0, 'maximum': 1.5388286124597477e-05},
            'ABUTt2pp': {'minimum': 0.0, 'maximum': 0.0},
            '3OAS140': {'minimum': 0.5041754136687804, 'maximum': 0.5042009621703677},
            '3OAS141': {'minimum': 0.037484893950000084, 'maximum': 0.03750284695065363},
            '3OAS160': {'minimum': 0.41767086529953557, 'maximum': 0.41769641380045963},
            '3OAS161': {'minimum': 0.03748489395, 'maximum': 0.03750284695060761},
            '3OAS180': {'minimum': 0.01069201669939239, 'maximum': 0.010717565200387778},
            '3OAS181': {'minimum': 0.01606495455, 'maximum': 0.01608290755044158},
            'ABUTtex': {'minimum': 0.0, 'maximum': 0.0},
            '3OAS60': {'minimum': 0.5439852127139995, 'maximum': 0.5439896193596934},
            '3OAS80': {'minimum': 0.5439852127140001, 'maximum': 0.5439896193596934},
            'AAMYL': {'minimum': 0.0, 'maximum': 0.0},
            '3PEPTabcpp': {'minimum': 0.0, 'maximum': 5.808323730923103e-06},
            '3PEPTtex': {'minimum': -3.4245609402880297e-06, 'maximum': 0.0},
            '3UMPtex': {'minimum': 0.0, 'maximum': 0.0},
            '4HOXPACDtex': {'minimum': 0.0, 'maximum': 0.0},
            'ACACtex': {'minimum': 0.0, 'maximum': 0.0},
            '4PCP': {'minimum': 0.0, 'maximum': 6.171343917391756e-06},
            '4PCPpp': {'minimum': 0.0, 'maximum': 5.58914186256664e-06},
            'AAMYLpp': {'minimum': 0.0, 'maximum': 0.0},
            '4PEPTabcpp': {'minimum': 0.0, 'maximum': 5.696625084349692e-06},
            '4PEPTtex': {'minimum': -3.2198316806921494e-06, 'maximum': 0.0},
            '5DGLCNR': {'minimum': -2.1942555793285538e-05, 'maximum': 0.0},
            '5DGLCNt2rpp': {'minimum': -1.9748300208638403e-05, 'maximum': 0.0},
            'ACALD': {'minimum': 3.356574143593833, 'maximum': 7.4957163478682105}}

        for solver in solver_dict:
            cobra_model = create_test_model()
            initialize_growth_medium(cobra_model, 'LB')
            fva_out = flux_variability_analysis(cobra_model, solver=solver,
                    reaction_list=cobra_model.reactions[100:140])
            for the_reaction, the_range in fva_out.iteritems():
                for k, v in the_range.iteritems():
                    self.assertAlmostEqual(fva_results[the_reaction][k], v, places=3)
Code example #25
def reduce_model(cobra_model, cobra_model_outFileName=None):
    '''reduce model'''
    # Input: cobra_model
    # Output: cobra_model
    #         the lower and upper bounds have been set to 0.0
    #         for all reactions that cannot carry a flux
    cobra_model.optimize()
    sol_f = cobra_model.solution.f

    fva_data = flux_variability_analysis(cobra_model,
                                         fraction_of_optimum=0.9,
                                         objective_sense='maximize',
                                         the_reactions=None,
                                         allow_loops=True,
                                         solver='gurobi',
                                         the_problem='return',
                                         tolerance_optimality=1e-6,
                                         tolerance_feasibility=1e-6,
                                         tolerance_barrier=1e-8,
                                         lp_method=1,
                                         lp_parallel=0,
                                         new_objective=None,
                                         relax_b=None,
                                         error_reporting=None,
                                         number_of_processes=1,
                                         copy_model=False)
    #with open("data\\ijo1366_irrev_fva.json", 'w') as outfile:
    #    json.dump(data, outfile, indent=4);

    #fva_data = json.load(open("data\\ijo1366_irrev_fva.json"));

    # Reduce model
    rxns_noflux = []
    for k, v in fva_data.items():
        if v['minimum'] == 0.0 and v['maximum'] == 0.0:
            cobra_model.reactions.get_by_id(k).lower_bound = 0.0
            cobra_model.reactions.get_by_id(k).upper_bound = 0.0
            rxns_noflux.append(k)

    if cobra_model_outFileName:
        write_cobra_model_to_sbml_file(cobra_model, cobra_model_outFileName)

    cobra_model.optimize()
    sol_reduced_f = cobra_model.solution.f

    # Check that the reduced model is consistent with the original model
    if not sol_f == sol_reduced_f:
        print('reduced model is inconsistent with the original model')
        print('original model solution: ' + str(sol_f))
        print('reduced model solution: ' + str(sol_reduced_f))
Code example #26
def build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list=[]):
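     # Start from an FVA with fraction_of_optimum=0 on the constrained model, then seed or widen each flux's confidence interval, handling forward/reverse reaction pairs separately.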
     fva=flux_variability_analysis(label_model.constrained_model, fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
     for flux in label_model.flux_dict:
              forward_reaction=False
              reverse_reaction=False
              flux_value=label_model.flux_dict[flux]
              if "RATIO" in flux:
                  continue
              if flux in fva:
                 fva_max=fva[flux]["maximum"]
                 fva_min=fva[flux]["minimum"]
              if (flux+"_reverse") in label_model.flux_dict:
                 forward_reaction=True
                 #net_flux=flux_value-flux_dict[flux+"_reverse"]
              elif ("_reverse") in flux:
                 reverse_reaction=True
                 #net_flux=flux_value-flux_dict[flux+"_reverse"]
              if flux not in  flux_confidence_interval_dict:
                 if forward_reaction:
                     flux_confidence_interval_dict[flux]={"lb":fva_min,"ub":fva_max}
                     flux_confidence_interval_dict[flux+"_forward"]={"lb":flux_value,"ub":flux_value} 
                 elif reverse_reaction:
                     flux_confidence_interval_dict[flux]={"lb":flux_value,"ub":flux_value}
                 else: 
                     flux_confidence_interval_dict[flux]={"lb":fva_min,"ub":fva_max}
              else: 
                 if forward_reaction:   
                   flux_confidence_interval_dict[flux]["lb"]=min(flux_confidence_interval_dict[flux]["lb"],fva_min)       
                   flux_confidence_interval_dict[flux]["ub"]=max(flux_confidence_interval_dict[flux]["ub"],fva_max)       
                   if flux+"_forward" not in flux_confidence_interval_dict:
                      flux_confidence_interval_dict[flux+"_forward"]={"lb":flux_value,"ub":flux_value} 
                   flux_confidence_interval_dict[flux+"_forward"]["lb"]=min(flux_confidence_interval_dict[flux+"_forward"]["lb"],flux_value)       
                   flux_confidence_interval_dict[flux+"_forward"]["ub"]=max(flux_confidence_interval_dict[flux+"_forward"]["ub"],flux_value) 
                 elif reverse_reaction:
                   flux_confidence_interval_dict[flux]["lb"]=min(flux_confidence_interval_dict[flux]["lb"],flux_value)       
                   flux_confidence_interval_dict[flux]["ub"]=max(flux_confidence_interval_dict[flux]["ub"],flux_value) 
                 else:     
                   flux_confidence_interval_dict[flux]["lb"]=min(flux_confidence_interval_dict[flux]["lb"],fva_min)       
                   flux_confidence_interval_dict[flux]["ub"]=max(flux_confidence_interval_dict[flux]["ub"],fva_max) 
     for parameter in parameter_list:
          if "/" in parameter:
            try: 
             value,lb,ub=get_ratios_bounds(label_model,parameter,0.1,label_model.lp_tolerance_feasibility)
             if parameter not in flux_confidence_interval_dict:
                flux_confidence_interval_dict[parameter]={"lb":lb,"ub":ub}
             else:
                flux_confidence_interval_dict[parameter]["lb"]=min(flux_confidence_interval_dict[parameter]["lb"],lb)       
                flux_confidence_interval_dict[parameter]["ub"]=max(flux_confidence_interval_dict[parameter]["ub"],ub) 
            except:
               print parameter+" not recognized as ratio"   
Code example #27
def test_pfba_flux_variability(model, pfba_fva_results, fva_results,
                               all_solvers):
    """Test FVA using pFBA."""
    model.solver = all_solvers
    with pytest.warns(UserWarning):
        flux_variability_analysis(model,
                                  pfba_factor=0.1,
                                  reaction_list=model.reactions[1::3])
    fva_out = flux_variability_analysis(model,
                                        pfba_factor=1.1,
                                        reaction_list=model.reactions)
    for name, result in iteritems(fva_out.T):
        for k, v in iteritems(result):
            assert abs(pfba_fva_results[k][name] - v) < 0.00001
            assert abs(pfba_fva_results[k][name]) <= abs(fva_results[k][name])
    loop_reactions = [
        model.reactions.get_by_id(rid) for rid in ("FRD7", "SUCDi")
    ]
    fva_loopless = flux_variability_analysis(model,
                                             pfba_factor=1.1,
                                             reaction_list=loop_reactions,
                                             loopless=True)
    assert np.allclose(fva_loopless["maximum"], fva_loopless["minimum"])
Code example #28
def findReactionsWithPositiveFluxAtOptimum(GEM, fraction_of_optimum=1):
    """from cobra.flux_analysis.variability import flux_variability_analysis
    GEM must be irreversible, i.e., split reversible reactions
    """
    positive_rxns = []
    fva = flux_variability_analysis(GEM,
                                    reaction_list=None,
                                    fraction_of_optimum=fraction_of_optimum,
                                    processes=2,
                                    loopless=False).round(decimals=8)
    for rxn_id in fva.index:
        v_min, v_max = fva.loc[rxn_id].minimum, fva.loc[rxn_id].maximum
        if v_min > 0:
            positive_rxns.append(rxn_id)
    return positive_rxns
Code example #29
File: variability.py Project: jonm4024/cobrapy
def run_fva(solver='cplex'):
    """
    For debugging purposes only
    
    """
    from test import salmonella_pickle
    try:
        from cPickle import load
    except:
        from pickle import load
    with open(salmonella_pickle) as in_file:
        cobra_model = load(in_file)
    fva_out =  flux_variability_analysis(cobra_model,
                                         the_reactions=cobra_model.reactions,#[100:140],
                                         solver=solver,number_of_processes=1)
Code example #30
def fva(json):
    model = load_json_model(json)
    bio = {
        reaction: reaction.objective_coefficient
        for reaction in model.reactions if search('biomass', reaction.name)
    }
    biom = bio.keys()[0]
    # set the objective
    model.change_objective(biom)
    # add constraints
    model.optimize()
    f0 = model.solution.f
    # get a dictionary with all non-zero fluxes
    fluxes = {
        reaction.id: reaction.x
        for reaction in model.reactions if reaction.x != 0
    }
    # first time store for the wild type
    v = variability.flux_variability_analysis(model)
    return v
Code example #31
File: cobra_methods.py Project: SBRG/sbaas
    def reduce_model(self,cobra_model,cobra_model_outFileName=None):
        '''reduce model'''
        # Input: cobra_model
        # Output: cobra_model 
        #         the lower and upper bounds have been set to 0.0
        #         for all reactions that cannot carry a flux
        cobra_model.optimize()
        sol_f = cobra_model.solution.f

        fva_data = flux_variability_analysis(cobra_model, fraction_of_optimum=0.9,
                                              objective_sense='maximize', the_reactions=None,
                                              allow_loops=True, solver='gurobi',
                                              the_problem='return', tolerance_optimality=1e-6,
                                              tolerance_feasibility=1e-6, tolerance_barrier=1e-8,
                                              lp_method=1, lp_parallel=0, new_objective=None,
                                              relax_b=None, error_reporting=None,
                                              number_of_processes=1, copy_model=False);
        #with open("data/ijo1366_irrev_fva.json", 'w') as outfile:
        #    json.dump(data, outfile, indent=4);

        #fva_data = json.load(open("data/ijo1366_irrev_fva.json"));

        # Reduce model
        rxns_noflux = [];
        for k,v in fva_data.items():
            if v['minimum'] == 0.0 and v['maximum'] == 0.0:
                cobra_model.reactions.get_by_id(k).lower_bound = 0.0;
                cobra_model.reactions.get_by_id(k).upper_bound = 0.0;
                rxns_noflux.append(k);

        if cobra_model_outFileName:
            write_cobra_model_to_sbml_file(cobra_model,cobra_model_outFileName)

        cobra_model.optimize()
        sol_reduced_f = cobra_model.solution.f

        # Check that the reduced model is consistent with the original model
        if not sol_f == sol_reduced_f:
            print('reduced model is inconsistent with the original model')
            print('original model solution: ' + str(sol_f))
            print('reduced model solution: ' + str(sol_reduced_f))
Code example #32
def get_bounds(label_model,best_parameter_dict,parameter,force_flux_value_bounds,flux_parameter_list):
       lb_list=[]#[best_parameter_dict[parameter]["lb"]]
       ub_list=[]#[best_parameter_dict[parameter]["ub"]]
       if best_parameter_dict[parameter]["type"]=="flux value":
          clear_parameters(label_model,parameter_dict=best_parameter_dict,parameter_list=flux_parameter_list, clear_ratios=False,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
          #Get real upper and lower bound for the parameters
          for reaction_id in best_parameter_dict[parameter]["reactions"]: 
                fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,reaction_list=[reaction_id],tolerance_feasibility=label_model.lp_tolerance_feasibility)
                lb_list.append(fva[reaction_id]["minimum"])
                ub_list.append(fva[reaction_id]["maximum"])
       if best_parameter_dict[parameter]["type"]!="flux value" or force_flux_value_bounds:  
          lb_list.append(best_parameter_dict[parameter]["lb"])
          ub_list.append(best_parameter_dict[parameter]["ub"])  
       """if best_parameter_dict[parameter]["type"]=="turnover" or force_flux_value_bounds:
          lb_list.append(best_parameter_dict[parameter]["lb"])
          ub_list.append(best_parameter_dict[parameter]["ub"])
       if "ratio" in best_parameter_dict[parameter]["type"]:
           lb_list,ub_list=get_ratios_bounds(label_model.constrained_model,parameter,0.1,lp_tolerance_feasibility=label_model.lp_tolerance_feasibility)"""
       parameter_lb=max(lb_list)
       parameter_ub=min(ub_list)
       return parameter_lb, parameter_ub
Code example #33
File: minimal_flux.py Project: pcm32/iso2flux
def add_flux_limit_constraints(metabolic_model,fraction_of_optimum_objective=0.8, fraction_of_flux_minimum=2,solver=None,boundaries_precision=0.001,label_model=None,metabolite_list_file_name=None):
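    # Run FVA on the flux-minimized irreversible model, fold the reverse-reaction ranges back onto their parent reactions, and tighten the original model's bounds accordingly.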
    precision=int(-1*(math.log10(boundaries_precision)))
    irreversible_model,minimal_flux,non_milp_reactions=create_minimal_fux_model(metabolic_model,fraction_of_optimum_objective, fraction_of_flux_minimum,boundaries_precision=boundaries_precision,label_model=label_model,metabolite_list_file_name=metabolite_list_file_name)
    irreversible_fva=flux_variability_analysis(irreversible_model,reaction_list=non_milp_reactions,fraction_of_optimum=0,solver=solver)
    reversible_fva={}
    reverese_reactions=irreversible_model.reactions.query("_reverse") 
    for reaction in irreversible_model.reactions:
        if reaction in reverese_reactions or "IRRMILP_" in reaction.id:
           continue
        reversible_fva[reaction.id]=copy.deepcopy(irreversible_fva[reaction.id])
        if "reflection" in reaction.notes:
            reverse_reaction= reaction.notes["reflection"]
            reversible_fva[reaction.id]["minimum"]=-irreversible_fva[reverse_reaction]["maximum"]
            if irreversible_fva[reverse_reaction]["minimum"]!=0: 
               reversible_fva[reaction.id]["maximum"]=-irreversible_fva[reverse_reaction]["minimum"] 
        if reaction.id in metabolic_model.reactions:
           original_model_reaction=metabolic_model.reactions.get_by_id(reaction.id) 
           if original_model_reaction.lower_bound!=0:
              original_model_reaction.lower_bound=max(round_down(reversible_fva[reaction.id]["minimum"],precision),original_model_reaction.lower_bound)
           if original_model_reaction.upper_bound!=0:
              original_model_reaction.upper_bound=min(round_up(reversible_fva[reaction.id]["maximum"],precision),original_model_reaction.upper_bound)
           original_model_reaction.objective_coefficient=0.0
        
    return reversible_fva  
Code example #34
File: FVA.py Project: mauriceccy/scobra
def FVA(model,
        reaclist=None,
        subopt=1.0,
        IncZeroes=True,
        VaryOnly=False,
        AsMtx=False,
        tol=1e-10,
        PrintStatus=False,
        cobra=True,
        processes=None,
        loopless=False,
        pfba_factor=None,
        reset_state=True):
    if reset_state:
        state = model.GetState()
#    import time
#    print 'before fva solve ' + time.asctime(time.localtime(time.time()))
    model.Solve(PrintStatus=PrintStatus)
    #    print 'after fva solve ' + time.asctime(time.localtime(time.time()))
    if model.solution.status != 'optimal' or math.isnan(
            model.solution.objective_value):
        statusmsg = model.solution.status
        model.SetState(state)
        raise ValueError("no optimal solution, problem " + statusmsg)
    else:
        if reaclist == None:
            reaclist = model.reactions
        elif isinstance(reaclist, str) or isinstance(reaclist, Reaction):
            reaclist = [reaclist]
        if cobra:
            #            print 'before cobra fva ' + time.asctime(time.localtime(time.time()))
            reaclist = model.GetReactions(reaclist)
            fvadict = variability.flux_variability_analysis(
                model=model,
                reaction_list=reaclist,
                fraction_of_optimum=subopt,
                loopless=loopless,
                pfba_factor=pfba_factor)
            #solver=model.solver, objective_sense=model.objective_direction)
            #            print 'after cobra fva ' + time.asctime(time.localtime(time.time()))
            rv = fva({}, bounds=model.bounds)
            #            for reac in fvadict:
            #                lo = fvadict[reac]["minimum"] if abs(
            #                        fvadict[reac]["minimum"]) > tol else 0.0
            #                hi = fvadict[reac]["maximum"] if abs(
            #                        fvadict[reac]["maximum"]) > tol else 0.0
            #                rv[reac] = (lo,hi)
            for row in fvadict.iterrows():
                lo = row[1][0] if abs(row[1][0]) > tol else 0.0
                hi = row[1][1] if abs(row[1][1]) > tol else 0.0
                rv[model.GetReactionName(row[0])] = (lo, hi)
#                rv[model.GetReaction(row[0])] = (lo,hi)
        else:
            #            print 'not cobra fva'
            model.DelSumReacsConstraint("FVA_objective")
            model.SetObjAsConstraint(name="FVA_objective", subopt=subopt)
            rv = fva({}, bounds=model.bounds)
            pool = multiprocessing.Pool(processes=processes)
            results = [
                pool.apply_async(FluxRange,
                                 args=(model, reac, tol, False, True))
                for reac in reaclist
            ]
            pool.close()
            pool.join()
            for x in results:
                rv[model.GetReactionName(
                    x.get().keys()[0])] = x.get().values()[0]
            if "FVA_objective_sum_reaction" in rv:
                rv.pop("FVA_objective_sum_reaction")
#            for reac in reaclist:
#                rv[str(reac)] = pool.apply_async(FluxRange, args=(model, reac, tol,
#                                                                False))
#            for reac in reaclist:
#                lo,hi = model.FluxRange(reac,tol=tol,reset_state=False)
#                if IncZeroes or abs(lo) > tol or abs(hi) > 0.0:
#                    rv[str(reac)] = (lo,hi)
            model.DelSumReacsConstraint("FVA_objective")
        if VaryOnly:
            rv = rv.Variable()
        if AsMtx:
            rv = rv.AsMtx()
#        print 'before fva set state ' + time.asctime(time.localtime(time.time()))
        if reset_state:
            model.SetState(state)


#        print 'after fva set state ' + time.asctime(time.localtime(time.time()))
        return rv
Code example #35
def test_parallel_flux_variability(model, fva_results, all_solvers):
    """Test parallel FVA."""
    model.solver = all_solvers
    fva_out = flux_variability_analysis(model, processes=2)
    fva_out.sort_index(inplace=True)
    assert np.allclose(fva_out, fva_results)
Code example #36
File: small.py Project: jireva/mexmodel
#!/bin/sh

gurobi-env python2 << '!'

import cobra
from cobra.flux_analysis import variability as fva

m = cobra.io.load_json_model("small.json")
o = m.optimize()
s = fva.flux_variability_analysis(m, fraction_of_optimum=0.99, solver='gurobi')  # min/max flux ranges with the objective held at 99% of its optimum

for (rx, flux) in o.x_dict.items():
	print "{}	{}".format(rx, flux/10)

!
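A rough Python 3 equivalent of the embedded script above, assuming the same small.json model file and a current cobrapy in which FVA returns a DataFrame of minimum/maximum fluxes:

import cobra
from cobra.flux_analysis import flux_variability_analysis

m = cobra.io.load_json_model("small.json")
o = m.optimize()
# minimum/maximum flux ranges with the objective constrained to 99% of its optimum
s = flux_variability_analysis(m, fraction_of_optimum=0.99)

for rx, flux in o.fluxes.items():
    print("{}\t{}".format(rx, flux / 10))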
Code example #37
def evaluate_parameter(label_model,parameter_dict,flux_parameter_list,parameter,parameter_lb,parameter_ub,parameter_new_value,signficance_threshold,feasability_process,parameter_precision,max_perturbation,force_flux_value_bounds,annealing_n,annealing_m,annealing_p0,annealing_pf,max_random_sample,min_random_sample,annealing_n_processes,annealing_cycle_time_limit, annealing_cycle_max_attempts,annealing_iterations,annealing_restore_parameters=True):
   "Function used internally to evaluate the ChiSquare with a given parameter to locked to a single value" 
   parameter_dict=copy.deepcopy(parameter_dict)
   #is_flux_value=parameter in flux_parameter_list
   if parameter_dict[parameter]["type"] in ("ratio","flux value"):
             clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=flux_parameter_list, clear_ratios=False,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
             """#Get real upper and lower bound for the parameters
             for reaction_id in parameter_dict[parameter]["reactions"]: 
                status,feasability_process=check_feasibility(label_model.constrained_model,tolerance_feasibility=label_model.lp_tolerance_feasibility,time_limit=60,pool=feasability_process)
                fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,reaction_list=[reaction_id],tolerance_feasibility=label_model.lp_tolerance_feasibility)
                lb_list.append(fva[reaction_id]["minimum"])
                ub_list.append(fva[reaction_id]["maximum"])
   parameter_lb=max(lb_list)
   parameter_ub=min(ub_list)"""
   print parameter_dict
   parameter_dict[parameter]["v"]=parameter_new_value
   print [parameter,parameter_dict[parameter]]
   apply_parameters(label_model,parameter_dict,apply_flux_values=True,parameter_precision=parameter_precision,parameter_list=[parameter])
   apply_ratios(label_model.constrained_model,label_model.ratio_dict)
   if parameter_dict[parameter]["type"] in ("ratio","flux value"):
      parameter_backup=copy.deepcopy(parameter_dict)
      for attempt in range(0,10):
          retry_flag=False
          random_parameter_sample=random.sample(flux_parameter_list, len(flux_parameter_list))
          for flux_value in random_parameter_sample:
                  if flux_value==parameter:
                     continue
                  lb=-999999
                  ub=999999
                  for reaction_id in parameter_dict[flux_value]["reactions"]:
                      status,feasability_process=check_feasibility(label_model.constrained_model,tolerance_feasibility=label_model.lp_tolerance_feasibility,time_limit=60,pool=feasability_process)
                      if status =="infeasible":
                         retry_flag=True 
                         clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=flux_parameter_list, clear_ratios=False ,clear_turnover=False ,clear_fluxes=True, restore_objectives=False)
                         parameter_dict=copy.deepcopy(parameter_backup)
                         apply_parameters(label_model,parameter_dict,apply_flux_values=True,parameter_precision=parameter_precision,parameter_list=[parameter])
                         break
                      fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,reaction_list=[reaction_id],tolerance_feasibility=label_model.lp_tolerance_feasibility)
                      ub=min(fva[reaction_id]["maximum"],ub)
                      lb=max(fva[reaction_id]["minimum"],lb)
                  if retry_flag==True:
                     break 
                  value=parameter_dict[flux_value]["v"] #previous value
                  parameter_dict[flux_value]["v"]=min(max(lb,value),ub)
                  apply_parameters(label_model,parameter_dict,apply_flux_values=True,parameter_precision=parameter_precision,parameter_list=[flux_value])
          #print model.optimize()
          if  retry_flag==False:
              break #If no errors where encountered no need for further attempts
   apply_parameters(label_model,parameter_dict,apply_flux_values=True,parameter_precision=parameter_precision)
   print "Delta1"
   a,b=solver(label_model)
   print "Delta2"
   f_best,b,c=get_objective_function(label_model,output=False)
   best_flux_dict=copy.deepcopy(label_model.flux_dict)
   print ["FBEST",f_best]
   if f_best>=signficance_threshold:
             backup_parameter_dict=copy.deepcopy(parameter_dict)
             print "coordinated descent"
             parameters_to_fit=copy.copy(parameter_dict)
             del parameters_to_fit[parameter]
             f_best, new_parameters,best_flux_dict=coordinate_descent(label_model,mode="fsolve",parameter_precision=parameter_precision,parameter_to_be_fitted=parameters_to_fit,max_perturbation=max_perturbation,perturbation=1.2,fba_mode="fba",parameter_dict=parameter_dict,force_flux_value_bounds=force_flux_value_bounds)  
             best_flux_dict=label_model.flux_dict
             print [parameter_new_value,f_best]
             if f_best<=signficance_threshold:
                parameter_dict=new_parameters
             else:
                
                for x in range(0,annealing_iterations): #Try several times to make sure the result is above the signficance threeshol
                    parameter_dict,best_flux_dict,f_best=annealing(label_model,n=annealing_n,m=annealing_m,p0=annealing_p0,pf=annealing_pf,max_random_sample=max_random_sample,min_random_sample=min_random_sample,mode="fsolve",fraction_of_optimum=0,parameter_precision=parameter_precision,parameter_to_be_fitted=parameters_to_fit,max_perturbation=max_perturbation,gui=None,fba_mode="fba", break_threshold=signficance_threshold,parameter_dict=parameter_dict,n_processes=annealing_n_processes,cycle_time_limit=annealing_cycle_time_limit, cycle_max_attempts=annealing_cycle_max_attempts,output=False,force_flux_value_bounds=force_flux_value_bounds)
                    if f_best<signficance_threshold:
                        break
                    elif annealing_restore_parameters==True: 
                        parameter_dict=copy.deepcopy(backup_parameter_dict) 
   
   return parameter_dict, f_best
Code example #38
File: summary.py Project: synchon/cobrapy
def metabolite_summary(met, solution=None, threshold=0.01, fva=False,
                       floatfmt='.3g'):
    """Print a summary of the reactions which produce and consume this
    metabolite

    solution : cobra.core.Solution
        A previously solved model solution to use for generating the
        summary. If none provided (default), the summary method will resolve
        the model. Note that the solution object must match the model, i.e.,
        changes to the model such as changed bounds, added or removed
        reactions are not taken into account by this method.

    threshold : float
        a percentage value of the maximum flux below which to ignore reaction
        fluxes

    fva : float (0->1), or None
        Whether or not to include flux variability analysis in the output.
        If given, fva should be a float between 0 and 1, representing the
        fraction of the optimum objective to be searched.

    floatfmt : string
        format method for floats, passed to tabulate. Default is '.3g'.

    """
    if solution is None:
        met.model.slim_optimize(error_value=None)
        solution = get_solution(met.model, reactions=met.reactions)

    rxn_id = list()
    flux = list()
    reaction = list()
    for rxn in met.reactions:
        rxn_id.append(format_long_string(rxn.id, 10))
        flux.append(solution.fluxes[rxn.id] * rxn.metabolites[met])
        reaction.append(format_long_string(rxn.reaction, 40 if fva else 50))

    flux_summary = pd.DataFrame(data={
        "id": rxn_id, "flux": flux, "reaction": reaction})

    if fva:
        fva_results = flux_variability_analysis(
            met.model, met.reactions, fraction_of_optimum=fva)

        flux_summary.index = flux_summary["id"]
        flux_summary["maximum"] = zeros(len(rxn_id))
        flux_summary["minimum"] = zeros(len(rxn_id))
        for rid, rxn in zip(rxn_id, met.reactions):
            imax = rxn.metabolites[met] * fva_results.loc[rxn.id, "maximum"]
            imin = rxn.metabolites[met] * fva_results.loc[rxn.id, "minimum"]
            flux_summary.loc[rid, "fmax"] = (imax if abs(imin) <= abs(imax)
                                             else imin)
            flux_summary.loc[rid, "fmin"] = (imin if abs(imin) <= abs(imax)
                                             else imax)

    assert flux_summary.flux.sum() < 1E-6, "Error in flux balance"

    flux_summary = _process_flux_dataframe(flux_summary, fva, threshold,
                                           floatfmt)

    flux_summary['percent'] = 0
    total_flux = flux_summary[flux_summary.is_input].flux.sum()

    flux_summary.loc[flux_summary.is_input, 'percent'] = \
        flux_summary.loc[flux_summary.is_input, 'flux'] / total_flux
    flux_summary.loc[~flux_summary.is_input, 'percent'] = \
        flux_summary.loc[~flux_summary.is_input, 'flux'] / total_flux

    flux_summary['percent'] = flux_summary.percent.apply(
        lambda x: '{:.0%}'.format(x))

    if fva:
        flux_table = tabulate(
            flux_summary.loc[:, ['percent', 'flux', 'fva_fmt', 'id',
                                 'reaction']].values, floatfmt=floatfmt,
            headers=['%', 'FLUX', 'RANGE', 'RXN ID', 'REACTION']).split('\n')
    else:
        flux_table = tabulate(
            flux_summary.loc[:, ['percent', 'flux', 'id', 'reaction']].values,
            floatfmt=floatfmt, headers=['%', 'FLUX', 'RXN ID', 'REACTION']
        ).split('\n')

    flux_table_head = flux_table[:2]

    met_tag = "{0} ({1})".format(format_long_string(met.name, 45),
                                 format_long_string(met.id, 10))

    head = "PRODUCING REACTIONS -- " + met_tag
    print_(head)
    print_("-" * len(head))
    print_('\n'.join(flux_table_head))
    print_('\n'.join(
        pd.np.array(flux_table[2:])[flux_summary.is_input.values]))

    print_()
    print_("CONSUMING REACTIONS -- " + met_tag)
    print_("-" * len(head))
    print_('\n'.join(flux_table_head))
    print_('\n'.join(
        pd.np.array(flux_table[2:])[~flux_summary.is_input.values]))
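A minimal usage sketch for the metabolite summary above. It assumes a cobrapy version that still ships cobra.test.create_test_model and the metabolite id atp_c from the packaged "textbook" E. coli core model, and that the function is reached through the Metabolite.summary method; passing fva=0.95 fills the RANGE column from flux_variability_analysis run at 95% of the optimal objective.

import cobra.test

model = cobra.test.create_test_model("textbook")  # packaged E. coli core model
model.optimize()
# Producing/consuming reactions of cytosolic ATP, with FVA ranges
# computed at 95% of the optimal objective value.
model.metabolites.get_by_id("atp_c").summary(fva=0.95)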
Code example #41
0
def estimate_confidence_intervals(label_model,significance=0.95,perturbation=0.1,min_absolute_perturbation=0.1,max_absolute_perturbation=25,parameter_precision=None,best_parameter_dict=None,evaluate_turnovers=False,parameter_list=None,fraction_of_optimum=0.9,force_flux_value_bounds=False,relative_max_random_sample=0.5, relative_min_random_sample= 0.25,annealing_n=50,annealing_m=100,annealing_p0=0.4,annealing_pf=0.001,annealing_n_processes=1,annealing_cycle_time_limit=1800, annealing_cycle_max_attempts=5,annealing_iterations=2,annealing_restore_parameters=True,fname="confidence.json",sbml_name=None,output=True):
   """
   Computes the confidence intervals for fluxes 
   
   label_model: label_model object
   signficance: float
        Signficance level for the confidence intervals. Default is 0.95 (95%)
   perturbation: float
        Relative perturbation for each parameter at each step. Default is 0.1 (10%). Regardless of this value the absolute perturbation will will never be lower than the value defined by the min_absolute_perturbation and will never be larger than the maximum value defined by the max_absolute_perturbation
   min_absolute_perturbation: float
	See above
   max_absolute_perturbation: float
	See above
   parameter_precision: float,
        Defines the precision of the flux value parameters. If none is defined the precision defined in the label_model object will be used
   best_parameter_dict: dict
	Dict with the best parameters that have been obtained after fitting the parameters to experimental data
   evaluate_turnovers: bool,
        If set to False (default) it will not calculate the confidence intervals for turnovers. 
   parameter_list: list
	List of the parameters that should be evaluated. Unless all flux value parameters are selected, the confidence intervals for fluxes (other than those directly analyzed) won't be meaningful
   fraction_of_optimum: float
        Fraction of the objective flux that should be mantained. If the parameters have been added automatically this will have no effect as the objective is alsways added as parameters 
   force_flux_value_bounds: bool,
   	If False it will ignore the bounds defined in the parameter dict and use the FVA limits for flux value parameters. If set to True it migh in some instances result on  unfeasible solutions
   relative_max_random_sample: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   relative_min_random_sample: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters 
   annealing_n: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters   
   annealing_m: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_p0: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_pf: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_n_processes: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_cycle_time_limit: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_cycle_max_attempts
   annealing_iterations: int
	Number of times annealing should be run once the signficance threeshold has been surpassed by a parameter to ensure this values is the real upper/lower limit for the paramater
   annealing_restore_parameters: bool:
	If True after each annealing iterations it will return the paramaters to each original value to reduce the risk of being trapper into local minimums
   fname: string:
        Name of the file where the results will be saved. It must have ither a xlsx, CSV or json extension.  
   sbml_name;: string
        Name of of the SBML that will generated containng the constrained_model restricted by the confidence interval results. If none (default) no SBML will be generated.
   output: bool
        If True it will indicate progress of the analysis on a text file named estimate estimate_confidence_interval_output.txt	
   """
   if parameter_precision==None:
      parameter_precision=label_model.parameter_precision
   if best_parameter_dict==None:
      best_parameter_dict=label_model.parameter_dict
   print parameter_list
   if parameter_list==None or parameter_list==[]:
      full_mode=True
      if evaluate_turnovers:
         parameter_list=best_parameter_dict.keys()  
      else:
         parameter_list=[] 
         for x in best_parameter_dict:
             if best_parameter_dict[x]["type"] != "turnover":
                parameter_list.append(x) 
   else:
      full_mode=False
   precision=int(-1*(math.log10(parameter_precision)))
   max_random_sample=int(relative_max_random_sample*len(best_parameter_dict))
   min_random_sample=int(relative_min_random_sample*len(best_parameter_dict))
   #chi_parameters_sets_dict={}
   parameter_confidence_interval_dict={}
   flux_confidence_interval_dict={}
   parameter_value_parameters_sets_dict={}
   build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list)
   build_confidence_dicts(parameter_confidence_interval_dict,parameter_value_parameters_sets_dict,best_parameter_dict)
   """for flux in label_model.flux_dict:
       flux_confidence_interval_dict[flux]={"lb":label_model.flux_dict[flux],"ub":label_model.flux_dict[flux]}
       if (flux+"_reverse") in label_model.flux_dict:
          net_flux=label_model.flux_dict[flux]-label_model.flux_dict[flux+"_reverse"]
          flux_confidence_interval_dict["net_"+flux]={"lb":net_flux,"ub":net_flux}"""
   print flux_confidence_interval_dict
   apply_parameters(label_model,best_parameter_dict,parameter_precision=parameter_precision)
   a,b=solver(label_model)
   best_objective,b,c=get_objective_function(label_model,output=False)
   delta_chi = chi2.isf(q=1-significance, df=1)
   signficance_threshold=delta_chi+best_objective
   print signficance_threshold
   if output:
      with open("estimate_confidence_interval_output.txt", "a") as myfile:
           myfile.write("signficance_threshold "+str(signficance_threshold)+"\n")
   original_objectives_bounds={}
   for reaction in label_model.constrained_model.objective: 
              original_objectives_bounds[reaction.id]={}
              original_objectives_bounds[reaction.id]["lb"]=reaction.lower_bound
              original_objectives_bounds[reaction.id]["ub"]=reaction.upper_bound
              original_objectives_bounds[reaction.id]["obj_coef"]=reaction.objective_coefficient
              fva=flux_variability_analysis(label_model.constrained_model,reaction_list=[reaction], fraction_of_optimum=fraction_of_optimum,tolerance_feasibility=label_model.lp_tolerance_feasibility)
              reaction.lower_bound=max(round_down(fva[reaction.id]["minimum"],precision),reaction.lower_bound)
              reaction.upper_bound=min(round_up(fva[reaction.id]["maximum"],precision),reaction.upper_bound)
              reaction.objective_coefficient=0
   
   flux_parameter_list=[]
   
   for parameter in best_parameter_dict:
       if best_parameter_dict[parameter]["type"]=="flux value":
          flux_parameter_list.append(parameter)
   feasability_process = Pool(processes=1)
   for parameter in parameter_list:
       apply_parameters(label_model,best_parameter_dict,parameter_precision=parameter_precision)
       a,b=solver(label_model)
       #chi_parameters_sets_dict[parameter]={}
       #variation_range= best_parameter_dict[parameter]["ub"]-best_parameter_dict[parameter]["lb"] 
       #Find the highest/lowest value found on previous simulations
       n=1
       sign=1
       #is_flux_value=parameter in flux_parameter_list
       if parameter not in best_parameter_dict:
          additional_parameter=True 
          parameter_dict=max_parameter_dict=min_parameter_dict=copy.deepcopy(best_parameter_dict)
          if parameter in label_model.constrained_model.reactions:
             print "is not ratio"
             value=label_model.constrained_model.solution.x_dict[parameter]
             reaction=label_model.constrained_model.reactions.get_by_id(parameter)
             lb=reaction.lower_bound
             ub=reaction.upper_bound
             parameter_dict[parameter]={"v":value,"lb":lb,"ub":ub ,"type":"flux value","reactions":[parameter],"max_d":0.1,"original_lb":lb,"original_ub":ub,"original_objective_coefficient":0.0}
          elif "/" in parameter:
             print "is ratio"
             reaction1=parameter.split("/")[0]
             reaction2=parameter.split("/")[1]  
             value,lb,ub=get_ratios_bounds(label_model,parameter,0.1,lp_tolerance_feasibility=label_model.lp_tolerance_feasibility,parameter_dict=parameter_dict)
             parameter_dict[parameter]={"v":value,"lb":lb,"ub":ub ,"type":"ratio","ratio":{reaction1:"v",reaction2:1},"max_d":0.1}
             print parameter      
       else:
          additional_parameter=False 
           #TODO Make it so it can start from the highest and lowest values of the parameter found in previous simulations
          min_parameter_dict=parameter_value_parameters_sets_dict[parameter]["lb_parameter_dict"]
          max_parameter_dict=parameter_value_parameters_sets_dict[parameter]["ub_parameter_dict"]
       parameter_lb,parameter_ub=get_bounds(label_model,min_parameter_dict,parameter,force_flux_value_bounds,flux_parameter_list)
       """lb_list=[]#[best_parameter_dict[parameter]["lb"]]
       ub_list=[]#[best_parameter_dict[parameter]["ub"]]
       if is_flux_value==True:
          clear_parameters(label_model,parameter_dict=best_parameter_dict,parameter_list=flux_parameter_list, clear_ratios=False,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
          #Get real upper and lower bound for the parameters
          for reaction_id in best_parameter_dict[parameter]["reactions"]: 
                fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,reaction_list=[reaction_id],tolerance_feasibility=label_model.lp_tolerance_feasibility)
                lb_list.append(fva[reaction_id]["minimum"])
                ub_list.append(fva[reaction_id]["maximum"])
       if is_flux_value==False or force_flux_value_bounds:
          lb_list.append(best_parameter_dict[parameter]["lb"])
          ub_list.append(best_parameter_dict[parameter]["ub"])
       parameter_lb=max(lb_list)
       parameter_ub=min(ub_list)"""
       if output:
          with open("estimate_confidence_interval_output.txt", "a") as myfile:
               myfile.write("///////"+parameter+"(lb="+str(parameter_lb)+" ub="+str(parameter_ub)+ ")\n")
       while(n<=100000):
          stop_flag=False
          if n==1:
             parameter_dict=copy.deepcopy(max_parameter_dict)
              #Run a quick evaluation of the upper bound to check whether it can be reached directly, making it unnecessary to "walk" there step by step
             parameter_dict,f_best=evaluate_parameter(label_model,parameter_dict,flux_parameter_list,parameter,parameter_lb,parameter_ub,parameter_ub,signficance_threshold,feasability_process,parameter_precision,max_absolute_perturbation/10.0,force_flux_value_bounds,max(int(annealing_n*0.5),2),annealing_m,annealing_p0,annealing_pf,max_random_sample,min_random_sample,annealing_n_processes,annealing_cycle_time_limit, annealing_cycle_max_attempts,annealing_iterations=1,annealing_restore_parameters=annealing_restore_parameters)
             if f_best<=signficance_threshold:
                build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list)
                parameter_dict_to_store=copy.deepcopy(parameter_dict)
                if additional_parameter:
                   del parameter_dict_to_store[parameter]
                build_confidence_dicts(parameter_confidence_interval_dict,parameter_value_parameters_sets_dict,parameter_dict_to_store)
                
             else:
                parameter_dict=copy.deepcopy(max_parameter_dict)
             if output:
                with open("estimate_confidence_interval_output.txt", "a") as myfile:
                     myfile.write(parameter+" "+"v="+str(parameter_ub)+" chi="+str(f_best)+"\n")
          delta_parameter=min(max(perturbation*abs(parameter_dict[parameter]["v"]),min_absolute_perturbation),max_absolute_perturbation)
          print delta_parameter        
          parameter_new_value=max(min(parameter_dict[parameter]["v"]+delta_parameter*sign,parameter_ub),parameter_lb)
          parameter_dict,f_best=evaluate_parameter(label_model,parameter_dict,flux_parameter_list,parameter,parameter_lb,parameter_ub,parameter_new_value,signficance_threshold,feasability_process,parameter_precision,max_absolute_perturbation/10.0,force_flux_value_bounds,annealing_n,annealing_m,annealing_p0,annealing_pf,max_random_sample,min_random_sample,annealing_n_processes,annealing_cycle_time_limit, annealing_cycle_max_attempts,annealing_iterations,annealing_restore_parameters=annealing_restore_parameters) 
          if output:
             with open("estimate_confidence_interval_output.txt", "a") as myfile:
               myfile.write(parameter+" "+"v="+str(parameter_new_value)+" chi="+str(f_best)+"\n")
          
          if f_best>signficance_threshold:
             stop_flag=True
          else: 
             if f_best<best_objective: #If a solution is found that is better than the optimal solution restart the confidence interval simulation with the new parameter set
                parameter_dict_to_store=copy.deepcopy(parameter_dict)
                if additional_parameter:
                   clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=[parameter], clear_ratios=True,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #
                   del parameter_dict_to_store[parameter]
                parameter_confidence_interval_dict={}
                flux_confidence_interval_dict={}
                parameter_value_parameters_sets_dict={}
                if output:
                   with open("estimate_confidence_interval_output.txt", "a") as myfile:
                        myfile.write("Restarting analysis with new bestfit\n")
                best_parameter_dict,best_flux_dict,f_best=annealing(label_model,n=annealing_n,m=annealing_m,p0=annealing_p0,pf=annealing_pf,max_random_sample=max_random_sample,min_random_sample=min_random_sample,mode="fsolve",fraction_of_optimum=0,parameter_precision=parameter_precision,parameter_to_be_fitted=[],max_perturbation=max_absolute_perturbation,gui=None,fba_mode="fba", break_threshold=signficance_threshold,parameter_dict=parameter_dict_to_store,n_processes=annealing_n_processes,cycle_time_limit=annealing_cycle_time_limit, cycle_max_attempts=annealing_cycle_max_attempts,output=False,force_flux_value_bounds=force_flux_value_bounds)
                if full_mode:
                   parameter_list=None
                parameter_confidence_interval_dict,flux_confidence_interval_dict,parameter_value_parameters_sets_dict,constrained_model=estimate_confidence_intervals(label_model,significance=significance,perturbation=perturbation,min_absolute_perturbation=min_absolute_perturbation, max_absolute_perturbation=max_absolute_perturbation ,parameter_precision=parameter_precision, best_parameter_dict=best_parameter_dict ,parameter_list=parameter_list ,fraction_of_optimum=fraction_of_optimum ,force_flux_value_bounds=force_flux_value_bounds ,relative_max_random_sample=relative_max_random_sample, relative_min_random_sample= relative_min_random_sample,annealing_n=annealing_n,annealing_m=annealing_m,annealing_p0=annealing_p0,annealing_pf=annealing_pf,annealing_n_processes=annealing_n_processes,annealing_cycle_time_limit=annealing_cycle_time_limit, annealing_cycle_max_attempts= annealing_cycle_max_attempts, annealing_iterations=annealing_iterations ,annealing_restore_parameters=annealing_restore_parameters ,fname=fname,output=output,sbml_name=sbml_name,evaluate_turnovers=evaluate_turnovers)
                #parameter_confidence_interval_dict,flux_confidence_interval_dict,parameter_value_parameters_sets_dict =estimate_confidence_intervals(label_model,significance=significance,perturbation=perturbation,min_absolute_perturbation=min_absolute_perturbation,max_absolute_perturbation=max_absolute_perturbation,parameter_precision=parameter_precision,best_parameter_dict=parameter_dict,parameter_list=parameter_list,fraction_of_optimum=fraction_of_optimum,force_flux_value_bounds=force_flux_value_bounds,relative_max_random_sample=relative_max_random_sample, relative_min_random_sample= relative_min_random_sample,annealing_n=annealing_n,annealing_m=annealing_m,annealing_p0=annealing_p0,annealing_pf=annealing_pf,output=output,annealing_n_processes=annealing_n_processes,annealing_cycle_time_limit=annealing_cycle_time_limit, annealing_cycle_max_attempts=annealing_cycle_max_attempts,annealing_iterations=annealing_iterations,annealing_restore_parameters=annealing_restore_parameters,fname=fname)
                return parameter_confidence_interval_dict,flux_confidence_interval_dict,parameter_value_parameters_sets_dict,constrained_model
             if parameter_dict[parameter]["v"]<=parameter_lb or parameter_dict[parameter]["v"]>=parameter_ub:
                stop_flag=True
             """if sign==1:
                parameter_confidence_interval_dict[parameter]["ub"]=new_value
             else:
                parameter_confidence_interval_dict[parameter]["lb"]=new_value"""
             build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list)
             parameter_dict_to_store=copy.deepcopy(parameter_dict)
             if additional_parameter:
                del parameter_dict_to_store[parameter]
             build_confidence_dicts(parameter_confidence_interval_dict,parameter_value_parameters_sets_dict,parameter_dict_to_store)
          if stop_flag==True:
            print "stop"
            if sign==1:
                sign=-1
                parameter_dict=copy.deepcopy(min_parameter_dict)
                parameter_dict,f_best=evaluate_parameter(label_model,parameter_dict,flux_parameter_list,parameter,parameter_lb,parameter_ub,parameter_lb,signficance_threshold,feasability_process,parameter_precision,max_absolute_perturbation/10.0,force_flux_value_bounds,annealing_n,annealing_m,annealing_p0,annealing_pf,max_random_sample,min_random_sample,annealing_n_processes,annealing_cycle_time_limit, annealing_cycle_max_attempts,annealing_iterations=1,annealing_restore_parameters=annealing_restore_parameters)
                if f_best<=signficance_threshold:
                    build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list)
                    parameter_dict_to_store=copy.deepcopy(parameter_dict)
                    if additional_parameter:
                       del parameter_dict_to_store[parameter]
                    build_confidence_dicts(parameter_confidence_interval_dict,parameter_value_parameters_sets_dict,parameter_dict_to_store)
                else:
                    parameter_dict=copy.deepcopy(min_parameter_dict)
                if output:
                   with open("estimate_confidence_interval_output.txt", "a") as myfile:
                        myfile.write(parameter+" "+"v="+str(parameter_lb)+" chi="+str(f_best)+"\n")
                
            else:
                clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=[parameter], clear_ratios=True,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
                break
          n+=1
          print ["n",n]
          
         
   for reaction_id in original_objectives_bounds:
            reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
            reaction.lower_bound=original_objectives_bounds[reaction_id]["lb"]
            reaction.upper_bound=original_objectives_bounds[reaction_id]["ub"]
            reaction.objective_coefficient=original_objectives_bounds[reaction_id]["obj_coef"]
   #apply_parameters(label_model,best_parameter_dict,parameter_precision=parameter_precision)
   feasability_process.close()
   if "xlsx" in fname or "csv" in fname: 
      print [full_mode]
      if not full_mode:
         save_flux_confidence_interval(label_model,flux_confidence_interval_dict,significance=significance,fn=fname,omit_turnovers=not evaluate_turnovers,parameter_list=parameter_list)
      else:
        save_flux_confidence_interval(label_model,flux_confidence_interval_dict,significance=significance,fn=fname,omit_turnovers=not evaluate_turnovers,parameter_list=None)   
   elif "json" in  fname:
      save_confidence_interval_json(flux_confidence_interval_dict,parameter_confidence_interval_dict,fn=fname)
   constrained_model=save_sbml_with_confidence_results(label_model,flux_confidence_interval_dict,fname=sbml_name,parameter_dict=best_parameter_dict,full_mode=full_mode,parameter_list=parameter_list,precision=precision)     
   apply_parameters(label_model,best_parameter_dict,parameter_precision=parameter_precision)
   return parameter_confidence_interval_dict,flux_confidence_interval_dict,parameter_value_parameters_sets_dict,constrained_model
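A hedged usage sketch for estimate_confidence_intervals, assuming an iso2flux label_model whose parameters have already been fitted to the labelling data; the significance level and file names below are illustrative, not prescribed by the source.

# Illustrative call; keyword values are the documented defaults.
parameter_ci, flux_ci, parameter_sets, constrained_model = estimate_confidence_intervals(
    label_model,
    significance=0.95,
    perturbation=0.1,
    fname="confidence.json",
    sbml_name=None,
    output=True)
print(flux_ci)  # {flux_id: {"lb": ..., "ub": ...}, ...}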
Code example #42
0
File: test_variability.py Project: opencobra/cobrapy
def test_parallel_flux_variability(model, fva_results, all_solvers):
    """Test parallel FVA."""
    model.solver = all_solvers
    fva_out = flux_variability_analysis(model, processes=2)
    fva_out.sort_index(inplace=True)
    assert np.allclose(fva_out, fva_results)
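For reference, a minimal sketch of the parallel call outside the test harness, assuming a cobrapy version that still ships cobra.test.create_test_model; processes=2 mirrors the test above and fraction_of_optimum is shown explicitly.

import cobra.test
from cobra.flux_analysis import flux_variability_analysis

model = cobra.test.create_test_model("textbook")
# Two worker processes split the reaction list between them.
fva_df = flux_variability_analysis(model, fraction_of_optimum=0.9, processes=2)
print(fva_df.sort_index().head())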
Code example #43
0
def get_ratios_bounds(label_model,ratio,perturbation,lp_tolerance_feasibility=1e-9,parameter_dict=None):
    reactions=ratio.split("/")
    reaction1=reactions[0]
    reaction2=reactions[1]
    model=label_model.constrained_model
    model.optimize(tolerance_feasibility=lp_tolerance_feasibility)
    try:
        v=original_v=ub=lb=teoric_lb=teoric_ub=model.solution.x_dict[reaction1]/(model.solution.x_dict[reaction2])
    except:
       v=original_v=ub=lb=teoric_lb=teoric_ub=model.solution.x_dict[reaction1]/(model.solution.x_dict[reaction2]+lp_tolerance_feasibility)
    if parameter_dict!=None:
       clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=parameter_dict.keys(), clear_ratios=True,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
    fva=flux_variability_analysis(model,reaction_list=[reaction1,reaction2], fraction_of_optimum=0,tolerance_feasibility=lp_tolerance_feasibility)
    try:
       value=fva[reaction1]["minimum"]/(fva[reaction2]["maximum"])
       print [fva[reaction1]["minimum"],fva[reaction2]["maximum"]] 
    except:
       value=fva[reaction1]["minimum"]/(fva[reaction2]["maximum"]+lp_tolerance_feasibility) 
    print value
    teoric_lb=min(teoric_lb,value)
    teoric_ub=max(teoric_ub,value) 
    try:
       value=fva[reaction1]["maximum"]/(fva[reaction2]["minimum"])
       print [fva[reaction1]["maximum"],fva[reaction2]["minimum"]] 
    except:
        value=fva[reaction1]["maximum"]/(fva[reaction2]["minimum"]+lp_tolerance_feasibility)
    print value
    teoric_lb=min(teoric_lb,value)
    teoric_ub=max(teoric_ub,value)  
    try:
       value=fva[reaction1]["minimum"]/(fva[reaction2]["minimum"])
       print [fva[reaction1]["minimum"],fva[reaction2]["minimum"]] 
    except:
        value=fva[reaction1]["minimum"]/(fva[reaction2]["minimum"]+lp_tolerance_feasibility)
    print value
    teoric_lb=min(teoric_lb,value)
    teoric_ub=max(teoric_ub,value)  
    try:
       value=fva[reaction1]["maximum"]/(fva[reaction2]["maximum"])
       print [fva[reaction1]["maximum"],fva[reaction2]["maximum"]] 
    except:
        value=fva[reaction1]["maximum"]/(fva[reaction2]["maximum"]+lp_tolerance_feasibility)
    print value
    teoric_lb=min(teoric_lb,value)
    teoric_ub=max(teoric_ub,value)  
    print [teoric_lb,teoric_ub]
    v=lb=ub=original_v
    print v
    #Increase until not feasible
    ratio_dict={ratio:{reaction1:v,reaction2:1}}
    while model.optimize().status=="optimal":
          ub=v
          delta_parameter=min(max(perturbation*abs(v),0.001),100000000)
          print delta_parameter
          v=max(min(v+delta_parameter*1,teoric_ub),teoric_lb)
          ratio_dict[ratio]={reaction1:v,reaction2:1}
          print ratio_dict
          apply_ratios(model,ratio_dict)
          print model.optimize()
          if v>=teoric_ub:
             print "breaking 1"
             break
    ub=v
    ratio_dict={ratio:{reaction1:original_v,reaction2:1}}
    v=original_v
    while model.optimize().status=="optimal":
          lb=v
          delta_parameter=min(max(perturbation*abs(v),0.001),100000000)
          print delta_parameter
          v=max(min(v+delta_parameter*-1,teoric_ub),teoric_lb)
          ratio_dict[ratio]={reaction1:v,reaction2:1}
          print ratio_dict
          apply_ratios(model,ratio_dict)
          print model.optimize()
          if v<=teoric_lb:
             print "breaking -1"
             break
    lb=v
    remove_ratio(model,ratio,ratio_dict) #Remove the Ratio
    if parameter_dict!=None:
       apply_parameters(label_model,parameter_dict)
    apply_ratios(label_model.constrained_model,label_model.ratio_dict) #Re-add all ratios that might have been disabled
    return original_v,lb,ub
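A hedged sketch of calling the helper above to bracket a flux ratio, assuming an iso2flux label_model; the reaction ids in the ratio string are hypothetical placeholders.

# Hypothetical reaction ids; the ratio string is "<numerator>/<denominator>".
v, lb, ub = get_ratios_bounds(label_model, "PGI/PFK", 0.1,
                              lp_tolerance_feasibility=1e-9)
print([v, lb, ub])  # current ratio value and its feasible lower/upper bounds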
Code example #44
0
def identify_free_fluxes(label_model,parameter_dict={},fraction_of_optimum=0,change_threshold=0.1,parameter_precision=0.01,max_d=0.1,key_reactions=[],original_fva=None,restore_model=False,debug=False):
    precision=int(-1*(math.log10(parameter_precision)))
    #print label_model.constrained_model.reactions.get_by_id("biomass").lower_bound
    reaction_list=[]
    original_model=copy.deepcopy(label_model.constrained_model)
    #apply_parameters(label_model,parameter_dict=parameter_dict) #Paremeters will be applied with the default precision im label_model
    #original_objective_list=[]
    #model=label_model.constrained_model
    for reaction in label_model.reactions_propagating_label:#label_model.reaction_n_dict: #Find reactions that propagate label
        if reaction in label_model.merged_reactions_reactions_dict:
           for merged_reaction in label_model.merged_reactions_reactions_dict[reaction]:
               if "_reverse" not in merged_reaction:
                  reaction_list.append(merged_reaction) 
        elif "_reverse" not in reaction and "RATIO_" not in reaction:
           reaction_list.append(reaction)
    for reaction in key_reactions:
        if reaction in label_model.constrained_model.reactions and reaction not in reaction_list:
           reaction_list.append(reaction) 
    #Classify reactions 
    if original_fva==None:
       original_fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility, reaction_list=reaction_list)
    #original_mfva=flux_variability_analysis(label_model.constrained_model,reaction_list=reaction_list, fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
    additional_parameters_list=[]
    first_time=True
    done_reactions=[]
    for i in xrange(0,10000):
      #print model.optimize()
      free_ex_reactions_list=[]
      free_reactions_list=[]
      priortizided_reactions_list=[]
      mfva=flux_variability_analysis(label_model.constrained_model,reaction_list=reaction_list, fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
      for reaction in mfva:
          """if reaction not in label_model.reaction_emu_dict and reaction not in label_model.reaction_merged_reactions_dict:
             print "Error"
             continue"""
          maximum=mfva[reaction]["maximum"]
          minimum=mfva[reaction]["minimum"]
          if abs(maximum-minimum)>change_threshold:
             if reaction in key_reactions:
                priortizided_reactions_list.append(reaction)
                """elif "EX_" in reaction:
                free_ex_reactions_list.append(reaction)
                #print free_ex_reactions_list"""
             else:
                free_reactions_list.append(reaction)
      #print free_reactions_list 
      #print free_ex_reactions_list
      if  free_reactions_list==[] and priortizided_reactions_list==[]:
          break
      elif priortizided_reactions_list!=[]:
           #Find the reaction with more variation allowed
           max_variation=0
           for reaction in priortizided_reactions_list:
               variation=mfva[reaction]["maximum"]-mfva[reaction]["minimum"]
               if variation>=max_variation:
                   max_variation=variation
                   reaction_id=reaction
                   maximum=mfva[reaction_id]["maximum"]
                   minimum=mfva[reaction_id]["minimum"]  
                   original_maximum=round_down(original_fva[reaction_id]["maximum"],precision)
                   original_minimum=round_up(original_fva[reaction_id]["minimum"],precision)
           """elif free_ex_reactions_list!=[]:
           #Find the reaction with more variation allowed
           max_variation=0
           for reaction in free_ex_reactions_list:
               variation=mfva[reaction]["maximum"]-mfva[reaction]["minimum"]
               if variation>=max_variation:
                   max_variation=variation
                   reaction_id=reaction
                   maximum=mfva[reaction_id]["maximum"]
                   minimum=mfva[reaction_id]["minimum"]  
                   original_maximum=round(original_mfva[reaction_id]["maximum"],precision)
                   original_minimum=round(original_mfva[reaction_id]["minimum"],precision)"""
      elif free_reactions_list!=[]:
               max_variation=0
               for reaction in free_reactions_list:
                 variation=mfva[reaction]["maximum"]-mfva[reaction]["minimum"]
                 if variation>=max_variation:
                   max_variation=variation
                   reaction_id=reaction 
                   maximum=mfva[reaction_id]["maximum"]
                   minimum=mfva[reaction_id]["minimum"]  
                   original_maximum=original_fva[reaction_id]["maximum"]#round_down(original_fva[reaction_id]["maximum"],precision)
                   original_minimum=original_fva[reaction_id]["minimum"]#round_up(original_fva[reaction_id]["minimum"],precision)
      print ["max variation",max_variation]
      reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
      value=random.uniform(minimum,maximum)
      #value=round((4*minimum+1*maximum)/5,precision) #Weighted average
      parameter_dict[reaction_id]={"v":value,"lb":original_minimum,"ub":original_maximum ,"type":"flux value","reactions":[reaction_id],"max_d":max_d,"original_lb":original_model.reactions.get_by_id(reaction_id).lower_bound,"original_ub":original_model.reactions.get_by_id(reaction_id).upper_bound,"original_objective_coefficient":0.0}
      apply_parameters(label_model,parameter_dict=parameter_dict,parameter_precision=parameter_precision,parameter_list=[reaction_id])
      done_reactions.append(reaction_id)
      if debug:
         print [reaction_id,mfva[reaction_id],[reaction.lower_bound,reaction.upper_bound]]
      """reaction.lower_bound=value
      reaction.upper_bound=value"""
      #print([reaction.id,mfva[reaction_id],value,model.optimize()])
      #print(reaction_id+" "+str(parameter_dict[reaction_id]))
    print ("%s free fluxes found"%(len(done_reactions)))
    """if add_turnover==True:
       for turnover in label_model.turnover_flux_dict:
        if turnover not in excluded_turnovers or (turnover+"_turnover") in label_model.parameter_dict: 
           parameter_dict[turnover+"_turnover"]={"v":label_model.turnover_flux_dict[turnover],"lb":0,"ub":turnover_upper_bound,"max_d":max_d,"type":"turnover","reactions":[turnover]}"""
    if restore_model==True:   
        label_model.constrained_model=original_model
    return parameter_dict
Code example #45
0
File: pFVA.py Project: dandanvidi/capacity-usage
flux_counter.add_metabolites(metabolites={fake:-1})                
m.add_reaction(flux_counter) 
m.change_objective(flux_counter)

m.reactions.get_by_id('EX_glc_e_reverse').upper_bound = 0
   
rxns = {r.id:r for r in m.reactions}

index = pd.MultiIndex.from_product([gc.index, ['maximum', 'minimum']])
fluxes = pd.DataFrame(index=index, columns=rxns.keys())

for i,c in enumerate(gc.index):
        
    rxns['EX_'+gc['media_key'][c]+'_e'].lower_bound = -1000

    rxns['Ec_biomass_iJO1366_WT_53p95M'].upper_bound = gc['growth rate [h-1]'][c]
    rxns['Ec_biomass_iJO1366_WT_53p95M'].lower_bound = gc['growth rate [h-1]'][c]

    for j, r in enumerate(m.reactions):
        fva_results = flux_variability_analysis(m,reaction_list=[r],
                                            objective_sense='minimize',fraction_of_optimum=1.0001)    
        fva = pd.DataFrame.from_dict(fva_results)
        fluxes[r.id][c,'maximum'] = fva.loc['maximum'][0]
        fluxes[r.id][c,'minimum'] = fva.loc['minimum'][0]
        print c, i, j, r
        
    rxns['EX_'+gc['media_key'][c]+'_e'].lower_bound = 0
    
fluxes.dropna(how='all', inplace=True)
fluxes.T.to_csv('../data/flux_variability_[mmol_gCDW_h]_01%.csv')    
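The loop above calls flux_variability_analysis once per reaction, which is slow. Below is a sketch of the same per-condition sweep done with a single FVA call per condition; it assumes a cobrapy version in which flux_variability_analysis returns a pandas DataFrame indexed by reaction id, and the objective_sense/fraction_of_optimum settings of the original call are omitted for simplicity.

# Sketch: one FVA call per growth condition instead of one per reaction.
for c in gc.index:
    rxns['EX_' + gc['media_key'][c] + '_e'].lower_bound = -1000
    rxns['Ec_biomass_iJO1366_WT_53p95M'].upper_bound = gc['growth rate [h-1]'][c]
    rxns['Ec_biomass_iJO1366_WT_53p95M'].lower_bound = gc['growth rate [h-1]'][c]
    fva_df = flux_variability_analysis(m, reaction_list=m.reactions)
    for r_id, row in fva_df.iterrows():
        fluxes[r_id][c, 'maximum'] = row['maximum']
        fluxes[r_id][c, 'minimum'] = row['minimum']
    rxns['EX_' + gc['media_key'][c] + '_e'].lower_bound = 0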
Code example #46
0
def test_fva_data_frame(model):
    """Test DataFrame obtained from FVA."""
    df = flux_variability_analysis(model)
    assert np.all([df.columns.values == ['minimum', 'maximum']])
Code example #47
0
#too slow
#mymodel_bounds = [([rec.id for rec in mymodel.reactions],
#                   [rec.lower_bound for rec in mymodel.reactions],
#                   [rec.upper_bound for rec in mymodel.reactions],
#                   [flux_variability_analysis(mymodel, reaction_list=rec.id, pfba_factor=1.1) for rec in mymodel.reactions])]
mymodel_flux_minimum = []
mymodel_flux_maximum = []
mymodel_upper_bound = []
mymodel_lower_bound = []
bar = myprogressbar(maxvalue=len(mymodel.reactions))
bar.start()
for i in range(len(mymodel.reactions)):
    mymodel_lower_bound.append(mymodel.reactions[i].lower_bound)
    mymodel_upper_bound.append(mymodel.reactions[i].upper_bound)
    bounds = flux_variability_analysis(mymodel,
                                       reaction_list=mymodel.reactions[i],
                                       pfba_factor=1.1)
    mymodel_flux_maximum.append(bounds.loc[mymodel.reactions[i].id, 'maximum'])
    mymodel_flux_minimum.append(bounds.loc[mymodel.reactions[i].id, 'minimum'])
    bar.update(i + 1)
bar.finish()

mymodel_bounds = zip(mymodel_flux_minimum, mymodel_flux_maximum,
                     mymodel_lower_bound, mymodel_upper_bound)

import csv

with open("mitocore_bounds.csv", 'w', newline='') as out:
    csv_out = csv.writer(out)
    csv_out.writerow(
        ['Minimum Flux', 'Maximum Flux', 'Lower_bound', 'Upper_bound'])
    # (assumed continuation) write one row of bounds per reaction
    csv_out.writerows(mymodel_bounds)
Code example #48
0
File: test_variability.py Project: opencobra/cobrapy
def test_fva_data_frame(model):
    """Test DataFrame obtained from FVA."""
    df = flux_variability_analysis(model)
    assert np.all([df.columns.values == ['minimum', 'maximum']])
Code example #49
0
def identify_free_parameters(label_model,parameter_dict={},n_samples=50,add_to_model=True,parameter_precision=1e-5,change_threshold=1e-3,fraction_of_optimum=0,max_d=0.1,key_reactions=[],add_turnover=True,excluded_turnovers=[],turnover_upper_bound=None,debug=True):
 free_parameters=copy.deepcopy(parameter_dict)
 precision=int(-1*(math.log10(parameter_precision)))
 apply_parameters(label_model,parameter_dict=free_parameters)
 for reaction in label_model.constrained_model.objective: 
     fva=flux_variability_analysis(label_model.constrained_model,reaction_list=[reaction], fraction_of_optimum=fraction_of_optimum,tolerance_feasibility=label_model.lp_tolerance_feasibility)
     print fva
     minimum=round_down(fva[reaction.id]["minimum"],precision)#max(round_up(fva[reaction.id]["minimum"],precision),reaction.lower_bound)
     maximum=round_up(fva[reaction.id]["maximum"],precision)#min(round_down(fva[reaction.id]["maximum"],precision),reaction.upper_bound)
     value=((minimum+maximum)/2) 
     #original_objective_list.append(reaction)
     free_parameters[reaction.id]={"v":value,"lb":minimum,"ub":maximum ,"type":"flux value","reactions":[reaction.id],"max_d":max_d,"original_lb":reaction.lower_bound,"original_ub":reaction.upper_bound,"original_objective_coefficient":reaction.objective_coefficient}
     reaction.lower_bound=minimum
     reaction.upper_bound=maximum
     reaction.objective_coefficient=0
     #print [model.optimize(),minimum,maximum]
 original_fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
 #print free_parameters
 apply_parameters(label_model,parameter_dict=free_parameters,parameter_precision=parameter_precision)
 #print model.optimize()
 for i in range(0,n_samples):
  print "sample "+str(i)
  try:
   #original_model=copy.deepcopy(label_model.constrained_model)
   free_parameters=identify_free_fluxes(label_model,parameter_dict=free_parameters,fraction_of_optimum=fraction_of_optimum ,change_threshold=change_threshold, parameter_precision=parameter_precision,max_d=max_d,key_reactions=key_reactions,original_fva=original_fva,debug=debug)
   flux_value_parameter_list=[]
   for parameter in free_parameters:
        local_parameter_dict=free_parameters[parameter]
        if local_parameter_dict["type"]=="flux value":
           flux_value_parameter_list.append(parameter)
           #Save the original lower and upper_bounds and turnover 
           #Generate the samples
   for parameter in flux_value_parameter_list:
       for reaction_id in free_parameters[parameter]["reactions"]:
           reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
           reaction.lower_bound=max(free_parameters[parameter]["original_lb"],free_parameters[parameter]["lb"])
           reaction.upper_bound=min(free_parameters[parameter]["original_ub"],free_parameters[parameter]["ub"])
   working_flux_value_parameter_list=random.sample(flux_value_parameter_list, len(flux_value_parameter_list))
   for reaction_id in working_flux_value_parameter_list: 
    
            fva=flux_variability_analysis(label_model.constrained_model, reaction_list=[reaction_id],fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
            #if fva[reaction_id]["maximum"]-fva[reaction_id]["minimum"]>parameter_precision:
            new_value=random.uniform(fva[reaction_id]["minimum"],fva[reaction_id]["maximum"])
            free_parameters[reaction_id]["v"]=new_value
            apply_parameters(label_model,parameter_dict=free_parameters,parameter_precision=parameter_precision,parameter_list=[reaction_id])
            reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
            #print[fva,new_value,[reaction.lower_bound,reaction.upper_bound],label_model.constrained_model.optimize()]
            
  except:
            print "Error caught"
            continue
 if add_turnover==True:
       """if turnover_upper_bound==None: 
          turnover_upper_bound=0
          for reaction_id in label_model.reaction_n_dict:
              if reaction_id in original_fva:
                  turnover_upper_bound=max(original_fva[reaction_id]["maximum"],turnover_upper_bound)
          turnover_upper_bound=round_up(turnover_upper_bound,0)""" 
       for turnover in label_model.turnover_flux_dict:
        if turnover not in excluded_turnovers and (turnover+"_turnover") not in free_parameters and label_model.turnover_flux_dict[turnover]["ub"]!=label_model.turnover_flux_dict[turnover]["lb"]:
           ub=label_model.turnover_flux_dict[turnover]["ub"] 
           lb=label_model.turnover_flux_dict[turnover]["lb"]
           v=label_model.turnover_flux_dict[turnover]["v"] 
           free_parameters[turnover+"_turnover"]={"v":v,"lb":lb,"ub":ub,"max_d":max_d,"type":"turnover","reactions":[turnover]}
 if add_to_model==True:
     label_model.parameter_dict=free_parameters
     apply_parameters(label_model,parameter_dict=free_parameters)
 else:
     clear_parameters(label_model,parameter_dict=copy.deepcopy(free_parameters))
 return free_parameters   
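A hedged usage sketch for identify_free_parameters, assuming an iso2flux label_model; the reaction ids passed as key_reactions are hypothetical placeholders for fluxes that should be parameterized first.

# Hypothetical usage; key_reactions entries are illustrative ids.
free_parameters = identify_free_parameters(
    label_model,
    parameter_dict={},
    n_samples=50,
    add_to_model=True,
    parameter_precision=1e-5,
    key_reactions=["EX_glc__D_e", "biomass"])
print(len(free_parameters))  # number of parameters found, including turnovers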
Code example #50
0
File: summary.py Project: TPXP/cobrapy
def model_summary(model, threshold=1E-8, fva=None, floatfmt='.3g',
                  **solver_args):
    """Print a summary of the input and output fluxes of the model.

    threshold: float
        tolerance for determining if a flux is zero (not printed)

    fva: float (0->1), or None
        Whether or not to include flux variability ranges in the output
        summary. If given, fva is the fraction of the optimum objective
        to be searched.

    floatfmt: string
        format method for floats, passed to tabulate. Default is '.3g'.

    """
    legacy, _ = choose_solver(model, solver=solver_args.get("solver"))
    if legacy:
        raise NotImplementedError(
            "Summary support for legacy solvers was removed.")

    # Create a dataframe of objective fluxes
    objective_reactions = linear_reaction_coefficients(model)
    obj_fluxes = pd.DataFrame({key: key.flux * value for key, value in
                               iteritems(objective_reactions)},
                              index=['flux']).T
    obj_fluxes['id'] = obj_fluxes.apply(
        lambda x: format_long_string(x.name.id, 15), 1)

    # Build a dictionary of metabolite production from the boundary reactions
    # collect rxn.flux before fva which invalidates previous solver state
    boundary_reactions = model.exchanges
    metabolite_fluxes = {}
    for rxn in boundary_reactions:
        for met, stoich in iteritems(rxn.metabolites):
            metabolite_fluxes[met] = {
                'id': format_long_string(met.id, 15),
                'flux': stoich * rxn.flux}

    # Calculate FVA results if requested
    if fva:
        fva_results = flux_variability_analysis(
            model, reaction_list=boundary_reactions, fraction_of_optimum=fva,
            **solver_args)

        for rxn in boundary_reactions:
            for met, stoich in iteritems(rxn.metabolites):
                imin = stoich * fva_results.loc[rxn.id]['minimum']
                imax = stoich * fva_results.loc[rxn.id]['maximum']
                # Correct 'max' and 'min' for negative values
                metabolite_fluxes[met].update({
                    'fmin': imin if abs(imin) <= abs(imax) else imax,
                    'fmax': imax if abs(imin) <= abs(imax) else imin,
                })

    # Generate a dataframe of boundary fluxes
    metabolite_fluxes = pd.DataFrame(metabolite_fluxes).T
    metabolite_fluxes = _process_flux_dataframe(
        metabolite_fluxes, fva, threshold, floatfmt)

    # Begin building string output table
    def get_str_table(species_df, fva=False):
        """Formats a string table for each column"""

        if not fva:
            return tabulate(species_df.loc[:, ['id', 'flux']].values,
                            floatfmt=floatfmt, tablefmt='plain').split('\n')

        else:
            return tabulate(
                species_df.loc[:, ['id', 'flux', 'fva_fmt']].values,
                floatfmt=floatfmt, tablefmt='simple',
                headers=['id', 'Flux', 'Range']).split('\n')

    in_table = get_str_table(
        metabolite_fluxes[metabolite_fluxes.is_input], fva=fva)
    out_table = get_str_table(
        metabolite_fluxes[~metabolite_fluxes.is_input], fva=fva)
    obj_table = get_str_table(obj_fluxes, fva=False)

    # Print nested output table
    print_(tabulate(
        [entries for entries in zip_longest(in_table, out_table, obj_table)],
        headers=['IN FLUXES', 'OUT FLUXES', 'OBJECTIVES'], tablefmt='simple'))
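A minimal usage sketch for the model summary above, assuming a cobrapy version that still ships cobra.test.create_test_model; fva=0.95 adds a flux-variability range for each boundary flux at 95% of the optimum.

import cobra.test

model = cobra.test.create_test_model("textbook")
model.optimize()
# IN FLUXES / OUT FLUXES / OBJECTIVES table with FVA ranges.
model.summary(fva=0.95)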
Code example #51
0
def remove_impossible_emus(label_model):
   """
   Removes EMUs that cannot become labelled given the labelled substrates used in the experiment
    
   """
   #size_expanded_model2_dict={} 
   possible_isotopomers=[]
   impossible_isotopomers=[]
   f=open("impossible_isotopologues","w")
   #If some metabolites are marked as input but have no label described, assume they are all m0 and add them to the initial label
   for isotopomer in label_model.isotopomer_object_list:
       if isotopomer.input==True:
          if isotopomer.id not in label_model.metabolite_emu_dict:
             continue
          emus=label_model.metabolite_emu_dict[isotopomer.id]
          for condition in label_model.initial_label:
              present=False
              for emu in emus:
                for mi in label_model.emu_dict[emu]["mid"]:
                  if label_model.emu_dict[emu]["mid"][mi] in label_model.initial_label[condition]:
                     present=True
                     break
                if not present:
                 for mi in label_model.emu_dict[emu]["mid"]:
                     if mi==0:
                        label_model.initial_label[condition][label_model.emu_dict[emu]["mid"][mi]]=1
                     else:
                        label_model.initial_label[condition][label_model.emu_dict[emu]["mid"][mi]]=0
   present_initial_label=[]
   for condition in label_model.initial_label:  #For simplicity the label model is shared across all conditions
        for label in label_model.initial_label[condition]:
          if label_model.initial_label[condition][label]>0.0:
             if label not in present_initial_label:
                present_initial_label.append(label)                            
   for model_size in sorted(label_model.size_expanded_model_dict.keys()): #needs to be done in order of size
      expanded_emu_model=label_model.size_expanded_model_dict[model_size]
      label_exchange_reactions=[]
      for metabolite in expanded_emu_model.metabolites: 
          reaction = Reaction("EX_"+metabolite.id)
          reaction.name = 'EX'+metabolite.name
          reaction.subsystem = 'Test'
          reaction.lower_bound = 0
          reaction.upper_bound = 1000.  # This is the default
          reaction.add_metabolites({metabolite:-1})        
          expanded_emu_model.add_reaction(reaction) 
          label_exchange_reactions.append(reaction)
      for condition in label_model.initial_label:  #For simplicity the label model is shared across all conditions
        for label in label_model.initial_label[condition]:
             ex_id="EX_"+label
             if ex_id in expanded_emu_model.reactions:
                if label in present_initial_label:
                   expanded_emu_model.reactions.get_by_id(ex_id).lower_bound=-1000
                else: 
                    expanded_emu_model.reactions.get_by_id(ex_id).lower_bound=0
                    expanded_emu_model.reactions.get_by_id(ex_id).upper_bound=0
        """for isotopomer in label_model.isotopomer_object_list:
            if isotopomer.input==True:
             if isotopomer.id in label_model.metabolite_emu_dict:
               for emu in label_model.metabolite_emu_dict[isotopomer.id]:
                   mid0=label_model.emu_dict[emu]["mid"][0]
                   ex_id="EX_"+mid0
                   if ex_id in expanded_emu_model.reactions:
                      expanded_emu_model.reactions.get_by_id(ex_id).lower_bound=-1000"""
      """if model_size==1.0:
         import cobra
         cobra.io.write_sbml_model(expanded_emu_model,"size1.sbml")"""
      
      for isotopomer in possible_isotopomers: #If an isotopomer is produced in a smaller size it should appear as present in this size
          ex_id="EX_"+isotopomer
          if ex_id in expanded_emu_model.reactions:
             #print ex_id 
             expanded_emu_model.reactions.get_by_id(ex_id).lower_bound=-1000
      fva=flux_variability_analysis(expanded_emu_model,reaction_list=label_exchange_reactions,fraction_of_optimum=0)
      for x in label_exchange_reactions: 
          if fva[x.id]["maximum"]>0.00001 or fva[x.id]["minimum"]<-0.00001 : 
             if x.id[3:] not in possible_isotopomers: #prevent repeated entries
                possible_isotopomers.append(x.id[3:])
             #print x.id
          else: 
             if x.id[3:] not in impossible_isotopomers: #prevent repeated entries
                impossible_isotopomers.append(x.id[3:])
             #print x.id
      #Remove reactions where those isotopomers participate: 
      reactions_to_remove=[]
      for x in  label_exchange_reactions:
          x.remove_from_model(remove_orphans=True)
      impossible_isotopomers_object_list=[]    
      for isotopomer_id in impossible_isotopomers:
          if isotopomer_id not in expanded_emu_model.metabolites:
             continue  
          isotopomer=expanded_emu_model.metabolites.get_by_id(isotopomer_id)
          for x in isotopomer._reaction:
              if x not in reactions_to_remove and x.metabolites[isotopomer]<1:
                 reactions_to_remove.append(x)
          isotopomer.remove_from_model(method='subtractive')
      for reaction in expanded_emu_model.reactions:
           if len(reaction.metabolites)==0 and reaction not in reactions_to_remove:
              reactions_to_remove.append(reaction) 
      for reaction in reactions_to_remove:
         print "removing "+reaction.id
         f.write("removing "+reaction.id+"\n")
         emu_reaction=label_model.expanded_reaction_emu_dict[reaction.id]
         label_model.emu_reaction_expanded_dict[emu_reaction].remove(reaction.id)
         del label_model.expanded_reaction_emu_dict[reaction.id]               
         reaction.remove_from_model(remove_orphans=True)
      #size_expanded_model2_dict[model_size]=expanded_emu_model
      #labek_model=size_expanded_model_dict[model_size]=expanded_emu_model 
              
   for emu in impossible_isotopomers:
       f.write(emu+"\n")
   f.close()
   #Remove the reactions where only m0 is transmitted and replace them with input reactions
   remove_m0_reactions(label_model,impossible_isotopomers,possible_isotopomers)
   return label_model.size_expanded_model_dict
Code example #52
0
File: summary.py Project: TPXP/cobrapy
def metabolite_summary(met, threshold=0.01, fva=False, floatfmt='.3g',
                       **solver_args):
    """Print a summary of the reactions which produce and consume this
    metabolite

    threshold: float
        a value below which to ignore reaction fluxes

    fva: float (0->1), or None
        Whether or not to include flux variability analysis in the output.
        If given, fva should be a float between 0 and 1, representing the
        fraction of the optimum objective to be searched.

    floatfmt: string
        format method for floats, passed to tabulate. Default is '.3g'.

    """
    rxn_id = list()
    flux = list()
    reaction = list()
    for rxn in met.reactions:
        rxn_id.append(format_long_string(rxn.id, 10))
        flux.append(rxn.flux * rxn.metabolites[met])
        reaction.append(format_long_string(rxn.reaction, 40 if fva else 50))

    flux_summary = pd.DataFrame(data={
        "id": rxn_id, "flux": flux, "reaction": reaction})

    if fva:
        fva_results = flux_variability_analysis(
            met.model, met.reactions, fraction_of_optimum=fva,
            **solver_args)
        flux_summary.index = flux_summary["id"]
        flux_summary["maximum"] = zeros(len(rxn_id))
        flux_summary["minimum"] = zeros(len(rxn_id))
        for rid, rxn in zip(rxn_id, met.reactions):
            imax = rxn.metabolites[met] * fva_results.loc[rxn.id, "maximum"]
            imin = rxn.metabolites[met] * fva_results.loc[rxn.id, "minimum"]
            flux_summary.loc[rid, "fmax"] = (imax if abs(imin) <= abs(imax)
                                             else imin)
            flux_summary.loc[rid, "fmin"] = (imin if abs(imin) <= abs(imax)
                                             else imax)

    assert flux_summary.flux.sum() < 1E-6, "Error in flux balance"

    flux_summary = _process_flux_dataframe(flux_summary, fva, threshold,
                                           floatfmt)

    flux_summary['percent'] = 0
    total_flux = flux_summary[flux_summary.is_input].flux.sum()

    flux_summary.loc[flux_summary.is_input, 'percent'] = \
        flux_summary.loc[flux_summary.is_input, 'flux'] / total_flux
    flux_summary.loc[~flux_summary.is_input, 'percent'] = \
        flux_summary.loc[~flux_summary.is_input, 'flux'] / total_flux

    flux_summary['percent'] = flux_summary.percent.apply(
        lambda x: '{:.0%}'.format(x))

    if fva:
        flux_table = tabulate(
            flux_summary.loc[:, ['percent', 'flux', 'fva_fmt', 'id',
                                 'reaction']].values, floatfmt=floatfmt,
            headers=['%', 'FLUX', 'RANGE', 'RXN ID', 'REACTION']).split('\n')
    else:
        flux_table = tabulate(
            flux_summary.loc[:, ['percent', 'flux', 'id', 'reaction']].values,
            floatfmt=floatfmt, headers=['%', 'FLUX', 'RXN ID', 'REACTION']
        ).split('\n')

    flux_table_head = flux_table[:2]

    met_tag = "{0} ({1})".format(format_long_string(met.name, 45),
                                 format_long_string(met.id, 10))

    head = "PRODUCING REACTIONS -- " + met_tag
    print_(head)
    print_("-" * len(head))
    print_('\n'.join(flux_table_head))
    print_('\n'.join(
        pd.np.array(flux_table[2:])[flux_summary.is_input.values]))

    print_()
    print_("CONSUMING REACTIONS -- " + met_tag)
    print_("-" * len(head))
    print_('\n'.join(flux_table_head))
    print_('\n'.join(
        pd.np.array(flux_table[2:])[~flux_summary.is_input.values]))
Code example #53
0
File: geometric.py Project: taylo5jm/cobrapy
def geometric_fba(model, epsilon=1E-06, max_tries=200):
    """
    Perform geometric FBA to obtain a unique, centered flux distribution.

    Geometric FBA [1]_ formulates the problem as a polyhedron and
    then solves it by bounding the convex hull of the polyhedron.
    The bounding forms a box around the convex hull which reduces
    with every iteration and extracts a unique solution in this way.

    Parameters
    ----------
    model: cobra.Model
        The model to perform geometric FBA on.
    epsilon: float, optional
        The convergence tolerance of the model (default 1E-06).
    max_tries: int, optional
        Maximum number of iterations (default 200).

    Returns
    -------
    cobra.Solution
        The solution object containing all the constraints required
        for geometric FBA.

    References
    ----------
    .. [1] Smallbone, Kieran & Simeonidis, Vangelis (2009).
           Flux balance analysis: A geometric perspective.
           Journal of Theoretical Biology, 258(2), 311-315.
           doi:10.1016/j.jtbi.2009.01.027

    """

    with model:
        # Storage for the variables and constraints added to the model.
        consts = []
        obj_vars = []
        updating_vars_cons = []

        # The first iteration.
        prob = model.problem
        add_pfba(model)  # Minimize the solution space to a convex hull.
        model.optimize()
        fva_sol = flux_variability_analysis(model)
        mean_flux = (fva_sol["maximum"] + fva_sol["minimum"]).abs() / 2

        # Set the gFBA constraints.
        for rxn in model.reactions:
            var = prob.Variable("geometric_fba_" + rxn.id,
                                lb=0,
                                ub=mean_flux[rxn.id])
            upper_const = prob.Constraint(rxn.flux_expression - var,
                                          ub=mean_flux[rxn.id],
                                          name="geometric_fba_upper_const_" +
                                          rxn.id)
            lower_const = prob.Constraint(rxn.flux_expression + var,
                                          lb=fva_sol.at[rxn.id, "minimum"],
                                          name="geometric_fba_lower_const_" +
                                          rxn.id)
            updating_vars_cons.append((rxn.id, var, upper_const, lower_const))
            consts.extend([var, upper_const, lower_const])
            obj_vars.append(var)
        model.add_cons_vars(consts)

        # Minimize the distance between the flux distribution and center.
        model.objective = prob.Objective(Zero, sloppy=True, direction="min")
        model.objective.set_linear_coefficients({v: 1.0 for v in obj_vars})
        # Update loop variables.
        sol = model.optimize()
        fva_sol = flux_variability_analysis(model)
        mean_flux = (fva_sol["maximum"] + fva_sol["minimum"]).abs() / 2
        delta = (fva_sol["maximum"] - fva_sol["minimum"]).max()
        count = 1
        LOGGER.debug("Iteration: %d; delta: %.3g; status: %s.", count, delta,
                     sol.status)

        # Subsequent iterations: keep shrinking the bounding box until the
        # largest flux range (delta) drops below the convergence tolerance.
        while delta > epsilon and count < max_tries:
            for rxn_id, var, u_c, l_c in updating_vars_cons:
                var.ub = mean_flux[rxn_id]
                u_c.ub = mean_flux[rxn_id]
                l_c.lb = fva_sol.at[rxn_id, "minimum"]
            # Update loop variables.
            sol = model.optimize()
            fva_sol = flux_variability_analysis(model)
            mean_flux = (fva_sol["maximum"] + fva_sol["minimum"]).abs() / 2
            delta = (fva_sol["maximum"] - fva_sol["minimum"]).max()
            count += 1
            LOGGER.debug("Iteration: %d; delta: %.3g; status: %s.", count,
                         delta, sol.status)

        if count == max_tries:
            raise RuntimeError(
                "The iterations have exceeded the maximum value of {}. "
                "This is probably due to the increased complexity of the "
                "model and can lead to inaccurate results. Please set a "
                "different convergence tolerance and/or increase the "
                "maximum iterations".format(max_tries))

    return sol
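
A minimal usage sketch for geometric_fba, assuming it is importable from cobra.flux_analysis (true for recent cobrapy releases); the SBML path below is a placeholder.

# Illustrative sketch only; the model path is a placeholder assumption.
from cobra.io import read_sbml_model
from cobra.flux_analysis import geometric_fba

model = read_sbml_model("e_coli_core.xml")

# The returned cobra.Solution lies at the center of the optimal flux
# polyhedron, so repeated calls give the same (unique) flux distribution.
sol = geometric_fba(model, epsilon=1e-06, max_tries=200)
print(sol.fluxes.sort_values().head())
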
Code example #54
0
File: summary.py Project: synchon/cobrapy
def model_summary(model, solution=None, threshold=0.01, fva=None,
                  floatfmt='.3g'):
    """Print a summary of the input and output fluxes of the model.

    Parameters
    ----------
    model : cobra.Model
        The model whose input and output fluxes are summarized.

    solution : cobra.core.Solution, optional
        A previously solved model solution to use for generating the
        summary. If none is provided (default), the summary method will
        re-solve the model. Note that the solution object must match the
        model, i.e., changes to the model such as changed bounds or added or
        removed reactions are not taken into account by this method.

    threshold : float, optional
        Reaction fluxes below this fraction of the maximum flux are omitted
        from the summary.

    fva : float or None, optional
        If a float between 0 and 1, also run flux variability analysis with
        this fraction of the optimum objective and report the resulting flux
        ranges.

    floatfmt : string, optional
        Format string for floats, passed to tabulate. Default is '.3g'.

    """
    objective_reactions = linear_reaction_coefficients(model)
    boundary_reactions = model.exchanges
    summary_rxns = set(objective_reactions.keys()).union(boundary_reactions)

    if solution is None:
        model.slim_optimize(error_value=None)
        solution = get_solution(model, reactions=summary_rxns)

    # Create a dataframe of objective fluxes
    obj_fluxes = pd.DataFrame({key: solution.fluxes[key.id] * value for key,
                               value in iteritems(objective_reactions)},
                              index=['flux']).T
    obj_fluxes['id'] = obj_fluxes.apply(
        lambda x: format_long_string(x.name.id, 15), 1)

    # Build a dictionary of metabolite production from the boundary reactions
    metabolite_fluxes = {}
    for rxn in boundary_reactions:
        for met, stoich in iteritems(rxn.metabolites):
            metabolite_fluxes[met] = {
                'id': format_long_string(met.id, 15),
                'flux': stoich * solution.fluxes[rxn.id]}

    # Calculate FVA results if requested
    if fva:
        fva_results = flux_variability_analysis(
            model, reaction_list=boundary_reactions, fraction_of_optimum=fva)

        for rxn in boundary_reactions:
            for met, stoich in iteritems(rxn.metabolites):
                imin = stoich * fva_results.loc[rxn.id]['minimum']
                imax = stoich * fva_results.loc[rxn.id]['maximum']
                # Correct 'max' and 'min' for negative values
                metabolite_fluxes[met].update({
                    'fmin': imin if abs(imin) <= abs(imax) else imax,
                    'fmax': imax if abs(imin) <= abs(imax) else imin,
                })

    # Generate a dataframe of boundary fluxes
    metabolite_fluxes = pd.DataFrame(metabolite_fluxes).T
    metabolite_fluxes = _process_flux_dataframe(
        metabolite_fluxes, fva, threshold, floatfmt)

    # Begin building string output table
    def get_str_table(species_df, fva=False):
        """Formats a string table for each column"""

        if not fva:
            return tabulate(species_df.loc[:, ['id', 'flux']].values,
                            floatfmt=floatfmt, tablefmt='plain').split('\n')

        else:
            return tabulate(
                species_df.loc[:, ['id', 'flux', 'fva_fmt']].values,
                floatfmt=floatfmt, tablefmt='simple',
                headers=['id', 'Flux', 'Range']).split('\n')

    in_table = get_str_table(
        metabolite_fluxes[metabolite_fluxes.is_input], fva=fva)
    out_table = get_str_table(
        metabolite_fluxes[~metabolite_fluxes.is_input], fva=fva)
    obj_table = get_str_table(obj_fluxes, fva=False)

    # Print nested output table
    print_(tabulate(
        [entries for entries in zip_longest(in_table, out_table, obj_table)],
        headers=['IN FLUXES', 'OUT FLUXES', 'OBJECTIVES'], tablefmt='simple'))
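
A usage sketch for the model-level summary above, assuming it is normally reached through Model.summary() in released cobrapy; the SBML path is again a placeholder.

# Illustrative sketch only; the model path is a placeholder assumption.
from cobra.io import read_sbml_model

model = read_sbml_model("e_coli_core.xml")

# fva=0.95 adds IN/OUT flux ranges from FVA at 95% of the optimal objective.
summary = model.summary(fva=0.95)
if summary is not None:    # newer cobrapy returns a summary object,
    print(summary)         # older versions print the table and return None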