def identify_free_parameters(label_model,parameter_dict={},n_samples=50,add_to_model=True,parameter_precision=1e-5,change_threshold=1e-3,fraction_of_optimum=0,max_d=0.1,key_reactions=[],add_turnover=True,excluded_turnovers=[],turnover_upper_bound=None,debug=True):
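 #Overview (comment added for clarity, inferred from the code below): this function identifies a set of
 #free parameters (flux values and, optionally, turnover fluxes) for the label model. The objective
 #reaction(s) are converted into "flux value" parameters bounded by their FVA range, identify_free_fluxes
 #is run n_samples times on randomized flux samples to accumulate free flux parameters, and turnover
 #fluxes whose bounds are not fixed are added as "turnover" parameters unless excluded. If add_to_model
 #is True the resulting dict is stored in label_model.parameter_dict, otherwise the parameters are cleared.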
 free_parameters=copy.deepcopy(parameter_dict)
 precision=int(-1*(math.log10(parameter_precision)))
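 #precision is the number of decimal places implied by parameter_precision (e.g. 1e-5 -> 5)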
 apply_parameters(label_model,parameter_dict=free_parameters)
 for reaction in label_model.constrained_model.objective: 
     fva=flux_variability_analysis(label_model.constrained_model,reaction_list=[reaction], fraction_of_optimum=fraction_of_optimum,tolerance_feasibility=label_model.lp_tolerance_feasibility)
     print fva
     minimum=round_down(fva[reaction.id]["minimum"],precision)#max(round_up(fva[reaction.id]["minimum"],precision),reaction.lower_bound)
     maximum=round_up(fva[reaction.id]["maximum"],precision)#min(round_down(fva[reaction.id]["maximum"],precision),reaction.upper_bound)
     value=((minimum+maximum)/2) 
     #original_objective_list.append(reaction)
     free_parameters[reaction.id]={"v":value,"lb":minimum,"ub":maximum ,"type":"flux value","reactions":[reaction.id],"max_d":max_d,"original_lb":reaction.lower_bound,"original_ub":reaction.upper_bound,"original_objective_coefficient":reaction.objective_coefficient}
     reaction.lower_bound=minimum
     reaction.upper_bound=maximum
     reaction.objective_coefficient=0
     #print [model.optimize(),minimum,maximum]
 original_fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
 #print free_parameters
 apply_parameters(label_model,parameter_dict=free_parameters,parameter_precision=parameter_precision)
 #print model.optimize()
 for i in range(0,n_samples):
  print "sample "+str(i)
  try:
   #original_model=copy.deepcopy(label_model.constrained_model)
   free_parameters=identify_free_fluxes(label_model,parameter_dict=free_parameters,fraction_of_optimum=fraction_of_optimum ,change_threshold=change_threshold, parameter_precision=parameter_precision,max_d=max_d,key_reactions=key_reactions,original_fva=original_fva,debug=debug)
   flux_value_parameter_list=[]
   for parameter in free_parameters:
        local_parameter_dict=free_parameters[parameter]
        if local_parameter_dict["type"]=="flux value":
           flux_value_parameter_list.append(parameter)
           #Save the original lower and upper_bounds and turnover 
           #Generate the samples
   for parameter in flux_value_parameter_list:
       for reaction_id in free_parameters[parameter]["reactions"]:
           reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
           reaction.lower_bound=max(free_parameters[parameter]["original_lb"],free_parameters[parameter]["lb"])
           reaction.upper_bound=min(free_parameters[parameter]["original_ub"],free_parameters[parameter]["ub"])
   working_flux_value_parameter_list=random.sample(flux_value_parameter_list, len(flux_value_parameter_list))
   for reaction_id in working_flux_value_parameter_list: 
    
        fva=flux_variability_analysis(label_model.constrained_model, reaction_list=[reaction_id],fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
        #if fva[reaction_id]["maximum"]-fva[reaction_id]["minimum"]>parameter_precision:
        new_value=random.uniform(fva[reaction_id]["minimum"],fva[reaction_id]["maximum"])
        free_parameters[reaction_id]["v"]=new_value
        apply_parameters(label_model,parameter_dict=free_parameters,parameter_precision=parameter_precision,parameter_list=[reaction_id])
        reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
        #print[fva,new_value,[reaction.lower_bound,reaction.upper_bound],label_model.constrained_model.optimize()]
  except:
            print "Error caught"
            continue
 if add_turnover==True:
       """if turnover_upper_bound==None: 
          turnover_upper_bound=0
          for reaction_id in label_model.reaction_n_dict:
              if reaction_id in original_fva:
                  turnover_upper_bound=max(original_fva[reaction_id]["maximum"],turnover_upper_bound)
          turnover_upper_bound=round_up(turnover_upper_bound,0)""" 
       for turnover in label_model.turnover_flux_dict:
        if turnover not in excluded_turnovers and (turnover+"_turnover") not in free_parameters and label_model.turnover_flux_dict[turnover]["ub"]!=label_model.turnover_flux_dict[turnover]["lb"]:
           ub=label_model.turnover_flux_dict[turnover]["ub"] 
           lb=label_model.turnover_flux_dict[turnover]["lb"]
           v=label_model.turnover_flux_dict[turnover]["v"] 
           free_parameters[turnover+"_turnover"]={"v":v,"lb":lb,"ub":ub,"max_d":max_d,"type":"turnover","reactions":[turnover]}
 if add_to_model==True:
     label_model.parameter_dict=free_parameters
     apply_parameters(label_model,parameter_dict=free_parameters)
 else:
     clear_parameters(label_model,parameter_dict=copy.deepcopy(free_parameters))
 return free_parameters   
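#Usage sketch (hypothetical call, not part of the original source; assumes an initialized label model object
#and a placeholder reaction id "EX_glc_e"):
#    free_parameters=identify_free_parameters(label_model,n_samples=20,fraction_of_optimum=0.9,key_reactions=["EX_glc_e"])
#    print len(free_parameters)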
def identify_free_fluxes(label_model,parameter_dict={},fraction_of_optimum=0,change_threshold=0.1,parameter_precision=0.01,max_d=0.1,key_reactions=[],original_fva=None,restore_model=False,debug=False):
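     #Overview (comment added for clarity, inferred from the code below): flux variability analysis is run
     #on the reactions that propagate label (plus key_reactions) and, while any of them still varies by more
     #than change_threshold, the reaction with the largest remaining variation (key_reactions first) is fixed
     #to a random value within its current FVA range and stored in parameter_dict as a "flux value" parameter.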
    precision=int(-1*(math.log10(parameter_precision)))
    #print label_model.constrained_model.reactions.get_by_id("biomass").lower_bound
    reaction_list=[]
    original_model=copy.deepcopy(label_model.constrained_model)
     #apply_parameters(label_model,parameter_dict=parameter_dict) #Parameters will be applied with the default precision in label_model
    #original_objective_list=[]
    #model=label_model.constrained_model
    for reaction in label_model.reactions_propagating_label:#label_model.reaction_n_dict: #Find reactions that propagate label
        if reaction in label_model.merged_reactions_reactions_dict:
           for merged_reaction in label_model.merged_reactions_reactions_dict[reaction]:
               if "_reverse" not in merged_reaction:
                  reaction_list.append(merged_reaction) 
        elif "_reverse" not in reaction and "RATIO_" not in reaction:
           reaction_list.append(reaction)
    for reaction in key_reactions:
        if reaction in label_model.constrained_model.reactions and reaction not in reaction_list:
           reaction_list.append(reaction) 
    #Classify reactions 
    if original_fva==None:
       original_fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility, reaction_list=reaction_list)
    #original_mfva=flux_variability_analysis(label_model.constrained_model,reaction_list=reaction_list, fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
    additional_parameters_list=[]
    first_time=True
    done_reactions=[]
    for i in xrange(0,10000):
      #print model.optimize()
      free_ex_reactions_list=[]
      free_reactions_list=[]
      priortizided_reactions_list=[]
      mfva=flux_variability_analysis(label_model.constrained_model,reaction_list=reaction_list, fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
      for reaction in mfva:
          """if reaction not in label_model.reaction_emu_dict and reaction not in label_model.reaction_merged_reactions_dict:
             print "Error"
             continue"""
          maximum=mfva[reaction]["maximum"]
          minimum=mfva[reaction]["minimum"]
          if abs(maximum-minimum)>change_threshold:
             if reaction in key_reactions:
                priortizided_reactions_list.append(reaction)
                """elif "EX_" in reaction:
                free_ex_reactions_list.append(reaction)
                #print free_ex_reactions_list"""
             else:
                free_reactions_list.append(reaction)
      #print free_reactions_list 
      #print free_ex_reactions_list
      if  free_reactions_list==[] and priortizided_reactions_list==[]:
          break
      elif priortizided_reactions_list!=[]:
            #Find the reaction with the most variation allowed
           max_variation=0
           for reaction in priortizided_reactions_list:
               variation=mfva[reaction]["maximum"]-mfva[reaction]["minimum"]
               if variation>=max_variation:
                   max_variation=variation
                   reaction_id=reaction
                   maximum=mfva[reaction_id]["maximum"]
                   minimum=mfva[reaction_id]["minimum"]  
                   original_maximum=round_down(original_fva[reaction_id]["maximum"],precision)
                   original_minimum=round_up(original_fva[reaction_id]["minimum"],precision)
           """elif free_ex_reactions_list!=[]:
           #Find the reaction with more variation allowed
           max_variation=0
           for reaction in free_ex_reactions_list:
               variation=mfva[reaction]["maximum"]-mfva[reaction]["minimum"]
               if variation>=max_variation:
                   max_variation=variation
                   reaction_id=reaction
                   maximum=mfva[reaction_id]["maximum"]
                   minimum=mfva[reaction_id]["minimum"]  
                   original_maximum=round(original_mfva[reaction_id]["maximum"],precision)
                   original_minimum=round(original_mfva[reaction_id]["minimum"],precision)"""
      elif free_reactions_list!=[]:
               max_variation=0
               for reaction in free_reactions_list:
                 variation=mfva[reaction]["maximum"]-mfva[reaction]["minimum"]
                 if variation>=max_variation:
                   max_variation=variation
                   reaction_id=reaction 
                   maximum=mfva[reaction_id]["maximum"]
                   minimum=mfva[reaction_id]["minimum"]  
                   original_maximum=original_fva[reaction_id]["maximum"]#round_down(original_fva[reaction_id]["maximum"],precision)
                   original_minimum=original_fva[reaction_id]["minimum"]#round_up(original_fva[reaction_id]["minimum"],precision)
      print ["max variation",max_variation]
      reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
      value=random.uniform(minimum,maximum)
      #value=round((4*minimum+1*maximum)/5,precision) #Weighted average
      parameter_dict[reaction_id]={"v":value,"lb":original_minimum,"ub":original_maximum ,"type":"flux value","reactions":[reaction_id],"max_d":max_d,"original_lb":original_model.reactions.get_by_id(reaction_id).lower_bound,"original_ub":original_model.reactions.get_by_id(reaction_id).upper_bound,"original_objective_coefficient":0.0}
      apply_parameters(label_model,parameter_dict=parameter_dict,parameter_precision=parameter_precision,parameter_list=[reaction_id])
      done_reactions.append(reaction_id)
      if debug:
         print [reaction_id,mfva[reaction_id],[reaction.lower_bound,reaction.upper_bound]]
      """reaction.lower_bound=value
      reaction.upper_bound=value"""
      #print([reaction.id,mfva[reaction_id],value,model.optimize()])
      #print(reaction_id+" "+str(parameter_dict[reaction_id]))
    print ("%s free fluxes found"%(len(done_reactions)))
    """if add_turnover==True:
       for turnover in label_model.turnover_flux_dict:
        if turnover not in excluded_turnovers or (turnover+"_turnover") in label_model.parameter_dict: 
           parameter_dict[turnover+"_turnover"]={"v":label_model.turnover_flux_dict[turnover],"lb":0,"ub":turnover_upper_bound,"max_d":max_d,"type":"turnover","reactions":[turnover]}"""
    if restore_model==True:   
        label_model.constrained_model=original_model
    return parameter_dict
Example #3
def sampling(label_model,
             n=100,
             fraction_of_optimum=0,
             output_emu_list=None,
             max_turnover=100,
             parameter_dict={},
             fba_mode="fba",
             parameter_precision=0.001,
             gui=None,
             change_threshold=0.01):
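    #Overview (comment added for clarity, inferred from the code below): this function generates n random
    #flux/turnover samples. For each sample the free fluxes are re-identified and randomized within their FVA
    #ranges, turnover fluxes not already parameterized are randomized within their bounds, the label propagation
    #system is solved, and the simulated mass isotopomer fractions of the requested EMUs are appended to
    #output_dict[condition][size][mid_name].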
    print parameter_precision
    precision = int(-1 * (math.log10(parameter_precision)))
    #print ["hello",label_model.constrained_model.reactions.get_by_id("biomass").lower_bound]
    original_model = copy.deepcopy(label_model.constrained_model)
    original_turnover = copy.deepcopy(label_model.turnover_flux_dict)
    apply_parameters(label_model, parameter_dict)
    #model=label_model.constrained_model
    #try:
    objective_dict = {}
    if fraction_of_optimum != 0:
        for reaction in label_model.constrained_model.objective:
            fva = flux_variability_analysis(
                label_model.constrained_model,
                reaction_list=[reaction],
                fraction_of_optimum=fraction_of_optimum,
                tolerance_feasibility=label_model.lp_tolerance_feasibility)
            objective_dict[reaction] = {
                "lb": reaction.lower_bound,
                "ub": reaction.upper_bound,
                "obj": reaction.objective_coefficient
            }
            reaction.lower_bound = round_down(fva[reaction.id]["minimum"],
                                              precision)
            reaction.upper_bound = round_up(fva[reaction.id]["maximum"],
                                            precision)
            reaction.objective_coefficient = 0
    if output_emu_list == None:
        output_emu_list = label_model.data_name_emu_dict.keys()
    #Prepare output
    output_dict = {}
    for condition in label_model.condition_initial_label_yy_dict:
        output_dict[condition] = {}
        for emu in output_emu_list:
            size = label_model.emu_dict[emu]["size"]
            if size not in output_dict[condition]:
                output_dict[condition][size] = {}
            for mi in label_model.emu_dict[emu]["mid"]:
                output_dict[condition][size][label_model.emu_dict[emu]["mid"]
                                             [mi]] = []
    #identify the turnover fluxes that are already part of the parameters as they will be excluded
    apply_ratios(label_model.constrained_model, label_model.ratio_dict)
    #free_parameters=identify_free_fluxes(label_model,parameter_dict={},fraction_of_optimum=0,change_threshold=change_threshold,parameter_precision=parameter_precision,max_d=0.1,key_reactions=[],original_fva=None,debug=False)
    """if len (free_parameters)==0:
       return output_dict"""
    #print "%s free parameters found"%(len(free_parameters))
    #turnover_parameter_list=[]
    """flux_value_parameter_list=[]
    for parameter in free_parameters:
        parameter_dict=free_parameters[parameter]
        if parameter_dict["type"]=="turnover":
           turnover_parameter_list.append(parameter)
        elif parameter_dict["type"]=="flux value":
           flux_value_parameter_list.append(parameter)"""
    #Save the original lower and upper_bounds and turnover
    #Generate the samples
    free_parameters = {}
    for i in range(0, n):
        clear_parameters(label_model,
                         parameter_dict=free_parameters,
                         parameter_list=[],
                         clear_ratios=True,
                         clear_turnover=True,
                         clear_fluxes=True,
                         restore_objectives=True,
                         delete_parameters=True)
        free_parameters = identify_free_fluxes(
            label_model,
            parameter_dict=free_parameters,
            fraction_of_optimum=0,
            change_threshold=change_threshold,
            parameter_precision=parameter_precision,
            max_d=0.1,
            key_reactions=[],
            original_fva=None,
            debug=False)
        for reaction_id in free_parameters:
            reaction = label_model.constrained_model.reactions.get_by_id(
                reaction_id)
            original_reaction = original_model.reactions.get_by_id(reaction_id)
            reaction.lower_bound = original_reaction.lower_bound
            reaction.upper_bound = original_reaction.upper_bound

        working_flux_value_parameter_list = random.sample(
            free_parameters, len(free_parameters))
        #try:
        #print free_parameters
        for reaction_id in working_flux_value_parameter_list:
            fva = flux_variability_analysis(
                label_model.constrained_model,
                reaction_list=[reaction_id],
                fraction_of_optimum=0,
                tolerance_feasibility=label_model.lp_tolerance_feasibility)
            if fva[reaction_id]["maximum"] - fva[reaction_id][
                    "minimum"] > parameter_precision:
                new_value = random.uniform(fva[reaction_id]["minimum"],
                                           fva[reaction_id]["maximum"])
                free_parameters[reaction_id]["v"] = new_value
                #print [reaction_id,new_value]
                apply_parameters(label_model,
                                 parameter_dict=free_parameters,
                                 parameter_precision=parameter_precision,
                                 parameter_list=[reaction_id])
        for turnover in label_model.turnover_flux_dict:
            if (turnover + "_turnover") not in parameter_dict:
                lb = label_model.turnover_flux_dict[turnover]["lb"]
                ub = label_model.turnover_flux_dict[turnover]["ub"]
                new_value = round(random.uniform(lb, ub), precision)
                #print new_value
                label_model.turnover_flux_dict[turnover]["v"] = new_value
        #print label_model.turnover_flux_dict
        #apply_parameters(label_model,parameter_dict=free_parameters,parameter_list=turnover_parameter_list,parameter_precision=parameter_precision)
        a, b = solver(label_model, mode="fsolve", fba_mode=fba_mode)
        #Store output
        for condition in output_dict:
            for size in output_dict[condition]:
                for mi in output_dict[condition][size]:
                    if mi in label_model.size_variable_dict[size]:
                        n_emu = label_model.size_variable_dict[size][mi]
                        mi_value = round(
                            label_model.condition_size_yy_dict[condition][size]
                            [n_emu], 4)
                        output_dict[condition][size][mi].append(mi_value)
        if gui != None:
            #get_objective_function(label_model)
            gui.update_label_sampling()
            gui.root.update_idletasks()
            a, b, c = get_objective_function(
                label_model,
                force_balance=label_model.force_balance,
                output=False)

        print "sample %s of %s..." % ((i + 1), n)
        #Restore original values for flux constraints
        #except:
        # continue
    clear_parameters(label_model,
                     parameter_dict=free_parameters,
                     parameter_list=[],
                     clear_ratios=True,
                     clear_turnover=True,
                     clear_fluxes=True,
                     restore_objectives=True,
                     delete_parameters=True)

    for reaction in objective_dict:
        reaction.lower_bound = objective_dict[reaction]["lb"]
        reaction.upper_bound = objective_dict[reaction]["ub"]
        reaction.objective_coefficient = objective_dict[reaction]["obj"]
    label_model.turnover_flux_dict = original_turnover
    #label_model.constrained_model=original_model
    #print ["hello",label_model.constrained_model.reactions.get_by_id("biomass").lower_bound]
    #apply_parameters(label_model,parameter_dict=None,parameter_precision=parameter_precision)
    a, b = solver(label_model, mode="fsolve", fba_mode=fba_mode)
    get_objective_function(label_model)
    #except:
    """print "Error"
    label_model.turnover_flux_dict=original_turnover
    label_model.constrained_model=original_model"""
    #print ["hello",label_model.constrained_model.reactions.get_by_id("biomass").lower_bound]
    return output_dict
def get_ratios_bounds(label_model,ratio,perturbation,lp_tolerance_feasibility=1e-9,parameter_dict=None):
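    #Overview (comment added for clarity, inferred from the code below): estimates the feasible range of the
    #flux ratio "reaction1/reaction2". A theoretical range is first derived from the FVA extremes of both
    #reactions; the ratio constraint is then perturbed upwards and downwards in steps proportional to
    #"perturbation" until the model becomes infeasible or the theoretical limit is reached.
    #Returns (original value, lower bound, upper bound).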
    reactions=ratio.split("/")
    reaction1=reactions[0]
    reaction2=reactions[1]
    model=label_model.constrained_model
    model.optimize(tolerance_feasibility=lp_tolerance_feasibility)
    try:
        v=original_v=ub=lb=teoric_lb=teoric_ub=model.solution.x_dict[reaction1]/(model.solution.x_dict[reaction2])
    except:
       v=original_v=ub=lb=teoric_lb=teoric_ub=model.solution.x_dict[reaction1]/(model.solution.x_dict[reaction2]+lp_tolerance_feasibility)
    if parameter_dict!=None:
       clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=parameter_dict.keys(), clear_ratios=True,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
    fva=flux_variability_analysis(model,reaction_list=[reaction1,reaction2], fraction_of_optimum=0,tolerance_feasibility=lp_tolerance_feasibility)
    try:
       value=fva[reaction1]["minimum"]/(fva[reaction2]["maximum"])
       print [fva[reaction1]["minimum"],fva[reaction2]["maximum"]] 
    except:
       value=fva[reaction1]["minimum"]/(fva[reaction2]["maximum"]+lp_tolerance_feasibility) 
    print value
    teoric_lb=min(teoric_lb,value)
    teoric_ub=max(teoric_ub,value) 
    try:
       value=fva[reaction1]["maximum"]/(fva[reaction2]["minimum"])
       print [fva[reaction1]["maximum"],fva[reaction2]["minimum"]] 
    except:
        value=fva[reaction1]["maximum"]/(fva[reaction2]["minimum"]+lp_tolerance_feasibility)
    print value
    teoric_lb=min(teoric_lb,value)
    teoric_ub=max(teoric_ub,value)  
    try:
       value=fva[reaction1]["minimum"]/(fva[reaction2]["minimum"])
       print [fva[reaction1]["minimum"],fva[reaction2]["minimum"]] 
    except:
        value=fva[reaction1]["minimum"]/(fva[reaction2]["minimum"]+lp_tolerance_feasibility)
    print value
    teoric_lb=min(teoric_lb,value)
    teoric_ub=max(teoric_ub,value)  
    try:
       value=fva[reaction1]["maximum"]/(fva[reaction2]["maximum"])
       print [fva[reaction1]["maximum"],fva[reaction2]["maximum"]] 
    except:
        value=fva[reaction1]["maximum"]/(fva[reaction2]["maximum"]+lp_tolerance_feasibility)
    print value
    teoric_lb=min(teoric_lb,value)
    teoric_ub=max(teoric_ub,value)  
    print [teoric_lb,teoric_ub]
    v=lb=ub=original_v
    print v
    #Increase until not feasible
    ratio_dict={ratio:{reaction1:v,reaction2:1}}
    while model.optimize().status=="optimal":
          ub=v
          delta_parameter=min(max(perturbation*abs(v),0.001),100000000)
          print delta_parameter
          v=max(min(v+delta_parameter*1,teoric_ub),teoric_lb)
          ratio_dict[ratio]={reaction1:v,reaction2:1}
          print ratio_dict
          apply_ratios(model,ratio_dict)
          print model.optimize()
          if v>=teoric_ub:
             print "breaking 1"
             break
    ub=v
    ratio_dict={ratio:{reaction1:original_v,reaction2:1}}
    v=original_v
    while model.optimize().status=="optimal":
          lb=v
          delta_parameter=min(max(perturbation*abs(v),0.001),100000000)
          print delta_parameter
          v=max(min(v+delta_parameter*-1,teoric_ub),teoric_lb)
          ratio_dict[ratio]={reaction1:v,reaction2:1}
          print ratio_dict
          apply_ratios(model,ratio_dict)
          print model.optimize()
          if v<=teoric_lb:
             print "breaking -1"
             break
    lb=v
    remove_ratio(model,ratio,ratio_dict) #Remove the Ratio
    if parameter_dict!=None:
       apply_parameters(label_model,parameter_dict)
    apply_ratios(label_model.constrained_model,label_model.ratio_dict) #Re-add all ratios that might have been disabled
    return original_v,lb,ub
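#Usage sketch (hypothetical call, not part of the original source; the reaction ids are placeholders):
#    v,lb,ub=get_ratios_bounds(label_model,"reaction_a/reaction_b",0.1,lp_tolerance_feasibility=1e-9)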
def estimate_confidence_intervals(label_model,significance=0.95,perturbation=0.1,min_absolute_perturbation=0.1,max_absolute_perturbation=25,parameter_precision=None,best_parameter_dict=None,evaluate_turnovers=False,parameter_list=None,fraction_of_optimum=0.9,force_flux_value_bounds=False,relative_max_random_sample=0.5, relative_min_random_sample= 0.25,annealing_n=50,annealing_m=100,annealing_p0=0.4,annealing_pf=0.001,annealing_n_processes=1,annealing_cycle_time_limit=1800, annealing_cycle_max_attempts=5,annealing_iterations=2,annealing_restore_parameters=True,fname="confidence.json",sbml_name=None,output=True):
   """
   Computes the confidence intervals for fluxes 
   
   label_model: label_model object
   significance: float
        Significance level for the confidence intervals. Default is 0.95 (95%)
   perturbation: float
        Relative perturbation for each parameter at each step. Default is 0.1 (10%). Regardless of this value, the absolute perturbation will never be lower than min_absolute_perturbation and will never be larger than max_absolute_perturbation
   min_absolute_perturbation: float
        See above
   max_absolute_perturbation: float
        See above
   parameter_precision: float
        Defines the precision of the flux value parameters. If None is given, the precision defined in the label_model object will be used
   best_parameter_dict: dict
        Dict with the best parameters that have been obtained after fitting the parameters to experimental data
   evaluate_turnovers: bool
        If set to False (default) confidence intervals will not be calculated for turnovers.
   parameter_list: list
        List of the parameters that should be evaluated. Unless all flux value parameters are selected, the confidence intervals for fluxes (other than those directly analyzed) won't be meaningful
   fraction_of_optimum: float
        Fraction of the objective flux that should be maintained. If the parameters have been added automatically this will have no effect, as the objective is always added as a parameter
   force_flux_value_bounds: bool
        If False, the bounds defined in the parameter dict are ignored and the FVA limits are used for flux value parameters. If set to True it might in some instances result in infeasible solutions
   relative_max_random_sample: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   relative_min_random_sample: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters 
   annealing_n: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters   
   annealing_m: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_p0: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_pf: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_n_processes: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_cycle_time_limit: float
   	Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_cycle_max_attempts: float
        Defines the parameter of the same name in the annealing function used to reoptimize the parameters
   annealing_iterations: int
        Number of times annealing should be run once the significance threshold has been surpassed by a parameter, to ensure this value is the real upper/lower limit for the parameter
   annealing_restore_parameters: bool
        If True, after each annealing iteration the parameters are restored to their original values to reduce the risk of being trapped in local minima
   fname: string
        Name of the file where the results will be saved. It must have either an xlsx, CSV or json extension.
   sbml_name: string
        Name of the SBML file that will be generated containing the constrained_model restricted by the confidence interval results. If None (default) no SBML will be generated.
   output: bool
        If True, the progress of the analysis will be written to a text file named estimate_confidence_interval_output.txt
   """
   if parameter_precision==None:
      parameter_precision=label_model.parameter_precision
   if best_parameter_dict==None:
      best_parameter_dict=label_model.parameter_dict
   print parameter_list
   if parameter_list==None or parameter_list==[]:
      full_mode=True
      if evaluate_turnovers:
         parameter_list=best_parameter_dict.keys()  
      else:
         parameter_list=[] 
         for x in best_parameter_dict:
             if best_parameter_dict[x]["type"] != "turnover":
                parameter_list.append(x) 
   else:
      full_mode=False
   precision=int(-1*(math.log10(parameter_precision)))
   max_random_sample=int(relative_max_random_sample*len(best_parameter_dict))
   min_random_sample=int(relative_min_random_sample*len(best_parameter_dict))
   #chi_parameters_sets_dict={}
   parameter_confidence_interval_dict={}
   flux_confidence_interval_dict={}
   parameter_value_parameters_sets_dict={}
   build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list)
   build_confidence_dicts(parameter_confidence_interval_dict,parameter_value_parameters_sets_dict,best_parameter_dict)
   """for flux in label_model.flux_dict:
       flux_confidence_interval_dict[flux]={"lb":label_model.flux_dict[flux],"ub":label_model.flux_dict[flux]}
       if (flux+"_reverse") in label_model.flux_dict:
          net_flux=label_model.flux_dict[flux]-label_model.flux_dict[flux+"_reverse"]
          flux_confidence_interval_dict["net_"+flux]={"lb":net_flux,"ub":net_flux}"""
   print flux_confidence_interval_dict
   apply_parameters(label_model,best_parameter_dict,parameter_precision=parameter_precision)
   a,b=solver(label_model)
   best_objective,b,c=get_objective_function(label_model,output=False)
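   #The acceptance threshold is the best-fit objective plus the chi-square critical value for 1 degree of freedom at the chosen significance level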
   delta_chi = chi2.isf(q=1-significance, df=1)
   signficance_threshold=delta_chi+best_objective
   print signficance_threshold
   if output:
      with open("estimate_confidence_interval_output.txt", "a") as myfile:
           myfile.write("signficance_threshold "+str(signficance_threshold)+"\n")
   original_objectives_bounds={}
   for reaction in label_model.constrained_model.objective: 
              original_objectives_bounds[reaction.id]={}
              original_objectives_bounds[reaction.id]["lb"]=reaction.lower_bound
              original_objectives_bounds[reaction.id]["ub"]=reaction.upper_bound
              original_objectives_bounds[reaction.id]["obj_coef"]=reaction.objective_coefficient
              fva=flux_variability_analysis(label_model.constrained_model,reaction_list=[reaction], fraction_of_optimum=fraction_of_optimum,tolerance_feasibility=label_model.lp_tolerance_feasibility)
              reaction.lower_bound=max(round_down(fva[reaction.id]["minimum"],precision),reaction.lower_bound)
              reaction.upper_bound=min(round_up(fva[reaction.id]["maximum"],precision),reaction.upper_bound)
              reaction.objective_coefficient=0
   
   flux_parameter_list=[]
   
   for parameter in best_parameter_dict:
       if best_parameter_dict[parameter]["type"]=="flux value":
          flux_parameter_list.append(parameter)
   feasability_process = Pool(processes=1)
   for parameter in parameter_list:
       apply_parameters(label_model,best_parameter_dict,parameter_precision=parameter_precision)
       a,b=solver(label_model)
       #chi_parameters_sets_dict[parameter]={}
       #variation_range= best_parameter_dict[parameter]["ub"]-best_parameter_dict[parameter]["lb"] 
       #Find the highest/lowest value found on previous simulations
       n=1
       sign=1
       #is_flux_value=parameter in flux_parameter_list
       if parameter not in best_parameter_dict:
          additional_parameter=True 
          parameter_dict=max_parameter_dict=min_parameter_dict=copy.deepcopy(best_parameter_dict)
          if parameter in label_model.constrained_model.reactions:
             print "is not ratio"
             value=label_model.constrained_model.solution.x_dict[parameter]
             reaction=label_model.constrained_model.reactions.get_by_id(parameter)
             lb=reaction.lower_bound
             ub=reaction.upper_bound
             parameter_dict[parameter]={"v":value,"lb":lb,"ub":ub ,"type":"flux value","reactions":[parameter],"max_d":0.1,"original_lb":lb,"original_ub":ub,"original_objective_coefficient":0.0}
          elif "/" in parameter:
             print "is ratio"
             reaction1=parameter.split("/")[0]
             reaction2=parameter.split("/")[1]  
             value,lb,ub=get_ratios_bounds(label_model,parameter,0.1,lp_tolerance_feasibility=label_model.lp_tolerance_feasibility,parameter_dict=parameter_dict)
             parameter_dict[parameter]={"v":value,"lb":lb,"ub":ub ,"type":"ratio","ratio":{reaction1:"v",reaction2:1},"max_d":0.1}
             print parameter      
       else:
          additional_parameter=False 
           #TODO Make it so it can start from the highest and lowest value of the parameter found in previous simulations
          min_parameter_dict=parameter_value_parameters_sets_dict[parameter]["lb_parameter_dict"]
          max_parameter_dict=parameter_value_parameters_sets_dict[parameter]["ub_parameter_dict"]
       parameter_lb,parameter_ub=get_bounds(label_model,min_parameter_dict,parameter,force_flux_value_bounds,flux_parameter_list)
       """lb_list=[]#[best_parameter_dict[parameter]["lb"]]
       ub_list=[]#[best_parameter_dict[parameter]["ub"]]
       if is_flux_value==True:
          clear_parameters(label_model,parameter_dict=best_parameter_dict,parameter_list=flux_parameter_list, clear_ratios=False,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
          #Get real upper and lower bound for the parameters
          for reaction_id in best_parameter_dict[parameter]["reactions"]: 
                fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,reaction_list=[reaction_id],tolerance_feasibility=label_model.lp_tolerance_feasibility)
                lb_list.append(fva[reaction_id]["minimum"])
                ub_list.append(fva[reaction_id]["maximum"])
       if is_flux_value==False or force_flux_value_bounds:
          lb_list.append(best_parameter_dict[parameter]["lb"])
          ub_list.append(best_parameter_dict[parameter]["ub"])
       parameter_lb=max(lb_list)
       parameter_ub=min(ub_list)"""
       if output:
          with open("estimate_confidence_interval_output.txt", "a") as myfile:
               myfile.write("///////"+parameter+"(lb="+str(parameter_lb)+" ub="+str(parameter_ub)+ ")\n")
       while(n<=100000):
          stop_flag=False
          if n==1:
             parameter_dict=copy.deepcopy(max_parameter_dict)
             #Run a quick evaluation of the upper bound to see if it is not necessary to "walk there" 
             parameter_dict,f_best=evaluate_parameter(label_model,parameter_dict,flux_parameter_list,parameter,parameter_lb,parameter_ub,parameter_ub,signficance_threshold,feasability_process,parameter_precision,max_absolute_perturbation/10.0,force_flux_value_bounds,max(int(annealing_n*0.5),2),annealing_m,annealing_p0,annealing_pf,max_random_sample,min_random_sample,annealing_n_processes,annealing_cycle_time_limit, annealing_cycle_max_attempts,annealing_iterations=1,annealing_restore_parameters=annealing_restore_parameters)
             if f_best<=signficance_threshold:
                build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list)
                parameter_dict_to_store=copy.deepcopy(parameter_dict)
                if additional_parameter:
                   del parameter_dict_to_store[parameter]
                build_confidence_dicts(parameter_confidence_interval_dict,parameter_value_parameters_sets_dict,parameter_dict_to_store)
                
             else:
                parameter_dict=copy.deepcopy(max_parameter_dict)
             if output:
                with open("estimate_confidence_interval_output.txt", "a") as myfile:
                     myfile.write(parameter+" "+"v="+str(parameter_ub)+" chi="+str(f_best)+"\n")
          delta_parameter=min(max(perturbation*abs(parameter_dict[parameter]["v"]),min_absolute_perturbation),max_absolute_perturbation)
          print delta_parameter        
          parameter_new_value=max(min(parameter_dict[parameter]["v"]+delta_parameter*sign,parameter_ub),parameter_lb)
          parameter_dict,f_best=evaluate_parameter(label_model,parameter_dict,flux_parameter_list,parameter,parameter_lb,parameter_ub,parameter_new_value,signficance_threshold,feasability_process,parameter_precision,max_absolute_perturbation/10.0,force_flux_value_bounds,annealing_n,annealing_m,annealing_p0,annealing_pf,max_random_sample,min_random_sample,annealing_n_processes,annealing_cycle_time_limit, annealing_cycle_max_attempts,annealing_iterations,annealing_restore_parameters=annealing_restore_parameters) 
          if output:
             with open("estimate_confidence_interval_output.txt", "a") as myfile:
               myfile.write(parameter+" "+"v="+str(parameter_new_value)+" chi="+str(f_best)+"\n")
          
          if f_best>signficance_threshold:
             stop_flag=True
          else: 
             if f_best<best_objective: #If a solution is found that is better than the optimal solution restart the confidence interval simulation with the new parameter set
                parameter_dict_to_store=copy.deepcopy(parameter_dict)
                if additional_parameter:
                   clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=[parameter], clear_ratios=True,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #
                   del parameter_dict_to_store[parameter]
                parameter_confidence_interval_dict={}
                flux_confidence_interval_dict={}
                parameter_value_parameters_sets_dict={}
                if output:
                   with open("estimate_confidence_interval_output.txt", "a") as myfile:
                        myfile.write("Restarting analysis with new bestfit\n")
                best_parameter_dict,best_flux_dict,f_best=annealing(label_model,n=annealing_n,m=annealing_m,p0=annealing_p0,pf=annealing_pf,max_random_sample=max_random_sample,min_random_sample=min_random_sample,mode="fsolve",fraction_of_optimum=0,parameter_precision=parameter_precision,parameter_to_be_fitted=[],max_perturbation=max_absolute_perturbation,gui=None,fba_mode="fba", break_threshold=signficance_threshold,parameter_dict=parameter_dict_to_store,n_processes=annealing_n_processes,cycle_time_limit=annealing_cycle_time_limit, cycle_max_attempts=annealing_cycle_max_attempts,output=False,force_flux_value_bounds=force_flux_value_bounds)
                if full_mode:
                   parameter_list=None
                parameter_confidence_interval_dict,flux_confidence_interval_dict,parameter_value_parameters_sets_dict,constrained_model=estimate_confidence_intervals(label_model,significance=significance,perturbation=perturbation,min_absolute_perturbation=min_absolute_perturbation, max_absolute_perturbation=max_absolute_perturbation ,parameter_precision=parameter_precision, best_parameter_dict=best_parameter_dict ,parameter_list=parameter_list ,fraction_of_optimum=fraction_of_optimum ,force_flux_value_bounds=force_flux_value_bounds ,relative_max_random_sample=relative_max_random_sample, relative_min_random_sample= relative_min_random_sample,annealing_n=annealing_n,annealing_m=annealing_m,annealing_p0=annealing_p0,annealing_pf=annealing_pf,annealing_n_processes=annealing_n_processes,annealing_cycle_time_limit=annealing_cycle_time_limit, annealing_cycle_max_attempts= annealing_cycle_max_attempts, annealing_iterations=annealing_iterations ,annealing_restore_parameters=annealing_restore_parameters ,fname=fname,output=output,sbml_name=sbml_name,evaluate_turnovers=evaluate_turnovers)
                #parameter_confidence_interval_dict,flux_confidence_interval_dict,parameter_value_parameters_sets_dict =estimate_confidence_intervals(label_model,significance=significance,perturbation=perturbation,min_absolute_perturbation=min_absolute_perturbation,max_absolute_perturbation=max_absolute_perturbation,parameter_precision=parameter_precision,best_parameter_dict=parameter_dict,parameter_list=parameter_list,fraction_of_optimum=fraction_of_optimum,force_flux_value_bounds=force_flux_value_bounds,relative_max_random_sample=relative_max_random_sample, relative_min_random_sample= relative_min_random_sample,annealing_n=annealing_n,annealing_m=annealing_m,annealing_p0=annealing_p0,annealing_pf=annealing_pf,output=output,annealing_n_processes=annealing_n_processes,annealing_cycle_time_limit=annealing_cycle_time_limit, annealing_cycle_max_attempts=annealing_cycle_max_attempts,annealing_iterations=annealing_iterations,annealing_restore_parameters=annealing_restore_parameters,fname=fname)
                return parameter_confidence_interval_dict,flux_confidence_interval_dict,parameter_value_parameters_sets_dict,constrained_model
             if parameter_dict[parameter]["v"]<=parameter_lb or parameter_dict[parameter]["v"]>=parameter_ub:
                stop_flag=True
             """if sign==1:
                parameter_confidence_interval_dict[parameter]["ub"]=new_value
             else:
                parameter_confidence_interval_dict[parameter]["lb"]=new_value"""
             build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list)
             parameter_dict_to_store=copy.deepcopy(parameter_dict)
             if additional_parameter:
                del parameter_dict_to_store[parameter]
             build_confidence_dicts(parameter_confidence_interval_dict,parameter_value_parameters_sets_dict,parameter_dict_to_store)
          if stop_flag==True:
            print "stop"
            if sign==1:
                sign=-1
                parameter_dict=copy.deepcopy(min_parameter_dict)
                parameter_dict,f_best=evaluate_parameter(label_model,parameter_dict,flux_parameter_list,parameter,parameter_lb,parameter_ub,parameter_lb,signficance_threshold,feasability_process,parameter_precision,max_absolute_perturbation/10.0,force_flux_value_bounds,annealing_n,annealing_m,annealing_p0,annealing_pf,max_random_sample,min_random_sample,annealing_n_processes,annealing_cycle_time_limit, annealing_cycle_max_attempts,annealing_iterations=1,annealing_restore_parameters=annealing_restore_parameters)
                if f_best<=signficance_threshold:
                    build_flux_confidence_interval_dict(label_model,flux_confidence_interval_dict,parameter_list)
                    parameter_dict_to_store=copy.deepcopy(parameter_dict)
                    if additional_parameter:
                       del parameter_dict_to_store[parameter]
                    build_confidence_dicts(parameter_confidence_interval_dict,parameter_value_parameters_sets_dict,parameter_dict_to_store)
                else:
                    parameter_dict=copy.deepcopy(min_parameter_dict)
                if output:
                   with open("estimate_confidence_interval_output.txt", "a") as myfile:
                        myfile.write(parameter+" "+"v="+str(parameter_lb)+" chi="+str(f_best)+"\n")
                
            else:
                clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=[parameter], clear_ratios=True,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
                break
          n+=1
          print ["n",n]
          
         
   for reaction_id in original_objectives_bounds:
            reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
            reaction.lower_bound=original_objectives_bounds[reaction_id]["lb"]
            reaction.upper_bound=original_objectives_bounds[reaction_id]["ub"]
            reaction.objective_coefficient=original_objectives_bounds[reaction_id]["obj_coef"]
   #apply_parameters(label_model,best_parameter_dict,parameter_precision=parameter_precision)
   feasability_process.close()
   if "xlsx" in fname or "csv" in fname: 
      print [full_mode]
      if not full_mode:
         save_flux_confidence_interval(label_model,flux_confidence_interval_dict,significance=significance,fn=fname,omit_turnovers=not evaluate_turnovers,parameter_list=parameter_list)
      else:
        save_flux_confidence_interval(label_model,flux_confidence_interval_dict,significance=significance,fn=fname,omit_turnovers=not evaluate_turnovers,parameter_list=None)   
   elif "json" in  fname:
      save_confidence_interval_json(flux_confidence_interval_dict,parameter_confidence_interval_dict,fn=fname)
   constrained_model=save_sbml_with_confidence_results(label_model,flux_confidence_interval_dict,fname=sbml_name,parameter_dict=best_parameter_dict,full_mode=full_mode,parameter_list=parameter_list,precision=precision)     
   apply_parameters(label_model,best_parameter_dict,parameter_precision=parameter_precision)
   return parameter_confidence_interval_dict,flux_confidence_interval_dict,parameter_value_parameters_sets_dict,constrained_model
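#Usage sketch (hypothetical call, not part of the original source; assumes parameters were previously fitted and stored in label_model.parameter_dict):
#    parameter_ci,flux_ci,parameter_sets,constrained_model=estimate_confidence_intervals(label_model,significance=0.95,fname="confidence.json")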
def evaluate_parameter(label_model,parameter_dict,flux_parameter_list,parameter,parameter_lb,parameter_ub,parameter_new_value,signficance_threshold,feasability_process,parameter_precision,max_perturbation,force_flux_value_bounds,annealing_n,annealing_m,annealing_p0,annealing_pf,max_random_sample,min_random_sample,annealing_n_processes,annealing_cycle_time_limit, annealing_cycle_max_attempts,annealing_iterations,annealing_restore_parameters=True):
   "Function used internally to evaluate the ChiSquare with a given parameter to locked to a single value" 
   parameter_dict=copy.deepcopy(parameter_dict)
   #is_flux_value=parameter in flux_parameter_list
   if parameter_dict[parameter]["type"] in ("ratio","flux value"):
             clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=flux_parameter_list, clear_ratios=False,clear_turnover=False,clear_fluxes=True,restore_objectives=False) #Clear all parameters
             """#Get real upper and lower bound for the parameters
             for reaction_id in parameter_dict[parameter]["reactions"]: 
                status,feasability_process=check_feasibility(label_model.constrained_model,tolerance_feasibility=label_model.lp_tolerance_feasibility,time_limit=60,pool=feasability_process)
                fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,reaction_list=[reaction_id],tolerance_feasibility=label_model.lp_tolerance_feasibility)
                lb_list.append(fva[reaction_id]["minimum"])
                ub_list.append(fva[reaction_id]["maximum"])
   parameter_lb=max(lb_list)
   parameter_ub=min(ub_list)"""
   print parameter_dict
   parameter_dict[parameter]["v"]=parameter_new_value
   print [parameter,parameter_dict[parameter]]
   apply_parameters(label_model,parameter_dict,apply_flux_values=True,parameter_precision=parameter_precision,parameter_list=[parameter])
   apply_ratios(label_model.constrained_model,label_model.ratio_dict)
   if parameter_dict[parameter]["type"] in ("ratio","flux value"):
      parameter_backup=copy.deepcopy(parameter_dict)
      for attempt in range(0,10):
          retry_flag=False
          random_parameter_sample=random.sample(flux_parameter_list, len(flux_parameter_list))
          for flux_value in random_parameter_sample:
                  if flux_value==parameter:
                     continue
                  lb=-999999
                  ub=999999
                  for reaction_id in parameter_dict[flux_value]["reactions"]:
                      status,feasability_process=check_feasibility(label_model.constrained_model,tolerance_feasibility=label_model.lp_tolerance_feasibility,time_limit=60,pool=feasability_process)
                      if status =="infeasible":
                         retry_flag=True 
                         clear_parameters(label_model,parameter_dict=parameter_dict,parameter_list=flux_parameter_list, clear_ratios=False ,clear_turnover=False ,clear_fluxes=True, restore_objectives=False)
                         parameter_dict=copy.deepcopy(parameter_backup)
                         apply_parameters(label_model,parameter_dict,apply_flux_values=True,parameter_precision=parameter_precision,parameter_list=[parameter])
                         break
                      fva=flux_variability_analysis(label_model.constrained_model,fraction_of_optimum=0,reaction_list=[reaction_id],tolerance_feasibility=label_model.lp_tolerance_feasibility)
                      ub=min(fva[reaction_id]["maximum"],ub)
                      lb=max(fva[reaction_id]["minimum"],lb)
                  if retry_flag==True:
                     break 
                  value=parameter_dict[flux_value]["v"] #previous value
                  parameter_dict[flux_value]["v"]=min(max(lb,value),ub)
                  apply_parameters(label_model,parameter_dict,apply_flux_values=True,parameter_precision=parameter_precision,parameter_list=[flux_value])
          #print model.optimize()
          if  retry_flag==False:
               break #If no errors were encountered there is no need for further attempts
   apply_parameters(label_model,parameter_dict,apply_flux_values=True,parameter_precision=parameter_precision)
   print "Delta1"
   a,b=solver(label_model)
   print "Delta2"
   f_best,b,c=get_objective_function(label_model,output=False)
   best_flux_dict=copy.deepcopy(label_model.flux_dict)
   print ["FBEST",f_best]
   if f_best>=signficance_threshold:
             backup_parameter_dict=copy.deepcopy(parameter_dict)
             print "coordinated descent"
             parameters_to_fit=copy.copy(parameter_dict)
             del parameters_to_fit[parameter]
             f_best, new_parameters,best_flux_dict=coordinate_descent(label_model,mode="fsolve",parameter_precision=parameter_precision,parameter_to_be_fitted=parameters_to_fit,max_perturbation=max_perturbation,perturbation=1.2,fba_mode="fba",parameter_dict=parameter_dict,force_flux_value_bounds=force_flux_value_bounds)  
             best_flux_dict=label_model.flux_dict
             print [parameter_new_value,f_best]
             if f_best<=signficance_threshold:
                parameter_dict=new_parameters
             else:
                
                 for x in range(0,annealing_iterations): #Try several times to make sure the result is really above the significance threshold
                    parameter_dict,best_flux_dict,f_best=annealing(label_model,n=annealing_n,m=annealing_m,p0=annealing_p0,pf=annealing_pf,max_random_sample=max_random_sample,min_random_sample=min_random_sample,mode="fsolve",fraction_of_optimum=0,parameter_precision=parameter_precision,parameter_to_be_fitted=parameters_to_fit,max_perturbation=max_perturbation,gui=None,fba_mode="fba", break_threshold=signficance_threshold,parameter_dict=parameter_dict,n_processes=annealing_n_processes,cycle_time_limit=annealing_cycle_time_limit, cycle_max_attempts=annealing_cycle_max_attempts,output=False,force_flux_value_bounds=force_flux_value_bounds)
                    if f_best<signficance_threshold:
                        break
                    elif annealing_restore_parameters==True: 
                        parameter_dict=copy.deepcopy(backup_parameter_dict) 
   
   return parameter_dict, f_best
Example #7
def sampling(label_model,n=100,fraction_of_optimum=0,output_emu_list=None,max_turnover=100,parameter_dict={},fba_mode="fba",parameter_precision=0.001,gui=None,change_threshold=0.01):
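    #Overview (comment added for clarity): same sampling routine as above; for each of the n samples the free
    #fluxes and non-parameterized turnovers are randomized, the label propagation system is solved and the
    #simulated mass isotopomer fractions of the requested EMUs are stored in output_dict.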
    print parameter_precision
    precision=int(-1*(math.log10(parameter_precision)))
    #print ["hello",label_model.constrained_model.reactions.get_by_id("biomass").lower_bound]
    original_model=copy.deepcopy(label_model.constrained_model)
    original_turnover=copy.deepcopy(label_model.turnover_flux_dict)
    apply_parameters(label_model,parameter_dict)
    #model=label_model.constrained_model
    #try:
    objective_dict={}
    if fraction_of_optimum!=0:       
       for reaction in label_model.constrained_model.objective: 
            fva=flux_variability_analysis(label_model.constrained_model,reaction_list=[reaction], fraction_of_optimum=fraction_of_optimum,tolerance_feasibility=label_model.lp_tolerance_feasibility)
            objective_dict[reaction]={"lb":reaction.lower_bound,"ub":reaction.upper_bound,"obj":reaction.objective_coefficient}
            reaction.lower_bound=round_down(fva[reaction.id]["minimum"],precision)
            reaction.upper_bound=round_up(fva[reaction.id]["maximum"],precision)
            reaction.objective_coefficient=0
    if output_emu_list==None:
       output_emu_list=label_model.data_name_emu_dict.keys()
    #Prepare output
    output_dict={}
    for condition in label_model.condition_initial_label_yy_dict:
        output_dict[condition]={}
        for emu in output_emu_list:
            size=label_model.emu_dict[emu]["size"]
            if size not in output_dict[condition]:
               output_dict[condition][size]={}
            for mi in label_model.emu_dict[emu]["mid"]:
                output_dict[condition][size][label_model.emu_dict[emu]["mid"][mi]]=[]    
     #identify the turnover fluxes that are already part of the parameters as they will be excluded
    apply_ratios(label_model.constrained_model,label_model.ratio_dict)
    #free_parameters=identify_free_fluxes(label_model,parameter_dict={},fraction_of_optimum=0,change_threshold=change_threshold,parameter_precision=parameter_precision,max_d=0.1,key_reactions=[],original_fva=None,debug=False)
    """if len (free_parameters)==0:
       return output_dict"""
    #print "%s free parameters found"%(len(free_parameters))
    #turnover_parameter_list=[]
    """flux_value_parameter_list=[]
    for parameter in free_parameters:
        parameter_dict=free_parameters[parameter]
        if parameter_dict["type"]=="turnover":
           turnover_parameter_list.append(parameter)
        elif parameter_dict["type"]=="flux value":
           flux_value_parameter_list.append(parameter)"""
    #Save the original lower and upper_bounds and turnover 
    #Generate the samples
    free_parameters={}
    for i in range(0,n):
         clear_parameters(label_model,parameter_dict=free_parameters,parameter_list=[], clear_ratios=True,clear_turnover=True,clear_fluxes=True,restore_objectives=True,delete_parameters=True)
         free_parameters=identify_free_fluxes(label_model,parameter_dict=free_parameters,fraction_of_optimum=0,change_threshold=change_threshold,parameter_precision=parameter_precision,max_d=0.1,key_reactions=[],original_fva=None,debug=False)
         for reaction_id in free_parameters:
            reaction=label_model.constrained_model.reactions.get_by_id(reaction_id)
            original_reaction=original_model.reactions.get_by_id(reaction_id)
            reaction.lower_bound=original_reaction.lower_bound
            reaction.upper_bound=original_reaction.upper_bound
        
         working_flux_value_parameter_list=random.sample(free_parameters, len(free_parameters))
         #try:
         #print free_parameters
         for reaction_id in working_flux_value_parameter_list: 
            fva=flux_variability_analysis(label_model.constrained_model, reaction_list=[reaction_id],fraction_of_optimum=0,tolerance_feasibility=label_model.lp_tolerance_feasibility)
            if fva[reaction_id]["maximum"]-fva[reaction_id]["minimum"]>parameter_precision:
               new_value=random.uniform(fva[reaction_id]["minimum"],fva[reaction_id]["maximum"])
               free_parameters[reaction_id]["v"]=new_value
               #print [reaction_id,new_value]
               apply_parameters(label_model,parameter_dict=free_parameters,parameter_precision=parameter_precision,parameter_list=[reaction_id])
         for turnover in label_model.turnover_flux_dict:
             if (turnover+"_turnover") not in parameter_dict: 
                lb=label_model.turnover_flux_dict[turnover]["lb"]
                ub=label_model.turnover_flux_dict[turnover]["ub"]
                new_value=round(random.uniform(lb,ub),precision) 
                #print new_value
                label_model.turnover_flux_dict[turnover]["v"]=new_value
         #print label_model.turnover_flux_dict     
         #apply_parameters(label_model,parameter_dict=free_parameters,parameter_list=turnover_parameter_list,parameter_precision=parameter_precision)
         a,b=solver(label_model,mode="fsolve",fba_mode=fba_mode)
         #Store output
         for condition in output_dict:
            for size in output_dict[condition]:
                for mi in output_dict[condition][size]:
                    if mi in label_model.size_variable_dict[size]:
                       n_emu=label_model.size_variable_dict[size][mi]
                       mi_value=round(label_model.condition_size_yy_dict[condition][size][n_emu],4)
                       output_dict[condition][size][mi].append(mi_value)
         if gui!=None:
           #get_objective_function(label_model)
           gui.update_label_sampling()
           gui.root.update_idletasks()
           a,b,c=get_objective_function(label_model,force_balance=label_model.force_balance,output=False)               
            
         print "sample %s of %s..."%((i+1),n)
         #Restore original values for flux constraints
         #except:
          # continue
    clear_parameters(label_model,parameter_dict=free_parameters,parameter_list=[], clear_ratios=True,clear_turnover=True,clear_fluxes=True,restore_objectives=True,delete_parameters=True)
    
    for  reaction in objective_dict:
          reaction.lower_bound=objective_dict[reaction]["lb"]
          reaction.upper_bound=objective_dict[reaction]["ub"]
          reaction.objective_coefficient=objective_dict[reaction]["obj"]
    label_model.turnover_flux_dict=original_turnover
    #label_model.constrained_model=original_model
    #print ["hello",label_model.constrained_model.reactions.get_by_id("biomass").lower_bound]
    #apply_parameters(label_model,parameter_dict=None,parameter_precision=parameter_precision)
    a,b=solver(label_model,mode="fsolve",fba_mode=fba_mode)
    get_objective_function(label_model)
    #except:
    """print "Error"
    label_model.turnover_flux_dict=original_turnover
    label_model.constrained_model=original_model"""
    #print ["hello",label_model.constrained_model.reactions.get_by_id("biomass").lower_bound]
    return output_dict