Code example #1
def load_into_CheKiPUEQ(simulation_function, observed_data, pars_initial_guess = [], pars_lower_bnds=[], pars_upper_bnds =[], pars_bnds_exist = [], observed_data_lower_bounds=[], observed_data_upper_bounds=[], weights_data=[], pars_uncertainty_distribution='automatic', sigma_multiple = 3.0, num_rate_constants_and_rate_constant_parameters=[]):
    #observed_data is an array of values of observed data (can be nested if there is more than one observable).
    #pars_lower_bnds and pars_upper_bnds are the bounds of the parameters ('coefficients') in absolute values.
    #  For a 'uniform' distribution the bounds are taken directly. For a Gaussian, the larger of the two deltas is taken and divided by sigma_multiple (default 3) to obtain sigma.
    #pars_bnds_exist is an array-like with pairs like [True, False], where the Booleans indicate whether the parameter has a lower bound and an upper bound, respectively. There is one pair of Booleans per parameter.
    #pars_initial_guess is the initial guess for the parameters ('coefficients').
    #weights_data is an optional array of values that matches observed_data in length.
    #sigma_multiple is how many sigma the bounds are equal to (relative to the mean).
    #pars_uncertainty_distribution allows 3 choices: 'automatic', 'gaussian', 'uniform'. Automatic gives 'uniform' to the rate constants and 'gaussian' to the rate_constant_parameters.
    #num_rate_constants_and_rate_constant_parameters allows the 'automatic' setting of pars_uncertainty_distribution to assign distribution types based on the parameter index. If not supplied, everything is assumed to be a rate constant.
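    #Worked example of the Gaussian sigma rule above (illustrative numbers only): for a parameter with
    #initial guess 10, lower bound 4, and upper bound 19, the deltas are 10-4=6 and 19-10=9; the larger
    #delta is 9, so with sigma_multiple=3.0 the assigned sigma would be 9/3 = 3.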
    
    #TODO: put a "clear UserInput" type call here to UnitTesterSG_local
    
    if len(num_rate_constants_and_rate_constant_parameters) == 0: num_rate_constants_and_rate_constant_parameters = [len(pars_initial_guess), 0]
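    #For example (illustrative), a system with 5 rate constants and 2 additional rate-constant parameters
    #would pass num_rate_constants_and_rate_constant_parameters = [5, 2].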
    
    UserInput.responses['responses_abscissa'] = []
    UserInput.responses['responses_observed'] = np.array(observed_data).T
    num_responses = np.shape(UserInput.responses['responses_observed'])[0]
    UserInput.responses['responses_observed_uncertainties'] = []
    if len(observed_data_lower_bounds) > 0: #assume that both lower and upper bounds exist on data if there is a lower bounds array provided.
        UserInput.responses['responses_observed_uncertainties'] = extract_larger_delta_and_make_sigma_values(UserInput.responses['responses_observed'], observed_data_lower_bounds, observed_data_upper_bounds, sigma_multiple)   
    try:
        #weights_data = np.atleast_2d(weights_data).T
        UserInput.responses['responses_observed_weighting'] = weights_data #(weights_data*np.ones(num_responses)).T
    except:
        print("There was an error in the weightings in CheKiPEUQ_from_Frhodo processing.")
    UserInput.model['InputParameterPriorValues'] = pars_initial_guess
    if pars_uncertainty_distribution.lower() == 'uniform': #make an array of -1 for uncertainties to signify a uniform distribution.
        UserInput.model['InputParametersPriorValuesUncertainties'] = -1*np.ones(len(pars_initial_guess))
    if pars_uncertainty_distribution.lower() == 'gaussian': 
        UserInput.model['InputParametersPriorValuesUncertainties'] = extract_larger_delta_and_make_sigma_values(pars_initial_guess, pars_lower_bnds, pars_upper_bnds, sigma_multiple)
    if pars_uncertainty_distribution.lower() == 'automatic' or pars_uncertainty_distribution.lower() == 'auto': 
        num_rate_constants = num_rate_constants_and_rate_constant_parameters[0] 
        num_rate_constants_parameters = num_rate_constants_and_rate_constant_parameters[1]
        rate_constant_uncertainties = -1*np.ones(num_rate_constants) #by default, use uniform for the rate constants (signified by -1); the array length must equal the number of rate constants.
        rate_constant_parameters_uncertainties = extract_larger_delta_and_make_sigma_values(pars_initial_guess[num_rate_constants:], pars_lower_bnds[num_rate_constants:], pars_upper_bnds[num_rate_constants:], sigma_multiple)  #this returns a 1-sigma value for a Gaussian, assuming that the range spans sigma_multiple in each direction.
        UserInput.model['InputParametersPriorValuesUncertainties'] = np.concatenate( (rate_constant_uncertainties, rate_constant_parameters_uncertainties) )
    if len(pars_bnds_exist) > 1: #If this is not a blank list, we check each entry. For any parameter that has a "False" (a missing bound), we set the InputParametersPriorValuesUncertainties value to -1 to indicate a uniform distribution, since a Gaussian cannot be constructed without both bounds.
        for exist_index, lower_upper_booleans in enumerate(pars_bnds_exist):
            #print("line 85", exist_index, lower_upper_booleans, np.sum(lower_upper_booleans))
            if np.sum(lower_upper_booleans) < 2: #True True will add to 2, anything else does not pass.
                UserInput.model['InputParametersPriorValuesUncertainties'][exist_index] = -1
    #print("line 86", UserInput.model['InputParametersPriorValuesUncertainties']) 
    
    #CheKiPEUQ cannot handle much larger than 1E100 for upper bounds.
    for upper_bound_index, upper_bound in enumerate(pars_upper_bnds):
        if upper_bound > 1.0E100:
            pars_upper_bnds[upper_bound_index] = 1.0E100

    #CheKiPEUQ cannot handle much more negative than -1E100 for lower bounds.
    for lower_bound_index, lower_bound in enumerate(pars_lower_bnds):
        if lower_bound < -1.0E100:
            pars_lower_bnds[lower_bound_index] = -1.0E100
    
    UserInput.model['InputParameterPriorValues_upperBounds'] = np.array(pars_upper_bnds)
    UserInput.model['InputParameterPriorValues_lowerBounds'] = np.array(pars_lower_bnds)
    UserInput.model['simulateByInputParametersOnlyFunction'] = simulation_function
    #print("line 61", CKPQ.frog)
    PE_object = CKPQ.parameter_estimation(UserInput)
    return PE_object
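A minimal, hypothetical call to the function above might look like the following sketch; the simulation function, data values, and bounds are illustrative placeholders rather than values from the project.

#Hypothetical usage sketch for load_into_CheKiPUEQ; every value below is a placeholder.
def example_simulation_function(parameters):
    #A stand-in simulation that returns one simulated value per observed data point.
    a, b = parameters
    return [a*x + b for x in [1.0, 2.0, 3.0]]

example_observed_data = [2.1, 4.2, 5.9]
PE_object = load_into_CheKiPUEQ(simulation_function=example_simulation_function,
                                observed_data=example_observed_data,
                                pars_initial_guess=[2.0, 0.0],
                                pars_lower_bnds=[0.1, -5.0],
                                pars_upper_bnds=[10.0, 5.0],
                                pars_bnds_exist=[[True, True], [True, True]],
                                pars_uncertainty_distribution='gaussian')
#The returned PE_object can then be used as in the later examples, e.g. PE_object.doMetropolisHastings().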
Code example #2
def run_simulation(inputs):

    folder_name, priors = inputs

    import observed_values_00  #Just a simple example. The user could also put the values directly into the runfile or extract them from a csv, for example.
    import simulation_model_00  #Simple example.

    try:
        os.mkdir(folder_name)
    except OSError: #The directory likely already exists, so the error is ignored.
        pass

    os.chdir(folder_name)

    UserInput.responses['responses_abscissa'] = observed_values_00.observed_data_x_values
    UserInput.responses['responses_observed'] = observed_values_00.observed_data_y_values
    UserInput.responses['responses_observed_uncertainties'] = observed_values_00.observed_data_y_values_uncertainties

    UserInput.simulated_response_plot_settings['x_label'] = 'distance (m)'
    UserInput.simulated_response_plot_settings['y_label'] = r'$time (s)$'
    UserInput.simulated_response_plot_settings['fontdict'] = {'size': 16}

    UserInput.model['parameterNamesAndMathTypeExpressionsDict'] = {
        'a': 'a',
        'b': 'b'
    }
    UserInput.model['InputParameterPriorValues'] = priors  #prior expected values for a and b
    UserInput.model['InputParametersPriorValuesUncertainties'] = [100, 200]  #required. If the user wants to use a prior with covariance, then this must be a 2D array/list. To assume no covariance, a 1D array/list of standard deviations can be used.
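    #For illustration only (assuming the 2D form is interpreted as a covariance matrix with variances on the
    #diagonal), a no-covariance equivalent of the 1D form above could look like:
    #UserInput.model['InputParametersPriorValuesUncertainties'] = [[100.0**2, 0.0], [0.0, 200.0**2]]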
    #UserInput.model['InputParameterInitialGuess'] = [150,400] #Can optionally change the initial guess to be different from prior means.

    UserInput.model['simulateByInputParametersOnlyFunction'] = simulation_model_00.simulation_function_wrapper  #This must simulate with *only* the parameters listed above, and no other arguments.

    UserInput.parameter_estimation_settings['mcmc_length'] = 10000  #10000 is the default.

    #UserInput.parameter_estimation_settings['mcmc_random_seed'] = 0 This can be useful for testing.
    #After making the UserInput, now we make a 'parameter_estimation' object from it.
    PE_object = CKPQ.parameter_estimation(UserInput)
    PE_object.doMetropolisHastings()
    PE_object.createAllPlots()  #This function calls each of the individual plotting functions so that the user does not have to.
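The example above depends on user-supplied modules; the following is only a guessed-at sketch (not the project's actual simulation_model_00 or observed_values_00) of what a wrapper obeying the simulateByInputParametersOnlyFunction constraint could contain.

#Hypothetical sketch of a simulation wrapper such as simulation_model_00.simulation_function_wrapper.
import numpy as np

observed_data_x_values = np.array([0.0, 1.0, 2.0, 3.0])  #placeholder abscissa shared with the observed data

def simulation_function_wrapper(parametersArray):
    #CheKiPEUQ calls this with *only* the parameter array, so the abscissa comes from module scope.
    a, b = parametersArray
    simulated_y_values = a*observed_data_x_values + b  #placeholder model: a straight line
    return simulated_y_values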
Code example #3
    UserInput.parameter_estimation_settings['exportLog'] = False
    UserInput.parameter_estimation_settings['checkPointFrequency'] = 100

    UserInput.parameter_estimation_settings['mcmc_mode'] = 'unbiased'
    UserInput.parameter_estimation_settings['mcmc_random_seed'] = 0  #Normally set to None so that the mcmc sampling is random. To get the same results repeatedly, such as for testing purposes, set the random seed to 0 or another integer.
    UserInput.parameter_estimation_settings['mcmc_burn_in'] = 1
    UserInput.parameter_estimation_settings['mcmc_length'] = 1001
    UserInput.parameter_estimation_settings['mcmc_relative_step_length'] = 0.05
    UserInput.parameter_estimation_settings['mcmc_modulate_accept_probability'] = 1000  #Default value of 0. Changing this value sharpens or flattens the posterior. A value greater than 1 flattens the posterior by accepting low values more often. It can be useful when greater sampling is more important than accuracy. One way of using this feature is to try a value of 0, then a value equal to the number of priors for comparison, and then to gradually decrease the number as low as is useful (to minimize distortion of the result). A downside of setting this variable greater than 1 is that it slows the ascent to the maximum of the prior, so there is a balance in using it. In contrast, numbers increasingly less than one (such as 0.90 or 0.10) will speed up the ascent to the maximum of the posterior, but will also result in fewer points being retained.

    UserInput.contour_plot_settings['contours_normalized'] = True

    #After making the UserInput, now we make a 'parameter_estimation' object from it.
    PE_object = CKPQ.parameter_estimation(UserInput)

    #    #Now we do parameter estimation.
    #    PE_object.doMetropolisHastings()
    # PE_object.doSinglePoint()
    #    PE_object.createAllPlots() #This function calls each of the below functions.

    #    PE_object.doOptimizeNegLogP(method="Nelder-Mead", printOptimum=True, verbose=True)

    #    PE_object.doGridSearch('getLogP')
    #PE_object.doGridSearch('doMetropolisHastings')
    #    PE_object.doGridSearch('doOptimizeNegLogP', verbose = True,gridSamplingRadii = [], passThroughArgs={'method':'BFGS'})

    PE_object.doGridSearch(
        'doMetropolisHastings',
        gridSamplingAbsoluteIntervalSize=UserInput.
Code example #4
    ax.set_title('Surface Plot of F_A')
    fig.colorbar(surf, shrink=0.5, aspect=5)
    fig.savefig('synthetic_observables.png',dpi=220)

    fun.k_1 = 1e2
    fun.k_minus_1 = 1
    prior = np.random.normal(2e3,1e3,10000)
    info_gains=[]
    PE_object_list = []
    for v in volumes:
        for t in temperatures:
            sol = odeint(fun.cmr, F0, np.linspace(0,v,50), args=(k_1,k_minus_1,k_m,t))
            conc_sol_last=sol[-1,0].T
            print('conc_sol_last',conc_sol_last)
            UserInput.responses['responses_observed'] = conc_sol_last
            PE_object_list.append(CKPQ.parameter_estimation(UserInput))
            fun.T = t
            fun.volume = v
            [map_parameter_set, muap_parameter_set, stdap_parameter_set, evidence, info_gain, samples, logP] = PE_object_list[-1].doMetropolisHastings()
            info_gains.append(info_gain)
            fig, ax = plt.subplots()
            (density0,bins0,pathces0)=ax.hist([prior,PE_object_list[-1].post_burn_in_samples.flatten()],bins=100,label=['prior','posterior'],density=True)
            ax.legend()
            ax.set_ylabel('Probability density')
            ax.set_title('Prior and Posterior Density Plot at T = {} (K) volume = {} cm^3'.format(str(t),str(v)))
            fig.savefig('km_only_figures/prior_and_posterior_histogram_T_{}_V_{}.png'.format(str(t),str(v)), dpi=300)
    fig,ax = plt.subplots(figsize=(5,5))
    #ax = fig.add_subplot(111, projection='3d')
    T, V = np.meshgrid(temperatures, volumes)
    info_gains=np.asarray(info_gains)
    IG = info_gains.reshape(T.shape)
Code example #5
def main():
    import simulationFunctionTPRandTp #This will provide the "simulation" function.
    import numpy as np
    global PE_object

    UserInput.responses['responses_abscissa'] = np.array([1]) # arbitrary since there is only one response value.
    UserInput.responses['responses_observed'] = np.array([[555]]) #
    UserInput.responses['responses_observed_uncertainties'] = np.array([[6]])

    UserInput.model['parameterNamesAndMathTypeExpressionsDict'] = {'logA':r'log(A / $s^{-1}$)','Ea':r'Ea (J $mol^{-1}$)'}#,'Theta0':'Theta0', 'beta_H':'beta_H', 'n':'n'}
    UserInput.model['InputParameterPriorValues'] = [12, 114820] 
    UserInput.model['InputParametersPriorValuesUncertainties'] = [1, 20000] #If the user wants to use a prior with covariance, then this must be a 2D array/list. To assume no covariance, a 1D array/list of standard deviations can be used.
    #UserInput.model['InputParameterInitialGuess'] = [13, 65000] #This is where the mcmc chain will start.
    UserInput.model['responses_simulation_uncertainties'] = [[25]] #This is nested because it mirrors the responses observed uncertainties.
    
    UserInput.model['simulateByInputParametersOnlyFunction'] = simulationFunctionTPRandTp.getTpFromKineticParametersAndInitialCoverageWrapper #This must simulate with *only* the parameters listed above, and no other arguments.
        
    
    #UserInput.simulated_response_plot_settings['figure_name'] = 'Posterior_Example_two_response' #This creates the filename, also.
    

    
    UserInput.parameter_estimation_settings['verbose'] = False 
    UserInput.parameter_estimation_settings['exportAllSimulatedOutputs'] = False
    UserInput.parameter_estimation_settings['mcmc_checkPointFrequency'] = 1000
    UserInput.parameter_estimation_settings['exportLog'] = True
     
    UserInput.parameter_estimation_settings['mcmc_mode'] = 'unbiased'
    UserInput.parameter_estimation_settings['mcmc_random_seed'] = 0 #Normally set to None so that the mcmc sampling is random. To get the same results repeatedly, such as for testing purposes, set the random seed to 0 or another integer.
    UserInput.parameter_estimation_settings['mcmc_burn_in'] = 1000
    UserInput.parameter_estimation_settings['mcmc_length'] = 10000
    UserInput.parameter_estimation_settings['mcmc_relative_step_length'] = 1.0
    UserInput.parameter_estimation_settings['mcmc_modulate_accept_probability']  = 0 #Default value of 0. Changing this value sharpens or flattens the posterior. A value greater than 1 flattens the posterior by accepting low values more often. It can be useful when greater sampling is more important than accuracy. One way of using this feature is to try a value of 0, then a value equal to the number of priors for comparison, and then to gradually decrease the number as low as is useful (to minimize distortion of the result). A downside of setting this variable greater than 1 is that it slows the ascent to the maximum of the prior, so there is a balance in using it. In contrast, numbers increasingly less than one (such as 0.90 or 0.10) will speed up the ascent to the maximum of the posterior, but will also result in fewer points being retained.
    UserInput.parameter_estimation_settings['scaling_uncertainties_type'] = "off"
    UserInput.contour_plot_settings['parameter_pairs'] = [[0, 1]]
    #UserInput.contour_plot_settings['contours_normalized'] = True

    #After making the UserInput, now we make a 'parameter_estimation' object from it.
    PE_object = CKPQ.parameter_estimation(UserInput)
    
    #Now we do parameter estimation.
    PE_object.doMetropolisHastings()
    #[map_parameter_set, muap_parameter_set, stdap_parameter_set, evidence, info_gain, samples, samples_simulatedOutputs, logP] = PE_object.doMetropolisHastings()
    
    PE_object.createAllPlots() #This function calls each of the below functions.
#    PE_object.makeHistogramsForEachParameter()    
#    PE_object.makeSamplingScatterMatrixPlot()
#    PE_object.createSimulatedResponsesPlots()
    #TODO: call the mum_pce plotting objects, which will be PE_object.createContourGraphs() or something like that.

    # PE_object = CKPQ.parameter_estimation(UserInput)
    # PE_object.doOptimizeNegLogP(method="BFGS", printOptimum=True, verbose=False)
    # PE_object.createAllPlots()
    
        
    UserInput.contour_plot_settings['figure_name']='PosteriorContourPlotDisproportionation'
    UserInput.contour_plot_settings['fontsize']='22'
    UserInput.contour_plot_settings['num_y_ticks']=3
    UserInput.contour_plot_settings['num_x_ticks']=3
    UserInput.contour_plot_settings['colorbars']='False'
    
    PE_object.createAllPlots()
Code example #6
#    PE_object = CKPQ.parameter_estimation(UserInput)
#    PE_object.doMetropolisHastings()
#PE_object.createAllPlots()


UserInput.doe_settings['info_gains_matrices_array_format'] = 'meshgrid'
UserInput.doe_settings['info_gains_matrices_multiple_parameters'] = 'sum'
UserInput.doe_settings['independent_variable_grid_center'] = [500, 0.5]
UserInput.doe_settings['independent_variable_grid_interval_size'] = [100, 0.1]
UserInput.doe_settings['independent_variable_grid_num_intervals'] = [1,1] #This is the number in each direction outward from center. So a 2 here gives 5 evaluations. A zero means we don't allow the parameter to vary.
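#Worked example of the grid above (illustrative arithmetic only): with a center of [500, 0.5], interval
#sizes of [100, 0.1], and num_intervals of [1, 1], the first independent variable is evaluated at 400, 500,
#and 600 and the second at 0.4, 0.5, and 0.6, giving 3 x 3 = 9 condition combinations.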

UserInput.doe_settings['parameter_modulation_grid_interval_size'] = [1,1] #use a non-zero value even for parameters that you will not vary.
UserInput.doe_settings['parameter_modulation_grid_num_intervals'] = [1,1] #make the number of intervals zero for any parameter that you don't want to vary.
UserInput.doe_settings['parallel_conditions_exploration'] = True

PE_object = CKPQ.parameter_estimation(UserInput)




PE_object.doeParameterModulationPermutationsScanner()
#PE_object.createInfoGainPlots() 

if len(PE_object.info_gains_matrices_array) > 0:
    CKPQ.pickleAnObject(PE_object.info_gains_matrices_array, "runfile_for_unit_test_parallel_doe_conditions_exploration")



#To obtain a single info gain matrix, for a single set of independent variables, we would use the following syntax:
# del PE_object
# UserInput.doe_settings['info_gains_matrices_array_format'] = 'meshgrid'
Code example #7
File: test_3.py  Project: AdityaSavara/CheKiPEUQ
#get the suffix argument for check_results
suffix = ut.returnDigitFromFilename(__file__)
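#Note: "ut" above is assumed to be provided earlier in the full test file (presumably via something like "import UnitTesterSG as ut"), since it is not imported in this excerpt.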
#prefix. Make this '' if you do not want any prefix.
prefix = ''
"""
#This file is an example/template for when we ***don't have an analytical result*** but we know our function is working.
#We know the function is working during template distribution because we are just using the test 12 example.
In this template, we ***will not*** use the "set_expected_result" command. So we are commenting out the below lines, and will go directly to using the function to create an actual output.
"""
import sys
sys.path.append('../../')
import CheKiPEUQ as CKPQ
import CheKiPEUQ
import numpy as np
import runfile_for_unit_test_parallel_doe_control  #This will run the file, given how it's structured.
expectedResult = CheKiPEUQ.unpickleAnObject("runfile_for_unit_test_parallel_doe_control")

# # # #input for the unit that will be tested
# # # input = 4
#expectedFirstPart = runfile_for_unit_test_parallel_doe_a.PE_object.info_gains_matrices_array
# # # expectedSecondPart = [32,64]
# # # expectedResult = (expectedFirstPart,expectedSecondPart) #We are using a tuple, but this could have been a list.

ut.set_expected_result(
    expectedResult,
    expected_result_str=str(expectedResult),
    prefix=prefix,
    suffix=suffix
)  #This is the typical syntax if you want to force an analytical result for your test.
"""
#Calculate our function outputs (actual results). We can use functions from another module in this section.
Code example #8
File: test_4.py  Project: AdityaSavara/CheKiPEUQ
"""
#This file is an example/template for when we ***don't have an analytical result*** but we know our function is working.
#We know the function is working during template distribution because we are just using the test 12 example.
In this template, we ***will not*** use the "set_expected_result" command. So we are commenting out the below lines, and will go directly to using the function to create an actual output.
"""
import sys

sys.path.append('../../')
import CheKiPEUQ as CKPQ
import CheKiPEUQ
import numpy as np
#NOTE:  We are **skipping** importing the below runfile because we assume test_3 will be run before test_4.  To use test_4 by itself, the below would need to be done differently.
try:
    expectedResultFile = open("runfile_for_unit_test_parallel_doe_control.pkl")  #Check whether the pickled result from test_3 already exists.
    expectedResultFile.close()
    expectedResult = CheKiPEUQ.unpickleAnObject("runfile_for_unit_test_parallel_doe_control")[-1]  #Take the last info_matrix_array.
except FileNotFoundError:
    import runfile_for_unit_test_parallel_doe_control  #This will run the file, given how it's structured.
    expectedResult = CheKiPEUQ.unpickleAnObject("runfile_for_unit_test_parallel_doe_control")[-1]  #Take the last info_matrix_array.

# # # #input for the unit that will be tested
# # # input = 4
#expectedFirstPart = runfile_for_unit_test_parallel_doe_a.PE_object.info_gains_matrices_array
# # # expectedSecondPart = [32,64]
# # # expectedResult = (expectedFirstPart,expectedSecondPart) #We are using a tuple, but this could have been a list.
Code example #9
    UserInput.model['parameterNamesAndMathTypeExpressionsDict'] = {
        'a': 'a',
        'b': 'b'
    }
    UserInput.model['InputParameterPriorValues'] = [200, 500]  #prior expected values for a and b
    UserInput.model['InputParametersPriorValuesUncertainties'] = [100, 200]  #required. If the user wants to use a prior with covariance, then this must be a 2D array/list. To assume no covariance, a 1D array/list of standard deviations can be used.
    #UserInput.model['InputParameterInitialGuess'] = [150,400] #Can optionally change the initial guess to be different from prior means.

    UserInput.model['simulateByInputParametersOnlyFunction'] = simulation_model_00.simulation_function_wrapper  #This must simulate with *only* the parameters listed above, and no other arguments.
    UserInput.parameter_estimation_settings['mcmc_length'] = 1000  #10000 is the default.

    #After making the UserInput, now we make a 'parameter_estimation' object from it.
    PE_object = CKPQ.parameter_estimation(UserInput)
    mcmc_output = PE_object.doEnsembleSliceSampling()
    PE_object.createAllPlots()  #This function calls each of the below functions so that the user does not have to.
    #    PE_object.makeHistogramsForEachParameter()
    #    PE_object.makeSamplingScatterMatrixPlot()
    #    PE_object.createSimulatedResponsesPlots()

    CKPQ.save_PE_object(PE_object, "SavingProjectExample")

    PE_object = CKPQ.load_PE_object("SavingProjectExample")
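    #A reloaded PE_object could then be reused, for example to regenerate plots with PE_object.createAllPlots(),
    #assuming the required modules and working directory are still available.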