# Transpose the observed data to match the orientation the model below
# expects — presumably observations along the first axis; confirm upstream.
data = data.T
# Per-dimension standard deviations: a (200, 1) float64 column vector
# holding the values 1.0 through 200.0.
sd = np.arange(1, 201, dtype='float64').reshape(200, 1)


with model:
    # Prior: each of the 200 dimensions is drawn independently from
    # Uniform(-5, 15).
    params = pm.Uniform('params', lower=-5.0, upper=15.0, shape=(200,1), dtype='float64')

    # Independent normals stand in for the multivariate normal (the MvNormal
    # form below would not run); with a diagonal covariance they are
    # equivalent.
    pm.Normal('true_dist', mu=params, sd=sd, observed=data)
    #pm.MvNormal('true_dist', mu=params, tau=prec_matrix, observed=data)

    # Algorithmic values given in paper (eps = b* in paper).
    # NOTE(review): 10e-6 equals 1e-5 — confirm the paper's b* value.
    step = pm.Dream(DEpairs=1, snooker=.1, nseedchains=2000, eps=10e-6, nCR=3, multitry=5)

    # njobs = number of chains
    trace = pm.sample(400000, step, njobs=3)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('2015_03_17_mv_test_mtdreamzs.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)

    # Parenthesized print works on both Python 2 and 3 (the original bare
    # print statement is a syntax error on Python 3).
    print('Saving trace to text...')
    text.dump('2015_03_17_mv_test_mtdreamzs', trace)
    print('Trace saved successfully.')
    
Beispiel #2
0
    # Tail of an objective function whose definition is cut off above this
    # chunk: summarizes a simulated MOMP trajectory and scores it against
    # data with a Gaussian likelihood term.
    td = (t10 + t90) / 2    # switch midpoint of the 10%/90% crossing times
    ts = t90 - t10          # switch duration
    yfinal = ysim_momp[-1]  # final simulated MOMP value
    momp_sim = [td, ts, yfinal]
    e3 = np.sum(np.exp(-1*(momp_data - momp_sim) ** 2 / (2 * momp_var)))
    # Combined error: negative log-likelihoods of the three terms.
    # NOTE(review): e1 and e2 are computed in the cut-off portion — confirm.
    error = -1.*np.log(e1) + -1.*np.log(e2) + -1.*np.log(e3)
    #print error
    # Trailing comma returns a 1-tuple — presumably a DEAP-style fitness;
    # confirm against the caller.
    return -1*error,
    #return (e1, e2, e3,)

if "__main__" == __name__:
    
    nm = Nightmare(EARM,likelihood,xnominal,'test')
    nm.run_pso(4, 25,200)
    ranked = nm.pso.return_ranked_populations()
    savename = 'unweighted_cost_function_100k_std_1'
    traces = nm.run_DREAM(nsamples=100000)
    from pymc.backends import text
    
    text.dump('traces_%s'%savename, traces)    
        
    dictionary_to_pickle = {}
    
    for dictionary in traces:
        for var in dictionary:
            dictionary_to_pickle[var] = traces[var] 
    
    pickle.dump(dictionary_to_pickle, open('pickled_traces_%s.p'%savename, 'wb'))


    # NOTE(review): `box2`, `model`, `likelihood_thermobox*`, and `start` are
    # defined in a portion of this script not visible here — confirm upstream.
    # Each thermodynamic box constraint is a soft equality (observed 1,
    # sd 1e-2) on a cycle-closure likelihood.
    pm.Normal('thermodynamic_box2', mu=box2, sd=1e-2, observed=1)

    box3 = likelihood_thermobox3(model.KD_AA_cat1, model.KD_AA_cat2, model.KD_AG_allo1, model.KD_AG_allo2)
    pm.Normal('thermodynamic_box3', mu=box3, sd=1e-2, observed=1)

    box4 = likelihood_thermobox4(model.KD_AG_cat1, model.KD_AG_cat2, model.KD_AG_allo1, model.KD_AG_allo3)
    pm.Normal('thermodynamic_box4', mu=box4, sd=1e-2, observed=1)

    #Start from end of last trace
    #start2 = {('KD_AA_cat1',np.log10(cox2_model.parameters['kr_AA_cat1'].value/cox2_model.parameters['kf_AA_cat1'].value)), ('kcat_AA1', np.log10(cox2_model.parameters['kcat_AA1'].value)), ('KD_AA_cat2',-2.76), ('kcat_AA2',.235), ('KD_AA_cat3',-2.265), ('kcat_AA3',.1760), ('KD_AG_cat1', np.log10(cox2_model.parameters['kr_AG_cat1'].value/cox2_model.parameters['kf_AG_cat1'].value)), ('kcat_AG1', np.log10(cox2_model.parameters['kcat_AG1'].value)), ('KD_AG_cat2',-.7495), ('KD_AG_cat3',-2.793), ('kcat_AG3',-.5095), ('KD_AA_allo1',2.290), ('KD_AA_allo2',.1001), ('KD_AA_allo3',-.3808), ('KD_AG_allo1',2.280), ('KD_AG_allo2', np.log10(cox2_model.parameters['kr_AG_allo2'].value/cox2_model.parameters['kf_AG_allo2'].value)), ('KD_AG_allo3',1.650)}

    #Select MCMC stepping method
    step = pm.Dream(nseedchains=120, blocked=True, start_random=True, save_history=True, parallel=False, multitry=5, adapt_crossover=True, model_name='CORM_mtdreamzs_5chain_profiling')

    #old_trace = text.load('test')
    #print 'old trace: ',old_trace['KD_AA_cat2']

    trace = pm.sample(500, step, start=start, njobs=5)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    text.dump('CORM_mtdreamzs_5chain_profiling', trace)

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('CORM_mtdreamzs_5chain_profiling.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)
    
    # Track the simulated trajectories as deterministic variables so they are
    # stored with the trace.
    icrp = pm.Deterministic('icrp', icrp)
    ecrp = pm.Deterministic('ecrp', ecrp)
    momp = pm.Deterministic('momp', momp)
    #error_like = pm.ArbLikelihood('like', error)

    #Select point in parameter space to start
    #start = pm.find_MAP()

    #Select stepping method
    nseedchains = 10*len(earm.parameters_rules())
    step = pm.Dream(variables=[model.params], nseedchains=nseedchains, blocked=True, multitry=5, start_random=False, save_history=True, parallel=False, adapt_crossover=False, history_file='2015_04_30_earm_embedded_mtdreamzs_normal_prior_history.npy', crossover_file='2015_04_18_earm_embedded_mtdreamzs_normal_prior_crossovervals.npy')

    # Resume sampling from the previously saved trace.
    old_trace = text.load('2015_04_30_earm_embedded_mtdreamzs_normal_prior')
    trace = pm.sample(15000, step, njobs=3, trace=old_trace, use_mpi=False) #pass njobs=None to start multiple chains on different cpus

    text.dump('2015_05_01_earm_embedded_mtdreamzs_normal_prior', trace)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('2015_05_01_earm_embedded_mtdreamzs_normal_prior.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)

    from helper_fxns import convert_param_vec_dict_to_param_dict
    from helper_fxns import merge_traces
    from helper_fxns import print_convergence_summary

    # Open the old-trace pickle in binary mode ('rb') and close it afterwards;
    # the original text-mode open breaks on Python 3 and leaked the handle.
    with open('2015_04_30_earm_embedded_mtdreamzs_normal_prior_merged_traces_80000.p', 'rb') as old_file:
        old_traces = pickle.load(old_file)
    trace_list = [old_traces, dictionary_to_pickle]
Beispiel #5
0
    # Tail of a likelihood function cut off above this chunk: return the
    # accumulated log-likelihood.
    return log_L

with pm.Model() as model:

    # Improper flat prior over the 2-D parameter vector.
    params = pm.Flat('params', shape=(2))

    #log_like = likelihood(model.x, model.y)
    like = pm.ArbLikelihood('like', likelihood(model.params))
    #like = pm.Potential('like', likelihood(model.params))

    step = pm.Dream_mpi(blocked=True, start_random=False, save_history=True, parallel=True, history_file='ndim_banana_seed.npy', multitry=5)

    # NOTE(review): the start dicts use keys 'x'/'y' but the only free
    # variable declared here is 'params' — confirm these names match.
    start = [{'x':m[chain][0], 'y':m[chain][1]} for chain in range(3)]

    trace = pm.sample(25000, step, start=start, njobs=3, use_mpi=True)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    text.dump('2015_04_22_ndim_banana_mtdreamzs', trace)

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('2015_04_22_ndim_banana_mtdreamzs.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)

    
    


Beispiel #6
0
    
    erl_like = pm.ArbLikelihood('erl_output', erl)
    no_erl_like = pm.ArbLikelihood('no_erl_output', no_erl)    
    
    erl = pm.Deterministic('erl', erl)
    no_erl = pm.Deterministic('no_erl', no_erl)

    nseedchains = 10*len(egfr.parameters_rules())

    step = pm.Dream_mpi(variables=[model.params], nseedchains=nseedchains, blocked=True, multitry=5, start_random=True, save_history=True, parallel=False, adapt_crossover=True)
    
    #old_trace = text.load('2015_04_29_earm_direct_mtdreamzs_normal_prior')
    trace = pm.sample(100, step, njobs=5, use_mpi=True) #pass njobs=None to start multiple chains on different cpus
    
    if rank == 0:
        text.dump('2015_06_07_egfr_qualitative_calibration', trace)    
    
        dictionary_to_pickle = {}

        for dictionary in trace:
            for var in dictionary:
                dictionary_to_pickle[var] = trace[var] 
    
        pickle.dump(dictionary_to_pickle, open('2015_06_07_egfr_qualitative_calibration.p', 'wb'))
    
        from helper_fxns import convert_param_vec_dict_to_param_dict
        from helper_fxns import merge_traces
        from helper_fxns import print_convergence_summary

        #old_traces = pickle.load(open('2015_04_29_earm_direct_mtdreamzs_normal_prior_merged_traces_80000.p'))
        #trace_list = [old_traces, dictionary_to_pickle]
    # Track the simulated trajectories as deterministic variables so they are
    # stored with the trace.
    icrp = pm.Deterministic('icrp', icrp)
    ecrp = pm.Deterministic('ecrp', ecrp)
    momp = pm.Deterministic('momp', momp)
    #error_like = pm.ArbLikelihood('like', error)

    #Select point in parameter space to start
    #start = pm.find_MAP()

    #Select stepping method
    nseedchains = 10*len(earm.parameters_rules())
    step = pm.Dream(variables=[model.params], nseedchains=nseedchains, blocked=True, multitry=5, start_random=False, save_history=True, parallel=False, adapt_crossover=False, history_file='2015_04_30_earm_direct_mtdreamzs_normal_prior_history.npy', crossover_file='2015_04_18_earm_direct_mtdreamzs_normal_prior_crossovervals.npy')

    # Resume sampling from the previously saved trace.
    old_trace = text.load('2015_04_30_earm_direct_mtdreamzs_normal_prior')
    trace = pm.sample(15000, step, njobs=3, trace=old_trace, use_mpi=False) #pass njobs=None to start multiple chains on different cpus

    text.dump('2015_05_01_earm_direct_mtdreamzs_normal_prior', trace)
    #text.dump('test', trace)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('2015_05_01_earm_direct_mtdreamzs_normal_prior.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)
    #pickle.dump(dictionary_to_pickle, open('test.p', 'wb'))

    from helper_fxns import convert_param_vec_dict_to_param_dict
    from helper_fxns import merge_traces
    from helper_fxns import print_convergence_summary
    
    #Select point in parameter space to start
    # Jitter the nominal values to get three distinct chain starting points,
    # resampling any point that falls outside the prior bounds. Note `pt` is
    # a row VIEW of starting_pts, so assigning starting_pts[npt] updates the
    # loop condition in place and the while-loop terminates.
    starting_pts = np.ones(len(starting_vals)) * np.random.randn(3, len(starting_vals)) + starting_vals
    for npt, pt in enumerate(starting_pts):
        while np.any(pt < lower_limits) or np.any(pt > upper_limits):
            starting_pts[npt] = np.ones(len(starting_vals)) * np.random.randn(1, len(starting_vals)) + starting_vals
    starts = [{'params':starting_pts[chain]} for chain in range(3)]
    # Chain 0 starts exactly at the nominal values.
    starts[0]['params'] = starting_vals
    #print 'starts: ',starts
    #Select stepping method
    nseedchains = 10*len(earm.parameters_rules())
    step = pm.Dream(variables=[model.params], nseedchains=nseedchains, verbose=True, snooker = 0, blocked=True, start_random=True, save_history=True, parallel=False, model_name='earm_syn_dreamz_3chain_a')

    #old_trace = text.load('2015_04_30_earm_direct_mtdreamzs_normal_prior')
    trace = pm.sample(100000, step, njobs=3, use_mpi=False) #pass njobs=None to start multiple chains on different cpus

    text.dump('earm_syn_dreamz_3chain_a', trace)
    #text.dump('test', trace)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('earm_syn_dreamz_3chain_a.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)
    #pickle.dump(dictionary_to_pickle, open('test.p', 'wb'))
    
#    from helper_fxns import convert_param_vec_dict_to_param_dict
#    from helper_fxns import merge_traces
#    from helper_fxns import print_convergence_summary
#    
Beispiel #9
0
 # NOTE(review): the enclosing class (and any @classmethod decorator) is not
 # visible in this chunk — presumably a unittest TestCase for the text
 # backend's dump/load round trip.
 def setUpClass(cls):
     # Dump the fixture trace with the text backend, then reload it so
     # tests can compare the round-tripped trace against the original.
     super(TestTextDumpFunction, cls).setUpClass()
     text.dump(cls.name1, cls.mtrace1)
     with cls.model:
         cls.mtrace1 = text.load(cls.name1)
    
    # Track the simulated trajectories as deterministic variables so they are
    # stored with the trace.
    icrp = pm.Deterministic('icrp', icrp)
    ecrp = pm.Deterministic('ecrp', ecrp)
    momp = pm.Deterministic('momp', momp)

    #Select point in parameter space to start
    #start = pm.find_MAP()

    #Select stepping method
    nseedchains = 10*len(earm.parameters_rules())
    step = pm.Dream(variables=[model.params], nseedchains=nseedchains, blocked=True, multitry=5, start_random=False, save_history=True, parallel=False, adapt_crossover=False)

    #old_trace = text.load('2015_04_28_earm_embedded_mtdreamzs_normal_prior')
    trace = pm.sample(150000, step, njobs=3, use_mpi=False) #pass njobs=None to start multiple chains on different cpus

    text.dump('2015_04_11_earm_indirect_mtdreamzs_normal_prior', trace)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('2015_04_11_earm_indirect_mtdreamzs_normal_prior.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)

    from helper_fxns import convert_param_vec_dict_to_param_dict
    from helper_fxns import merge_traces
    from helper_fxns import print_convergence_summary

    #old_traces = pickle.load(open('2015_04_28_earm_embedded_mtdreamzs_normal_prior_merged_traces_50000.p'))
    #trace_list = [old_traces, dictionary_to_pickle]
    #Select point in parameter space to start
    # Jitter the nominal values to get three distinct chain starting points,
    # resampling any point that falls outside the prior bounds. Note `pt` is
    # a row VIEW of starting_pts, so assigning starting_pts[npt] updates the
    # loop condition in place and the while-loop terminates.
    starting_pts = np.ones(len(starting_vals)) * np.random.randn(3, len(starting_vals)) + starting_vals
    for npt, pt in enumerate(starting_pts):
        while np.any(pt < lower_limits) or np.any(pt > upper_limits):
            starting_pts[npt] = np.ones(len(starting_vals)) * np.random.randn(1, len(starting_vals)) + starting_vals
    starts = [{'params':starting_pts[chain]} for chain in range(3)]
    # Chain 0 starts exactly at the nominal values.
    starts[0]['params'] = starting_vals
    #print 'starts: ',starts
    #Select stepping method
    nseedchains = 10*len(earm.parameters_rules())
    step = pm.Dream(variables=[model.params], nseedchains=nseedchains, verbose=True, blocked=True, start_random=False, save_history=True, parallel=False, model_name='earm_mtdreamzs_3chain_uni')

    #old_trace = text.load('2015_04_30_earm_direct_mtdreamzs_normal_prior')
    trace = pm.sample(30000, step, start=starts, njobs=3, use_mpi=False) #pass njobs=None to start multiple chains on different cpus

    text.dump('earm_mtdreamzs_3chain_uni', trace)
    #text.dump('test', trace)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('earm_mtdreamzs_3chain_uni.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)
    #pickle.dump(dictionary_to_pickle, open('test.p', 'wb'))
    
#    from helper_fxns import convert_param_vec_dict_to_param_dict
#    from helper_fxns import merge_traces
#    from helper_fxns import print_convergence_summary
#    
Beispiel #12
0
    # Tail of a likelihood function cut off above this chunk: return the
    # accumulated log-likelihood.
    return log_L

with pm.Model() as model:

    # Improper flat prior over the 100-D parameter vector.
    params = pm.Flat('params', shape=(100))

    #log_like = likelihood(model.x, model.y)
    like = pm.ArbLikelihood('like', likelihood(model.params))
    #like = pm.Potential('like', likelihood(model.params))

    step = pm.Dream_mpi(blocked=True, start_random=False, save_history=True, history_file='ndim_gaussian_seed.npy', multitry=5, parallel=True)

    # One start point per chain, taken from `m` (defined outside this chunk —
    # confirm upstream).
    start = [{'params':m[chain]} for chain in range(3)]

    trace = pm.sample(50000, step, start=start, njobs=3, use_mpi=True)

    # Flatten the per-chain trace dictionaries into one dict for pickling.
    dictionary_to_pickle = {}

    for dictionary in trace:
        for var in dictionary:
            dictionary_to_pickle[var] = trace[var]

    text.dump('2015_04_22_ndim_gaussian_mtdreamzs', trace)

    # Use a context manager so the pickle file handle is closed instead of
    # being leaked by pickle.dump(..., open(...)).
    with open('2015_04_22_ndim_gaussian_mtdreamzs.p', 'wb') as pickle_file:
        pickle.dump(dictionary_to_pickle, pickle_file)

    
    


    #error_like = pm.ArbLikelihood('like', error)

    #Select point in parameter space to start
    #start = pm.find_MAP()

    #Select stepping method
    nseedchains = 10*len(earm.parameters_rules())

    step = pm.Dream_mpi(variables=[model.params], nseedchains=nseedchains, blocked=True, multitry=5, start_random=False, save_history=True, parallel=False, adapt_crossover=False)

    #old_trace = text.load('2015_04_29_earm_direct_mtdreamzs_normal_prior')
    trace = pm.sample(150, step, njobs=3, use_mpi=True) #pass njobs=None to start multiple chains on different cpus

    # Only MPI rank 0 persists results. The original block mixed tabs and
    # spaces for indentation (a TabError on Python 3); normalized to spaces.
    if rank == 0:
        #text.dump('2015_04_30_earm_direct_mtdreamzs_normal_prior', trace)
        text.dump('test', trace)

        # Flatten the per-chain trace dictionaries into one dict for pickling.
        dictionary_to_pickle = {}

        for dictionary in trace:
            for var in dictionary:
                dictionary_to_pickle[var] = trace[var]

        #pickle.dump(dictionary_to_pickle, open('2015_04_30_earm_direct_mtdreamzs_normal_prior.p', 'wb'))
        # Use a context manager so the pickle file handle is closed instead
        # of being leaked by pickle.dump(..., open(...)).
        with open('test.p', 'wb') as pickle_file:
            pickle.dump(dictionary_to_pickle, pickle_file)

        from helper_fxns import convert_param_vec_dict_to_param_dict
        from helper_fxns import merge_traces
        from helper_fxns import print_convergence_summary

        #old_traces = pickle.load(open('2015_04_29_earm_direct_mtdreamzs_normal_prior_merged_traces_80000.p'))