def load_inference(loglikelihood, working_path, *param_data):
    """
    Load previous simulation results.

    :param loglikelihood: loglikelihood data of the inference
    :param working_path: where to print the results
    :param param_data: one list per parameter, each containing
        [name, range_min, range_max, resolution, mean, sigma, value]
    :return: inference object ready to evaluate
    """
    p = []
    for item in param_data:
        p.append(RandomVariable(name=item[0], range_min=float(item[1]), range_max=float(item[2]),
                                resolution=float(item[3]), mean=float(item[4]), sigma=float(item[5]),
                                value=float(item[6])))

    pset = ParameterSet(*p)
    res = Analyse(loglikelihood, pset, working_path)
    print("Previous inference data result loaded!")

    return res
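
# Example usage -- a minimal sketch, not part of the original module: the file name and
# the numeric values below are hypothetical and only illustrate the expected
# [name, range_min, range_max, resolution, mean, sigma, value] layout of each param_data entry.
#
#   loglikelihood = np.load("loglikelihood.npy")
#   analysis = load_inference(loglikelihood, "/path/to/working_dir",
#                             ["cm", 0.5, 1.5, 60, 1.2, 0.2, 1.0],
#                             ["gpas", 0.00005, 0.00015, 60, 0.00008, 0.00002, 0.0001])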
def load_parameter_set(params_data):
    """
    Create a ParameterSet object from parameter data.

    :param params_data: list of parameter descriptions (save_params output):
        [param1, param2, ...] where each entry is
        [name, range_min, range_max, resolution, mean, sigma, value]
    :return: ParameterSet object
    """
    from module.probability import RandomVariable, ParameterSet

    p = []
    for item in params_data:
        p.append(RandomVariable(name=item[0], range_min=float(item[1]), range_max=float(item[2]),
                                resolution=float(item[3]), mean=float(item[4]), sigma=float(item[5]),
                                value=float(item[6])))

    p_set = ParameterSet(*p)

    return p_set
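
# Example usage -- a sketch (not from the original module): params_data is assumed to carry
# the save_params layout described above, one entry per parameter; the numbers below are
# illustrative only.
#
#   params_data = [["cm", 0.5, 1.5, 60, 1.2, 0.2, 1.0],
#                  ["gpas", 0.00005, 0.00015, 60, 0.00008, 0.00002, 0.0001]]
#   pset = load_parameter_set(params_data)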
print(invcovmat.shape)

# END OF SETTING UP SIMULATION PARAMETERS -----------------------------------------------------------------------------

# Set up parameters using prior information about them (fix the range in which we assume the true parameter lies)
prior_params = []
for idx, item in enumerate(p_names):
    prior_params.append(RandomVariable(name=item, range_min=p_range[idx][0], range_max=p_range[idx][1],
                                       resolution=p_res[idx], sigma=p_std[idx], mean=p_mean[idx]))

prior_set = ParameterSet(*prior_params)
prior_set.batch_len = batch_size
if batch_size is not None:
    prior_set.isBatch = True
else:
    prior_set.isBatch = False
prior_set.create_batch()

# Create fixed params sampled from the prior
fixed_params = sampling_from_prior(prior_set, fixed_param_num)

# Save parameter information: create a database for the data
database = tb.open_file("/Users/Dani/TDK/parameter_estim/stim_protocol2/comb_colored_srsoma-rdend_gpas-dens/paramsetup.hdf5",
                        mode="w")
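
# One possible way to store the sampled parameter vectors in the file just opened -- a sketch
# only, not the original saving code: it assumes each element of fixed_params maps parameter
# names to sampled values, and the node name "fixed_params" is hypothetical.
#
#   param_matrix = np.array([[sample[name] for name in p_names] for sample in fixed_params])
#   database.create_array("/", "fixed_params", obj=param_matrix,
#                         title="Parameter vectors sampled from the prior")
#   database.close()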
                    mean=current_Ra, sigma=pRa.sigma)
gpas = RandomVariable(name='gpas', range_min=gpas_start, range_max=gpas_end, resolution=40,
                      mean=current_gpas, sigma=pgpas.sigma)
cm = RandomVariable(name='cm', range_min=cm_start, range_max=cm_end, resolution=40,
                    mean=current_cm, sigma=pcm.sigma)

Ra_cm_gpas = ParameterSet(Ra, cm, gpas)
inference = IndependentInference(data, Ra_cm_gpas,
                                 working_path="/Users/Dani/TDK/parameter_estim/stim_protocol2/ramp", speed='min')

multi_comp = partial(stick_and_ball, stype='custom', custom_stim=stim)  # fix chosen stimulus type for simulations

if __name__ == '__main__':
    inference.run_sim(multi_comp, noise_sigma)

inference.run_evaluation()
# Set up random variables
Ra = RandomVariable(name='Ra', range_min=Ra_start, range_max=Ra_end, resolution=80,
                    mean=current_Ra, sigma=pRa.sigma)
gpas = RandomVariable(name='gpas', range_min=gpas_start, range_max=gpas_end, resolution=80,
                      mean=current_gpas, sigma=pgpas.sigma)

Ra_gpas = ParameterSet(Ra, gpas)
inference = DependentInference(data, Ra_gpas, working_path="/Users/Dani/TDK/parameter_estim/stim_protocol/mc")

multi_comp = partial(stick_and_ball, stype='broad')  # fix chosen stimulus type for simulations

if __name__ == '__main__':
    inference.run_sim(multi_comp, inv_covmat)

inference.run_evaluation()

# Do statistics for the current inference
if not isinstance(stat(Ra), str):  # stat() is assumed to return an error string on failure
"/Users/Dani/TDK/parameter_estim/stim_protocol2/zap/best_comb/%i/stim.txt" % item) working_path = "/Users/Dani/TDK/parameter_estim/stim_protocol2/zap/best_comb/%i" % item # Do statistics for each parameter stat_list = [] for _ in p_names: stat_list.append(np.empty((n, 6), dtype=np.float)) # Load fixed parameters: list of parameters to be inferred fixed_params = [] for name in p_names: fixed_params.append(get_default_param(name)) # Generate deterministic trace and create synthetic data with noise model t, v = model(stype='custom', custom_stim=stim) data = white(noise, v) pset = ParameterSet(*fixed_params) modell = partial(model, stype='custom', custom_stim=stim) inf = IndependentInference(model=modell, noise_std=noise, target_trace=data, parameter_set=pset, working_path=working_path, speed=speed) if __name__ == '__main__': inf.run_sim()
# Set up random seed
np.random.seed(42)

# Set up parameters using prior information about them (fix the range in which we assume the true parameter lies)
prior_params = []
for idx, item in enumerate(p_names):
    prior_params.append(RandomVariable(name=item, range_min=p_range[idx][0], range_max=p_range[idx][1],
                                       resolution=p_res[idx], sigma=p_std[idx], mean=p_mean[idx]))

prior_set = ParameterSet(*prior_params)

# Create fixed params sampled from the prior
fixed_params = sampling_from_prior(prior_set, fixed_param_num)

for idx, current_params in enumerate(fixed_params):
    print("\n\n---------------------------------------- %ith FIXED PARAMETER -------------------------------------" % idx)

    for param in prior_set.params:
        param.value = current_params[param.name]

    for item in hz:
        print("\n\n---------------------------------------- Running %i Hz zap protocol"
for item in hz:
    print("\n\n---------------------------------------- Running %i Hz zap protocol" % item)

    # Stimulus path
    stim = np.loadtxt("/Users/Dani/TDK/parameter_estim/stim_protocol2/zap/%i/stim.txt" % item)
    working_path = "/Users/Dani/TDK/parameter_estim/stim_protocol2/zaps/%i(%i)" % (item, i)

    # Generate deterministic trace and create synthetic data with the noise model
    _, v = model(stype='custom', custom_stim=stim,
                 Ra=current_value['Ra'], gpas=current_value['gpas'], cm=current_value['cm'])

    # Generate noise_rep synthetic data sets (noise_rep independent noise realisations)
    data = more_w_trace(noise, v, noise_rep)

    pset = ParameterSet(*current_params)
    modell = partial(model, stype='custom', custom_stim=stim)

    inf = IndependentInference(model=modell, noise_std=noise, target_trace=data, parameter_set=pset,
                               working_path=working_path)

    if __name__ == '__main__':
        inf.run_moretrace_inf()

for item in duration:
    print("\n\n---------------------------------------- Running %i ms impulse protocol" % item)

    # Stimulus path
    stim = np.loadtxt("/Users/Dani/TDK/parameter_estim/stim_protocol2/steps/%i/stim.txt" % item)
    working_path = "/Users/Dani/TDK/parameter_estim/stim_protocol2/steps/%i(%i)" % (item, i)

    # Generate deterministic trace and create synthetic data with the noise model
                    range_max=1.5, resolution=60, mean=1.2, sigma=0.2, value=1.)
gpas = RandomVariable(name='gpas', range_min=0.00005, range_max=0.00015, resolution=60,
                      mean=0.00008, sigma=0.00002, value=0.0001)
# Ra = RandomVariable(name='Ra', range_min=50., range_max=150., resolution=60, mean=100., sigma=20.)

# 2.) Set up parameter set
cm_gpas = ParameterSet(cm, gpas)

# 3.) Synthetic data
t, v = stick_and_ball()
exp_v = white(noise, v)

# 4.) Set up inference
inf = IndependentInference(model=stick_and_ball, noise_std=noise, target_trace=exp_v, parameter_set=cm_gpas,
                           working_path="/home/terbed/PROJECTS/SPE/parameter-inference/module/examples/output",
                           speed='max', save=False)
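
# 5.) Run the inference -- a sketch based on the other scripts above, not part of this
#     example as excerpted; the argument-free run_sim() call mirrors the usage where the
#     model, noise_std and target_trace are already passed to the constructor.
#
#   if __name__ == '__main__':
#       inf.run_sim()
#       inf.run_evaluation()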