def analyze_traverse_sparsity(dirname_in):
    """Load the sparsity-traversal results and the initial system saved in
    *dirname_in*, then hand both to the plotting routine."""
    sparsity_data = pickle_import(os.path.join(dirname_in, 'sparsity_data.pickle'))
    SS0 = pickle_import(os.path.join(dirname_in, 'system_init.pickle'))
    plot_sparsity_data(SS0, sparsity_data, dirname_in)
def routine_load():
    """Re-plot a previously saved Monte Carlo run (backtracking example)."""
    dirname_in = os.path.join(
        'example_systems',
        '1587086073p9661696_example_network_all_steps_backtrack')
    all_dict = pickle_import(os.path.join(dirname_in, 'monte_carlo_all_dict.pickle'))
    plot_data(all_dict, dirname_in)
def routine_load():
    """Re-plot a previously saved Monte Carlo run from systems_keepers."""
    dirname_in = os.path.join(
        'systems_keepers',
        '1558543320p7456546_example_random_monte_carlo')
    all_dict = pickle_import(os.path.join(dirname_in, 'monte_carlo_all_dict.pickle'))
    plot_data(all_dict, dirname_in)
def load_system(folderstr, timestr):
    """Load a pickled system from *folderstr/timestr* and immediately
    re-export it under a fresh time-stamped directory in 'systems'.

    Returns the loaded system with its ``dirname`` attribute pointing at
    the new export directory.
    """
    basename = 'system_init.pickle'
    # Import the saved system
    SS = pickle_import(os.path.join(folderstr, timestr, basename))
    # Export under a new directory named after the current time
    stamp = str(time()).replace('.', 'p')
    dirname_out = os.path.join('systems', stamp)
    SS.dirname = dirname_out
    pickle_export(dirname_out, os.path.join(dirname_out, basename), SS)
    return SS
def compare_optmethods(dirname_in):
    """Collect sparsity results from per-method subfolders of *dirname_in*
    and plot them against the initial system."""
    SS0 = pickle_import(os.path.join(dirname_in, 'system_init.pickle'))
    # Other previously-used choices: 'gradient', 'subgradient'
    optiongroup_list = ['proximal_gradient']
    sparsity_data_all = {
        optiongroup: pickle_import(
            os.path.join(dirname_in, optiongroup, 'sparsity_data.pickle'))
        for optiongroup in optiongroup_list
    }
    plot_comparisons(SS0, sparsity_data_all, dirname_in)
def routine_load():
    """Reload the suspension-model example, build a twin with zeroed noise
    variances, and plot the stored cost-history data for both."""
    folderstr = 'example_systems'
    timestr = '1558459899p686552_example_suspension_model_known'
    dirname_in = os.path.join(folderstr, timestr)
    SS1 = load_system(folderstr, timestr)
    # Twin system with the multiplicative-noise variance terms zeroed out
    SS2 = copy(SS1)
    SS2.set_a(np.zeros_like(SS2.a))
    SS2.set_b(np.zeros_like(SS2.b))
    chist_data = pickle_import(os.path.join(dirname_in, 'chist_data.pickle'))
    plot_results(SS1, SS2, chist_data, dirname_in)
# --- Imports ---------------------------------------------------------------
import sys
sys.path.append("..")  # make sibling project modules importable
from matrixmath import specrad, vec
from ltimult import dare_mult
from system_identification import generate_sample_data, collect_rollouts, estimate_model, estimate_model_var_only, ctrb
from pickle_io import pickle_import, pickle_export
# NOTE(review): `os` and `np` are used below but not imported in this
# excerpt — presumably imported elsewhere in the file; verify.

# Load same system used in pol-grad experiments
folderstr = os.path.join('..', 'example_systems')
timestr = '1587086073p9661696_example_network_all_steps_sysid'
dirname_in = os.path.join(folderstr, timestr)
filename_in = os.path.join(dirname_in, 'system_init.pickle')
SS = pickle_import(filename_in)

# Get the true minimum cost
true_min_cost = SS.ccare

# Reprocess system parameters to match format used in sys-id code
n = np.copy(SS.n)          # state dimension
m = np.copy(SS.m)          # input dimension
A = np.copy(SS.A)          # nominal state matrix
B = np.copy(SS.B)          # nominal input matrix
varAi = np.copy(SS.a)      # multiplicative-noise variances for A directions
varBj = np.copy(SS.b)      # multiplicative-noise variances for B directions
Aa = SS.Aa                 # noise direction matrices (not copied)
Bb = SS.Bb
def routine_gen():
    """Run a Monte Carlo comparison of policy-gradient step directions on a
    saved example system, collect normalized cost/gradient histories, export
    them, and plot the results.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; the final export is presumed to occur after all nSS runs
    complete — confirm against the original file.
    """
    # folderstr = 'systems'
    # timestr = str(time()).replace('.','p')
    # dirname_in = os.path.join(folderstr,timestr)
    # create_directory(dirname_in)
    nSS = 20 # Number of independent runs
    # Settings for backtracking line search
    stepsize_method = 'backtrack'
    nr = 100000
    # Per-method settings: step size eta, iteration cap, and whether the
    # gradient is computed exactly or estimated from rollouts.
    PGO_dict = {'gradient_model_free': {'eta': 1e-1, 'max_iters': 20, 'exact': False},
                'gradient': {'eta': 1e-1, 'max_iters': 20, 'exact': True},
                'natural_gradient': {'eta': 1e-1, 'max_iters': 20, 'exact': True},
                'gauss_newton': {'eta': 1/2, 'max_iters': 20, 'exact': True}}
    all_dict = {key: {'costnorm':[],'gradnorm':[]} for key in PGO_dict.keys()}
    # Generate system from scratch
    seed = 1
    # SS = gen_system_erdos_renyi(n=4,
    #                             diffusion_constant=1.0,
    #                             leakiness_constant=0.1,
    #                             time_constant=0.05,
    #                             leaky=True,
    #                             seed=seed)
    # SS = gen_system_erdos_renyi(n=2,
    #                             diffusion_constant=1.0,
    #                             leakiness_constant=0.1,
    #                             time_constant=0.05,
    #                             leaky=True,
    #                             seed=seed)
    # Load system
    folderstr = 'example_systems'
    timestr = '1587086073p9661696_example_network_all_steps_backtrack'
    dirname_in = os.path.join(folderstr, timestr)
    filename_in = os.path.join(dirname_in, 'system_init.pickle')
    SS = pickle_import(filename_in)
    for i in range(nSS):
        # Policy gradient setup: fresh initial gains for every run
        K0_method = 'zero'
        K0 = set_initial_gains(SS,K0_method=K0_method)
        sleep(0.5)
        for step_direction in PGO_dict:
            # Reset to the common initial gains so methods are comparable
            SS.setK(K0)
            t_start = time()
            eta = PGO_dict[step_direction]['eta']
            max_iters = PGO_dict[step_direction]['max_iters']
            exact = PGO_dict[step_direction]['exact']
            PGO = policy_gradient_setup(SS, eta, step_direction, max_iters, exact, stepsize_method, nr)
            t_end = time()
            print('Initialization completed after %.3f seconds' % (t_end-t_start))
            SS, histlist = run_policy_gradient(SS,PGO)
            # Cost normalized against the optimal (CARE) cost, minus one
            costnorm = (histlist[2]/SS.ccare)-1
            # Frobenius norm of the gradient history at each iteration
            gradnorm = la.norm(histlist[1], ord='fro', axis=(0,1))
            all_dict[step_direction]['costnorm'].append(costnorm)
            all_dict[step_direction]['gradnorm'].append(gradnorm)
    # Export the collected data and plot it
    filename_out = 'monte_carlo_all_dict.pickle'
    path_out = os.path.join(dirname_in,filename_out)
    pickle_export(dirname_in,path_out,all_dict)
    plot_data(all_dict,dirname_in)
# NOTE(review): `folderstr_list` is created before this excerpt begins, and
# the computation appears to continue past its end (the *_angle arrays are
# allocated but not filled here) — confirm against the full file.
# Result folders for runs with increasing rollout counts (1e4 .. 1e8).
folderstr_list.append("1564554983p5677059_1e4")
folderstr_list.append("1564555001p5515425_1e5")
folderstr_list.append("1564555047p6032026_1e6")
folderstr_list.append("1564555255p6612067_1e7")
folderstr_list.append("1564525514p9662921_1e8")
nr_list = [1e4,1e5,1e6,1e7,1e8]  # rollout counts matching the folders above
N = len(nr_list)
data_noiseless = []
data_noisy = []
# Load the saved estimation results from each run folder
for i,folderstr in enumerate(folderstr_list):
    dirname_in = folderstr
    filename = 'data_noiseless.pickle'
    filename_in = os.path.join(dirname_in,filename)
    data_noiseless.append(pickle_import(filename_in))
    filename = 'data_noisy.pickle'
    filename_in = os.path.join(dirname_in,filename)
    data_noisy.append(pickle_import(filename_in))
# Plotting
mean_error_norm_noiseless = np.zeros(N)
mean_error_norm_noisy = np.zeros(N)
mean_error_angle_noiseless = np.zeros(N)
mean_error_angle_noisy = np.zeros(N)
for i in range(N):
    # Mean estimation error (index 4 of the data tuple), normalized by the
    # norm of the reference quantity from the first run (index 0).
    # NOTE(review): tuple layout assumed from indexing — verify against the
    # code that wrote these pickles.
    mean_error_norm_noiseless[i] = np.mean(data_noiseless[i][4])/la.norm(data_noiseless[0][0])
    mean_error_norm_noisy[i] = np.mean(data_noisy[i][4])/la.norm(data_noisy[0][0])