def __init__(self, data_dims, nsteps=1, window_size=1, bias=False,
             linear_map=slim.Linear, nonlin=nn.GELU, hsizes=[64],
             timedelay=0, input_keys=['Yp'], linargs=dict(), name='MLP_estim'):
    """
    See base class for arguments
    """
    super().__init__(data_dims, nsteps=nsteps, window_size=window_size,
                     input_keys=input_keys, timedelay=timedelay, name=name)
    self.net = blocks.MLP(self.in_features, self.out_features, bias=bias,
                          linear_map=linear_map, nonlin=nonlin,
                          hsizes=hsizes, linargs=linargs)
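
# Usage sketch (assumptions: this __init__ belongs to an MLP-based state
# estimator class, here called MLPEstimator, whose base class derives
# self.in_features and self.out_features from data_dims; the class name and
# key shapes below are illustrative, not confirmed by this excerpt):
# data_dims = {'x0': (nx,), 'Yp': (nsteps, ny)}
# estim = MLPEstimator(data_dims, nsteps=8, input_keys=['Yp'], hsizes=[64, 64])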
def __init__(self, data_dims, input_keys=['x'], bias=False, Linear=slim.Linear,
             nonlin=nn.GELU, hsizes=[64], linargs=dict(), name='sol_map'):
    """
    Solution map for multiparametric programming problems

    :param data_dims: (dict {str: tuple of ints}) Data structure describing dimensions of input variables
    :param input_keys: (List of str) List of input variable names
    :param bias: (bool) Whether to use bias in MLP
    :param Linear: (class) slim.Linear class for subcomponents
    :param nonlin: (class) PyTorch elementwise activation function class for subcomponents
    :param hsizes: (List of int) Sizes of hidden layers in MLP
    :param linargs: (dict) Arguments for instantiating linear layers
    :param name: (str) Name for tracking output of module
    """
    super().__init__()
    check_keys(set(input_keys), set(data_dims.keys()))
    self.name = name
    self.input_keys = input_keys
    data_dims_in = {k: v for k, v in data_dims.items() if k in input_keys}
    self.input_size = sum(v[-1] for v in data_dims_in.values())
    self.output_size = data_dims['z'][-1]
    self.net = blocks.MLP(insize=self.input_size, outsize=self.output_size,
                          bias=bias, Linear=Linear, nonlin=nonlin,
                          hsizes=hsizes, linargs=linargs)
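
# Usage sketch (assuming this __init__ belongs to a solution-map class, here
# called SolutionMap; the class name is illustrative). With the data_dims
# below, input_size = 5 (last dim of 'x') and output_size = 3 (last dim of 'z'):
# data_dims = {'x': (100, 5), 'z': (100, 3)}
# sol_map = SolutionMap(data_dims, input_keys=['x'], hsizes=[64, 64])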
def __init__(self, data_dims, nsteps=1, bias=False, Linear=slim.Linear,
             nonlin=nn.GELU, hsizes=[64], input_keys=['x0'], linargs=dict(),
             name='MLP_policy'):
    """
    See LinearPolicy for arguments
    """
    super().__init__(data_dims, nsteps=nsteps, input_keys=input_keys, name=name)
    self.net = blocks.MLP(insize=self.in_features, outsize=self.out_features,
                          bias=bias, Linear=Linear, nonlin=nonlin,
                          hsizes=hsizes, linargs=linargs)
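
# Usage sketch (assuming an MLPPolicy class wraps this __init__ and its base
# class derives in_features/out_features from data_dims; class name, key
# names, and shapes are assumptions). A policy maps the initial state x0 to
# an nsteps-long control sequence:
# data_dims = {'x0': (nx,), 'U': (nsteps, nu)}
# policy = MLPPolicy(data_dims, nsteps=8, input_keys=['x0'], hsizes=[64, 64])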
bias = False
J_vars = []
Norms = {}
# nlayers and real come from the enclosing scope (not shown in this excerpt)
for linmap in linmaps:
    for (sigmin, sigmax) in sigma_min_max:
        for act_name, act in activations.items():
            combo_string = f"{linmap}_x{nx}_({sigmin},{sigmax})_{act_name}_{nlayers}l_{'real' if real else 'complex'}"
            torch.manual_seed(SEED)
            np.random.seed(SEED)
            fx = blocks.MLP(
                nx,
                nx,
                nonlin=act,
                linear_map=slim.linear.maps[linmap],
                hsizes=[nx] * nlayers,
                bias=bias,
                linargs={
                    "sigma_min": sigmin,
                    "sigma_max": sigmax,
                    "real": real,
                },
            )
            plot_phase_portrait_hidim(
                fx, nx, limits=(-6, 6), nsamples=1000,
                fname=os.path.join(outdir, f"phase_plot_{combo_string}.png"))
            eigvals = plot_eigenvalues_set(
            A_stars += [Astar.detach().cpu().numpy()]
        eigvals = compute_eigenvalues(A_stars)
        samples += [(A_stars, eigvals)]
    return samples


if __name__ == "__main__":
    print("Gershgorin: Sampling trajectories from random data...")
    linmap = slim.linear.maps["gershgorin"]
    fx = blocks.MLP(
        2,
        2,
        bias=False,
        Linear=linmap,
        nonlin=torch.nn.Sigmoid,
        hsizes=[2] * 10,
        linargs=dict(sigma_min=0.9, sigma_max=1.0, real=False),
    )
    samples = _sample_random_trajectories(fx, nsamples=1)
    for i, (A_stars, eigvals) in enumerate(samples):
        plot_Astar_anim(A_stars, eigvals, fname=f"gershgorin_random_sample_{i}.mp4")

    CSTR_MODEL_PATH = "neuromancer/train_scripts/L4DC_paper/models/cstr_model.pth"
    TANK_MODEL_PATH = "neuromancer/train_scripts/L4DC_paper/models/tank_model.pth"

    # CSTR A* visualizations
import pandas as pd

from mk_plots import (
    plot_eigenvalues_set,
    plot_phase_portrait_hidim,
    plot_singular_values,
    plot_Jacobian_norms,
)


if __name__ == "__main__":
    SEED = 410
    nx = 64
    linmap = slim.linear.maps["damp_skew_symmetric"]
    # nonlin = torch.nn.ReLU
    nonlin = torch.nn.Identity
    fx = blocks.MLP(
        nx,
        nx,
        bias=False,
        linear_map=linmap,
        nonlin=nonlin,
        hsizes=[nx] * 4,
        linargs=dict(sigma_min=0.5, sigma_max=1.0, real=False),
    )
    plot_singular_values(fx, nx)
    plot_eigenvalues_set(fx, nx)
    # plot_phase_portrait_hidim(fx, nx, limits=(-6, 6))
    _, _, _, _ = plot_Jacobian_norms(fx, nx, limits=(-6, 6))
    plt.show()

    # inspect the product of the first two layers' effective weights
    Astars_np_1 = fx.linear[0].effective_W().detach().numpy()
    Astars_np_2 = fx.linear[1].effective_W().detach().numpy()
    # Astars_np = np.dot(Astars_np_1, Astars_np_2)
    Astars_np = np.matmul(Astars_np_1, Astars_np_2)
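
    # Sketch: check the spectral radius of the two-layer weight product against
    # the per-layer (sigma_min, sigma_max) = (0.5, 1.0) bounds set above (for
    # the product this is a heuristic expectation, not a guarantee).
    eigs = np.linalg.eigvals(Astars_np)
    print("spectral radius of W0 @ W1:", np.abs(eigs).max())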
def lin_regions(nx, layers, maps, activations, outdir="./plots_region"):
    for nlayers in layers:
        for (linmap, sigmin, sigmax, real) in maps:
            torch.manual_seed(408)
            np.random.seed(408)
            fx = blocks.MLP(
                nx,
                nx,
                nonlin=nn.Identity,
                linear_map=slim.linear.maps[linmap],
                hsizes=[nx] * nlayers,
                bias=False,
                linargs={
                    "sigma_min": sigmin,
                    "sigma_max": sigmax,
                    "real": real,
                },
            )
            for (act_name, act) in activations:
                fx.nonlin = nn.ModuleList([act() for _ in fx.nonlin])
                for bias in [False]:
                    combo_string = f"{linmap}_x{nx}_({sigmin},{sigmax})_{act_name}_{nlayers}l_{'real' if real else 'complex'}{'_bias' if bias else ''}"
                    print(combo_string)
                    if nx == 2:
                        plot_astar_phase_portrait(
                            fx,
                            x_lims=(-6, 6),
                            y_lims=(-6, 6),
                            step=0.5,
                            use_bias=bias,
                            initial_states=initial_states,
                            fname=os.path.join(outdir, f"phase_{combo_string}.png"),
                        )
                        grid_x, grid_y = torch.meshgrid(
                            torch.arange(-6, 6.1, 0.1),
                            torch.arange(-6, 6.1, 0.1),
                        )
                        X = torch.stack((grid_x.flatten(), grid_y.flatten())).T
                    else:
                        X = torch.arange(-6, 6.1, 0.1).unsqueeze(-1).expand(-1, nx)

                    Astars, Astar_b, _, _, _ = lpv_batched(fx, X)
                    Astars = Astars.detach().numpy()

                    # NOTE: the region plots below assume nx == 2
                    # (grid_x is only defined in that branch)

                    # plot A* norms over the sampled grid
                    Anorms = compute_norms(Astars)
                    Anorm_mat = np.reshape(Anorms, grid_x.shape)
                    fig1, ax1 = plt.subplots()
                    im1 = ax1.imshow(
                        Anorm_mat,
                        vmin=abs(Anorm_mat).min(),
                        vmax=abs(Anorm_mat).max(),
                        cmap=PALETTE,
                        origin='lower',
                        extent=[X.min(), X.max(), X.min(), X.max()],
                        # interpolation="bilinear",
                    )
                    fig1.colorbar(im1, ax=ax1)
                    im1.set_clim(0., 2.)
                    # ax1.set_title('Metric: 'r'$\Vert A^* \Vert$')
                    fname1 = os.path.join(outdir, f"norm_region_{combo_string}.pdf")
                    plt.savefig(fname1)

                    # plot dominant eigenvalues
                    eigvals = compute_eigenvalues(Astars)
                    dom_eigs = [np.absolute(eigs).max() for eigs in eigvals]
                    dom_eigs_mat = np.reshape(dom_eigs, grid_x.shape)
                    fig2, ax2 = plt.subplots()
                    im2 = ax2.imshow(
                        dom_eigs_mat,
                        vmin=0,
                        vmax=2,
                        cmap=PALETTE,
                        origin='lower',
                        extent=[X.min(), X.max(), X.min(), X.max()],
                        # interpolation="bilinear",
                    )
                    fig2.colorbar(im2, ax=ax2)
                    im2.set_clim(0., 2.)
                    # ax2.set_title('Metric: 'r'$\| \lambda_1 \|$')
                    fname2 = os.path.join(outdir, f"dom_eig_region_{combo_string}.pdf")
                    plt.savefig(fname2)

                    # plot sum of absolute eigenvalues
                    sum_eigs = [np.absolute(eigs).sum() for eigs in eigvals]
                    sum_eigs_mat = np.reshape(sum_eigs, grid_x.shape)
                    fig3, ax3 = plt.subplots()
                    im3 = ax3.imshow(
                        sum_eigs_mat,
                        vmin=abs(sum_eigs_mat).min(),
                        vmax=abs(sum_eigs_mat).max(),
                        cmap=PALETTE,
                        origin='lower',
                        extent=[X.min(), X.max(), X.min(), X.max()],
                        # interpolation="bilinear",
                    )
                    # im3 = ax3.imshow(sum_eigs_mat, vmin=0, vmax=0.3,
                    #                  cmap=plt.cm.CMRmap, origin='lower',
                    #                  extent=[X.min(), X.max(), X.min(), X.max()],
                    #                  interpolation="bilinear")
                    fig3.colorbar(im3, ax=ax3)
                    im3.set_clim(0., 2.)
                    # ax3.set_title('Metric: 'r'$\sum_{i=1}^n{\| \lambda_i \|}$')
                    fname3 = os.path.join(outdir, f"sum_eig_region_{combo_string}.pdf")
                    plt.savefig(fname3)

                    plt.close('all')
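
if __name__ == "__main__":
    # Usage sketch: argument structure inferred from the loops above; maps
    # entries are (linmap_name, sigma_min, sigma_max, real) and activations
    # entries are (name, activation_class). Relies on module-level helpers
    # (lpv_batched, compute_norms, compute_eigenvalues, PALETTE,
    # initial_states) defined elsewhere in this script.
    lin_regions(
        nx=2,
        layers=[4],
        maps=[("gershgorin", 0.9, 1.0, False)],
        activations=[("relu", nn.ReLU)],
    )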
    return Astar, Astar_b, bstar, Aprime_mats, Aprime_b_mats, bprimes


if __name__ == "__main__":
    import time

    torch.manual_seed(2)
    np.random.seed(2)
    nx = 3
    test_bias = True

    # random feature point
    x_z = torch.randn(1, nx)

    # define single-layer square neural net
    fx_layer = blocks.MLP(nx, nx, nonlin=nn.ReLU, hsizes=[], bias=test_bias)

    # verify linear operations on MLP layers;
    # the explicit affine map should match fx_layer.linear[0](x_z)
    fx_layer.linear[0](x_z)
    torch.matmul(x_z, fx_layer.linear[0].effective_W()) + fx_layer.linear[0].bias

    # verify single-layer linear parameter-varying form
    lpv(fx_layer, torch.randn(1, nx))

    # define square neural net
    fx = blocks.MLP(nx, nx, nonlin=nn.ReLU, hsizes=[nx, nx, nx], bias=test_bias)

    if test_bias:
        for i in range(nx):
        is_pos_def,
        extent=extent,
        # cmap=PALETTE,
    )
    ax[2].imshow(
        V_star_grid * is_pos_def,
        extent=extent,
        # cmap=PALETTE,
    )
    ax[3].imshow(Astar_std, extent=extent)
    fig.colorbar(im1, ax=ax)
    return [ax]


if __name__ == "__main__":
    import slim
    from neuromancer import blocks

    linmap = slim.linear.maps["gershgorin"]
    fx = blocks.MLP(
        2,
        2,
        bias=False,
        linear_map=linmap,
        nonlin=torch.nn.ReLU,
        hsizes=[2] * 2,
        linargs=dict(sigma_min=1.1, sigma_max=1.2, real=False),
    )
    plot(fx, t=10, step=1.)
    plt.show()
if __name__ == '__main__':
    nx, ny, nu, nd = 15, 7, 5, 3
    N = 10
    samples = 100
    # Data format: sequences are (N, samples, dim); the initial state x0 is (samples, nx)
    x = torch.rand(samples, nx)
    U = torch.rand(N, samples, nu)
    D = torch.rand(N, samples, nd)
    Y = torch.rand(N, samples, ny)
    data = {'x0': x, 'Uf': U, 'Df': D, 'Yf': Y}
    datadims = {'x0': (nx,), 'Uf': (N, nu), 'Df': (N, nd), 'Yf': (N, ny)}

    # block SSM
    fx, fu, fd = [
        blocks.MLP(insize, nx, hsizes=[64, 64, 64])
        for insize in [nx, nu, nd]
    ]
    fy = blocks.MLP(nx, ny, hsizes=[64, 64, 64])
    model = BlockSSM(fx, fy, fu, fd)
    output = model(data)

    # black box SSM
    fxud = blocks.MLP(nx + nu + nd, nx, hsizes=[64, 64, 64])
    fy = slim.Linear(nx, ny)
    model = BlackSSM(fxud, fy)
    output = model(data)

    fxud = blocks.RNN(nx + nu + nd, nx, hsizes=[64, 64, 64])
    model = BlackSSM(fxud, fy)
    output = model(data)

    data = {'x0_new': x, 'Uf': U, 'Df': D, 'Yf_fresh': Y}
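    # The remapped dict above presumably feeds a key-remapping test; a sketch
    # assuming the SSM classes accept an input_key_map argument (an assumption,
    # not shown in this excerpt):
    # model = BlackSSM(fxud, fy, input_key_map={'x0': 'x0_new', 'Yf': 'Yf_fresh'})
    # output = model(data)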
def phase_and_spectra_plot_loop(nx, layers, maps, activations, outdir="plots_20201117"):
    for nlayers in layers:
        for (linmap, sigmin, sigmax, real) in maps:
            torch.manual_seed(408)
            np.random.seed(408)
            fx = blocks.MLP(
                nx,
                nx,
                nonlin=nn.Identity,
                linear_map=slim.linear.maps[linmap],
                hsizes=[nx] * nlayers,
                bias=True,
                linargs={
                    "sigma_min": sigmin,
                    "sigma_max": sigmax,
                    "real": real,
                },
            )
            for (act_name, act) in activations:
                fx.nonlin = nn.ModuleList([act() for _ in fx.nonlin])
                for bias in [True, False]:
                    combo_string = f"{linmap}_x{nx}_({sigmin},{sigmax})_{act_name}_{nlayers}l_{'real' if real else 'complex'}{'_bias' if bias else ''}"
                    print(combo_string)
                    # skip combos whose plots already exist on disk
                    if not os.path.exists(os.path.join(outdir, f"phase_{combo_string}.png")):
                        if nx == 2:
                            plot_astar_phase_portrait(
                                fx,
                                x_lims=(-6, 6),
                                y_lims=(-6, 6),
                                step=0.5,
                                use_bias=bias,
                                initial_states=initial_states,
                                fname=os.path.join(outdir, f"phase_{combo_string}.png"),
                            )
                    if not os.path.exists(os.path.join(outdir, f"spectrum_{combo_string}.png")):
                        if nx == 2:
                            grid_x, grid_y = torch.meshgrid(
                                torch.arange(-6, 6, 0.5),
                                torch.arange(-6, 6, 0.5),
                            )
                            X = torch.stack((grid_x.flatten(), grid_y.flatten())).T
                        else:
                            X = torch.arange(-6, 6, 0.5).unsqueeze(-1).expand(-1, nx)
                        Astars = []
                        for x in X:
                            Astar, Astar_b, *_ = lpv(fx, x)
                            Astars += [
                                Astar_b.detach().cpu().numpy() if bias
                                else Astar.detach().cpu().numpy()
                            ]
                        eigvals = compute_eigenvalues(Astars)
                        plot_eigenvalues(eigvals, fname=os.path.join(outdir, f"spectrum_{combo_string}.png"))
                        # plot_matrix_eigval_anim(Astars, eigvals, fname=os.path.join(outdir, f"spectrum_{combo_string}.mp4"))
                    plt.close('all')