Example #1
def eta_histogram(results_path, save_path, prefix, train, labels):
    """
    Histogram of samples by individual sample eta.
    """
    plt.clf()

    n_class = len(labels)
    colors = sns.cubehelix_palette(n_class, start=2, rot=0, dark=0, light=.5)

    plt.figure(figsize=(6, 6))
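    # Load the per-sample eta values from the PCA-20 results and split them by class label.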
    etas = np.array(load_results(
        results_path, f"{prefix}_pca20.json")["etas"])
    targets = train["targets"].numpy()
    etas = [etas[targets == c] for c in range(n_class)]

    for c in range(n_class):
        plt.hist(
            etas[c], bins=80, color=colors[c], alpha=0.5, label=f"{labels[c]}")
        plt.axvline(
            etas[c].mean(), color=colors[c], linestyle='dashed', linewidth=2)
    plt.xlabel("Per sample $\eta$", fontsize=30)
    plt.xticks(fontsize=26)
    plt.ylabel("Number of samples", fontsize=30)
    plt.yticks(fontsize=26)
    plt.legend()
    plotting.savefig(os.path.join(save_path, f"{prefix}_eta_hist"))
Example #2
def VisuError(Error, testcases, Savetofile=False):
    name = testcases.name
    Vals = testcases.Vals
    symbols = ('^','o', 's','D','p','H',)
    lines   = ('-', '--',':', '-.','.',)
    newfig(width=1.3)
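    # One set of curves per entry in Vals; Error[i, :, k] holds the error curve of method k plotted against M_Vec.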
    for i in range(len(Vals)):
        namestr = ', ' + '$N_s$=' + str(Vals[i])
        if name != 'SampleNum' and i ==0:
            plt.semilogy(M_Vec,Error[i,:,4], 'k-.',label='Projection'  )
            plt.semilogy(M_Vec,Error[i,:,0], 'r-.',label='POD-G      '  )
        elif name == 'SampleNum':
            plt.semilogy(M_Vec,Error[i,:,4], 'k-.'+symbols[i] ,label='Projection'+namestr  )
            plt.semilogy(M_Vec,Error[i,:,0], 'r-.'+symbols[i] ,label='POD-G'   +namestr  )
        if not( name == 'NResi' ):
            plt.semilogy(M_Vec,Error[i,:,1], 'y:'+symbols[i]  ,label='PDNN'  +namestr  )
        plt.semilogy(M_Vec,Error[i,:,2], 'g--'+symbols[i]     ,label='PINN'+namestr  )
        plt.semilogy(M_Vec,Error[i,:,3], 'g-'+symbols[i]      ,label='PRNN'+namestr  )

    plt.xlabel('$m$')
    plt.ylabel('Error')
    #plt.title(name)
    plt.legend(loc="lower left", ncol=1, handlelength=3)
    plt.show()

    if Savetofile:
        savefig("fig/ErrorComparsion_"+name)
Example #3
def view_images(train, results_path, save_path, prefix):
    """
    View the most and least leaked images.
    """
    # sort etas by index
    etas = load_results(
        results_path, f"{prefix}_pca20.json")["etas"]
    sorted_etas = sorted(
        zip(etas, range(len(etas))), key=lambda x: x[0], reverse=True)

    ims = train["features"].squeeze()
    n_ims = 8
    f, axarr = plt.subplots(2, n_ims, figsize=(7, 2.2))
    f.subplots_adjust(wspace=0.05)
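    # Top row (priv == False): samples with the largest eta (most leaked); bottom row: the smallest (least leaked).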
    for priv in [False, True]:
        for i in range(n_ims):
            ax = axarr[int(priv), i]
            idx = -(i + 1) if priv else i
            im = sorted_etas[idx][1]
            image = ims[im, ...]
            if image.ndim == 3:
                image = image.permute(1, 2, 0)
            ax.imshow(image, cmap='gray')
            ax.axis("off")
            title = "{:.1e}".format(sorted_etas[idx][0])
            ax.set_title(title, fontsize=14)
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
    plotting.savefig(os.path.join(save_path, f"{prefix}_images"))
    plt.close(f)
Example #4
def save_bigprofiles(prots, protids, unnorm_eluts, fname, hires_mult=1, **kwargs):
    import plotting as pl
    nplots = plot_bigprofiles(prots, protids, unnorm_eluts, **kwargs)
    fig = pl.gcf()
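    # Scale the figure height with the number of profile rows and proteins before saving at high resolution.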
    nprots = len(prots) if prots else len(protids)
    fig.set_size_inches(20, 4+(nplots/4)*nprots)
    pl.savefig(fname, bbox_inches='tight', dpi=200*hires_mult)
    pl.clf()
Example #5
def VisuError(Error, testcases, Savetofile=False):
    name = testcases.name
    Vals = testcases.Vals
    symbols = ('^','o', 's','D','p','H',)
    lines   = ('-', '--',':', '-.','.',)
    #plt.close('all')
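    # Choose the legend label prefix and figure width according to which parameter the test case varies.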
    if name == 'SampleNum':
        tmp = '$N_s$'
        width = 1.3
    elif name =='NetSize':
        tmp = '$n_H$'
        width = 1
    elif name == 'NResi':
        tmp = '$N_{Resi}$'
        width = 1
    newfig(width=width)
    for i in range(len(Vals)):
        namestr = ', ' + tmp+'=' + str(Vals[i])
        if name != 'SampleNum' and i ==0:
            plt.semilogy(M_Vec,Error[i,:,4], 'k-.',label='Projection'  )
            plt.semilogy(M_Vec,Error[i,:,0], 'r-.',label='POD-G      '  )
        elif name == 'SampleNum':
            plt.semilogy(M_Vec,Error[i,:,4], 'k-.'+symbols[i] ,label='Projection'+namestr  )
            plt.semilogy(M_Vec,Error[i,:,0], 'r-.'+symbols[i] ,label='POD-G'   +namestr  )
        if not( name == 'NResi' ):
            plt.semilogy(M_Vec,Error[i,:,1], 'y:'+symbols[i]  ,label='PDNN'  +namestr  )
        plt.semilogy(M_Vec,Error[i,:,2], 'g--'+symbols[i]     ,label='PINN'+namestr  )
        plt.semilogy(M_Vec,Error[i,:,3], 'g-'+symbols[i]      ,label='PRNN'+namestr  )


    if name == 'NResi':
        plt.ylim(bottom=9E-5)
    elif name == 'NetSize':
        plt.ylim(bottom=7E-5)
    plt.xlabel('$m$')
    plt.ylabel(r'Error $\varepsilon$')
    #plt.title(name)
    plt.legend(loc="lower left", ncol=1, handlelength=3)
    
    if name == 'NResi':
        plt.legend(loc="best", ncol=1, handlelength=3)
        
    plt.show()
    

    if Savetofile:
        savefig("fig/ErrorComparsion_"+name)
Example #6
    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.controller = controller
        self.x_name = tk.StringVar()
        self.y_name = tk.StringVar()
        self.x_data = tk.StringVar()
        self.y_data = tk.StringVar()
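        # Navigation/plot buttons below; PageOne.data maps each key to a pair of value tuples that fitAndPlot consumes.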

        tk.Button(self,
                  text="Back",
                  command=lambda: controller.show_frame("PageOne")).grid(
                      column=0, row=0, sticky="W")
        self.plotbutton = tk.Button(
            self,
            text="Plot",
            command=lambda: fitAndPlot(PageOne.data[self.x_data.get()], PageOne
                                       .data[self.y_data.get()]))
        PageOne.data = {
            "a": ((1, 2, 3, 4), (1, 1, 1, 1)),
            "b": ((2, 4, 5, 6), (1, 1, 1, 1))
        }
        tk.Button(self, text="more plot",
                  command=lambda: self.makePlot()).grid(column=0,
                                                        row=1,
                                                        sticky="W")
        self.plotbutton.grid(column=0, row=2, sticky="W")
        tk.Entry(self, textvariable=self.x_name).grid(column=1, row=3)
        tk.Entry(self, textvariable=self.y_name).grid(column=1, row=4)
        tk.Entry(self, textvariable=self.x_data).grid(column=1, row=5)
        tk.Entry(self, textvariable=self.y_data).grid(column=1, row=6)
        tk.Label(self, text="x Name").grid(column=0, row=3, sticky="W")
        tk.Label(self, text="y Name").grid(column=0, row=4, sticky="W")
        tk.Label(self, text="x Data").grid(column=0, row=5, sticky="W")
        tk.Label(self, text="x Data").grid(column=0, row=6, sticky="W")
        tk.Button(self, text="save",
                  command=lambda: savefig(self.fig)).grid(column=0, row=7)
        self.option_menu_y = tk.OptionMenu(self, self.x_data,
                                           *PageOne.data.keys())
        self.option_menu_y.grid(column=0, row=8)

        self.variable = tk.StringVar()
        PlotPage.menu_y = self.option_menu_y["menu"]
        PlotPage.menu_y.add(
            "command",
            label="test",
            command=lambda value="test": self.x_data.set("test"))
        """
        while True:
            self.option_menu_y.grid_forget()
            self.option_menu_y = tk.OptionMenu(self, self.x_data, PageOne.data.keys())
            self.option_menu_y.grid(column=0, row=8)
            t.sleep(5)
        """
        """
Example #7
def correlations(results_path, save_path, prefix):
    results = load_results(results_path, f"{prefix}_pca20.json")
    etas = np.array(results["etas"])
    n_samples = 2000
    np.random.seed(n_samples)
    samples = np.random.permutation(len(etas))[:n_samples]
    losses = np.array(results["train_losses"])
    grad_norms = np.array(results["train_grad_norms"])
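    # Scatter per-sample eta against two alternative per-sample quantities: training loss and gradient norm.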
    alternatives = [
        ("loss", "(a) Loss $\ell({\\bf w^*}^\\top {\\bf x}, y)$", losses),
        ("gradnorm", "(b) Gradient norm $\|\\nabla_{\\bf w^*} \ell\|_2$", grad_norms)]
    f, axarr = plt.subplots(1, 2, figsize=(10, 4), sharey=True)
    f.subplots_adjust(wspace=0.1)
    for e, (method, xlabel, values) in enumerate(alternatives):
        ax = axarr[e]
        ax.scatter(values[samples], etas[samples], s=2.5, color=COLOR)
        ax.set_xlabel(xlabel)
    axarr[0].set_ylabel(r"FIL $\eta$")

    plotting.savefig(os.path.join(save_path, f"{prefix}_scatter_alternatives_eta"))
    plt.clf()
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Exact $D(t,x)$', fontsize=10)

    ########     Learned d(t,x,y)     ###########
    ax = plt.subplot(gs[1:2, 1])
    h = plot_solution(t, x, d_pred, ax)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Learned $D(t,x)$', fontsize=10)

    savefig('./figures/turbulence_1D_diffusion', crop=False)

    scipy.io.savemat(
        'turbulence_1D_diffusion_results_%s.mat' % (time.strftime('%d_%m_%Y')),
        {
            't': t,
            'x': x,
            'u': u,
            'd': d,
            'u_pred': u_pred,
            'd_pred': d_pred
        })
Example #9
    ax.set_title('$t = 0.50$', fontsize = 10)
    ax.legend(loc='upper center', bbox_to_anchor=(0.5, -0.35), ncol=5, frameon=False)
    
    ax = plt.subplot(gs1[0, 2])
    ax.plot(x,Exact[75,:], 'b-', linewidth = 2, label = 'Exact')       
    ax.plot(x,U_pred[75,:], 'r--', linewidth = 2, label = 'Prediction')
    ax.set_xlabel('$x$')
    ax.set_ylabel('$u(t,x)$')
    ax.axis('square')
    ax.set_xlim([-1.1,1.1])
    ax.set_ylim([-1.1,1.1])    
    ax.set_title('$t = 0.75$', fontsize = 10)
    
    ####### Row 3: Identified PDE ##################    
    gs2 = gridspec.GridSpec(1, 3)
    gs2.update(top=1.0-2.0/3.0, bottom=0, left=0.0, right=1.0, wspace=0.0)
    
    ax = plt.subplot(gs2[:, :])
    ax.axis('off')
    s1 = r'$\begin{tabular}{ |c|c| }  \hline Correct PDE & $u_t + u u_x - 0.0031831 u_{xx} = 0$ \\  \hline Identified PDE (clean data) & '
    s2 = r'$u_t + %.5f u u_x - %.7f u_{xx} = 0$ \\  \hline ' % (lambda_1_value, lambda_2_value)
    s3 = r'Identified PDE (1\% noise) & '
    s4 = r'$u_t + %.5f u u_x - %.7f u_{xx} = 0$  \\  \hline ' % (lambda_1_value_noisy, lambda_2_value_noisy)
    s5 = r'\end{tabular}$'
    s = s1+s2+s3+s4+s5
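    # Render the assembled LaTeX table comparing the correct PDE with the PDEs identified from clean and noisy data.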
    ax.text(0.1,0.1,s)
       
    '''
    savefig('./figures/NGSIM_infer')
    print("number of loops:", Loop_Num)
    ######## Exact solution #######################
    ########      Predicted p(t,x,y)     ########### 
    gs = gridspec.GridSpec(1, 2)
    gs.update(top=0.8, bottom=0.2, left=0.1, right=0.9, wspace=0.5)
    ax = plt.subplot(gs[:, 0])
    h = ax.imshow(Exact_idn, interpolation='nearest', cmap='jet', 
                  extent=[lb_idn[0], ub_idn[0]*keep, lb_idn[1], ub_idn[1]],
                  origin='lower', aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Exact Dynamics', fontsize = 10)
    
    ######## Approximation Error ########### 
    ax = plt.subplot(gs[:, 1])
    h = ax.imshow(abs(Exact_idn-U_pred), interpolation='nearest', cmap='jet', 
                  extent=[lb_idn[0], ub_idn[0]*keep, lb_idn[1], ub_idn[1]], 
                  origin='lower', aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Identifier Error', fontsize = 10)
    
    savefig('Results/KdV_idn_relu')
                  interpolation='nearest',
                  cmap='jet',
                  extent=[lb_idn[0], ub_idn[0] * keep, lb_idn[1], ub_idn[1]],
                  origin='lower',
                  aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Exact Dynamics', fontsize=10)

    ######## Approximation Error ###########
    ax = plt.subplot(gs[:, 1])
    h = ax.imshow(abs(Exact_idn - U_pred),
                  interpolation='nearest',
                  cmap='jet',
                  extent=[lb_idn[0], ub_idn[0] * keep, lb_idn[1], ub_idn[1]],
                  origin='lower',
                  aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Identifier Error', fontsize=10)

    savefig('Results/KdV_idn_rat')
Example #12
             Y_test[0:1, -1, 0],
             'ko',
             label='$Y_T = u(T,X_T)$')

    plt.plot(t_test[1:samples, :, 0].T, Y_pred[1:samples, :, 0].T, 'b')
    plt.plot(t_test[1:samples, :, 0].T, Y_test[1:samples, :, 0].T, 'r--')
    plt.plot(t_test[1:samples, -1, 0], Y_test[1:samples, -1, 0], 'ko')

    plt.plot([0], Y_test[0, 0, 0], 'ks', label='$Y_0 = u(0,X_0)$')

    plt.xlabel('$t$')
    plt.ylabel('$Y_t = u(t,X_t)$')
    plt.title('100-dimensional Black-Scholes-Barenblatt')
    plt.legend()

    savefig('BSB.png', crop=False)

    errors = np.sqrt((Y_test - Y_pred)**2 / Y_test**2)
    mean_errors = np.mean(errors, 0)
    std_errors = np.std(errors, 0)
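    # Plot the mean relative error over time alongside a mean + two-standard-deviation curve.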

    plt.figure()
    plt.plot(t_test[0, :, 0], mean_errors, 'b', label='mean')
    plt.plot(t_test[0, :, 0],
             mean_errors + 2 * std_errors,
             'r--',
             label='mean + two standard deviations')
    plt.xlabel('$t$')
    plt.ylabel('relative error')
    plt.title('100-dimensional Black-Scholes-Barenblatt')
    plt.legend()
Example #13
                  interpolation='nearest',
                  cmap='jet',
                  extent=[lb_idn[0], ub_idn[0] * keep, lb_idn[1], ub_idn[1]],
                  origin='lower',
                  aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Exact Dynamics', fontsize=10)

    ######## Approximation Error ###########
    ax = plt.subplot(gs[:, 1])
    h = ax.imshow(abs(Exact_idn - U_pred),
                  interpolation='nearest',
                  cmap='jet',
                  extent=[lb_idn[0], ub_idn[0] * keep, lb_idn[1], ub_idn[1]],
                  origin='lower',
                  aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Identifier Error', fontsize=10)

    savefig('Results/Rational_%d_%d/KdV_idn_rat' % (rP, rQ))
Example #14
t = f['t']

# read yml file
yml_path = files.find(args[0], 'yml')
ps = yaml.load(open(yml_path).read(), Loader=yaml.FullLoader)
plmat = ps['plot_matrix'] if 'plot_matrix' in ps else None

if save:
    files.delete_images()

# show/save a graph for every connection
# or a single graph if plot_matrix is specified
if plmat:
    conns = {}
    for n in nodes:
        conns[n.name] = {}
    for c in connections:
        conns[c.origin_node.name][c.dest_node.name] = c
    pl.plot_matrix(plmat, conns, t)
    if save:
        pl.savefig(files.image_path())
else:
    for c in connections:
        pl.plot_connection(c, t)
        if save:
            pl.savefig(files.image_path(c))

if not save:
    pl.show()

gs = gridspec.GridSpec(1, 1)
gs.update(top=0.9, bottom=0.1, left=0.1, right=0.9, wspace=0.7, hspace=0.5)

# ######## Exact solution #######################
ax = plt.subplot(gs[0, 0])
h = ax.imshow(Exact_idn,
              interpolation='nearest',
              cmap='jet',
              extent=[lb_idn[0], ub_idn[0], lb_idn[1], ub_idn[1]],
              origin='lower',
              aspect='auto')
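# Attach the colorbar to a narrow axis carved off the right edge of the image axes.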
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)

fig.colorbar(h, cax=cax)
savefig('Results/solution')

fig, ax = newfig(2.0, 0.5)
ax.axis('off')
gs = gridspec.GridSpec(1, 3)
gs.update(top=0.9, bottom=0.1, left=0.1, right=0.9, wspace=0.5)

######## ReLU error #######################
ax = plt.subplot(gs[0, 0])
h = ax.imshow(abs(Exact_idn - U_relu),
              interpolation='nearest',
              cmap='jet',
              extent=[lb_idn[0], ub_idn[0], lb_idn[1], ub_idn[1]],
              origin='lower',
              aspect='auto',
              vmin=0.0,
Example #16
        f.write("% Automatically generated\n")
        f.write("\\newcommand{{\\accfrac}}{{{0:.2f}}}\n".format(acc_frac))
        f.write("\\newcommand{{\\taua}}{{{0:.0f}}}\n".format(tau[0]))
        f.write("\\newcommand{{\\taub}}{{{0:.0f}}}\n".format(tau[1]))

    # Plot the traces and corner plot.
    fig, axes = plt.subplots(2, 1, figsize=SQUARE_FIGSIZE, sharex=True)
    axes[0].plot(chain[:5000, 0], "k")
    axes[1].plot(chain[:5000, 1], "k")
    axes[0].set_ylabel(r"$\theta_1$")
    axes[1].set_ylabel(r"$\theta_2$")
    axes[1].set_xlabel("step")
    axes[0].yaxis.set_major_locator(plt.MaxNLocator(4))
    axes[1].yaxis.set_major_locator(plt.MaxNLocator(4))
    axes[1].xaxis.set_major_locator(plt.MaxNLocator(3))
    savefig(fig, "traces.pdf")

    plt.close(fig)
    fig = corner.corner(chain, labels=[r"$\theta_1$", r"$\theta_2$"])
    savefig(fig, "corner.pdf")

    plt.close(fig)
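    # Autocorrelation of the Metropolis chain for three test functions of theta, including the product theta_1 * theta_2.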
    fig, ax = plt.subplots(1, 1, figsize=SQUARE_FIGSIZE, sharex=True)
    p = autocorr.function(chain)
    ax.plot(p[:, 0], label=r"$f(\theta) = \theta_1$")
    ax.plot(p[:, 1], label=r"$f(\theta) = \theta_2$")
    p = autocorr.function(np.prod(chain, axis=1))
    ax.plot(p, label=r"$f(\theta) = \theta_1 \, \theta_2$")
    ax.set_title("Metropolis")
    ax.set_ylabel("autocorrelation")
    ax.set_xlabel("lag [steps]")
Example #17
    h = plot_solution(t, x, d, ax)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Exact $D(t,x)$', fontsize=10)

    ########     Learned d(t,x,y)     ###########
    ax = plt.subplot(gs[1:2, 1])
    h = plot_solution(t, x, d_pred, ax)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Learned $D(t,x)$', fontsize=10)

    savefig('./figures/Results_1D', crop=False)

    scipy.io.savemat('turbulence_results_%s.mat' % (time.strftime('%d_%m_%Y')),
                     {
                         't': t,
                         'x': x,
                         'u': u,
                         'd': d,
                         'u_pred': u_pred,
                         'd_pred': d_pred
                     })
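
Many of the snippets above repeat the same colorbar layout: make_axes_locatable carves a narrow axis off the right edge of the image axes and the colorbar is drawn there, so it always matches the image height. A minimal self-contained sketch of that pattern follows; the random data and the output filename are invented for illustration, and plain Matplotlib savefig is used instead of the projects' own wrappers.

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable

# Invented example data, purely for illustration.
data = np.random.rand(50, 80)

fig, ax = plt.subplots()
h = ax.imshow(data, interpolation='nearest', cmap='jet',
              origin='lower', aspect='auto')

# Carve a narrow axis off the right edge of `ax` and draw the colorbar there.
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)
fig.colorbar(h, cax=cax)

ax.set_xlabel('$t$')
ax.set_ylabel('$x$')
fig.savefig('colorbar_pattern.png', bbox_inches='tight')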
Example #18
    ax.legend(loc='upper center', bbox_to_anchor=(0.5, -0.35), ncol=5, frameon=False)
    
    ax = plt.subplot(gs1[0, 2])
    ax.plot(x,Exact[75,:], 'b-', linewidth = 2, label = 'Exact')       
    ax.plot(x,U_pred[75,:], 'r--', linewidth = 2, label = 'Prediction')
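    # Shade a band of two predictive standard deviations around the predicted mean.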
    lower = U_pred[75,:] - 2.0*np.sqrt(Sigma_pred[75,:])
    upper = U_pred[75,:] + 2.0*np.sqrt(Sigma_pred[75,:])
    plt.fill_between(x.flatten(), lower.flatten(), upper.flatten(), 
                     facecolor='orange', alpha=0.5, label="Two std band")
    ax.set_xlabel('$x$')
    ax.set_ylabel('$u(t,x)$')
    ax.axis('square')
    ax.set_xlim([-1.1,1.1])
    ax.set_ylim([-1.1,1.1])    
    ax.set_title('$t = 0.75$', fontsize = 10)
    savefig('./Prediction')
    

    fig, ax = newfig(1.0)
    ax.axis('off')
    
    #############       Uncertainty       ##################
    gs2 = gridspec.GridSpec(1, 2)
    gs2.update(top=1-0.06, bottom=1-1/3, left=0.15, right=0.85, wspace=0)
    ax = plt.subplot(gs2[:, :])
    
    h = ax.imshow(Sigma_pred.T, interpolation='nearest', cmap='rainbow', 
                  extent=[t.min(), t.max(), x.min(), x.max()], 
                  origin='lower', aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)
Example #19
# Plot the different solutions
fig, ax = newfig(0.6, 1.2)
ax.axis('off')
gs = gridspec.GridSpec(1, 1)
gs.update(top=0.9, bottom=0.1, left=0.1, right=0.9, wspace=0.7, hspace=0.5)

# ######## Exact solution #######################
ax = plt.subplot(gs[0, 0])
h = ax.imshow(Exact_idn, interpolation='nearest', cmap='jet', 
              extent=[lb_idn[0], ub_idn[0], lb_idn[1], ub_idn[1]],
              origin='lower', aspect='auto')
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)

fig.colorbar(h, cax=cax)
savefig('Results/solution')

fig, ax = newfig(2.0, 0.5)
ax.axis('off')
gs = gridspec.GridSpec(1, 3)
gs.update(top=0.9, bottom=0.1, left=0.1, right=0.9, wspace=0.5)

######## ReLU error #######################
ax = plt.subplot(gs[0, 0])
h = ax.imshow(abs(Exact_idn-U_relu), interpolation='nearest', cmap='jet', 
              extent=[lb_idn[0], ub_idn[0], lb_idn[1], ub_idn[1]],
              origin='lower', aspect='auto', vmin=0.0, vmax=0.05)
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)

fig.colorbar(h, cax=cax)
Example #20
    # of weights and biases.
    model = PINN(x_train, t_train, rho_train, u_train, p_train, E_train, layers)
    model.train(20000)

    #model.train2(num_epochs = 200, batch_size = 10000, learning_rate=1e-3)
    #model.train2(num_epochs = 300, batch_size = 10000, learning_rate=1e-4)
    #model.train2(num_epochs = 300, batch_size = 10000, learning_rate=1e-5)
    #model.train2(num_epochs = 200, batch_size = 10000, learning_rate=1e-6)

    # Plotting Loss
    plt.plot(loss_vector, label='Loss value')
    plt.legend()
    plt.title('Loss value over iterations')
    plt.xlabel('Iterations')
    plt.ylabel('Loss')
    savefig('./figures/Loss', crop = False)
    plt.show()

    # Test Data
    # Test the neural network performance using Test dataset "data1" generated
    # by eliminating the initially randomly selected rows from the data. The test
    # dataset "data1" is then sliced to get test parameter values.
    data1 = data
    data1 = np.delete(data1, idx, 0)
    print(data.shape)

    # x
    x_test  = data1[:,0:1].flatten()[:,None]
    t_test  = data1[:,1:2].flatten()[:,None]
    XT_test = np.concatenate([x_test, t_test], 1)
    # y
Example #21
    random_state = np.random.RandomState(seed=0)

    states = np.arange(7)
    weights = np.array([1, 1, 1, 1, 1, 1, 10, 1])
    alpha = 0.01
    gamma = 0.99

    weights_list = [weights]
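    # Sample a state uniformly at random each step, apply the update, and record the weight vector.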
    for i in range(n_steps):
        s = random_state.choice(states)
        weights = weights + 7 * alpha * (gamma * q(states[-1], weights) -
                                         q(s, weights)) * feature(s)
        weights_list.append(weights)

    output = np.c_[weights_list]

    with plt.rc_context(plotting.rc()):
        fig, ax = plt.subplots(1)
        lines = ax.plot(output)
        ax.legend(lines, [f"w{i+1}" for i in range(output.shape[1])])
        ax.grid(alpha=0.1)
        ax.set_xlabel("Steps")
        ax.set_ylabel("Weight")
        ax.set_title("Q-learning on Baird's Counterexample")
        plt.tight_layout()

        plotting.savefig(fig,
                         path=os.path.join(
                             c.Paths.output, "ex_11_3",
                             "bairds_counter_example_q_learning.png"))
Example #22
        return self.loss_Eqs(x, self.u_net(x), source, weight)

    def loss_Eqs(self, x, lamda, source, weight=1):
        #    def loss_PINN(self,x,source):
        #lamda = self.u_net(x);
        fx = torch.matmul(lamda[:, None, None, :], self.A[None, :, :, :])
        fx = torch.matmul(fx, lamda[:, None, :, None])
        fx = fx.view(lamda.shape)
        fx = fx + torch.matmul(lamda, self.B.T) - source
        return self.lossfun(weight * fx, torch.zeros_like(fx))


if __name__ == '__main__':
    NumSolsdir = 'NumSols'
    Nsample = 80
    matfile = NumSolsdir + '/' + 'Burges1D_SampleNum=' + str(Nsample) + '.mat'
    M = 2
    roeqs = CustomedEqs(matfile, M)

    #    Net = CustomedNet(roeqs=roeqs,layers=[2,20,20,20,M])
    #    print(Net.labeledLoss)

    from plotting import newfig, savefig
    import matplotlib.pyplot as plt
    newfig(width=0.8)
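    # Plot the singular values on a logarithmic scale against the mode index.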
    plt.semilogy(np.arange(roeqs.sigma.shape[0]) + 1, roeqs.sigma, '-ko')
    plt.xlabel('$m$')
    plt.ylabel('Singular value')
    plt.show()
    savefig('fig/SingularValues_%d' % (Nsample))
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Exact $\\varepsilon(t,x)$', fontsize=10)

    ########     Learned e(t,x,y)     ###########
    ax = plt.subplot(gs[1:2, 1])
    h = plot_solution(t, x, e_pred, ax)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Learned $\\varepsilon(t,x)$', fontsize=10)

    savefig('./figures/turbulence_1D_dissipation', crop=False)

    scipy.io.savemat(
        'turbulence_1D_dissipation_swish_noise3_results_%s.mat' %
        (time.strftime('%d_%m_%Y')), {
            't': t,
            'x': x,
            'u': u,
            'e': e,
            'u_pred': u_pred,
            'e_pred': e_pred
        })
Example #24
    ax.set_xlim([-5.1, 5.1])
    ax.set_ylim([-0.1, 5.1])

    ax = plt.subplot(gs1[0, 1])
    ax.plot(x, Exact_h[:, 100], 'b-', linewidth=2, label='Exact')
    ax.plot(x, H_pred[100, :], 'r--', linewidth=2, label='Prediction')
    ax.set_xlabel('$x$')
    ax.set_ylabel('$|h(t,x)|$')
    ax.axis('square')
    ax.set_xlim([-5.1, 5.1])
    ax.set_ylim([-0.1, 5.1])
    ax.set_title('$t = %.2f$' % (t[100]), fontsize=10)
    ax.legend(loc='upper center',
              bbox_to_anchor=(0.5, -0.8),
              ncol=5,
              frameon=False)

    ax = plt.subplot(gs1[0, 2])
    ax.plot(x, Exact_h[:, 125], 'b-', linewidth=2, label='Exact')
    ax.plot(x, H_pred[125, :], 'r--', linewidth=2, label='Prediction')
    ax.set_xlabel('$x$')
    ax.set_ylabel('$|h(t,x)|$')
    ax.axis('square')
    ax.set_xlim([-5.1, 5.1])
    ax.set_ylim([-0.1, 5.1])
    ax.set_title('$t = %.2f$' % (t[125]), fontsize=10)

    savefig('./figures/NLS')
    loss_log = np.array(model.loss_log)
    np.save('loss/loss_QRes.npy', loss_log)
Example #25
newfig(width=1)
for k in range(2):
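    # First pass (k == 0) plots the exact solution ('PS'); second pass (k == 1) plots the reconstruction projected from the network output ('PRNN').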
    for i in range(alpha.shape[0]):
        alphai = alpha[i:i + 1, :]
        lamdai = Net(torch.tensor(alphai).float().to(DEVICE))
        phi_proj = np.matmul(lamdai, roeqs.Modes.T)
        phi_Exact = roeqs.phix(roeqs.xgrid.T, alphai[:, 0:1], alphai[:, 1:2])

        #plot
        name = '$\\boldsymbol{\\mu}=(%0.1f,%0.1f)$' % (alphai[0, 0], alphai[0, 1])
        if k == 0:
            plt.plot(roeqs.xgrid,
                     phi_Exact.T,
                     colors[i] + lines[0],
                     label='PS',
                     markersize=6)
        else:
            plt.plot(roeqs.xgrid,
                     phi_proj.T,
                     colors[i] + symbols[i],
                     label='PRNN, ' + name,
                     markersize=6)

plt.xlabel('$x$')
plt.ylabel(r'$\phi$')
#plt.title(resultsdir)
plt.legend(loc="upper left", ncol=2, handlelength=2, columnspacing=1)
plt.show()
savefig('fig/ResultComparsion')
Example #26
        lambda_1_value, lambda_2_value)
    s = s + r' \end{array}$ \\ '
    s = s + r' \hline'
    s = s + r' Identified PDE (1\% noise) & $\begin{array}{c}'
    s = s + r' u_t + %.3f (u u_x + v u_y) = -p_x + %.5f (u_{xx} + u_{yy})' % (
        lambda_1_value_noisy, lambda_2_value_noisy)
    s = s + r' \\'
    s = s + r' v_t + %.3f (u v_x + v v_y) = -p_y + %.5f (v_{xx} + v_{yy})' % (
        lambda_1_value_noisy, lambda_2_value_noisy)
    s = s + r' \end{array}$ \\ '
    s = s + r' \hline'
    s = s + r' \end{tabular}$'

    ax.text(0.015, 0.0, s)

    savefig('./figures/NavierStokes_prediction')

    print('Error u: %e' % (error_u))
    print('Error v: %e' % (error_v))
    print('Error p: %e' % (error_p))
    print('Error l1: %.5f%%' % (error_lambda_1))
    print('Error l2: %.5f%%' % (error_lambda_2))

    print('Error l1: %.5f%%' % (error_lambda_1_noisy))
    print('Error l2: %.5f%%' % (error_lambda_2_noisy))

    with open('results.txt', 'w') as f:
        lines = []

        lines.append('Error v: %e\n' % (error_v))
        lines.append('Error p: %e\n' % (error_p))
Example #27
                  extent=[lb_sol[0], ub_sol[0], lb_sol[1], ub_sol[1]],
                  origin='lower', aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Exact Dynamics', fontsize = 10)
    
    line = np.linspace(lb_sol[1], ub_sol[1], 2)[:,None]
    ax.plot(t_idn[index]*np.ones((2,1)), line, 'w-', linewidth = 1)
    
    ########     Exact p(t,x,y)     ########### 
    ax = plt.subplot(gs[:, 1])
    h = ax.imshow(U_pred, interpolation='nearest', cmap='jet', 
                  extent=[lb_sol[0], ub_sol[0], lb_sol[1], ub_sol[1]], 
                  origin='lower', aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('Learned Dynamics', fontsize = 10)
    
    line = np.linspace(lb_sol[1], ub_sol[1], 2)[:,None]
    ax.plot(t_idn[index]*np.ones((2,1)), line, 'w-', linewidth = 1)
    
    savefig('../figures/Burgers')
                Y,
                zdir='y',
                offset=t_star.mean(),
                cmap='rainbow',
                alpha=0.8)

    ax.text(x_star.mean(), data['t'].min() - 1, y_star.min() - 1, '$x$')
    ax.text(x_star.max() + 1, data['t'].mean(), y_star.min() - 1, '$t$')
    ax.text(x_star.min() - 1, data['t'].min() - 0.5, y_star.mean(), '$y$')
    ax.text(x_star.min() - 3, data['t'].mean(), y_star.max() + 1, '$v(t,x,y)$')
    ax.set_xlim3d(r1)
    ax.set_ylim3d(r2)
    ax.set_zlim3d(r3)
    axisEqual3D(ax)

    savefig('./figures/NavierStokes_data' + str(learning_rate))

    fig, ax = newfig(1.015, 0.8)
    ax.axis('off')

    ######## Row 2: Pressure #######################
    ########      Predicted p(t,x,y)     ###########
    gs2 = gridspec.GridSpec(1, 2)
    gs2.update(top=1, bottom=1 - 1 / 2, left=0.1, right=0.9, wspace=0.5)
    ax = plt.subplot(gs2[:, 0])
    h = ax.imshow(
        PP_star,
        interpolation='nearest',
        cmap='rainbow',
        extent=[x_star.min(),
                x_star.max(),
Example #29
    nwalkers = 32
    sampler = emcee.EnsembleSampler(nwalkers,
                                    ndim,
                                    log_posterior,
                                    args=(x, y, y_err))
    p0 = np.append(w, -5.0)
    pos, lp, _ = sampler.run_mcmc(p0 + 1e-4 * np.random.randn(nwalkers, ndim),
                                  5000)
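    # Restart the walkers around the highest-probability sample, run a second burn-in, then draw the production chain.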
    ind = np.argmax(lp)
    sampler.reset()
    pos, lp, _ = sampler.run_mcmc(
        pos[ind] + 1e-4 * np.random.randn(nwalkers, ndim), 5000)
    sampler.run_mcmc(pos, 20000)
    tau = sampler.get_autocorr_time()
    acc_frac = np.mean(sampler.acceptance_fraction)

    fig, ax = plt.subplots(1, 1, figsize=SQUARE_FIGSIZE, sharex=True)
    ax.errorbar(x, y, yerr=y_err, xerr=x_err, fmt=".k", capsize=0, ms=4)
    x0 = np.linspace(0, 10, 3)
    samples = sampler.flatchain
    for i in np.random.randint(len(samples), size=100):
        theta = samples[i]
        ax.plot(x0, x0 * theta[0] + theta[1], color="k", alpha=0.05)
    ax.plot(x0, 0.5 * x0 - 0.1, label="truth")
    ax.plot(x0, np.dot(np.vander(x0, 2), w), label="naive fit")
    ax.set_xlabel("$x$")
    ax.set_ylabel("$y$")
    ax.yaxis.set_major_locator(plt.MaxNLocator(4))
    plt.legend(fontsize=12, loc=4)
    savefig(fig, "line1.pdf")
def plot_pressure(p_pred, p_star, X_star, epoch, learning_rate):

    # Predict for plotting
    lb = X_star.min(0)
    ub = X_star.max(0)
    nn = 200
    x = np.linspace(lb[0], ub[0], nn)
    y = np.linspace(lb[1], ub[1], nn)
    X, Y = np.meshgrid(x, y)
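    # Interpolate the scattered predicted and exact pressure values onto the regular grid for image plotting.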

    PP_star = griddata(X_star,
                       np.ndarray.flatten(p_pred.numpy()), (X, Y),
                       method='cubic')
    P_exact = griddata(X_star,
                       np.ndarray.flatten(p_star), (X, Y),
                       method='cubic')

    x_star = X_star[:, 0:1]
    y_star = X_star[:, 1:2]

    fig, ax = newfig(1.015, 0.8)
    ax.axis('off')

    ########      Predicted p(t,x,y)     ###########
    gs2 = gridspec.GridSpec(1, 2)
    gs2.update(top=1, bottom=1 - 1 / 2, left=0.1, right=0.9, wspace=0.5)
    ax = plt.subplot(gs2[:, 0])
    h = ax.imshow(
        PP_star,
        interpolation='nearest',
        cmap='rainbow',
        extent=[x_star.min(),
                x_star.max(),
                y_star.min(),
                y_star.max()],
        origin='lower',
        aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$x$')
    ax.set_ylabel('$y$')
    ax.set_aspect('equal', 'box')
    ax.set_title('Predicted pressure', fontsize=10)

    ########     Exact p(t,x,y)     ###########
    ax = plt.subplot(gs2[:, 1])
    h = ax.imshow(
        P_exact,
        interpolation='nearest',
        cmap='rainbow',
        extent=[x_star.min(),
                x_star.max(),
                y_star.min(),
                y_star.max()],
        origin='lower',
        aspect='auto')
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)

    fig.colorbar(h, cax=cax)
    ax.set_xlabel('$x$')
    ax.set_ylabel('$y$')
    ax.set_aspect('equal', 'box')
    ax.set_title('Exact pressure', fontsize=10)

    savefig('./figures/NavierStokes_prediction ' + str(learning_rate) + ' ' +
            str(epoch))

    plt.close()
    pass
Example #31
#                        , ticks=[-1, -0.5, 0, 0.5, 1])
#    cbar.ax.set_yticklabels(['-1', '-0.5', '0', '0.5', '1'])
    cbar.ax.tick_params(labelsize=20)
    ax.set_xlim(-0.01, 1)
    ax.set_ylim(-1.01, 1.02)
    #ax_pred.locator_params(nbins=5)
    #ax_pred.set_xticklabels(np.linspace(0,1,5), rotation=0, fontsize=18)
    ax.set_xlabel('$t$')
    ax.set_ylabel('$x$')
    ax.set_title('$ u^{Exact} $')
#    for xc in x_interface:
#        ax.axhline(y=xc, linewidth=1, color = 'w')
    
    #fig.tight_layout()
    fig.set_size_inches(w=15,h=8) 
    savefig('./figures/BurExact_4sd')#KdV3SD_PredPlot')
    
#    plt.show()

    fig, ax = newfig(1.0, 1.1)
#    fig = plt.figure()
    gridspec.GridSpec(1,1)
    
    ax = plt.subplot2grid((1,1), (0,0))
    maxLevel = max(max(u1_star),max(max(u2_star),max(max(u3_star),max(u4_star))))[0]
    minLevel = min(min(u1_star),min(min(u2_star), min(min(u3_star), min(u4_star)) ))[0] 
    levels = np.linspace(minLevel-0.01, maxLevel+0.01, 200)
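    # Filled contours of the predicted solution on the four sub-domains, all drawn with the same colour levels.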
    CS_pred1 = ax.contourf(T1, X1, U1_pred, levels=levels, cmap='jet', origin='lower')
    CS_pred2 = ax.contourf(T2, X2, U2_pred, levels=levels, cmap='jet', origin='lower')
    CS_pred3 = ax.contourf(T3, X3, U3_pred, levels=levels, cmap='jet', origin='lower')
    CS_pred4 = ax.contourf(T4, X4, U4_pred, levels=levels, cmap='jet', origin='lower')
Example #32
    import matplotlib.pyplot as plt

    # Run the sampler.
    ndim = 2
    nwalkers = 32
    sampler = emcee.EnsembleSampler(nwalkers, ndim, log_p_func)
    sampler.run_mcmc(np.random.randn(nwalkers, ndim), 20000)
    tau = sampler.get_autocorr_time()
    acc_frac = np.mean(sampler.acceptance_fraction)

    with open("numbers-emcee.tex", "w") as f:
        f.write("% Automatically generated - emcee\n")
        f.write("\\newcommand{{\\eaccfrac}}{{{0:.2f}}}\n".format(acc_frac))
        f.write("\\newcommand{{\\etaua}}{{{0:.0f}}}\n".format(tau[0]))
        f.write("\\newcommand{{\\etaub}}{{{0:.0f}}}\n".format(tau[1]))

    fig, ax = plt.subplots(1, 1, figsize=SQUARE_FIGSIZE, sharex=True)
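    # Autocorrelation of the walker-averaged chain for three test functions of theta.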
    p = autocorr.function(np.mean(sampler.chain, axis=0))
    ax.plot(p[:, 0], label=r"$f(\theta) = \theta_1$")
    ax.plot(p[:, 1], label=r"$f(\theta) = \theta_2$")
    p = autocorr.function(np.mean(np.prod(sampler.chain, axis=-1), axis=0))
    ax.plot(p, label=r"$f(\theta) = \theta_1 \, \theta_2$")
    ax.set_title("emcee")
    ax.set_ylabel("autocorrelation")
    ax.set_xlabel("lag [steps]")
    ax.set_xlim(0, 500)
    ax.yaxis.set_major_locator(plt.MaxNLocator(4))
    ax.xaxis.set_major_locator(plt.MaxNLocator(4))
    plt.legend(fontsize=12)
    savefig(fig, "ensemble.pdf")
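
Note that most examples above call a project-local savefig(name)/newfig(width) helper (typically from a local plotting module) rather than matplotlib.pyplot.savefig directly. Those helpers are not reproduced in this listing; the sketch below is only an assumption of what such a wrapper commonly does, with the dual PDF/PNG output and the crop keyword chosen for illustration rather than taken from these projects.

import matplotlib.pyplot as plt

def savefig(filename, crop=True):
    # Hypothetical helper in the spirit of the wrappers used above:
    # save the current figure as both PDF and PNG, optionally trimming whitespace.
    kwargs = {'bbox_inches': 'tight', 'pad_inches': 0} if crop else {}
    plt.savefig(f'{filename}.pdf', **kwargs)
    plt.savefig(f'{filename}.png', dpi=300, **kwargs)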