Example no. 1
def create_image_training_epoch(X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                                tr_loss, val_loss, x_grid, y_grid, cf_a,
                                video_fotograms_folder, epoch_i):
    """
    Creates the image of the training and validation accuracy
    """
    gl.init_figure()
    ax1 = gl.subplot2grid((2,1), (0,0), rowspan=1, colspan=1)
    ax2 = gl.subplot2grid((2,1), (1,0), rowspan=1, colspan=1)
    
    plt.title("Training")
    ## First plot with the data and predictions !!!
    ax1 = gl.scatter(X_data_tr, Y_data_tr, ax = ax1, lw = 3, legend = ["tr points"], labels = ["Analysis of training", "X", "Y"])
    gl.scatter(X_data_val, Y_data_val, lw = 3, legend = ["val points"])

    gl.plot(x_grid, y_grid, legend = ["Prediction function"])

    gl.set_zoom(xlimPad = [0.2, 0.2], ylimPad = [0.2,0.2], X = X_data_tr, Y = Y_data_tr)
    ## Second plot with the evolution of parameters !!!
    ax2 = gl.plot([], tr_loss, ax = ax2, lw = 3, labels = ["RMSE. lr: %.3f"%cf_a.lr, "epoch","RMSE"], legend = ["train"])
    gl.plot([], val_loss, lw = 3, legend = ["validation"], loc = 3)
    
    
    gl.set_fontSizes(ax = [ax1,ax2], title = 20, xlabel = 20, ylabel = 20, 
                      legend = 20, xticks = 12, yticks = 12)
    
    # Set final properties and save figure
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.30)
    
    gl.savefig(video_fotograms_folder +'%i.png'%epoch_i, 
               dpi = 100, sizeInches = [14, 10], close = True, bbox_inches = None)
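The frames that create_image_training_epoch writes (one PNG per epoch) can be stitched into an animation afterwards. A minimal sketch, assuming the imageio package and the '<epoch>.png' naming used above:

import os
import imageio

def frames_to_gif(video_fotograms_folder, gif_path, n_epochs):
    # Read the frames in epoch order and write them as an animated GIF.
    frames = [imageio.imread(os.path.join(video_fotograms_folder, '%i.png' % i))
              for i in range(n_epochs)]
    imageio.mimsave(gif_path, frames, duration=0.2)  # 0.2 s per frame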
Example no. 2
def plot_weights_network(model, folder_images):
    """
    Plots the first- and second-layer weights (with biases appended as an
    extra column) as heatmaps.
    """
    weights = model.linear1.weight.detach().numpy()
    biases = model.linear1.bias.detach().numpy().reshape(-1,1)
    neurons = np.concatenate((weights, biases), axis = 1)
    
    weights2 = model.W2.detach().numpy()
    biases2 = model.b2.detach().numpy().reshape(-1,1)
    neurons2 = np.concatenate((weights2, biases2), axis =0).T
    
    gl.init_figure()
    ax1 = gl.subplot2grid((1,4), (0,0), rowspan=1, colspan=2)
    ax2 = gl.subplot2grid((1,4), (0,3), rowspan=1, colspan=1)

    cmap = cm.get_cmap('coolwarm', 30)
    cax = ax1.imshow(neurons, interpolation="nearest", cmap=cmap)
    cax2 = ax2.imshow(neurons2, interpolation="nearest", cmap=cmap)
    
#    plt.xticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='vertical')
#    plt.yticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='horizontal')
    plt.colorbar(cax)
#    plt.colorbar(cax2)
#        ax1.set_xticks(data_df_train.columns) # , rotation='vertical'
#    ax1.grid(True)
    plt.title('Weights')
#    labels=[str(x) for x in range(Nshow )]
#    ax1.set_xticklabels(labels,fontsize=20)
#    ax1.set_yticklabels(labels,fontsize=20)
    # Add colorbar, make sure to specify tick locations to match desired ticklabels
    plt.show()
    gl.savefig(folder_images +'Weights.png', 
           dpi = 100, sizeInches = [2*8, 2*2])
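A self-contained sketch of the same idea, with a plain torch.nn.Linear layer standing in for the custom model above (the layer sizes are arbitrary): append the bias as an extra column and render the matrix as a heatmap.

import numpy as np
import torch
import matplotlib.pyplot as plt

layer = torch.nn.Linear(8, 4)
W = layer.weight.detach().numpy()               # shape (4, 8)
b = layer.bias.detach().numpy().reshape(-1, 1)  # shape (4, 1)
neurons = np.concatenate((W, b), axis=1)        # one row per neuron

fig, ax = plt.subplots()
im = ax.imshow(neurons, interpolation="nearest", cmap="coolwarm")
fig.colorbar(im)
ax.set_title("Weights (last column = bias)")
plt.show()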
Example no. 3
def plot_weights_network(model, folder_images):
    """
    Plots the first- and second-layer weights (with biases appended as an
    extra column) as heatmaps.
    """
    weights = model.linear1.weight.detach().numpy()
    biases = model.linear1.bias.detach().numpy().reshape(-1, 1)
    neurons = np.concatenate((weights, biases), axis=1)

    weights2 = model.W2.detach().numpy()
    biases2 = model.b2.detach().numpy().reshape(-1, 1)
    neurons2 = np.concatenate((weights2, biases2), axis=0).T

    gl.init_figure()
    ax1 = gl.subplot2grid((1, 4), (0, 0), rowspan=1, colspan=2)
    ax2 = gl.subplot2grid((1, 4), (0, 3), rowspan=1, colspan=1)

    cmap = cm.get_cmap('coolwarm', 30)
    cax = ax1.imshow(neurons, interpolation="nearest", cmap=cmap)
    cax2 = ax2.imshow(neurons2, interpolation="nearest", cmap=cmap)

    #    plt.xticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='vertical')
    #    plt.yticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='horizontal')
    plt.colorbar(cax)
    #    plt.colorbar(cax2)
    #        ax1.set_xticks(data_df_train.columns) # , rotation='vertical'
    #    ax1.grid(True)
    plt.title('Weights')
    #    labels=[str(x) for x in range(Nshow )]
    #    ax1.set_xticklabels(labels,fontsize=20)
    #    ax1.set_yticklabels(labels,fontsize=20)
    # Add colorbar, make sure to specify tick locations to match desired ticklabels
    plt.show()
    gl.savefig(folder_images + 'Weights.png',
               dpi=100,
               sizeInches=[2 * 8, 2 * 2])
Example no. 4
def plot_learnt_function(X_data_tr, Y_data_tr, X_data_val, Y_data_val, x_grid,
                         y_grid, cf_a, folder_images):
    gl.init_figure()
    ax1 = gl.scatter(X_data_tr,
                     Y_data_tr,
                     lw=3,
                     legend=["tr points"],
                     labels=["Data", "X", "Y"],
                     alpha=0.2)
    ax2 = gl.scatter(X_data_val,
                     Y_data_val,
                     lw=3,
                     legend=["val points"],
                     alpha=0.2)

    gl.set_fontSizes(ax=[ax1, ax2],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=20,
                     xticks=12,
                     yticks=12)

    gl.plot(x_grid, y_grid, legend=["training line"])
    gl.savefig(folder_images + 'Training_Example_Data.png',
               dpi=100,
               sizeInches=[14, 4])
Example no. 5
def create_image_training_epoch(X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                                tr_loss, val_loss, x_grid, y_grid, cf_a,
                                video_fotograms_folder, epoch_i):
    """
    Creates the image of the training and validation accuracy
    """
    gl.init_figure()
    ax1 = gl.subplot2grid((2, 1), (0, 0), rowspan=1, colspan=1)
    ax2 = gl.subplot2grid((2, 1), (1, 0), rowspan=1, colspan=1)

    plt.title("Training")
    ## First plot with the data and predictions !!!
    ax1 = gl.scatter(X_data_tr,
                     Y_data_tr,
                     ax=ax1,
                     lw=3,
                     legend=["tr points"],
                     labels=["Analysis of training", "X", "Y"])
    gl.scatter(X_data_val, Y_data_val, lw=3, legend=["val points"])

    gl.plot(x_grid, y_grid, legend=["Prediction function"])

    gl.set_zoom(xlimPad=[0.2, 0.2],
                ylimPad=[0.2, 0.2],
                X=X_data_tr,
                Y=Y_data_tr)
    ## Second plot with the evolution of parameters !!!
    ax2 = gl.plot([],
                  tr_loss,
                  ax=ax2,
                  lw=3,
                  labels=["RMSE. lr: %.3f" % cf_a.lr, "epoch", "RMSE"],
                  legend=["train"])
    gl.plot([], val_loss, lw=3, legend=["validation"], loc=3)

    gl.set_fontSizes(ax=[ax1, ax2],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=20,
                     xticks=12,
                     yticks=12)

    # Set final properties and save figure
    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.30,
                       hspace=0.30)

    gl.savefig(video_fotograms_folder + '%i.png' % epoch_i,
               dpi=100,
               sizeInches=[14, 10],
               close=True,
               bbox_inches=None)
Example no. 6
def plot_evolution_RMSE(tr_loss, val_loss, cf_a, folder_images):
    gl.init_figure()
    ax1 = gl.plot([], tr_loss, lw = 3, labels = ["RMSE loss and parameters. Learning rate: %.3f"%cf_a.lr, "","RMSE"], legend = ["train"])
    gl.plot([], val_loss, lw = 3, legend = ["validation"])
    
    
    gl.set_fontSizes(ax = [ax1], title = 20, xlabel = 20, ylabel = 20, 
                      legend = 20, xticks = 12, yticks = 12)
    gl.savefig(folder_images +'Training_Example_Parameters.png', 
               dpi = 100, sizeInches = [14, 7])
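For reference, a minimal sketch (assumed inputs) of how the per-epoch tr_loss / val_loss values fed to this function could be computed:

import numpy as np

def rmse(y_true, y_pred):
    # Root mean squared error between targets and predictions.
    return float(np.sqrt(np.mean((np.asarray(y_true) - np.asarray(y_pred)) ** 2)))

# e.g. tr_loss.append(rmse(Y_data_tr, predictions_tr)) once per epoch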
Example no. 7
def plot_learnt_function(X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                          x_grid, y_grid, cf_a,
                          folder_images):
    gl.init_figure()
    ax1 = gl.scatter(X_data_tr, Y_data_tr, lw = 3, legend = ["tr points"], labels = ["Data", "X", "Y"], alpha = 0.2)
    ax2 = gl.scatter(X_data_val, Y_data_val, lw = 3, legend = ["val points"], alpha = 0.2)
    
    gl.set_fontSizes(ax = [ax1,ax2], title = 20, xlabel = 20, ylabel = 20, 
                      legend = 20, xticks = 12, yticks = 12)
    
    gl.plot(x_grid, y_grid, legend = ["training line"])
    gl.savefig(folder_images +'Training_Example_Data.png', 
               dpi = 100, sizeInches = [14, 4])
Example no. 8
def plot_evolution_RMSE(tr_loss, val_loss, cf_a, folder_images):
    gl.init_figure()
    ax1 = gl.plot([],
                  tr_loss,
                  lw=3,
                  labels=[
                      "RMSE loss and parameters. Learning rate: %.3f" %
                      cf_a.lr, "", "RMSE"
                  ],
                  legend=["train"])
    gl.plot([], val_loss, lw=3, legend=["validation"])

    gl.set_fontSizes(ax=[ax1],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=20,
                     xticks=12,
                     yticks=12)
    gl.savefig(folder_images + 'Training_Example_Parameters.png',
               dpi=100,
               sizeInches=[14, 7])
Example no. 9
def create_image_weights_epoch(model, video_fotograms_folder2, epoch_i):
    """
    Creates the image of the network weights at the given epoch
    """
    N_Bayesian_layers = len(model.VBmodels)
    N_Normal_layers = len(model.LinearModels)

    # Compute the number of squares we will need:
    # 1 x linear layers, 2 x LSTMS

    gl.init_figure()
    cmap = cm.get_cmap('coolwarm', 30)

    all_axes = []
    for i in range(N_Bayesian_layers):
        layer = model.VBmodels[i]

        #        if (layer.type_layer == "linear"):
        if ("linear" in type(layer).__name__.lower()):
            ax = gl.subplot2grid((1, N_Bayesian_layers + N_Normal_layers),
                                 (0, i),
                                 rowspan=1,
                                 colspan=1)
            weights = layer.weight.detach().cpu().numpy()
            biases = layer.bias.detach().cpu().numpy().reshape(-1, 1)
            neurons = np.concatenate((weights, biases), axis=1)
            cax = ax.imshow(neurons,
                            interpolation="nearest",
                            cmap=cmap,
                            vmin=-2,
                            vmax=2)

            all_axes.append(ax)
        else:
            ax = gl.subplot2grid((1, N_Bayesian_layers + N_Normal_layers),
                                 (0, i),
                                 rowspan=1,
                                 colspan=1)
            weights_ih = layer.weight_ih.detach().cpu().numpy()
            biases_ih = layer.bias_ih.detach().cpu().numpy().reshape(-1, 1)
            weights_hh = layer.weight_hh.detach().cpu().numpy()
            biases_hh = layer.bias_hh.detach().cpu().numpy().reshape(-1, 1)

            weights = np.concatenate((weights_ih, weights_hh), axis=1)
            biases = np.concatenate((biases_ih, biases_hh), axis=1)
            neurons = np.concatenate((weights, biases), axis=1)
            cax = ax.imshow(neurons,
                            interpolation="nearest",
                            cmap=cmap,
                            vmin=-2,
                            vmax=2)
            all_axes.append(ax)

    for i in range(N_Normal_layers):
        layer = model.LinearModels[i]
        if ("linear" in type(layer).__name__.lower()):
            ax = gl.subplot2grid((1, N_Bayesian_layers + N_Normal_layers),
                                 (0, N_Bayesian_layers + i),
                                 rowspan=1,
                                 colspan=1)
            weights = layer.weight.detach().cpu().numpy()
            biases = layer.bias.detach().cpu().numpy().reshape(-1, 1)
            neurons = np.concatenate((weights, biases), axis=1)
            cax = ax.imshow(neurons,
                            interpolation="nearest",
                            cmap=cmap,
                            vmin=-2,
                            vmax=2)
            all_axes.append(ax)
        else:
            ax = gl.subplot2grid((1, N_Bayesian_layers + N_Normal_layers),
                                 (0, N_Bayesian_layers + i),
                                 rowspan=1,
                                 colspan=1)
            weights_ih = layer.weight_ih.detach().cpu().numpy()
            biases_ih = layer.bias_ih.detach().cpu().numpy().reshape(-1, 1)
            weights_hh = layer.weight_hh.detach().cpu().numpy()
            biases_hh = layer.bias_hh.detach().cpu().numpy().reshape(-1, 1)

            weights = np.concatenate((weights_ih, weights_hh), axis=1)
            biases = np.concatenate((biases_ih, biases_hh), axis=1)
            neurons = np.concatenate((weights, biases), axis=1)
            cax = ax.imshow(neurons,
                            interpolation="nearest",
                            cmap=cmap,
                            vmin=-2,
                            vmax=2)
            all_axes.append(ax)


#    plt.xticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='vertical')
#    plt.yticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='horizontal')
    plt.colorbar(cax)
    #    plt.colorbar(cax2)
    #        ax1.set_xticks(data_df_train.columns) # , rotation='vertical'
    #    ax1.grid(True)
    plt.title('Weights')

    #    labels=[str(x) for x in range(Nshow )]
    #    ax1.set_xticklabels(labels,fontsize=20)
    #    ax1.set_yticklabels(labels,fontsize=20)
    # Add colorbar, make sure to specify tick locations to match desired ticklabels
    plt.show()

    gl.set_fontSizes(ax=all_axes,
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=20,
                     xticks=12,
                     yticks=12)

    # Set final properties and save figure
    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.30,
                       hspace=0.30)

    gl.savefig(video_fotograms_folder2 + '%i.png' % epoch_i,
               dpi=100,
               sizeInches=[14, 10],
               close=True,
               bbox_inches=None)
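The branching above keys on the class name of each layer. A small sketch with standard torch layers (an assumption; the original uses custom VB layers), showing that nn.LSTMCell exposes exactly the weight_ih / weight_hh / bias_ih / bias_hh attributes read in the recurrent branch:

import torch

for layer in [torch.nn.Linear(4, 2), torch.nn.LSTMCell(4, 2)]:
    if "linear" in type(layer).__name__.lower():
        print(type(layer).__name__, layer.weight.shape, layer.bias.shape)
    else:
        print(type(layer).__name__, layer.weight_ih.shape, layer.weight_hh.shape,
              layer.bias_ih.shape, layer.bias_hh.shape)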
Example no. 10
def create_Bayesian_analysis_charts(model,
                                    X_data_tr,
                                    X_data_val,
                                    tr_data_loss,
                                    val_data_loss,
                                    KL_loss_tr,
                                    KL_loss_val,
                                    final_loss_tr,
                                    final_loss_val,
                                    folder_images,
                                    epoch_i=None):

    # Configurations of the plots

    alpha_points = 0.2
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6, 3), (0, 0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6, 3), (3, 0), rowspan=3, colspan=1, sharex=ax1)

    ax3 = gl.subplot2grid((6, 3), (0, 1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6, 3), (2, 1), rowspan=2, colspan=1, sharex=ax3)
    ax5 = gl.subplot2grid((6, 3), (4, 1), rowspan=2, colspan=1, sharex=ax3)

    ax6 = gl.subplot2grid((6, 3), (0, 2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6, 3), (3, 2), rowspan=3, colspan=1, sharex=ax6)
    """
    ############################# Data computation #######################
    """

    Xtrain_sample_cpu, Xtrain_reconstruction,Xtrain_reconstruction_samples = \
        compute_reconstruction_data( model,X_data_tr, Nsamples = 100, sample_index = 2)

    plot_reconstruction_data(Xtrain_sample_cpu, Xtrain_reconstruction,
                             Xtrain_reconstruction_samples, ax1, ax2)
    """
    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    """
    plot_losses_evolution_epoch(tr_data_loss, val_data_loss, KL_loss_tr,
                                KL_loss_val, final_loss_tr, final_loss_val,
                                ax3, ax4, ax5)
    """
    ############## ax6 ax7: Projections Weights !! ######################
    """
    plot_projections_VAE(model, X_data_tr, ax6)
    ## Plot in chart 7 the acceptable region mu = 2*sigma -> sigma = |mu|/2

    #    gl.set_zoom (ax = ax6, ylim = [-0.1,10])
    #    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.05, np.exp(model.cf_a.input_layer_prior["log_sigma2"])*(1 + 0.15)])

    #    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,2])

    # Set final properties and save figure
    gl.set_fontSizes(ax=[ax1, ax2, ax3, ax4, ax5, ax6, ax7],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=10,
                     xticks=12,
                     yticks=12)

    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.30,
                       hspace=0.10)

    if epoch_i is None:
        gl.savefig(folder_images + "../" + 'Final_values_regression_1D_' +
                   str(model.cf_a.eta_KL) + '.png',
                   dpi=100,
                   sizeInches=[20, 10])
    else:
        gl.savefig(folder_images + '%i.png' % epoch_i,
                   dpi=100,
                   sizeInches=[20, 10],
                   close=True,
                   bbox_inches="tight")
Example no. 11
    # Fragment: assumes tgrid (an N x 1 time grid), K (the kernel covariance
    # matrix over tgrid) and L (its Cholesky factor) are defined in code not
    # shown here.

    ## Plot the covariance matrix!
    # Show the Nshow first samples
    Nshow = 20

    ## Plot realizations of the Gaussian process
    Nrealizations = 10

    flag = 1
    legend = ["Realizations"]
    labels = ["Gaussian Process noise e(t)", "t", "e(t)"]
    
    # Plot the realizations
    gl.init_figure()
    ax0 = gl.subplot2grid((1,4), (0,0), rowspan=1, colspan=3)
    for i in range(Nrealizations):
        f_prime = np.random.randn(N,1)
        error = L.dot(f_prime) 
        gl.plot(tgrid,error, lw = 3, color = "b", ls = "-", alpha = 0.5, 
                 legend = legend, labels = labels)
#        gl.scatter(tgrid,f_prime, lw = 1, alpha = 0.3, color = "b")
        
        if (flag == 1):
            flag = 0
            legend = []
    
    
    # Variance of each prediction
    v = np.diagonal(K)
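A self-contained sketch of the sampling step in this fragment, assuming a squared-exponential kernel (the kernel behind K is not shown here): realizations of a zero-mean Gaussian process are obtained by multiplying the Cholesky factor of the kernel matrix with standard-normal noise.

import numpy as np

N = 100
tgrid = np.linspace(0, 1, N).reshape(-1, 1)
# Squared-exponential kernel plus a small jitter for numerical stability
K = np.exp(-0.5 * ((tgrid - tgrid.T) / 0.1) ** 2) + 1e-8 * np.eye(N)
L = np.linalg.cholesky(K)

f_prime = np.random.randn(N, 1)   # independent standard-normal draws
error = L.dot(f_prime)            # one realization e(t) with covariance K
v = np.diagonal(K)                # pointwise variance of each prediction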
Example no. 12
def plot_multiple_iterations(Xs,mus,covs, Ks ,myDManager, logl,theta_list,model_theta_list, folder_images):
    ######## Plot the original data #####
    gl.init_figure()
    gl.set_subplots(2,3)
    Ngraph = 6
    
    colors = ["r","b","g"]
    K_G,K_W,K_vMF = Ks
    
    for i in range(Ngraph):
        indx = int(i*((len(theta_list)-1)/float(Ngraph-1)))
        nf = 1
        for xi in range(len( Xs)):
            ## First cluster
            labels = ['EM Evolution. Kg:' + str(K_G) + ', Kw:' + str(K_W) + ', K_vMF:' + str(K_vMF), "X1", "X2"]
            ax1 = gl.scatter(Xs[xi][0,:], Xs[xi][1,:], labels = labels,
                             color = colors[xi], alpha = 0.2, nf = nf)
            nf =0
            mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = mus[xi], Sigma = covs[xi], Chi2val = 2.4477)
            r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
            gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "--", lw = 2
                     ,AxesStyle = "Normal2", color = colors[xi], alpha = 0.7)
            

        # Only doable if the clusters don't die
        for k_c in myDManager.clusterk_to_Dname.keys():
            k = myDManager.clusterk_to_thetak[k_c]
            distribution_name = myDManager.clusterk_to_Dname[k_c] # G W
            
            if (distribution_name == "Gaussian"):
                ## Plot the evolution of the mu
                #### Plot the Covariance of the clusters !
                mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = theta_list[indx][k][0], Sigma = theta_list[indx][k][1], Chi2val = 2.4477)
                r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
                gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "-.", lw = 3,
                        AxesStyle = "Normal2",
                       legend = ["Kg(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
            
            elif(distribution_name == "Watson"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
    
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2*np.pi, Nsa)
                Xgrid= np.array([np.cos(Xalpha), np.sin(Xalpha)])
                
                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(np.exp(Wad.Watson_pdf_log(Xgrid[:,i],[mu,kappa]) ))
                
                probs = np.array(probs)
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                
                gl.plot(X1_w,X2_w, 
                     alpha = 1, lw = 3, ls = "-.",legend = ["Kw(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
                
            elif(distribution_name == "vonMisesFisher"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for von Mises-Fisher
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2*np.pi, Nsa)
                Xgrid= np.array([np.cos(Xalpha), np.sin(Xalpha)])
                
                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(np.exp(vMFd.vonMisesFisher_pdf_log(Xgrid[:,i],[mu,kappa]) ))
                    
                probs = np.array(probs)
                probs = probs.reshape((probs.size,1)).T
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                
    #            print X1_w.shape, X2_w.shape
                gl.plot(X1_w,X2_w, 
                     alpha = 1, lw = 3, ls = "-.", legend = ["Kvmf(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
            

        ax1.axis('equal')
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.2, hspace=0.01)
    gl.savefig(folder_images +'Final_State2. K_G:'+str(K_G)+ ', K_W:' + str(K_W) + '.png', 
           dpi = 100, sizeInches = [18, 8])
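A hedged sketch of what bMA.get_gaussian_ellipse_params / bMA.get_ellipse_points presumably compute (the helpers are not shown here): the 2D confidence ellipse of a Gaussian from the eigendecomposition of its covariance. Chi2val = 2.4477 is sqrt(5.991), the 95% chi-square radius for 2 degrees of freedom.

import numpy as np

def gaussian_ellipse_points(mu, Sigma, chi2_val=2.4477, n_points=200):
    # Scale a unit circle by chi2_val * sqrt(eigenvalue) along each principal
    # axis, rotate by the eigenvectors, then shift by the mean.
    eigvals, eigvecs = np.linalg.eigh(Sigma)
    alphas = np.linspace(0, 2 * np.pi, n_points)
    circle = np.stack([np.cos(alphas), np.sin(alphas)])
    pts = (eigvecs @ (chi2_val * np.sqrt(eigvals)[:, None] * circle)).T
    return pts + np.asarray(mu).reshape(1, 2)   # shape (n_points, 2)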
Example no. 13
def generate_images_iterations_ll(Xs,mus,covs, Ks ,myDManager, logl,theta_list,model_theta_list,folder_images_gif):
#    os.remove(folder_images_gif) # Remove previous images if existing
    """
    WARNING: MEANT FOR ONLY 3 Distributions due to the color RGB
    """
    import shutil
    ul.create_folder_if_needed(folder_images_gif)
    shutil.rmtree(folder_images_gif)
    ul.create_folder_if_needed(folder_images_gif)
    ######## Plot the original data #####

    Xdata = np.concatenate(Xs,axis = 1).T
    colors = ["r","b","g"]
    K_G,K_W,K_vMF = Ks
    
    ### FOR EACH ITERATION 
    for i in range(len(theta_list)):  # theta_list
        indx = i
        gl.init_figure()
        ax1 = gl.subplot2grid((1,2), (0,0), rowspan=1, colspan=1)
        
        ## Get the relative ll of the Gaussian denoising cluster.
        ll = myDManager.pdf_log_K(Xdata,theta_list[indx])
        N,K = ll.shape
#        print ll.shape
        for j in range(N):  # For every sample
            # TODO: Can this be done without a for loop?
            # Normalize the probability of the sample being generated by the clusters
            Marginal_xi_probability = gf.sum_logs(ll[j,:])
            ll[j,:] = ll[j,:]- Marginal_xi_probability
        
            ax1 = gl.scatter(Xdata[j,0],Xdata[j,1], labels = ['EM Evolution. Kg:'+str(K_G)+ ', Kw:' + str(K_W) + ', K_vMF:' + str(K_vMF), "X1","X2"], 
                              color = (np.exp(ll[j,1]), np.exp(ll[j,0]), np.exp(ll[j,2])) ,  ###  np.exp(ll[j,2])
                              alpha = 1, nf = 0)
            
        # Only doable if the clusters don't die
        for k_c in myDManager.clusterk_to_Dname.keys():
            k = myDManager.clusterk_to_thetak[k_c]
            distribution_name = myDManager.clusterk_to_Dname[k_c] # G W
            
            if (distribution_name == "Gaussian"):
                ## Plot the evolution of the mu
                #### Plot the Covariance of the clusters !
                mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = theta_list[indx][k][0], Sigma = theta_list[indx][k][1], Chi2val = 2.4477)
                r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
                gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "-.", lw = 3,
                        AxesStyle = "Normal2",
                       legend = ["Kg(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
            
            elif(distribution_name == "Watson"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2*np.pi, Nsa)
                Xgrid= np.array([np.cos(Xalpha), np.sin(Xalpha)])
                
                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(np.exp(Wad.Watson_pdf_log(Xgrid[:,i],[mu,kappa]) ))
                
                probs = np.array(probs)
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                
                gl.plot(X1_w,X2_w, 
                     alpha = 1, lw = 3, ls = "-.", legend = ["Kw(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
                
            elif(distribution_name == "vonMisesFisher"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for von Mises-Fisher
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2*np.pi, Nsa)
                Xgrid= np.array([np.cos(Xalpha), np.sin(Xalpha)])
                
                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(np.exp(vMFd.vonMisesFisher_pdf_log(Xgrid[:,i],[mu,kappa]) ))
                    
                probs = np.array(probs)
                probs = probs.reshape((probs.size,1)).T
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                
    #            print X1_w.shape, X2_w.shape
                gl.plot(X1_w,X2_w, 
                     alpha = 1, lw = 3, ls = "-.", legend = ["Kvmf(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
                
            
        gl.set_zoom(xlim = [-6,6], ylim = [-6,6], ax = ax1)     
        ax2 = gl.subplot2grid((1,2), (0,1), rowspan=1, colspan=1)
        if (indx == 0):
            gl.add_text(positionXY = [0.1,.5], text = r'Initialization Incomplete LogLike: %.2f'%(logl[0]), fontsize = 15)
        elif (indx >= 1):
           
            gl.plot(range(1,np.array(logl).flatten()[1:].size +1),np.array(logl).flatten()[1:(indx+1)], ax = ax2, 
                    legend = ["Iteration %i, Incom LL: %.2f"%(indx, logl[indx])], labels = ["Convergence of LL with generated data","Iterations","LL"], lw = 2)
            gl.scatter(1, logl[1], lw = 2)
            pt = 0.05
            gl.set_zoom(xlim = [0,len(logl)], ylim = [logl[1] - (logl[-1]-logl[1])*pt,logl[-1] + (logl[-1]-logl[1])*pt], ax = ax2)
            
        gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.2, hspace=0.01)
        
        gl.savefig(folder_images_gif +'gif_'+ str(indx) + '.png', 
               dpi = 100, sizeInches = [16, 8], close = "yes",bbox_inches = None)
        
        gl.close("all")
Example no. 14
def generate_gaussian_data(folder_images, plot_original_data, N1 = 200, N2 = 300, N3 = 50):

    
    mu1 = np.array([[0],[0]])
    cov1 = np.array([[0.8,-1.1],
                     [-1.1,1.6]])
    
    mu2 = np.array([[0],[0]])
    cov2 = np.array([[0.3,0.45],
                     [0.45,0.8]])
    mu3 = np.array([[0],[0]])
    cov3 = np.array([[0.1,0.0],
                     [0.0,0.1]])
    
    X1 = np.random.multivariate_normal(mu1.flatten(), cov1, N1).T
    X2 = np.random.multivariate_normal(mu2.flatten(), cov2, N2).T
    X3 = np.random.multivariate_normal(mu3.flatten(), cov3, N3).T

#    samples_X1 = np.array(range(X1.shape[1]))[np.where([X1[0,:] > 0])[0]]
#    samples_X1 = np.where(X1[0,:] > 0)[0] # np.array(range(X1.shape[1]))
#    print samples_X1
#    X1 = X1[:,samples_X1]
#    X2 = np.concatenate((X2,X3),axis = 1)
    
    ######## Plotting #####
    if (plot_original_data):
        gl.init_figure()
        ## First cluster
        ax1 = gl.scatter(X1[0,:],X1[1,:], labels = ["Gaussian Generated Data", "x1","x2"], 
                         legend = ["K = 1"], color = "r",alpha = 0.5)
        mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = mu1, Sigma = cov1, Chi2val = 2.4477)
        r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
        gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "--", lw = 2
                 ,AxesStyle = "Normal2", color = "r")
        
        ## Second cluster
        ax1 = gl.scatter(X2[0,:],X2[1,:], legend = ["K = 2"], color = "b", alpha = 0.5)
        mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = mu2, Sigma = cov2, Chi2val = 2.4477)
        r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
        gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "--", lw = 2,AxesStyle = "Normal2", color = "b")
        
        ## Third cluster
        ax1 = gl.scatter(X3[0,:],X3[1,:], legend = ["K = 3"], color = "g", alpha = 0.5)
        mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = mu3, Sigma = cov3, Chi2val = 2.4477)
        r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
        gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "--", lw = 2,AxesStyle = "Normal2", color = "g")
        
        ax1.axis('equal')

        gl.savefig(folder_images +'Original data.png', 
               dpi = 100, sizeInches = [12, 6])
    
    ############ ESTIMATE THEM ################
    theta1 = Gae.get_Gaussian_muSigma_ML(X1.T, parameters = dict([["Sigma","full"]]))
    print ("mu1:")
    print (theta1[0])
    print ("Sigma1")
    print(theta1[1])
    
    ############## Estimate Likelihood ###################
    ll = Gad.Gaussian_pdf_log(X1, [mu1,cov1])
    ll2 = []
    for i in range(ll.size):
        ll2.append(multivariate_normal.logpdf(X1[:,i], mean=mu1.flatten(), cov=cov1))
    ll2 = np.array(ll2).reshape(ll.shape)
    
    print ("ll ours")
    print (ll.T)
    print ("ll scipy")
    print (ll2.T)
    print ("Difference in ll")
    print ((ll - ll2).T)
    
    ###### Multiple clusters case
    ll_K = Gad.Gaussian_K_pdf_log(X1, [[mu1,cov1],[mu2,cov2]])
    
    if(0):
        X1 = gf.remove_module(X1.T).T
        X2 = gf.remove_module(X2.T).T
        X3 = gf.remove_module(X3.T).T
    Xdata = np.concatenate((X1,X2,X3), axis =1).T

    return X1,X2,X3,Xdata, mu1,mu2,mu3, cov1,cov2, cov3
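A sketch of the maximum-likelihood estimate that Gae.get_Gaussian_muSigma_ML presumably returns for a full covariance (the helper is not shown here): the sample mean and the biased sample covariance.

import numpy as np

def gaussian_ml(X):
    # X: (N, D) samples, one per row.
    mu = X.mean(axis=0)
    Xc = X - mu
    Sigma = Xc.T @ Xc / X.shape[0]   # ML estimate divides by N, not N - 1
    return mu, Sigma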
Example no. 15
def plot_final_distribution(Xs,mus,covs, Ks ,myDManager, logl,theta_list,model_theta_list, folder_images):

    colors = ["r","b","g"]
    K_G,K_W,K_vMF = Ks
    ################## Print the Watson and Gaussian Distribution parameters ###################
    for k_c in myDManager.clusterk_to_Dname.keys():
        k = myDManager.clusterk_to_thetak[k_c]
        distribution_name = myDManager.clusterk_to_Dname[k_c] # G W
        if (distribution_name == "Gaussian"):
            print ("------------ Gaussian Cluster. K = %i--------------------"%k)
            print ("mu")
            print (theta_list[-1][k][0])
            print ("Sigma")
            print (theta_list[-1][k][1])
        elif(distribution_name == "Watson"):
            print ("------------ Watson Cluster. K = %i--------------------"%k)
            print ("mu")
            print (theta_list[-1][k][0])
            print ("Kappa")
            print (theta_list[-1][k][1])
        elif(distribution_name == "vonMisesFisher"):
            print ("------------ vonMisesFisher Cluster. K = %i--------------------"%k)
            print ("mu")
            print (theta_list[-1][k][0])
            print ("Kappa")
            print (theta_list[-1][k][1])
    print ("pimix")
    print (model_theta_list[-1])
    
    mus_Watson_Gaussian = []
    # k_c is the number of the cluster inside the Manager. k is the index in theta
    for k_c in myDManager.clusterk_to_Dname.keys():
        k = myDManager.clusterk_to_thetak[k_c]
        distribution_name = myDManager.clusterk_to_Dname[k_c] # G W
        mus_k = []
        for iter_i in range(len(theta_list)): # For each iteration of the algorithm
            if (distribution_name == "Gaussian"):
                theta_i = theta_list[iter_i][k]
                mus_k.append(theta_i[0])
            elif(distribution_name == "Watson"):
                theta_i = theta_list[iter_i][k]
                mus_k.append(theta_i[0])
            elif(distribution_name == "vonMisesFisher"):
                theta_i = theta_list[iter_i][k]
                mus_k.append(theta_i[0])
                
        mus_k = np.concatenate(mus_k, axis = 1).T
        mus_Watson_Gaussian.append(mus_k)
    

    
    ######## Plot the original data #####
    gl.init_figure()
    ## First cluster
    for xi in range(len( Xs)):
        ## First cluster
        ax1 = gl.scatter(Xs[xi][0,:],Xs[xi][1,:], labels = ['EM Evolution. Kg:'+str(K_G)+ ', Kw:' + str(K_W) + ', K_vMF:' + str(K_vMF), "X1","X2"], 
                          color = colors[xi] ,alpha = 0.2, nf = 0)
        mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = mus[xi], Sigma = covs[xi], Chi2val = 2.4477)
        r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
        gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "--", lw = 2
                 ,AxesStyle = "Normal2", color = colors[xi], alpha = 0.7)

    indx = -1
    # Only doable if the clusters don't die
    Nit,Ndim = mus_Watson_Gaussian[0].shape
    for k_c in myDManager.clusterk_to_Dname.keys():
        k = myDManager.clusterk_to_thetak[k_c]
        distribution_name = myDManager.clusterk_to_Dname[k_c] # G W
        
        if (distribution_name == "Gaussian"):
            ## Plot the evolution of the mu
            #### Plot the Covariance of the clusters !
            mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = theta_list[indx][k][0], Sigma = theta_list[indx][k][1], Chi2val = 2.4477)
            r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
            gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "-.", lw = 3,
                    AxesStyle = "Normal2", 
                    legend = ["Kg(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 


            gl.scatter(mus_Watson_Gaussian[k][:,0], mus_Watson_Gaussian[k][:,1], nf = 0, na = 0, alpha = 0.3, lw = 1,
                          color = "y")
            gl.plot(mus_Watson_Gaussian[k][:,0], mus_Watson_Gaussian[k][:,1], nf = 0, na = 0, alpha = 0.8, lw = 2,
                          color = "y")
        
        elif(distribution_name == "Watson"):
            #### Plot the pdf of the distribution !
            ## Distribution parameters for Watson
            kappa = float(theta_list[indx][k][1])
            mu = theta_list[indx][k][0]

            Nsa = 1000
            # Draw 2D samples as transformation of the angle
            Xalpha = np.linspace(0, 2*np.pi, Nsa)
            Xgrid= np.array([np.cos(Xalpha), np.sin(Xalpha)])
            probs = []  # Vector with probabilities
            for i in range(Nsa):
                probs.append(np.exp(Wad.Watson_pdf_log(Xgrid[:,i],[mu,kappa]) ))
            
            probs = np.array(probs)
            # Plot it in polar coordinates
            X1_w = (1 + probs) * np.cos(Xalpha)
            X2_w = (1 + probs) * np.sin(Xalpha)
            
            gl.plot(X1_w,X2_w, legend = ["Kw(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))] ,
               alpha = 1, lw = 3, ls = "-.")
            
        elif(distribution_name == "vonMisesFisher"):
            #### Plot the pdf of the distribution !
            ## Distribution parameters for von Mises-Fisher
            kappa = float(theta_list[indx][k][1])
            mu = theta_list[indx][k][0]

            Nsa = 1000
            # Draw 2D samples as transformation of the angle
            Xalpha = np.linspace(0, 2*np.pi, Nsa)
            Xgrid= np.array([np.cos(Xalpha), np.sin(Xalpha)])
            
            probs = []  # Vector with probabilities
            for i in range(Nsa):
                probs.append(np.exp(vMFd.vonMisesFisher_pdf_log(Xgrid[:,i],[mu,kappa]) ))
                
            probs = np.array(probs)
            probs = probs.reshape((probs.size,1)).T
            # Plot it in polar coordinates
            X1_w = (1 + probs) * np.cos(Xalpha)
            X2_w = (1 + probs) * np.sin(Xalpha)
            
#            print X1_w.shape, X2_w.shape
            gl.plot(X1_w,X2_w, 
                 alpha = 1, lw = 3, ls = "-.", legend = ["Kvmf(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
                
    ax1.axis('equal')
    gl.savefig(folder_images +'Final_State. K_G:'+str(K_G)+ ', K_W:' + str(K_W)  + ', K_vMF:' + str(K_vMF) + '.png', 
           dpi = 100, sizeInches = [12, 6])
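The directional densities above are drawn as closed curves whose radius is 1 + pdf(angle). A sketch of the same trick with scipy's von Mises distribution standing in for the custom Watson / von Mises-Fisher pdfs (kappa and the mean angle are arbitrary here):

import numpy as np
from scipy.stats import vonmises

kappa, mu_angle = 4.0, 0.0
Xalpha = np.linspace(0, 2 * np.pi, 1000)
probs = vonmises.pdf(Xalpha, kappa, loc=mu_angle)
# Polar trace: a unit circle inflated by the density at each angle
X1_w = (1 + probs) * np.cos(Xalpha)
X2_w = (1 + probs) * np.sin(Xalpha)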
Example no. 16
def create_Bayesian_analysis_charts(model,
                                    X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                                    tr_loss, val_loss, KL_loss,final_loss_tr,final_loss_val,
                                    xgrid_real_func, ygrid_real_func,
                                    folder_images,
                                    epoch_i = None):

    # Configurations of the plots
   
    alpha_points = 0.2 
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ############################# Data computation #######################
    if not isinstance(X_data_tr, list):
        if (X_data_tr.shape[1] == 1): # Regression Example 
            x_grid, all_y_grid,most_likely_ygrid = compute_regression_1D_data( model,X_data_tr,X_data_val, Nsamples = 100)
        elif(X_data_tr.shape[1] == 2):  # Classification Example 
            xx,yy , all_y_grid,most_likely_ygrid = compute_classification_2D_data( model,X_data_tr,X_data_val, Nsamples = 100)
        else:        # RNN
            x_grid, all_y_grid,most_likely_ygrid = compute_RNN_1D_data( model,X_data_tr,X_data_val, Nsamples = 100)
        
    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6,3), (0,0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6,3), (3,0), rowspan=3, colspan=1, sharex = ax1, sharey = ax1)
    
    ax3 = gl.subplot2grid((6,3), (0,1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6,3), (2,1), rowspan=2, colspan=1, sharex = ax3)
    ax5 = gl.subplot2grid((6,3), (4,1), rowspan=2, colspan=1, sharex = ax3)
    
    ax6 = gl.subplot2grid((6,3), (0,2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6,3), (3,2), rowspan=3, colspan=1, sharex = ax6)
    
    if isinstance(X_data_tr, list):
        Xtrain = [torch.tensor(X_data_tr[i],device=model.cf_a.device, dtype=model.cf_a.dtype) for i in range(len(X_data_tr))]
        Ytrain = torch.tensor(Y_data_tr,device=model.cf_a.device, dtype=torch.int64)
        
        Xval = [torch.tensor(X_data_val[i],device=model.cf_a.device, dtype=model.cf_a.dtype) for i in range(len(X_data_val))]
        Yval = torch.tensor(Y_data_val,device=model.cf_a.device, dtype=torch.int64)

        confusion = model.get_confusion_matrix(Xtrain, Ytrain)
        plot_confusion_matrix(confusion,model.languages, ax1 )
        confusion = model.get_confusion_matrix(Xval, Yval)
        plot_confusion_matrix(confusion,model.languages, ax2 )

    else:
        if (X_data_tr.shape[1] == 1): # Regression Example 
            plot_data_regression_1d_2axes(X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func, X_data_val, Y_data_val,
                                              x_grid,all_y_grid, most_likely_ygrid,
                                              alpha_points, color_points_train, color_points_val, color_most_likey,color_mean,color_truth,
                                              ax1,ax2)
        elif(X_data_tr.shape[1] == 2): # Classification Example 
            plot_data_classification_2d_2axes(X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func, X_data_val, Y_data_val,
                                               xx,yy,all_y_grid, most_likely_ygrid,
                                              alpha_points, color_points_train, color_points_val, color_most_likey,color_mean, color_truth,
                                              ax1,ax2)
        else:       # RNN example
            plot_data_RNN_1d_2axes(X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func, X_data_val, Y_data_val,
                                              x_grid,all_y_grid, most_likely_ygrid,
                                              alpha_points, color_points_train, color_points_val, color_most_likey,color_mean,color_truth,
                                              ax1,ax2)
 
#    gl.fill_between (x_grid, [mean_samples_grid + 2*std_samples_grid, mean_samples_grid - 2*std_samples_grid]
#                              , ax  = ax2, alpha = 0.10, color = "b", legend = ["Mean realizaions"])
    ## ax2: The uncertainty of the prediction !!
#    gl.plot (x_grid, std_samples_grid, ax = ax2, labels = ["Std (%i)"%(Nsamples),"X","f(X)"], legend = [" std predictions"], fill = 1, alpha = 0.3)
    
    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([], tr_loss, ax = ax3, lw = 3, labels = ["Losses", "","Data loss"], legend = ["train"],
            color = color_train_loss)
    gl.plot([], val_loss,ax = ax3, lw = 3, legend = ["validation"],
            color = color_val_loss,  AxesStyle = "Normal - No xaxis")
    
    ## ax4: The evolution of the KL loss
    gl.plot([], KL_loss, ax = ax4, lw = 3, labels = ["", "","KL loss"], legend = ["Bayesian Weights"],
            AxesStyle = "Normal - No xaxis", color = "k")

    ## ax5: Evolution of the total loss
    gl.plot([], final_loss_tr, ax = ax5, lw = 3, labels = ["", "epoch","Total Loss (Bayes)"], legend = ["train"],
            color = color_train_loss)
    gl.plot([], final_loss_val,ax = ax5, lw = 3, legend = ["validation"], color = color_val_loss)
           
    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model,ax6,ax7)
    ## Plot in chart 7 the acceptable region mu = 2*sigma -> sigma = |mu|/2
    mu_grid = np.linspace(-3,3,100)
    y_grid = np.abs(mu_grid)/2
    
    gl.fill_between(mu_grid, 10*np.ones(mu_grid.size), y_grid,
                    alpha = 0.2, color = "r", ax = ax7, legend = ["95% non-significant"])
    
    gl.set_zoom (ax = ax6, ylim = [-0.1,10])
    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.05, np.exp(model.cf_a.input_layer_prior["log_sigma2"])*(1 + 0.15)])
    
#    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,2])
    
    # Set final properties and save figure
    gl.set_fontSizes(ax = [ax1,ax2,ax3,ax4,ax5,ax6,ax7], title = 20, xlabel = 20, ylabel = 20, 
                      legend = 10, xticks = 12, yticks = 12)


    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.10)
    
    if epoch_i is None:
        gl.savefig(folder_images +"../"+'Final_values_regression_1D_' +str(model.cf_a.eta_KL) +'.png', 
                   dpi = 100, sizeInches = [20, 10])
    else:
        gl.savefig(folder_images +'%i.png'%epoch_i, 
                   dpi = 100, sizeInches = [20, 10], close = True, bbox_inches = "tight")
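The shaded "95% non-significant" region encodes a simple rule: a variational weight with posterior N(mu, sigma^2) is treated as non-significant when sigma > |mu| / 2, i.e. when zero lies within two standard deviations of the posterior mean. As a sketch:

import numpy as np

def non_significant_mask(mu, sigma):
    # True where zero is within two standard deviations of the posterior mean.
    return np.asarray(sigma) > np.abs(mu) / 2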
Example no. 17
def create_Bayesian_analysis_charts_simplified(model, train_dataset, validation_dataset,
                                    tr_loss, val_loss, KL_loss,
                                    folder_images,
                                    epoch_i = None):

    # Configurations of the plots
    alpha_points = 0.2 
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6,3), (0,0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6,3), (3,0), rowspan=3, colspan=1, sharex = ax1, sharey = ax1)
    
    ax3 = gl.subplot2grid((6,3), (0,1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6,3), (2,1), rowspan=2, colspan=1, sharex = ax3)
    ax5 = gl.subplot2grid((6,3), (4,1), rowspan=2, colspan=1, sharex = ax3)
    
    ax6 = gl.subplot2grid((6,3), (0,2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6,3), (3,2), rowspan=3, colspan=1, sharex = ax6)
    
    
    ####### ax1, ax2: Get confusion matrices ##########

    labels_classes, confusion = model.get_confusion_matrix(train_dataset)
    plot_confusion_matrix(confusion,labels_classes, ax1 )
    labels_classes, confusion = model.get_confusion_matrix(validation_dataset)
    plot_confusion_matrix(confusion,labels_classes, ax2 )
        
    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([], tr_loss, ax = ax3, lw = 3, labels = ["Losses", "","Data loss (MSE)"], legend = ["train"],
            color = color_train_loss)
    gl.plot([], val_loss,ax = ax3, lw = 3, legend = ["validation"],
            color = color_val_loss,  AxesStyle = "Normal - No xaxis")
    
    ## ax4: The evolution of the KL loss
    gl.plot([], KL_loss, ax = ax4, lw = 3, labels = ["", "","KL loss"], legend = ["Bayesian Weights"],
            AxesStyle = "Normal - No xaxis", color = "k")

    ## ax5: Evolution of the total loss
    gl.plot([], tr_loss, ax = ax5, lw = 3, labels = ["", "epoch","Total Loss (Bayes)"], legend = ["train"],
            color = color_train_loss)
    gl.plot([], val_loss,ax = ax5, lw = 3, legend = ["validation"], color = color_val_loss)
           
    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model,ax6,ax7)

    gl.set_zoom (ax = ax6, ylim = [-0.1,10])
    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,0.5])
    
    # Set final properties and save figure
    gl.set_fontSizes(ax = [ax1,ax2,ax3,ax4,ax5,ax6,ax7], title = 20, xlabel = 20, ylabel = 20, 
                      legend = 10, xticks = 12, yticks = 12)


    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.10)
    
    if epoch_i is None:
        gl.savefig(folder_images +'Training_Example_Data_Bayesian.png', 
                   dpi = 100, sizeInches = [20, 10])
    else:
        gl.savefig(folder_images +'%i.png'%epoch_i, 
                   dpi = 100, sizeInches = [20, 10], close = True, bbox_inches = "tight")
Example no. 18
def create_Bayesian_analysis_charts(model,
                                    X_data_tr,
                                    Y_data_tr,
                                    X_data_val,
                                    Y_data_val,
                                    tr_loss,
                                    val_loss,
                                    KL_loss,
                                    final_loss_tr,
                                    final_loss_val,
                                    xgrid_real_func,
                                    ygrid_real_func,
                                    folder_images,
                                    epoch_i=None):

    # Configurations of the plots

    alpha_points = 0.2
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ############################# Data computation #######################
    if not isinstance(X_data_tr, list):
        if (X_data_tr.shape[1] == 1):  # Regression Example
            x_grid, all_y_grid, most_likely_ygrid = compute_regression_1D_data(
                model, X_data_tr, X_data_val, Nsamples=100)
        elif (X_data_tr.shape[1] == 2):  # Classification Example
            xx, yy, all_y_grid, most_likely_ygrid = compute_classification_2D_data(
                model, X_data_tr, X_data_val, Nsamples=100)
        else:  # RNN
            x_grid, all_y_grid, most_likely_ygrid = compute_RNN_1D_data(
                model, X_data_tr, X_data_val, Nsamples=100)

    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6, 3), (0, 0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6, 3), (3, 0),
                          rowspan=3,
                          colspan=1,
                          sharex=ax1,
                          sharey=ax1)

    ax3 = gl.subplot2grid((6, 3), (0, 1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6, 3), (2, 1), rowspan=2, colspan=1, sharex=ax3)
    ax5 = gl.subplot2grid((6, 3), (4, 1), rowspan=2, colspan=1, sharex=ax3)

    ax6 = gl.subplot2grid((6, 3), (0, 2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6, 3), (3, 2), rowspan=3, colspan=1, sharex=ax6)

    if isinstance(X_data_tr, list):
        Xtrain = [
            torch.tensor(X_data_tr[i],
                         device=model.cf_a.device,
                         dtype=model.cf_a.dtype) for i in range(len(X_data_tr))
        ]
        Ytrain = torch.tensor(Y_data_tr,
                              device=model.cf_a.device,
                              dtype=torch.int64)

        Xval = [
            torch.tensor(X_data_val[i],
                         device=model.cf_a.device,
                         dtype=model.cf_a.dtype)
            for i in range(len(X_data_val))
        ]
        Yval = torch.tensor(Y_data_val,
                            device=model.cf_a.device,
                            dtype=torch.int64)

        confusion = model.get_confusion_matrix(Xtrain, Ytrain)
        plot_confusion_matrix(confusion, model.languages, ax1)
        confusion = model.get_confusion_matrix(Xval, Yval)
        plot_confusion_matrix(confusion, model.languages, ax2)

    else:
        if (X_data_tr.shape[1] == 1):  # Regression Example
            plot_data_regression_1d_2axes(
                X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func,
                X_data_val, Y_data_val, x_grid, all_y_grid, most_likely_ygrid,
                alpha_points, color_points_train, color_points_val,
                color_most_likey, color_mean, color_truth, ax1, ax2)
        elif (X_data_tr.shape[1] == 2):  # Classification Example
            plot_data_classification_2d_2axes(
                X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func,
                X_data_val, Y_data_val, xx, yy, all_y_grid, most_likely_ygrid,
                alpha_points, color_points_train, color_points_val,
                color_most_likey, color_mean, color_truth, ax1, ax2)
        else:  # RNN example
            plot_data_RNN_1d_2axes(X_data_tr, Y_data_tr, xgrid_real_func,
                                   ygrid_real_func, X_data_val, Y_data_val,
                                   x_grid, all_y_grid, most_likely_ygrid,
                                   alpha_points, color_points_train,
                                   color_points_val, color_most_likey,
                                   color_mean, color_truth, ax1, ax2)


#    gl.fill_between (x_grid, [mean_samples_grid + 2*std_samples_grid, mean_samples_grid - 2*std_samples_grid]
#                              , ax  = ax2, alpha = 0.10, color = "b", legend = ["Mean realizaions"])
    ## ax2: The uncertainty of the prediction !!
#    gl.plot (x_grid, std_samples_grid, ax = ax2, labels = ["Std (%i)"%(Nsamples),"X","f(X)"], legend = [" std predictions"], fill = 1, alpha = 0.3)

    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([],
            tr_loss,
            ax=ax3,
            lw=3,
            labels=["Losses", "", "Data loss"],
            legend=["train"],
            color=color_train_loss)
    gl.plot([],
            val_loss,
            ax=ax3,
            lw=3,
            legend=["validation"],
            color=color_val_loss,
            AxesStyle="Normal - No xaxis")

    ## ax4: The evolution of the KL loss
    gl.plot([],
            KL_loss,
            ax=ax4,
            lw=3,
            labels=["", "", "KL loss"],
            legend=["Bayesian Weights"],
            AxesStyle="Normal - No xaxis",
            color="k")

    ## ax5: Evolution of the total loss
    gl.plot([],
            final_loss_tr,
            ax=ax5,
            lw=3,
            labels=["", "epoch", "Total Loss (Bayes)"],
            legend=["train"],
            color=color_train_loss)
    gl.plot([],
            final_loss_val,
            ax=ax5,
            lw=3,
            legend=["validation"],
            color=color_val_loss)

    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model, ax6, ax7)
    ## Plot in chart 7 the acceptable region mu = 2*sigma -> sigma = |mu|/2
    mu_grid = np.linspace(-3, 3, 100)
    y_grid = np.abs(mu_grid) / 2

    gl.fill_between(mu_grid,
                    10 * np.ones(mu_grid.size),
                    y_grid,
                    alpha=0.2,
                    color="r",
                    ax=ax7,
                    legend=["95% non-significant"])

    gl.set_zoom(ax=ax6, ylim=[-0.1, 10])
    gl.set_zoom(ax=ax7,
                xlim=[-2.5, 2.5],
                ylim=[
                    -0.05,
                    np.exp(model.cf_a.input_layer_prior["log_sigma2"]) *
                    (1 + 0.15)
                ])

    #    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,2])

    # Set final properties and save figure
    gl.set_fontSizes(ax=[ax1, ax2, ax3, ax4, ax5, ax6, ax7],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=10,
                     xticks=12,
                     yticks=12)

    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.30,
                       hspace=0.10)

    if epoch_i is None:
        gl.savefig(folder_images + "../" + 'Final_values_regression_1D_' +
                   str(model.cf_a.eta_KL) + '.png',
                   dpi=100,
                   sizeInches=[20, 10])
    else:
        gl.savefig(folder_images + '%i.png' % epoch_i,
                   dpi=100,
                   sizeInches=[20, 10],
                   close=True,
                   bbox_inches="tight")
Example no. 19
def visualize_attention_matrix(question_tokens, passage_tokens, attention_matrix,
                               image_path):
        """
            Text to visualze attention map for.a given exmaple.
            
            question_tokens: List of tokens of the question
            passage_tokens: List of tokens of the passage
            attention_matrix: len(passage) x len(question) matrix with the probabilities 
        """
        
        f = gl.init_figure()
        ax = f.add_axes([0.1, 0.3, 0.8, 0.5])
        ax_attention_words = f.add_axes([0.1, 0.70, 0.8, 0.15])
        ax_attention_words.axis('off')
        
        
        # add image
        cmap = "binary" #cm.get_cmap('coolwarm', 30)
        i = ax.imshow(attention_matrix, interpolation='nearest', cmap=cmap,vmin=0, vmax=1)

        # add colorbar
        cbaxes = f.add_axes([0.95, 0.3, 0.02, 0.5])
        cbar = f.colorbar(i, cax=cbaxes, orientation='vertical')
        cbar.ax.set_xlabel('Probability', labelpad=6)

        # add labels
        ax.set_yticks(range(len(question_tokens)))
        ax.set_yticklabels(question_tokens)
        
        ax.set_xticks(range(len(passage_tokens)))
        ax.set_xticklabels(passage_tokens, rotation=80)
        
        ax.set_xlabel('Passage')
        ax.set_ylabel('Question')
        
        ###########  GET THE MOST ATTENTION WORDS ########
        Nmax_attention_words = 3
        z = (-attention_matrix).argsort(axis = 1)  # passage indices by decreasing attention, per question token
        
        attentioned_passage_words = []
        for i in range (len(question_tokens)):
            attentioned_passage_words.append([])
            for j in range(Nmax_attention_words):
                attentioned_passage_words[-1].append(passage_tokens[z[i,j]] + "(%.1f%%)"%(attention_matrix[i,z[i,j]]*100))
            attentioned_passage_words[-1] = ", ".join(attentioned_passage_words[-1])

        
        text_correspondance = ""
        for i in range (len(question_tokens)):
            text_correspondance += question_tokens[i] + " ---> " + attentioned_passage_words[i] + "\n"
        
        ax_attention_words.text(0,0,text_correspondance)
#        ax2.yaxis.tick_right()
#        ax2.yaxis.set_label_position("right")
        
        f.show()
#        gl.set_fontSizes(ax = [ax1,ax2], title = 20, xlabel = 15, ylabel = 18, 
#                          legend = 12, xticks = 14, yticks = 14)
        gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.20, hspace=0.10)
        
        gl.savefig(image_path,  dpi = 100, sizeInches = [10, 6], close = False, bbox_inches = "tight") 
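A hedged usage sketch for the function above, with a toy attention matrix whose rows (one per question token) are probability distributions over the passage; the output file name is made up:

import numpy as np

# Toy input: 2 question tokens attending over 4 passage tokens;
# each row sums to 1.
question_tokens = ["who", "won"]
passage_tokens = ["the", "red", "team", "won"]
attention_matrix = np.array([[0.10, 0.20, 0.60, 0.10],
                             [0.05, 0.05, 0.10, 0.80]])

visualize_attention_matrix(question_tokens, passage_tokens,
                           attention_matrix, "attention_toy.png")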
Esempio n. 20
0
def generate_images_iterations_ll(Xs, mus, covs, Ks, myDManager, logl,
                                  theta_list, model_theta_list,
                                  folder_images_gif):
    #    os.remove(folder_images_gif) # Remove previous images if existing
    """
    WARNING: MEANT FOR ONLY 3 Distributions due to the color RGB
    """
    import shutil
    ul.create_folder_if_needed(folder_images_gif)
    shutil.rmtree(folder_images_gif)
    ul.create_folder_if_needed(folder_images_gif)
    ######## Plot the original data #####

    Xdata = np.concatenate(Xs, axis=1).T
    colors = ["r", "b", "g"]
    K_G, K_W, K_vMF = Ks

    ### FOR EACH ITERATION
    for i in range(len(theta_list)):  # theta_list
        indx = i
        gl.init_figure()
        ax1 = gl.subplot2grid((1, 2), (0, 0), rowspan=1, colspan=1)

        ## Get the per-cluster log-likelihood of every sample.
        ll = myDManager.pdf_log_K(Xdata, theta_list[indx])
        N, K = ll.shape
        #        print ll.shape
        for j in range(N):  # For every sample
            # TODO: can this be done without a for loop?
            # Normalize the probability of the sample being generated by the clusters
            Marginal_xi_probability = gf.sum_logs(ll[j, :])
            ll[j, :] = ll[j, :] - Marginal_xi_probability

            ax1 = gl.scatter(
                Xdata[j, 0],
                Xdata[j, 1],
                labels=[
                    'EM Evolution. Kg:' + str(K_G) + ', Kw:' + str(K_W) +
                    ', K_vMF:' + str(K_vMF), "X1", "X2"
                ],
                color=(np.exp(ll[j, 1]), np.exp(ll[j, 0]),
                       np.exp(ll[j, 2])),  ###  np.exp(ll[j,2])
                alpha=1,
                nf=0)

        # Only doable if the clusters don't die
        for k_c in myDManager.clusterk_to_Dname.keys():
            k = myDManager.clusterk_to_thetak[k_c]
            distribution_name = myDManager.clusterk_to_Dname[k_c]  # G W

            if (distribution_name == "Gaussian"):
                ## Plot the evolution of the mu
                #### Plot the Covariance of the clusters !
                mean, w, h, theta = bMA.get_gaussian_ellipse_params(
                    mu=theta_list[indx][k][0],
                    Sigma=theta_list[indx][k][1],
                    Chi2val=2.4477)
                r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
                gl.plot(r_ellipse[:, 0],
                        r_ellipse[:, 1],
                        ax=ax1,
                        ls="-.",
                        lw=3,
                        AxesStyle="Normal2",
                        legend=[
                            "Kg(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

            elif (distribution_name == "Watson"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(
                        np.exp(Wad.Watson_pdf_log(Xgrid[:, i], [mu, kappa])))

                probs = np.array(probs)
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)

                gl.plot(X1_w,
                        X2_w,
                        alpha=1,
                        lw=3,
                        ls="-.",
                        legend=[
                            "Kw(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

            elif (distribution_name == "vonMisesFisher"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for von Mises-Fisher
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(
                        np.exp(
                            vMFd.vonMisesFisher_pdf_log(
                                Xgrid[:, i], [mu, kappa])))

                probs = np.array(probs)
                probs = probs.reshape((probs.size, 1)).T
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)

                #            print X1_w.shape, X2_w.shape
                gl.plot(X1_w,
                        X2_w,
                        alpha=1,
                        lw=3,
                        ls="-.",
                        legend=[
                            "Kvmf(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

        gl.set_zoom(xlim=[-6, 6], ylim=[-6, 6], ax=ax1)
        ax2 = gl.subplot2grid((1, 2), (0, 1), rowspan=1, colspan=1)
        if (indx == 0):
            gl.add_text(positionXY=[0.1, .5],
                        text=r' Initialization Incomplete LogLike: %.2f' %
                        (logl[0]),
                        fontsize=15)
            pass
        elif (indx >= 1):

            gl.plot(
                range(1, indx + 1),
                np.array(logl).flatten()[1:(indx + 1)],
                ax=ax2,
                legend=["Iteration %i, Incom LL: %.2f" % (indx, logl[indx])],
                labels=[
                    "Convergence of LL with generated data", "Iterations", "LL"
                ],
                lw=2)
            gl.scatter(1, logl[1], lw=2)
            pt = 0.05
            gl.set_zoom(xlim=[0, len(logl)],
                        ylim=[
                            logl[1] - (logl[-1] - logl[1]) * pt,
                            logl[-1] + (logl[-1] - logl[1]) * pt
                        ],
                        ax=ax2)

        gl.subplots_adjust(left=.09,
                           bottom=.10,
                           right=.90,
                           top=.95,
                           wspace=.2,
                           hspace=0.01)

        gl.savefig(folder_images_gif + 'gif_' + str(indx) + '.png',
                   dpi=100,
                   sizeInches=[16, 8],
                   close="yes",
                   bbox_inches=None)

        gl.close("all")
Esempio n. 21
0
def plot_multiple_iterations(Xs, mus, covs, Ks, myDManager, logl, theta_list,
                             model_theta_list, folder_images):
    ######## Plot the original data #####
    gl.init_figure()
    gl.set_subplots(2, 3)
    Ngraph = 6

    colors = ["r", "b", "g"]
    K_G, K_W, K_vMF = Ks

    for i in range(Ngraph):
        indx = int(i * ((len(theta_list) - 1) / float(Ngraph - 1)))
        nf = 1
        for xi in range(len(Xs)):
            ## First cluster
            labels = [
                'EM Evolution. Kg:' + str(K_G) + ', Kw:' + str(K_W) +
                ', K_vMF:' + str(K_vMF), "X1", "X2"
            ]
            ax1 = gl.scatter(Xs[xi][0, :],
                             Xs[xi][1, :],
                             labels=["", "", ""],
                             color=colors[xi],
                             alpha=0.2,
                             nf=nf)
            nf = 0
            mean, w, h, theta = bMA.get_gaussian_ellipse_params(mu=mus[xi],
                                                                Sigma=covs[xi],
                                                                Chi2val=2.4477)
            r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
            gl.plot(r_ellipse[:, 0],
                    r_ellipse[:, 1],
                    ax=ax1,
                    ls="--",
                    lw=2,
                    AxesStyle="Normal2",
                    color=colors[xi],
                    alpha=0.7)

        # Only doable if the clusters don't die
        for k_c in myDManager.clusterk_to_Dname.keys():
            k = myDManager.clusterk_to_thetak[k_c]
            distribution_name = myDManager.clusterk_to_Dname[k_c]  # G W

            if (distribution_name == "Gaussian"):
                ## Plot the evolution of the mu
                #### Plot the Covariance of the clusters !
                mean, w, h, theta = bMA.get_gaussian_ellipse_params(
                    mu=theta_list[indx][k][0],
                    Sigma=theta_list[indx][k][1],
                    Chi2val=2.4477)
                r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
                gl.plot(r_ellipse[:, 0],
                        r_ellipse[:, 1],
                        ax=ax1,
                        ls="-.",
                        lw=3,
                        AxesStyle="Normal2",
                        legend=[
                            "Kg(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

            elif (distribution_name == "Watson"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]

                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(
                        np.exp(Wad.Watson_pdf_log(Xgrid[:, i], [mu, kappa])))

                probs = np.array(probs)
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)

                gl.plot(X1_w,
                        X2_w,
                        alpha=1,
                        lw=3,
                        ls="-.",
                        legend=[
                            "Kw(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

            elif (distribution_name == "vonMisesFisher"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for von Mises-Fisher
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(
                        np.exp(
                            vMFd.vonMisesFisher_pdf_log(
                                Xgrid[:, i], [mu, kappa])))

                probs = np.array(probs)
                probs = probs.reshape((probs.size, 1)).T
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)

                #            print X1_w.shape, X2_w.shape
                gl.plot(X1_w,
                        X2_w,
                        alpha=1,
                        lw=3,
                        ls="-.",
                        legend=[
                            "Kvmf(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

        ax1.axis('equal')
    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.2,
                       hspace=0.01)
    gl.savefig(folder_images + 'Final_State2. K_G:' + str(K_G) + ', K_W:' +
               str(K_W) + '.png',
               dpi=100,
               sizeInches=[18, 8])
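The Watson and von Mises-Fisher branches above draw a directional pdf by pushing the unit circle out to radius 1 + p(theta). A self-contained sketch of the same trick using plain matplotlib and scipy's von Mises distribution (parameters chosen arbitrarily):

import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import vonmises

# Radial trick: draw a directional pdf as the curve
# (1 + p(theta)) * (cos(theta), sin(theta)) around the unit circle.
theta = np.linspace(0, 2 * np.pi, 1000)
p = vonmises.pdf(theta, kappa=4, loc=np.pi / 3)

plt.plot(np.cos(theta), np.sin(theta), "k--", lw=1)          # unit circle
plt.plot((1 + p) * np.cos(theta), (1 + p) * np.sin(theta))   # pdf bump
plt.axis("equal")
plt.show()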
Esempio n. 22
0
def plot_final_distribution(Xs, mus, covs, Ks, myDManager, logl, theta_list,
                            model_theta_list, folder_images):

    colors = ["r", "b", "g"]
    K_G, K_W, K_vMF = Ks
    ################## Print the Watson and Gaussian Distribution parameters ###################
    for k_c in myDManager.clusterk_to_Dname.keys():
        k = myDManager.clusterk_to_thetak[k_c]
        distribution_name = myDManager.clusterk_to_Dname[k_c]  # G W
        if (distribution_name == "Gaussian"):
            print("------------ Gaussian Cluster. K = %i--------------------" %
                  k)
            print("mu")
            print(theta_list[-1][k][0])
            print("Sigma")
            print(theta_list[-1][k][1])
        elif (distribution_name == "Watson"):
            print("------------ Watson Cluster. K = %i--------------------" %
                  k)
            print("mu")
            print(theta_list[-1][k][0])
            print("Kappa")
            print(theta_list[-1][k][1])
        elif (distribution_name == "vonMisesFisher"):
            print(
                "------------ vonMisesFisher Cluster. K = %i--------------------"
                % k)
            print("mu")
            print(theta_list[-1][k][0])
            print("Kappa")
            print(theta_list[-1][k][1])
    print("pimix")
    print(model_theta_list[-1])

    mus_Watson_Gaussian = []
    # k_c is the number of the cluster inside the Manager. k is the index in theta
    for k_c in myDManager.clusterk_to_Dname.keys():
        k = myDManager.clusterk_to_thetak[k_c]
        distribution_name = myDManager.clusterk_to_Dname[k_c]  # G W
        mus_k = []
        for iter_i in range(
                len(theta_list)):  # For each iteration of the algorithm
            if (distribution_name == "Gaussian"):
                theta_i = theta_list[iter_i][k]
                mus_k.append(theta_i[0])
            elif (distribution_name == "Watson"):
                theta_i = theta_list[iter_i][k]
                mus_k.append(theta_i[0])
            elif (distribution_name == "vonMisesFisher"):
                theta_i = theta_list[iter_i][k]
                mus_k.append(theta_i[0])

        mus_k = np.concatenate(mus_k, axis=1).T
        mus_Watson_Gaussian.append(mus_k)

    ######## Plot the original data #####
    gl.init_figure()
    ## First cluster
    for xi in range(len(Xs)):
        ## First cluster
        ax1 = gl.scatter(Xs[xi][0, :],
                         Xs[xi][1, :],
                         labels=[
                             'EM Evolution. Kg:' + str(K_G) + ', Kw:' +
                             str(K_W) + ', K_vMF:' + str(K_vMF), "X1", "X2"
                         ],
                         color=colors[xi],
                         alpha=0.2,
                         nf=0)
        mean, w, h, theta = bMA.get_gaussian_ellipse_params(mu=mus[xi],
                                                            Sigma=covs[xi],
                                                            Chi2val=2.4477)
        r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
        gl.plot(r_ellipse[:, 0],
                r_ellipse[:, 1],
                ax=ax1,
                ls="--",
                lw=2,
                AxesStyle="Normal2",
                color=colors[xi],
                alpha=0.7)

    indx = -1
    # Only doable if the clusters don't die
    Nit, Ndim = mus_Watson_Gaussian[0].shape
    for k_c in myDManager.clusterk_to_Dname.keys():
        k = myDManager.clusterk_to_thetak[k_c]
        distribution_name = myDManager.clusterk_to_Dname[k_c]  # G W

        if (distribution_name == "Gaussian"):
            ## Plot the evolution of the mu
            #### Plot the Covariance of the clusters !
            mean, w, h, theta = bMA.get_gaussian_ellipse_params(
                mu=theta_list[indx][k][0],
                Sigma=theta_list[indx][k][1],
                Chi2val=2.4477)
            r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
            gl.plot(r_ellipse[:, 0],
                    r_ellipse[:, 1],
                    ax=ax1,
                    ls="-.",
                    lw=3,
                    AxesStyle="Normal2",
                    legend=[
                        "Kg(%i). pi:%0.2f" %
                        (k, float(model_theta_list[indx][0][0, k]))
                    ])

            gl.scatter(mus_Watson_Gaussian[k][:, 0],
                       mus_Watson_Gaussian[k][:, 1],
                       nf=0,
                       na=0,
                       alpha=0.3,
                       lw=1,
                       color="y")
            gl.plot(mus_Watson_Gaussian[k][:, 0],
                    mus_Watson_Gaussian[k][:, 1],
                    nf=0,
                    na=0,
                    alpha=0.8,
                    lw=2,
                    color="y")

        elif (distribution_name == "Watson"):
            #### Plot the pdf of the distribution !
            ## Distribution parameters for Watson
            kappa = float(theta_list[indx][k][1])
            mu = theta_list[indx][k][0]

            Nsa = 1000
            # Draw 2D samples as transformation of the angle
            Xalpha = np.linspace(0, 2 * np.pi, Nsa)
            Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])
            probs = []  # Vector with probabilities
            for i in range(Nsa):
                probs.append(
                    np.exp(Wad.Watson_pdf_log(Xgrid[:, i], [mu, kappa])))

            probs = np.array(probs)
            # Plot it in polar coordinates
            X1_w = (1 + probs) * np.cos(Xalpha)
            X2_w = (1 + probs) * np.sin(Xalpha)

            gl.plot(X1_w,
                    X2_w,
                    legend=[
                        "Kw(%i). pi:%0.2f" %
                        (k, float(model_theta_list[indx][0][0, k]))
                    ],
                    alpha=1,
                    lw=3,
                    ls="-.")

        elif (distribution_name == "vonMisesFisher"):
            #### Plot the pdf of the distribution !
            ## Distribution parameters for von Mises-Fisher
            kappa = float(theta_list[indx][k][1])
            mu = theta_list[indx][k][0]

            Nsa = 1000
            # Draw 2D samples as transformation of the angle
            Xalpha = np.linspace(0, 2 * np.pi, Nsa)
            Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

            probs = []  # Vector with probabilities
            for i in range(Nsa):
                probs.append(
                    np.exp(
                        vMFd.vonMisesFisher_pdf_log(Xgrid[:, i], [mu, kappa])))

            probs = np.array(probs)
            probs = probs.reshape((probs.size, 1)).T
            # Plot it in polar coordinates
            X1_w = (1 + probs) * np.cos(Xalpha)
            X2_w = (1 + probs) * np.sin(Xalpha)

            #            print X1_w.shape, X2_w.shape
            gl.plot(X1_w,
                    X2_w,
                    alpha=1,
                    lw=3,
                    ls="-.",
                    legend=[
                        "Kvmf(%i). pi:%0.2f" %
                        (k, float(model_theta_list[indx][0][0, k]))
                    ])

    ax1.axis('equal')
    gl.savefig(folder_images + 'Final_State. K_G:' + str(K_G) + ', K_W:' +
               str(K_W) + ', K_vMF:' + str(K_vMF) + '.png',
               dpi=100,
               sizeInches=[12, 6])
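The Chi2val=2.4477 used throughout is sqrt(chi2.ppf(0.95, df=2)), i.e. it selects the ~95% confidence ellipse of a 2D Gaussian. A sketch of what bMA.get_gaussian_ellipse_params and get_ellipse_points presumably compute together (an assumption; that library's code is not shown here):

import numpy as np

def gaussian_ellipse_points(mu, Sigma, chi_val=2.4477, n=200):
    """Points on the ~95% confidence ellipse of a 2D Gaussian;
    chi_val = sqrt(chi2.ppf(0.95, df=2)) ~ 2.4477."""
    vals, vecs = np.linalg.eigh(Sigma)            # principal axes
    t = np.linspace(0, 2 * np.pi, n)
    circle = np.stack([np.cos(t), np.sin(t)])     # unit circle, shape (2, n)
    # Scale each axis by chi_val * sqrt(eigenvalue), then rotate and translate.
    pts = vecs @ (chi_val * np.sqrt(vals)[:, None] * circle)
    return (pts + np.asarray(mu).reshape(2, 1)).T  # shape (n, 2)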
Esempio n. 23
0
def generate_gaussian_data(folder_images,
                           plot_original_data,
                           N1=200,
                           N2=300,
                           N3=50):

    mu1 = np.array([[0], [0]])
    cov1 = np.array([[0.8, -1.1], [-1.1, 1.6]])

    mu2 = np.array([[0], [0]])
    cov2 = np.array([[0.3, 0.45], [0.45, 0.8]])
    mu3 = np.array([[0], [0]])
    cov3 = np.array([[0.1, 0.0], [0.0, 0.1]])

    X1 = np.random.multivariate_normal(mu1.flatten(), cov1, N1).T
    X2 = np.random.multivariate_normal(mu2.flatten(), cov2, N2).T
    X3 = np.random.multivariate_normal(mu3.flatten(), cov3, N3).T

    #    samples_X1 = np.array(range(X1.shape[1]))[np.where([X1[0,:] > 0])[0]]
    #    samples_X1 = np.where(X1[0,:] > 0)[0] # np.array(range(X1.shape[1]))
    #    print samples_X1
    #    X1 = X1[:,samples_X1]
    #    X2 = np.concatenate((X2,X3),axis = 1)

    ######## Plotting #####
    if (plot_original_data):
        gl.init_figure()
        ## First cluster
        ax1 = gl.scatter(X1[0, :],
                         X1[1, :],
                         labels=["Gaussian Generated Data", "x1", "x2"],
                         legend=["K = 1"],
                         color="r",
                         alpha=0.5)
        mean, w, h, theta = bMA.get_gaussian_ellipse_params(mu=mu1,
                                                            Sigma=cov1,
                                                            Chi2val=2.4477)
        r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
        gl.plot(r_ellipse[:, 0],
                r_ellipse[:, 1],
                ax=ax1,
                ls="--",
                lw=2,
                AxesStyle="Normal2",
                color="r")

        ## Second cluster
        ax1 = gl.scatter(X2[0, :],
                         X2[1, :],
                         legend=["K = 2"],
                         color="b",
                         alpha=0.5)
        mean, w, h, theta = bMA.get_gaussian_ellipse_params(mu=mu2,
                                                            Sigma=cov2,
                                                            Chi2val=2.4477)
        r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
        gl.plot(r_ellipse[:, 0],
                r_ellipse[:, 1],
                ax=ax1,
                ls="--",
                lw=2,
                AxesStyle="Normal2",
                color="b")

        ## Third cluster
        ax1 = gl.scatter(X3[0, :],
                         X3[1, :],
                         legend=["K = 3"],
                         color="g",
                         alpha=0.5)
        mean, w, h, theta = bMA.get_gaussian_ellipse_params(mu=mu3,
                                                            Sigma=cov3,
                                                            Chi2val=2.4477)
        r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
        gl.plot(r_ellipse[:, 0],
                r_ellipse[:, 1],
                ax=ax1,
                ls="--",
                lw=2,
                AxesStyle="Normal2",
                color="g")

        ax1.axis('equal')

        gl.savefig(folder_images + 'Original data.png',
                   dpi=100,
                   sizeInches=[12, 6])

    ############ ESTIMATE THEM ################
    theta1 = Gae.get_Gaussian_muSigma_ML(X1.T,
                                         parameters=dict([["Sigma", "full"]]))
    print("mu1:")
    print(theta1[0])
    print("Sigma1")
    print(theta1[1])

    ############## Estimate Likelihood ###################
    ll = Gad.Gaussian_pdf_log(X1, [mu1, cov1])
    ll2 = []
    for i in range(ll.size):
        ll2.append(
            multivariate_normal.logpdf(X1[:, i], mean=mu1.flatten(), cov=cov1))
    ll2 = np.array(ll2).reshape(ll.shape)

    print("ll ours")
    print(ll.T)
    print("ll scipy")
    print(ll2.T)
    print("Difference in ll")
    print((ll - ll2).T)

    ###### Multiple clusters case
    ll_K = Gad.Gaussian_K_pdf_log(X1, [[mu1, cov1], [mu2, cov2]])

    if (0):
        X1 = gf.remove_module(X1.T).T
        X2 = gf.remove_module(X2.T).T
        X3 = gf.remove_module(X3.T).T
    Xdata = np.concatenate((X1, X2, X3), axis=1).T

    return X1, X2, X3, Xdata, mu1, mu2, mu3, cov1, cov2, cov3
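Gae.get_Gaussian_muSigma_ML with a full Sigma presumably returns the closed-form ML solution, i.e. the sample mean and the biased (1/N) sample covariance; a quick sanity-check sketch under that assumption:

import numpy as np

# ML fit of a full-covariance 2D Gaussian: sample mean and 1/N covariance.
X = np.random.multivariate_normal([0, 0], [[0.8, -1.1], [-1.1, 1.6]], 500)
mu_ml = X.mean(axis=0)
Sigma_ml = (X - mu_ml).T @ (X - mu_ml) / X.shape[0]
print(mu_ml)
print(Sigma_ml)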
Esempio n. 24
0
    def init_figure(self):
        """
        This function initializes the chart, with its widgets and everything
        
        """
        
        button_height = 0.030;
        textbox_length0 = 0.02
        textbox_length1 = 0.04
        textbox_length2 = 0.05
         
        fig = gl.init_figure();
        ## Set the image to full screen
        fig_manager = plt.get_current_fig_manager()
        if hasattr(fig_manager, 'window'):
            fig_manager.window.showMaximized()
    
        data_axes = gl.subplot2grid((1,4), (0,0), rowspan=1, colspan=3)
        
        self.fig = fig; self.data_axes = data_axes;
        
        #### Logo Images !!
        logo_path =  self.output_folder + "images_IoTubes/IoTubes_logo.png"
        image = mpimg.imread(logo_path)
        ax_img = plt.axes([0.725, 0.75, 0.2, 0.2])
        ax_img.imshow(image)
        ax_img.axis("off")
        
        ################## Widgets Axes #####################
        
       
        widgets_x = 0.76
        widgets_x2 = 0.85
        widgets_x3 = 0.90
        
        w1_x, w2_x, w3_x = 0.73, 0.8,0.87
        
        base_y = 0.69
        
        administration_y = base_y
        monitoring_y = administration_y - 0.12
        chart_s_y = monitoring_y - 0.12
        chart_s_y2 = chart_s_y -0.05
        chart_start_stop_y = chart_s_y2 - 0.05
        
        output_y = chart_start_stop_y - 0.12

        
        diff_headline_content =  0.052
        ## Administration ! 
        headlines_x = 0.705
        text = self.fig.text(headlines_x, administration_y + diff_headline_content, 'Administration:', size=20) # ha='center', va='center', size=20)
        
        axbox_machineID = plt.axes([widgets_x, administration_y, textbox_length1, button_height])
        axbox_pipingID = plt.axes([widgets_x2, administration_y, textbox_length1, button_height])

        ### Monitoring
        text = self.fig.text(headlines_x, monitoring_y + diff_headline_content, 'PH Monitoring:', size=20) # ha='center', va='center', size=20)
        axbox_desired_value = plt.axes([widgets_x, monitoring_y, textbox_length0, button_height])
        axbox_range_warning = plt.axes([widgets_x2, monitoring_y, textbox_length0, button_height])
        
        ## Sampling and plotting
        text = self.fig.text(headlines_x, chart_s_y + diff_headline_content, 'Sampling and plotting:', size=20) # ha='center', va='center', size=20)
        axbox_sample_period = plt.axes([widgets_x, chart_s_y, textbox_length1, button_height])
        axbox_plot_period = plt.axes([widgets_x2, chart_s_y, textbox_length1, button_height])
        axbox_Nsamples_show = plt.axes([widgets_x, chart_s_y2, textbox_length1, button_height])
        
        ax_start = plt.axes([widgets_x,chart_start_stop_y, 0.04, button_height])
        ax_stop = plt.axes([widgets_x2, chart_start_stop_y, 0.04, button_height])
        
        ## Output
        text = self.fig.text(headlines_x, output_y + diff_headline_content, 'Output Generation:', size=20) # ha='center', va='center', size=20)
        axsave_disk = plt.axes([w1_x, output_y, 0.055, button_height])
        axsave_DDBB = plt.axes([w2_x, output_y, 0.055, button_height])
        axreport = plt.axes([w3_x, output_y, 0.055, button_height])

        
        ################## Add functionalities ###########################
        
        ################ Chart AXES ################:
        bstop = Button(ax_stop, 'Stop')
        bstop.on_clicked(self.stop_reading_data)
        
        bstart = Button(ax_start, 'Start')
        bstart.on_clicked(self.start_reading_data)
#        bprev.on_clicked(self.auto_update_test)
        
        #### Text input Period  ####
        initial_text = str(int(self.period_sampling * 1000));
        text_box_sample_period = TextBox(axbox_sample_period, 'Sample(ms) ', initial=initial_text)
        text_box_sample_period.on_submit(self.submit_sample_period)
        
        initial_text = str(int(self.period_plotting * 1000));
        text_box_plotting_period = TextBox(axbox_plot_period, 'Plot(ms) ', initial=initial_text)
        text_box_plotting_period.on_submit(self.submit_plotting_period)
        
        #### Text input N samples ####
        initial_text = str(int(self.show_window));
        text_Nsamples_show = TextBox(axbox_Nsamples_show, 'Samples Chart ', initial=initial_text)
        text_Nsamples_show.on_submit(self.submit_show_window)
        
        ################ Data generation widgets ################
        bpsave_disk = Button(axsave_disk, 'Save Disk')
        bpsave_disk.on_clicked(self.save_to_disk)
        
        bpsave_DDBB = Button(axsave_DDBB, 'Save DDBB')
        bpsave_DDBB.on_clicked(self.send_buffer_to_DDBB)
        
        bpsave_report = Button(axreport, 'Report')
        bpsave_report.on_clicked(self.generate_report)
        
        ################ Cleaning input widgets ################
        ## Text input MAchine ID
        initial_text = self.machine_ID
        text_box_machine = TextBox(axbox_machineID, 'Machine ID ', initial=initial_text)
        text_box_machine.on_submit(self.submit_machineID)
    
        initial_text = self.piping_ID
        text_box_piping = TextBox(axbox_pipingID, 'Piping ID ', initial=initial_text)
        text_box_piping.on_submit(self.submit_pipingID)
        
    
        
        ################ MONITORING variables ################
        initial_text = str(self.Monitor.desired_value);
        text_desired_value = TextBox(axbox_desired_value, 'Desired PH ', initial=initial_text)
        text_desired_value.on_submit(self.submit_desired_value)
        
        initial_text = str(self.Monitor.range_warning);
        text_range_warning = TextBox(axbox_range_warning, 'Warning Range ', initial=initial_text)
        text_range_warning.on_submit(self.submit_range_warning)
        
        
        # Keep references to the widgets; otherwise they are garbage-collected and stop responding
        self.buttons = [bstart, bstop, bpsave_disk,bpsave_DDBB,text_box_machine,
                        text_box_sample_period,text_box_plotting_period,
                        text_Nsamples_show,
                        text_desired_value, text_range_warning,bpsave_report, text_box_piping]


        
        self.initial_text_data = gl.add_text(positionXY = [0.35,0.5], text = r'Waiting for data',fontsize = 30, ax = data_axes)
        
        gl.subplots_adjust(left=.09, bottom=.20, right=.90, top=.90, wspace=.20, hspace=0)
        
        self.monitoring_y = monitoring_y
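The self.buttons list above works around a matplotlib gotcha: widgets stop responding once garbage-collected, so references must be kept alive. A minimal self-contained sketch of the same Button/TextBox wiring:

import matplotlib.pyplot as plt
from matplotlib.widgets import Button, TextBox

fig, _ = plt.subplots()
ax_btn = plt.axes([0.70, 0.02, 0.10, 0.05])
ax_box = plt.axes([0.25, 0.02, 0.20, 0.05])

btn = Button(ax_btn, 'Start')
btn.on_clicked(lambda event: print("started"))
box = TextBox(ax_box, 'Period(ms) ', initial="100")
box.on_submit(lambda text: print("period =", text))

# Widgets must outlive this scope or their callbacks stop firing.
fig._widgets = [btn, box]
plt.show()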
Esempio n. 25
0
    df.to_csv('./out.csv', sep=',')
        
###### SET THE READING ########
ser = serial.Serial('/dev/ttyUSB0', 9600)
ser.readline()
for i in range(10):
    print (float(ser.readline().decode("utf-8").split("\n")[0]))
#ser.close()


###### GENERATE FAKE DATA ############
data = np.random.randn(100,1) + 35
time = range(data.size)

###### GENERATE THE FIGURE ############
fig = gl.init_figure();
data_axes = gl.subplot2grid((1,4), (0,0), rowspan=1, colspan=3)


data = []
time = []

update_data.index = 0

print ("starting...")
## Define the class with all the info
class information():
    ## Serial port info
    serial = ser   # Serial port we get the info from
    rt = None
    ## Data information
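A hedged, slightly more defensive version of the serial read loop above (same port and baud rate; assumes pyserial is installed and each line carries one numeric reading):

import serial  # pyserial

with serial.Serial('/dev/ttyUSB0', 9600, timeout=2) as ser:
    ser.readline()                          # discard a possibly partial first line
    for _ in range(10):
        raw = ser.readline().decode("utf-8", errors="ignore").strip()
        try:
            print(float(raw))
        except ValueError:
            pass                            # skip malformed samples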
Esempio n. 26
0
                   verbose = verbose, time_profiling = time_profiling)

    ### Set the initial parameters
    theta_init = None
    model_theta_init = None
    ############# PERFORM THE EM #############
    
    logl,theta_list,model_theta_list = myEM.fit(Xdata, model_theta_init = model_theta_init, theta_init = theta_init) 
    
    spf.print_final_clusters(myDManager,clusters_relation, theta_list[-1], model_theta_list[-1])
    
    #######################################################################################################################
    #### Plot the evolution of the centroids likelihood ! #####################################################
    #######################################################################################################################

    gl.init_figure()
    gl.plot(range(1, np.array(logl).flatten()[1:].size + 1),
            np.array(logl).flatten()[1:],
            legend = ["EM LogLikelihood"],
            labels = ["Convergence of LL with generated data", "Iterations", "LL"],
            lw = 2)
    gl.savefig(folder_images +'Likelihood_Evolution. K_G:'+str(K_G)+ ', K_W:' + str(K_W) + ', K_vMF:' + str(K_vMF)+ '.png', 
           dpi = 100, sizeInches = [12, 6])

    if(perform_HMM_after_EM):
        Ninit = 1
        ############# Create the EM object and fit the data to it. #############
        clusters_relation = "MarkovChain1"   # MarkovChain1  independent
        myEM = CEM.CEM( distribution = myDManager, clusters_relation = clusters_relation, 
                       T = T, Ninit = Ninit,  delta_ll = delta_ll, 
                       verbose = verbose, time_profiling = time_profiling)
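    # myEM.fit above returns the trace of incomplete log-likelihoods, and
    # delta_ll presumably acts as its stopping tolerance. A sketch of such a
    # convergence test (an assumption about CEM's internals, for illustration):
    def em_converged(logl, delta_ll=0.01):
        # Converged when the log-likelihood improves less than delta_ll.
        return len(logl) >= 2 and abs(logl[-1] - logl[-2]) < delta_ll

    history = [-1520.3, -1410.7, -1398.2, -1398.19]
    print(em_converged(history, delta_ll=0.05))  # True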
    
Esempio n. 27
0
def create_Bayesian_analysis_charts_simplified(model,
                                               train_dataset,
                                               validation_dataset,
                                               tr_loss,
                                               val_loss,
                                               KL_loss,
                                               folder_images,
                                               epoch_i=None):

    # Configurations of the plots
    alpha_points = 0.2
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6, 3), (0, 0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6, 3), (3, 0),
                          rowspan=3,
                          colspan=1,
                          sharex=ax1,
                          sharey=ax1)

    ax3 = gl.subplot2grid((6, 3), (0, 1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6, 3), (2, 1), rowspan=2, colspan=1, sharex=ax3)
    ax5 = gl.subplot2grid((6, 3), (4, 1), rowspan=2, colspan=1, sharex=ax3)

    ax6 = gl.subplot2grid((6, 3), (0, 2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6, 3), (3, 2), rowspan=3, colspan=1, sharex=ax6)

    ####### ax1, ax2: Get confusion matrices ##########

    labels_classes, confusion = model.get_confusion_matrix(train_dataset)
    plot_confusion_matrix(confusion, labels_classes, ax1)
    labels_classes, confusion = model.get_confusion_matrix(validation_dataset)
    plot_confusion_matrix(confusion, labels_classes, ax2)

    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([],
            tr_loss,
            ax=ax3,
            lw=3,
            labels=["Losses", "", "Data loss (MSE)"],
            legend=["train"],
            color=color_train_loss)
    gl.plot([],
            val_loss,
            ax=ax3,
            lw=3,
            legend=["validation"],
            color=color_val_loss,
            AxesStyle="Normal - No xaxis")

    ## ax4: The evolution of the KL loss
    gl.plot([],
            KL_loss,
            ax=ax4,
            lw=3,
            labels=["", "", "KL loss"],
            legend=["Bayesian Weights"],
            AxesStyle="Normal - No xaxis",
            color="k")

    ## ax5: Evolution of the total loss
    gl.plot([],
            tr_loss,
            ax=ax5,
            lw=3,
            labels=["", "epoch", "Total Loss (Bayes)"],
            legend=["train"],
            color=color_train_loss)
    gl.plot([],
            val_loss,
            ax=ax5,
            lw=3,
            legend=["validation"],
            color=color_val_loss)

    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model, ax6, ax7)

    gl.set_zoom(ax=ax6, ylim=[-0.1, 10])
    gl.set_zoom(ax=ax7, xlim=[-2.5, 2.5], ylim=[-0.1, 0.5])

    # Set final properties and save figure
    gl.set_fontSizes(ax=[ax1, ax2, ax3, ax4, ax5, ax6, ax7],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=10,
                     xticks=12,
                     yticks=12)

    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.30,
                       hspace=0.10)

    if epoch_i is None:
        gl.savefig(folder_images + 'Training_Example_Data_Bayesian.png',
                   dpi=100,
                   sizeInches=[20, 10])
    else:
        gl.savefig(folder_images + '%i.png' % epoch_i,
                   dpi=100,
                   sizeInches=[20, 10],
                   close=True,
                   bbox_inches="tight")
Esempio n. 29
0

## Comparison
threshold_corte_ultimo_escano_list = np.linspace(Total_votos,0,10000)

blue_party = total_parties_blue[2]
red_party = total_parties_red[1]
party_escanhos_blue_2, party_escanhos_red_2 = get_esanhos_between_two_formations(blue_party,red_party, threshold_corte_ultimo_escano_list,N_escanos)
 

"""
PLOT THE INDIVIDUAL ESCANHOS AND THE TOTAL IN THE END
"""


gl.init_figure();
ax1 = gl.subplot2grid((3,1), (0,0), rowspan=1, colspan=1)
ax2 = gl.subplot2grid((3,1), (1,0), rowspan=1, colspan=1, sharex = ax1, sharey = ax1)
ax3 = gl.subplot2grid((3,1), (2,0), rowspan=1, colspan=1, sharex = ax1, sharey = ax1)
    
gl.plot(threshold_corte_ultimo_escano_list_blue,party_escanhos_blue, ax = ax1,
        labels = ["Seats obtained as a function of the votes of the last seat", "Votes of the last seat", "Seats obtained"],
        legend = [["%.2f"%(total_parties_blue[i][j]) for j in range(len(total_parties_blue[i]))] for i in range(len(total_parties_blue))])

gl.plot(threshold_corte_ultimo_escano_list_red,party_escanhos_red, ax = ax2,
        labels = ["Seats obtained as a function of the votes of the last seat", "Votes of the last seat", "Seats obtained"],
        legend = [["%.2f"%(total_parties_red[i][j]) for j in range(len(total_parties_red[i]))] for i in range(len(total_parties_red))])


gl.plot(threshold_corte_ultimo_escano_list,party_escanhos_blue_2, ax = ax3,
        labels = ["", "", ""],
Esempio n. 30
0

###### SET THE READING ########
if (0):
    ser = serial.Serial('/dev/ttyUSB0', 9600)
    ser.readline()
    for i in range(10):
        print(float(ser.readline().decode("utf-8").split("\n")[0]))
#ser.close()

###### GENERATE FAKE DATA ############
data = np.random.randn(100, 1) + 35
time = range(data.size)

###### GENERATE THE FIGURE ############
fig = gl.init_figure()
data_axes = gl.subplot2grid((1, 4), (0, 0), rowspan=1, colspan=3)

data = []
time = []

update_data.index = 0

print("starting...")


## Define the class with all the info
class information():
    ## Serial port info
    serial = ser  # Serial port we get the info from
    rt = None
    classifiers_keys = cl_d.keys()
    Nclassifiers = len(classifiers_keys)
    for key in classifiers_keys:
        classifier = cl_d[key]
        train_acc.append(get_class_rate(Ytrain, classifier.predict(Xtrain)))
        test_acc.append(get_class_rate(Ytest, classifier.predict(Xtest)))

        train_CE.append(get_CE(Ytrain, classifier.predict_proba(Xtrain)[:, 1]))
        test_CE.append(get_CE(Ytest, classifier.predict_proba(Xtest)[:, 1]))
    train_acc = np.array(train_acc)
    test_acc = np.array(test_acc)
    train_CE = np.array(train_CE)
    test_CE = np.array(test_CE)

    gl.init_figure()
    ax1 = plt.subplot(2, 1, 1)
    plt.bar(np.arange(Nclassifiers) + 0.2,
            1 - train_acc,
            width=0.2,
            color='c',
            align='center',
            label="train")
    plt.bar(np.arange(Nclassifiers) + 0.4,
            1 - test_acc,
            width=0.2,
            color='r',
            align='center',
            label="test")
    plt.xticks(np.arange(Nclassifiers) + 0.3, classifiers_keys)
    plt.title('Classifiers Performance')
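get_class_rate and get_CE are not defined in this snippet; hedged guesses consistent with how they are called above (classification accuracy, and mean binary cross-entropy of the positive-class probability):

import numpy as np

def get_class_rate(y_true, y_pred):
    # Fraction of correctly classified samples.
    return float(np.mean(np.asarray(y_true) == np.asarray(y_pred)))

def get_CE(y_true, p_pos, eps=1e-12):
    # Mean binary cross-entropy given P(class = 1) per sample.
    y = np.asarray(y_true, dtype=float)
    p = np.clip(np.asarray(p_pos, dtype=float), eps, 1 - eps)
    return float(-np.mean(y * np.log(p) + (1 - y) * np.log(1 - p)))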
Esempio n. 32
0
def create_image_weights_epoch(model, video_fotograms_folder2, epoch_i):
    """
    Creates the image of the training and validation accuracy
    """
    N_Bayesian_layers = len(model.VBmodels)    
    N_Normal_layers = len(model.LinearModels)
    
    # Compute the number of squares we will need:
    # 1 x linear layers, 2 x LSTMS
    
    gl.init_figure();
    cmap = cm.get_cmap('coolwarm', 30)
    
    all_axes = []
    for i in range(N_Bayesian_layers):
        layer = model.VBmodels[i]
        
#        if (layer.type_layer == "linear"):
        if ("linear" in type(layer).__name__.lower()):
            ax = gl.subplot2grid((1,N_Bayesian_layers + N_Normal_layers), (0,i), rowspan=1, colspan=1)
            weights = layer.weight.detach().cpu().numpy()
            biases = layer.bias.detach().cpu().numpy().reshape(-1,1)
            neurons = np.concatenate((weights, biases), axis = 1)
            cax = ax.imshow(neurons, interpolation="nearest", cmap=cmap, vmin=-2, vmax=2)
        
            all_axes.append(ax)
        else:
            ax = gl.subplot2grid((1,N_Bayesian_layers + N_Normal_layers), (0,i), rowspan=1, colspan=1)
            weights_ih = layer.weight_ih.detach().cpu().numpy()
            biases_ih = layer.bias_ih.detach().cpu().numpy().reshape(-1,1)
            weights_hh = layer.weight_hh.detach().cpu().numpy()
            biases_hh = layer.bias_hh.detach().cpu().numpy().reshape(-1,1)
            
            weights = np.concatenate((weights_ih,weights_hh),axis = 1)
            biases = np.concatenate((biases_ih,biases_hh),axis = 1)
            neurons = np.concatenate((weights, biases), axis = 1)
            cax = ax.imshow(neurons, interpolation="nearest", cmap=cmap, vmin=-2, vmax=2)
            all_axes.append(ax)
            
            
    for i in range(N_Normal_layers):
        layer = model.LinearModels[i]
        if ("linear" in type(layer).__name__.lower()):
            ax = gl.subplot2grid((1,N_Bayesian_layers + N_Normal_layers), (0,N_Bayesian_layers +i), rowspan=1, colspan=1)
            weights = layer.weight.detach().cpu().numpy()
            biases = layer.bias.detach().cpu().numpy().reshape(-1,1)
            neurons = np.concatenate((weights, biases), axis = 1)
            cax = ax.imshow(neurons, interpolation="nearest", cmap=cmap, vmin=-2, vmax=2)
            all_axes.append(ax)
        else:
            ax = gl.subplot2grid((1,N_Bayesian_layers + N_Normal_layers), (0,N_Bayesian_layers +i), rowspan=1, colspan=1)
            weights_ih = layer.weight_ih.detach().cpu().numpy()
            biases_ih = layer.bias_ih.detach().cpu().numpy().reshape(-1,1)
            weights_hh = layer.weight_hh.detach().cpu().numpy()
            biases_hh = layer.bias_hh.detach().cpu().numpy().reshape(-1,1)
            
            weights = np.concatenate((weights_ih,weights_hh),axis = 1)
            biases = np.concatenate((biases_ih,biases_hh),axis = 1)
            neurons = np.concatenate((weights, biases), axis = 1)
            cax = ax.imshow(neurons, interpolation="nearest", cmap=cmap, vmin=-2, vmax=2)
            all_axes.append(ax)
            
#    plt.xticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='vertical')
#    plt.yticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='horizontal')
    plt.colorbar(cax)
#    plt.colorbar(cax2)
#        ax1.set_xticks(data_df_train.columns) # , rotation='vertical'
#    ax1.grid(True)
    plt.title('Weights ')

    
#    labels=[str(x) for x in range(Nshow )]
#    ax1.set_xticklabels(labels,fontsize=20)
#    ax1.set_yticklabels(labels,fontsize=20)
    # Add colorbar, make sure to specify tick locations to match desired ticklabels
    plt.show()

    
    gl.set_fontSizes(ax = all_axes, title = 20, xlabel = 20, ylabel = 20, 
                      legend = 20, xticks = 12, yticks = 12)
    
    # Set final properties and save figure
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.30)
    
    gl.savefig(video_fotograms_folder2 +'%i.png'%epoch_i, 
               dpi = 100, sizeInches = [14, 10], close = True, bbox_inches = None)
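The numbered PNGs written per epoch are intended as video frames; a sketch for stitching them into a GIF, assuming the imageio package (not used elsewhere in these snippets) and the '%i.png' naming above:

import os
import imageio  # assumed dependency

def frames_to_gif(folder, out_path, fps=2):
    # Collect "0.png", "1.png", ... in numeric order and write one GIF.
    frames = sorted((f for f in os.listdir(folder) if f.endswith(".png")),
                    key=lambda f: int(os.path.splitext(f)[0]))
    images = [imageio.imread(os.path.join(folder, f)) for f in frames]
    imageio.mimsave(out_path, images, fps=fps)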