Example #1
def plot_data_regression_1d_2axes(X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func, X_data_val, Y_data_val,
                                  x_grid,all_y_grid, most_likely_ygrid,
                                  alpha_points, color_points_train, color_points_val, color_most_likey,color_mean, color_truth,
                                  ax1,ax2):
    """
    This function plots the outputs of the Regression model for the 1D example
    """
    
    ## Compute mean and std of regression
    std_samples_grid = np.std(all_y_grid, axis = 1)
    mean_samples_grid = np.mean(all_y_grid, axis = 1)
    
    ############## ax1: Data + Most likely + Real + Mean !! ########################
    if ax1 is not None:
        gl.scatter(X_data_tr, Y_data_tr, ax = ax1, lw = 3,  #legend = ["tr points"], 
                   labels = ["Data and predictions", "","Y"], alpha = alpha_points, color = color_points_train)
        gl.scatter(X_data_val, Y_data_val, ax = ax1, lw = 3, #legend = ["val points"], 
                   alpha = alpha_points, color = color_points_val)
        
        gl.plot (xgrid_real_func, ygrid_real_func, ax  = ax1, alpha = 0.90, color = color_truth, legend = ["Truth"])
        gl.plot (x_grid, most_likely_ygrid, ax  = ax1, alpha = 0.90, color = color_most_likey, legend = ["Most likely"])
        gl.plot (x_grid, mean_samples_grid, ax  = ax1, alpha = 0.90, color = color_mean, legend = ["Posterior mean"],
                 AxesStyle = "Normal - No xaxis")
    
    ############## ax2: Data + Realizations of the function !! ######################
    if ax2 is not None:
        gl.scatter(X_data_tr, Y_data_tr, ax = ax2, lw = 3,  # legend = ["tr points"], 
                   labels = ["", "X","Y"], alpha = alpha_points, color = color_points_train)
        gl.scatter(X_data_val, Y_data_val, ax = ax2, lw = 3, # legend = ["val points"], 
                   alpha = alpha_points, color = color_points_val)
            
        gl.plot (x_grid, all_y_grid, ax  = ax2, alpha = 0.15, color = "k")
        gl.plot (x_grid, mean_samples_grid, ax  = ax2, alpha = 0.90, color = "b", legend = ["Mean realization"])
        
    gl.set_zoom(xlimPad = [0.2,0.2], ylimPad = [0.2,0.2], ax = ax2, X = X_data_tr, Y = Y_data_tr)
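The mean/std computation above assumes all_y_grid stacks one posterior sample per column, i.e. shape (len(x_grid), Nsamples). A minimal self-contained sketch of that convention (array contents are illustrative, not from the library):

import numpy as np

# Hypothetical shapes: 50 x-grid points, 100 posterior samples per point.
x_grid = np.linspace(-1, 1, 50)
all_y_grid = np.random.randn(50, 100)            # one sampled function per column

mean_samples_grid = np.mean(all_y_grid, axis=1)  # pointwise posterior mean, shape (50,)
std_samples_grid = np.std(all_y_grid, axis=1)    # pointwise posterior std,  shape (50,)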
Example #2
def update_data(information):

    time, data = information.time, information.data

    ## Read data to update !!
    information.serial.flush()
    data.append(
        float(information.serial.readline().decode("utf-8").split("\n")[0]))
    time.append(update_data.index)

    update_data.index += 1

    window = 100

    start = max([update_data.index - window, 0])
    print(start, data[-1])

    # option 2, remove all lines and collections
    for artist in plt.gca().lines + plt.gca().collections:
        artist.remove()

    gl.plot(np.array(time)[start:update_data.index],
            np.array(data)[start:update_data.index],
            labels=["Sensors values", "time (s)", "Temperature"],
            color="k",
            ax=data_axes)
    gl.set_zoom(xlimPad=[0.2, 0.2], ylimPad=[0.1, 0.1])
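update_data relies on a function attribute update_data.index that must be initialized before the first call, and on an external timer or animation loop driving it. A plain-matplotlib sketch of the same remove-and-redraw pattern, with hypothetical names and synthetic data instead of the serial port:

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation

fig, data_axes = plt.subplots()
time_buf, data_buf = [], []

def update(frame):
    # Append a synthetic reading instead of serial.readline()
    data_buf.append(np.sin(frame / 10.0))
    time_buf.append(frame)
    window = 100
    start = max(len(data_buf) - window, 0)
    # Same trick as update_data: drop the old artists, then redraw the window
    for artist in list(data_axes.lines) + list(data_axes.collections):
        artist.remove()
    data_axes.plot(time_buf[start:], data_buf[start:], color="k")

ani = FuncAnimation(fig, update, interval=100)
plt.show()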
Example #3
def create_image_training_epoch(X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                                tr_loss, val_loss, x_grid, y_grid, cf_a,
                                video_fotograms_folder, epoch_i):
    """
    Creates the image of the training and validation accuracy
    """
    gl.init_figure()
    ax1 = gl.subplot2grid((2,1), (0,0), rowspan=1, colspan=1)
    ax2 = gl.subplot2grid((2,1), (1,0), rowspan=1, colspan=1)
    
    plt.title("Training")
    ## First plot with the data and predictions !!!
    ax1 = gl.scatter(X_data_tr, Y_data_tr, ax = ax1, lw = 3,legend = ["tr points"], labels = ["Analysis of training", "X","Y"])
    gl.scatter(X_data_val, Y_data_val, lw = 3,legend = ["val points"])
    
    gl.plot (x_grid, y_grid, legend = ["Prediction function"])

    gl.set_zoom(xlimPad = [0.2, 0.2], ylimPad = [0.2,0.2], X = X_data_tr, Y = Y_data_tr)
    ## Second plot with the evolution of parameters !!!
    ax2 = gl.plot([], tr_loss, ax = ax2, lw = 3, labels = ["RMSE. lr: %.3f"%cf_a.lr, "epoch","RMSE"], legend = ["train"])
    gl.plot([], val_loss, lw = 3, legend = ["validation"], loc = 3)
    
    
    gl.set_fontSizes(ax = [ax1,ax2], title = 20, xlabel = 20, ylabel = 20, 
                      legend = 20, xticks = 12, yticks = 12)
    
    # Set final properties and save figure
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.30)
    
    gl.savefig(video_fotograms_folder +'%i.png'%epoch_i, 
               dpi = 100, sizeInches = [14, 10], close = True, bbox_inches = None)
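Example #4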
def create_image_training_epoch(X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                                tr_loss, val_loss, x_grid, y_grid, cf_a,
                                video_fotograms_folder, epoch_i):
    """
    Creates the image of the training and validation accuracy
    """
    gl.init_figure()
    ax1 = gl.subplot2grid((2, 1), (0, 0), rowspan=1, colspan=1)
    ax2 = gl.subplot2grid((2, 1), (1, 0), rowspan=1, colspan=1)

    plt.title("Training")
    ## First plot with the data and predictions !!!
    ax1 = gl.scatter(X_data_tr,
                     Y_data_tr,
                     ax=ax1,
                     lw=3,
                     legend=["tr points"],
                     labels=["Analysis of training", "X", "Y"])
    gl.scatter(X_data_val, Y_data_val, lw=3, legend=["val points"])

    gl.plot(x_grid, y_grid, legend=["Prediction function"])

    gl.set_zoom(xlimPad=[0.2, 0.2],
                ylimPad=[0.2, 0.2],
                X=X_data_tr,
                Y=Y_data_tr)
    ## Second plot with the evolution of parameters !!!
    ax2 = gl.plot([],
                  tr_loss,
                  ax=ax2,
                  lw=3,
                  labels=["RMSE. lr: %.3f" % cf_a.lr, "epoch", "RMSE"],
                  legend=["train"])
    gl.plot([], val_loss, lw=3, legend=["validation"], loc=3)

    gl.set_fontSizes(ax=[ax1, ax2],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=20,
                     xticks=12,
                     yticks=12)

    # Set final properties and save figure
    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.30,
                       hspace=0.30)

    gl.savefig(video_fotograms_folder + '%i.png' % epoch_i,
               dpi=100,
               sizeInches=[14, 10],
               close=True,
               bbox_inches=None)
Example #5
def plot_data_classification_2d_2axes(X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func, X_data_val, Y_data_val,
                                  xx,yy,all_y_grid, most_likely_ygrid,
                                  alpha_points, color_points_train, color_points_val, color_most_likey,color_mean, color_truth,
                                  ax1,ax2):
    """
    This function plots the outputs of the Classification model for the 2D example
    """
    
    alpha_points = 1  # Override the input alpha: fully opaque points for the classification plots
    ## Compute mean and std of regression
    std_samples_grid = np.std(all_y_grid, axis = 1)
    mean_samples_grid = np.mean(all_y_grid, axis = 1)
    
    ############## ax1: Data + Most likely + Real + Mean !! ########################
    
    classes = np.unique(Y_data_tr).flatten()
    colors = ["r","g","b"]
    
    for i in range(classes.size):
        X_data_tr_class = X_data_tr[np.where(Y_data_tr == classes[i])[0],:]
        X_data_val_class = X_data_val[np.where(Y_data_val == classes[i])[0],:]
#        print (X_data_tr_class.shape)
#        print (classes)
#        print (X_data_tr)
        if ((X_data_tr_class.size > 0) and (X_data_val_class.size > 0)):
            gl.scatter(X_data_tr_class[:,0].flatten().tolist(), X_data_tr_class[:,1].flatten().tolist(), ax = ax1, lw = 3,  #legend = ["tr points"], 
                       labels = ["Data and predictions", "","Y"], alpha = alpha_points, color = colors[i])
            gl.scatter(X_data_val_class[:,0].flatten(),  X_data_val_class[:,1].flatten(), ax = ax1, lw = 3,color = colors[i], #legend = ["val points"], 
                       alpha = alpha_points, marker = ">")

    out = ax1.contourf(xx, yy, most_likely_ygrid.reshape(xx.shape), cmap=plt.cm.coolwarm, alpha=0.5)
    
#    ax.scatter(X0, X1, c=y, cmap=plt.cm.coolwarm, s=20, edgecolors='k')
    
#    gl.plot (xgrid_real_func, ygrid_real_func, ax  = ax1, alpha = 0.90, color = color_truth, legend = ["Truth"])
    for i in range(classes.size):
        X_data_tr_class = X_data_tr[np.where(Y_data_tr == classes[i])[0],:]
        X_data_val_class = X_data_val[np.where(Y_data_val == classes[i])[0],:]
#        print (X_data_tr_class.shape)
#        print (classes)
#        print (X_data_tr)
        if ((X_data_tr_class.size > 0) and (X_data_val_class.size > 0)):
            gl.scatter(X_data_tr_class[:,0].flatten().tolist(), X_data_tr_class[:,1].flatten().tolist(), ax = ax2, lw = 3,  #legend = ["tr points"], 
                       labels = ["", "X","Y"], alpha = alpha_points, color = colors[i])
            gl.scatter(X_data_val_class[:,0].flatten(),  X_data_val_class[:,1].flatten(), ax = ax2, lw = 3,color = colors[i], #legend = ["val points"], 
                       alpha = alpha_points, marker = ">")

    for ygrid in all_y_grid:
       out = ax2.contourf(xx, yy, ygrid.reshape(xx.shape), cmap=plt.cm.coolwarm, alpha=0.5)
    

    ############## ax2: Data + Realizations of the function !! ######################
    
    gl.set_zoom(xlimPad = [0.3,0.3], ylimPad = [0.3,0.3], ax = ax2, X = X_data_tr[:,0], Y = X_data_tr[:,1])
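Example #6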
def plot_VB_weights_mu_std_2D(VBmodel, ax1, type = "LinearVB", title = ""):
    """
    This function plots the variational weights in the 2 axes given
    """
    l = 0
    if (type == "LinearVB"):
        [mu_W, sigma_W, mu_b, sigma_b] = get_LinearVB_weights(VBmodel)
        shape_weights = VBmodel.mu_weight.detach().cpu().numpy().shape
        title +=" " + str(shape_weights)
#        title = ["linear layer: %i"%(l)]
        
        plots_weights_layer(mu_W, sigma_W, mu_b, sigma_b, ax1, title)
        
        prior = VBmodel.prior
        max_mu,min_mu,max_std,min_std,max_abs = get_boundaries_plot(mu_W, sigma_W, mu_b,sigma_b)
        
        gl.scatter(0, prior.sigma1, lw = 3, ax = ax1, legend = ["Prior 1 (%.3f)"%(prior.sigma1)], color = "k", marker = "x",)
        gl.scatter(0, prior.sigma2, lw = 3,ax = ax1, legend = ["Prior 2 (%.3f)"%(prior.sigma2)], color = "b",marker = "x" ) 

        plot_signifant_region(ax1, max_mu,min_mu,max_std,min_std,max_abs)
        gl.set_zoom (ax = ax1, xlimPad = [0.1, 0.1], ylimPad = [0.1,0.1], 
                     X = np.array([min_mu,max_mu]), 
                     Y = np.array([min_std,max_std]) )

    if (type == "HighwayVB"):
        [mu_W, sigma_W, mu_b, sigma_b] = get_LinearVB_weights(VBmodel)
        shape_weights = VBmodel.mu_weight.detach().cpu().numpy().shape
        title +=" " + str(shape_weights)
#        title = ["linear layer: %i"%(l)]
        print(mu_W.shape, mu_b.shape)
#        plots_weights_layer(mu_W[:200,:], sigma_W[:200,:], mu_b[:200], sigma_b[:200], ax1, title + " $G(x)$")
#        plots_weights_layer(mu_W[200:,:], sigma_W[200:,:], mu_b[200:], sigma_b[200:], ax1, title+ " $H(x)$")
      
        plots_weights_layer(mu_W.reshape(shape_weights)[:200,:].flatten(), sigma_W.reshape(shape_weights)[:200,:].flatten(), mu_b[:200], sigma_b[:200], ax1, title, legend = ["Weights and biases G(x)"])
        plots_weights_layer(mu_W.reshape(shape_weights)[200:,:].flatten(), sigma_W.reshape(shape_weights)[200:,:].flatten(), mu_b[200:], sigma_b[200:], ax1, title, legend = ["Weights and biases H(x)"])
        
        prior = VBmodel.prior
        max_mu,min_mu,max_std,min_std,max_abs = get_boundaries_plot(mu_W, sigma_W, mu_b,sigma_b)
        
        gl.scatter(0, prior.sigma1, lw = 3, ax = ax1, legend = ["Prior 1 (%.3f)"%(prior.sigma1)], color = "k", marker = "x",)
        gl.scatter(0, prior.sigma2, lw = 3,ax = ax1, legend = ["Prior 2 (%.3f)"%(prior.sigma2)], color = "b",marker = "x" ) 

        plot_signifant_region(ax1, max_mu,min_mu,max_std,min_std,max_abs)
        gl.set_zoom (ax = ax1, xlimPad = [0.1, 0.1], ylimPad = [0.1,0.1], 
                     X = np.array([min_mu,max_mu]), 
                     Y = np.array([min_std,max_std]) )
Example #7
def update_data(information):
    
    time,data = information.time, information.data
    
    ## Read data to update !!
    information.serial.flush()
    data.append(float(information.serial.readline().decode("utf-8").split("\n")[0]))
    time.append(update_data.index)
    
    update_data.index += 1
    
    window = 100
    
    start = max([update_data.index - window, 0])
    print (start, data[-1])
    
    # option 2, remove all lines and collections
    for artist in plt.gca().lines + plt.gca().collections:
        artist.remove()
    
    gl.plot(np.array(time)[start:update_data.index], np.array(data)[start:update_data.index], 
            labels = ["Sensors values", "time (s)", "Temperature"], color = "k", ax = data_axes);
    gl.set_zoom(xlimPad = [0.2,0.2] ,ylimPad = [0.1, 0.1])
Example #8
        all_axes.append(ax1)
        all_axes.append(ax2)

        mu_grid = np.linspace(-3, 3, 100)
        y_grid = np.abs(mu_grid) / 2

        gl.fill_between(mu_grid,
                        10 * np.ones(mu_grid.size),
                        y_grid,
                        alpha=0.2,
                        color="r",
                        ax=ax2,
                        legend=["95% non-significant"])

        gl.set_zoom(ax=ax2,
                    xlim=[-2.5, 2.5],
                    ylim=[-0.05, model.linear1.prior.sigma1 * (1 + 0.30)])

        eta_KL = eta_values[i]
        ax1.set_title(r"Model estimations for $\zeta = " + str(eta_KL) + "$")
        ax2.set_title(r"Variational Weights for $\zeta = " + str(eta_KL) + "$")
#    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,2])

# Set final properties and save figure
    gl.set_fontSizes(ax=all_axes,
                     title=14,
                     xlabel=16,
                     ylabel=16,
                     legend=10,
                     xticks=10,
                     yticks=10)
            marker=marker,
            AxesStyle="Normal2")
    gl.stem([],
            EMAw2,
            nf=1,
            sharex=ax1,
            sharey=ax1,
            labels=["", "Lag", ""],
            legend=["EMA(%i)" % nMA2],
            color="k",
            xlimPad=[0.1, 0.3],
            ylimPad=[0.1, 0.4],
            marker=marker,
            AxesStyle="Normal2 - No yaxis")

    gl.set_zoom(xlim=[-2, nMA2 * (1.10)], ylim=[-0.01, 0.25])
    axes_list = gl.get_axes()
    for ax in axes_list:
        gl.format_yaxis(ax=ax, Nticks=10)

    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.05,
                       hspace=0.05)

    gl.savefig(folder_images + 'windows.png',
               dpi=100,
               sizeInches=[2 * 8, 2 * 3])
def generate_images_iterations_ll(Xs,mus,covs, Ks ,myDManager, logl,theta_list,model_theta_list,folder_images_gif):
#    os.remove(folder_images_gif) # Remove previous images if existing
    """
    WARNING: meant for only 3 distributions, since cluster responsibilities are encoded as RGB color components
    """
    import shutil
    ul.create_folder_if_needed(folder_images_gif)
    shutil.rmtree(folder_images_gif)
    ul.create_folder_if_needed(folder_images_gif)
    ######## Plot the original data #####

    Xdata = np.concatenate(Xs,axis = 1).T
    colors = ["r","b","g"]
    K_G,K_W,K_vMF = Ks
    
    ### FOR EACH ITERATION 
    for i in range(len(theta_list)):  # theta_list
        indx = i
        gl.init_figure()
        ax1 = gl.subplot2grid((1,2), (0,0), rowspan=1, colspan=1)
        
        ## Get the relative ll of the Gaussian denoising cluster.
        ll = myDManager.pdf_log_K(Xdata,theta_list[indx])
        N,K = ll.shape
#        print ll.shape
        for j in range(N):  # For every sample
        #TODO: Can this not be done without a for ?
            # Normalize the probability of the sample being generated by the clusters
            Marginal_xi_probability = gf.sum_logs(ll[j,:])
            ll[j,:] = ll[j,:]- Marginal_xi_probability
        
            ax1 = gl.scatter(Xdata[j,0],Xdata[j,1], labels = ['EM Evolution. Kg:'+str(K_G)+ ', Kw:' + str(K_W) + ', K_vMF:' + str(K_vMF), "X1","X2"], 
                              color = (np.exp(ll[j,1]), np.exp(ll[j,0]), np.exp(ll[j,2])) ,  ###  np.exp(ll[j,2])
                              alpha = 1, nf = 0)
            
        # Only doable if the clusters don't die
        for k_c in myDManager.clusterk_to_Dname.keys():
            k = myDManager.clusterk_to_thetak[k_c]
            distribution_name = myDManager.clusterk_to_Dname[k_c] # G W
            
            if (distribution_name == "Gaussian"):
                ## Plot the evolution of the mu
                #### Plot the covariance of the clusters !
                mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = theta_list[indx][k][0], Sigma = theta_list[indx][k][1], Chi2val = 2.4477)
                r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
                gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "-.", lw = 3,
                        AxesStyle = "Normal2",
                       legend = ["Kg(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
            
            elif(distribution_name == "Watson"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2*np.pi, Nsa)
                Xgrid= np.array([np.cos(Xalpha), np.sin(Xalpha)])
                
                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(np.exp(Wad.Watson_pdf_log(Xgrid[:,i],[mu,kappa]) ))
                
                probs = np.array(probs)
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                
                gl.plot(X1_w,X2_w, 
                     alpha = 1, lw = 3, ls = "-.", legend = ["Kw(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
                
            elif(distribution_name == "vonMisesFisher"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for von Mises-Fisher
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2*np.pi, Nsa)
                Xgrid= np.array([np.cos(Xalpha), np.sin(Xalpha)])
                
                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(np.exp(vMFd.vonMisesFisher_pdf_log(Xgrid[:,i],[mu,kappa]) ))
                    
                probs = np.array(probs)
                probs = probs.reshape((probs.size,1)).T
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                
    #            print X1_w.shape, X2_w.shape
                gl.plot(X1_w,X2_w, 
                     alpha = 1, lw = 3, ls = "-.", legend = ["Kvmf(%i). pi:%0.2f"%(k,  float(model_theta_list[indx][0][0,k]))]) 
                
            
        gl.set_zoom(xlim = [-6,6], ylim = [-6,6], ax = ax1)     
        ax2 = gl.subplot2grid((1,2), (0,1), rowspan=1, colspan=1)
        if (indx == 0):
            gl.add_text(positionXY = [0.1,.5], text = r' Initialization Incomplete LogLike: %.2f'%(logl[0]),fontsize = 15)
            pass
        elif (indx >= 1):
           
            gl.plot(range(1,np.array(logl).flatten()[1:].size +1),np.array(logl).flatten()[1:(indx+1)], ax = ax2, 
                    legend = ["Iteration %i, Incom LL: %.2f"%(indx, logl[indx])], labels = ["Convergence of LL with generated data","Iterations","LL"], lw = 2)
            gl.scatter(1, logl[1], lw = 2)
            pt = 0.05
            gl.set_zoom(xlim = [0,len(logl)], ylim = [logl[1] - (logl[-1]-logl[1])*pt,logl[-1] + (logl[-1]-logl[1])*pt], ax = ax2)
            
        gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.2, hspace=0.01)
        
        gl.savefig(folder_images_gif +'gif_'+ str(indx) + '.png', 
               dpi = 100, sizeInches = [16, 8], close = "yes",bbox_inches = None)
        
        gl.close("all")
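The per-sample normalization loop above (see the TODO inside the function) can indeed be vectorized; a sketch of the equivalent operation with scipy, assuming ll is the (N, K) matrix of per-cluster log-likelihoods:

import numpy as np
from scipy.special import logsumexp

ll = np.log(np.random.rand(5, 3))                # stand-in (N, K) log-likelihoods
# Subtract each row's log marginal: same effect as the gf.sum_logs loop above
ll_normalized = ll - logsumexp(ll, axis=1, keepdims=True)
# Rows of exp(ll_normalized) now sum to 1 (cluster responsibilities)
assert np.allclose(np.exp(ll_normalized).sum(axis=1), 1.0)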
        ################# Draw the error ellipse  #################
        mean,w,h,theta = bMA.get_gaussian_ellipse_params( mu = mu_Y, Sigma = SigmaY, Chi2val = 2.4477)
#        mean,vecs = bMA.get_gaussian_mean_and_vects(Y.T)
        vecs,vals = bMA.get_eigenVectorsAndValues(Sigma = SigmaY)
        r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
        gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax1, ls = "--",color = "k", lw = 2,
                 legend = ["Corr: %.2f"%(corr[0,1])],AxesStyle = "Normal2")
        
        gl.plot([mean[0] - vecs[0,0]*w, mean[0] + vecs[0,0]*w], 
                [mean[1] - vecs[0,1]*w, mean[1] + vecs[0,1]*w], ax = ax1, ls = "--",color = "k")
        gl.plot([mean[0] - vecs[1,0]*h, mean[0] + vecs[1,0]*h], 
                [mean[1] - vecs[1,1]*h, mean[1] + vecs[1,1]*h], ax = ax1, ls = "--",color = "k")
        

        ax1.axis('equal')
        gl.set_zoom(ax = ax1, X =r_ellipse[:,0], Y = r_ellipse[:,1],
                    ylimPad = [0.2,0.2],xlimPad = [0.2,0.2])

        
    gl.savefig(folder_images +'RotatedProjection.png', 
               dpi = 100, sizeInches = [14, 7])
    
    

############################################################
################# PLOT DATA ###############################
###########################################################

## Now we plot the projections and the final result

    gl.set_subplots(1,3)
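bMA.get_gaussian_ellipse_params and bMA.get_ellipse_points are the author's helpers; a plausible numpy reconstruction under standard confidence-ellipse math (note 2.4477 is sqrt(5.991), the square root of the 95% chi-square quantile for 2 dof), offered as an assumption rather than the library's actual code:

import numpy as np

def gaussian_ellipse_params(mu, Sigma, Chi2val=2.4477):
    vals, vecs = np.linalg.eigh(Sigma)           # eigenvalues in ascending order
    order = np.argsort(vals)[::-1]               # sort principal axis first
    vals, vecs = vals[order], vecs[:, order]
    w = Chi2val * np.sqrt(vals[0])               # semi-axis along first eigenvector
    h = Chi2val * np.sqrt(vals[1])               # semi-axis along second eigenvector
    theta = np.arctan2(vecs[1, 0], vecs[0, 0])   # rotation angle of the major axis
    return np.asarray(mu).ravel(), w, h, theta

def ellipse_points(mean, w, h, theta, n=200):
    t = np.linspace(0, 2 * np.pi, n)
    circle = np.stack([w * np.cos(t), h * np.sin(t)], axis=1)
    R = np.array([[np.cos(theta), -np.sin(theta)],
                  [np.sin(theta),  np.cos(theta)]])
    return circle @ R.T + mean                   # (n, 2) boundary points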
Example #12
            plots_weights_layer(mu_W,
                                sigma_W,
                                mu_b,
                                sigma_b,
                                ax1,
                                "All weights",
                                legend=[legend],
                                alpha=0.1)
        list_all_axes.append(ax1)

    max_mu, min_mu, max_std, min_std, max_abs = compute_all_boundaries(
        list_all_weights)
    plot_signifant_region(ax1, max_mu, min_mu, max_std, min_std, max_abs)
    gl.set_zoom(ax=ax1,
                xlimPad=[0.1, 0.1],
                ylimPad=[0.1, 0.1],
                X=np.array([min_mu, max_mu]),
                Y=np.array([min_std, max_std]))

    gl.set_fontSizes(ax=list_all_axes,
                     title=15,
                     xlabel=15,
                     ylabel=15,
                     legend=10,
                     xticks=12,
                     yticks=12)

    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
Example #13
def plot_VB_weights_mu_std_2D(VBmodel, ax1, type="LinearVB", title=""):
    """
    This function plots the variational weights in the 2 axes given
    """
    l = 0
    if (type == "LinearVB"):
        [mu_W, sigma_W, mu_b, sigma_b] = get_LinearVB_weights(VBmodel)
        shape_weights = VBmodel.mu_weight.detach().cpu().numpy().shape
        title += " " + str(shape_weights)
        #        title = ["linear layer: %i"%(l)]

        plots_weights_layer(mu_W, sigma_W, mu_b, sigma_b, ax1, title)

        prior = VBmodel.prior
        max_mu, min_mu, max_std, min_std, max_abs = get_boundaries_plot(
            mu_W, sigma_W, mu_b, sigma_b)

        gl.scatter(
            0,
            prior.sigma1,
            lw=3,
            ax=ax1,
            legend=["Prior 1 (%.3f)" % (prior.sigma1)],
            color="k",
            marker="x",
        )
        gl.scatter(0,
                   prior.sigma2,
                   lw=3,
                   ax=ax1,
                   legend=["Prior 2 (%.3f)" % (prior.sigma2)],
                   color="b",
                   marker="x")

        plot_signifant_region(ax1, max_mu, min_mu, max_std, min_std, max_abs)
        gl.set_zoom(ax=ax1,
                    xlimPad=[0.1, 0.1],
                    ylimPad=[0.1, 0.1],
                    X=np.array([min_mu, max_mu]),
                    Y=np.array([min_std, max_std]))

    if (type == "HighwayVB"):
        [mu_W, sigma_W, mu_b, sigma_b] = get_LinearVB_weights(VBmodel)
        shape_weights = VBmodel.mu_weight.detach().cpu().numpy().shape
        title += " " + str(shape_weights)
        #        title = ["linear layer: %i"%(l)]
        print(mu_W.shape, mu_b.shape)
        #        plots_weights_layer(mu_W[:200,:], sigma_W[:200,:], mu_b[:200], sigma_b[:200], ax1, title + " $G(x)$")
        #        plots_weights_layer(mu_W[200:,:], sigma_W[200:,:], mu_b[200:], sigma_b[200:], ax1, title+ " $H(x)$")

        plots_weights_layer(mu_W.reshape(shape_weights)[:200, :].flatten(),
                            sigma_W.reshape(shape_weights)[:200, :].flatten(),
                            mu_b[:200],
                            sigma_b[:200],
                            ax1,
                            title,
                            legend=["Weights and biases G(x)"])
        plots_weights_layer(mu_W.reshape(shape_weights)[200:, :].flatten(),
                            sigma_W.reshape(shape_weights)[200:, :].flatten(),
                            mu_b[200:],
                            sigma_b[200:],
                            ax1,
                            title,
                            legend=["Weights and biases H(x)"])

        prior = VBmodel.prior
        max_mu, min_mu, max_std, min_std, max_abs = get_boundaries_plot(
            mu_W, sigma_W, mu_b, sigma_b)

        gl.scatter(
            0,
            prior.sigma1,
            lw=3,
            ax=ax1,
            legend=["Prior 1 (%.3f)" % (prior.sigma1)],
            color="k",
            marker="x",
        )
        gl.scatter(0,
                   prior.sigma2,
                   lw=3,
                   ax=ax1,
                   legend=["Prior 2 (%.3f)" % (prior.sigma2)],
                   color="b",
                   marker="x")

        plot_signifant_region(ax1, max_mu, min_mu, max_std, min_std, max_abs)
        gl.set_zoom(ax=ax1,
                    xlimPad=[0.1, 0.1],
                    ylimPad=[0.1, 0.1],
                    X=np.array([min_mu, max_mu]),
                    Y=np.array([min_std, max_std]))
Example #14
            labels = ["","",""],
            legend = ["WMA(%i)"%nMA2], color = "k",
            xlimPad = [0.1,0.3], ylimPad = [0.1,0.4],
            marker = marker, AxesStyle = "Normal2 - No yaxis - No xaxis")
    gl.stem([], EMAw, nf = 1, sharex = ax1, sharey = ax1,
            labels = ["","Lag","EMA"],
            legend = ["EMA(%i)"%nMA1],
            xlimPad = [0.1,0.3], ylimPad = [0.1,0.4],
            marker = marker, AxesStyle = "Normal2")
    gl.stem([], EMAw2, nf = 1, sharex = ax1, sharey = ax1,
            labels = ["","Lag",""],
            legend = ["EMA(%i)"%nMA2], color = "k",
            xlimPad = [0.1,0.3], ylimPad = [0.1,0.4],
            marker = marker, AxesStyle = "Normal2 - No yaxis")
            
    gl.set_zoom(xlim = [-2,nMA2 * (1.10)], ylim = [-0.01, 0.25])
    axes_list = gl.get_axes()
    for ax in axes_list:
        gl.format_yaxis(ax = ax, Nticks = 10)
        
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.05, hspace=0.05)

    gl.savefig(folder_images +'windows.png', 
               dpi = 100, sizeInches = [2*8, 2*3])

if (MAMAs):
    # Some basic indicators.
    price = timeData.get_timeSeries(["Close"])
    dates = timeData.get_dates()

    # For comparing SMA, EMA, WMA
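The EMAw / WMAw arrays stemmed above are the impulse responses (weight windows) of the moving-average filters being compared. A hedged sketch of how such windows might be built, with hypothetical helper names:

import numpy as np

def sma_weights(n):
    return np.ones(n) / n                        # simple MA: uniform weights

def wma_weights(n):
    w = np.arange(n, 0, -1, dtype=float)         # linearly decaying weights
    return w / w.sum()

def ema_weights(n, alpha=None):
    alpha = alpha if alpha is not None else 2.0 / (n + 1)
    w = alpha * (1 - alpha) ** np.arange(n)      # geometric decay
    return w / w.sum()                           # normalize the truncated window

EMAw = ema_weights(20)                           # e.g. an EMA(20) window like those plotted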
Example #15
def create_Bayesian_analysis_charts(model,
                                    X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                                    tr_loss, val_loss, KL_loss,final_loss_tr,final_loss_val,
                                    xgrid_real_func, ygrid_real_func,
                                    folder_images,
                                    epoch_i = None):

    # Configurations of the plots
   
    alpha_points = 0.2 
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ############################# Data computation #######################
    if not isinstance(X_data_tr, list):
        if (X_data_tr.shape[1] == 1): # Regression Example 
            x_grid, all_y_grid,most_likely_ygrid = compute_regression_1D_data( model,X_data_tr,X_data_val, Nsamples = 100)
        elif(X_data_tr.shape[1] == 2):  # Classification Example 
            xx,yy , all_y_grid,most_likely_ygrid = compute_classification_2D_data( model,X_data_tr,X_data_val, Nsamples = 100)
        else:        # RNN
            x_grid, all_y_grid,most_likely_ygrid = compute_RNN_1D_data( model,X_data_tr,X_data_val, Nsamples = 100)
        
    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6,3), (0,0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6,3), (3,0), rowspan=3, colspan=1, sharex = ax1, sharey = ax1)
    
    ax3 = gl.subplot2grid((6,3), (0,1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6,3), (2,1), rowspan=2, colspan=1, sharex = ax3)
    ax5 = gl.subplot2grid((6,3), (4,1), rowspan=2, colspan=1, sharex = ax3)
    
    ax6 = gl.subplot2grid((6,3), (0,2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6,3), (3,2), rowspan=3, colspan=1, sharex = ax6)
    
    if isinstance(X_data_tr, list):
        Xtrain = [torch.tensor(X_data_tr[i],device=model.cf_a.device, dtype=model.cf_a.dtype) for i in range(len(X_data_tr))]
        Ytrain = torch.tensor(Y_data_tr,device=model.cf_a.device, dtype=torch.int64)
        
        Xval = [torch.tensor(X_data_val[i],device=model.cf_a.device, dtype=model.cf_a.dtype) for i in range(len(X_data_val))]
        Yval = torch.tensor(Y_data_val,device=model.cf_a.device, dtype=torch.int64)

        confusion = model.get_confusion_matrix(Xtrain, Ytrain)
        plot_confusion_matrix(confusion,model.languages, ax1 )
        confusion = model.get_confusion_matrix(Xval, Yval)
        plot_confusion_matrix(confusion,model.languages, ax2 )

    else:
        if (X_data_tr.shape[1] == 1): # Regression Example 
            plot_data_regression_1d_2axes(X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func, X_data_val, Y_data_val,
                                              x_grid,all_y_grid, most_likely_ygrid,
                                              alpha_points, color_points_train, color_points_val, color_most_likey,color_mean,color_truth,
                                              ax1,ax2)
        elif(X_data_tr.shape[1] == 2): # Classification Example 
            plot_data_classification_2d_2axes(X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func, X_data_val, Y_data_val,
                                               xx,yy,all_y_grid, most_likely_ygrid,
                                              alpha_points, color_points_train, color_points_val, color_most_likey,color_mean, color_truth,
                                              ax1,ax2)
        else:       # RNN example
            plot_data_RNN_1d_2axes(X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func, X_data_val, Y_data_val,
                                              x_grid,all_y_grid, most_likely_ygrid,
                                              alpha_points, color_points_train, color_points_val, color_most_likey,color_mean,color_truth,
                                              ax1,ax2)
 
#    gl.fill_between (x_grid, [mean_samples_grid + 2*std_samples_grid, mean_samples_grid - 2*std_samples_grid]
#                              , ax  = ax2, alpha = 0.10, color = "b", legend = ["Mean realizaions"])
    ## ax2: The uncertainty of the prediction !!
#    gl.plot (x_grid, std_samples_grid, ax = ax2, labels = ["Std (%i)"%(Nsamples),"X","f(X)"], legend = [" std predictions"], fill = 1, alpha = 0.3)
    
    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([], tr_loss, ax = ax3, lw = 3, labels = ["Losses", "","Data loss"], legend = ["train"],
            color = color_train_loss)
    gl.plot([], val_loss,ax = ax3, lw = 3, legend = ["validation"],
            color = color_val_loss,  AxesStyle = "Normal - No xaxis")
    
    ## ax4: The evolution of the KL loss
    gl.plot([], KL_loss, ax = ax4, lw = 3, labels = ["", "","KL loss"], legend = ["Bayesian Weights"],
            AxesStyle = "Normal - No xaxis", color = "k")

    ## ax5: Evolution of the total loss
    gl.plot([], final_loss_tr, ax = ax5, lw = 3, labels = ["", "epoch","Total Loss (Bayes)"], legend = ["train"],
            color = color_train_loss)
    gl.plot([], final_loss_val,ax = ax5, lw = 3, legend = ["validation"], color = color_val_loss)
           
    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model,ax6,ax7)
    ## Shade in ax7 the non-significant region |mu| < 2*sigma, i.e. sigma > |mu|/2
    mu_grid = np.linspace(-3,3,100)
    y_grid = np.abs(mu_grid)/2
    
    gl.fill_between(mu_grid, 10*np.ones(mu_grid.size), y_grid,
                    alpha = 0.2, color = "r", ax = ax7, legend = ["95% non-significant"])
    
    gl.set_zoom (ax = ax6, ylim = [-0.1,10])
    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.05, np.exp(model.cf_a.input_layer_prior["log_sigma2"])*(1 + 0.15)])
    
#    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,2])
    
    # Set final properties and save figure
    gl.set_fontSizes(ax = [ax1,ax2,ax3,ax4,ax5,ax6,ax7], title = 20, xlabel = 20, ylabel = 20, 
                      legend = 10, xticks = 12, yticks = 12)


    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.10)
    
    if epoch_i is None:
        gl.savefig(folder_images +"../"+'Final_values_regression_1D_' +str(model.cf_a.eta_KL) +'.png', 
                   dpi = 100, sizeInches = [20, 10])
    else:
        gl.savefig(folder_images +'%i.png'%epoch_i, 
                   dpi = 100, sizeInches = [20, 10], close = True, bbox_inches = "tight")
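Example #16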
def create_Bayesian_analysis_charts_simplified(model,
                                               train_dataset,
                                               validation_dataset,
                                               tr_loss,
                                               val_loss,
                                               KL_loss,
                                               folder_images,
                                               epoch_i=None):

    # Configurations of the plots
    alpha_points = 0.2
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6, 3), (0, 0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6, 3), (3, 0),
                          rowspan=3,
                          colspan=1,
                          sharex=ax1,
                          sharey=ax1)

    ax3 = gl.subplot2grid((6, 3), (0, 1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6, 3), (2, 1), rowspan=2, colspan=1, sharex=ax3)
    ax5 = gl.subplot2grid((6, 3), (4, 1), rowspan=2, colspan=1, sharex=ax3)

    ax6 = gl.subplot2grid((6, 3), (0, 2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6, 3), (3, 2), rowspan=3, colspan=1, sharex=ax6)

    ####### ax1, ax2: Get confusion matrices ##########

    labels_classes, confusion = model.get_confusion_matrix(train_dataset)
    plot_confusion_matrix(confusion, labels_classes, ax1)
    labels_classes, confusion = model.get_confusion_matrix(validation_dataset)
    plot_confusion_matrix(confusion, labels_classes, ax2)

    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([],
            tr_loss,
            ax=ax3,
            lw=3,
            labels=["Losses", "", "Data loss (MSE)"],
            legend=["train"],
            color=color_train_loss)
    gl.plot([],
            val_loss,
            ax=ax3,
            lw=3,
            legend=["validation"],
            color=color_val_loss,
            AxesStyle="Normal - No xaxis")

    ## ax4: The evolution of the KL loss
    gl.plot([],
            KL_loss,
            ax=ax4,
            lw=3,
            labels=["", "", "KL loss"],
            legend=["Bayesian Weights"],
            AxesStyle="Normal - No xaxis",
            color="k")

    ## ax5: Evolution of the total loss
    gl.plot([],
            tr_loss,
            ax=ax5,
            lw=3,
            labels=["", "epoch", "Total Loss (Bayes)"],
            legend=["train"],
            color=color_train_loss)
    gl.plot([],
            val_loss,
            ax=ax5,
            lw=3,
            legend=["validation"],
            color=color_val_loss)

    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model, ax6, ax7)

    gl.set_zoom(ax=ax6, ylim=[-0.1, 10])
    gl.set_zoom(ax=ax7, xlim=[-2.5, 2.5], ylim=[-0.1, 0.5])

    # Set final properties and save figure
    gl.set_fontSizes(ax=[ax1, ax2, ax3, ax4, ax5, ax6, ax7],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=10,
                     xticks=12,
                     yticks=12)

    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.30,
                       hspace=0.10)

    if epoch_i is None:
        gl.savefig(folder_images + 'Training_Example_Data_Bayesian.png',
                   dpi=100,
                   sizeInches=[20, 10])
    else:
        gl.savefig(folder_images + '%i.png' % epoch_i,
                   dpi=100,
                   sizeInches=[20, 10],
                   close=True,
                   bbox_inches="tight")
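Example #17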
def plot_data_classification_2d_2axes(X_data_tr, Y_data_tr, xgrid_real_func,
                                      ygrid_real_func, X_data_val, Y_data_val,
                                      xx, yy, all_y_grid, most_likely_ygrid,
                                      alpha_points, color_points_train,
                                      color_points_val, color_most_likey,
                                      color_mean, color_truth, ax1, ax2):
    """
    This function plots the outputs of the Classification model for the 2D example
    """

    alpha_points = 1  # Override the input alpha: fully opaque points for the classification plots
    ## Compute mean and std of regression
    std_samples_grid = np.std(all_y_grid, axis=1)
    mean_samples_grid = np.mean(all_y_grid, axis=1)

    ############## ax1: Data + Most likely + Real + Mean !! ########################

    classes = np.unique(Y_data_tr).flatten()
    colors = ["r", "g", "b"]

    for i in range(classes.size):
        X_data_tr_class = X_data_tr[np.where(Y_data_tr == classes[i])[0], :]
        X_data_val_class = X_data_val[np.where(Y_data_val == classes[i])[0], :]
        #        print (X_data_tr_class.shape)
        #        print (classes)
        #        print (X_data_tr)
        if ((X_data_tr_class.size > 0) and (X_data_val_class.size > 0)):
            gl.scatter(
                X_data_tr_class[:, 0].flatten().tolist(),
                X_data_tr_class[:, 1].flatten().tolist(),
                ax=ax1,
                lw=3,  #legend = ["tr points"], 
                labels=["Data and predictions", "", "Y"],
                alpha=alpha_points,
                color=colors[i])
            gl.scatter(
                X_data_val_class[:, 0].flatten(),
                X_data_val_class[:, 1].flatten(),
                ax=ax1,
                lw=3,
                color=colors[i],  #legend = ["val points"], 
                alpha=alpha_points,
                marker=">")

    out = ax1.contourf(xx,
                       yy,
                       most_likely_ygrid.reshape(xx.shape),
                       cmap=plt.cm.coolwarm,
                       alpha=0.5)

    #    ax.scatter(X0, X1, c=y, cmap=plt.cm.coolwarm, s=20, edgecolors='k')

    #    gl.plot (xgrid_real_func, ygrid_real_func, ax  = ax1, alpha = 0.90, color = color_truth, legend = ["Truth"])
    for i in range(classes.size):
        X_data_tr_class = X_data_tr[np.where(Y_data_tr == classes[i])[0], :]
        X_data_val_class = X_data_val[np.where(Y_data_val == classes[i])[0], :]
        #        print (X_data_tr_class.shape)
        #        print (classes)
        #        print (X_data_tr)
        if ((X_data_tr_class.size > 0) and (X_data_val_class.size > 0)):
            gl.scatter(
                X_data_tr_class[:, 0].flatten().tolist(),
                X_data_tr_class[:, 1].flatten().tolist(),
                ax=ax2,
                lw=3,  #legend = ["tr points"], 
                labels=["", "X", "Y"],
                alpha=alpha_points,
                color=colors[i])
            gl.scatter(
                X_data_val_class[:, 0].flatten(),
                X_data_val_class[:, 1].flatten(),
                ax=ax2,
                lw=3,
                color=colors[i],  #legend = ["val points"], 
                alpha=alpha_points,
                marker=">")

    for ygrid in all_y_grid:
        out = ax2.contourf(xx,
                           yy,
                           ygrid.reshape(xx.shape),
                           cmap=plt.cm.coolwarm,
                           alpha=0.5)

    ############## ax2: Data + Realizations of the function !! ######################

    gl.set_zoom(xlimPad=[0.3, 0.3],
                ylimPad=[0.3, 0.3],
                ax=ax2,
                X=X_data_tr[:, 0],
                Y=X_data_tr[:, 1])
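The contourf calls above assume xx and yy come from np.meshgrid over the 2D input domain, and that each prediction grid flattens back to xx.shape. A minimal sketch of that contract (the grid construction itself is an assumption, not shown in the example):

import numpy as np

# Hypothetical 2D input domain
x_min, x_max, y_min, y_max = -3, 3, -3, 3
xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100),
                     np.linspace(y_min, y_max, 100))
# The model is evaluated on the flattened grid points...
grid_points = np.c_[xx.ravel(), yy.ravel()]      # shape (100*100, 2)
most_likely_ygrid = np.zeros(len(grid_points))   # stand-in predictions
# ...and reshaped back for contourf, as in the function above
Z = most_likely_ygrid.reshape(xx.shape)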
Example #18
    def update_plotting_chart(self, extraInfo = None):
        """
        This function updates the values in the chart using the new information
        contained in the "information" input structure.
        It is called by the Task Scheduler.
        """
        
        self.plot_lock.acquire()
        desired_value = self.Monitor.desired_value   # The desired value of the sensor
        range_warning = self.Monitor.range_warning   # The range which if crosses we send email
        range_stop = self.Monitor.range_stop     # The range which if crosses we stop
        
#        print ("Gonna plot")
        self.data_lock.acquire()
        data, time = np.array(self.data_buffer), np.array(self.time_buffer)
        self.data_lock.release()
        
        if data is None:
            self.plot_lock.release()
            return True  # For the task manager?
        if time is None:
            self.plot_lock.release()
            return True  # For the task manager?
        ## Select the start and end index to plot
        s_indx = max([data.size - self.show_window, 0])
        e_indx = data.size -1
                    
        if(self.first_plot_flag):
            ## Remove the text box
            self.initial_text_data.set_visible(False)
                
            if (len(data) < 2):  # Need at least two points to plot
                self.plot_lock.release()
                return True  # For the task manager?
            
            self.first_plot_flag = False
            
            ##------------------------------------------
            #### Warning bands 
            ax_aux, plots_data_upper_warning_band = gl.plot([time[s_indx],time[e_indx]], [desired_value + range_warning, desired_value + range_warning],  ax = self.data_axes,
                    color = "y", lw = 3, ls="--", return_drawing_elements = True, legend = ["Warning email"], loc = "upper right"); #, legend = ["Warning area"]
            
            ax_aux, plots_data_lower_warning_band = gl.plot([time[s_indx],time[e_indx]], [desired_value - range_warning, desired_value - range_warning],  ax = self.data_axes,
                    color = "y", lw = 3, ls="--", return_drawing_elements = True);
                                                            
            #### Error bands 
            ax_aux, plots_data_upper_error_band = gl.plot([time[s_indx],time[e_indx]], [desired_value + range_stop, desired_value + range_stop],  ax = self.data_axes,
                    color = "r", lw = 3, ls="--", return_drawing_elements = True, legend = ["Stop"], loc = "upper right"); #, legend = ["Warning area"]
            
            ax_aux, plots_data_lower_error_band = gl.plot([time[s_indx],time[e_indx]], [desired_value - range_stop, desired_value - range_stop],  ax = self.data_axes,
                    color = "r", lw = 3, ls="--", return_drawing_elements = True);
                                                            
                    
            ax_aux, plot_time_series = gl.plot(time[s_indx:e_indx+1], data[s_indx:e_indx+1],  ax = self.data_axes,
                    labels = ["Cleaning Procedure: " + self.cleaning_ID, self.time_now.strftime("%B %d, %Y"), "PH"], color = "k", xaxis_mode = "intraday", return_drawing_elements = True,
                    loc = "upper right");
        
            gl.set_fontSizes(ax = self.data_axes, title = 25, xlabel = 20, ylabel = 20, 
                      legend = 15, xticks = 15, yticks = 15)
            
            ## Save the elements so that we can modify them later
            
            self.plots_data = [plot_time_series[0], plots_data_upper_warning_band[0], plots_data_lower_warning_band[0], plots_data_upper_error_band[0], plots_data_lower_error_band[0]]
            

            
        else:
#            print self.plots_data
            self.plots_data[0].set_xdata(time[s_indx:e_indx+1])
            self.plots_data[0].set_ydata(data[s_indx:e_indx+1])
            
            ## Warning bands
            self.plots_data[1].set_xdata([time[s_indx],time[e_indx]])
            self.plots_data[1].set_ydata([desired_value + range_warning, desired_value + range_warning])
            
            self.plots_data[2].set_xdata([time[s_indx],time[e_indx]])
            self.plots_data[2].set_ydata([desired_value - range_warning, desired_value - range_warning])
            
            ## Error bands
            self.plots_data[3].set_xdata([time[s_indx],time[e_indx]])
            self.plots_data[3].set_ydata([desired_value + range_stop, desired_value + range_stop])
            self.plots_data[4].set_xdata([time[s_indx],time[e_indx]])
            self.plots_data[4].set_ydata([desired_value - range_stop, desired_value - range_stop])
#            gl.set_xlim(ax = self.data_axes, X = time[s_indx:e_indx+1], xmin = np.min(time[s_indx:e_indx+1]), xmax = np.max(time[s_indx:e_indx+1]))
#            gl.set_ylim(ax = self.data_axes, Y = data[s_indx:e_indx+1], ymin =np.min(data[s_indx:e_indx+1]),ymax = np.max(data[s_indx:e_indx+1]))
 

#                gl.set_zoom(X = time[s_indx:e_indx+1],Y = data[s_indx:e_indx+1],xlimPad = [0.2,0.2] ,ylimPad = [0.1, 0.1])
#                gl.set_zoom(X = time[s_indx:e_indx+1],Y = data[s_indx:e_indx+1],xlimPad = [0.2,0.2] ,ylimPad = [0.1, 0.1])
#                gl.set_zoom(X = time[s_indx:e_indx+1],Y = data[s_indx:e_indx+1],xlimPad = [0.2,0.2] ,ylim = [0, 14])
            gl.set_zoom(X = time[s_indx:e_indx+1],Y = data[s_indx:e_indx+1],xlimPad = [0.2,0.2] ,ylim = [0, 10])
            pass
#                self.data_axes.update()
#                self.data_axes.draw(self.plots_data[0])
        plt.draw()
#        l.set_ydata(ydata)
#        ax.set_ylim(np.min(ydata), np.max(ydata))
#        plt.draw()
        
#        self.fig.canvas.draw()
#        #### TOUCH-UPS ########
#        if (len(self.data_buffer) > 1):
#            gl.set_zoom(X = time[s_indx:e_indx+1],Y = data[s_indx:e_indx+1],xlimPad = [0.2,0.2] ,ylimPad = [0.1, 0.1])
        
#
    #    if (update_data.index == 1000):
    #        rt.stop()
    #        information.serial.close()
        self.check_monitoring(data[s_indx:e_indx+1])
        
        self.plot_lock.release()
        return True  # For the task manager?
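The else-branch of update_plotting_chart avoids re-creating artists by mutating the existing Line2D objects. The core matplotlib pattern, stripped of the locks and the gl wrapper:

import numpy as np
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
line, = ax.plot([], [], color="k")               # create the artist once

def refresh(time, data):
    line.set_xdata(time)                         # mutate in place instead of replotting
    line.set_ydata(data)
    ax.relim()                                   # recompute data limits
    ax.autoscale_view()
    fig.canvas.draw_idle()                       # schedule a redraw

refresh(np.arange(10), np.random.rand(10))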
Example #19
    r_ellipse = bMA.get_ellipse_points(mean,w,h,theta)
    gl.plot(r_ellipse[:,0], r_ellipse[:,1], ax = ax0, ls = "--",color = "k", lw = 2,
             legend = ["Corr: %.2f"%(corr[0,1])])
    
    gl.plot([mean[0], mean[0] + vecs[0,0]*w], 
            [mean[1], mean[1] + vecs[0,1]*w], ax = ax0, ls = "--",color = "k")
    gl.plot([mean[0], mean[0] + vecs[1,0]*h], 
            [mean[1], mean[1] + vecs[1,1]*h], ax = ax0, ls = "--",color = "k")
    
    gl.plot([mean[0], mean[0] - vecs[0,0]*w], 
            [mean[1], mean[1] - vecs[0,1]*w], ax = ax0, ls = "--",color = "k")
    gl.plot([mean[0], mean[0] - vecs[1,0]*h], 
            [mean[1], mean[1] - vecs[1,1]*h], ax = ax0, ls = "--",color = "k")
            
    ax0.axis('equal')
    gl.set_zoom(ax = ax0, X =r_ellipse[:,0], Y = r_ellipse[:,1],
                ylimPad = [0.2,0.2],xlimPad = [0.2,0.2])

    vecs_original = vecs
    
    #### PLOT THE TRANSFORMED ONES !!!
    ##############################################################################
    X_1,X_2 = Xproj[:,[0]], Xproj[:,[1]]
    mu_1, mu_2 = np.mean(Xproj, axis =0)
    std_1,std_2 = np.std(Xproj,axis =0)
    
    cov = np.cov(np.concatenate((X_1,X_2),axis = 1).T).T
    std_K = 3
    ## Do stuff now
    ax1 = gl.subplot2grid((1,2), (0,1), rowspan=1, colspan=1,  sharex = ax0, sharey = ax0)
    gl.scatter(X_1,X_2, alpha = 0.5, ax = ax1, lw = 4, AxesStyle = "Normal",
               labels = ["PCA Prejected 2D data","Y1", "Y2"], color = "dark navy blue")
Example #20
        axes_l.append(ax_ii)
    
    gl.subplots_adjust(left=.09, bottom=.20, right=.90, top=.95, wspace=.2, hspace=0.001)
    ax_i = gl.subplot2grid((days_plot,Ndiv), (0,Ndiv-1), rowspan=int(days_plot/2), colspan=1) 
    for i in range(1,K+1):
        gl.scatter(0, i, legend = [" K = %i"%(i)], lw = 28, AxesStyle = "Normal - No xaxis - No yaxis" , loc = "center left")
        
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.2, hspace=0.01)
    image_name = "EM_%iSymbol_timeAnalysis_%iclusters"%(2,K)+ clusters_relation+ "_"+str(periods[0])+ '.png'
#    
    gl.set_fontSizes(ax = axes_l, title = 20, xlabel = 20, ylabel = 20, 
                      legend = 35, xticks = 25, yticks = 10)
    
    gl.set_fontSizes(ax = ax_i, title = 20, xlabel = 20, ylabel = 20, 
                      legend = 30, xticks = 20, yticks = 10)
    gl.set_zoom(xlim = [10,10.50])
    
    gl.savefig(folder_images + image_name, 
               dpi = 100, sizeInches = [30, 12])

 
## Save to disk the clusters
#    mus_kk = []
#    for i in range(K):
#        mus_kk.append(theta_list[-1][i][0])
#    
#    mus_kk = np.concatenate(mus_kk,axis = 1)
#    
#    
##    df = pd.DataFrame(mus_kk)
##    df.to_csv(folder_images + "file_path.csv")
Example #21
################# PLOT DATA ###############################
############################################################

if(distribution_graph):
    # Get the histogram and gaussian estimations !
    ## Scatter plot of the points 

    gl.init_figure()
    ax1 = gl.scatter(ret1, np.zeros(ret1.shape), alpha = 0.5, lw = 4, AxesStyle = "Normal",
               labels = ["",symbolIDs[0], ""],
               legend = ["%i points"%ret1.size])
    
    for i in range(int(ret1.size/26)):
        gl.scatter(ret1[i*26:(i+1)*26], np.ones(ret1[i*26:(i+1)*26].shape)*(i+1), alpha = 0.5, lw = 4, AxesStyle = "Normal",
                   legend = ["Day %i"%(i+1)])
    gl.set_zoom(ax = ax1, X = ret1,xlimPad = [0.1,0.8])

    gl.savefig(folder_images +'InitPointsInferenceDays.png', 
               dpi = 100, sizeInches = [10, 4])
    
if (estimation_days_graph):
    gl.init_figure()
    
    x_grid, y_values = bMA.gaussian1D_points(X = ret1, num = 100, std_K = 2, x_grid = None)
    
    ax1 = gl.plot(x_grid, y_values, alpha = 0.1, lw = 4, AxesStyle = "Normal",
               labels = ["",symbolIDs[0], "Distribution"],
               legend = ["%i points"%ret1.size] , color = "k")
              
    for i in range(int(ret1.size/26)):
        D = ret1[i*26:(i+1)*26]
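bMA.gaussian1D_points is the author's helper; a likely numpy equivalent, offered as an assumption, that evaluates a Gaussian fitted to X on a grid spanning ±std_K standard deviations:

import numpy as np

def gaussian1D_points(X, num=100, std_K=2, x_grid=None):
    mu, std = np.mean(X), np.std(X)
    if x_grid is None:
        x_grid = np.linspace(mu - std_K * std, mu + std_K * std, num)
    # Gaussian pdf evaluated on the grid
    y_values = np.exp(-0.5 * ((x_grid - mu) / std) ** 2) / (std * np.sqrt(2 * np.pi))
    return x_grid, y_values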
Example #22
def generate_images_iterations_ll(Xs, mus, covs, Ks, myDManager, logl,
                                  theta_list, model_theta_list,
                                  folder_images_gif):
    #    os.remove(folder_images_gif) # Remove previous images if existing
    """
    WARNING: meant for only 3 distributions, since cluster responsibilities are encoded as RGB color components
    """
    import shutil
    ul.create_folder_if_needed(folder_images_gif)
    shutil.rmtree(folder_images_gif)
    ul.create_folder_if_needed(folder_images_gif)
    ######## Plot the original data #####

    Xdata = np.concatenate(Xs, axis=1).T
    colors = ["r", "b", "g"]
    K_G, K_W, K_vMF = Ks

    ### FOR EACH ITERATION
    for i in range(len(theta_list)):  # theta_list
        indx = i
        gl.init_figure()
        ax1 = gl.subplot2grid((1, 2), (0, 0), rowspan=1, colspan=1)

        ## Get the relative ll of the Gaussian denoising cluster.
        ll = myDManager.pdf_log_K(Xdata, theta_list[indx])
        N, K = ll.shape
        #        print ll.shape
        for j in range(N):  # For every sample
            #TODO: Can this not be done without a for ?
            # Normalize the probability of the sample being generated by the clusters
            Marginal_xi_probability = gf.sum_logs(ll[j, :])
            ll[j, :] = ll[j, :] - Marginal_xi_probability

            ax1 = gl.scatter(
                Xdata[j, 0],
                Xdata[j, 1],
                labels=[
                    'EM Evolution. Kg:' + str(K_G) + ', Kw:' + str(K_W) +
                    ', K_vMF:' + str(K_vMF), "X1", "X2"
                ],
                color=(np.exp(ll[j, 1]), np.exp(ll[j, 0]),
                       np.exp(ll[j, 2])),  ###  np.exp(ll[j,2])
                alpha=1,
                nf=0)

        # Only doable if the clusters don't die
        for k_c in myDManager.clusterk_to_Dname.keys():
            k = myDManager.clusterk_to_thetak[k_c]
            distribution_name = myDManager.clusterk_to_Dname[k_c]  # G W

            if (distribution_name == "Gaussian"):
                ## Plot the evolution of the mu
                #### Plot the covariance of the clusters !
                mean, w, h, theta = bMA.get_gaussian_ellipse_params(
                    mu=theta_list[indx][k][0],
                    Sigma=theta_list[indx][k][1],
                    Chi2val=2.4477)
                r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
                gl.plot(r_ellipse[:, 0],
                        r_ellipse[:, 1],
                        ax=ax1,
                        ls="-.",
                        lw=3,
                        AxesStyle="Normal2",
                        legend=[
                            "Kg(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

            elif (distribution_name == "Watson"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(
                        np.exp(Wad.Watson_pdf_log(Xgrid[:, i], [mu, kappa])))

                probs = np.array(probs)
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)

                gl.plot(X1_w,
                        X2_w,
                        alpha=1,
                        lw=3,
                        ls="-.",
                        legend=[
                            "Kw(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

            elif (distribution_name == "vonMisesFisher"):
                #### Plot the pdf of the distributino !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i in range(Nsa):
                    probs.append(
                        np.exp(
                            vMFd.vonMisesFisher_pdf_log(
                                Xgrid[:, i], [mu, kappa])))

                probs = np.array(probs)
                probs = probs.reshape((probs.size, 1)).T
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)

                #            print X1_w.shape, X2_w.shape
                gl.plot(X1_w,
                        X2_w,
                        alpha=1,
                        lw=3,
                        ls="-.",
                        legend=[
                            "Kvmf(%i). pi:%0.2f" %
                            (k, float(model_theta_list[indx][0][0, k]))
                        ])

        gl.set_zoom(xlim=[-6, 6], ylim=[-6, 6], ax=ax1)
        ax2 = gl.subplot2grid((1, 2), (0, 1), rowspan=1, colspan=1)
        if (indx == 0):
            gl.add_text(positionXY=[0.1, .5],
                        text=r' Initialization Incomplete LogLike: %.2f' %
                        (logl[0]),
                        fontsize=15)
        elif (indx >= 1):
            # x and y must have the same length: the indx iterations so far
            gl.plot(
                range(1, indx + 1),
                np.array(logl).flatten()[1:(indx + 1)],
                ax=ax2,
                legend=["Iteration %i, Incomplete LL: %.2f" % (indx, logl[indx])],
                labels=[
                    "Convergence of LL with generated data", "Iterations", "LL"
                ],
                lw=2)
            gl.scatter(1, logl[1], lw=2)
            pt = 0.05  # Relative padding around the LL range for the zoom
            gl.set_zoom(xlim=[0, len(logl)],
                        ylim=[
                            logl[1] - (logl[-1] - logl[1]) * pt,
                            logl[-1] + (logl[-1] - logl[1]) * pt
                        ],
                        ax=ax2)

        gl.subplots_adjust(left=.09,
                           bottom=.10,
                           right=.90,
                           top=.95,
                           wspace=.2,
                           hspace=0.01)

        gl.savefig(folder_images_gif + 'gif_' + str(indx) + '.png',
                   dpi=100,
                   sizeInches=[16, 8],
                   close="yes",
                   bbox_inches=None)

        gl.close("all")
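
# The function above only writes one PNG frame per EM iteration into
# folder_images_gif. A minimal sketch of how those frames could be stitched
# into an animated GIF, assuming the imageio package is available (this
# helper is not part of the original code):
def frames_to_gif(folder_images_gif, n_frames, gif_path, seconds_per_frame=0.3):
    import imageio
    frames = [imageio.imread(folder_images_gif + 'gif_' + str(i) + '.png')
              for i in range(n_frames)]
    imageio.mimsave(gif_path, frames, duration=seconds_per_frame)
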
def create_Bayesian_analysis_charts(model,
                                    X_data_tr,
                                    Y_data_tr,
                                    X_data_val,
                                    Y_data_val,
                                    tr_loss,
                                    val_loss,
                                    KL_loss,
                                    final_loss_tr,
                                    final_loss_val,
                                    xgrid_real_func,
                                    ygrid_real_func,
                                    folder_images,
                                    epoch_i=None):

    # Configurations of the plots

    alpha_points = 0.2
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ############################# Data computation #######################
    if (type(X_data_tr) == type([])):
        pass  # List input: the RNN language-classification case, handled further below
    else:
        if (X_data_tr.shape[1] == 1):  # Regression Example
            x_grid, all_y_grid, most_likely_ygrid = compute_regression_1D_data(
                model, X_data_tr, X_data_val, Nsamples=100)
        elif (X_data_tr.shape[1] == 2):  # Classification Example
            xx, yy, all_y_grid, most_likely_ygrid = compute_classification_2D_data(
                model, X_data_tr, X_data_val, Nsamples=100)
        else:  # RNN
            x_grid, all_y_grid, most_likely_ygrid = compute_RNN_1D_data(
                model, X_data_tr, X_data_val, Nsamples=100)

    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6, 3), (0, 0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6, 3), (3, 0),
                          rowspan=3,
                          colspan=1,
                          sharex=ax1,
                          sharey=ax1)

    ax3 = gl.subplot2grid((6, 3), (0, 1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6, 3), (2, 1), rowspan=2, colspan=1, sharex=ax3)
    ax5 = gl.subplot2grid((6, 3), (4, 1), rowspan=2, colspan=1, sharex=ax3)

    ax6 = gl.subplot2grid((6, 3), (0, 2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6, 3), (3, 2), rowspan=3, colspan=1, sharex=ax6)

    if (type(X_data_tr) == type([])):
        Xtrain = [
            torch.tensor(X_data_tr[i],
                         device=model.cf_a.device,
                         dtype=model.cf_a.dtype) for i in range(len(X_data_tr))
        ]
        Ytrain = torch.tensor(Y_data_tr,
                              device=model.cf_a.device,
                              dtype=torch.int64)

        Xval = [
            torch.tensor(X_data_val[i],
                         device=model.cf_a.device,
                         dtype=model.cf_a.dtype)
            for i in range(len(X_data_val))
        ]
        Yval = torch.tensor(Y_data_val,
                            device=model.cf_a.device,
                            dtype=torch.int64)

        confusion = model.get_confusion_matrix(Xtrain, Ytrain)
        plot_confusion_matrix(confusion, model.languages, ax1)
        confusion = model.get_confusion_matrix(Xval, Yval)
        plot_confusion_matrix(confusion, model.languages, ax2)

    else:
        if (X_data_tr.shape[1] == 1):  # Regression Example
            plot_data_regression_1d_2axes(
                X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func,
                X_data_val, Y_data_val, x_grid, all_y_grid, most_likely_ygrid,
                alpha_points, color_points_train, color_points_val,
                color_most_likey, color_mean, color_truth, ax1, ax2)
        elif (X_data_tr.shape[1] == 2):  # Classification Example
            plot_data_classification_2d_2axes(
                X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func,
                X_data_val, Y_data_val, xx, yy, all_y_grid, most_likely_ygrid,
                alpha_points, color_points_train, color_points_val,
                color_most_likey, color_mean, color_truth, ax1, ax2)
        else:  # RNN example
            plot_data_RNN_1d_2axes(X_data_tr, Y_data_tr, xgrid_real_func,
                                   ygrid_real_func, X_data_val, Y_data_val,
                                   x_grid, all_y_grid, most_likely_ygrid,
                                   alpha_points, color_points_train,
                                   color_points_val, color_most_likey,
                                   color_mean, color_truth, ax1, ax2)


    #    gl.fill_between(x_grid, [mean_samples_grid + 2*std_samples_grid,
    #                             mean_samples_grid - 2*std_samples_grid],
    #                    ax=ax2, alpha=0.10, color="b", legend=["Mean realizations"])
    ## ax2: The uncertainty of the prediction !!
    #    gl.plot(x_grid, std_samples_grid, ax=ax2,
    #            labels=["Std (%i)" % (Nsamples), "X", "f(X)"],
    #            legend=["std predictions"], fill=1, alpha=0.3)

    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([],
            tr_loss,
            ax=ax3,
            lw=3,
            labels=["Losses", "", "Data loss"],
            legend=["train"],
            color=color_train_loss)
    gl.plot([],
            val_loss,
            ax=ax3,
            lw=3,
            legend=["validation"],
            color=color_val_loss,
            AxesStyle="Normal - No xaxis")

    ## ax4: The evolution of the KL loss
    gl.plot([],
            KL_loss,
            ax=ax4,
            lw=3,
            labels=["", "", "KL loss"],
            legend=["Bayesian Weights"],
            AxesStyle="Normal - No xaxis",
            color="k")

    ## ax5: Evolution of the total loss
    gl.plot([],
            final_loss_tr,
            ax=ax5,
            lw=3,
            labels=["", "epoch", "Total Loss (Bayes)"],
            legend=["train"],
            color=color_train_loss)
    gl.plot([],
            final_loss_val,
            ax=ax5,
            lw=3,
            legend=["validation"],
            color=color_val_loss)

    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model, ax6, ax7)
    ## Shade in ax7 the region where a weight is non-significant at ~95%:
    ## |mu| < 2*sigma, i.e. everything above the line sigma = |mu| / 2
    mu_grid = np.linspace(-3, 3, 100)
    y_grid = np.abs(mu_grid) / 2

    gl.fill_between(mu_grid,
                    10 * np.ones(mu_grid.size),
                    y_grid,
                    alpha=0.2,
                    color="r",
                    ax=ax7,
                    legend=["95% non-significant"])

    gl.set_zoom(ax=ax6, ylim=[-0.1, 10])
    # ax7 upper y-limit: the prior sigma^2 (stored as log_sigma2) plus 15% headroom
    gl.set_zoom(ax=ax7,
                xlim=[-2.5, 2.5],
                ylim=[
                    -0.05,
                    np.exp(model.cf_a.input_layer_prior["log_sigma2"]) *
                    (1 + 0.15)
                ])

    #    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,2])

    # Set final properties and save figure
    gl.set_fontSizes(ax=[ax1, ax2, ax3, ax4, ax5, ax6, ax7],
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=10,
                     xticks=12,
                     yticks=12)

    gl.subplots_adjust(left=.09,
                       bottom=.10,
                       right=.90,
                       top=.95,
                       wspace=.30,
                       hspace=0.10)

    if (type(epoch_i) == type(None)):
        gl.savefig(folder_images + "../" + 'Final_values_regression_1D_' +
                   str(model.cf_a.eta_KL) + '.png',
                   dpi=100,
                   sizeInches=[20, 10])
    else:
        gl.savefig(folder_images + '%i.png' % epoch_i,
                   dpi=100,
                   sizeInches=[20, 10],
                   close=True,
                   bbox_inches="tight")
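
# compute_regression_1D_data is not shown in these examples. A minimal sketch
# of what such a sampler could do, assuming each forward pass of the Bayesian
# model draws a fresh set of weights (all names and the CPU/float32 handling
# are assumptions, not the original code):
def sample_posterior_predictive_1D(model, X_tr, X_val, Nsamples=100, Ngrid=200):
    import numpy as np
    import torch
    x_all = np.concatenate((X_tr, X_val), axis=0)
    x_grid = np.linspace(x_all.min(), x_all.max(), Ngrid).reshape(-1, 1)
    x_t = torch.tensor(x_grid, dtype=torch.float32)
    with torch.no_grad():
        # One column per posterior-predictive sample: shape (Ngrid, Nsamples)
        all_y_grid = np.concatenate(
            [model(x_t).cpu().numpy() for _ in range(Nsamples)], axis=1)
    return x_grid, all_y_grid
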
Exemple #24
0
    gl.set_fontSizes(ax=axes_l,
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=35,
                     xticks=25,
                     yticks=10)

    gl.set_fontSizes(ax=ax_i,
                     title=20,
                     xlabel=20,
                     ylabel=20,
                     legend=30,
                     xticks=20,
                     yticks=10)
    gl.set_zoom(xlim=[10, 10.50])

    gl.savefig(folder_images + image_name, dpi=100, sizeInches=[30, 12])

## Save to disk the cluster means of the last EM iteration
#    mus_kk = []
#    for i in range(K):
#        mus_kk.append(theta_list[-1][i][0])
#    mus_kk = np.concatenate(mus_kk, axis=1)
#
##    df = pd.DataFrame(mus_kk)
##    df.to_csv(folder_images + "file_path.csv")
#
#    np.savetxt(folder_images + "clusters.csv", mus_kk, delimiter=",")
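## For completeness: the saved clusters could be read back with the matching
## numpy call (a sketch, assuming the np.savetxt line above was enabled):
#    mus_kk = np.loadtxt(folder_images + "clusters.csv", delimiter=",")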
    for i in range(len(list_all_weights)):
        [mu_W, sigma_W, mu_b, sigma_b] = list_all_weights[i]
        legend = list_all_labels[i]
        if (i == 1):
            # This layer's weights are split in two halves and plotted separately
            shape_weights = (400, 200)
            plots_weights_layer(mu_W.reshape(shape_weights)[:200, :].flatten(),
                                sigma_W.reshape(shape_weights)[:200, :].flatten(),
                                mu_b[:200], sigma_b[:200], ax1, "All weights",
                                legend=[legend + " G(x)"], alpha=0.1)
            plots_weights_layer(mu_W.reshape(shape_weights)[200:, :].flatten(),
                                sigma_W.reshape(shape_weights)[200:, :].flatten(),
                                mu_b[200:], sigma_b[200:], ax1, "All weights",
                                legend=[legend + " H(x)"], alpha=0.1)
        else:
            plots_weights_layer(mu_W, sigma_W, mu_b, sigma_b, ax1, "All weights",
                                legend=[legend], alpha=0.1)
        list_all_axes.append(ax1)

    max_mu, min_mu, max_std, min_std, max_abs = compute_all_boundaries(list_all_weights)
    plot_signifant_region(ax1, max_mu, min_mu, max_std, min_std, max_abs)
    gl.set_zoom(ax=ax1, xlimPad=[0.1, 0.1], ylimPad=[0.1, 0.1],
                X=np.array([min_mu, max_mu]),
                Y=np.array([min_std, max_std]))

    gl.set_fontSizes(ax=list_all_axes, title=15, xlabel=15, ylabel=15,
                     legend=10, xticks=12, yticks=12)

    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.20, hspace=0.22)
    for ax in list_all_axes:
        ax.ticklabel_format(style='sci', scilimits=(0, 0), axis='both')
    gl.savefig(folder_images + 'Bayesian_weights_biday.png',
               dpi=100, sizeInches=[18, 12])
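
    # plot_signifant_region is not shown here; presumably it shades the band
    # where weights are non-significant, sigma > |mu| / 2, as in the chart
    # functions above (a sketch, not the original implementation):
    #   mu_grid = np.linspace(min_mu, max_mu, 100)
    #   ax1.fill_between(mu_grid, np.abs(mu_grid) / 2,
    #                    max_std * np.ones(mu_grid.size),
    #                    alpha=0.2, color="r", label="95% non-significant")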
 
 
 
    if (1):
        # 2D kernel density estimate of the samples, shown as contours
        ax = ax2
        ax.set_xlim(xmin, xmax)
        ax.set_ylim(ymin, ymax)
        # Filled contour plot of the density f over the (xx, yy) grid
        cfset = ax.contourf(xx, yy, f, cmap='Blues')
        ## Or show the kernel density estimate as an image instead:
        # ax.imshow(np.rot90(f), cmap='Blues', extent=[xmin, xmax, ymin, ymax])
        # Contour lines
        cset = ax.contour(xx, yy, f, colors='k')
        # Label the contour levels
        ax.clabel(cset, inline=1, fontsize=10)
        ax.set_xlabel('Y1')
        ax.set_ylabel('Y0')

        gl.set_zoom(ax=ax1, ylim=[-2, 25])

        gl.set_fontSizes(ax=[ax1, ax2],
                         title=18,
                         xlabel=15,
                         ylabel=18,
                         legend=18,
                         xticks=10,
                         yticks=15)

        ax2.set_xticklabels(x_labels, rotation=45, ha="right")

        plt.setp(ax1.get_xticklabels(), visible=False)
        gl.subplots_adjust(left=.09,
                           bottom=.10,
                           right=.90,
                           top=.95)  # remaining arguments assumed, matching the other subplots_adjust calls here
Exemple #28
0
def create_Bayesian_analysis_charts_simplified(model, train_dataset, validation_dataset,
                                    tr_loss, val_loss, KL_loss,
                                    folder_images,
                                    epoch_i = None):

    # Configurations of the plots
    alpha_points = 0.2 
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likey = "y"

    ################################ Divide in plots ##############################
    gl.init_figure();
    ax1 = gl.subplot2grid((6,3), (0,0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6,3), (3,0), rowspan=3, colspan=1, sharex = ax1, sharey = ax1)
    
    ax3 = gl.subplot2grid((6,3), (0,1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6,3), (2,1), rowspan=2, colspan=1, sharex = ax3)
    ax5 = gl.subplot2grid((6,3), (4,1), rowspan=2, colspan=1, sharex = ax3)
    
    ax6 = gl.subplot2grid((6,3), (0,2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6,3), (3,2), rowspan=3, colspan=1, sharex = ax6)
    
    
    ####### ax1, ax2: Get confusion matrices ##########

    labels_classes, confusion = model.get_confusion_matrix(train_dataset)
    plot_confusion_matrix(confusion,labels_classes, ax1 )
    labels_classes, confusion = model.get_confusion_matrix(validation_dataset)
    plot_confusion_matrix(confusion,labels_classes, ax2 )
        
    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([], tr_loss, ax = ax3, lw = 3, labels = ["Losses", "","Data loss (MSE)"], legend = ["train"],
            color = color_train_loss)
    gl.plot([], val_loss,ax = ax3, lw = 3, legend = ["validation"],
            color = color_val_loss,  AxesStyle = "Normal - No xaxis")
    
    ## ax4: The evolution of the KL loss
    gl.plot([], KL_loss, ax = ax4, lw = 3, labels = ["", "","KL loss"], legend = ["Bayesian Weights"],
            AxesStyle = "Normal - No xaxis", color = "k")

    ## ax5: Evolution of the total loss
    ## (this simplified version reuses the data losses here, since the
    ## combined Bayesian loss is not passed in)
    gl.plot([], tr_loss, ax = ax5, lw = 3, labels = ["", "epoch","Total Loss (Bayes)"], legend = ["train"],
            color = color_train_loss)
    gl.plot([], val_loss,ax = ax5, lw = 3, legend = ["validation"], color = color_val_loss)
           
    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model,ax6,ax7)

    gl.set_zoom (ax = ax6, ylim = [-0.1,10])
    gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,0.5])
    
    # Set final properties and save figure
    gl.set_fontSizes(ax = [ax1,ax2,ax3,ax4,ax5,ax6,ax7], title = 20, xlabel = 20, ylabel = 20, 
                      legend = 10, xticks = 12, yticks = 12)


    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.10)
    
    if (type(epoch_i) == type(None)):
        gl.savefig(folder_images +'Training_Example_Data_Bayesian.png', 
                   dpi = 100, sizeInches = [20, 10])
    else:
        gl.savefig(folder_images +'%i.png'%epoch_i, 
                   dpi = 100, sizeInches = [20, 10], close = True, bbox_inches = "tight")
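
# plot_confusion_matrix is called by the charts above but not included in
# these examples. A minimal sketch of what such a helper might look like,
# assuming `confusion` is a square numpy array of counts and `class_names`
# a list of labels (the _sketch suffix marks it as an assumption, not the
# original implementation):
import numpy as np

def plot_confusion_matrix_sketch(confusion, class_names, ax):
    ax.imshow(confusion, cmap='Blues')
    ax.set_xticks(np.arange(len(class_names)))
    ax.set_yticks(np.arange(len(class_names)))
    ax.set_xticklabels(class_names, rotation=45, ha="right")
    ax.set_yticklabels(class_names)
    # Annotate every cell with its count
    for r in range(confusion.shape[0]):
        for c in range(confusion.shape[1]):
            ax.text(c, r, "%d" % confusion[r, c], ha="center", va="center")
    ax.set_xlabel("Predicted class")
    ax.set_ylabel("True class")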