Example #1
def output_stuff(self, dir_folder, params=["File"]):
    # Stores the parameters of the NN into a file.
    # The file name is built from the values given in params,
    # separated by "_".
    Param_obj = CGSLFN_param(self)
    Exec_obj = interface.Execution_param(self)

    # Store the results of the experiments
    import pickle_lib as pkl

    # Create the file name
    name = ""
    for param in params:
        name += str(param) + "_"

    pkl.store_pickle(dir_folder + name, [Exec_obj, Param_obj], 1)
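# Note: every example on this page calls pkl.store_pickle(path, obj,
# partitions) from pickle_lib, which is not shown here. A minimal sketch
# of the assumed interface (the real library may differ) could be:
import pickle

def store_pickle_sketch(filepath, obj_list, partitions=1):
    # Split obj_list into `partitions` roughly equal chunks and dump
    # each chunk to its own numbered file (single file if partitions=1).
    size = max(1, (len(obj_list) + partitions - 1) // partitions)
    for i in range(partitions):
        suffix = "" if partitions == 1 else "_" + str(i)
        with open(filepath + suffix, "wb") as f:
            pickle.dump(obj_list[i * size:(i + 1) * size], f)

def load_pickle_sketch(filepath, partitions=1):
    # Read the chunks back and concatenate them into a single list.
    out = []
    for i in range(partitions):
        suffix = "" if partitions == 1 else "_" + str(i)
        with open(filepath + suffix, "rb") as f:
            out.extend(pickle.load(f))
    return out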
    cl_d["RF"] = rf
    cl_d["ERF"] = ert
    cl_d["LSVM"] = gsvmr
    cl_d["RFSVM"] = sbmrf
#    cl_d["PolySVM"] = gsvmp;

if (save_model_to_disk):
    # We save the last model to disk using pickle.
    # Lazy, but justified: non-deep-learning models generally
    # do not require much memory.
    folder_model = "../models/"
    key_classifier = "LSVM"  # QDA  # GNB RF

    ul.create_folder_if_needed(folder_model)
    classifier = cl_d[key_classifier]
    pkl.store_pickle(folder_model + key_classifier + ".pkl", [classifier])

    pkl.store_pickle(folder_model + "scaler_X" + ".pkl", [scaler_X])
    pkl.store_pickle(folder_model + "scaler_Y" + ".pkl", [scaler_Y])
"""
PLOT THE RESULTS
"""

if (plot_performance_all):

    def get_class_rate(Y, Ypred):
        return np.mean(np.equal(Y, Ypred))

    def get_CE(Y, Ypred):
        # Mean binary cross-entropy; tol avoids log(0).
        tol = 1e-5
        return -np.mean(Y * np.log(Ypred + tol) +
                        (1 - Y) * np.log(1 - Ypred + tol))
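# Sketch of how the two metrics above could be used (hypothetical toy
# arrays; class rate takes hard labels, CE takes predicted probabilities):
Y = np.array([1, 0, 1, 1])
print(get_class_rate(Y, np.array([1, 0, 0, 1])))  # fraction of correct labels
print(get_CE(Y, np.array([0.9, 0.2, 0.4, 0.8])))  # mean binary cross-entropy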
Example #3
#        gl.plot(tgrid, X + 2*np.sqrt(v), lw=1, alpha=0.5, color="yellow", legend=["95% confidence interval"])
#        gl.plot(tgrid, X - 2*np.sqrt(v), lw=1, alpha=0.5, color="yellow")

if (plot_realizations_signal_generated_as_output):
    gl.set_fontSizes(title=20, xlabel=15, ylabel=20,
                     legend=12, xticks=12, yticks=12)
    gl.savefig(folder_images + 'GP_database_realizations.png',
               dpi=100, sizeInches=[2*6.5, 2*2])


########## Using Pickle ###############

Ndivisions = 10

# Cannot use it due to incompatibilities between Python 2 and 3
pkl.store_pickle(folder_data + "X_values.pkl", X_list, Ndivisions)
pkl.store_pickle(folder_data + "Y_values.pkl", Y_list, Ndivisions)
pkl.store_pickle(folder_data + "t_values.pkl", t_list, Ndivisions)

## Test to load the files back
X_list2 = pkl.load_pickle(folder_data + "X_values.pkl", Ndivisions)
Y_list2 = pkl.load_pickle(folder_data + "Y_values.pkl", Ndivisions)

###### Using Json ###############
if (0):
    # Cannot put numpy vectors as JSON elements directly...
    import json
    data_dict = {"X": X_list, "Y": Y_list}

    with open(folder_data + 'data.json', 'w') as f:
        json.dump(data_dict, f)
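# Sketch of a workaround for the JSON limitation noted above: convert
# numpy arrays to nested lists with .tolist() before dumping, and
# rebuild them with np.array() after loading (illustration only):
import json
import numpy as np

X_list = [np.zeros((3, 2)), np.ones((3, 2))]  # stand-in data
with open('data.json', 'w') as f:
    json.dump({"X": [x.tolist() for x in X_list]}, f)
with open('data.json', 'r') as f:
    X_back = [np.array(x) for x in json.load(f)["X"]]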
Example #4

Nsamples = Nsamples.astype(int)  # Number of samples per chain

# For training
Chains_list = gf.draw_HMM_indexes(pi, A, Nchains, Nsamples)
HMM_list = gf.draw_HMM_samples(Chains_list, Xdata)

## For validation

Nsa = Xdata[0].shape[0]  # samples are rows; keep the second half
Xdata2 = []
for Xdata_k in Xdata:
    Xdata2.append(Xdata_k[int(Nsa / 2):, :])
    
Chains_list2 = gf.draw_HMM_indexes(pi, A, Nchains, Nsamples)
HMM_list2 = gf.draw_HMM_samples(Chains_list2, Xdata2)

gl.scatter_3D(0, 0, 0, nf=1, na=0)

for XdataChain in HMM_list:
    gl.scatter_3D(XdataChain[:, 0], XdataChain[:, 1], XdataChain[:, 2], nf=0, na=0)


# We pickle the information
# This way we have the same samples for EM and HMM

folder = "./HMM_data/"
pkl.store_pickle(folder +"HMM_labels.pkl",Chains_list,1)
pkl.store_pickle(folder +"HMM_datapoints.pkl",HMM_list,1)
pkl.store_pickle(folder +"HMM_param.pkl",[pi,A],1)

pkl.store_pickle(folder +"HMM2_datapoints.pkl",HMM_list2,1)
Example #5

for k in range(K):
    mu = np.random.uniform(-1, 1, (1, 3)).flatten()
    mu = mu / np.sqrt(np.sum(mu * mu))
    kappa = 50 + np.random.rand(1) * 50
    kappa = -kappa
    print "Real: ", mu, kappa

    filedir = folder_EM + "Wdata_" + str(k) + ".csv"

    # Generate and plot the data
    Xdata_k = Was.randWatson(N, mu, kappa)
    gl.scatter_3D(Xdata_k[:, 0], Xdata_k[:, 1], Xdata_k[:, 2], nf=0, na=0)

    # pickle the parameters
    pkl.store_pickle(folder_EM + "Wdata_" + str(k) + ".pkl", [mu, kappa], 1)

    # Print an estimation of the parameters

    mu = Wae.get_MLmean(Xdata_k)
    kappa = Wae.get_MLkappa(mu, Xdata_k)

    print "Estimated: ", mu, kappa
    np.savetxt(filedir, Xdata_k, delimiter=",")

    Xdata.append(Xdata_k)
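# For reference, a sketch of the ML mean-direction estimate in the
# spirit of Wae.get_MLmean above (assumption: the Watson MLE for mu is
# an eigenvector of the scatter matrix; dominant for kappa > 0, minor
# for kappa < 0, as in this girdle-distributed example):
def ml_mean_direction_sketch(X, kappa_positive=True):
    S = X.T.dot(X) / X.shape[0]           # scatter matrix of the unit vectors
    eigvals, eigvecs = np.linalg.eigh(S)  # eigenvalues in ascending order
    return eigvecs[:, -1] if kappa_positive else eigvecs[:, 0]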

################################################################
################ Generate HMM data and store it ################
################################################################
Example #6
        gl.set_fontSizes(title=20,
                         xlabel=15,
                         ylabel=20,
                         legend=12,
                         xticks=12,
                         yticks=12)
        gl.savefig(folder_images + 'GP_database_realizations.png',
                   dpi=100,
                   sizeInches=[2 * 6.5, 2 * 2])

    ########## Using Pickle ###############

    Ndivisions = 10

    # Cannot use it due to incompatibilities between Python 2 and 3
    pkl.store_pickle(folder_data + "X_values.pkl", X_list, Ndivisions)
    pkl.store_pickle(folder_data + "Y_values.pkl", Y_list, Ndivisions)
    pkl.store_pickle(folder_data + "t_values.pkl", t_list, Ndivisions)

    ## Test to load the files back
    X_list2 = pkl.load_pickle(folder_data + "X_values.pkl", Ndivisions)
    Y_list2 = pkl.load_pickle(folder_data + "Y_values.pkl", Ndivisions)

    ###### Using Json ###############
    if (0):
        # Cannot put numpy vectors as JSON elements directly...
        import json
        data_dict = {"X": X_list, "Y": Y_list}

        with open(folder_data + 'data.json', 'w') as f:
            json.dump(data_dict, f)
Example #7
def save_results(filename, li, partitions=1):
    # Just saves the results as a pickle.
    pkl.store_pickle(filename, li, partitions=partitions)
    print("Object: " + filename + " saved.")
        print("Training Epoch duration: ", epoch_duration)
        training_logger["time"]["training"].append(epoch_duration)

        model.fill_epoch_training_information(training_logger, device,
                                              validation_iterable,
                                              num_batches_validation)
        time_now = dt.datetime.now()
        validation_duration = time_now - time_prev
        time_prev = time_now
        print("Validation duration: ", validation_duration)
        training_logger["time"]["validation"].append(validation_duration)

        ### Save the results to disk
        cf_a.mode_file_path = mode_file_path + str(i + 1) + ".prm"
        cf_a.pickle_results_path = pickle_results_path + str(i + 1) + ".pkl"
        pkl.store_pickle(pickle_results_path + str(i + 1) + ".pkl",
                         [cf_a, training_logger])

        if (cf_a.save_weights_at_each_epoch):
            torch.save(model.state_dict(),
                       mode_file_path + str(i + 1) + ".prm")

        # Remove the previous pickle file
        if (i > 0):
            os.remove(pickle_results_path + str(i) + ".pkl")
        if (cf_a.save_weights_at_each_epoch):
            if ((i > 0) and
                (cf_a.save_only_last_weights)):  # Erase the previous one
                os.remove(mode_file_path + str(i) + ".prm")

        time_now = dt.datetime.now()
        saving_duration = time_now - time_prev
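# Sketch: after training finishes, the last epoch's results could be
# reloaded (hypothetical n_epochs = final epoch index; assumes one
# partition, matching the store_pickle calls above):
cf_a, training_logger = pkl.load_pickle(
    pickle_results_path + str(n_epochs) + ".pkl", 1)
print(training_logger["time"]["validation"])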
Example #9

    cl_d = {
        "LR": lr, "LDA": lda, "QDA": qda, "GNB": gnb,
        "KNN": gknn, "Tree": gtree, "RF": rf, "ERF": ert,
        "LSVM": gsvmr, "RFSVM": sbmrf,
        # "PolySVM": gsvmp,
    }
    
if (save_model_to_disk):
    # We save the last model to disk using pickle.
    # Lazy, but justified: non-deep-learning models generally
    # do not require much memory.
    folder_model = "../models/"
    key_classifier = "LSVM"  # QDA  # GNB RF
    
    ul.create_folder_if_needed(folder_model)
    classifier = cl_d[key_classifier]
    pkl.store_pickle(folder_model + key_classifier + ".pkl", [classifier])

    pkl.store_pickle(folder_model + "scaler_X" + ".pkl", [scaler_X])
    pkl.store_pickle(folder_model + "scaler_Y" + ".pkl", [scaler_Y])

"""
PLOT THE RESULTS
"""

if (plot_performance_all):
    
    def get_class_rate(Y, Ypred):
        return np.mean(np.equal(Y, Ypred))

    def get_CE(Y, Ypred):
        # Mean binary cross-entropy; tol avoids log(0).
        tol = 1e-5
        return -np.mean(Y * np.log(Ypred + tol) +
                        (1 - Y) * np.log(1 - Ypred + tol))
Example #10

Chains_list = gf.draw_HMM_indexes(pi, A, Nchains, Nsamples)
HMM_list = gf.draw_HMM_samples(Chains_list, Xdata)

## For validation

Nsa = Xdata[0].shape[0]  # samples are rows; keep the second half
Xdata2 = []
for Xdata_k in Xdata:
    Xdata2.append(Xdata_k[int(Nsa / 2):, :])

Chains_list2 = gf.draw_HMM_indexes(pi, A, Nchains, Nsamples)
HMM_list2 = gf.draw_HMM_samples(Chains_list2, Xdata2)

gl.scatter_3D(0, 0, 0, nf=1, na=0)

for XdataChain in HMM_list:
    gl.scatter_3D(XdataChain[:, 0],
                  XdataChain[:, 1],
                  XdataChain[:, 2],
                  nf=0,
                  na=0)

# We pickle the information
# This way we have the same samples for EM and HMM

folder = "./HMM_data/"
pkl.store_pickle(folder + "HMM_labels.pkl", Chains_list, 1)
pkl.store_pickle(folder + "HMM_datapoints.pkl", HMM_list, 1)
pkl.store_pickle(folder + "HMM_param.pkl", [pi, A], 1)

pkl.store_pickle(folder + "HMM2_datapoints.pkl", HMM_list2, 1)