# Evaluate a trained LeNet-5 MNIST checkpoint with row/column pruning.
#
# Usage: python <script> [checkpoint_filename]
# If no checkpoint filename is given on the command line, a hard-coded
# default checkpoint is used instead.
if len(sys.argv) < 2:
    # Choose the best one by the lowest training reconstruction error in last 10 epochs
    # filename = 'MNIST_LeNet5DoubleFlatten_HalfCauchy-LogNormal-tau_conv_global(1.00E-04)tau_conv_local(3.00E-03)tau_fc_global(1.00E-04)tau_fc_local(3.00E-03)_E0200_01:20:10:600426_e0197.pkl'
    # filename = 'MNIST_LeNet5DoubleFlatten_HalfCauchy-LogNormal-tau_conv_global(1.00E-04)tau_conv_local(1.00E-03)tau_fc_global(1.00E-04)tau_fc_local(1.00E-03)_E0200_01:20:13:088725_e0196.pkl'
    # filename = 'MNIST_LeNet5DoubleFlatten_HalfCauchy-LogNormal-tau_conv_global(1.00E-04)tau_conv_local(5.00E-04)tau_fc_global(1.00E-04)tau_fc_local(5.00E-04)_E0200_01:20:18:234706_e0191.pkl'
    # filename = 'MNIST_LeNet5DoubleFlatten_HalfCauchy-LogNormal-tau_conv_global(1.00E-04)tau_conv_local(1.00E-04)tau_fc_global(1.00E-04)tau_fc_local(1.00E-04)_E0200_01:20:21:680255_e0194.pkl'
    # filename = 'MNIST_LeNet5DoubleFlatten_HalfCauchy-LogNormal-tau_conv_global(1.00E-04)tau_conv_local(5.00E-03)tau_fc_global(1.00E-04)tau_fc_local(5.00E-03)_E0200_01:20:27:171604_e0199.pkl'
    # filename = 'MNIST_LeNet5DoubleFlatten_HalfCauchy-LogNormal-tau_conv_global(1.00E-04)tau_conv_local(1.00E-02)tau_fc_global(1.00E-04)tau_fc_local(1.00E-02)_E0200_12:08:27:167291_e0197.pkl'
    # filename = 'MNIST_LeNet5DoubleFlatten_HalfCauchy-LogNormal-tau_conv_global(1.00E-04)tau_conv_local(5.00E-02)tau_fc_global(1.00E-04)tau_fc_local(5.00E-02)_E0200_12:08:12:007496_e0191.pkl'
    # filename = 'MNIST_LeNet5DoubleFlatten_HalfCauchy-LogNormal-tau_conv_global(1.00E-04)tau_conv_local(1.00E-01)tau_fc_global(1.00E-04)tau_fc_local(1.00E-01)_E0200_16:30:05:309278_e0193.pkl'
    filename = 'MNIST_LeNet5DoubleFlatten_E0200_e0002.pkl'
else:
    filename = sys.argv[1]

# Per-layer pruning thresholds passed to evaluate_with_prunning below.
# NOTE(review): units look like log-scale cutoffs (mostly negative values) —
# confirm against the evaluate_with_prunning implementation.
row_threshold = [2.0, -8.0, -10.0, -9.0]
col_threshold = [1.0, -8.0, -11.0, -9.6]

# Resolve the checkpoint inside the experiment directory, using only the
# base name of whatever path the user supplied.
model_file = os.path.join(exp_dir(), os.path.split(filename)[1])
# Rebind `filename` to the bare checkpoint name; it is reused as the report tag.
dirname, filename = os.path.split(model_file)

# Rebuild the model skeleton (CPU-only) with the HalfCauchy prior
# configuration, then restore the trained weights from the checkpoint.
model = load_model(model_type=model_type,
                   prior_info=prior_info_from_json('HalfCauchy.json'),
                   use_gpu=False)
model.load_state_dict(torch.load(model_file))

# Data loaders: train/valid/test plus a separate training loader used only
# for evaluation (train_loader_eval).
train_loader, valid_loader, test_loader, train_loader_eval = load_data(
    data_type=data_type, batch_size=100, num_workers=0, use_gpu=False)

# Run the pruning evaluation at the thresholds above.
# (The "prunning" spelling matches the project API and must not be changed here.)
evaluate_with_prunning(model, train_loader_eval, valid_loader, test_loader,
                       row_threshold=row_threshold, col_threshold=col_threshold,
                       tag=filename)
"""Compress the small sample image set down to 100 principal components."""
import compress
import numpy as np  # kept: part of the original file's imports

# Dataset locations. Only the small sample set is used in this run; the full
# training set path is kept (commented) as the intended toggle.
# FIX: `train = 'Data/Train/'` was a dead assignment — it was never read.
# train = 'Data/Train/'  # Real training
small = 'Data/small/'

X = compress.load_data(small)
compress.compress_images(X, 100)
"""Run PCA image compression on the training set with 100 components."""
import numpy as np
import pca as p
import compress as c

TRAINING_DATA = "Data/Train/"
TEST_DATA = "Data/Test/"  # kept for quickly switching runs to the test set

X = c.load_data(TRAINING_DATA)
# Other experiment variants were removed as dead commented-out code:
# additional component counts (10, 50, 500, 1000, 2000), the same sweep over
# TEST_DATA, and two small hand-worked compute_Z/find_pcs/project_data examples.
c.compress_images(X, 100)

# FIX: the trailing bare `exit()` was removed. `exit` is a site-module helper
# intended for interactive sessions, and the call was redundant at the end of
# the script anyway (the module simply ends here).
"""Compress the training images at several principal-component counts."""
import pca
import numpy as np
import compress

# (A small hand-worked PCA example — compute_Z / compute_covariance_matrix /
# find_pcs / project_data on a 13-point 2-D dataset — used to live here,
# commented out.)

X = compress.load_data('Data/Train/')

# Sweep the number of retained principal components.
for num_components in (10, 100, 500, 1000, 2000):
    compress.compress_images(X, num_components)
# PCATest.py — exercise the PCA pipeline on a tiny 2-D example, then run
# image compression on the training data.
# author: Sara Davis · date: 12/3/2018 · version: 1.0
import pca
import numpy as np
import compress

# Toy data: the four corners of a unit square, so the principal components
# are easy to reason about by hand.
X = np.array([[1, 1], [1, -1], [-1, 1], [-1, -1]])
centering = True
scaling = False

# Standardize, form the covariance matrix, extract principal components,
# and project down to a single dimension (variance cutoff 0).
Z = pca.compute_Z(X, centering, scaling)
COV = pca.compute_covariance_matrix(Z)
L, PCS = pca.find_pcs(COV)
Z_star = pca.project_data(Z, PCS, L, 1, 0)

# Full-size run: load the training images and keep 100 components.
X = compress.load_data('/home/sara/Desktop/Data/Train/')
compress.compress_images(X, 100)
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 21 15:03:00 2019

@author: pom_p
"""
import compress as comp
import numpy as np

# Load the raw image data, then compress it using the top 1000 principal
# components.
images = comp.load_data('Train')
comp.compress_images(images, 1000)