from data_loader import load_sensor_files
from sklearn.decomposition import PCA
from numpy import array
from random import sample
from numpy.linalg import norm

num_samples = 100000
data_path = "/home/willist/Documents/dnn/data/"
training_set = load_sensor_files(data_path, num_samples=num_samples, shared=False)

# Fit a 5-component PCA model on the raw sensor samples.
pca = PCA(n_components=5)
pca.fit(training_set)

# Measure reconstruction error on a random 10% subset of the training data.
num_test_samples = len(training_set) // 10
test_samples = sample(training_set, num_test_samples)
mean_error = 0.0
for test_sample in test_samples:
    # scikit-learn expects a 2D array, so reshape each sample into a single row.
    s_compr = pca.transform(array(test_sample).reshape(1, -1))
    s_decompr = pca.inverse_transform(s_compr)
    # print(test_sample)
    # print(s_compr)
    # print(s_decompr)
    mean_error += norm(test_sample - s_decompr)

print(mean_error / num_test_samples)
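# The loop above transforms one sample at a time; as a sketch (assuming the
# sampled vectors stack into a single 2D array, which depends on what
# load_sensor_files returns), the same evaluation can be vectorised:
import numpy as np

X_test = np.asarray(test_samples)            # shape (num_test_samples, n_features)
X_compr = pca.transform(X_test)              # compress the whole subset at once
X_decompr = pca.inverse_transform(X_compr)   # reconstruct it

errors = np.linalg.norm(X_test - X_decompr, axis=1)   # per-sample error
print(errors.mean())

# Fraction of variance captured by the 5 retained components.
print(pca.explained_variance_ratio_.sum())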
Example #2
from data_loader import load_sensor_files
from sklearn.linear_model import LinearRegression
import numpy as np

data_path = "/home/willist/Documents/dnn/data/labeled/"

training_set, training_labels, test_set, test_labels = load_sensor_files(data_path, shared=False)

# Fit an ordinary least-squares model on the labeled sensor data.
regr = LinearRegression()
regr.fit(training_set, training_labels)

# Report the mean per-sample residual sum of squares on the held-out test set.
predicted_labels = regr.predict(test_set)
print("Mean residual sum of squares: %.5f" % np.mean(np.sum((predicted_labels - test_labels) ** 2, axis=1)))
Example #3
              "Fine Tune Epochs: " + str(fine_tune_epochs) + "\n" + \
              "Fine Tune Supervised: " + str(fine_tune_supervised) + "\n" + \
              "Hidden Layer Sizes: " + str(hidden_layer_sizes) + "\n" + \
              "Corruption Levels: " + str(corruption_levels) + "\n" + \
              "Pretraining Learning Rates: " + str(pretraining_learning_rates) + "\n" + \
              "Tied Weights: " + str(tied_weights) + "\n" + \
              "Sigmoid Compressions: " + str(sigmoid_compressions) + "\n" + \
              "Sigmoid Reconstructions: " + str(sigmoid_reconstructions) + "\n" + \
              "Supervised Sigmoid Activation: " + str(supervised_sigmoid_activation) + "\n"

print_flush(description)
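# A sketch of a more maintainable alternative: keep the hyperparameters shown
# above in a dict and join the entries, so adding a new setting is a one-line
# change. The variable names are taken from the snippet; print_flush is assumed
# to behave like print with an explicit flush.
hyperparams = {
    "Fine Tune Epochs": fine_tune_epochs,
    "Fine Tune Supervised": fine_tune_supervised,
    "Hidden Layer Sizes": hidden_layer_sizes,
    "Corruption Levels": corruption_levels,
    "Pretraining Learning Rates": pretraining_learning_rates,
    "Tied Weights": tied_weights,
    "Sigmoid Compressions": sigmoid_compressions,
    "Sigmoid Reconstructions": sigmoid_reconstructions,
    "Supervised Sigmoid Activation": supervised_sigmoid_activation,
}
description_alt = "\n".join("%s: %s" % (key, value) for key, value in hyperparams.items())
print_flush(description_alt)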

training_set, training_labels, test_set, test_labels = load_sensor_files(training_path, testing_path,
                                                                         history_length=history_length,
                                                                         num_training_samples=num_training_samples,
                                                                         num_training_samples_per_file=num_training_samples_per_file,
                                                                         num_test_samples=num_test_samples,
                                                                         num_test_samples_per_file=num_test_samples_per_file,
                                                                         feature_indexes=feature_indexes,
                                                                         label_indexes=label_indexes)

# compute number of minibatches for training, validation and testing
n_train_batches = training_set.get_value(borrow=True).shape[0]
n_train_batches //= batch_size  # integer division keeps this an int and drops any incomplete final batch
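# Worked example of the batch arithmetic above, with made-up numbers:
# 10,000 training rows and a batch size of 20 yield 500 full minibatches,
# and minibatch i covers rows [i * batch_size, (i + 1) * batch_size).
example_rows = 10000          # hypothetical shape[0] of the training set
example_batch_size = 20       # hypothetical batch_size
example_n_batches = example_rows // example_batch_size   # -> 500
for minibatch_index in range(example_n_batches):
    start = minibatch_index * example_batch_size
    end = start + example_batch_size
    # rows start:end of the training set would form one minibatch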

# numpy random generator

numpy_rng = random.RandomState(89677)
print_flush("... building the model")
# construct the stacked denoising autoencoder class