def train_dbn(path, epochs, learning_rate, dims):
    """Train a deep belief network on the MNIST csv at *path* and return its weights.

    path: csv of MNIST digits, loaded via mnist.load_mnist.
    epochs: max training epochs for the layer-wise trainer.
    learning_rate: passed through to the trainer.
    dims: layer sizes, e.g. [784, 100, 50, 10] (visible units first).

    The original body ignored every parameter (hard-coded csv path, dims and
    max_epochs=300, with the `dims` argument immediately shadowed); they are
    now honoured.
    """
    data = mnist.load_mnist(path)
    data = rbm.insert_biases(data)
    dbn_weights = generate_dbn_weights(dims)
    # NOTE(review): original read `train_dbm(dbm_weights, ...)` — `dbm_weights`
    # was undefined (NameError), fixed to `dbn_weights`. `train_dbm` is not
    # defined in this file; confirm it exists in an imported module.
    dbn_weights = train_dbm(dbn_weights, data,
                            learning_rate=learning_rate, max_epochs=epochs)
    return dbn_weights


def construct(weights, vis_states):
    """Bottom-up pass: propagate visible states up through every RBM layer."""
    for weight in weights:
        vis_states = rbm.construct(weight, vis_states)
    return vis_states


def reconstruct(weights, hid_states):
    """Top-down pass: propagate hidden states back down through the layers.

    Walks the weight list in reverse — weights[-1], weights[-2], ...,
    weights[-len(weights)] — undoing construct() layer by layer.
    """
    for i in range(1, len(weights) + 1):
        hid_states = rbm.reconstruct(weights[-i], hid_states)
    return hid_states


def pickle_weights(weights, path):
    """Serialise *weights* to *path* as a pickled {"weights": ...} dict."""
    # `with` guarantees the handle is closed even if pickle.dump raises.
    with open(path, "wb") as output:
        pickle.dump({"weights": weights}, output)


def unpickle(path):
    """Load weights previously written by pickle_weights.

    WARNING: pickle.load executes arbitrary code from the file — only
    unpickle weight files you created yourself.
    """
    with open(path, "rb") as pkl_file:
        return pickle.load(pkl_file)["weights"]


if __name__ == "__main__":
    data = mnist.load_mnist("../../../mnist_test.csv")
    data = rbm.insert_biases(data)
    dims = [784, 200, 50, 10]
    dbn_weights = generate_dbn_weights(dims)
    dbn_weights = unpickle("../weights/300_dbn.pkl")
    # dbn_weights = train_dbn(dbn_weights, data, max_epochs = 2000)