Code example #1
File: index.py  Project: ashwin4ever/Human-memory
import mnist
import rbm
import dbn


def train_dbn(path, epochs, learning_rate, dims):
    # Train a DBN on MNIST: load the data, append bias units, build the
    # layer weight matrices, then run greedy layer-wise RBM training.
    data = mnist.load_mnist(path)
    data = rbm.insert_biases(data)
    dbn_weights = dbn.generate_dbn_weights(dims)
    dbn_weights = dbn.train_dbn(dbn_weights, data,
                                learning_rate=learning_rate, max_epochs=epochs)
    return dbn_weights
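For context, a minimal call-site sketch for the wrapper above, assuming the project-local mnist, rbm, and dbn modules are importable; the learning rate value is an assumption, while the other arguments mirror the constants used in the snippets on this page.

# Hypothetical driver code, not part of the original project.
weights = train_dbn(
    path="../../../mnist_test.csv",  # MNIST CSV path used throughout these snippets
    epochs=300,                      # epoch count used in this snippet
    learning_rate=0.1,               # assumed value; the snippet does not fix one
    dims=[784, 100, 50, 10],         # 784 visible units (28x28 pixels) plus three hidden layers
)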
Code example #2
File: dbn.py  Project: FlyingSpringrol/Human-memory
import pickle

import mnist
import rbm


def construct(weights, vis_states):
    # Forward pass: drive the visible states up through each RBM layer in turn.
    for weight in weights:
        vis_states = rbm.construct(weight, vis_states)
    return vis_states


def reconstruct(weights, hid_states):
    # Backward pass: map the top-layer hidden states back down through each RBM.
    for weight in reversed(weights):
        hid_states = rbm.reconstruct(weight, hid_states)
    return hid_states


def pickle_weights(weights, path):
    # Serialize the weight matrices to disk.
    data = {"weights": weights}
    with open(path, "wb") as output:
        pickle.dump(data, output)


def unpickle(path):
    # Load previously pickled weight matrices.
    with open(path, "rb") as pkl_file:
        data = pickle.load(pkl_file)
    return data["weights"]


if __name__ == "__main__":
    # Load MNIST, append bias units, and restore pretrained DBN weights from disk
    # instead of retraining (the commented-out call retrains from scratch).
    data = mnist.load_mnist("../../../mnist_test.csv")
    data = rbm.insert_biases(data)
    dims = [784, 200, 50, 10]
    dbn_weights = generate_dbn_weights(dims)
    dbn_weights = unpickle("../weights/300_dbn.pkl")
    # dbn_weights = train_dbn(dbn_weights, data, max_epochs = 2000)
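As a usage sketch, the construct/reconstruct pair above can round-trip a data batch through the network. This is not in the original file; it reuses the data and dbn_weights variables from the __main__ block and assumes the states are NumPy arrays.

# Hypothetical round trip through the DBN (illustration only).
hid_states = construct(dbn_weights, data)       # bottom-up pass to the top hidden layer
visible = reconstruct(dbn_weights, hid_states)  # top-down pass back to the visible layer
print(visible.shape)                            # assumes NumPy arrays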
Code example #3
File: dbn.py  Project: ashwin4ever/Human-memory
    return dbn_weights

def construct(weights, vis_states):
    for weight in weights:
        vis_states = rbm.construct(weight, vis_states)
    return vis_states

def reconstruct(weights, hid_states):
    for weight in reversed(weights):
        hid_states = rbm.reconstruct(weight, hid_states)
    return hid_states

def pickle_weights(weights, path):
    data = {'weights': weights}
    with open(path, 'wb') as output:
        pickle.dump(data, output)

def unpickle(path):
    with open(path, 'rb') as pkl_file:
        data = pickle.load(pkl_file)
    return data['weights']


if __name__ == "__main__":
    data = mnist.load_mnist('../../../mnist_test.csv')
    data = rbm.insert_biases(data)
    dims = [784, 200, 50, 10]
    dbn_weights = generate_dbn_weights(dims)
    dbn_weights = unpickle('../weights/300_dbn.pkl')
    #dbn_weights = train_dbn(dbn_weights, data, max_epochs = 2000)
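Finally, a short sketch of how the pickling helpers pair up for a save/load round trip; the file path here is made up for illustration.

# Hypothetical save/load round trip (illustration only).
pickle_weights(dbn_weights, '../weights/my_dbn.pkl')
restored = unpickle('../weights/my_dbn.pkl')
assert len(restored) == len(dbn_weights)  # one weight matrix per RBM layer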