コード例 #1
0
                stacked=True,
                learning_rate=0.02,
                max_epoch=5000,
                tied=True,
                activation="tanh")

# encode data (without fine-tuning): uses only the layer-wise pre-trained weights
encoded_datas = autoencoder.encode(datas)
print("encoder (without fine-tuning) ================")
print(encoded_datas)

# train autoencoder with fine-tuning
# supervised=True: the iterator supplies (input, label) pairs and the whole
# stack is tuned against the labels; tied=True reuses encoder weights for decoding
print("\ntrain autoencoder with fine-tuning ==========\n")
autoencoder.fine_tune(fine_tuning_iterator,
                      supervised=True,
                      learning_rate=0.02,
                      max_epoch=10000,
                      tied=True)
# NOTE: unsupervised fine-tuning variant (reconstruction objective instead of labels):
# autoencoder.fine_tune(fine_tuning_iterator, supervised=False, learning_rate=0.02, max_epoch=6000)

# encode data (with fine-tuning)
tuned_encoded_datas = autoencoder.encode(datas)
print("encoder (with fine-tuning)================")
print(tuned_encoded_datas)

# predict data (based on fine-tuning)
predicted_datas = autoencoder.predict(datas)
print("predicted ================")
print(predicted_datas)
# argmax over axis 1 converts per-class scores into predicted label indices;
# eval_array marks which predictions match the ground-truth labels
# (assumes predicted_datas is a 2-D array-like with .argmax — TODO confirm API)
predicted_labels = predicted_datas.argmax(1)
eval_array = (predicted_labels == labels)
コード例 #2
0
# train data: two complementary 6-dimensional binary samples
datas = [[1, 1, 1, 0, 0, 0], [0, 0, 0, 1, 1, 1]]

# data wrapper feeding samples to the trainer
iterator = DataIterator(datas)

# train autoencoder
# assume the input dimension is input_d
# the network is like input_d -> 4 -> 2 -> 4 -> input_d
autoencoder = AutoEncoder()
autoencoder.fit([4, 2],
                iterator,
                stacked=True,
                learning_rate=0.1,
                max_epoch=5000)
# unsupervised fine-tuning pass (reconstruction objective) after layer-wise pre-training
autoencoder.fine_tune(iterator, learning_rate=0.1, supervised=False)

# after training

# encode data: map inputs down to the 2-dimensional bottleneck representation
encoded_datas = autoencoder.encode(datas)
print("encoder ================")
print(encoded_datas)

# decode data: map bottleneck codes back to the input space
decoded_datas = autoencoder.decode(encoded_datas)
print("decoder ================")
print(decoded_datas)

# reconstruct data (encode and decode data)
reconstructed_datas = autoencoder.reconstruct(datas)