Example #1

# Legacy Keras (TF 1.x backend): imported for session-level GPU configuration
from keras.backend import tensorflow_backend as K

# gpus = tf.config.experimental.list_physical_devices('GPU')
# if gpus:
#   try:
#     tf.config.experimental.set_memory_growth(gpus[0], True)
#   except RuntimeError as e:
#     # claim GPU memory on demand instead of reserving it all up front
#     print(e)
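# Sketch (not in the original): the TF 1.x, session-based equivalent of the
# memory-growth snippet above, matching the tensorflow_backend import; in TF 2.x
# the commented-out tf.config.experimental.set_memory_growth call is the way to go.
# import tensorflow as tf
# config = tf.ConfigProto()
# config.gpu_options.allow_growth = True    # claim GPU memory on demand
# K.set_session(tf.Session(config=config))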

from keras.models import Sequential
from keras.layers import Dense

# Small fully connected classifier for the Iris data: 4 input features, 3 classes
model = Sequential()
model.add(Dense(4, input_dim=4, activation='selu'))
model.add(Dense(4, activation='elu'))
model.add(Dense(3, activation='softmax'))
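# Sketch (assumption, not shown in the original): one way the Iris arrays for this
# model could be prepared; the names X_train, X_val, Y_train, Y_val are hypothetical
# and only referenced again in the fit sketch further down.
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from keras.utils import to_categorical

iris = load_iris()
Y = to_categorical(iris.target, num_classes=3)   # one-hot encode the 3 classes
X_train, X_val, Y_train, Y_val = train_test_split(iris.data, Y, test_size=0.2)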

import numpy as np
from keras.callbacks import ModelCheckpoint
modelpath = './model/sample/iris/check-{epoch:02d}-{val_loss:.4f}.hdf5'
checkpoint = ModelCheckpoint(filepath=modelpath,
                             monitor='val_loss',
                             save_best_only=True,
                             save_weights_only=False,
                             verbose=1)
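# Note (assumption): older Keras ModelCheckpoint does not create the target folder,
# so './model/sample/iris/' may need to exist before training starts.
import os
os.makedirs('./model/sample/iris/', exist_ok=True)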

from keras.callbacks import EarlyStopping
es = EarlyStopping(monitor='loss', patience=90)
model.compile(loss='categorical_crossentropy',
              optimizer='adam')
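# Sketch (not in the original): wiring both callbacks into training. X_train, Y_train,
# X_val, Y_val come from the hypothetical data-prep sketch above; the epoch and batch
# sizes are placeholders.
model.fit(X_train, Y_train,
          validation_data=(X_val, Y_val),
          epochs=200,
          batch_size=8,
          callbacks=[checkpoint, es],
          verbose=0)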
print("prefilter_train: ", prefilter_train.shape)
print("prefilter_test: ", prefilter_test.shape)

print("Performing PCA")
X_pca = pca(prefilter_train)
plotScatter(X_pca, y_train, title="6_PCA reduction (2d) of auto-encoded data (%dd)" % prefilter_train.shape[1])

print("Performing TSNE")
model = TSNE(n_components=2, random_state=0, init="pca")
toPlot = model.fit_transform(prefilter_train[:1000])
plotTSNE(toPlot, y_train[:1000], nb_classes, "7_t-SNE embedding of auto-encoded data ")
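# Hypothetical stand-in for the plotScatter/plotTSNE helpers used above (the real ones
# are project-specific): scatter a 2-D embedding coloured by class label and save it.
import matplotlib.pyplot as plt

def plot_embedding_2d(points_2d, labels, title):
    fig, ax = plt.subplots(figsize=(6, 6))
    sc = ax.scatter(points_2d[:, 0], points_2d[:, 1], c=labels, cmap='tab10', s=8)
    fig.colorbar(sc, ax=ax)
    ax.set_title(title)
    fig.savefig(title.strip().replace(' ', '_') + '.png')
    plt.close(fig)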


print("Classifying and comparing")
# Classify results from Autoencoder
print("Building classical fully connected layer for classification")
model = Sequential()
# Legacy (Keras 0.x-era) layer signature: Dense(input_dim, output_dim, ...)
model.add(Dense(prefilter_train.shape[1], nb_classes, activation=activation))
model.add(Activation('softmax'))
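# For reference only, the Keras 2.x equivalent of the two layers above:
# model.add(Dense(nb_classes, input_dim=prefilter_train.shape[1], activation=activation))
# model.add(Activation('softmax'))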

model.compile(loss='categorical_crossentropy', optimizer='adam')
# nb_epoch and show_accuracy are legacy kwargs (Keras 2+ uses epochs and metrics=['accuracy'])
model.fit(prefilter_train, Y_train,
          batch_size=batch_size,
          nb_epoch=nb_epoch,
          show_accuracy=False,
          verbose=0,
          validation_data=(prefilter_test, Y_test))

# With show_accuracy=True, evaluate() returns [loss, accuracy]
score = model.evaluate(prefilter_test, Y_test, verbose=0, show_accuracy=True)
print('\nscore:', score)

# Relative change of [loss, accuracy] compared with the earlier classical_score baseline
print('Loss change:', 100 * (score[0] - classical_score[0]) / classical_score[0], '%')
print('Accuracy change:', 100 * (score[1] - classical_score[1]) / classical_score[1], '%')