# This first fragment completes a train_test_split(...) call whose opening is
# above this chunk — NOTE(review): confirm the left-hand side unpacks into
# train/test data and labels.
x, y, test_size=0.4)

# optimizer definition
opt = register_opt(optimizer_name='rmsprop', momentum=0.1, learning_rate=0.01)

# model definition: a single GRU layer trained with categorical cross-entropy
# NOTE(review): input_shape suggests sequences of 30 steps with len_chars
# features (one-hot characters) — confirm against the data prep above.
model = Sequential()
model.add(GRU(128, activation='tanh', input_shape=(30, len_chars)))
model.compile(loss='categorical_crossentropy', optimizer=opt)

model.summary('nietzsche gru')

# train, collecting per-epoch loss/accuracy history for the plots below
model_epochs = 20
fit_stats = model.fit(train_data,
                      train_label,
                      batch_size=128,
                      epochs=model_epochs,
                      validation_data=(test_data, test_label),
                      verbose=False)

# plot training vs validation curves
model_name = model.model_name
plot_metric('loss', model_epochs, fit_stats['train_loss'], fit_stats['valid_loss'], model_name=model_name)
plot_metric('accuracy', model_epochs, fit_stats['train_acc'], fit_stats['valid_acc'], model_name=model_name)
# Convolutional feature extractor — continues a `model` built above this chunk.
model.add(BatchNormalization())
model.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu', padding='same'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(BatchNormalization())

# Classifier head: flatten -> dense -> softmax over the digit classes.
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(BatchNormalization())
model.add(Dense(10, activation='softmax'))  # 10 digits classes

# NOTE(review): `opt` is defined above this chunk — confirm.
model.compile(loss='categorical_crossentropy', optimizer=opt)

# Train on 8x8 single-channel digit images; labels are one-hot encoded.
model_epochs = 12
fit_stats = model.fit(train_data.reshape(-1, 1, 8, 8),
                      one_hot(train_label),
                      batch_size=128,
                      epochs=model_epochs,
                      validation_data=(test_data.reshape(-1, 1, 8, 8), one_hot(test_label)),
                      shuffle_data=True)

# Decode softmax outputs back to class indices and report results.
# NOTE(review): the positional `True` flag to predict() presumably toggles
# probability output — confirm against the Model.predict signature.
predictions = unhot(model.predict(test_data.reshape(-1, 1, 8, 8), True))
print_results(predictions, test_label)

plot_digits_img_results(test_data, test_label, predictions)
plot_metric('Loss', model_epochs, fit_stats['train_loss'], fit_stats['valid_loss'])
plot_metric('Accuracy', model_epochs, fit_stats['train_acc'], fit_stats['valid_acc'])
# z-score normalise flattened images for the autoencoder.
# This first fragment completes a z_score(train_data.reshape(...)) call whose
# opening is above this chunk — NOTE(review): confirm.
transformed_image_dims).astype(np.float32))
# NOTE(review): for an autoencoder the "labels" are presumably the images
# themselves, hence the same reshape to transformed_image_dims — confirm.
transformed_train_label = z_score(
    train_label.reshape(train_label.shape[0], transformed_image_dims).astype(np.float32))
transformed_test_data = z_score(
    test_data.reshape(test_data.shape[0], transformed_image_dims).astype(np.float32))
transformed_test_label = z_score(
    test_label.reshape(test_label.shape[0], transformed_image_dims).astype(np.float32))

model_epochs = 500
fit_stats = autoencoder.fit(transformed_train_data,
                            transformed_train_label,
                            batch_size=128,
                            epochs=model_epochs,
                            validation_data=(transformed_test_data, transformed_test_label),
                            shuffle_data=True)

# generate non rescaled test labels for use in generated digits plot (use the same random_seed as above)
_, _, _, test_label = train_test_split(data.data, data.target, test_size=0.2, random_seed=5)

# Reconstruct test images and reshape flat outputs back to (channels, rows, cols).
predictions = autoencoder.predict(transformed_test_data).reshape(
    (-1, channels, img_rows, img_cols))

model_name = autoencoder.model_name
# Plot reconstructed samples — this call's remaining arguments continue below this chunk.
plot_generated_img_samples(unhot(one_hot(test_label)), predictions,
# optimizer definition opt = register_opt(optimizer_name='rmsprop', momentum=0.01, learning_rate=0.01) # opt = register_opt(optimizer_name = 'adadelta', momentum = 0.01, learning_rate = 1) # model definition model = Sequential() model.add(GRU(10, activation='tanh', input_shape=(10, seq_len))) model.compile(loss='categorical_crossentropy', optimizer=opt) model.summary('seq gru') model_epochs = 100 fit_stats = model.fit(train_data, train_label, batch_size=100, epochs=model_epochs, validation_data=(test_data, test_label)) print_seq_results(model.predict(test_data, (0, 2, 1)), test_label, test_data, unhot_axis=2) model_name = model.model_name plot_metric('loss', model_epochs, fit_stats['train_loss'], fit_stats['valid_loss'], model_name=model_name) plot_metric('accuracy',
# optimizer definition opt = register_opt(optimizer_name='adam', momentum=0.1, learning_rate=0.01) # model definition model = Sequential() model.add(Dense(10, activation='sigmoid', input_shape=(train_data.shape[1], ))) model.add(Dense(3, activation='sigmoid')) # 3 iris_classes model.compile(loss='categorical_crossentropy', optimizer=opt) model.summary('iris mlp') model_epochs = 25 fit_stats = model.fit(train_data, one_hot(train_label), batch_size=10, epochs=model_epochs, validation_data=(test_data, one_hot(test_label)), shuffle_data=True) # eval_stats = model.evaluate(test_data, one_hot(train_label)) predictions = unhot(model.predict(test_data)) print_results(predictions, test_label) model_name = model.model_name plot_metric('loss', model_epochs, fit_stats['train_loss'], fit_stats['valid_loss'], model_name=model_name) plot_metric('accuracy', model_epochs,
# Scratch script: fit a single Embedding layer on random integer sequences.
# NOTE(review): the original used `np` and `Embedding` without importing them —
# imports added below; confirm `ztlearn.dl.layers` is the correct module path
# for Embedding in this version of ztlearn.
import numpy as np

from ztlearn.dl.models import Sequential
from ztlearn.dl.layers import Embedding
from ztlearn.utils import train_test_split
from ztlearn.dl.optimizers import register_opt

# optimizer definition: SGD with momentum
opt = register_opt(optimizer_name = 'sgd_momentum', momentum = 0.01, learning_rate = 0.001)

# model definition: one Embedding layer (vocab size 10, embedding dim 2)
model = Sequential(init_method = 'he_normal')
model.add(Embedding(10, 2, activation = 'selu', input_shape = (1, 10)))
model.compile(loss = 'categorical_crossentropy', optimizer = opt)

# Synthetic integer data.
# NOTE(review): labels are drawn from range(14) while the embedding vocab is
# 10 — confirm this mismatch is intended for this experiment.
train_data = np.random.randint(10, size=(5, 1, 10))
train_label = np.random.randint(14, size=(5, 1, 10))

train_data, test_data, train_label, test_label = train_test_split(train_data,
                                                                  train_label,
                                                                  test_size = 0.1)

fit_stats = model.fit(train_data, train_label, batch_size = 4, epochs = 50)

""" works
data = np.arange(0,100,1).reshape(10,1,10)
labels = np.arange(1,101,1).reshape(10,1,10)
model.add(Embedding(100, 5, activation = 'selu', input_shape = (1, 10)))
model.add(RNN(10, activation="tanh", bptt_truncate = 3, input_shape = (10, 10)))
"""