def tune_with_kerastuner2():
    """Tune the one-block CNN on the dogs-vs-cats image directories.

    Runs a KerasTuner ``RandomSearch`` (5 trials x 3 executions) over the
    hyperparameters exposed by ``define_one_block_model`` and returns the
    fitted tuner so callers can retrieve the best models/hyperparameters.

    Returns:
        The ``RandomSearch`` tuner after ``search()`` has completed.
    """
    tuner = RandomSearch(
        define_one_block_model,  # build *function*: called once per trial
        objective='val_accuracy',
        max_trials=5,
        executions_per_trial=3,
        directory="tuner_dir",
        project_name="cats_vs_dogs_tuner",
    )

    # Plain 0-1 rescaling only; augmentation (shifts/flips) is deliberately off.
    train_datagen = ImageDataGenerator(rescale=1.0 / 255.0)
    test_datagen = ImageDataGenerator(rescale=1.0 / 255.0)

    # Prepare directory iterators for the binary cats/dogs task.
    train_it = train_datagen.flow_from_directory(
        'dataset_dogs_vs_cats/train/', class_mode='binary',
        batch_size=16, target_size=(200, 200))
    test_it = test_datagen.flow_from_directory(
        'dataset_dogs_vs_cats/test/', class_mode='binary',
        batch_size=16, target_size=(200, 200))

    tuner.search(train_it,
                 steps_per_epoch=len(train_it),
                 validation_data=test_it,
                 validation_steps=len(test_it),
                 epochs=5,
                 use_multiprocessing=True)
    tuner.results_summary()
    return tuner
def tune_with_kerastuner1():
    """Tune the three-block CNN on the pre-extracted photo/label arrays.

    Bug fixes vs. the previous version:
    - ``RandomSearch`` must receive the model-*building* callable (it
      instantiates a fresh model per trial), not an already-built
      ``Model`` instance.
    - ``search()`` was commented out, so the tuner had no trials and
      ``get_best_models()`` could not succeed; the search is now run.

    Returns:
        The ``RandomSearch`` tuner after ``search()`` has completed.
    """
    photos = load('dogs_vs_cats_photos.npy')
    labels = load('dogs_vs_cats_labels.npy')
    (trainX, testX, trainY, testY) = train_test_split(
        photos, labels, test_size=0.25, random_state=42)

    # Two-class one-hot targets to match the model's categorical output.
    trainY = keras.utils.to_categorical(trainY, 2)
    testY = keras.utils.to_categorical(testY, 2)

    tuner = RandomSearch(
        define_three_block_model,  # pass the build function, not a model instance
        objective='val_accuracy',
        max_trials=5,
        executions_per_trial=3,
        directory="tuner_dir",
        project_name="cats_vs_dogs_tuner",
    )
    tuner.search_space_summary()
    tuner.search(trainX, trainY,
                 epochs=5,
                 validation_data=(testX, testY))
    tuner.results_summary()
    return tuner
def search(dt=600, window_size=360, future_steps=144, epochs=50,
           with_time=True, batch_size=128, max_trials=200):
    """Random-search hyperparameters for the future-prediction model.

    Loads the bathroom1 and kitchen1 sensor datasets, combines them, windows
    the combined series, then tunes on a train/validation split. The last
    two days are held out as a test slice (currently unused here).
    """
    bathroom = Dataset.parse('dataset/', 'bathroom1')
    kitchen = Dataset.parse('dataset/', 'kitchen1')
    combined = bathroom.combine(kitchen)

    X, y = prepare_data_future_steps(combined,
                                     window_size=window_size,
                                     dt=dt,
                                     with_time=with_time,
                                     future_steps=future_steps)

    # Number of samples in one day at resolution dt.
    day = (3600 // dt) * 24
    X_train = X[:-4 * day, :, :]
    X_val = X[-4 * day:-2 * day, :, :]
    X_test = X[-2 * day:, :, :]
    # For now only sensor 24 (target feature index 0).
    y_train = y[:-4 * day, :, 0]
    y_val = y[-4 * day:-2 * day, :, 0]
    y_test = y[-2 * day:, :, 0]

    hypermodel = FuturePredictionModelHyperparameters(
        window_size=window_size,
        num_features=X.shape[2],
        future_steps=future_steps)
    tuner = RandomSearch(hypermodel,
                         objective='val_loss',
                         max_trials=max_trials,
                         directory='test_dir')
    tuner.search_space_summary()
    tuner.search(x=X_train, y=y_train,
                 epochs=epochs,
                 batch_size=batch_size,
                 validation_data=(X_val, y_val),
                 callbacks=[IsNanEarlyStopper(monitor='loss')])
    tuner.results_summary()
def search_model():
    """Random-search a classifier over precomputed feature matrices.

    Pulls the train/validation splits, loads features saved by an earlier
    pipeline step, and tunes ``get_hp_model`` for validation accuracy with
    class weights balancing the training outcome.
    """
    data = get_data()
    (data_train, data_val_model,
     data_val_interpretation, data_test) = get_train_val_test_splits(data)

    # Feature matrices precomputed and saved by an earlier step.
    train_features = np.load("train_features.npy")
    valid_features = np.load("valid_features.npy")
    train_y = data_train["outcome"].values
    valid_y = data_val_model["outcome"].values

    tuner = RandomSearch(get_hp_model,
                         objective='val_accuracy',
                         max_trials=20,
                         executions_per_trial=1,
                         directory='test',
                         project_name='test')

    # Class weights counteract label imbalance in the training outcome.
    weights = dict(enumerate(utils.get_class_weights(train_y)))
    tuner.search(train_features,
                 y=train_y,
                 batch_size=32,
                 epochs=300,
                 validation_data=(valid_features, valid_y),
                 verbose=2,
                 class_weight=weights)
    tuner.results_summary()
# Build model model = keras.Model(inputs, outputs) model.compile(optimizer=Adam(lr), loss='categorical_crossentropy', metrics=['accuracy']) return model # Initialize the tuner by passing the `build_model` function # and specifying key search constraints: maximize val_acc (objective), # and the number of trials to do. More efficient tuners like UltraBand() can # be used. tuner = RandomSearch(build_model, objective='val_accuracy', max_trials=TRIALS, project_name='hello_world_tutorial_results') # Display search space overview tuner.search_space_summary() # Perform the model search. The search function has the same signature # as `model.fit()`. tuner.search(x_train, y_train, batch_size=128, epochs=EPOCHS, validation_data=(x_val, y_val)) # Display the best models, their hyperparameters, and the resulting metrics. tuner.results_summary() # Retrieve the best model and display its architecture best_model = tuner.get_best_models(num_models=1)[0] best_model.summary()
# Timestamped directory so repeated runs do not collide in the tuner cache.
LOG_DIR = f"{int(time.time())}"

tuner = RandomSearch(build_model,
                     objective="val_accuracy",
                     max_trials=1,            # a single trial: effectively no search
                     executions_per_trial=1,
                     directory=LOG_DIR)

# Hold out 10% of the training data for validation during the search.
tuner.search(x=X_train, y=y_train, epochs=150, batch_size=64, validation_split=0.1)

print(tuner.get_best_hyperparameters()[0].values)
# NOTE(review): results_summary() prints its report and returns None,
# so this line additionally prints "None".
print(tuner.results_summary())

best_model = tuner.get_best_models(num_models=1)[0]

# Convert the model's per-class scores to predicted class indices (test set).
NN_pred = best_model.predict(X_test)
pred = list()
for i in range(len(NN_pred)):
    pred.append(np.argmax(NN_pred[i]))
# Ground-truth class indices from the one-hot/score rows of y_test.
test = list()
for i in range(len(y_test)):
    test.append(np.argmax(y_test[i]))

# Same conversion for the training set.
train_pred = best_model.predict(X_train)
train_prediction = list()
for i in range(len(train_pred)):
    train_prediction.append(np.argmax(train_pred[i]))
train = list()
for i in range(len(y_train)):
    # NOTE(review): chunk is truncated here — the loop body is outside view
    # (presumably `train.append(np.argmax(y_train[i]))`; confirm upstream).
# NOTE(review): this chunk begins mid-call — the `tuner = RandomSearch(`
# opening (and its model-builder argument) lie outside this view.
                     objective='val_binary_accuracy',
                     max_trials=7,
                     executions_per_trial=4,
                     directory='project',
                     project_name='Air Quality Index')

# NOTE(review): search_space_summary() prints its report and returns None,
# so this line additionally prints "None".
print(tuner.search_space_summary())

# Ensure dense float32 numpy arrays before handing data to Keras.
x_train = np.asarray(x_train).astype(np.float32)
y_train = np.asarray(y_train).astype(np.float32)
x_val = np.asarray(x_val).astype(np.float32)
y_val = np.asarray(y_val).astype(np.float32)

tuner.search(x_train, y_train, epochs=5, validation_data=(x_val, y_val))
print(tuner.results_summary(num_trials=3))  # 3 best models (also prints "None" — see note above)

# Train and evaluate a separately-defined `model` (defined outside this chunk).
# NOTE(review): the unpacking order (acc, loss) looks suspicious — confirm
# fit_and_evaluate's actual return order before relying on these names.
test_acc, test_loss = fit_and_evaluate(model, x_train, y_train, x_val, y_val,
                                       batch_size=8192, epochs=100)

# Per the variable names below, y_train column 0 is the h1n1 target and
# column 1 the seasonal target.
h1n1_preds, seasonal_preds = make_predictions(model, x_train)
h1n1_true, seasonal_true = y_train[:, 0].tolist(), y_train[:, 1].tolist()
train_score = get_scores(h1n1_true, h1n1_preds, seasonal_true, seasonal_preds)
print(f'Training Accuracy: {train_score}')