# NOTE(review): this line is a whitespace-collapsed paste of a multi-line example and is
# NOT runnable as-is: the inline "# Create Spark context" comment would comment out the
# remainder of the line, and the bare `return` implies an enclosing hyperas objective
# function whose `def` is not visible in this chunk. Kept byte-identical; restore the
# original line breaks/indentation before touching the logic.
#
# What the code does, read left to right:
#  - Builds a 784 -> 512 -> {256|512|1024} -> 10 softmax MLP.  `{{uniform(0, 1)}}` and
#    `{{choice([...])}}` look like hyperas search-space template markers that are
#    substituted before compilation — they are not valid Python syntax. TODO confirm
#    this file is consumed by hyperas, not executed directly.
#  - `nb_epoch` is the legacy Keras 1.x spelling of `epochs` — presumably intentional
#    for the pinned Keras version; verify against the project's requirements.
#  - Returns the hyperopt objective dict: loss = -accuracy (hyperopt minimizes),
#    STATUS_OK, plus the model serialized as YAML and pickled weights so the result
#    can be shipped back from Spark workers.
#  - The trailing script code builds a local[8] SparkContext and runs a distributed
#    search with HyperParamModel (num_workers=4, max_evals=5).
model.add(Dense(512, input_shape=(784,))) model.add(Activation('relu')) model.add(Dropout({{uniform(0, 1)}})) model.add(Dense({{choice([256, 512, 1024])}})) model.add(Activation('relu')) model.add(Dropout({{uniform(0, 1)}})) model.add(Dense(10)) model.add(Activation('softmax')) rms = RMSprop() model.compile(loss='categorical_crossentropy', optimizer=rms, metrics=["accuracy"]) model.fit(X_train, Y_train, batch_size={{choice([64, 128])}}, nb_epoch=1, verbose=2, validation_data=(X_test, Y_test)) score, acc = model.evaluate(X_test, Y_test, verbose=0) print('Test accuracy:', acc) return {'loss': -acc, 'status': STATUS_OK, 'model': model.to_yaml(), 'weights': pickle.dumps(model.get_weights())} # Create Spark context conf = SparkConf().setAppName('Elephas_Hyperparameter_Optimization').setMaster('local[8]') sc = SparkContext(conf=conf) # Define hyper-parameter model and run optimization. hyperparam_model = HyperParamModel(sc,num_workers=4) best_model = hyperparam_model.minimize(model=model, data=data, max_evals=5) print("Best model", best_model.to_yaml())
def test_hyper_param_model(spark_context):
    """Smoke-test the distributed hyperparameter search with a single evaluation.

    ``spark_context`` is presumably a pytest fixture supplying a live
    SparkContext; ``model`` and ``data`` are module-level objects defined
    elsewhere in this file — TODO confirm against the full module.
    """
    # One evaluation is enough to exercise the Spark round-trip end to end.
    HyperParamModel(spark_context).minimize(model=model, data=data, max_evals=1)
# NOTE(review): whitespace-collapsed paste — not runnable as-is. The inline
# "# Create Spark context" comment would swallow the rest of the line, and the bare
# `return` implies an enclosing hyperas objective function whose `def` (and the layers
# it adds before the softmax) are outside this chunk. Kept byte-identical; restore the
# original line breaks before editing logic.
#
# Reviewer notes on the visible code:
#  - `show_accuracy=True` on `fit`/`evaluate` and `nb_epoch` are Keras 0.x/1.x-era
#    arguments removed in later Keras — presumably intentional for a pinned version;
#    verify. With `show_accuracy=True` the single value bound to `acc` here looks like
#    it is relied on to be the accuracy — TODO confirm for the pinned Keras.
#  - `{{choice([64, 128])}}` appears to be a hyperas search-space template marker,
#    substituted before compilation — not valid Python syntax.
#  - Returns the hyperopt objective dict (loss = -acc, STATUS_OK, YAML model,
#    pickled weights), then the trailing script builds a local[8] SparkContext and
#    runs HyperParamModel.minimize with max_evals=5. Unlike the chunk on the first
#    line, no num_workers is passed and the result of minimize is discarded.
model.add(Activation('softmax')) rms = RMSprop() model.compile(loss='categorical_crossentropy', optimizer=rms) model.fit(X_train, Y_train, batch_size={{choice([64, 128])}}, nb_epoch=1, show_accuracy=True, verbose=2, validation_data=(X_test, Y_test)) acc = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0) print('Test accuracy:', acc) return { 'loss': -acc, 'status': STATUS_OK, 'model': model.to_yaml(), 'weights': pickle.dumps(model.get_weights()) } # Create Spark context conf = SparkConf().setAppName('Elephas_Hyperparameter_Optimization').setMaster( 'local[8]') sc = SparkContext(conf=conf) # Define hyper-parameter model and run optimization. hyperparam_model = HyperParamModel(sc) hyperparam_model.minimize(model=model, data=data, max_evals=5)
# NOTE(review): whitespace-collapsed paste — not runnable as-is (the inline
# "# Create Spark context" comment would swallow the rest of the line; the bare
# `return` implies an enclosing objective function, and the `if i > 5 ...` guards imply
# an enclosing loop over `i` — presumably an epoch counter — neither visible here).
# Kept byte-identical; restore the original line breaks before editing logic.
#
# Reviewer notes on the visible code:
#  - The three guards abandon the trial early when the dev metric is still below
#    0.4 / 0.5 / 0.6 after (presumably) 5 / 10 / 15 epochs — a cheap prune of
#    hopeless hyperparameter configurations.
#  - `print('Test kappa:', best_dev_kappa)` labels the DEV kappa as "Test kappa"
#    while `best_test_kappa` goes unused — looks like a copy/paste slip; confirm
#    intent before anyone trusts that log line.
#  - loss = 1 - best_dev_kappa turns "maximize kappa" into the minimization form
#    hyperopt expects; `pk` is presumably a pickle-module alias imported above —
#    verify.
#  - Trailing script: local[8] SparkContext, HyperParamModel with num_workers=2,
#    max_evals=100, then prints the best model's YAML.
evl.output_info() if i > 5 and evl.dev_metric < 0.4: break if i > 10 and evl.dev_metric < 0.5: break if i > 15 and evl.dev_metric < 0.6: break best_dev_kappa = evl.best_dev best_test_kappa = evl.best_test print('Test kappa:', best_dev_kappa) return { 'loss': 1 - best_dev_kappa, 'status': STATUS_OK, 'model': model.to_yaml(), 'weights': pk.dumps(model.get_weights()) } # Create Spark context conf = SparkConf().setAppName('Elephas_Hyperparameter_Optimization').setMaster( 'local[8]') sc = SparkContext(conf=conf) # Define hyper-parameter model and run optimization. hyperparam_model = HyperParamModel(sc, num_workers=2) best_model = hyperparam_model.minimize(model=model, data=data, max_evals=100) best_model_yaml = best_model.to_yaml() print(best_model_yaml)