def run(state, include=None, exclude=None, no_reverse=False, raw=False, place=None):
    """Run transformations on data loaded in MongoDB.

    State is required. Place is optional. Optionally provide include/exclude
    to limit transforms that are performed.

    Args:
        state: identifier of the state whose loaded data is transformed.
        include: optional filter limiting which transforms run.
        exclude: optional filter excluding transforms from the run.
        no_reverse: when True, skip auto-reversing previously-run transforms.
        raw: passed through to _select_transforms to pick raw transforms.
        place: optional place scope passed through to _select_transforms.

    Exits the process via sys.exit if include/exclude selection is invalid.
    """
    try:
        run_transforms = _select_transforms(state, include, exclude, raw, place)
    except IncludeExcludeError as e:
        # Invalid include/exclude combination: abort with the error as message.
        sys.exit(e)
    for transform in run_transforms:
        if not no_reverse and transform.auto_reverse:
            # Reverse the transform if it's been run previously
            transform.reverse()
        # NOTE: print statements modernized to print() calls for Python 3
        # compatibility, consistent with the rest of the file.
        print('Executing %s' % transform)
        transform()
        validators = transform.validators.values()
        if validators:
            print("Executing validation")
            run_validation(state, validators)
def run(state, include=None, exclude=None, no_reverse=False):
    """Run transformations on data loaded in MongoDB.

    State is required. Optionally provide include/exclude to limit
    transforms that are performed.

    Args:
        state: identifier of the state whose loaded data is transformed.
        include: optional filter limiting which transforms run.
        exclude: optional filter excluding transforms from the run.
        no_reverse: when True, skip auto-reversing previously-run transforms.

    Exits the process via sys.exit if include/exclude selection is invalid.
    """
    try:
        run_transforms = _select_transforms(state, include, exclude)
    except IncludeExcludeError as e:
        # Invalid include/exclude combination: abort with the error as message.
        sys.exit(e)
    for transform in run_transforms:
        if not no_reverse and transform.auto_reverse:
            # Reverse the transform if it's been run previously
            transform.reverse()
        # NOTE: print statements modernized to print() calls for Python 3
        # compatibility, consistent with the rest of the file.
        print('Executing %s' % transform)
        transform()
        validators = transform.validators.values()
        if validators:
            print("Executing validation")
            run_validation(state, validators)
# NOTE(review): this chunk is the tail of a build_model(...) definition whose
# `def` line is outside this view, fused onto one physical line with the
# module-level code that follows it (`model = build_model(...)` through
# `validate.run_validation(...)`). The function-body / top-level boundary sits
# at `return model`, but the original indentation cannot be recovered from
# here — restore formatting against the upstream file before editing.
# Presumably builds a binary classifier (sigmoid output, BinaryCrossentropy)
# and then reloads the last-epoch weights for validation — TODO confirm.
model.add( keras.layers.Dense(32, activation='relu', kernel_regularizer=keras.regularizers.l2(0.0001))) model.add(keras.layers.Dropout(0.4)) model.add( keras.layers.Dense( 1, activation='sigmoid', bias_initializer=keras.initializers.Constant(output_bias))) model.compile(loss=keras.losses.BinaryCrossentropy(), optimizer="adam", metrics=metrics) print(model.summary()) return model model = build_model( input_shape=(40, 40, 3), filters=filters, batch_norm=batch_norm, #output_bias=output_bias, metrics=metrics) model.load_weights(weightsDir + 'lastEpoch.h5') validate.run_validation(model, weightsDir + 'lastEpoch.h5', outputDir, dataDir, plotDir)
# NOTE(review): interior of a training routine whose enclosing `def` is outside
# this view, collapsed onto one physical line: closes a callbacks list
# (EarlyStopping + ModelCheckpoint; TensorBoard commented out), runs
# model.fit, saves last-epoch weights, pickles the history, plots it, and
# optionally runs validation. Original indentation cannot be recovered from
# here — restore formatting against the upstream file before editing.
# NOTE(review): this chunk calls validate.run_validation with a trailing
# batch_size argument that the call on the preceding chunk omits — verify
# the two call sites against run_validation's signature.
keras.callbacks.EarlyStopping(patience=patience_count), keras.callbacks.ModelCheckpoint(filepath=weightsDir+'model.{epoch}.h5', save_best_only=True, monitor=monitor, mode='auto') # tf.keras.callbacks.TensorBoard(log_dir=logDir, # histogram_freq=0, # write_graph=False, # write_images=False) ] history = model.fit(train_generator, epochs = epochs, verbose= v, validation_data=val_generator, callbacks=callbacks) model.save_weights(weightsDir+'lastEpoch.h5') print(utils.bcolors.GREEN+"Saved weights to "+weightsDir+utils.bcolors.ENDC) # save and plot history file with open(outputDir+'history.pkl', 'wb') as f: pickle.dump(history.history, f) print(utils.bcolors.GREEN+"Saved history, train and validation files to "+outputDir+utils.bcolors.ENDC) utils.plot_history(history, plotDir, ['loss','accuracy']) print(utils.bcolors.YELLOW+"Plotted history to "+plotDir+utils.bcolors.ENDC) if(run_validate): validate.run_validation(model, weightsDir+'lastEpoch.h5', outputDir, dataDir, plotDir, batch_size)
def validate():
    """Thin entry point that delegates to run_validation with no arguments."""
    run_validation()