"""Validate and train models on sequences of frames or pre-extracted CNN features."""
import os
import time

from keras.callbacks import TensorBoard, ModelCheckpoint, EarlyStopping, CSVLogger

# Local project modules (assumed module names) providing the dataset
# wrapper and the model factory used below.
from data import DataSet
from models import ResearchModels

def validate(data_type, model, seq_length=40, saved_model=None,
             class_limit=None, image_shape=None):
    batch_size = 32

    # Get the data and process it.
    if image_shape is None:
        data = DataSet(seq_length=seq_length, class_limit=class_limit)
    else:
        data = DataSet(seq_length=seq_length, class_limit=class_limit,
                       image_shape=image_shape)

    val_generator = data.frame_generator(batch_size, 'test', data_type)

    # Get the model.
    rm = ResearchModels(len(data.classes), model, seq_length, saved_model)

    # Evaluate! In the Keras 2 API, evaluate_generator takes `steps`
    # (a number of batches) rather than the old `val_samples` (a number
    # of samples), so 3200 samples become 3200 // batch_size batches.
    results = rm.model.evaluate_generator(
        generator=val_generator,
        steps=3200 // batch_size)

    print(results)
    print(rm.model.metrics_names)
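# A minimal sketch of invoking `validate` on a saved checkpoint. The
# 'features' data type and 'lstm' model name are assumptions for
# illustration; point `saved_model` at a checkpoint written by `train`:
#
#   validate('features', 'lstm',
#            saved_model='data/checkpoints/<your-checkpoint>.hdf5')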
def train(data_type, seq_length, model, saved_model=None,
          class_limit=None, image_shape=None,
          load_to_memory=False, batch_size=32, nb_epoch=100):
    # Helper: Save the model.
    checkpointer = ModelCheckpoint(
        filepath=os.path.join('data', 'checkpoints', model + '-' + data_type + \
            '.{epoch:03d}-{val_accuracy:.3f}.hdf5'),
        monitor='val_accuracy',
        verbose=1,
        save_best_only=True)

    # Helper: TensorBoard.
    tb = TensorBoard(log_dir=os.path.join('data', 'logs', model))

    # Helper: Stop when we stop learning.
    early_stopper = EarlyStopping(patience=50, monitor='val_accuracy')

    # Helper: Save results.
    timestamp = time.time()
    csv_logger = CSVLogger(os.path.join('data', 'logs', model + '-' + 'training-' + \
        str(timestamp) + '.log'))

    # Get the data and process it.
    if image_shape is None:
        data = DataSet(
            seq_length=seq_length,
            class_limit=class_limit
        )
    else:
        data = DataSet(
            seq_length=seq_length,
            class_limit=class_limit,
            image_shape=image_shape
        )

    # Get samples per epoch.
    # Multiply by 0.7 to attempt to guess how much of data.data is the train set.
    # Cast to int: Keras expects an integer number of steps.
    steps_per_epoch = int((len(data.data) * 0.7) // batch_size)

    if load_to_memory:
        # Get data.
        X, y = data.get_all_sequences_in_memory('train', data_type)
        X_test, y_test = data.get_all_sequences_in_memory('test', data_type)
    else:
        # Get generators.
        generator = data.frame_generator(batch_size, 'train', data_type)
        val_generator = data.frame_generator(batch_size, 'test', data_type)

    # Get the model.
    rm = ResearchModels(len(data.classes), model, seq_length, saved_model)

    # Fit!
    if load_to_memory:
        # Use standard fit.
        rm.model.fit(
            X,
            y,
            batch_size=batch_size,
            validation_data=(X_test, y_test),
            verbose=1,
            # callbacks=[tb, early_stopper, csv_logger],
            callbacks=[early_stopper, csv_logger, checkpointer],
            epochs=nb_epoch)
    else:
        # Use fit generator.
        rm.model.fit_generator(
            generator=generator,
            steps_per_epoch=steps_per_epoch,
            epochs=nb_epoch,
            verbose=1,
            # callbacks=[tb, early_stopper, csv_logger, checkpointer],
            callbacks=[early_stopper, checkpointer],
            validation_data=val_generator,
            validation_steps=40,
            workers=4)
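# A runnable driver sketch showing how `train` is typically wired up. The
# 'lstm' model name, the list of image-based models, and the (80, 80, 3)
# frame shape are assumptions for illustration, not values fixed by this
# module; adjust them for your experiment.
def main():
    model = 'lstm'    # assumed name understood by ResearchModels
    seq_length = 40

    if model in ['conv_3d', 'lrcn']:
        # Models that consume raw frames need images and a frame shape.
        data_type = 'images'
        image_shape = (80, 80, 3)
    else:
        # Sequence models consume pre-extracted CNN features.
        data_type = 'features'
        image_shape = None

    train(data_type, seq_length, model, saved_model=None,
          class_limit=None, image_shape=image_shape,
          load_to_memory=False, batch_size=32, nb_epoch=100)

if __name__ == '__main__':
    main()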