# build and save model
model = regression_model(
    features.shape[1],
    emb_mat,
    seqs.shape[1],
    conv_layers=config['conv_layers'],
    filters=config['conv_filters'],
    dropout=config['dropout'],
    fc_layers=config['fc_layers'],
    fc_units=config['fc_units'],
    metrics=[r2],
)
save_architecture(model, model_path + '.json')

# load model callbacks
cbs = get_callbacks(model_name=model_name,
                    log_dir=logging_path,
                    stop_patience=10,
                    lr_patience=4,
                    verbose=1,
                    emb_freq=5,
                    emb_layers=['word_embedding'],
                    emb_meta={'word_embedding': 'word_labels.tsv'})

# train model
model.fit(
    {'text_input': seqs[:train_size], 'aux_input': features[:train_size]},
    {'output': labels[:train_size]},
    validation_data=({'text_input': seqs[-val_size:], 'aux_input': features[-val_size:]},
                     {'output': labels[-val_size:]}),
    batch_size=batch_size,
    epochs=200,
    verbose=0,
    shuffle=True,
    callbacks=cbs,
)

# print best result
history = cbs[2]
print_regression_metrics(history)
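# A minimal sketch (not part of the original notebook): the same dict-keyed
# inputs accepted by model.fit can also be passed to Keras' model.evaluate to
# score the trained model on the held-out validation split. This evaluates
# whatever weights are currently in memory, which may differ from the best
# epoch unless the callbacks returned by get_callbacks restore them.
val_loss, val_r2 = model.evaluate(
    {'text_input': seqs[-val_size:], 'aux_input': features[-val_size:]},
    {'output': labels[-val_size:]},
    batch_size=batch_size,
    verbose=0,
)
print("Validation loss: {:.4f}, r2: {:.4f}".format(val_loss, val_r2))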
print("Validation set: {} examples".format(val_size)) # Create logging directory get_ipython().system('mkdir -p $logging_path') # Remove prior logs get_ipython().system('rm $logging_path/*') # load and save model model = regression_model(feats.shape[1], config['num_layers'], config['num_units'], metrics=[r2]) save_architecture(model, model_path + '.json') # load model callbacks cbs = get_callbacks(model_name=model_name, log_dir=logging_path, verbose=1) # train model model.fit(feats[:train_size], labels[:train_size], validation_data=(feats[-val_size:], labels[-val_size:]), batch_size=batch_size, epochs=100, verbose=0, shuffle=True, callbacks=cbs) # print best result history = cbs[2] print_regression_metrics(history)