import logging

from skorch import NeuralNetRegressor
from sklearn.model_selection import RandomizedSearchCV

logger = logging.getLogger(__name__)


def optimize(model):
    logger.info("Checkpoint2")
    X = model.predictor_src  # + self.predictor_tgt
    y = model.predictor_tgt  # y = model.config.sentence_level
    print(X)
    print(y)

    # Hyperparameter tuning with random search
    net = NeuralNetRegressor(
        model,
        max_epochs=10,
        lr=0.1,
        # Shuffle training data on each epoch
        iterator_train__shuffle=True,
    )
    net.fit(X, y)
    y_proba = net.predict_proba(X)  # sanity-check predictions on the training inputs

    # Deactivate skorch-internal train-valid split and verbose logging
    net.set_params(train_split=False, verbose=0)

    # skorch routes module hyperparameters through the 'module__' prefix;
    # 'max_epochs', 'lr', and 'batch_size' are parameters of the net itself.
    # Batch size and learning rate are searched independently, with learning
    # rates given as floats rather than strings.
    params = {
        'max_epochs': [7],
        'module__hidden_LSTM': [32, 64, 128],
        'batch_size': [32, 64],
        'lr': [1e-3, 2e-3],
        'module__dropout': [0.5],
    }
    # 'accuracy' is a classification metric; score the regressor with MSE instead
    gs = RandomizedSearchCV(net, params, refit=False, cv=3,
                            scoring='neg_mean_squared_error', verbose=2)
    gs.fit(X, y)
    print("best score: {:.3f}, best params: {}".format(
        gs.best_score_, gs.best_params_))
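# A minimal self-contained sketch of the parameter routing used above, for
# reference: skorch exposes module hyperparameters under the 'module__'
# prefix, while 'lr', 'batch_size', and 'max_epochs' are set on the net.
# RegressorModule and the random toy data are hypothetical stand-ins, not
# part of the original pipeline.
import numpy as np
from torch import nn


class RegressorModule(nn.Module):
    def __init__(self, hidden_LSTM=64, dropout=0.5):
        super().__init__()
        self.lstm = nn.LSTM(input_size=8, hidden_size=hidden_LSTM,
                            batch_first=True)
        self.drop = nn.Dropout(dropout)
        self.out = nn.Linear(hidden_LSTM, 1)

    def forward(self, X):
        h, _ = self.lstm(X)          # h: (batch, seq_len, hidden_LSTM)
        return self.out(self.drop(h[:, -1]))  # last time step -> scalar


demo_net = NeuralNetRegressor(RegressorModule, train_split=False, verbose=0)
demo_params = {
    'max_epochs': [7],
    'module__hidden_LSTM': [32, 64, 128],
    'batch_size': [32, 64],
    'lr': [1e-3, 2e-3],
    'module__dropout': [0.5],
}
demo_X = np.random.randn(64, 5, 8).astype(np.float32)
demo_y = np.random.randn(64, 1).astype(np.float32)
demo_search = RandomizedSearchCV(demo_net, demo_params, n_iter=4, cv=3,
                                 scoring='neg_mean_squared_error', refit=False)
demo_search.fit(demo_X, demo_y)
print(demo_search.best_params_)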
import matplotlib.pyplot as plt
from torch.optim import SGD
from lifelines.utils import concordance_index

# 'net', 'model', 'train0df', and 'valid0' are assumed to be defined earlier
# in the script; ax1 is a matplotlib axis for the loss curves.
fig, ax1 = plt.subplots()

print("Fitting")
net.fit(train0df, y=None)  # the dataset itself yields (X, y) pairs
print("Fit completed")

# Plot training and validation loss curves from the skorch history
history = net.history
train_loss0 = history[:, 'train_loss']
valid_loss0 = history[:, 'valid_loss']
ax1.plot(train_loss0)
ax1.plot(valid_loss0)
ax1.legend(['train_loss', 'valid_loss'])

# Persist model weights, optimizer state, and training history
net.save_params(f_params='dcs0_0005.pkl',
                f_optimizer='dcs0_0005_optimizer.pkl',
                f_history='dcs0_0005_history.json')

pred = net.predict_proba(valid0)
label = valid0.get_label()
# lifelines' concordance_index takes the true values first and the predicted
# scores second; the result is a c-index, not a classification accuracy
accuracy = concordance_index(label, pred)
print(accuracy)

# NegativeLogLikelihood is a custom criterion assumed to be defined elsewhere
net1 = NeuralNetRegressor(
    model,
    criterion=NegativeLogLikelihood,
    lr=0.00001,
    batch_size=512,
    max_epochs=100,
    optimizer=SGD,
    optimizer__momentum=0.9,
    optimizer__weight_decay=0.001,
    iterator_train__shuffle=True,
    iterator_train__num_workers=10,
    iterator_valid__shuffle=True,
)  # the original call is truncated here; any further arguments are omitted
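# For reference, a minimal sketch of restoring the checkpoint saved above
# with save_params; it assumes the same 'model' module and 'valid0' set are
# in scope, and 'net_restored' is a hypothetical name. skorch requires the
# net to be initialized before parameters can be loaded.
net_restored = NeuralNetRegressor(model)
net_restored.initialize()
net_restored.load_params(f_params='dcs0_0005.pkl',
                         f_optimizer='dcs0_0005_optimizer.pkl',
                         f_history='dcs0_0005_history.json')
pred_restored = net_restored.predict_proba(valid0)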