def get_callbacks(name):
    """Build the standard Keras callback list for training run *name*.

    Returns:
        list: EpochDots progress printing, early stopping on validation
        categorical cross-entropy (best weights restored), and
        learning-rate reduction on plateau.

    NOTE(review): *name* is currently unused — it fed the removed
    TensorBoard/ModelCheckpoint callbacks; kept so existing callers
    that pass a run name keep working.
    """
    return [
        modeling.EpochDots(),
        # Stop once val loss has not improved for 50 epochs, and roll the
        # model back to the best-performing weights seen so far.
        tf.keras.callbacks.EarlyStopping(
            monitor='val_categorical_crossentropy',
            patience=50,
            restore_best_weights=True,
        ),
        # Cut the learning rate 10x after 10 stagnant epochs.
        tf.keras.callbacks.ReduceLROnPlateau(
            monitor='val_categorical_crossentropy',
            factor=0.1,
            patience=10,
            verbose=0,
            mode='auto',
            min_delta=0.0001,
            cooldown=0,
            min_lr=0,
        ),
    ]
def get_callbacks(self, pat=10):
    """Return the training callbacks: progress dots plus early stopping.

    Args:
        pat: patience — epochs without val_loss improvement before stopping.
    """
    early_stop = tf.keras.callbacks.EarlyStopping(
        monitor='val_loss',
        patience=pat,
    )
    return [modeling.EpochDots(), early_stop]
# --- Test-set preparation: normalise the first 5 feature columns ---
z_test = dtest[:, 0:5]
z_scaler = procs.StandardScaler()
# NOTE(review): the scaler is fit on the *test* features rather than reusing
# the training-set scaler — this leaks test statistics into normalisation;
# confirm this is intentional.
z_scaler.fit(z_test)
z_test_norm = z_scaler.transform(z_test)
raw_dataset_test = pd.DataFrame(z_test_norm, columns=names)

# Target column; boolean mask selecting low-energy events (< 5000).
w_test = dtest[:, 5]
test_low = w_test < 5000
w_test = w_test[test_low]

model = build_model()
EPOCHS = 1000
# Train up to 3 rounds; early stopping on training loss ends each round.
for j in range(0, 3):
    early_stop = keras.callbacks.EarlyStopping(monitor='loss', patience=20)
    history = model.fit(raw_dataset, label_dataset, epochs=EPOCHS,
                        validation_split=0.2, verbose=0,
                        callbacks=[early_stop, tfmod.EpochDots()])
# Assumes the model was compiled with metrics=['mae', 'mse'] — TODO confirm.
loss, mae, mse = model.evaluate(raw_dataset, label_dataset, verbose=2)
model.summary()

# Predict on the normalised test set and undo the target scaling.
test_predict = model.predict(raw_dataset_test).flatten()
M5 = np.size(test_predict)
# BUGFIX: the original reshaped with `K`, which is not defined here, while
# M5 (the actual prediction count) was computed and never used.
# reshape(M5, 1) is equivalent to reshape(-1, 1).
test_predict = test_predict.reshape(M5, 1)
test_predict = target_scaler.inverse_transform(test_predict)
test_predict = test_predict.T[0]
print('Datos predecidos (M): {0}'.format(np.shape(test_predict)[0]))

# para la Regression, solo bajas energias
# Keep only the low-energy subset of the predictions for the regression.
df = pd.DataFrame()
test_out = test_predict[test_low]
df['RealData'] = w_test