import warnings

import numpy as np
from sklearn.exceptions import UndefinedMetricWarning
from sklearn.metrics import precision_recall_fscore_support

def on_epoch_end(self, batch, logs=None):
    # Validation data comes from a generator; pre-allocate room for
    # batches * batch_size rows and trim afterwards, since the last
    # batch may hold fewer rows than batch_size.
    batches = len(self.validation_data)
    total = batches * self.batch_size
    real_total = 0
    val_pred = np.zeros((total, self.num_classes))
    val_true = np.zeros((total, self.num_classes))
    for single_batch in range(batches):
        val_x, val_y = next(self.validation_data)
        row_num = val_x.shape[0]
        # Count the rows actually seen, not the pre-allocated total.
        real_total += row_num
        start = single_batch * self.batch_size
        val_pred[start:start + row_num] = \
            prp_2_oh_array(np.asarray(self.model.predict(val_x)))
        val_true[start:start + row_num] = val_y
    # Drop the unused tail of the pre-allocated arrays.
    val_pred = val_pred[:real_total, :]
    val_true = val_true[:real_total, :]
    warnings.filterwarnings('ignore', category=UndefinedMetricWarning)
    precision, recall, f_score, support = precision_recall_fscore_support(
        val_true, val_pred)
    for p, r, f, s in zip(precision, recall, f_score, support):
        print(" - val_f1: %0.4f - val_pre: %0.4f - val_rec: %0.4f - ins %s"
              % (f, p, r, s))
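# All three variants rely on a helper called prp_2_oh_array whose body is
# not shown in this snippet. A minimal sketch of what it could look like,
# assuming it turns an (n_samples, n_classes) array of predicted
# probabilities into one-hot predictions via a row-wise argmax:

def prp_2_oh_array(prob_array):
    # Index rows of an identity matrix with each sample's argmax,
    # producing a one-hot (n_samples, n_classes) integer array.
    return np.eye(prob_array.shape[1], dtype=int)[np.argmax(prob_array, axis=1)]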
def on_epoch_end(self, batch, logs=None):
    val_x = self.validation_data[0]
    val_y = self.validation_data[1]
    prd_y = prp_2_oh_array(np.asarray(self.model.predict(val_x)))
    warnings.filterwarnings("ignore", category=UndefinedMetricWarning)
    # average='macro' yields a single unweighted mean over all classes.
    precision, recall, f_score, _ = precision_recall_fscore_support(
        val_y, prd_y, average='macro')
    print(" - val_f1: %0.4f - val_pre: %0.4f - val_rec: %0.4f"
          % (f_score, precision, recall))
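# This variant differs from the others only in passing average='macro',
# which collapses the per-class scores into one unweighted mean per
# metric. A self-contained illustration of the two modes on made-up
# one-hot data (the arrays below are toy values, not from the original):

y_true = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0]])
y_pred = np.array([[1, 0, 0], [0, 0, 1], [0, 0, 1], [0, 1, 0]])

# One number per metric, averaged over classes.
p, r, f, _ = precision_recall_fscore_support(y_true, y_pred, average='macro')
print(p, r, f)

# Default average=None: one score per class, as in the other two variants.
p, r, f, s = precision_recall_fscore_support(y_true, y_pred)
print(p, r, f, s)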
def on_epoch_end(self, batch, logs=None):
    val_x = self.validation_data[0]
    val_y = self.validation_data[1]
    prd_y = prp_2_oh_array(np.asarray(self.model.predict(val_x)))
    warnings.filterwarnings("ignore", category=UndefinedMetricWarning)
    # No average argument: one precision/recall/F1/support value per class.
    precision, recall, f_score, support = precision_recall_fscore_support(
        val_y, prd_y)
    for p, r, f, s in zip(precision, recall, f_score, support):
        print(" - val_f1: %0.4f - val_pre: %0.4f - val_rec: %0.4f - ins %s"
              % (f, p, r, s))
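# None of the methods above show their enclosing class. A hypothetical
# wrapper (the class name and constructor arguments are assumptions, not
# from the original) around the per-class variant, written for newer
# Keras/TF versions where Callback.validation_data is no longer populated,
# so the validation set is handed to the constructor instead:

from tensorflow import keras

class F1Callback(keras.callbacks.Callback):
    def __init__(self, val_x, val_y):
        super().__init__()
        self.val_x = val_x
        self.val_y = val_y

    def on_epoch_end(self, epoch, logs=None):
        prd_y = prp_2_oh_array(np.asarray(self.model.predict(self.val_x)))
        warnings.filterwarnings("ignore", category=UndefinedMetricWarning)
        precision, recall, f_score, support = precision_recall_fscore_support(
            self.val_y, prd_y)
        for p, r, f, s in zip(precision, recall, f_score, support):
            print(" - val_f1: %0.4f - val_pre: %0.4f - val_rec: %0.4f - ins %s"
                  % (f, p, r, s))

# Usage sketch: pass the callback to fit().
# model.fit(train_x, train_y, epochs=10,
#           callbacks=[F1Callback(val_x, val_y)])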