def fold_evaluate_loop(valid_data_split, fold_id):
    """Run the fold-specific network + postprocessing pipelines on a validation split.

    Parameters
    ----------
    valid_data_split : DataFrame-like with ID_COLUMN and Y_COLUMN columns
        Validation rows for this fold (schema assumed from column constants —
        TODO confirm against the metadata file).
    fold_id : int
        Fold index; used to suffix the pipeline names so per-fold caches/weights
        do not collide.

    Returns
    -------
    (iou, iout, (valid_ids, resized_predicted_masks))
    """
    suffix = '_fold_{}'.format(fold_id)
    pipe_input = {
        'input': {'meta': valid_data_split},
        'callback_input': {'meta_valid': None},
    }
    valid_ids = valid_data_split[ID_COLUMN].tolist()

    LOGGER.info('Start pipeline transform on valid')
    network = unet(config=CONFIG, suffix=suffix, train_mode=False)
    postprocessing = pipelines.mask_postprocessing(config=CONFIG, suffix=suffix)
    network.clean_cache()
    postprocessing.clean_cache()

    raw_masks = network.transform(pipe_input)
    postprocessed = postprocessing.transform({'input_masks': raw_masks})
    # Free the network as soon as its output is consumed to keep peak memory down.
    utils.clean_object_from_memory(network)

    y_pred = postprocessed['binarized_images']
    y_true = utils.read_masks(valid_data_split[Y_COLUMN].values)
    iou, iout = calculate_scores(y_true, y_pred)

    return iou, iout, (valid_ids, raw_masks['resized_images'])
def evaluate():
    """Evaluate the trained network on the first CV fold's validation split.

    Reads the metadata CSV, takes the validation indices from the first fold of
    a depth-sorted K-fold split, runs the network and mask-postprocessing
    pipelines, logs/sends IOU and IOUT scores, and dumps
    (meta, y_true, y_pred) to ``validation_results.pkl`` in EXPERIMENT_DIR.
    """
    meta = pd.read_csv(PARAMS.metadata_filepath)
    meta_train = meta[meta['is_train'] == 1]

    cv = utils.KFoldBySortedValue(n_splits=PARAMS.n_cv_splits,
                                  shuffle=PARAMS.shuffle,
                                  random_state=SEED)
    # Only the first fold's validation indices are used for evaluation.
    for train_idx, valid_idx in cv.split(
            meta_train[DEPTH_COLUMN].values.reshape(-1)):
        break

    meta_valid_split = meta_train.iloc[valid_idx]
    if DEV_MODE:
        meta_valid_split = meta_valid_split.sample(PARAMS.dev_mode_size,
                                                   random_state=SEED)
    # BUGFIX: read ground-truth masks AFTER the optional dev-mode sampling.
    # Previously y_true_valid was read from the full split before sampling,
    # so in DEV_MODE the scores compared predictions on the sampled subset
    # against truths for the whole split (misaligned lengths/rows).
    y_true_valid = utils.read_masks(meta_valid_split[Y_COLUMN].values)

    data = {
        'input': {'meta': meta_valid_split},
        'callback_input': {'meta_valid': None},
    }

    pipeline_network = unet(config=CONFIG, train_mode=False)
    pipeline_postprocessing = pipelines.mask_postprocessing(config=CONFIG)
    pipeline_network.clean_cache()

    output = pipeline_network.transform(data)
    valid_masks = {'input_masks': output}
    output = pipeline_postprocessing.transform(valid_masks)
    # Clear both pipelines' caches once predictions are in hand.
    pipeline_network.clean_cache()
    pipeline_postprocessing.clean_cache()
    y_pred_valid = output['binarized_images']

    LOGGER.info('Calculating IOU and IOUT Scores')
    iou_score, iout_score = calculate_scores(y_true_valid, y_pred_valid)
    LOGGER.info('IOU score on validation is {}'.format(iou_score))
    CTX.channel_send('IOU', 0, iou_score)
    LOGGER.info('IOUT score on validation is {}'.format(iout_score))
    CTX.channel_send('IOUT', 0, iout_score)

    results_filepath = os.path.join(EXPERIMENT_DIR, 'validation_results.pkl')
    LOGGER.info('Saving validation results to {}'.format(results_filepath))
    joblib.dump((meta_valid_split, y_true_valid, y_pred_valid), results_filepath)