def proceed_validation(args, is_save=True, is_densecrf=False):
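    """Run multi-scale inference over the validation split and report mIoU,
    mean accuracy, and pixel accuracy. When `is_save` is True, a side-by-side
    panel of image / ground truth / prediction is written for every sample.
    """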
    import cv2
    #name = "ningbo_val"
    name = "val"
    ds = dataset.PSSD(args.base_dir, args.meta_dir, name)
    ds = BatchData(ds, 1)

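    # Build a single-image offline predictor that maps the 'image' input to
    # the model's 'prob' (per-class probability) output.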
    pred_config = PredictConfig(model=Model(),
                                session_init=get_model_loader(args.load),
                                input_names=['image'],
                                output_names=['prob'])
    predictor = OfflinePredictor(pred_config)
    from tensorpack.utils.fs import mkdir_p
    result_dir = "result/pssd_apr26"
    #result_dir = "ningbo_validation"
    mkdir_p(result_dir)
    stat = MIoUStatistics(CLASS_NUM)
    logger.info("start validation...")
    ds.reset_state()  # tensorpack DataFlows should be reset before manual iteration
    for i, (image, label) in enumerate(tqdm(ds.get_data()), start=1):
        label = np.squeeze(label)
        image = np.squeeze(image)

        def mypredictor(input_img):
            # input: a 1*H*W*3 image batch; returns the H*W*C probability map
            # (first output tensor, first sample in the batch)
            output = predictor(input_img)
            return output[0][0]

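        # Multi-scale, tiled prediction: the image is evaluated at several
        # scales using CROP_SIZE tiles, the class scores are fused into a
        # single H*W*CLASS_NUM map, and dense CRF refinement is optional.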
        prediction = predict_scaler(image,
                                    mypredictor,
                                    scales=[0.5, 0.75, 1, 1.25, 1.5],
                                    classes=CLASS_NUM,
                                    tile_size=CROP_SIZE,
                                    is_densecrf=is_densecrf)
        prediction = np.argmax(prediction, axis=2)
        stat.feed(prediction, label)

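        # Optionally save a side-by-side panel of the input image, the
        # colorized ground-truth labels, and the colorized prediction.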
        if is_save:
            cv2.imwrite(
                os.path.join(result_dir, "{}.png".format(i)),
                np.concatenate((image, visualize_label(label),
                                visualize_label(prediction)),
                               axis=1))
            #imwrite_grid(image,label,prediction, border=512, prefix_dir=result_dir, imageId = i)

    logger.info("mIoU: {}".format(stat.mIoU))
    logger.info("mean_accuracy: {}".format(stat.mean_accuracy))
    logger.info("accuracy: {}".format(stat.accuracy))


def get_data(name, base_dir, meta_dir, batch_size):
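    """Build the PSSD DataFlow for `name`: 'train*' splits get shape
    augmentation, batching, and ZMQ prefetching; other splits are left
    un-augmented and batched with size 1.
    """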
    isTrain = 'train' in name
    ds = dataset.PSSD(base_dir, meta_dir, name, shuffle=True)

    if isTrain:  # training-time shape augmentation
        shape_aug = [
            imgaug.RandomResize(xrange=(0.7, 1.5),
                                yrange=(0.7, 1.5),
                                aspect_ratio_thres=0.15),
            RandomCropWithPadding(CROP_SIZE, IGNORE_LABEL),
            imgaug.Flip(horiz=True),
        ]
    else:
        shape_aug = []  # no shape augmentation for validation / test

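    # Apply the same geometric augmentations to both the image (component 0)
    # and the label map (component 1).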
    ds = AugmentImageComponents(ds, shape_aug, (0, 1), copy=False)

    if isTrain:
        ds = BatchData(ds, batch_size)
        ds = PrefetchDataZMQ(ds, 1)  # prefetch batches in a background process
    else:
        ds = BatchData(ds, 1)
    return ds
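
# Example usage (sketch; assumes an argparse-style `args` namespace providing
# the `base_dir`, `meta_dir`, `load`, and `batch_size` attributes referenced
# above):
#   train_ds = get_data('train', args.base_dir, args.meta_dir, args.batch_size)
#   proceed_validation(args, is_save=True, is_densecrf=False)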