def validate(val_loader, model, epoch, write_to_file=True):
    """Run one evaluation pass over ``val_loader``.

    Args:
        val_loader: iterable of ``(input, target)`` tensor batches.
        model: network under evaluation; switched to eval mode here.
        epoch: epoch index, used only to name the saved comparison image.
        write_to_file: when True, append the averaged metrics to ``test_csv``.

    Returns:
        Tuple ``(avg, img_merge)`` — the averaged ``Result`` metrics and the
        stitched comparison image (``None`` when nothing was visualized).
    """
    average_meter = AverageMeter()

    # switch to evaluate mode
    model.eval()

    # Fix: initialize up front so the return statement cannot raise
    # NameError when the loader is empty (or the modality is depth-only).
    img_merge = None

    end = time.time()
    for i, (input, target) in enumerate(val_loader):
        input, target = input.cuda(), target.cuda()
        # torch.cuda.synchronize()
        data_time = time.time() - end

        # compute output
        end = time.time()
        with torch.no_grad():
            pred = model(input)
        # torch.cuda.synchronize()
        gpu_time = time.time() - end

        # measure accuracy and record loss
        result = Result()
        result.evaluate(pred.data, target.data)
        average_meter.update(result, gpu_time, data_time, input.size(0))
        end = time.time()

        # save 8 images for visualization (one row every `skip` batches)
        skip = 50
        if args.modality == 'd':
            img_merge = None
        else:
            if args.modality == 'rgb':
                rgb = input
            elif args.modality == 'rgbd':
                # rgbd input: first 3 channels RGB, remaining channel(s) depth
                rgb = input[:, :3, :, :]
                depth = input[:, 3:, :, :]

            if i == 0:
                if args.modality == 'rgbd':
                    img_merge = utils.merge_into_row_with_gt(
                        rgb, depth, target, pred)
                else:
                    img_merge = utils.merge_into_row(rgb, target, pred)
            elif (i < 8 * skip) and (i % skip == 0):
                if args.modality == 'rgbd':
                    row = utils.merge_into_row_with_gt(rgb, depth, target,
                                                       pred)
                else:
                    row = utils.merge_into_row(rgb, target, pred)
                img_merge = utils.add_row(img_merge, row)
            elif i == 8 * skip:
                filename = output_directory + '/comparison_' + str(
                    epoch) + '.png'
                utils.save_image(img_merge, filename)

        if (i + 1) % args.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  't_GPU={gpu_time:.3f}({average.gpu_time:.3f})\n\t'
                  'RMSE={result.rmse:.2f}({average.rmse:.2f}) '
                  'MAE={result.mae:.2f}({average.mae:.2f})\n\t'
                  'Delta1={result.delta1:.3f}({average.delta1:.3f}) '
                  'REL={result.absrel:.3f}({average.absrel:.3f}) '
                  'Lg10={result.lg10:.3f}({average.lg10:.3f}) '.format(
                      i + 1,
                      len(val_loader),
                      gpu_time=gpu_time,
                      result=result,
                      average=average_meter.average()))

    avg = average_meter.average()

    print('\n*\n'
          'RMSE={average.rmse:.3f}\n'
          'MAE={average.mae:.3f}\n'
          'Delta1={average.delta1:.3f}\n'
          'REL={average.absrel:.3f}\n'
          'Lg10={average.lg10:.3f}\n'
          't_GPU={time:.3f}\n'.format(average=avg, time=avg.gpu_time))

    if write_to_file:
        with open(test_csv, 'a') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writerow({
                'mse': avg.mse,
                'rmse': avg.rmse,
                'absrel': avg.absrel,
                'lg10': avg.lg10,
                'mae': avg.mae,
                'delta1': avg.delta1,
                'delta2': avg.delta2,
                'delta3': avg.delta3,
                'data_time': avg.data_time,
                'gpu_time': avg.gpu_time
            })

    return avg, img_merge
# --- Example 2 ---
def validate(val_loader, model, epoch, write_to_file=True):
    """Evaluate ``model`` on ``val_loader`` and log a per-frame error report.

    In addition to the usual averaged metrics, this variant writes
    ``<output_directory>/evaluation.csv`` with one line per frame: the
    maximum absolute depth error, the ground-truth depth at that pixel,
    the RMSE, the GPU inference time, and the frame index.

    Returns:
        Tuple ``(avg, img_merge)`` — averaged metrics and the stitched
        comparison image (``None`` if none was built).
    """
    average_meter = AverageMeter()
    model.eval()  # switch to evaluate mode
    end = time.time()
    # Fix: defined up front so the return cannot raise NameError on an
    # empty loader.
    img_merge = None
    eval_file = output_directory + '/evaluation.csv'
    # Fix: context manager guarantees the report file is closed even when
    # an exception interrupts evaluation (the original leaked the handle).
    with open(eval_file, "w+") as f:
        f.write("Max_Error,Depth,RMSE,GPU_TIME,Number_Of_Frame\r\n")
        for i, (input, target) in enumerate(val_loader):
            input, target = input.cuda(), target.cuda()
            # torch.cuda.synchronize()
            data_time = time.time() - end

            # compute output
            end = time.time()
            with torch.no_grad():
                pred = model(input)
            # torch.cuda.synchronize()
            gpu_time = time.time() - end

            # Locate the pixel with the largest absolute error for the report.
            abs_err = (target.data - pred.data).abs().cpu()
            max_err_ind = np.unravel_index(np.argmax(abs_err, axis=None),
                                           abs_err.shape)

            max_err_depth = target.data[max_err_ind]
            max_err = abs_err[max_err_ind]

            # measure accuracy and record loss
            result = Result()
            result.evaluate(pred.data, target.data)
            average_meter.update(result, gpu_time, data_time, input.size(0))
            end = time.time()

            f.write(
                f'{max_err},{max_err_depth},{result.rmse:.2f},{gpu_time},{i+1}\r\n'
            )
            # save 8 images for visualization
            skip = 50

            # NOTE(review): `rgb` is only bound for the 'rgb' modality; any
            # other modality would hit a NameError below — confirm this
            # variant is used with rgb input only.
            if args.modality == 'rgb':
                rgb = input

            if i == 0:
                img_merge = utils.merge_into_row_with_gt(rgb, target, pred,
                                                         (target - pred).abs())
            elif (i < 8 * skip) and (i % skip == 0):
                row = utils.merge_into_row_with_gt(rgb, target, pred,
                                                   (target - pred).abs())
                img_merge = utils.add_row(img_merge, row)
            elif i == 8 * skip:
                filename = output_directory + '/comparison_' + str(epoch) + '.png'
                utils.save_image(img_merge, filename)

            if (i + 1) % args.print_freq == 0:
                print('Test: [{0}/{1}]\t'
                      't_GPU={gpu_time:.3f}({average.gpu_time:.3f})\n\t'
                      'RMSE={result.rmse:.2f}({average.rmse:.2f}) '
                      'MAE={result.mae:.2f}({average.mae:.2f}) '
                      'Delta1={result.delta1:.3f}({average.delta1:.3f}) '
                      'REL={result.absrel:.3f}({average.absrel:.3f}) '
                      'Lg10={result.lg10:.3f}({average.lg10:.3f}) '.format(
                          i + 1,
                          len(val_loader),
                          gpu_time=gpu_time,
                          result=result,
                          average=average_meter.average()))
    avg = average_meter.average()

    print('\n*\n'
          'RMSE={average.rmse:.3f}\n'
          'MAE={average.mae:.3f}\n'
          'Delta1={average.delta1:.3f}\n'
          'REL={average.absrel:.3f}\n'
          'Lg10={average.lg10:.3f}\n'
          't_GPU={time:.3f}\n'.format(average=avg, time=avg.gpu_time))

    if write_to_file:
        with open(test_csv, 'a') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writerow({
                'mse': avg.mse,
                'rmse': avg.rmse,
                'absrel': avg.absrel,
                'lg10': avg.lg10,
                'mae': avg.mae,
                'delta1': avg.delta1,
                'delta2': avg.delta2,
                'delta3': avg.delta3,
                'data_time': avg.data_time,
                'gpu_time': avg.gpu_time
            })
    return avg, img_merge
# --- Example 3 ---
def validate(val_loader, model, epoch, write_to_file=True, logger=None):
    """Evaluate on ``val_loader`` with per-condition (day/night, sun/rain)
    metric breakdowns and optional HDF5 dumping of qualitative results.

    Args:
        val_loader: loader yielding either ``(inputs, labels)`` tuples or,
            for the "nuscenes" dataset, dict batches with "inputs"/"labels"
            (and, as used below, "daynight_info", "lidar_depth",
            "radar_depth") keys.
        model: network to evaluate; for architectures in
            ``multistage_group`` the output is a dict with "stage1" and
            "stage2" predictions.
        epoch: epoch index for scalar summaries and the comparison filename.
        write_to_file: when True, append averaged metrics to ``test_csv``.
        logger: summary writer; when ``None``, qualitative results are
            written to ``results.h5`` instead of logging scalars.

    Returns:
        Tuple ``(avg, img_merge)``.
    """
    average_meter = AverageMeter()
    if args.arch in multistage_group:
        average_meter_stage1 = AverageMeter()

    # Include daynight info and rain condition
    avg_meter_day = AverageMeter()
    avg_meter_night = AverageMeter()

    # day, night, sun, rain combinations
    avg_meter_day_sun = AverageMeter()
    avg_meter_day_rain = AverageMeter()
    avg_meter_night_sun = AverageMeter()
    avg_meter_night_rain = AverageMeter()

    # sun and rain
    avg_meter_sun = AverageMeter()
    avg_meter_rain = AverageMeter()

    model.eval() # switch to evaluate mode
    end = time.time()

    # Without a logger, qualitative results go to an HDF5 file instead.
    if logger is None:
        import h5py
        output_path = os.path.join(output_directory, "results.h5")
        h5_writer = h5py.File(output_path, "w", libver="latest", swmr=True)

    for i, data in enumerate(val_loader):
        # Add compatibility for nuscenes
        if args.data != "nuscenes":
            inputs, target = data[0].cuda(), data[1].cuda()
        else:
            inputs, target = data["inputs"].cuda(), data["labels"].cuda()

        torch.cuda.synchronize()
        data_time = time.time() - end

        # Compute output
        end = time.time()
        with torch.no_grad():
            if args.arch in multistage_group:
                pred_ = model(inputs)
                pred1 = pred_["stage1"]
                pred = pred_["stage2"]
            else:
                pred = model(inputs)
                pred_ = None

        torch.cuda.synchronize()
        gpu_time = time.time() - end

        # Record for qualitative results (every 5th sample).
        # NOTE(review): this branch indexes data["inputs"], data["lidar_depth"],
        # data["radar_depth"], which only exist for dict batches (nuscenes) —
        # confirm a logger is always supplied for other datasets.
        if (logger is None) and (i % 5 == 0):
            pred_np = {}
            if pred_ is None:
                pred_np = pred.cpu().numpy()
            else:
                for key in pred_.keys():
                    pred_np[key] = pred_[key][0, ...].cpu().numpy()
            res = {
                "inputs": data["inputs"][0, ...].cpu().numpy(),
                "lidar_depth": data["lidar_depth"][0, ...].cpu().numpy(),
                "radar_depth": data["radar_depth"][0, ...].cpu().numpy(),
                "pred": pred_np
            }
            file_key = "%05d"%(i)
            f_group = h5_writer.create_group(file_key)
            # Store data
            # NOTE(review): the inner loop rebinds `key`, shadowing the outer
            # loop variable. Per-item behavior is unaffected (only one branch
            # runs per item), but confirm the "*"-suffix collision handling
            # for nested dict keys is intended.
            for key, output_data in res.items():
                if isinstance(output_data, dict):
                    for key, data_ in output_data.items():
                        if key in res.keys():
                            key = key + "*"
                        f_group.create_dataset(key, data=data_, compression="gzip")
                elif output_data is None:
                    pass
                else:
                    f_group.create_dataset(key, data=output_data, compression="gzip")

        # Measure accuracy and record loss
        result = Result()
        result.evaluate(pred.data, target.data)
        average_meter.update(result, gpu_time, data_time, inputs.size(0))
        if args.arch in multistage_group:
            result_stage1 = Result()
            result_stage1.evaluate(pred1.data, target.data)
            average_meter_stage1.update(result_stage1, gpu_time, data_time, inputs.size(0))
        end = time.time()

        # Record the day, night, rain info
        # The per-condition breakdown assumes batch size 1 (one tag/sample).
        assert inputs.size(0) == 1
        daynight_info = data["daynight_info"][0]
        if ("day" in daynight_info) and ("rain" in daynight_info):
            avg_meter_day_rain.update(result, gpu_time, data_time, inputs.size(0))
            avg_meter_day.update(result, gpu_time, data_time, inputs.size(0))
            avg_meter_rain.update(result, gpu_time, data_time, inputs.size(0))
        elif "day" in daynight_info:
            # "day" without "rain" is treated as sunny.
            avg_meter_day_sun.update(result, gpu_time, data_time, inputs.size(0))
            avg_meter_day.update(result, gpu_time, data_time, inputs.size(0))
            avg_meter_sun.update(result, gpu_time, data_time, inputs.size(0))

        if ("night" in daynight_info) and ("rain" in daynight_info):
            avg_meter_night_rain.update(result, gpu_time, data_time, inputs.size(0))
            avg_meter_night.update(result, gpu_time, data_time, inputs.size(0))
            avg_meter_rain.update(result, gpu_time, data_time, inputs.size(0))
        elif "night" in daynight_info:
            avg_meter_night_sun.update(result, gpu_time, data_time, inputs.size(0))
            avg_meter_night.update(result, gpu_time, data_time, inputs.size(0))
            avg_meter_sun.update(result, gpu_time, data_time, inputs.size(0))


        # save 8 images for visualization
        skip = 50
        if args.modality == 'd':
            img_merge = None
        else:
            if args.modality == 'rgb':
                rgb = inputs
            elif args.modality == 'rgbd':
                # rgbd: first 3 channels RGB, remaining channel(s) depth
                rgb = inputs[:,:3,:,:]
                depth = inputs[:,3:,:,:]

            if i == 0:
                if args.modality == 'rgbd':
                    img_merge = utils.merge_into_row_with_gt(rgb, depth, target, pred)
                else:
                    img_merge = utils.merge_into_row(rgb, target, pred)
            elif (i < 8*skip) and (i % skip == 0):
                if args.modality == 'rgbd':
                    row = utils.merge_into_row_with_gt(rgb, depth, target, pred)
                else:
                    row = utils.merge_into_row(rgb, target, pred)
                img_merge = utils.add_row(img_merge, row)
            elif i == 8*skip:
                filename = output_directory + '/comparison_' + str(epoch) + '.png'
                utils.save_image(img_merge, filename)

        if (i+1) % args.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  't_GPU={gpu_time:.3f}({average.gpu_time:.3f})\n\t'
                  'RMSE={result.rmse:.2f}({average.rmse:.2f}) '
                  'MAE={result.mae:.2f}({average.mae:.2f}) '
                  'Delta1={result.delta1:.3f}({average.delta1:.3f}) '
                  'REL={result.absrel:.3f}({average.absrel:.3f}) '
                  'Lg10={result.lg10:.3f}({average.lg10:.3f}) '.format(
                   i+1, len(val_loader), gpu_time=gpu_time, result=result, average=average_meter.average()))

    # Save the result to pkl file
    if logger is None:
        h5_writer.close()
    avg = average_meter.average()
    if args.arch in multistage_group:
        avg_stage1 = average_meter_stage1.average()
        if logger is not None:
            record_test_scalar_summary(avg_stage1, epoch, logger, "Test_stage1")

    # NOTE(review): the summary is printed twice below with slightly
    # different metric sets (delta2/delta3 vs MAE) — confirm the duplication
    # is intentional.
    print('\n*\n'
          'RMSE={average.rmse:.3f}\n'
          'Rel={average.absrel:.3f}\n'
          'Log10={average.lg10:.3f}\n'
          'Delta1={average.delta1:.3f}\n'
          'Delta2={average.delta2:.3f}\n'
          'Delta3={average.delta3:.3f}\n'
          't_GPU={time:.3f}\n'.format(
        average=avg, time=avg.gpu_time))

    if logger is not None:
        # Record summaries
        record_test_scalar_summary(avg, epoch, logger, "Test")

    print('\n*\n'
        'RMSE={average.rmse:.3f}\n'
        'MAE={average.mae:.3f}\n'
        'Delta1={average.delta1:.3f}\n'
        'REL={average.absrel:.3f}\n'
        'Lg10={average.lg10:.3f}\n'
        't_GPU={time:.3f}\n'.format(
        average=avg, time=avg.gpu_time))

    if write_to_file:
        with open(test_csv, 'a') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writerow({'mse': avg.mse, 'rmse': avg.rmse, 'absrel': avg.absrel, 'lg10': avg.lg10,
                'mae': avg.mae, 'delta1': avg.delta1, 'delta2': avg.delta2, 'delta3': avg.delta3,
                'data_time': avg.data_time, 'gpu_time': avg.gpu_time})

    return avg, img_merge
# --- Example 4 ---
def validate(val_loader, model, epoch, write_to_file=True):
    """Evaluate with PnP-Depth: iteratively refine the network's
    intermediate representation against the sparse-depth input channel
    before producing the final prediction.

    Args/returns match the plain ``validate`` variant:
    returns ``(avg, img_merge)``.
    """
    average_meter = AverageMeter()
    model.eval()  # switch to evaluate mode
    end = time.time()
    for i, (input, target) in enumerate(val_loader):
        input, target = input.cuda(), target.cuda()
        torch.cuda.synchronize()
        data_time = time.time() - end

        # compute output
        end = time.time()
        ##############################################################
        ##             Start of PnP-Depth modification              ##
        ##############################################################
        # Original inference
        # NOTE(review): front(rear(input)) ordering looks reversed relative
        # to the rear(front(...)) split used below, although the trailing
        # comment claims equivalence to model(input) — confirm against the
        # model's API. Also, ori_pred is never used afterwards.
        with torch.no_grad():
            ori_pred = model.pnp_forward_front(model.pnp_forward_rear(
                input))  # equivalent to `ori_pred = model(input)`

        # Inference with PnP
        sparse_target = input[:, -1:]  # NOTE: written for rgbd input
        criterion = criteria.MaskedL1Loss().cuda(
        )  # NOTE: criterion function defined here only for clarity
        pnp_iters = 5  # number of iterations
        pnp_alpha = 0.01  # update/learning rate
        pnp_z = model.pnp_forward_front(input)
        for pnp_i in range(pnp_iters):
            if pnp_i != 0:
                # Signed-gradient step on the latent (iterative FGM update),
                # using the gradient computed in the previous iteration.
                pnp_z = pnp_z - pnp_alpha * torch.sign(pnp_z_grad)  # iFGM
            # Variable re-wraps the latent so gradients w.r.t. it can be
            # taken (legacy pre-0.4 PyTorch API; requires_grad_() is the
            # modern equivalent).
            pnp_z = Variable(pnp_z, requires_grad=True)
            pred = model.pnp_forward_rear(pnp_z)
            if pnp_i < pnp_iters - 1:
                # Masked L1 loss against the sparse depth channel drives
                # the refinement; `Grad` is presumably torch.autograd.grad —
                # confirm the import at file top.
                pnp_loss = criterion(pred, sparse_target)
                pnp_z_grad = Grad([pnp_loss], [pnp_z], create_graph=True)[0]
        ##############################################################
        ##              End of PnP-Depth modification               ##
        ##############################################################
        torch.cuda.synchronize()
        gpu_time = time.time() - end

        # measure accuracy and record loss
        result = Result()
        result.evaluate(pred.data, target.data)
        average_meter.update(result, gpu_time, data_time, input.size(0))
        end = time.time()

        # save 8 images for visualization
        skip = 50
        if args.modality == 'd':
            img_merge = None
        else:
            if args.modality == 'rgb':
                rgb = input
            elif args.modality == 'rgbd':
                # rgbd: first 3 channels RGB, remaining channel(s) depth
                rgb = input[:, :3, :, :]
                depth = input[:, 3:, :, :]

            if i == 0:
                if args.modality == 'rgbd':
                    img_merge = utils.merge_into_row_with_gt(
                        rgb, depth, target, pred)
                else:
                    img_merge = utils.merge_into_row(rgb, target, pred)
            elif (i < 8 * skip) and (i % skip == 0):
                if args.modality == 'rgbd':
                    row = utils.merge_into_row_with_gt(rgb, depth, target,
                                                       pred)
                else:
                    row = utils.merge_into_row(rgb, target, pred)
                img_merge = utils.add_row(img_merge, row)
            elif i == 8 * skip:
                filename = output_directory + '/comparison_' + str(
                    epoch) + '.png'
                utils.save_image(img_merge, filename)

        if (i + 1) % args.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  't_GPU={gpu_time:.3f}({average.gpu_time:.3f})\n\t'
                  'RMSE={result.rmse:.2f}({average.rmse:.2f}) '
                  'MAE={result.mae:.2f}({average.mae:.2f}) '
                  'Delta1={result.delta1:.3f}({average.delta1:.3f}) '
                  'REL={result.absrel:.3f}({average.absrel:.3f}) '
                  'Lg10={result.lg10:.3f}({average.lg10:.3f}) '.format(
                      i + 1,
                      len(val_loader),
                      gpu_time=gpu_time,
                      result=result,
                      average=average_meter.average()))

    avg = average_meter.average()

    print('\n*\n'
          'RMSE={average.rmse:.3f}\n'
          'MAE={average.mae:.3f}\n'
          'Delta1={average.delta1:.3f}\n'
          'REL={average.absrel:.3f}\n'
          'Lg10={average.lg10:.3f}\n'
          't_GPU={time:.3f}\n'.format(average=avg, time=avg.gpu_time))

    if write_to_file:
        with open(test_csv, 'a') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writerow({
                'mse': avg.mse,
                'rmse': avg.rmse,
                'absrel': avg.absrel,
                'lg10': avg.lg10,
                'mae': avg.mae,
                'delta1': avg.delta1,
                'delta2': avg.delta2,
                'delta3': avg.delta3,
                'data_time': avg.data_time,
                'gpu_time': avg.gpu_time
            })
    return avg, img_merge
# --- Example 5 ---
def _show_validation_figure(input, target, pred, result, gpu_time, data_time):
    """Pop up an interactive comparison figure and block until a button press.

    Layout depends on ``args.modality``: 'd' shows a 1x3 grid (sparse input,
    ground truth, prediction); 'rgbd' shows a 2x2 grid that adds the RGB
    input. The caller guards against other modalities.
    """
    fig = plt.figure()
    # NOTE(review): 60/(gpu+data) is labelled FPS but reads as
    # "frames per minute" unless the times are in minutes — confirm intent;
    # kept as-is to preserve the displayed number.
    fig.suptitle(
        'Error Percentage ' + str(round(result.absrel * 100, 2)) +
        ' GPU TIME ' + str(round(gpu_time, 2)) + '   FPS ' +
        str(round(60.0 / (gpu_time + data_time), 2)),
        fontsize=16)

    if args.modality == 'd':
        plt.subplot(131)
        plt.title("SPARSE (Input)")
        plt.axis('off')
        plt.imshow(np.squeeze(input.cpu().numpy()),
                   interpolation='nearest')

        plt.subplot(132)
        plt.title("TARGET (Ground Truth)")
        plt.axis('off')
        plt.imshow(np.squeeze(target.cpu().numpy()),
                   interpolation='nearest')
        plt.colorbar(fraction=0.1, pad=0.04)

        plt.subplot(133)
        plt.title("PREDICTED")
        plt.axis('off')
        plt.imshow(np.squeeze(pred.cpu().numpy()),
                   interpolation='nearest')
        plt.colorbar(fraction=0.1, pad=0.04)
    else:  # 'rgbd'
        # First three channels are RGB: convert CHW float -> HWC uint8 image.
        rgb1 = 255 * np.transpose(
            np.squeeze(input[:, :3, :, :].cpu().numpy()),
            (1, 2, 0))  # H, W, C
        rgb1 = Image.fromarray(rgb1.astype('uint8'))
        plt.subplot(221)
        plt.title("RGB (Input)")
        plt.axis('off')
        plt.imshow(rgb1)

        plt.subplot(222)
        plt.title("SPARSE (Input)")
        plt.axis('off')
        plt.imshow(np.squeeze(input[:, 3:, :, :].cpu().numpy()),
                   interpolation='nearest')

        plt.subplot(223)
        plt.title("TARGET (Ground Truth)")
        plt.axis('off')
        plt.imshow(np.squeeze(target.cpu().numpy()),
                   interpolation='nearest')
        plt.colorbar(fraction=0.1, pad=0.04)

        plt.subplot(224)
        plt.title("PREDICTED")
        plt.axis('off')
        plt.imshow(np.squeeze(pred.cpu().numpy()),
                   interpolation='nearest')
        plt.colorbar(fraction=0.1, pad=0.04)

    plt.waitforbuttonpress()
    plt.close()


def validate(val_loader, model, epoch, write_to_file=True):
    """Run one evaluation pass, optionally showing interactive figures.

    When the module-level ``visualize`` flag is set and the modality is
    'd' or 'rgbd', each sample pops up a blocking matplotlib comparison
    figure (see ``_show_validation_figure``).

    Returns:
        Tuple ``(avg, img_merge)`` — averaged ``Result`` metrics and the
        stitched comparison image (``None`` when none was produced).
    """
    average_meter = AverageMeter()
    model.eval()  # switch to evaluate mode
    # Fix: initialized up front so the return cannot raise NameError when
    # the loader is empty.
    img_merge = None
    end = time.time()
    for i, (input, target) in enumerate(val_loader):
        input, target = input.cuda(), target.cuda()
        torch.cuda.synchronize()
        data_time = time.time() - end

        # compute output
        end = time.time()
        with torch.no_grad():
            pred = model(input)
        torch.cuda.synchronize()
        gpu_time = time.time() - end

        # measure accuracy and record loss
        result = Result()
        result.evaluate(pred.data, target.data)
        average_meter.update(result, gpu_time, data_time, input.size(0))
        end = time.time()

        # Interactive inspection (duplicated plotting code factored into a
        # helper; only 'd' and 'rgbd' modalities produced a figure before).
        if visualize and args.modality in ('d', 'rgbd'):
            _show_validation_figure(input, target, pred, result, gpu_time,
                                    data_time)

        # save 8 images for visualization
        skip = 50
        if args.modality == 'd':
            img_merge = None
        else:
            if args.modality == 'rgb':
                rgb = input
            elif args.modality == 'rgbd':
                rgb = input[:, :3, :, :]
                depth = input[:, 3:, :, :]

            if i == 0:
                if args.modality == 'rgbd':
                    img_merge = utils.merge_into_row_with_gt(
                        rgb, depth, target, pred)
                else:
                    img_merge = utils.merge_into_row(rgb, target, pred)
            elif (i < 8 * skip) and (i % skip == 0):
                if args.modality == 'rgbd':
                    row = utils.merge_into_row_with_gt(rgb, depth, target,
                                                       pred)
                else:
                    row = utils.merge_into_row(rgb, target, pred)
                img_merge = utils.add_row(img_merge, row)
            elif i == 8 * skip:
                filename = output_directory + '/comparison_' + str(
                    epoch) + '.png'
                utils.save_image(img_merge, filename)

        if (i + 1) % args.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  't_GPU={gpu_time:.3f}({average.gpu_time:.3f})\n\t'
                  'RMSE={result.rmse:.2f}({average.rmse:.2f}) '
                  'MAE={result.mae:.2f}({average.mae:.2f}) '
                  'Delta1={result.delta1:.3f}({average.delta1:.3f}) '
                  'REL={result.absrel:.3f}({average.absrel:.3f}) '
                  'Lg10={result.lg10:.3f}({average.lg10:.3f}) '.format(
                      i + 1,
                      len(val_loader),
                      gpu_time=gpu_time,
                      result=result,
                      average=average_meter.average()))

    avg = average_meter.average()

    print('\n*\n'
          'RMSE={average.rmse:.3f}\n'
          'MAE={average.mae:.3f}\n'
          'Delta1={average.delta1:.3f}\n'
          'REL={average.absrel:.3f}\n'
          'Lg10={average.lg10:.3f}\n'
          't_GPU={time:.3f}\n'.format(average=avg, time=avg.gpu_time))

    if write_to_file:
        with open(test_csv, 'a') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writerow({
                'mse': avg.mse,
                'rmse': avg.rmse,
                'absrel': avg.absrel,
                'lg10': avg.lg10,
                'mae': avg.mae,
                'delta1': avg.delta1,
                'delta2': avg.delta2,
                'delta3': avg.delta3,
                'data_time': avg.data_time,
                'gpu_time': avg.gpu_time
            })

    return avg, img_merge