Example #1
def validate(test_set, scale_z, obj_file='./prediction', save=True):
    with torch.no_grad():
        unet.eval()
        all_pred = np.array([], dtype=int)
        valLabels = np.array([], dtype=int)
        val_dt = data.kits_dataset(test_set)
        print("val size:", len(val_dt))
        print("batch size:", data.batch_size)
        val_data_loader = torch.utils.data.DataLoader(val_dt, batch_size=data.batch_size,
                                                      collate_fn=data.val_point_transform,
                                                      shuffle=False)
        num_batches = len(val_data_loader)
        for i, batch in enumerate(val_data_loader):
            print(">>>Processing batch: {}/{}".format(i + 1, num_batches))
            if use_cuda:
                batch['x'][1] = batch['x'][1].cuda()
            predictions = unet(batch['x'])
            predictions = predictions.cpu().numpy()
            predictions = np.argmax(predictions, axis=1)
            all_pred = np.concatenate((all_pred, predictions))
            valLabels = np.concatenate((valLabels, batch['y']))

            # save predicted obj
            if save:
                xyz = batch['x'][0].cpu().numpy()
                rgb = batch['x'][1].cpu().numpy()
                inds = (xyz[:, 3] == 0)  # get 1st scene in the batch
                xyz = xyz[inds]
                xyz[:, 0] = (xyz[:, 0].astype(np.float32) / scale_z).astype(int)  # np.int was removed in NumPy 1.24
                rgb = rgb[inds]
                pred = predictions[inds]
                out_file = obj_file + str(i) + '.obj'
                save_to_obj(xyz, rgb, pred, out_file)

        class_ious = iou.evaluate(all_pred, valLabels)
        return class_ious
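
The function above relies on a module-level save_to_obj helper that is not shown in the source. A minimal sketch of what it might look like (hypothetical; assumes xyz rows are (c0, c1, c2, batch_idx) and that the target viewer accepts per-vertex colors appended after the coordinates):

def save_to_obj(xyz, rgb, pred, out_file):
    # Hypothetical helper: write each point as an OBJ vertex line
    # "v x y z r g b"; pred could be used to recolor points by label instead.
    with open(out_file, 'w') as f:
        for coords, color in zip(xyz, rgb):
            f.write('v {} {} {} {} {} {}\n'.format(
                coords[0], coords[1], coords[2],
                color[0], color[1], color[2]))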
Example #2
        loss.backward()
        optimizer.step()
    print(epoch, 'Train loss', train_loss / (i + 1), 'MegaMulAdd=',
          scn.forward_pass_multiplyAdd_count / len(data.train) / 1e6,
          'MegaHidden', scn.forward_pass_hidden_states / len(data.train) / 1e6,
          'time=',
          time.time() - start, 's')
    scn.checkpoint_save(unet, exp_name, 'unet', epoch, use_cuda)

    if epoch % 100 == 1:
        with torch.no_grad():
            unet.eval()
            store = torch.zeros(data.valOffsets[-1], 20)
            scn.forward_pass_multiplyAdd_count = 0
            scn.forward_pass_hidden_states = 0
            start = time.time()
            for rep in range(1, 1 + data.val_reps):
                for i, batch in enumerate(data.val_data_loader):
                    if use_cuda:
                        batch['x'][1] = batch['x'][1].cuda()
                        batch['y'] = batch['y'].cuda()
                    predictions = unet(batch['x'])
                    store.index_add_(0, batch['point_ids'], predictions.cpu())
                print(epoch, rep, 'Val MegaMulAdd=',
                      scn.forward_pass_multiplyAdd_count / len(data.val) / 1e6,
                      'MegaHidden',
                      scn.forward_pass_hidden_states / len(data.val) / 1e6,
                      'time=',
                      time.time() - start, 's')
                iou.evaluate(store.max(1)[1].numpy(), data.valLabels)
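
This snippet begins mid-epoch, at the backward pass. A minimal sketch of the loop head it presumably sits in (names match the snippet; the cross-entropy loss is an assumption):

import time
import torch.nn.functional as F

for epoch in range(1, num_epochs + 1):
    unet.train()
    scn.forward_pass_multiplyAdd_count = 0
    scn.forward_pass_hidden_states = 0
    start = time.time()
    train_loss = 0
    for i, batch in enumerate(data.train_data_loader):
        optimizer.zero_grad()
        if use_cuda:
            batch['x'][1] = batch['x'][1].cuda()
            batch['y'] = batch['y'].cuda()
        predictions = unet(batch['x'])
        loss = F.cross_entropy(predictions, batch['y'])  # assumed loss
        train_loss += loss.item()
        loss.backward()   # this is where the original snippet picks up
        optimizer.step()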
Example #3
def evaluate(save_ply=False, prefix=""):
    with torch.no_grad():
        unet.eval()
        store = torch.zeros(data.valOffsets[-1], 20)
        scn.forward_pass_multiplyAdd_count = 0
        scn.forward_pass_hidden_states = 0
        start = time.time()
        for rep in range(1, 1 + data.val_reps):
            locs = None
            for i, batch in enumerate(data.val_data_loader):
                if use_cuda:
                    batch['x'][1] = batch['x'][1].cuda()
                    batch['y'] = batch['y'].cuda()
                predictions = unet(batch['x'])
                predictions = predictions.cpu()
                store.index_add_(0, batch['point_ids'], predictions)


                # xyz = data.val[idx][0] #from original ply file

                # from distorted xyz used when training
                batch_locs = batch['x'][0].numpy()

                print(len(batch_locs))

                if locs is None:
                    locs = batch_locs
                else:
                    locs = np.concatenate((locs, batch_locs))  # np.concatenate returns a new array; the result must be assigned

            print('infer', rep, 'Val MegaMulAdd=',
                  scn.forward_pass_multiplyAdd_count / len(data.val) / 1e6,
                  'MegaHidden',
                  scn.forward_pass_hidden_states / len(data.val) / 1e6,
                  'time=',
                  time.time() - start, 's')

            predLabels = store.max(1)[1].numpy()
            print(predLabels)
            iou.evaluate(predLabels, data.valLabels)

            if save_ply:
                label_id_to_color = batch['label_id_to_color']  # reuses the last batch from the loop above
                unknown_color = [1, 1, 1]
                colors = np.array([
                    label_id_to_color[label_id]
                    if label_id in label_id_to_color else unknown_color
                    for label_id in predLabels
                ])

                ori_points = []

                for idx, idx_val in enumerate(data.val):
                    ori_points.extend(idx_val[0])

                idx_data = {}
                for loc, color, ori_point in zip(locs, colors, ori_points):
                    idx = loc[3]
                    point = loc[0:3]

                    if idx not in idx_data:
                        idx_data[idx] = {}
                        idx_data[idx]['points'] = []
                        idx_data[idx]['colors'] = []
                        idx_data[idx]['ori_points'] = []

                    idx_data[idx]['points'].append(point)
                    idx_data[idx]['colors'].append(color)
                    idx_data[idx]['ori_points'].append(ori_point)

                for idx, datum in idx_data.items():
                    points = datum['points']
                    colors = datum['colors']
                    ori_points = datum['ori_points']

                    pcd = PointCloud()
                    # pcd.points = Vector3dVector(points) #the ordering seems to be wrong :/
                    pcd.points = Vector3dVector(ori_points)
                    pcd.colors = Vector3dVector(colors)
                    write_point_cloud(
                        "./ply/{prefix}batch_{rep}_{idx}_.ply".format(
                            prefix=prefix, rep=rep, idx=idx), pcd)
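
PointCloud, Vector3dVector, and write_point_cloud are top-level names from the legacy Open3D API (roughly open3d <= 0.7, e.g. from open3d import PointCloud). On current Open3D releases the same save step is namespaced:

import numpy as np
import open3d as o3d

pcd = o3d.geometry.PointCloud()
pcd.points = o3d.utility.Vector3dVector(np.asarray(ori_points, dtype=np.float64))
pcd.colors = o3d.utility.Vector3dVector(np.asarray(colors, dtype=np.float64))  # RGB in [0, 1]
o3d.io.write_point_cloud("./ply/example.ply", pcd)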
Example #4
                    # save predicted obj
                    if save:
                        xyz = batch['x'][0].cpu().numpy()
                        rgb = batch['x'][1].cpu().numpy()
                        inds = (xyz[:, 3] == 0)  # get 1st scene in the batch
                        xyz = xyz[inds]
                        xyz[:, 0] = (xyz[:, 0].astype(np.float32) / data.scale_z).astype(int)  # np.int was removed in NumPy 1.24
                        rgb = rgb[inds]
                        pred = predictions[inds]
                        data.save_to_obj(xyz, rgb, pred,
                                         './pred_epoch' + str(epoch) + '_batch' + str(i) + '.obj')

                s = 'EPOCH: {}, validation: time = {:.3f}, timestamp = {}'. \
                    format(epoch, time.time() - start, datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                print(s)
                class_ious = iou.evaluate(all_pred, valLabels)

                if data.num_cl == 2:
                    class_names = ('tissue', 'kidney')
                elif data.num_cl == 3:
                    class_names = ('tissue', 'kidney', 'tumor')
                ious = ', '.join(str(class_ious[name][0]) for name in class_names)
                logfile.write(s + '\nclass ious: ' + ious + '\n')

                writer.add_scalar("iou/tissue", class_ious['tissue'][0], iteration)
                writer.add_scalar("iou/kidney", class_ious['kidney'][0], iteration)
                if data.num_cl == 3:
                    writer.add_scalar("iou/tumor", class_ious['tumor'][0], iteration)