Example #1
def vis_sequence(cfg, sequence, mv):

    seq_data = parse_npz(sequence)
    n_comps = seq_data['n_comps']
    gender = seq_data['gender']

    T = seq_data.n_frames

    # the dataset root; mesh paths in seq_data are stored relative to it
    grab_path = cfg.grab_path

    sbj_mesh = os.path.join(grab_path, '..', seq_data.body.vtemp)
    sbj_vtemp = np.array(Mesh(filename=sbj_mesh).vertices)

    sbj_m = smplx.create(model_path=cfg.model_path,
                         model_type='smplx',
                         gender=gender,
                         num_pca_comps=n_comps,
                         v_template=sbj_vtemp,
                         batch_size=T)

    sbj_parms = params2torch(seq_data.body.params)
    verts_sbj = to_cpu(sbj_m(**sbj_parms).vertices)

    obj_mesh = os.path.join(grab_path, '..', seq_data.object.object_mesh)
    obj_mesh = Mesh(filename=obj_mesh)
    obj_vtemp = np.array(obj_mesh.vertices)
    obj_m = ObjectModel(v_template=obj_vtemp,
                        batch_size=T)
    obj_parms = params2torch(seq_data.object.params)
    verts_obj = to_cpu(obj_m(**obj_parms).vertices)

    table_mesh = os.path.join(grab_path, '..', seq_data.table.table_mesh)
    table_mesh = Mesh(filename=table_mesh)
    table_vtemp = np.array(table_mesh.vertices)
    table_m = ObjectModel(v_template=table_vtemp,
                          batch_size=T)
    table_parms = params2torch(seq_data.table.params)
    verts_table = to_cpu(table_m(**table_parms).vertices)

    # render every 4th frame; contact vertices are highlighted in red
    skip_frame = 4
    for frame in range(0, T, skip_frame):
        o_mesh = Mesh(vertices=verts_obj[frame], faces=obj_mesh.faces, vc=colors['yellow'])
        o_mesh.set_vertex_colors(vc=colors['red'], vertex_ids=seq_data['contact']['object'][frame] > 0)

        s_mesh = Mesh(vertices=verts_sbj[frame], faces=sbj_m.faces, vc=colors['pink'], smooth=True)
        s_mesh.set_vertex_colors(vc=colors['red'], vertex_ids=seq_data['contact']['body'][frame] > 0)

        t_mesh = Mesh(vertices=verts_table[frame], faces=table_mesh.faces, vc=colors['white'])

        mv.set_static_meshes([o_mesh, s_mesh, t_mesh])
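
A minimal driver sketch for vis_sequence. The MeshViewer class and the config fields are assumptions based on the GRAB tooling (the function itself reads only cfg.grab_path and cfg.model_path); all paths are placeholders.

import glob
from types import SimpleNamespace

# Hypothetical config; only the fields read above are provided.
cfg = SimpleNamespace(
    grab_path='/path/to/GRAB/grab',      # placeholder
    model_path='/path/to/smplx/models',  # placeholder
)

mv = MeshViewer(offscreen=False)  # assumed viewer from the GRAB tools
for sequence in glob.glob(cfg.grab_path + '/*/*.npz'):
    vis_sequence(cfg, sequence, mv)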
Example #2

    def forward(self, x, targets=None):
        img_dim = x.shape[2]
        loss = 0
        layer_outputs, yolo_outputs = [], []
        for i, (module_def, module) in enumerate(zip(self.module_defs, self.module_list)):
            if module_def["type"] in ["convolutional", "upsample", "maxpool"]:
                x = module(x)
            elif module_def["type"] == "route":
                # concatenate feature maps from the listed layers along the channel dim
                x = torch.cat([
                    layer_outputs[int(layer_i)]
                    for layer_i in module_def["layers"].split(",")
                ], 1)
            elif module_def["type"] == "shortcut":
                # residual connection: add the output of an earlier layer
                layer_i = int(module_def["from"])
                x = layer_outputs[-1] + layer_outputs[layer_i]
            elif module_def["type"] == "yolo":
                x, layer_loss = module[0](x, targets, img_dim)
                loss += layer_loss
                yolo_outputs.append(x)
            layer_outputs.append(x)
        yolo_outputs = to_cpu(torch.cat(yolo_outputs, 1))
        return yolo_outputs if targets is None else (loss, yolo_outputs)
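
A small self-contained illustration of how the "route" and "shortcut" branches index layer_outputs, including the negative (relative) indices that Darknet cfg files use; tensor shapes are toy values:

import torch

# Toy feature maps standing in for layer_outputs (shapes are illustrative).
layer_outputs = [torch.randn(1, c, 8, 8) for c in (64, 32, 64)]

# route "layers=-1,-3": concatenate along the channel dimension
routed = torch.cat([layer_outputs[int(i)] for i in "-1,-3".split(",")], 1)
print(routed.shape)  # torch.Size([1, 128, 8, 8])

# shortcut "from=-3": elementwise sum with the layer three steps back
short = layer_outputs[-1] + layer_outputs[int("-3")]
print(short.shape)   # torch.Size([1, 64, 8, 8])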
Example #3
def save_grab_vertices(cfg, logger=None, **params):

    grab_path = cfg.grab_path
    # fall back to grab_path before creating the output directory
    out_path = cfg.out_path if cfg.out_path is not None else grab_path
    makepath(out_path)

    if logger is None:
        logger = makelogger(log_dir=os.path.join(out_path,
                                                 'grab_preprocessing.log'),
                            mode='a').info
    logger('Starting to get vertices for GRAB!')

    all_seqs = glob.glob(grab_path + '/*/*.npz')

    logger('Total sequences: %d' % len(all_seqs))

    for sequence in tqdm(all_seqs):

        outfname = makepath(sequence.replace(grab_path, out_path).replace(
            '.npz', '_verts_body.npz'),
                            isfile=True)

        action_name = os.path.basename(sequence)
        if os.path.exists(outfname):
            logger('Results for sequence %s already exist.' % action_name)
            continue
        logger('Processing data for sequence %s.' % action_name)

        seq_data = parse_npz(sequence)
        n_comps = seq_data['n_comps']
        gender = seq_data['gender']

        T = seq_data.n_frames

        if cfg.save_body_verts:

            sbj_mesh = os.path.join(grab_path, '..', seq_data.body.vtemp)
            sbj_vtemp = np.array(Mesh(filename=sbj_mesh).vertices)

            sbj_m = smplx.create(model_path=cfg.model_path,
                                 model_type='smplx',
                                 gender=gender,
                                 num_pca_comps=n_comps,
                                 v_template=sbj_vtemp,
                                 batch_size=T)

            sbj_parms = params2torch(seq_data.body.params)
            verts_sbj = to_cpu(sbj_m(**sbj_parms).vertices)
            np.savez_compressed(outfname, verts_body=verts_sbj)

        if cfg.save_lhand_verts:
            lh_mesh = os.path.join(grab_path, '..', seq_data.lhand.vtemp)
            lh_vtemp = np.array(Mesh(filename=lh_mesh).vertices)

            lh_m = smplx.create(model_path=cfg.model_path,
                                model_type='mano',
                                is_rhand=False,
                                v_template=lh_vtemp,
                                num_pca_comps=n_comps,
                                flat_hand_mean=True,
                                batch_size=T)

            lh_parms = params2torch(seq_data.lhand.params)
            verts_lh = to_cpu(lh_m(**lh_parms).vertices)
            np.savez_compressed(outfname.replace('_verts_body.npz',
                                                 '_verts_lhand.npz'),
                                verts_body=verts_lh)

        if cfg.save_rhand_verts:
            rh_mesh = os.path.join(grab_path, '..', seq_data.rhand.vtemp)
            rh_vtemp = np.array(Mesh(filename=rh_mesh).vertices)

            rh_m = smplx.create(model_path=cfg.model_path,
                                model_type='mano',
                                is_rhand=True,
                                v_template=rh_vtemp,
                                num_pca_comps=n_comps,
                                flat_hand_mean=True,
                                batch_size=T)

            rh_parms = params2torch(seq_data.rhand.params)
            verts_rh = to_cpu(rh_m(**rh_parms).vertices)
            np.savez_compressed(outfname.replace('_verts_body.npz',
                                                 '_verts_rhand.npz'),
                                verts_body=verts_rh)

        if cfg.save_object_verts:

            obj_mesh = os.path.join(grab_path, '..',
                                    seq_data.object.object_mesh)
            obj_vtemp = np.array(Mesh(filename=obj_mesh).vertices)
            sample_id = np.random.choice(obj_vtemp.shape[0],
                                         cfg.n_verts_sample,
                                         replace=False)
            obj_m = ObjectModel(v_template=obj_vtemp[sample_id], batch_size=T)
            obj_parms = params2torch(seq_data.object.params)
            verts_obj = to_cpu(obj_m(**obj_parms).vertices)
            np.savez_compressed(outfname.replace('_verts_body.npz',
                                                 '_verts_object.npz'),
                                verts_object=verts_obj)

    logger('Processing finished')
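
A minimal driver sketch for save_grab_vertices, assuming cfg can be any attribute-style container; the field names mirror those read in the function body, and every path is a placeholder:

from types import SimpleNamespace

# Hypothetical config; attribute names follow save_grab_vertices above.
cfg = SimpleNamespace(
    grab_path='/path/to/GRAB/grab',      # placeholder
    out_path='/path/to/output',          # placeholder
    model_path='/path/to/smplx/models',  # placeholder
    save_body_verts=True,
    save_lhand_verts=False,
    save_rhand_verts=False,
    save_object_verts=True,
    n_verts_sample=512,
)

save_grab_vertices(cfg)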
Example #4

    def forward(self, x, targets=None, img_dim=None):

        # Tensor constructor matching the input's device (CPU or CUDA)
        FloatTensor = torch.cuda.FloatTensor if x.is_cuda else torch.FloatTensor

        self.img_dim = img_dim
        num_samples = x.size(0)
        grid_size = x.size(2)

        # reshape to (batch, anchors, grid, grid, 5 + num_classes)
        prediction = (x.view(num_samples, self.num_anchors,
                             self.num_classes + 5, grid_size,
                             grid_size).permute(0, 1, 3, 4, 2).contiguous())

        # Get outputs
        x = torch.sigmoid(prediction[..., 0])  # Center x
        y = torch.sigmoid(prediction[..., 1])  # Center y
        w = prediction[..., 2]  # Width
        h = prediction[..., 3]  # Height
        pred_conf = torch.sigmoid(prediction[..., 4])  # Conf
        pred_cls = torch.sigmoid(prediction[..., 5:])  # Cls pred.

        # If grid size does not match current we compute new offsets
        if grid_size != self.grid_size:
            self.compute_grid_offsets(grid_size, cuda=x.is_cuda)

        # Add offset and scale with anchors
        pred_boxes = FloatTensor(prediction[..., :4].shape)
        pred_boxes[..., 0] = x.data + self.grid_x
        pred_boxes[..., 1] = y.data + self.grid_y
        pred_boxes[..., 2] = torch.exp(w.data) * self.anchor_w
        pred_boxes[..., 3] = torch.exp(h.data) * self.anchor_h

        output = torch.cat(
            (
                pred_boxes.view(num_samples, -1, 4) * self.stride,
                pred_conf.view(num_samples, -1, 1),
                pred_cls.view(num_samples, -1, self.num_classes),
            ),
            -1,
        )

        if targets is None:
            return output, 0
        else:
            iou_scores, class_mask, obj_mask, noobj_mask, tx, ty, tw, th, tcls, tconf = build_targets(
                pred_boxes=pred_boxes,
                pred_cls=pred_cls,
                target=targets,
                anchors=self.scaled_anchors,
                ignore_thres=self.ignore_thres,
            )

            obj_mask = obj_mask.bool()  # convert int8 to bool
            noobj_mask = noobj_mask.bool()  # convert int8 to bool

            # Loss : Mask outputs to ignore non-existing objects (except with conf. loss)
            loss_x = self.mse_loss(x[obj_mask], tx[obj_mask])
            loss_y = self.mse_loss(y[obj_mask], ty[obj_mask])
            loss_w = self.mse_loss(w[obj_mask], tw[obj_mask])
            loss_h = self.mse_loss(h[obj_mask], th[obj_mask])
            loss_conf_obj = self.bce_loss(pred_conf[obj_mask], tconf[obj_mask])
            loss_conf_noobj = self.bce_loss(pred_conf[noobj_mask],
                                            tconf[noobj_mask])
            loss_conf = self.obj_scale * loss_conf_obj + self.noobj_scale * loss_conf_noobj
            loss_cls = self.bce_loss(pred_cls[obj_mask], tcls[obj_mask])
            total_loss = loss_x + loss_y + loss_w + loss_h + loss_conf + loss_cls

            # Metrics
            cls_acc = 100 * class_mask[obj_mask].mean()
            conf_obj = pred_conf[obj_mask].mean()
            conf_noobj = pred_conf[noobj_mask].mean()
            conf50 = (pred_conf > 0.5).float()
            iou50 = (iou_scores > 0.5).float()
            iou75 = (iou_scores > 0.75).float()
            detected_mask = conf50 * class_mask * tconf
            precision = torch.sum(
                iou50 * detected_mask) / (conf50.sum() + 1e-16)
            recall50 = torch.sum(
                iou50 * detected_mask) / (obj_mask.sum() + 1e-16)
            recall75 = torch.sum(
                iou75 * detected_mask) / (obj_mask.sum() + 1e-16)

            self.metrics = {
                "loss": to_cpu(total_loss).item(),
                "x": to_cpu(loss_x).item(),
                "y": to_cpu(loss_y).item(),
                "w": to_cpu(loss_w).item(),
                "h": to_cpu(loss_h).item(),
                "conf": to_cpu(loss_conf).item(),
                "cls": to_cpu(loss_cls).item(),
                "cls_acc": to_cpu(cls_acc).item(),
                "recall50": to_cpu(recall50).item(),
                "recall75": to_cpu(recall75).item(),
                "precision": to_cpu(precision).item(),
                "conf_obj": to_cpu(conf_obj).item(),
                "conf_noobj": to_cpu(conf_noobj).item(),
                "grid_size": grid_size,
            }

            return output, total_loss
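
The box decoding above maps raw outputs to image-space boxes: centers are sigmoid(tx, ty) plus the cell offset, sizes are the anchor dimensions scaled by exp(tw, th), and everything is multiplied by the stride. A standalone sketch with toy numbers (all values illustrative):

import torch

# Decode one prediction, mirroring the pred_boxes computation above.
stride = 32.0                    # input_size / grid_size, illustrative
grid_x, grid_y = 3.0, 5.0        # offsets of the responsible grid cell
anchor_w, anchor_h = 3.6, 2.8    # anchor size in grid units, illustrative

raw = torch.tensor([0.2, -0.1, 0.4, 0.3])        # tx, ty, tw, th
cx = (torch.sigmoid(raw[0]) + grid_x) * stride   # box center x in pixels
cy = (torch.sigmoid(raw[1]) + grid_y) * stride   # box center y in pixels
bw = torch.exp(raw[2]) * anchor_w * stride       # box width in pixels
bh = torch.exp(raw[3]) * anchor_h * stride       # box height in pixels
print(cx.item(), cy.item(), bw.item(), bh.item())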
Example #5
    def data_preprocessing(self, cfg):

        self.subject_mesh = {}
        self.obj_info = {}
        self.sbj_info = {}

        for split in self.split_seqs.keys():

            self.logger('Processing data for %s split.' % (split))

            frame_names = []
            body_data = {
                'global_orient': [], 'body_pose': [], 'transl': [],
                'right_hand_pose': [], 'left_hand_pose': [],
                'jaw_pose': [], 'leye_pose': [], 'reye_pose': [],
                'expression': [], 'fullpose': [],
                'contact': [], 'verts': []
            }

            object_data = {'verts': [], 'global_orient': [], 'transl': [], 'contact': []}
            lhand_data = {'verts': [], 'global_orient': [], 'hand_pose': [], 'transl': [], 'fullpose': []}
            rhand_data = {'verts': [], 'global_orient': [], 'hand_pose': [], 'transl': [], 'fullpose': []}

            for sequence in tqdm(self.split_seqs[split]):

                seq_data = parse_npz(sequence)

                obj_name = seq_data.obj_name
                sbj_id   = seq_data.sbj_id
                n_comps  = seq_data.n_comps
                gender   = seq_data.gender

                frame_mask = self.filter_contact_frames(seq_data)

                # total number of selected frames
                T = frame_mask.sum()
                if T < 1:
                    continue  # no frames selected; move on to the next sequence

                sbj_params = prepare_params(seq_data.body.params, frame_mask)
                rh_params  = prepare_params(seq_data.rhand.params, frame_mask)
                lh_params  = prepare_params(seq_data.lhand.params, frame_mask)
                obj_params = prepare_params(seq_data.object.params, frame_mask)

                append2dict(body_data, sbj_params)
                append2dict(rhand_data, rh_params)
                append2dict(lhand_data, lh_params)
                append2dict(object_data, obj_params)

                sbj_vtemp = self.load_sbj_verts(sbj_id, seq_data)

                if cfg.save_body_verts:

                    sbj_m = smplx.create(model_path=cfg.model_path,
                                         model_type='smplx',
                                         gender=gender,
                                         num_pca_comps=n_comps,
                                         v_template=sbj_vtemp,
                                         batch_size=T)

                    sbj_parms = params2torch(sbj_params)
                    verts_sbj = to_cpu(sbj_m(**sbj_parms).vertices)
                    body_data['verts'].append(verts_sbj)

                if cfg.save_lhand_verts:
                    lh_mesh = os.path.join(cfg.grab_path, '..', seq_data.lhand.vtemp)
                    lh_vtemp = np.array(Mesh(filename=lh_mesh).vertices)

                    lh_m = smplx.create(model_path=cfg.model_path,
                                        model_type='mano',
                                        is_rhand=False,
                                        v_template=lh_vtemp,
                                        num_pca_comps=n_comps,
                                        flat_hand_mean=True,
                                        batch_size=T)

                    lh_parms = params2torch(lh_params)
                    verts_lh = to_cpu(lh_m(**lh_parms).vertices)
                    lhand_data['verts'].append(verts_lh)

                if cfg.save_rhand_verts:
                    rh_mesh = os.path.join(cfg.grab_path, '..', seq_data.rhand.vtemp)
                    rh_vtemp = np.array(Mesh(filename=rh_mesh).vertices)

                    rh_m = smplx.create(model_path=cfg.model_path,
                                        model_type='mano',
                                        is_rhand=True,
                                        v_template=rh_vtemp,
                                        num_pca_comps=n_comps,
                                        flat_hand_mean=True,
                                        batch_size=T)

                    rh_parms = params2torch(rh_params)
                    verts_rh = to_cpu(rh_m(**rh_parms).vertices)
                    rhand_data['verts'].append(verts_rh)

                ### for objects

                obj_info = self.load_obj_verts(obj_name, seq_data, cfg.n_verts_sample)

                if cfg.save_object_verts:

                    obj_m = ObjectModel(v_template=obj_info['verts_sample'],
                                        batch_size=T)
                    obj_parms = params2torch(obj_params)
                    verts_obj = to_cpu(obj_m(**obj_parms).vertices)
                    object_data['verts'].append(verts_obj)

                if cfg.save_contact:

                    body_data['contact'].append(seq_data.contact.body[frame_mask])
                    object_data['contact'].append(seq_data.contact.object[frame_mask][:,obj_info['verts_sample_id']])

                frame_names.extend(['%s_%s' % (sequence.split('.')[0], fId) for fId in np.arange(T)])


            self.logger('Processing for %s split finished' % split)
            self.logger('Total number of frames for %s split is: %d' % (split, len(frame_names)))


            out_data = [body_data, rhand_data, lhand_data, object_data]
            out_data_name = ['body_data', 'rhand_data', 'lhand_data', 'object_data']

            for idx, data in enumerate(out_data):
                data = np2torch(data)
                data_name = out_data_name[idx]
                outfname = makepath(os.path.join(self.out_path, split, '%s.pt' % data_name), isfile=True)
                torch.save(data, outfname)

            np.savez(os.path.join(self.out_path, split, 'frame_names.npz'), frame_names=frame_names)

        np.save(os.path.join(self.out_path, 'obj_info.npy'), self.obj_info)
        np.save(os.path.join(self.out_path, 'sbj_info.npy'), self.sbj_info)
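
A hypothetical sanity check on the preprocessing output; the 'train' split name, paths, and keys follow the save calls above but are otherwise assumptions:

import os
import numpy as np
import torch

out_path = '/path/to/processed'  # placeholder
body_data = torch.load(os.path.join(out_path, 'train', 'body_data.pt'))
frame_names = np.load(os.path.join(out_path, 'train', 'frame_names.npz'))['frame_names']
print(len(frame_names), list(body_data.keys())[:3])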