Code example #1
    def __init__(
        self,
        cfg,
        texture_atlases_dict,
        device="cuda",
        default_class=0,
        **kwargs,
    ):
        self.embedder = build_densepose_embedder(cfg)

        self.texture_image_dict = {}
        self.alpha_dict = {}

        for mesh_name in texture_atlases_dict.keys():
            if texture_atlases_dict[mesh_name].shape[-1] == 4:  # Image with alpha channel
                self.alpha_dict[mesh_name] = texture_atlases_dict[mesh_name][:, :, -1] / 255.0
                self.texture_image_dict[mesh_name] = texture_atlases_dict[mesh_name][:, :, :3]
            else:
                self.alpha_dict[mesh_name] = texture_atlases_dict[mesh_name].sum(axis=-1) > 0
                self.texture_image_dict[mesh_name] = texture_atlases_dict[mesh_name]

        self.device = torch.device(device)
        self.class_to_mesh_name = get_class_to_mesh_name_mapping(cfg)
        self.default_class = default_class

        self.mesh_vertex_embeddings = {
            mesh_name: self.embedder(mesh_name).to(self.device)
            for mesh_name in self.class_to_mesh_name.values()
        }
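
The branch on the atlas shape is the key detail here: a 4-channel atlas carries its own 8-bit alpha map, while a 3-channel atlas falls back to treating any non-black pixel as opaque. A minimal, self-contained sketch of just that split, using plain NumPy and a hypothetical split_texture_atlas helper (not part of DensePose):

import numpy as np


def split_texture_atlas(atlas: np.ndarray):
    """Return (rgb_image, alpha_mask) for an HxWx3 or HxWx4 texture atlas."""
    if atlas.shape[-1] == 4:
        # RGBA atlas: the last channel is an 8-bit alpha map, rescaled to [0, 1].
        return atlas[:, :, :3], atlas[:, :, -1] / 255.0
    # RGB atlas: treat any pixel with a non-zero channel sum as opaque.
    return atlas, atlas.sum(axis=-1) > 0


rgba_atlas = np.zeros((4, 4, 4), dtype=np.uint8)
rgba_atlas[..., 3] = 255  # fully opaque toy atlas
rgb, alpha = split_texture_atlas(rgba_atlas)
print(rgb.shape, alpha.min(), alpha.max())  # (4, 4, 3) 1.0 1.0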
Code example #2
    def __init__(
        self,
        cfg,
        inplace=True,
        cmap=cv2.COLORMAP_JET,
        alpha=0.7,
        device="cuda",
        default_class=0,
        class_to_mesh_name=DEFAULT_CLASS_TO_MESH_NAME,
        **kwargs,
    ):
        self.mask_visualizer = MatrixVisualizer(inplace=inplace,
                                                cmap=cmap,
                                                val_scale=1.0,
                                                alpha=alpha)
        self.class_to_mesh_name = class_to_mesh_name
        self.embedder = build_densepose_embedder(cfg)
        self.device = torch.device(device)
        self.default_class = default_class

        self.embed_map_rescaled = get_smpl_euclidean_vertex_embedding()[:, 0]
        self.embed_map_rescaled -= self.embed_map_rescaled.min()
        self.embed_map_rescaled /= self.embed_map_rescaled.max()

        self.mesh_vertex_embeddings = {
            mesh_name: self.embedder(mesh_name).to(self.device)
            for mesh_name in self.class_to_mesh_name.values()
        }
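
The three lines around embed_map_rescaled take the first coordinate of the SMPL vertex embedding and min-max rescale it into [0, 1], so it can later be passed to the colormap as an intensity value. The same rescaling isolated into a hypothetical minmax_rescale helper:

import torch


def minmax_rescale(values: torch.Tensor) -> torch.Tensor:
    """Shift and scale a tensor so its values span [0, 1]."""
    values = values - values.min()
    return values / values.max()


embedding_column = torch.tensor([3.0, -1.0, 7.0])
print(minmax_rescale(embedding_column))  # tensor([0.5000, 0.0000, 1.0000])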
Code example #3
    def visualize(
        self,
        image_bgr: Image,
        outputs_boxes_xywh_classes: Tuple[
            Optional[DensePoseEmbeddingPredictorOutput], Optional[Boxes],
            Optional[List[int]]],
    ) -> Image:
        if outputs_boxes_xywh_classes[0] is None:
            return image_bgr

        embedder = build_densepose_embedder(self.cfg)

        embed_map_rescaled = get_smpl_euclidean_vertex_embedding()[:, 0]
        embed_map_rescaled -= embed_map_rescaled.min()
        embed_map_rescaled /= embed_map_rescaled.max()

        S, E, N, bboxes_xywh, pred_classes = self.extract_and_check_outputs_and_boxes(
            outputs_boxes_xywh_classes)

        mesh_vertex_embeddings = {
            p: embedder(self.class_to_mesh_name[p]).to(self.device)
            for p in np.unique(pred_classes)
        }

        for n in range(N):
            x, y, w, h = bboxes_xywh[n].int().tolist()
            closest_vertices, mask = self.get_closest_vertices_mask_from_ES(
                E[[n]], S[[n]], h, w, mesh_vertex_embeddings[pred_classes[n]])
            vis = (embed_map_rescaled[closest_vertices].clip(0, 1) *
                   255.0).cpu().numpy()
            mask_numpy = mask.cpu().numpy().astype(dtype=np.uint8)
            image_bgr = self.mask_visualizer.visualize(image_bgr, mask_numpy,
                                                       vis, [x, y, w, h])

        return image_bgr
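
Note that mesh_vertex_embeddings is built once per unique predicted class rather than once per detection, so repeated classes in a crowded image do not trigger redundant embedder calls. A toy illustration of that caching pattern (toy_embedder and the class-to-mesh mapping below are made up for the example):

import numpy as np

CLASS_TO_MESH_NAME = {0: "smpl_27554", 1: "cat_5001"}  # made-up mapping


def toy_embedder(mesh_name: str) -> np.ndarray:
    """Stand-in for the real embedder; returns a dummy per-vertex embedding."""
    print(f"embedding {mesh_name}")
    return np.zeros((10, 16))


pred_classes = [0, 0, 1, 0]  # four detections, only two distinct classes
mesh_vertex_embeddings = {
    p: toy_embedder(CLASS_TO_MESH_NAME[p]) for p in np.unique(pred_classes)
}
# "embedding ..." is printed only twice, even though there are four detections.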
Code example #4
def build_inference_based_loaders(
    cfg: CfgNode, model: torch.nn.Module
) -> Tuple[List[InferenceBasedLoader], List[float]]:
    loaders = []
    ratios = []
    embedder = build_densepose_embedder(cfg).to(device=model.device)  # pyre-ignore[16]
    for dataset_spec in cfg.BOOTSTRAP_DATASETS:
        dataset_cfg = get_bootstrap_dataset_config().clone()
        dataset_cfg.merge_from_other_cfg(CfgNode(dataset_spec))
        loader = build_inference_based_loader(cfg, dataset_cfg, model, embedder)
        loaders.append(loader)
        ratios.append(dataset_cfg.RATIO)
    return loaders, ratios
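
Each bootstrap dataset contributes both a loader and a RATIO value; the ratios are presumably used downstream to mix batches from the different loaders in proportion. A minimal sketch of such proportional mixing (combine_loaders is a hypothetical illustration, not the actual DensePose combined-loader API):

import random
from typing import Iterable, Iterator, List, TypeVar

T = TypeVar("T")


def combine_loaders(loaders: List[Iterable[T]], ratios: List[float]) -> Iterator[T]:
    """Yield items from several loaders, picking each loader in proportion to its ratio."""
    iterators = [iter(loader) for loader in loaders]
    weights = list(ratios)
    while iterators:
        idx = random.choices(range(len(iterators)), weights=weights, k=1)[0]
        try:
            yield next(iterators[idx])
        except StopIteration:
            # Drop exhausted loaders (and their weights) and keep mixing the rest.
            del iterators[idx], weights[idx]


# Toy usage: two "loaders" mixed roughly 3:1.
mixed = combine_loaders([["a"] * 30, ["b"] * 10], [0.75, 0.25])
print(sum(1 for _ in mixed))  # 40 items in total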
Code example #5
    def __init__(
        self,
        cfg,
        inplace=True,
        cmap=cv2.COLORMAP_JET,
        alpha=0.7,
        device="cuda",
        default_class=0,
        **kwargs,
    ):
        self.mask_visualizer = MatrixVisualizer(
            inplace=inplace, cmap=cmap, val_scale=1.0, alpha=alpha
        )
        self.class_to_mesh_name = get_class_to_mesh_name_mapping(cfg)
        self.embedder = build_densepose_embedder(cfg)
        self.device = torch.device(device)
        self.default_class = default_class

        self.mesh_vertex_embeddings = {
            mesh_name: self.embedder(mesh_name).to(self.device)
            for mesh_name in self.class_to_mesh_name.values()
            if self.embedder.has_embeddings(mesh_name)
        }
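
Compared with code example #2, the only change is the has_embeddings guard in the comprehension, which silently skips meshes the embedder was never configured for instead of failing on them. The same filtered-comprehension pattern in isolation (ToyEmbedder is a made-up stand-in for the DensePose embedder):

class ToyEmbedder:
    """Made-up embedder that only has embeddings for some meshes."""

    def __init__(self, known_meshes):
        self._known = set(known_meshes)

    def has_embeddings(self, mesh_name: str) -> bool:
        return mesh_name in self._known

    def __call__(self, mesh_name: str) -> str:
        return f"embedding<{mesh_name}>"


embedder = ToyEmbedder({"smpl_27554"})
class_to_mesh_name = {0: "smpl_27554", 1: "cat_5001"}
mesh_vertex_embeddings = {
    mesh_name: embedder(mesh_name)
    for mesh_name in class_to_mesh_name.values()
    if embedder.has_embeddings(mesh_name)
}
print(mesh_vertex_embeddings)  # {'smpl_27554': 'embedding<smpl_27554>'}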