Example no. 1
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import numpy as np
import fvcore.nn.weight_init as weight_init
import torch
from torch import nn
from torch.nn import functional as F

from detectron2.layers import Conv2d, ShapeSpec, get_norm
from detectron2.utils.registry import Registry
from detectron2.utils.torch_utils import SelfGCNLayer, OtherGCNLayer
RELATION_TRIPLET_HEAD_REGISTRY = Registry("RELATION_TRIPLET_HEAD")
RELATION_TRIPLET_HEAD_REGISTRY.__doc__ = """
Registry for relation triplet heads, which make relation predictions from per-region features.

The registered object will be called with `obj(cfg)`.
"""

def build_triplet_head(cfg):
    """
    Build a relation triplet head defined by `cfg.MODEL.RELATION_HEADS.RELATION_TRIPLET_HEAD.NAME`.
    """
    name = cfg.MODEL.RELATION_HEADS.RELATION_TRIPLET_HEAD.NAME
    return RELATION_TRIPLET_HEAD_REGISTRY.get(name)(cfg)


def compute_triplet_result(
    pred_pair_instances, final_triplet_interest_pred_mix, loss_func, losses, metrics, training
):
    pred_pair_instance_nums = [len(pred_pair_instance) for pred_pair_instance in pred_pair_instances]
    final_triplet_interest_preds = final_triplet_interest_pred_mix.split(pred_pair_instance_nums)

    print("triplet")
    # print(final_triplet_interest_preds)
Example no. 2

# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from detectron2.layers import ShapeSpec
from detectron2.utils.registry import Registry

from .backbone import Backbone

BACKBONE_REGISTRY = Registry("BACKBONE")
BACKBONE_REGISTRY.__doc__ = """
Registry for backbones, which extract feature maps from images.

The registered object must be a callable that accepts two arguments:

1. A :class:`detectron2.config.CfgNode`
2. A :class:`detectron2.layers.ShapeSpec`, which contains the input shape specification.

It must return an instance of :class:`Backbone`.
"""


def build_backbone(cfg, input_shape=None):
    """
    Build a backbone from `cfg.MODEL.BACKBONE.NAME`.
    
    Returns:
        an instance of :class:`Backbone`
    """
    if input_shape is None:
        input_shape = ShapeSpec(channels=len(cfg.MODEL.PIXEL_MEAN))

    backbone_name = cfg.MODEL.BACKBONE.NAME
    backbone = BACKBONE_REGISTRY.get(backbone_name)(cfg, input_shape)
    assert isinstance(backbone, Backbone)
    return backbone
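

# --- Illustrative usage sketch (not part of the original example). ---
# A registered builder is any callable taking (cfg, input_shape) and returning a
# Backbone; `_ToyBackbone` and `build_toy_backbone` are hypothetical names that only
# show the registration/build flow.
from torch import nn


class _ToyBackbone(Backbone):
    def __init__(self, cfg, input_shape):
        super().__init__()
        # A single strided conv standing in for a real feature extractor.
        self.conv1 = nn.Conv2d(input_shape.channels, 64, kernel_size=3, stride=2, padding=1)

    def forward(self, image):
        return {"conv1": self.conv1(image)}

    def output_shape(self):
        return {"conv1": ShapeSpec(channels=64, stride=2)}


@BACKBONE_REGISTRY.register()
def build_toy_backbone(cfg, input_shape):
    return _ToyBackbone(cfg, input_shape)

# Setting cfg.MODEL.BACKBONE.NAME = "build_toy_backbone" makes build_backbone(cfg)
# return a _ToyBackbone instance.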
Example no. 3
from torch import nn
from torch.nn import functional as F

from detectron2.layers import Conv2d, ShapeSpec
from detectron2.structures import ImageList
from detectron2.utils.registry import Registry

from ..backbone import build_backbone
from ..postprocessing import sem_seg_postprocess
from .build import META_ARCH_REGISTRY

__all__ = [
    "SemanticSegmentor", "SEM_SEG_HEADS_REGISTRY", "SemSegFPNHead",
    "build_sem_seg_head"
]

SEM_SEG_HEADS_REGISTRY = Registry("SEM_SEG_HEADS")
SEM_SEG_HEADS_REGISTRY.__doc__ = """
Registry for semantic segmentation heads, which make semantic segmentation predictions
from feature maps.
"""


@META_ARCH_REGISTRY.register()
class SemanticSegmentor(nn.Module):
    """
    Main class for semantic segmentation architectures.
    """
    def __init__(self, cfg):
        super().__init__()
        self.backbone = build_backbone(cfg)
        self.sem_seg_head = build_sem_seg_head(cfg,
Example no. 4
from torch import nn

from detectron2.modeling import (
    META_ARCH_REGISTRY,
    build_backbone,
    build_sem_seg_head,
)
from detectron2.modeling.postprocessing import sem_seg_postprocess
from detectron2.projects.deeplab import DeepLabV3PlusHead
from detectron2.projects.deeplab.loss import DeepLabCE
from detectron2.structures import BitMasks, ImageList, Instances
from detectron2.utils.registry import Registry

from .post_processing import get_panoptic_segmentation

__all__ = [
    "PanopticDeepLab", "INS_EMBED_BRANCHES_REGISTRY", "build_ins_embed_branch"
]

INS_EMBED_BRANCHES_REGISTRY = Registry("INS_EMBED_BRANCHES")
INS_EMBED_BRANCHES_REGISTRY.__doc__ = """
Registry for instance embedding branches, which make instance embedding
predictions from feature maps.
"""


@META_ARCH_REGISTRY.register()
class PanopticDeepLab(nn.Module):
    """
    Main class for panoptic segmentation architectures.
    """
    def __init__(self, cfg):
        super().__init__()
        self.backbone = build_backbone(cfg)
        self.sem_seg_head = build_sem_seg_head(cfg,
Example no. 5
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import fvcore.nn.weight_init as weight_init
import torch
from torch import nn
from torch.nn import functional as F

from detectron2.layers import Conv2d, ConvTranspose2d, interpolate
from detectron2.structures.boxes import matched_boxlist_iou
from detectron2.utils.registry import Registry

from .structures import DensePoseOutput

ROI_DENSEPOSE_HEAD_REGISTRY = Registry("ROI_DENSEPOSE_HEAD")


def initialize_module_params(module):
    for name, param in module.named_parameters():
        if "bias" in name:
            nn.init.constant_(param, 0)
        elif "weight" in name:
            nn.init.kaiming_normal_(param, mode="fan_out", nonlinearity="relu")
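

# Illustrative usage sketch (not part of the original example): any nn.Module can be
# initialized this way; the small block below is hypothetical.
_toy_block = nn.Sequential(Conv2d(256, 512, kernel_size=3, padding=1), nn.ReLU())
initialize_module_params(_toy_block)  # biases -> 0, conv weights -> Kaiming normal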


@ROI_DENSEPOSE_HEAD_REGISTRY.register()
class DensePoseDeepLabHead(nn.Module):
    def __init__(self, cfg, input_channels):
        super(DensePoseDeepLabHead, self).__init__()
        # fmt: off
        hidden_dim = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_DIM
        kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_KERNEL
        norm = cfg.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NORM
Example no. 6
import numpy as np
import fvcore.nn.weight_init as weight_init
import torch
from detectron2.layers import Conv2d, ShapeSpec, cat, get_norm
from detectron2.utils.registry import Registry
from fvcore.nn import smooth_l1_loss
from torch import nn
from torch.nn import functional as F

ROI_PLANE_HEAD_REGISTRY = Registry("ROI_PLANE_HEAD")


@ROI_PLANE_HEAD_REGISTRY.register()
class PlaneRCNNConvFCHead(nn.Module):
    """
    A head with several conv layers followed by several fc layers (each followed by relu).
    """
    def __init__(self, cfg, input_shape: ShapeSpec):
        """
        The following attributes are parsed from config:
            num_fc: the number of fc layers
            fc_dim: the dimension of the fc layers
        """
        super().__init__()

        # fmt: off
        num_conv = cfg.MODEL.ROI_EMBEDDING_HEAD.NUM_CONV
        conv_dim = cfg.MODEL.ROI_EMBEDDING_HEAD.CONV_DIM
        num_fc = cfg.MODEL.ROI_PLANE_HEAD.NUM_FC
        fc_dim = cfg.MODEL.ROI_PLANE_HEAD.FC_DIM
        param_dim = cfg.MODEL.ROI_PLANE_HEAD.PARAM_DIM
Example no. 7
from detectron2.utils.registry import Registry

META_ARCH_REGISTRY = Registry("META_ARCH")  # noqa F401 isort:skip
META_ARCH_REGISTRY.__doc__ = """
Registry for meta-architectures, i.e. the whole model.

The registered object will be called with `obj(cfg)`
and expected to return a `nn.Module` object.
"""


def build_model(cfg):
    """
    Build the whole model architecture, defined by ``cfg.MODEL.META_ARCHITECTURE``.
    Note that it does not load any weights from ``cfg``.
    """
    meta_arch = cfg.MODEL.META_ARCHITECTURE
    return META_ARCH_REGISTRY.get(meta_arch)(cfg)
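

# --- Illustrative usage sketch (not part of the original example). ---
# Any nn.Module constructed from `cfg` can be registered; `_ToyMetaArch` is a
# hypothetical name used only to show the registration/build flow.
from torch import nn


@META_ARCH_REGISTRY.register()
class _ToyMetaArch(nn.Module):
    def __init__(self, cfg):
        super().__init__()
        self.conv = nn.Conv2d(len(cfg.MODEL.PIXEL_MEAN), 8, kernel_size=3, padding=1)

    def forward(self, batched_inputs):
        # Real meta-architectures consume detectron2's list-of-dicts batch format.
        return [self.conv(d["image"].float().unsqueeze(0)) for d in batched_inputs]

# With cfg.MODEL.META_ARCHITECTURE = "_ToyMetaArch", build_model(cfg) returns this module.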
Example no. 8
# Copyright (c) Facebook, Inc. and its affiliates.
import fvcore.nn.weight_init as weight_init
import torch
from torch import nn
from torch.nn import functional as F

from detectron2.layers import ShapeSpec, cat
from detectron2.structures import BitMasks
from detectron2.utils.events import get_event_storage
from detectron2.utils.registry import Registry

from .point_features import point_sample

POINT_HEAD_REGISTRY = Registry("POINT_HEAD")
POINT_HEAD_REGISTRY.__doc__ = """
Registry for point heads, which make predictions for a given set of per-point features.

The registered object will be called with `obj(cfg, input_shape)`.
"""


def roi_mask_point_loss(mask_logits, instances, points_coord):
    """
    Compute the point-based loss for instance segmentation mask predictions.

    Args:
        mask_logits (Tensor): A tensor of shape (R, C, P) or (R, 1, P) for class-specific or
            class-agnostic, where R is the total number of predicted masks in all images, C is the
            number of foreground classes, and P is the number of points sampled for each mask.
            The values are logits.
        instances (list[Instances]): A list of N Instances, where N is the number of images
Example no. 9
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved

from detectron2.utils.registry import Registry

DENSEPOSE_PREDICTOR_REGISTRY = Registry("DENSEPOSE_PREDICTOR")
Example no. 10
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from detectron2.utils.registry import Registry

START_REGISTRY = Registry("START_REGISTRY")  # noqa F401 isort:skip
START_REGISTRY.__doc__ = """
Registry for meta-architectures, i.e. the whole model.

The registered object will be called with `obj(cfg, **kwargs)`
and expected to return a `nn.Module` object.
"""


def build_start(cfg, **kwargs):
    """
    Build the model defined by ``cfg.start.name``.
    Note that it does not load any weights from ``cfg``.
    """
    return START_REGISTRY.get(cfg.start.name)(cfg=cfg, **kwargs)
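

# Illustrative usage sketch (not part of the original example); `ToyStart` and its
# `device` keyword are hypothetical, shown only to illustrate that extra keyword
# arguments are forwarded to the registered module.
from torch import nn


@START_REGISTRY.register()
class ToyStart(nn.Module):
    def __init__(self, cfg, device="cpu"):
        super().__init__()
        self.device_name = device

# With cfg.start.name = "ToyStart", build_start(cfg, device="cuda") constructs
# ToyStart(cfg=cfg, device="cuda").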
Example no. 11
import logging
import torch
import random
from typing import Tuple, List
from fvcore.nn import smooth_l1_loss
from torch import nn
from torch.nn import functional as F

from detectron2.layers import batched_nms, cat
from detectron2.structures import Boxes, Instances
from detectron2.utils.events import get_event_storage
from detectron2.utils.registry import Registry
from detectron2.data import MetadataCatalog, DatasetCatalog

logger = logging.getLogger(__name__)

ROI_HEADS_OUTPUT_REGISTRY = Registry("ROI_HEADS_OUTPUT")
ROI_HEADS_OUTPUT_REGISTRY.__doc__ = """
Registry for the output layers in ROI heads in a generalized R-CNN model."""
"""
Shape shorthand in this module:

    N: number of images in the minibatch
    R: number of ROIs, combined over all images, in the minibatch
    Ri: number of ROIs in image i
    K: number of foreground classes. E.g.,there are 80 foreground classes in COCO.

Naming convention:

    deltas: refers to the 4-d (dx, dy, dw, dh) deltas that parameterize the box2box
    transform (see :class:`box_regression.Box2BoxTransform`).
Example no. 12

# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from detectron2.utils.registry import Registry

PROPOSAL_GENERATOR_REGISTRY = Registry("PROPOSAL_GENERATOR")
PROPOSAL_GENERATOR_REGISTRY.__doc__ = """
Registry for proposal generator, which produces object proposals from feature maps.

The registered object will be called with `obj(cfg, input_shape)`.
The call should return a `nn.Module` object.
"""

from . import rpn  # noqa F401 isort:skip


def build_proposal_generator(cfg, input_shape):
    """
    Build a proposal generator from `cfg.MODEL.PROPOSAL_GENERATOR.NAME`.
    The name can be "PrecomputedProposals" to use no proposal generator.
    """
    name = cfg.MODEL.PROPOSAL_GENERATOR.NAME
    if name == "PrecomputedProposals":
        return None

    return PROPOSAL_GENERATOR_REGISTRY.get(name)(cfg, input_shape)
Example no. 13
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import math
import numpy as np
import fvcore.nn.weight_init as weight_init
import torch
from detectron2.layers import ShapeSpec, cat
from detectron2.utils.registry import Registry
from fvcore.nn import smooth_l1_loss
from torch import nn
from torch.nn import functional as F

ROI_Z_HEAD_REGISTRY = Registry("ROI_Z_HEAD")


@ROI_Z_HEAD_REGISTRY.register()
class FastRCNNFCHead(nn.Module):
    """
    A head with several fc layers (each followed by relu).
    """

    def __init__(self, cfg, input_shape: ShapeSpec):
        """
        The following attributes are parsed from config:
            num_fc: the number of fc layers
            fc_dim: the dimension of the fc layers
        """
        super().__init__()

        # fmt: off
        num_fc          = cfg.MODEL.ROI_Z_HEAD.NUM_FC
        fc_dim          = cfg.MODEL.ROI_Z_HEAD.FC_DIM
Example no. 14
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved


import torch
import itertools
from typing import Any, Dict, List, Optional, Set
from detectron2.utils.registry import Registry
from detectron2.solver.build import maybe_add_gradient_clipping as d2_maybe_add_gradient_clipping

D2GO_OPTIM_MAPPER_REGISTRY = Registry("D2GO_OPTIM_MAPPER")

def get_default_optimizer_params(
    model: torch.nn.Module,
    base_lr,
    weight_decay,
    weight_decay_norm,
    bias_lr_factor=1.0,
    weight_decay_bias=None,
    overrides: Optional[Dict[str, Dict[str, float]]] = None,
    lr_multipliers_overwrite: Optional[Dict[str, float]] = None,
):
    """
    Get default param list for optimizer
    Args:
        overrides (dict: str -> (dict: str -> float)):
            if not `None`, provides values for optimizer hyperparameters
            (LR, weight decay) for module parameters with a given name; e.g.
            {"embedding": {"lr": 0.01, "weight_decay": 0.1}} will set the LR and
            weight decay values for all module parameters named `embedding` (default: None)
        lr_multipliers_overwrite (dict: str-> float):
Example no. 15

# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import torch
import torch.nn as nn
from detectron2.utils.registry import Registry
from pytorch3d.ops import cubify
from pytorch3d.structures import Meshes
from pytorch3d.utils import ico_sphere

from shapenet.modeling.backbone import build_backbone
from shapenet.modeling.heads import MeshRefinementHead, VoxelHead
from shapenet.utils.coords import get_blender_intrinsic_matrix, voxel_to_world

MESH_ARCH_REGISTRY = Registry("MESH_ARCH")


@MESH_ARCH_REGISTRY.register()
class VoxMeshHead(nn.Module):
    def __init__(self, cfg):
        super(VoxMeshHead, self).__init__()

        # fmt: off
        backbone                = cfg.MODEL.BACKBONE
        self.cubify_threshold   = cfg.MODEL.VOXEL_HEAD.CUBIFY_THRESH
        self.voxel_size         = cfg.MODEL.VOXEL_HEAD.VOXEL_SIZE
        # fmt: on

        self.register_buffer("K", get_blender_intrinsic_matrix())
        # backbone
        self.backbone, feat_dims = build_backbone(backbone)
        # voxel head
        cfg.MODEL.VOXEL_HEAD.COMPUTED_INPUT_CHANNELS = feat_dims[-1]
Example no. 16
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import logging
import numpy as np
import fvcore.nn.weight_init as weight_init
import torch
from torch import nn
from torch.nn import functional as F

from detectron2.layers import Conv2d, ShapeSpec, get_norm
from detectron2.utils.events import get_event_storage
from detectron2.utils.registry import Registry
from termcolor import colored
from detectron2.data.datasets.nuscenes import is_vehicle

EMBEDDING_HEAD_REGISTRY = Registry("EMBEDDING_HEAD")
EMBEDDING_HEAD_REGISTRY.__doc__ = """
Registry for embedding heads, which compute embeddings from per-region features.

The registered object will be called with `obj(cfg, input_shape)`.
"""

logger = logging.getLogger(__name__)

@EMBEDDING_HEAD_REGISTRY.register()
class DoubleMarginContrastiveLoss(nn.Module):
    """
    Contrastive loss
    Takes the embeddings of two samples and a target label: 1 if the samples are from the same class, 0 otherwise.
    """

    def __init__(self, margin_p, margin_n):
Example no. 17
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import logging
import numpy as np
from typing import List
import fvcore.nn.weight_init as weight_init
import torch
from torch import nn
from torch.nn import functional as F

from detectron2.config import configurable
from detectron2.layers import Conv2d, Linear, ShapeSpec, get_norm
from detectron2.utils.registry import Registry

ROI_BOX_HEAD_REGISTRY = Registry("ROI_BOX_HEAD")
ROI_BOX_HEAD_REGISTRY.__doc__ = """
Registry for box heads, which make box predictions from per-region features.

The registered object will be called with `obj(cfg, input_shape)`.
"""


@ROI_BOX_HEAD_REGISTRY.register()
class FastRCNNConvFCHead(nn.Module):
    """
    A head with several 3x3 conv layers (each followed by norm & relu) and then
    several fc layers (each followed by relu).
    """
    @configurable
    def __init__(self,
                 input_shape: ShapeSpec,
                 *,
Example no. 18
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import copy
import math
from typing import List
import torch
from torch import nn

from detectron2.layers import ShapeSpec
from detectron2.structures import Boxes, RotatedBoxes
from detectron2.utils.registry import Registry

GRID_GENERATOR_REGISTRY = Registry("GRID_GENERATOR")
"""
Registry for modules that create object detection anchors for feature maps.
"""


def _create_grid_offsets(size, stride, device):
    grid_height, grid_width = size
    shifts_x = torch.arange(0,
                            grid_width * stride,
                            step=stride,
                            dtype=torch.float32,
                            device=device)
    shifts_y = torch.arange(0,
                            grid_height * stride,
                            step=stride,
                            dtype=torch.float32,
                            device=device)
    shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x)
    shift_x = shift_x.reshape(-1)
Example no. 19
import logging
from typing import List
import torch
import numpy as np
import fvcore.nn.weight_init as weight_init
from torch import nn
from torch.nn import functional as F

from detectron2.layers import Conv2d, Linear, ShapeSpec, cat, get_norm
from detectron2.structures import Instances
from detectron2.utils.registry import Registry

logger = logging.getLogger(__name__)

_TOTAL_SKIPPED = 0

ROI_PROPERTY_HEAD_REGISTRY = Registry("ROI_PROPERTY_HEAD")
ROI_PROPERTY_HEAD_REGISTRY.__doc__ = """
Registry for properties heads, which make property predictions from per-region features.

The registered object will be called with `obj(cfg, input_shape)`.
"""


def build_properties_head(cfg, input_shape):
    """Build a properties head from `cfg.MODEL.ROI_PROPERTY_HEAD.NAME`."""
    name = cfg.MODEL.ROI_PROPERTY_HEAD.NAME
    return ROI_PROPERTY_HEAD_REGISTRY.get(name)(cfg, input_shape)


def property_rcnn_loss(pred_logits, instances, num_classes):
    gt_prop_labels = []
Example no. 20
import copy
import logging
import os
import traceback

import detectron2.utils.comm as comm
import mobile_cv.lut.lib.pt.flops_utils as flops_utils
import torch
from d2go.utils.helper import run_once
from detectron2.utils.analysis import FlopCountAnalysis
from detectron2.utils.file_io import PathManager
from detectron2.utils.registry import Registry
from fvcore.nn import flop_count_str, flop_count_table

PROFILER_REGISTRY = Registry("PROFILER")

logger = logging.getLogger(__name__)


@torch.no_grad()
def dump_flops_info(model, inputs, output_dir, use_eval_mode=True):
    """
    Dump flops information about model, using the given model inputs.
    Information is dumped to output_dir using various flop counting tools
    in different formats. Only a simple table is printed to terminal.

    Args:
        inputs: a tuple of positional arguments used to call model with.
        use_eval_mode: turn the model into eval mode for flop counting. Otherwise,
            will use the original mode. It's recommended to use eval mode, because
Example no. 21
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import copy
import math
from typing import List
import torch
from torch import nn

from detectron2.config import configurable
from detectron2.layers import ShapeSpec
from detectron2.structures import Boxes, RotatedBoxes
from detectron2.utils.registry import Registry

ANCHOR_GENERATOR_REGISTRY = Registry("ANCHOR_GENERATOR")
ANCHOR_GENERATOR_REGISTRY.__doc__ = """
Registry for modules that create object detection anchors for feature maps.

The registered object will be called with `obj(cfg, input_shape)`.
"""


class BufferList(nn.Module):
    """
    Similar to nn.ParameterList, but for buffers
    """
    def __init__(self, buffers=None):
        super(BufferList, self).__init__()
        if buffers is not None:
            self.extend(buffers)

    def extend(self, buffers):
        offset = len(self)
Example no. 22
from typing import List, Tuple

import torch
import torch.nn as nn

from detectron2.utils.registry import Registry
from detectron2.layers import ShapeSpec, get_norm, DeformConv

MEAT_HEADS_REGISTRY = Registry("META_HEADS")
MEAT_HEADS_REGISTRY.__doc__ = """
Registry for ROI heads in a generalized R-CNN model.
ROIHeads take feature maps and region proposals, and
perform per-region computation.

The registered object will be called with `obj(cfg, input_shape)`.
"""

FEAT_ADAPTION_METHODS = [
    "Empty", "Unsupervised Offset", "Supervised Offset", "Split Unsup Offset"
]


class HeadBase(nn.Module):
    def __init__(self, cfg, input_shape: List[ShapeSpec]):
        """
        Arguments:
            in_channels (int): number of channels of the input feature
        """
        super().__init__()
        head_params = cfg.MODEL.META_ARCH
        # TODO: Implement the sigmoid version first.
Example no. 23
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from typing import List
import fvcore.nn.weight_init as weight_init
import torch
from torch import nn
from torch.nn import functional as F

from detectron2.layers import Conv2d, ConvTranspose2d, ShapeSpec, cat, get_norm
from detectron2.structures import Instances
from detectron2.utils.events import get_event_storage
from detectron2.utils.registry import Registry
from detectron2.layers.roi_align import ROIAlign

ROI_MASK_HEAD_REGISTRY = Registry("ROI_MASK_HEAD")
ROI_MASK_HEAD_REGISTRY.__doc__ = """
Registry for mask heads, which predicts instance masks given
per-region features.

The registered object will be called with `obj(cfg, input_shape)`.
"""


def mask_logits_from_proposals(pred_mask_logits, instances):
    """
    Compute the mask prediction loss defined in the Mask R-CNN paper.

    Args:
        pred_mask_logits (Tensor): A tensor of shape (B, C, Hmask, Wmask) or (B, 1, Hmask, Wmask)
            for class-specific or class-agnostic, where B is the total number of predicted masks
            in all images, C is the number of foreground classes, and Hmask, Wmask are the height
            and width of the mask predictions. The values are logits.
Example no. 24
# -*- encoding: utf-8 -*-
"""
@File         : /detectron2/detectron2/modeling/flow/build.py
@Time         : 2020-11-24 23:58:33
@Author       : Facebook, Inc. and its affiliates.
@Last Modified: 2020-11-25 22:26:41
@Modified By  : Chen-Jianhu ([email protected])
@License      : Copyright(C), USTC
@Desc         : None
"""

import torch

from detectron2.utils.registry import Registry

FLOW_NET_REGISTRY = Registry("FLOW_NET")
FLOW_NET_REGISTRY.__doc__ = """
Registry for flow nets, which predict optical flow from image pairs.

The registered object must be a callable that accepts two arguments:

1. A :class:`detectron2.config.CfgNode`
2. A :class:`detectron2.layers.ShapeSpec`, which contains the input shape specification.

It must return an instance of :class:`Backbone`.
"""


def build_flow_net(cfg):
    """
    Build a flow net from `cfg.MODEL.FLOW_NET.NAME`.
Example no. 25
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import torch
from torch import nn
from torch.nn import functional as F

from detectron2.layers import Conv2d, ConvTranspose2d, ShapeSpec, cat, interpolate
from detectron2.structures import heatmaps_to_keypoints
from detectron2.utils.events import get_event_storage
from detectron2.utils.registry import Registry

_TOTAL_SKIPPED = 0

ROI_KEYPOINT_HEAD_REGISTRY = Registry("ROI_KEYPOINT_HEAD")
"""
Registry for keypoint heads, which make keypoint predictions from per-region features.
"""


def build_keypoint_head(cfg, input_shape):
    """
    Build a keypoint head from `cfg.MODEL.ROI_KEYPOINT_HEAD.NAME`.
    """
    name = cfg.MODEL.ROI_KEYPOINT_HEAD.NAME
    return ROI_KEYPOINT_HEAD_REGISTRY.get(name)(cfg, input_shape)


def keypoint_rcnn_loss(pred_keypoint_logits, instances, normalizer):
    """
    Arguments:
        pred_keypoint_logits (Tensor): A tensor of shape (N, K, S, S) where N is the total number
            of instances in the batch, K is the number of keypoints, and S is the side length
Example no. 26
#!/usr/bin/env python3

from typing import NamedTuple, List, Tuple

from detectron2.utils.registry import Registry

KEYPOINT_METADATA_REGISTRY = Registry("KEYPOINT_METADATA")
KEYPOINT_METADATA_REGISTRY.__doc__ = "Registry for keypoint metadata definitions."


class KeypointMetadata(NamedTuple):
    names: List[str]
    flip_map: List[Tuple[str, str]]
    connection_rules: List[Tuple[str, str, Tuple[int, int, int]]]

    def to_dict(self):
        return {
            "keypoint_names": self.names,
            "keypoint_flip_map": self.flip_map,
            "keypoint_connection_rules": self.connection_rules,
        }


def get_keypoint_metadata(name):
    return KEYPOINT_METADATA_REGISTRY.get(name)().to_dict()
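

# Illustrative usage sketch (not part of the original example); the two-keypoint
# skeleton below is hypothetical. Registered objects are called with no arguments and
# must return a KeypointMetadata (or anything exposing to_dict()).
@KEYPOINT_METADATA_REGISTRY.register()
def toy_keypoint_metadata():
    return KeypointMetadata(
        names=["left_eye", "right_eye"],
        flip_map=[("left_eye", "right_eye")],
        connection_rules=[("left_eye", "right_eye", (102, 205, 170))],
    )

# get_keypoint_metadata("toy_keypoint_metadata") then returns the dict form of the above.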
Example no. 27
import logging
from abc import ABC

from torch import nn
from torch.nn import functional as F

from detectron2.layers import cat
from detectron2.modeling import ROI_HEADS_REGISTRY, StandardROIHeads
from detectron2.utils.registry import Registry
from d2go.config import CfgNode as CN
from d2go.data.dataset_mappers import (
    D2GO_DATA_MAPPER_REGISTRY,
    D2GoDatasetMapper,
)
from d2go.utils.helper import alias

logger = logging.getLogger(__name__)

SUBCLASS_FETCHER_REGISTRY = Registry("SUBCLASS_FETCHER")


def add_subclass_configs(cfg):
    _C = cfg
    _C.MODEL.SUBCLASS = CN()
    _C.MODEL.SUBCLASS.SUBCLASS_ON = False
    _C.MODEL.SUBCLASS.NUM_SUBCLASSES = 0  # must be set
    _C.MODEL.SUBCLASS.NUM_LAYERS = 1
    _C.MODEL.SUBCLASS.SUBCLASS_ID_FETCHER = "SubclassFetcher"  # ABC, must be set
    _C.MODEL.SUBCLASS.SUBCLASS_MAPPING = [
    ]  # subclass mapping from model output to annotation
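

# Illustrative usage sketch (not part of the original example); assumes a fresh,
# unfrozen detectron2 config, and the values set below are hypothetical.
from detectron2.config import get_cfg

_demo_cfg = get_cfg()
add_subclass_configs(_demo_cfg)
_demo_cfg.MODEL.SUBCLASS.SUBCLASS_ON = True
_demo_cfg.MODEL.SUBCLASS.NUM_SUBCLASSES = 12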


class SubclassFetcher(ABC):
    """ Fetcher class to read subclass id annotations from dataset and prepare for train/eval.
Example no. 28
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from typing import Dict, List
import torch
import torch.nn.functional as F
from torch import nn

from detectron2.layers import ShapeSpec
from detectron2.utils.registry import Registry

from ..anchor_generator import build_anchor_generator
from ..box_regression import Box2BoxTransform
from ..matcher import Matcher
from .build import PROPOSAL_GENERATOR_REGISTRY
from .rpn_outputs import RPNOutputs, find_top_rpn_proposals

RPN_HEAD_REGISTRY = Registry("RPN_HEAD")
"""
Registry for RPN heads, which take feature maps and perform
objectness classification and bounding box regression for anchors.
"""


def build_rpn_head(cfg, input_shape):
    """
    Build an RPN head defined by `cfg.MODEL.RPN.HEAD_NAME`.
    """
    name = cfg.MODEL.RPN.HEAD_NAME
    return RPN_HEAD_REGISTRY.get(name)(cfg, input_shape)


@RPN_HEAD_REGISTRY.register()
Example no. 29

from detectron2.utils.events import get_event_storage
from detectron2.utils.registry import Registry

from ..backbone.resnet import BottleneckBlock, make_stage
from ..box_regression import Box2BoxTransform
from ..matcher import Matcher, MatcherIgnore
from ..poolers import ROIPooler
from ..proposal_generator.proposal_utils import add_ground_truth_to_proposals
from ..sampling import subsample_labels, bernoulli_subsample_labels
from .box_head import build_box_head
from .fast_rcnn import FastRCNNOutputLayers, FastRCNNOutputs
from .overlap_head import OverlapFastRCNNOutputs, OverlapOutputLayers
from .keypoint_head import build_keypoint_head
from .mask_head import build_mask_head

ROI_HEADS_REGISTRY = Registry("ROI_HEADS")
ROI_HEADS_REGISTRY.__doc__ = """
Registry for ROI heads in a generalized R-CNN model.
ROIHeads take feature maps and region proposals, and
perform per-region computation.

The registered object will be called with `obj(cfg, input_shape)`.
The call is expected to return an :class:`ROIHeads`.
"""

logger = logging.getLogger(__name__)


def build_roi_heads(cfg, input_shape):
    """
    Build ROIHeads defined by `cfg.MODEL.ROI_HEADS.NAME`.
Example no. 30
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import numpy as np
import fvcore.nn.weight_init as weight_init
import torch
from torch import nn
from torch.nn import functional as F

from detectron2.structures import Boxes, Instances, pairwise_iou
from detectron2.layers import Conv2d, ShapeSpec, get_norm
from detectron2.utils.registry import Registry
from detectron2.layers import cat
from detectron2.utils.events import get_event_storage

from ..postprocessing import detector_postprocess
RECLS_NET_REGISTRY = Registry("RECLS_NET")
RECLS_NET_REGISTRY.__doc__ = """
Registry for re-classification (recls) heads, which make re-classification predictions from per-region features.

The registered object will be called with `obj(cfg, input_shape)`.
"""


def mask_recls_filter_loss(recls, pred_mask_logits, mask_features, instances, box_ths=0.8, mask_ths=0.95, gt_weight=0.1,
                           pre_logits=[]):
    # cls_agnostic_mask = pred_mask_logits.size(1) == 1
    mask_side_len = pred_mask_logits.size(2)
    assert pred_mask_logits.size(2) == pred_mask_logits.size(3), "Mask prediction must be square!"
    index = [0]

    pred_classes_lst = []
    gt_classes_lst = []