Example 1
from typing import Tuple

import torch
import torch.nn as nn

# get_layer_type, requires_grad, _supported_layers_grad_samplers and
# _hooks_disabled are module-level helpers from the surrounding file.


def _capture_activations(layer: nn.Module, inputs: Tuple[torch.Tensor],
                         outputs: Tuple[torch.Tensor]):
    r"""Forward hook handler captures and saves activations flowing into the
    ``layer`` in ``layer.activations`` during forward pass.

    Args:
        layer: Layer to capture the activations in.
        inputs: Inputs to the ``layer``.
        outputs: Outputs of the ``layer``.
    """
    layer_type = get_layer_type(layer)
    # Skip frozen, unsupported, or non-training layers; the early return
    # makes it safe to register this hook on every submodule.
    if (not requires_grad(layer)
            or layer_type not in _supported_layers_grad_samplers.keys()
            or not layer.training):
        return

    if _hooks_disabled:
        return

    # Lazily create the buffer, then store a detached copy of the layer
    # input so it survives past the forward pass.
    if not hasattr(layer, "activations"):
        layer.activations = []

    layer.activations.append(inputs[0].detach())
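
A minimal usage sketch, assuming the module-level helpers above are in scope; the toy model and the registration loop are illustrative, not part of the original file:

model = nn.Sequential(nn.Linear(4, 2))  # toy model for illustration
# The hook skips unsupported or frozen layers itself, so it can be
# registered on every submodule; keep the handles for later removal.
handles = [m.register_forward_hook(_capture_activations)
           for m in model.modules()]

model.train()
model(torch.randn(8, 4))  # the forward pass triggers the hooks

for handle in handles:
    handle.remove()

After the forward pass, each supported layer (here the nn.Linear) holds its detached inputs in layer.activations.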
Example 2
import logging
from typing import Any, List

import torch.nn as nn


def _layer_activations(layer: nn.Module, layer_args: List[Any],
                       out: Any) -> int:
    """
    Computes the number of activations produced by a single layer.

    Activations are counted only for convolutional layers. To override this behavior, a
    layer can define a method to compute activations with the signature below, which
    will be used to compute the activations instead.

    class MyModule(nn.Module):
        def activations(self, out, *layer_args):
            ...
    """

    typestr = repr(layer)  # human-readable layer description for the log
    if hasattr(layer, "activations"):
        activations = layer.activations(out, *layer_args)
    elif isinstance(layer, (nn.Conv1d, nn.Conv2d, nn.Conv3d)):
        activations = out.numel()
    else:
        return 0

    message = [f"module: {typestr}", f"activations: {activations}"]
    logging.debug("\t".join(message))
    return int(activations)
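
A short sketch of the override described in the docstring; MyModule and the tensor shapes here are illustrative:

import torch
import torch.nn as nn

class MyModule(nn.Module):
    """Toy module that reports its own activation count."""

    def forward(self, x):
        return x * 2

    def activations(self, out, *layer_args):
        # Count every element of the output as an activation.
        return out.numel()

layer = MyModule()
x = torch.randn(2, 3)
out = layer(x)
print(_layer_activations(layer, [x], out))  # -> 6

Note the name collision with Examples 1 and 3: there layer.activations is a data attribute set by a hook, and the hasattr check here would then try to call a tensor or list. The two mechanisms should not be mixed on the same layer.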
Example 3
from typing import List, Tuple

import torch
import torch.nn as nn


def _capture_activations(layer: nn.Module, input: List[torch.Tensor],
                         output: torch.Tensor):
    """Save activations into layer.activations in forward pass"""
    if _hooks_disabled:
        return
    if get_layer_type(layer) not in _supported_layers_grad_samplers.keys():
        raise ValueError("Hook installed on unsupported layer")

    layer.activations = input[0].detach()

# The next hook is a method; its enclosing class is not shown in this
# excerpt, so a minimal stand-in (assumed name) keeps the code valid.
class _ActivationHookOwner:
    hooks_enabled: bool = True

    def capture_activations_hook(
        self,
        module: nn.Module,
        forward_input: List[torch.Tensor],
        _forward_output: torch.Tensor,
    ):
        if not requires_grad(module) or not module.training:
            return

        if not self.hooks_enabled:
            return

        if not hasattr(module, "activations"):
            module.activations = []
        module.activations.append(forward_input[0].detach())  # pyre-ignore


def _capture_activations(
    layer: nn.Module, inputs: Tuple[torch.Tensor], outputs: Tuple[torch.Tensor]
):
    r"""Forward hook handler captures and saves activations flowing into the
    ``layer`` in ``layer.activations`` during forward pass.

    Args:
        layer: Layer to capture the activations in.
        inputs: Inputs to the ``layer``.
        outputs: Outputs of the ``layer``.
    """
    if _hooks_disabled:
        return
    if get_layer_type(layer) not in _supported_layers_grad_samplers.keys():
        raise ValueError("Hook installed on unsupported layer")

    # pyre-fixme[16]: `Module` has no attribute `activations`.
    layer.activations = inputs[0].detach()
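
The variants above differ in how they store activations: the module-level hooks overwrite layer.activations with the latest input, while the method-based hook appends one entry per forward pass. A sketch of the class-based pattern follows; the class name, hooks_enabled flag, and registration logic are assumptions inferred from the method's use of self, and the requires_grad check is omitted since that helper is not shown here:

import torch
import torch.nn as nn
from typing import List

class ActivationCapturer:
    """Illustrative owner of the hook state (name and fields assumed)."""

    def __init__(self, model: nn.Module):
        self.hooks_enabled = True
        # nn.Module calls a forward hook as hook(module, input, output);
        # binding the method to self gives it access to hooks_enabled.
        self.handles = [
            m.register_forward_hook(self.capture_activations_hook)
            for m in model.modules()
        ]

    def capture_activations_hook(
        self,
        module: nn.Module,
        forward_input: List[torch.Tensor],
        _forward_output: torch.Tensor,
    ):
        if not module.training or not self.hooks_enabled:
            return
        if not hasattr(module, "activations"):
            module.activations = []
        module.activations.append(forward_input[0].detach())

    def remove_hooks(self):
        for handle in self.handles:
            handle.remove()

model = nn.Sequential(nn.Linear(4, 2))
capturer = ActivationCapturer(model)
model.train()
model(torch.randn(8, 4))
assert len(model[0].activations) == 1  # one entry per forward pass
capturer.remove_hooks()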