def _forward_pre_hook(
    self,
    mod: Module,
    inp: Union[Tuple[Tensor, ...], Tensor],
):
    # Runs before every forward call: bump the global call counter and
    # attach a fresh description recording this module's name, type, and
    # position in the overall execution order.
    self._call_count += 1
    mod._analyzed_layer_desc = AnalyzedLayerDesc(
        name=mod._analyzed_layer_name,
        type_=mod.__class__.__name__,
        execution_order=self._call_count,
    )
def _create_mod_hooks(self, mod: Module, name: str) -> List[RemovableHandle]:
    mod._analyzed_layer_desc = None
    mod._analyzed_layer_name = name

    # Every module gets the pre-hook that stamps its execution order.
    forward_pre_hook = mod.register_forward_pre_hook(self._forward_pre_hook)

    # Dispatch the post-forward hook on module type so each layer is
    # analyzed by the routine that matches its op semantics; anything
    # unrecognized falls through to the generic module hook.
    if isinstance(mod, _ConvNd):
        forward_hook = mod.register_forward_hook(self._conv_hook)
    elif isinstance(mod, Linear):
        forward_hook = mod.register_forward_hook(self._linear_hook)
    elif isinstance(mod, _BatchNorm):
        forward_hook = mod.register_forward_hook(self._bn_hook)
    elif isinstance(mod, (_MaxPoolNd, _AvgPoolNd)):
        forward_hook = mod.register_forward_hook(self._pool_hook)
    elif isinstance(mod, (_AdaptiveAvgPoolNd, _AdaptiveMaxPoolNd)):
        forward_hook = mod.register_forward_hook(self._adaptive_pool_hook)
    elif isinstance(
        mod,
        (
            Threshold,
            ReLU,
            ReLU6,
            RReLU,
            LeakyReLU,
            PReLU,
            ELU,
            CELU,
            SELU,
            GLU,
            Hardtanh,
            Tanh,
            Sigmoid,
            LogSigmoid,
        ),
    ):
        forward_hook = mod.register_forward_hook(self._activation_hook)
    elif isinstance(mod, (Softmax, Softmax2d)):
        forward_hook = mod.register_forward_hook(self._softmax_hook)
    else:
        forward_hook = mod.register_forward_hook(self._module_hook)

    return [forward_pre_hook, forward_hook]
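# A minimal sketch of how these hooks are typically wired up and torn down,
# assuming an analyzer instance that owns these methods and a `model` to
# inspect (the variable names and the example input shape below are
# illustrative, not part of this file):
#
#     handles: List[RemovableHandle] = []
#     for name, mod in model.named_modules():
#         handles.extend(analyzer._create_mod_hooks(mod, name))
#
#     # one forward pass triggers the hooks and populates each module's
#     # _analyzed_layer_desc with its execution order
#     model(torch.randn(1, 3, 224, 224))
#
#     # remove the hooks once analysis is done so they don't keep firing
#     for handle in handles:
#         handle.remove()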