Code example #1 (score: 0)
File: base.py  Project: serre-lab/gammanet_pytorch
    def setup_model(self):
        """Build the model from the run config and log its size.

        Reads the model spec from ``self.cfg.model``, instantiates it via
        ``model_tools.get_model``, and logs the number of trainable
        parameters.
        """
        self.model = model_tools.get_model(self.cfg.model)

        # Count only parameters the optimizer will actually update.
        # Generator expressions avoid materializing intermediate lists;
        # int() normalizes np.prod's numpy scalar to a plain Python int.
        params = int(sum(np.prod(p.size())
                         for p in self.model.parameters()
                         if p.requires_grad))
        self.logger.info('number of trainable parameters : %d' % params)
Code example #2 (score: 0)
    def setup_model(self):
        """Build the model, log its size, and create auxiliary layers.

        Reads the model spec from ``self.cfg.model``, instantiates it via
        ``model_tools.get_model``, logs the trainable-parameter count, then
        constructs the encoding / prior-boost / non-gray-mask helper layers
        (presumably for colorization-style training — TODO confirm against
        the layer definitions).
        """
        self.model = model_tools.get_model(self.cfg.model)

        # Count only parameters the optimizer will actually update.
        # Generator expressions avoid materializing intermediate lists;
        # int() normalizes np.prod's numpy scalar to a plain Python int.
        params = int(sum(np.prod(p.size())
                         for p in self.model.parameters()
                         if p.requires_grad))
        self.logger.info('number of trainable parameters : %d' % params)

        self.encode_layer = NNEncLayer()
        self.boost_layer = PriorBoostLayer()
        self.nongray_mask = NonGrayMaskLayer()
Code example #3 (score: 0)
    def __init__(self,
                 base_ff=vgg_example,
                 hgru_params=hgru_params_example,
                 timesteps=6):
        """Wrap a feed-forward backbone with hGRU feedback layers.

        Args:
            base_ff: spec/config for the feed-forward base network,
                resolved through ``model_tools.get_model``.
            hgru_params: per-layer hGRU hyper-parameter collection.
            timesteps: number of recurrent timesteps to unroll.
        """
        super().__init__()

        # Record configuration first, then build the backbone and the
        # feedback wiring on top of it.
        self.timesteps = timesteps
        self.hgru_params = hgru_params
        self.eps = 0.01  # small constant, presumably for numerical stability — TODO confirm

        self.base_ff = model_tools.get_model(base_ff)
        self.build_fb_layers()
Code example #4 (score: 0)
    def __init__(
            self,
            base_ff=vgg_example,
            gn_params=gn_params_example,
            timesteps=6,
            hidden_init='identity',
            attention='gala',  # 'se', None
            attention_layers=2,
            saliency_filter_size=5,
            norm_attention=False,
            normalization_fgru='InstanceNorm2d',
            normalization_fgru_params=None,
            normalization_gate='InstanceNorm2d',
            normalization_gate_params=None,
            force_alpha_divisive=True,
            force_non_negativity=True,
            multiplicative_excitation=True,
            ff_non_linearity='ReLU',
            us_resize_before_block=True,
            skip_horizontal=True,
            readout=True,
            readout_feats=1):
        """Build a gamma-net: a feed-forward backbone with fGRU layers.

        Args:
            base_ff: spec/config for the feed-forward base network,
                resolved through ``model_tools.get_model``.
            gn_params: per-fGRU parameter collection; its length must be
                odd (down path, bottleneck, up path are symmetric).
            timesteps: number of recurrent timesteps to unroll.
            hidden_init: hidden-state initialization mode.
            attention: attention variant ('gala', 'se', or None).
            attention_layers: depth of the attention sub-network.
            saliency_filter_size: kernel size for the saliency filter.
            norm_attention: whether to normalize attention maps.
            normalization_fgru / normalization_fgru_params: norm layer and
                kwargs used inside the fGRU (defaults to InstanceNorm2d
                with ``{'affine': True}``).
            normalization_gate / normalization_gate_params: norm layer and
                kwargs used in the gates (same default).
            force_alpha_divisive, force_non_negativity,
            multiplicative_excitation: fGRU behavioral switches.
            ff_non_linearity: non-linearity used in feed-forward blocks.
            us_resize_before_block: resize before (vs after) each
                upsampling block.
            skip_horizontal: keep horizontal skip connections.
            readout: if True, build a readout head.
            readout_feats: number of readout output features.
        """
        super().__init__()

        # BUG FIX: the norm-params defaults were mutable dict literals in
        # the signature; such a dict is created once at function-definition
        # time and shared by every instance, so a mutation in one model
        # would silently leak into all others. Use a None sentinel instead.
        if normalization_fgru_params is None:
            normalization_fgru_params = {'affine': True}
        if normalization_gate_params is None:
            normalization_gate_params = {'affine': True}

        self.timesteps = timesteps
        self.gn_params = gn_params

        # The fGRUs are laid out symmetrically around a bottleneck, which
        # requires an odd count; gn_layers is the depth of one side + 1.
        assert len(gn_params) % 2 == 1, 'the number of fgrus is not odd'
        self.gn_layers = len(gn_params) // 2 + 1

        self.ff_non_linearity = ff_non_linearity
        self.normalization_fgru = normalization_fgru

        self.us_resize_before_block = us_resize_before_block

        self.normalization_fgru_params = normalization_fgru_params
        # Bundle the per-fGRU keyword arguments passed to every fGRU cell.
        self.fgru_params = {
            'hidden_init': hidden_init,
            'attention': attention,
            'attention_layers': attention_layers,
            'saliency_filter_size': saliency_filter_size,
            'norm_attention': norm_attention,
            'normalization_fgru': normalization_fgru,
            'normalization_fgru_params': normalization_fgru_params,
            'normalization_gate': normalization_gate,
            'normalization_gate_params': normalization_gate_params,
            'ff_non_linearity': ff_non_linearity,
            'force_alpha_divisive': force_alpha_divisive,
            'force_non_negativity': force_non_negativity,
            'multiplicative_excitation': multiplicative_excitation,
            'timesteps': timesteps
        }

        self.base_ff = model_tools.get_model(base_ff)

        self.build_fb_layers()
        self.skip_horizontal = skip_horizontal
        self.use_readout = False
        if readout:
            self.use_readout = True
            self.build_readout(readout_feats)