Example #1
    def build_encoder(self):
        """ helper to build the encoder type

        :returns: an encoder
        :rtype: nn.Module

        """
        encoder = layers.get_encoder(**self.config)(
            output_size=self.config['continuous_size'])
        return torch.jit.script(encoder) if self.config['jit'] else encoder
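For reference, here is a minimal, self-contained sketch of the factory pattern that Example #1 (and several examples below) rely on: get_encoder(**config) is assumed to return an encoder class selected from the config, which is then instantiated with an output_size. The get_encoder stub and DenseEncoder below are hypothetical stand-ins, not the library's actual implementations.

import torch
import torch.nn as nn


class DenseEncoder(nn.Module):
    """Hypothetical stand-in for the library's dense encoder."""

    def __init__(self, input_shape, output_size, latent_size=128, **unused_kwargs):
        super().__init__()
        flat_dim = 1
        for dim in input_shape:
            flat_dim *= dim
        self.net = nn.Sequential(
            nn.Flatten(),
            nn.Linear(flat_dim, latent_size),
            nn.ReLU(),
            nn.Linear(latent_size, output_size),
        )

    def forward(self, x):
        return self.net(x)


def get_encoder(encoder_layer_type='dense', **config):
    """Hypothetical factory: pick an encoder class from the config and
    return a callable that finishes construction with extra kwargs."""
    encoder_map = {'dense': DenseEncoder}
    encoder_cls = encoder_map[encoder_layer_type]
    return lambda **kwargs: encoder_cls(**{**config, **kwargs})


# Usage mirroring Example #1: build the encoder, then optionally jit-script it.
config = {'encoder_layer_type': 'dense', 'input_shape': [1, 28, 28],
          'continuous_size': 10, 'jit': False}
encoder = get_encoder(**config)(output_size=config['continuous_size'])
encoder = torch.jit.script(encoder) if config['jit'] else encoder
print(encoder(torch.randn(4, 1, 28, 28)).shape)  # torch.Size([4, 10])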
Example #2
    def _get_dense_net_map(self, name='dense'):
        """ Internal helper to build a dense network

        :param name: name of the network
        :returns: a nn.Sequential Dense net
        :rtype: nn.Sequential

        """
        config = deepcopy(self.config)
        config['encoder_layer_type'] = 'dense'
        return get_encoder(config, name=name)
Example #3
    def _get_dense_net_map(self, name='vrnn'):
        """ helper to pull a dense encoder

        :param name: the name of the dense network
        :returns: the dense network
        :rtype: nn.Module

        """
        config = deepcopy(self.config)
        config['encoder_layer_type'] = 'dense'
        return get_encoder(config, name=name)
Example #4
    def _get_dense_net(self, input_size, name='dense'):
        """ Internal helper to build a dense network

        :param name: name of the network
        :returns: a nn.Sequential Dense net
        :rtype: nn.Sequential

        """
        config = deepcopy(self.config)
        config['encoder_layer_type'] = 'dense'
        config['input_shape'] = [input_size]
        return get_encoder(name=name, **config)
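Examples #2 through #4 share one idiom: deep-copy the config, force encoder_layer_type to 'dense', and (in Example #4) override input_shape before handing the copy to get_encoder. A small standalone sketch of that idiom, using a hypothetical get_dense_config helper, shows why the deepcopy matters:

from copy import deepcopy


def get_dense_config(config, input_size=None):
    """Return a dense-encoder config without mutating the shared one."""
    dense_config = deepcopy(config)                # leave the caller's config untouched
    dense_config['encoder_layer_type'] = 'dense'
    if input_size is not None:                     # Example #4 also overrides the shape
        dense_config['input_shape'] = [input_size]
    return dense_config


shared = {'encoder_layer_type': 'conv', 'input_shape': [1, 28, 28]}
dense = get_dense_config(shared, input_size=64)
assert shared['encoder_layer_type'] == 'conv'      # the original config is unchanged
assert dense == {'encoder_layer_type': 'dense', 'input_shape': [64]}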
Example #5
    def build_encoder(self):
        """ helper to build the encoder type

        :returns: an encoder
        :rtype: nn.Module

        """
        encoder = layers.get_encoder(**self.config)(
            output_size=self.reparameterizer.input_size)
        print('encoder has {} parameters\n'.format(
            utils.number_of_parameters(encoder) / 1e6))
        return torch.jit.script(encoder) if self.config['jit'] else encoder
Example #6
    def _lazy_build_phi_x(self, input_shape):
        """ Lazily build an encoder to extract features.

        :param input_shape: the input tensor shape
        :returns: an encoder module
        :rtype: nn.Module

        """
        conf = deepcopy(self.config)
        conf['input_shape'] = input_shape
        return nn.Sequential(
            get_encoder(**conf)(output_size=self.config['latent_size']),
            self.activation_fn()
            # nn.SELU()
        )
Example #7
    def _lazy_build_phi_x(self, input_shape):
        """ Lazily build an encoder to extract features.

        :param input_shape: the input tensor shape
        :returns: an encoder module
        :rtype: nn.Module

        """
        return nn.Sequential(
            get_encoder(self.config)(input_shape=input_shape,
                                     output_size=self.config['latent_size'],
                                     activation_fn=self.activation_fn),
            self.activation_fn()
            #nn.SELU()
        )
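Examples #6 and #7 build the feature extractor phi_x lazily, wrapping the encoder and an activation in nn.Sequential once the input shape is known. A minimal, library-independent sketch of that lazy-build idea (the LazyFeatureExtractor class and its internals are illustrative assumptions, not the library's code):

import torch
import torch.nn as nn


class LazyFeatureExtractor(nn.Module):
    """Builds phi_x on the first forward pass, once the input shape is known."""

    def __init__(self, latent_size, activation_fn=nn.ReLU):
        super().__init__()
        self.latent_size = latent_size
        self.activation_fn = activation_fn
        self.phi_x = None

    def _lazy_build_phi_x(self, input_shape):
        flat_dim = 1
        for dim in input_shape:
            flat_dim *= dim
        return nn.Sequential(
            nn.Flatten(),
            nn.Linear(flat_dim, self.latent_size),
            self.activation_fn(),
        )

    def forward(self, x):
        if self.phi_x is None:                      # build once, on first use
            self.phi_x = self._lazy_build_phi_x(x.shape[1:])
        return self.phi_x(x)


extractor = LazyFeatureExtractor(latent_size=32)
print(extractor(torch.randn(8, 1, 28, 28)).shape)  # torch.Size([8, 32])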
Example #8
    def build_encoder(self):
        """ helper to build the encoder type

        :returns: an encoder
        :rtype: nn.Module

        """
        conv_layer_types = ['conv', 'coordconv', 'resnet']
        input_shape = [self.input_shape[0], 0, 0] if self.config['encoder_layer_type'] \
            in conv_layer_types else self.input_shape

        # return the encoder
        return get_encoder(self.config)(
            input_shape=input_shape,
            output_size=self.reparameterizer.input_size,
            activation_fn=self.activation_fn)
Example #9
    def __init__(self, input_shape, **kwargs):
        """ Implements a Simple VAE with a conditioned latent variable

        :param input_shape: the input shape
        :returns: an object of AbstractVAE
        :rtype: AbstractVAE

        """
        super(ClassConditionedPriorVAE, self).__init__(input_shape, **kwargs)
        assert 'output_size' in self.config, "need to specify label size in argparse"

        # build a projector for the labels to the learned prior
        conf = deepcopy(self.config)
        conf['encoder_layer_type'] = 'dense'
        conf['input_shape'] = [conf['output_size']]  # the size of the labels from the dataset
        self.prior_mlp = layers.get_encoder(**conf)(
            output_size=self.reparameterizer.input_size)
Example #10
    def _build_model(self):
        ''' helper function to build convolutional or dense decoder
            chans * 2 because we want to do relationships'''
        crop_size = [
            self.config['img_shp'][0], self.config['window_size'],
            self.config['window_size']
        ]

        # main function approximator to extract crop features
        nlayer_map = {
            'conv': {32: 4, 70: 6, 100: 7},
            'resnet': {32: None, 64: None, 70: None, 100: None},
            'dense': {32: 3, 64: 3, 70: 3, 100: 3}
        }
        bilinear_size = (self.config['window_size'],
                         self.config['window_size'])
        conv = get_encoder(self.config, name='crop_feat')(
            crop_size,
            self.config['latent_size'],
            activation_fn=str_to_activ_module(self.config['activation']),
            num_layers=nlayer_map[self.config['encoder_layer_type']][
                self.config['window_size']],
            bilinear_size=bilinear_size)

        # takes the state + output of conv and projects it
        # the +1 is for ACT
        state_output_size = self.config['latent_size'] + 1 if self.config['concat_prediction_size'] <= 0 \
            else self.config['concat_prediction_size'] + 1
        state_input_size = self.config['latent_size'] if self.config['disable_rnn_proj'] \
            else self.config['latent_size']*2
        state_projector = nn.Sequential(
            self._get_dense(name='state_proj')(
                state_input_size,
                state_output_size,
                normalization_str=self.config['dense_normalization'],
                activation_fn=str_to_activ_module(self.config['activation'])))

        # takes the finally aggregated vector and projects to output dims
        input_size = self.config['concat_prediction_size'] * self.config['max_time_steps'] \
            if self.config['concat_prediction_size'] > 0 else self.config['latent_size']
        output_projector = self._get_dense(name='output_proj')(
            input_size,
            self.output_size,
            normalization_str=self.config['dense_normalization'],
            activation_fn=str_to_activ_module(self.config['activation']))

        return conv, state_projector, output_projector
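Example #10 picks the crop encoder's depth from a nested lookup keyed first by encoder type and then by window size, with None deferring to the library default. A standalone check of that lookup (values copied from the example above):

nlayer_map = {
    'conv': {32: 4, 70: 6, 100: 7},
    'resnet': {32: None, 64: None, 70: None, 100: None},
    'dense': {32: 3, 64: 3, 70: 3, 100: 3}
}

# A conv crop encoder over 70x70 windows gets 6 layers, while any
# resnet window size falls back to the library default (None).
assert nlayer_map['conv'][70] == 6
assert nlayer_map['resnet'][100] is None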
Example #11
    def _get_dense(self, name='imsp'):
        """ Internal helper to build a dense network. """
        config = deepcopy(self.config)
        config['encoder_layer_type'] = 'dense'
        return get_encoder(config, name=name)