Example 1
    def _construct_model(self):

        # initialize dictionaries for holding indices of subnetworks
        self._layer_inds, self._tensor_inds = {}, {}

        # construct earlier parts of the model
        self._construct_inputs()
        self._construct_Phi()
        self._construct_latent()
        self._construct_F()

        # get output layers
        out_layer = Dense(self.output_dim, name=self._proc_name('output'))
        act_layer = _get_act_layer(self.output_act)
        self._layers.extend([out_layer, act_layer])

        # append output tensors
        self._tensors.append(out_layer(self.tensors[-1]))
        self._tensors.append(act_layer(self.tensors[-1]))

        # construct a new model
        self._model = Model(inputs=self.inputs, outputs=self.output)

        # compile model
        self._compile_model()
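
The Phi, latent, and F sub-networks referenced above are built by private helpers that are not shown in this excerpt. As a rough illustration only, the stand-alone sketch below shows the deep-sets style pattern those names suggest: a per-particle Phi network applied through TimeDistributed, a sum over particles into a latent vector, and an event-level F network followed by the output layer and activation. Every layer size, input shape, and compile option here is an assumed example value, not the library's actual configuration.

# Illustrative deep-sets sketch (assumed sizes/shapes, not the actual sub-network code)
from tensorflow.keras.layers import Input, Dense, Lambda, TimeDistributed
from tensorflow.keras.models import Model
import tensorflow.keras.backend as K

particles = Input(shape=(None, 4))                               # variable-length particle list
phi = TimeDistributed(Dense(64, activation='relu'))(particles)   # per-particle Phi network
latent = Lambda(lambda x: K.sum(x, axis=1))(phi)                 # sum over particles -> latent space
F = Dense(64, activation='relu')(latent)                         # event-level F network
output = Dense(2, activation='softmax', name='output')(F)        # output layer + activation
model = Model(inputs=particles, outputs=output)
model.compile(optimizer='adam', loss='categorical_crossentropy')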
Example 2
    def _construct_model(self):

        # get an input tensor
        self._input = Input(shape=(self.input_dim,), name=self._proc_name('input'))

        # get potential names of layers
        names = [self._proc_name('dense_'+str(i)) for i in range(len(self.dense_sizes))]

        # construct layers and tensors
        self._layers, tensors = construct_dense(self._input, self.dense_sizes,
                                                acts=self.acts, k_inits=self.k_inits,
                                                dropouts=self.dropouts, l2_regs=self.l2_regs,
                                                names=names)
        self._tensors = [self._input] + tensors

        # get output layers
        out_layer = Dense(self.output_dim, name=self._proc_name('output'))
        act_layer = _get_act_layer(self.output_act)
        self._layers.extend([out_layer, act_layer])

        # append output tensors
        self._tensors.append(out_layer(self._tensors[-1]))
        self._tensors.append(act_layer(self._tensors[-1]))
        self._output = self._tensors[-1]

        # construct a new model
        self._model = Model(inputs=self._input, outputs=self._output)

        # compile model
        self._compile_model()
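
For reference, here is a stand-alone sketch of the kind of model this method assembles, written directly with the Keras functional API. The input dimension, dense sizes, output dimension, activations, and compile options are illustrative assumptions standing in for the instance attributes (self.input_dim, self.dense_sizes, and so on).

# Illustrative stand-alone equivalent (assumed sizes/options, not the class's actual values)
from tensorflow.keras.layers import Activation, Dense, Input
from tensorflow.keras.models import Model

inp = Input(shape=(8,), name='input')                 # stands in for self.input_dim
x = inp
for i, size in enumerate([100, 100]):                 # stands in for self.dense_sizes
    x = Dense(size, kernel_initializer='he_uniform', name='dense_' + str(i))(x)
    x = Activation('relu')(x)
out = Activation('softmax')(Dense(2, name='output')(x))   # output layer + activation
model = Model(inputs=inp, outputs=out)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])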
Example 3
def construct_dense(input_tensor, sizes,
                    acts='relu', k_inits='he_uniform',
                    dropouts=0., l2_regs=0.,
                    names=None):
    """"""
    
    # repeat options if singletons
    acts, k_inits, names = iter_or_rep(acts), iter_or_rep(k_inits), iter_or_rep(names)
    dropouts, l2_regs = iter_or_rep(dropouts), iter_or_rep(l2_regs)

    # lists of layers and tensors
    layers, tensors = [], [input_tensor]

    # iterate to make specified layers
    z = zip(sizes, acts, k_inits, dropouts, l2_regs, names)
    for s, act, k_init, dropout, l2_reg, name in z:

        # get layers and append them to list
        kwargs = ({'kernel_regularizer': l2(l2_reg), 'bias_regularizer': l2(l2_reg)} 
                  if l2_reg > 0. else {})
        dense_layer = Dense(s, kernel_initializer=k_init, name=name, **kwargs)
        act_layer = _get_act_layer(act)
        layers.extend([dense_layer, act_layer])

        # get tensors and append them to list
        tensors.append(dense_layer(tensors[-1]))
        tensors.append(act_layer(tensors[-1]))

        # apply dropout if specified
        if dropout > 0.:
            dr_name = None if name is None else '{}_dropout'.format(name)
            layers.append(Dropout(dropout, name=dr_name))
            tensors.append(layers[-1](tensors[-1]))

    return layers, tensors[1:]
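
A minimal usage sketch for construct_dense, assuming the Keras names it relies on (Dense, Dropout, l2) and the helpers iter_or_rep and _get_act_layer are in scope as in the library; the input dimension, layer sizes, dropout, and regularization strength are example choices.

from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model

inp = Input(shape=(16,))
layers, tensors = construct_dense(inp, [64, 64], acts='relu', dropouts=0.1,
                                  l2_regs=1e-4, names=['dense_0', 'dense_1'])
model = Model(inputs=inp, outputs=tensors[-1])        # last tensor is the final output
model.summary()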
Example 4
def construct_distributed_dense(input_tensor, sizes, acts='relu', k_inits='he_uniform',
                                names=None, l2_regs=0.):
    """"""

    # repeat options if singletons
    acts, k_inits, names = iter_or_rep(acts), iter_or_rep(k_inits), iter_or_rep(names)
    l2_regs = iter_or_rep(l2_regs)
    
    # lists of layers and tensors
    layers, tensors = [], [input_tensor]

    # iterate over specified layers
    for s, act, k_init, name, l2_reg in zip(sizes, acts, k_inits, names, l2_regs):
        
        # define a dense layer that will be applied through time distributed
        kwargs = {} 
        if l2_reg > 0.:
            kwargs.update({'kernel_regularizer': l2(l2_reg), 'bias_regularizer': l2(l2_reg)})
        d_layer = Dense(s, kernel_initializer=k_init, **kwargs)

        # get layers and append them to list
        tdist_layer = TimeDistributed(d_layer, name=name)
        act_layer = _get_act_layer(act)
        layers.extend([tdist_layer, act_layer])

        # get tensors and append them to list
        tensors.append(tdist_layer(tensors[-1]))
        tensors.append(act_layer(tensors[-1]))

    return layers, tensors
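
A minimal usage sketch for construct_distributed_dense under the same assumptions (Keras imports plus the iter_or_rep and _get_act_layer helpers in scope); the per-particle feature count and layer sizes are illustrative.

from tensorflow.keras.layers import Input

particles = Input(shape=(None, 4))                    # (batch, n_particles, n_features)
layers, tensors = construct_distributed_dense(particles, [100, 100], acts='relu',
                                              names=['tdist_0', 'tdist_1'])
per_particle_output = tensors[-1]                     # shape (batch, n_particles, 100)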