Example #1
0
def construct_distributed_dense(input_tensor,
                                sizes,
                                acts='relu',
                                k_inits='he_uniform',
                                names=None):
    """Stack TimeDistributed Dense layers on top of ``input_tensor``.

    Parameters
    ----------
    input_tensor : tensor
        Input to the first layer; presumably rank 3 (batch, time, features)
        since layers are applied through TimeDistributed — TODO confirm.
    sizes : iterable of int
        Output dimension of each Dense layer; its length fixes the number
        of layers built.
    acts : str/callable or iterable thereof
        Activation(s) applied after each Dense layer; a singleton is
        repeated for every layer via ``iter_or_rep``.
    k_inits : str or iterable of str
        Kernel initializer(s) for the Dense layers; singleton repeated.
    names : str or iterable of str or None
        Name(s) for the TimeDistributed layers; singleton repeated.

    Returns
    -------
    list of tensors
        ``[input_tensor, tdist_0, act_0, tdist_1, act_1, ...]`` — each
        layer contributes its pre-activation and activated tensor.
    """

    # repeat options if singletons so they zip cleanly against sizes
    acts, k_inits, names = iter_or_rep(acts), iter_or_rep(
        k_inits), iter_or_rep(names)

    # list of tensors, seeded with the input
    tensors = [input_tensor]

    # iterate over specified layers; zip stops at the shortest iterable,
    # which is sizes since the options are (possibly) infinite repeaters
    for s, act, k_init, name in zip(sizes, acts, k_inits, names):

        # define a dense layer that will be applied through time distributed
        d_layer = Dense(s, kernel_initializer=k_init)

        # apply through TimeDistributed to the most recent tensor, then
        # record both the raw and activated outputs
        tdist_tensor = TimeDistributed(d_layer, name=name)(tensors[-1])
        tensors.extend([tdist_tensor, _apply_act(act, tdist_tensor)])

    return tensors
def make_dense_layers(sizes, input_layer=None, input_shape=None, activations='relu', dropouts=0., l2_regs=0., k_inits='he_uniform'):
    """Build a chain of Dense layers, optionally with dropout and L2 regularization.

    Parameters
    ----------
    sizes : iterable of int
        Output dimension of each Dense layer.
    input_layer : tensor or None
        Existing tensor to build on. Ignored if ``input_shape`` is given.
    input_shape : tuple or None
        If given, a fresh ``Input`` layer of this shape is created and used.
    activations, dropouts, l2_regs, k_inits
        Per-layer options; singletons are repeated for every layer via
        ``iter_or_rep``.

    Returns
    -------
    list of tensors
        ``[input, layer_0, layer_1, ...]`` where each entry is the (activated,
        possibly dropout-wrapped) output of the corresponding Dense layer.

    Raises
    ------
    ValueError
        If neither ``input_layer`` nor ``input_shape`` is provided.
    """

    # process options: repeat singletons so they zip against sizes
    activations, dropouts = iter_or_rep(activations), iter_or_rep(dropouts)
    l2_regs, k_inits = iter_or_rep(l2_regs), iter_or_rep(k_inits)

    # input_shape takes precedence: construct a new Input layer from it
    if input_shape is not None:
        input_layer = Input(shape=input_shape)

    # fail early with a clear message instead of crashing inside Dense(None)
    if input_layer is None:
        raise ValueError('must provide either input_layer or input_shape')

    # a list to store the layers
    dense_layers = [input_layer]

    # iterate over specified dense layers
    for s, act, k_init, dropout, l2_reg in zip(sizes, activations, k_inits,
                                               dropouts, l2_regs):

        # construct variable argument dict; regularize both kernel and bias
        kwargs = {'kernel_initializer': k_init}
        if l2_reg > 0.:
            kwargs.update({'kernel_regularizer': l2(l2_reg), 'bias_regularizer': l2(l2_reg)})

        # a new dense layer applied to the previous one, then activated
        new_layer = _apply_act(act, Dense(s, **kwargs)(dense_layers[-1]))

        # apply dropout (skipped entirely if dropout is zero)
        if dropout > 0.:
            new_layer = Dropout(dropout)(new_layer)

        # append to list so the next layer chains onto it
        dense_layers.append(new_layer)

    return dense_layers
Example #3
0
    def _construct_Phi(self):
        """Build the per-particle (Phi) stack of TimeDistributed Dense layers."""

        # seed the Phi layer list with the last input tensor
        self._Phi = [self.inputs[-1]]

        # build one TimeDistributed Dense (plus activation) per spec
        layer_specs = zip(self.Phi_sizes, self.Phi_acts, self.Phi_k_inits)
        for idx, (size, activation, init) in enumerate(layer_specs):

            # dense layer applied through time distributed, chained onto
            # the most recent Phi tensor
            td_tensor = TimeDistributed(
                Dense(size, kernel_initializer=init),
                name=self._proc_name('tdist_' + str(idx)))(self._Phi[-1])

            # record both the raw and activated tensors
            self._Phi += [td_tensor, _apply_act(activation, td_tensor)]
Example #4
0
    def _construct_model(self):
        """Assemble the full network, wrap it in a Model, and compile it."""

        # build each network section in dependency order
        for build_section in (self._construct_inputs, self._construct_Phi,
                              self._construct_latent, self._construct_F):
            build_section()

        # final dense layer on top of the last backend tensor, followed
        # by the configured output activation
        dense_out = Dense(self.output_dim,
                          name=self._proc_name('output'))(self._F[-1])
        self._output = _apply_act(self.output_act, dense_out)

        # wrap inputs/outputs into a fresh Model and compile it
        self._model = Model(inputs=self.inputs, outputs=self.output)
        self._compile_model()
Example #5
0
    def _construct_F(self):
        """Build the backend (F) stack of Dense layers with optional dropout."""

        # seed the backend list with the last latent tensor
        self._F = [self.latent[-1]]

        # one Dense + activation (+ optional Dropout) per backend spec
        specs = zip(self.F_sizes, self.F_acts, self.F_k_inits, self.F_dropouts)
        for idx, (size, activation, init, drop_rate) in enumerate(specs):

            # dense layer chained onto the most recent backend tensor
            dense_out = Dense(size, kernel_initializer=init,
                              name=self._proc_name('dense_' + str(idx)))(self._F[-1])
            activated = _apply_act(activation, dense_out)
            self._F += [dense_out, activated]

            # dropout layer only when a positive rate was requested
            if drop_rate > 0.:
                self._F.append(
                    Dropout(drop_rate,
                            name=self._proc_name('dropout_' + str(idx)))(activated))
Example #6
0
def construct_dense(input_tensor,
                    sizes,
                    acts='relu',
                    k_inits='he_uniform',
                    dropouts=0.,
                    l2_regs=0.,
                    names=None):
    """Stack Dense layers (with optional dropout/L2) on top of ``input_tensor``.

    Parameters
    ----------
    input_tensor : tensor
        Input to the first Dense layer.
    sizes : iterable of int
        Output dimension of each Dense layer; its length fixes the number
        of layers built.
    acts, k_inits, dropouts, l2_regs, names
        Per-layer options; singletons are repeated for every layer via
        ``iter_or_rep``. An ``l2_reg > 0`` regularizes both kernel and
        bias; a ``dropout > 0`` appends a Dropout layer.

    Returns
    -------
    list of tensors
        ``[input_tensor, dense_0, act_0, (dropout_0,) dense_1, ...]`` —
        each layer contributes its pre-activation, activated, and (when
        requested) dropout tensors.
    """

    # repeat options if singletons so they zip cleanly against sizes
    acts, k_inits = iter_or_rep(acts), iter_or_rep(k_inits)
    dropouts, l2_regs = iter_or_rep(dropouts), iter_or_rep(l2_regs)
    names = iter_or_rep(names)

    # list of tensors, seeded with the input
    tensors = [input_tensor]

    # iterate to make specified layers
    z = zip(sizes, acts, k_inits, dropouts, l2_regs, names)
    for s, act, k_init, dropout, l2_reg, name in z:

        # make new dense layer; add L2 regularization on kernel and bias
        # only when a positive coefficient was requested
        kwargs = {}
        if l2_reg > 0.:
            kwargs.update({
                'kernel_regularizer': l2(l2_reg),
                'bias_regularizer': l2(l2_reg)
            })
        d_tensor = Dense(s, kernel_initializer=k_init, name=name,
                         **kwargs)(tensors[-1])

        # record both the raw and activated tensors
        tensors.extend([d_tensor, _apply_act(act, d_tensor)])

        # apply dropout if specified, naming it after the dense layer
        if dropout > 0.:
            dr_name = None if name is None else '{}_dropout'.format(name)
            tensors.append(Dropout(dropout, name=dr_name)(tensors[-1]))

    return tensors