Example #1
# Keras 1.x API; assumes `layers` is keras.layers.
def shareable_dilated_conv_layers(n_channels, dilation_rate):
    # Filter length grows with the dilation rate (2*d + 1 taps).
    extent = 1 + (2 * dilation_rate)
    # Return the layers unapplied, so the same instances (and hence the
    # same weights) can be applied to several input tensors.
    shared_cnv = layers.AtrousConv1D(n_channels,
                                     extent,
                                     atrous_rate=dilation_rate,
                                     border_mode="same")
    # mode=2 batch normalization uses per-batch statistics, which makes
    # the layer safe to share across calls.
    shared_bnm = layers.BatchNormalization(mode=2)
    return shared_cnv, shared_bnm
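
Since the layers are returned before being applied, one set of weights can serve several inputs. A minimal, hypothetical usage sketch (Keras 1.x functional API; the input shapes and dilation rate below are illustrative assumptions, not from the source):

from keras import layers
from keras.models import Model

# Two illustrative sequence inputs of the same shape.
fwd = layers.Input(shape=(100, 16))
rev = layers.Input(shape=(100, 16))

shared_cnv, shared_bnm = shareable_dilated_conv_layers(16, dilation_rate=2)
# Applying the same layer objects to both tensors shares their weights.
fwd_out = shared_bnm(shared_cnv(fwd))
rev_out = shared_bnm(shared_cnv(rev))
model = Model(input=[fwd, rev], output=[fwd_out, rev_out])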
Example #2
    # Bottleneck residual unit (Keras 1.x API; assumes kl = keras.layers
    # and kr = keras.regularizers in the enclosing module).
    def _res_unit(self, inputs, nb_filter, size=3, stride=1, atrous=1,
                  stage=1, block=1):

        name = '%02d-%02d/' % (stage, block)
        id_name = '%sid_' % (name)
        res_name = '%sres_' % (name)

        # Residual branch

        # 1x1 conv: reduce channels (with optional striding to down-sample)
        x = kl.BatchNormalization(name=res_name + 'bn1')(inputs)
        x = kl.Activation('relu', name=res_name + 'act1')(x)
        w_reg = kr.WeightRegularizer(l1=self.l1_decay, l2=self.l2_decay)
        x = kl.Conv1D(nb_filter[0], 1,
                      name=res_name + 'conv1',
                      subsample_length=stride,
                      init=self.init,
                      W_regularizer=w_reg)(x)

        # Dilated (atrous) conv of kernel length `size`
        x = kl.BatchNormalization(name=res_name + 'bn2')(x)
        x = kl.Activation('relu', name=res_name + 'act2')(x)
        w_reg = kr.WeightRegularizer(l1=self.l1_decay, l2=self.l2_decay)
        x = kl.AtrousConv1D(nb_filter[1], size,
                            atrous_rate=atrous,
                            name=res_name + 'conv2',
                            border_mode='same',
                            init=self.init,
                            W_regularizer=w_reg)(x)

        # 1x1 conv: restore/expand the channel count
        x = kl.BatchNormalization(name=res_name + 'bn3')(x)
        x = kl.Activation('relu', name=res_name + 'act3')(x)
        w_reg = kr.WeightRegularizer(l1=self.l1_decay, l2=self.l2_decay)
        x = kl.Conv1D(nb_filter[2], 1,
                      name=res_name + 'conv3',
                      init=self.init,
                      W_regularizer=w_reg)(x)

        # Identity branch: project with a strided 1x1 conv whenever the
        # channel count or temporal resolution differs from the residual branch
        if nb_filter[-1] != inputs._keras_shape[-1] or stride > 1:
            w_reg = kr.WeightRegularizer(l1=self.l1_decay, l2=self.l2_decay)
            identity = kl.Conv1D(nb_filter[2], 1,
                                 name=id_name + 'conv1',
                                 subsample_length=stride,
                                 init=self.init,
                                 W_regularizer=w_reg)(inputs)
        else:
            identity = inputs

        x = kl.merge([identity, x], name=name + 'merge', mode='sum')

        return x
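
A hedged sketch of how a bottleneck unit like this is typically stacked inside the same model class (the filter counts, strides, and dilation rates below are illustrative assumptions, not taken from the source):

        # Hypothetical stacking: a strided unit at the stage boundary halves
        # the temporal resolution; dilated units then widen the receptive field.
        x = self._res_unit(x, [32, 32, 128], stride=2, stage=1, block=1)
        x = self._res_unit(x, [32, 32, 128], atrous=2, stage=1, block=2)
        x = self._res_unit(x, [32, 32, 128], atrous=4, stage=1, block=3)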
Example #3
# Keras 1.x API; assumes `layers` is keras.layers.
def dilated_conv_block(x, dilation_rate, skip_inputs=None):
    # Filter length grows with the dilation rate (2*d + 1 taps).
    extent = 1 + (2 * dilation_rate)
    # Keep the same channel count as the incoming tensor.
    n_channels = int(x.get_shape()[-1])
    cnv = layers.AtrousConv1D(n_channels,
                              extent,
                              atrous_rate=dilation_rate,
                              border_mode="same")(x)
    bnm = layers.BatchNormalization()(cnv)
    act = layers.LeakyReLU(0.2)(bnm)
    if skip_inputs is None:
        return act
    else:
        # Also return the element-wise sum of the earlier skip tensors
        # and this block's output.
        return act, layers.merge(skip_inputs + [act], mode="sum")
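
A hypothetical usage sketch (Keras 1.x; the input shape and dilation schedule are illustrative assumptions): stack blocks with geometrically growing dilation rates, collect their outputs as skip connections, and let the final block return the summed skip path, WaveNet-style.

from keras import layers

x = layers.Input(shape=(1024, 32))
out = x
skips = []
for rate in (1, 2, 4, 8):
    out = dilated_conv_block(out, rate)
    skips.append(out)
# The final block also returns the element-wise sum of all skip tensors.
out, skip_sum = dilated_conv_block(out, 16, skip_inputs=skips)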