# Example 1
# 0
    def __init__(self, config):
        """Build the DPCNN layers: embedding (or reshape), a 1x1 region
        convolution, repeated residual conv blocks, and the classifier head.

        Args:
            config: configuration object; reads ``config.embedding`` and
                the ``config.DPCNN.*`` hyperparameters.
        """
        # BUG FIX: `config` was passed positionally to keras.Model.__init__,
        # which does not accept it; every sibling model calls the no-arg form.
        super(Model, self).__init__()
        self.config = config
        if self.config.embedding.use_embedding:
            self.embedding = EmbeddingsLayer(config.embedding)
        else:
            # Raw inputs are reshaped to (input_length, embedding_dimension).
            self.reshape = keras.layers.Reshape(
                (config.DPCNN.input_length, config.DPCNN.embedding_dimension))

        self.spatial_dropout1d = keras.layers.SpatialDropout1D(
            config.DPCNN.spatial_dropout)
        # 1x1 "region" convolution projecting embeddings to `filters` channels.
        self.conv_1 = keras.layers.Conv1D(
            filters=config.DPCNN.filters, kernel_size=1, padding='SAME',
            kernel_regularizer=keras.regularizers.l2(config.DPCNN.l2))
        self.prelu = keras.layers.PReLU()

        self.first_block = ResCNN(filters=config.DPCNN.filters)
        self.add = keras.layers.Add()
        self.max_pooling = keras.layers.MaxPool1D(pool_size=3, strides=2)

        # `repeat` residual stages; only the last one pools globally.
        # (`globa` mirrors the Repeat layer's own parameter name.)
        self.repeats = [Repeat(filters=config.DPCNN.filters)
                        for _ in range(config.DPCNN.repeat - 1)]
        self.repeats.append(Repeat(filters=config.DPCNN.filters, globa=True))

        self.fc1 = keras.layers.Dense(256)
        self.bn1 = keras.layers.BatchNormalization()
        self.fc_prelu = keras.layers.PReLU()

        self.dropout = keras.layers.Dropout(config.DPCNN.spatial_dropout)
        self.fc = keras.layers.Dense(config.DPCNN.num_classes)
# Example 2
# 0
    def __init__(self, config):
        """Build the TextCNN layers: embedding (or reshape) producing a 4-D
        input, one Conv2D/MaxPool2D branch per kernel size, then a dense head.
        """
        super(Model, self).__init__()
        self.config = config
        if self.config.embedding.use_embedding:
            self.embedding = EmbeddingsLayer(config.embedding)
            self.reshape = keras.layers.Reshape(
                (config.input_length, config.embedding.hidden_size, 1))
            self.embedding_size = config.embedding.hidden_size
        else:
            self.reshape = keras.layers.Reshape(
                (config.input_length, config.TextCNN.embedding_dimension, 1))
            self.embedding_size = config.TextCNN.embedding_dimension

        self.kernel_sizes = config.TextCNN.kernel_sizes
        self.convs = []
        self.pools = []
        for size in self.kernel_sizes:
            # Each branch convolves over the full embedding width, then
            # max-pools across the remaining sequence positions.
            self.convs.append(keras.layers.Conv2D(
                filters=64,
                kernel_size=(size, self.embedding_size),
                strides=1,
                padding='valid',
                activation='relu'))
            self.pools.append(keras.layers.MaxPool2D(
                pool_size=(config.input_length - size + 1, 1),
                padding='valid'))

        self.flatten = keras.layers.Flatten()
        self.fc = keras.layers.Dense(config.num_classes)
# Example 3
# 0
    def __init__(self, config):
        """All convolutional blocks for TextVDCNN.

        4 kinds of conv blocks, whose #feature_map are 64, 128, 256, 512.
        Depth:             9  17 29 49
        ------------------------------
        conv block 512:    2  4  4  6
        conv block 256:    2  4  4  10
        conv block 128:    2  4  10 16
        conv block 64:     2  4  10 16
        First conv. layer: 1  1  1  1
        """
        super(Model, self).__init__()
        # BUG FIX: `self.config` was read (use_embedding check) before it was
        # assigned, raising AttributeError at construction; assign it first.
        self.config = config
        if self.config.embedding.use_embedding:
            self.embedding = EmbeddingsLayer(config.embedding)
        else:
            self.reshape = keras.layers.Reshape(
                (config.TextVDCNN.input_length,
                 config.TextVDCNN.embedding_dimension))
        # Number of convolutions per feature-map size, keyed by total depth
        # (values are ordered to match self.num_kernels below).
        self.vdcnn_num_convs = {
            9: [2, 2, 2, 2],
            17: [4, 4, 4, 4],
            29: [10, 10, 4, 4],
            49: [16, 16, 10, 6],
        }
        self.num_kernels = [64, 128, 256, 512]
        self.vdcnn_depth = config.TextVDCNN.vdcnn_depth

        self.first_conv = keras.layers.Conv1D(filters=64,
                                              kernel_size=3,
                                              strides=1,
                                              padding='SAME',
                                              activation='relu')

        # For each feature-map size: (num_convs - 1) identity blocks followed
        # by one down-sampling conv block.
        self.identity_blocks = []
        self.con_blocks = []
        for i, num_kernel in enumerate(self.num_kernels):
            tmp_identity_blocks = []
            for _ in range(self.vdcnn_num_convs[self.vdcnn_depth][i] - 1):
                tmp_identity_blocks.append(
                    IdentityBlock(filter_num=num_kernel))
            self.identity_blocks.append(tmp_identity_blocks)
            self.con_blocks.append(
                ConvBlock(filter_num=num_kernel,
                          pool_type=self.config.TextVDCNN.pool_type))

        self.relu = tf.keras.layers.ReLU()

        self.top_k = self.config.TextVDCNN.top_k_max_pooling
        self.k_max_pooling = k_max_pooling()
        self.flatten = keras.layers.Flatten()
        self.fc1 = keras.layers.Dense(2048, activation="relu")
        self.fc2 = keras.layers.Dense(2048, activation="relu")
        self.fc = keras.layers.Dense(config.TextVDCNN.num_classes)
# Example 4
# 0
    def __init__(self, config):
        """Build the TextSWEM layers: embedding (or reshape), average and max
        pooling over the sequence, and a softmax classification head.
        """
        super(Model, self).__init__()
        self.config = config
        if self.config.embedding.use_embedding:
            self.embedding = EmbeddingsLayer(config.embedding)
        else:
            self.reshape = keras.layers.Reshape(
                (config.TextSWEM.input_length,
                 config.TextSWEM.embedding_dimension))

        # Simple word-embedding model poolings: mean and max over time steps.
        self.embedding_aver = keras.layers.GlobalAveragePooling1D()
        self.embedding_max = keras.layers.GlobalMaxPool1D()

        self.fc = keras.layers.Dense(config.TextSWEM.num_classes,
                                     activation='softmax')
# Example 5
# 0
    def __init__(self, config):
        """Build the TextRNN layers: embedding (or reshape), a stack of
        (optionally bidirectional) recurrent cells, and a dense head.
        """
        super(Model, self).__init__()
        self.config = config
        if self.config.embedding.use_embedding:
            self.embedding = EmbeddingsLayer(config.embedding)
        else:
            self.reshape = keras.layers.Reshape(
                (config.TextRNN.input_length,
                 config.TextRNN.embedding_dimension))

        # Select the recurrent cell class; anything other than LSTM/GRU
        # falls back to a vanilla SimpleRNN.
        cell_classes = {
            RNNType.LSTM: keras.layers.LSTM,
            RNNType.GRU: keras.layers.GRU,
        }
        layer_cell = cell_classes.get(self.config.TextRNN.rnn_type,
                                      keras.layers.SimpleRNN)

        self.rnn_type = config.TextRNN.rnn_type
        self.num_layers = config.TextRNN.num_layers
        self.bidirectional = config.TextRNN.bidirectional

        def _new_cell():
            # One recurrent layer with L2 regularization; the kernel gets a
            # ten-times-weaker penalty than the recurrent weights.
            return layer_cell(
                config.TextRNN.hidden_dimension,
                use_bias=config.TextRNN.use_bias,
                activation=config.TextRNN.activation,
                kernel_regularizer=keras.regularizers.l2(
                    self.config.TextRNN.l2 * 0.1),
                recurrent_regularizer=keras.regularizers.l2(
                    self.config.TextRNN.l2))

        self.layer_cells = []
        for _ in range(config.TextRNN.num_layers):
            cell = _new_cell()
            if config.TextRNN.bidirectional:
                cell = keras.layers.Bidirectional(cell)
            self.layer_cells.append(cell)

        self.fc = keras.layers.Dense(config.TextRNN.num_classes)
# Example 6
# 0
    def __init__(self, config):
        """Build the TextDCNN layers: embedding (or reshape), one
        BasicConvBlock per kernel-size group, dropout, and a dense head.
        """
        super(Model, self).__init__()
        self.config = config
        if self.config.embedding.use_embedding:
            self.embedding = EmbeddingsLayer(config.embedding)
        else:
            self.reshape = keras.layers.Reshape(
                (config.TextDCNN.input_length,
                 config.TextDCNN.embedding_dimension))

        self.multil_kernel_sizes = config.TextDCNN.kernel_sizes
        # One convolutional block per group of kernel sizes.
        self.convs = [
            BasicConvBlock(input_len_max=config.TextDCNN.input_length,
                           filters=config.TextDCNN.filters,
                           kernel_sizes=sizes)
            for sizes in self.multil_kernel_sizes
        ]

        self.flatten = keras.layers.Flatten()
        self.dropout = keras.layers.Dropout(config.TextDCNN.dropout)
        self.fc = keras.layers.Dense(config.num_classes)
# Example 7
# 0
    def __init__(self, config):
        """Build the TextRCNN layers: embedding (or reshape), stacked
        recurrent cells, then TextCNN-style Conv2D/MaxPool2D branches and a
        dense classification head.
        """
        super(Model, self).__init__()
        self.config = config
        if self.config.embedding.use_embedding:
            self.embedding = EmbeddingsLayer(config.embedding)
        else:
            self.reshape = keras.layers.Reshape(
                (config.TextRCNN.input_length,
                 config.TextRCNN.embedding_dimension))

        # Recurrent cell class; non-LSTM/GRU types fall back to SimpleRNN.
        if self.config.TextRNN.rnn_type == RNNType.LSTM:
            layer_cell = keras.layers.LSTM
        elif self.config.TextRNN.rnn_type == RNNType.GRU:
            layer_cell = keras.layers.GRU
        else:
            layer_cell = keras.layers.SimpleRNN

        self.layer_cells = []
        for _ in range(config.TextRNN.num_layers):
            cell = layer_cell(
                config.TextRNN.hidden_dimension,
                use_bias=config.TextRNN.use_bias,
                activation=config.TextRNN.activation,
                kernel_regularizer=keras.regularizers.l2(
                    self.config.TextRNN.l2 * 0.1),
                recurrent_regularizer=keras.regularizers.l2(
                    self.config.TextRNN.l2))
            if config.TextRNN.bidirectional:
                cell = keras.layers.Bidirectional(cell)
            self.layer_cells.append(cell)

        if config.TextRNN.bidirectional:
            rnn_out_dimension = config.TextRNN.hidden_dimension * 2
        else:
            rnn_out_dimension = config.TextRNN.hidden_dimension
        # NOTE(review): this overwrites the `self.reshape` created in the
        # no-embedding branch above — confirm call() expects that.
        self.reshape = keras.layers.Reshape((rnn_out_dimension, 1, 1))

        self.kernel_sizes = config.TextCNN.kernel_sizes
        self.convs = []
        self.pools = []
        for k_size, n_filters in zip(config.TextCNN.kernel_sizes,
                                     config.TextCNN.filter_sizes):
            # Valid conv over the RNN output, then pool the remaining length.
            self.convs.append(keras.layers.Conv2D(
                filters=n_filters,
                kernel_size=(k_size, 1),
                strides=1,
                padding='valid',
                activation='relu'))
            self.pools.append(keras.layers.MaxPool2D(
                pool_size=(config.TextCNN.input_length - k_size + 1, 1),
                padding='valid'))

        self.flatten = keras.layers.Flatten()

        self.fc = keras.layers.Dense(config.TextCNN.num_classes)