Example #1
    def test_fit(self):
        model = keras.models.Sequential()
        model.add(
            TargetedDropout(layer=keras.layers.Dense(units=3,
                                                     activation='tanh'),
                            drop_rate=0.8,
                            target_rate=0.2,
                            drop_patterns=['kernel'],
                            mode=TargetedDropout.MODE_WEIGHT,
                            input_shape=(5, )))
        model.add(
            TargetedDropout(
                layer=keras.layers.Dense(units=2, activation='softmax'),
                drop_rate=0.8,
                target_rate=0.2,
                drop_patterns=['kernel'],
                mode=TargetedDropout.MODE_UNIT,
            ))
        model.compile('adam', 'sparse_categorical_crossentropy')

        x, y, w = self._get_data()
        model.fit(x, y, epochs=50)

        model_path = os.path.join(
            tempfile.gettempdir(),
            'keras_targeted_dropout_%f.h5' % np.random.random())
        model.save(model_path)
        model = keras.models.load_model(
            model_path, custom_objects={'TargetedDropout': TargetedDropout})

        x, y, _ = self._get_data(w)
        predicted = model.predict(x).argmax(axis=-1)
        self.assertLess(np.sum(np.not_equal(y, predicted)), x.shape[0] // 10)
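The _get_data helper is not shown in this example. A hypothetical stand-in that matches how it is called (returning inputs, labels and the weight matrix the labels were generated from, assuming numpy is imported as np as above) could look like this:

    def _get_data(self, w=None):
        num_samples, num_features, num_classes = 1024, 5, 2
        if w is None:
            w = np.random.standard_normal((num_features, num_classes))
        x = np.random.standard_normal((num_samples, num_features))
        # Labels are the argmax of a fixed linear map, so a small network
        # should be able to fit them almost perfectly.
        y = np.argmax(np.dot(x, w), axis=-1)
        return x, y, w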
Example #2
    def test_unit_drop_shape(self):
        model = keras.models.Sequential()
        model.add(
            TargetedDropout(layer=Weight(),
                            drop_rate=0.0,
                            target_rate=0.35,
                            drop_patterns=['kernel'],
                            mode=TargetedDropout.MODE_UNIT,
                            input_shape=(1, )))
        model.compile('sgd', 'mse')

        model_path = os.path.join(
            tempfile.gettempdir(),
            'keras_targeted_dropout_%f.h5' % np.random.random())
        model.save(model_path)
        model = keras.models.load_model(model_path,
                                        custom_objects={
                                            'Weight': Weight,
                                            'TargetedDropout': TargetedDropout,
                                        })

        dropped = model.predict(np.ones((1, 1)))[0]
        col = dropped.shape[-1]
        count = 0
        for c in range(col):
            if np.sum(dropped[:, c]) == 0.0:
                count += 1
        self.assertEqual(35, count)
        rate = np.mean(np.equal(dropped, 0.0))
        self.assertGreater(rate, 0.34)
        self.assertLess(rate, 0.36)
Example #3
def capsulenet_model(embeddings, max_sequence_length, num_words, embedding_dim, labels_index, trainable=False):
    K.clear_session()
    inp = Input(shape=(max_sequence_length,))
    x = Embedding(num_words, embedding_dim, weights=[embeddings], trainable=trainable)(inp)
    x = SpatialDropout1D(rate=0.2)(x)
    # Optional initializers: kernel_initializer=glorot_normal(seed=1230), recurrent_initializer=orthogonal(gain=1.0, seed=1000)
    x = Bidirectional(GRU(40, return_sequences=True))(x)

    # x = Bidirectional(LSTM(40, return_sequences=True))(x)
    x = Capsule(num_capsule=10, dim_capsule=16, routings=4, share_weights=True)(x)
    x = Flatten()(x)
    # avg_pool = GlobalAveragePooling1D()(x)
    # x = GlobalMaxPooling1D()(x)
    
    # x = concatenate([flatt, avg_pool])

    x = Dense(16, activation="relu", kernel_initializer=glorot_normal(seed=1230))(x)
    # x = Dropout(0.5)(x)
    x = TargetedDropout(drop_rate=0.5, target_rate=0.5)(x)
    x = BatchNormalization()(x)

    x = Dense(labels_index, activation="sigmoid")(x)
    model = Model(inputs=inp, outputs=x)
    model.compile(loss='binary_crossentropy', optimizer=Adam(), metrics=[f1])
    model.summary()
    return model
Example #4
def model_lstm_atten(embeddings, max_sequence_length, num_words, embedding_dim,
                     labels_index):

    inp = Input(shape=(max_sequence_length, ))
    x = Embedding(num_words,
                  embedding_dim,
                  weights=[embeddings],
                  trainable=False)(inp)
    x = SpatialDropout1D(0.1)(x)
    x = Bidirectional(CuDNNLSTM(150, return_sequences=True))(x)
    y = Bidirectional(CuDNNGRU(150, return_sequences=True))(x)

    atten_1 = Attention(max_sequence_length)(x)  # skip connect
    atten_2 = Attention(max_sequence_length)(y)
    avg_pool = GlobalAveragePooling1D()(y)
    max_pool = GlobalMaxPooling1D()(y)

    conc = concatenate([atten_1, atten_2, avg_pool, max_pool])
    conc = Dense(256, activation="relu")(conc)
    # conc = Dropout(0.5)(conc)
    conc = TargetedDropout(drop_rate=0.5, target_rate=0.5)(conc)
    outp = Dense(labels_index, activation="softmax")(conc)

    model = Model(inputs=inp, outputs=outp)
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    model.summary()

    return model
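A hypothetical call to the builder above, assuming numpy is imported as np and the custom Attention layer is available; the shapes, the random embedding matrix and the toy batch are placeholders, and the CuDNNLSTM/CuDNNGRU layers additionally require a GPU-enabled TensorFlow backend:

max_sequence_length, num_words, embedding_dim, labels_index = 70, 20000, 300, 6
embeddings = np.random.normal(size=(num_words, embedding_dim))
model = model_lstm_atten(embeddings, max_sequence_length, num_words,
                         embedding_dim, labels_index)

# Toy batch: integer token ids and one-hot labels matching the softmax output.
x_train = np.random.randint(0, num_words, size=(128, max_sequence_length))
y_train = np.eye(labels_index)[np.random.randint(0, labels_index, size=128)]
model.fit(x_train, y_train, batch_size=32, epochs=1)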
Example #5
def get_targeted_dropout_model():
    model = keras.models.Sequential()
    model.add(
        TargetedDropout(input_shape=(28, 28, 1),
                        drop_rate=0.5,
                        target_rate=0.5,
                        name='Input-Dropout'))
    model.add(
        keras.layers.Conv2D(filters=64,
                            kernel_size=3,
                            activation='relu',
                            padding='same',
                            name='Conv-1'))
    model.add(keras.layers.MaxPool2D(pool_size=2, name='Pool-1'))
    model.add(TargetedDropout(drop_rate=0.5, target_rate=0.5,
                              name='Dropout-1'))
    model.add(
        keras.layers.Conv2D(filters=32,
                            kernel_size=3,
                            activation='relu',
                            padding='same',
                            name='Conv-2'))
    model.add(keras.layers.MaxPool2D(pool_size=2, name='Pool-2'))
    model.add(TargetedDropout(drop_rate=0.5, target_rate=0.5,
                              name='Dropout-2'))
    model.add(keras.layers.Flatten(name='Flatten'))
    model.add(keras.layers.Dense(units=256, activation='relu', name='Dense'))
    model.add(
        TargetedDropout(drop_rate=0.5, target_rate=0.5, name='Dense-Dropout'))
    model.add(
        keras.layers.Dense(units=10, activation='softmax', name='Softmax'))
    model.compile(
        optimizer='adam',
        loss='sparse_categorical_crossentropy',
        metrics=['accuracy'],
    )
    return model
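A minimal training sketch for the model above, assuming the standalone keras package with its bundled MNIST loader; the epoch and batch settings are illustrative only:

import numpy as np
import keras

(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
# Add a channel axis and scale to [0, 1] to match the (28, 28, 1) input shape.
x_train = np.expand_dims(x_train, -1).astype('float32') / 255.0
x_test = np.expand_dims(x_test, -1).astype('float32') / 255.0

model = get_targeted_dropout_model()
model.fit(x_train, y_train, validation_split=0.1, epochs=5, batch_size=128)
print(model.evaluate(x_test, y_test))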
Example #6
    def test_target_rate(self):
        model = keras.models.Sequential()
        model.add(
            TargetedDropout(input_shape=(None, None),
                            drop_rate=0.0,
                            target_rate=0.4))
        model.compile(optimizer='adam', loss='mse')
        model_path = os.path.join(
            tempfile.gettempdir(),
            'keras_targeted_dropout_%f.h5' % random.random())
        model.save(model_path)
        model = keras.models.load_model(
            model_path,
            custom_objects={'TargetedDropout': TargetedDropout},
        )

        inputs = np.reshape(np.arange(20), (-1, 1, 4))
        outputs = model.predict(inputs)
        expected = np.array([
            [[0., 0., 0., 0.]],
            [[0., 0., 0., 0.]],
            [[8., 9., 10., 11.]],
            [[12., 13., 14., 15.]],
            [[16., 17., 18., 19.]],
        ])
        self.assertTrue(np.allclose(expected, outputs), (expected, outputs))

        inputs = np.array([
            [[1, 5, 2, 3]],
            [[4, 8, 1, 5]],
            [[5, 2, 5, 1]],
            [[2, 9, 4, 3]],
            [[4, 7, 5, 6]],
        ])
        outputs = model.predict(inputs)
        expected = np.array([
            [[0., 0., 0., 0.]],
            [[4., 8., 0., 5.]],
            [[5., 0., 5., 0.]],
            [[0., 9., 4., 0.]],
            [[4., 7., 5., 6.]],
        ])
        self.assertTrue(np.allclose(expected, outputs), (expected, outputs))

        inputs = np.random.random((100, 10, 10))
        outputs = model.predict(inputs)
        zero_num = np.sum((outputs == 0.0).astype(keras.backend.floatx()))
        actual_rate = zero_num / 10000.0
        self.assertTrue(0.39 < actual_rate < 0.41)
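The expected arrays above can be reproduced with a small standalone numpy sketch. The rule below is inferred from the test fixtures themselves, not from the library source: at inference time, for each feature position, every value no larger than the k-th smallest across the batch is zeroed, with k = int(target_rate * batch_size).

import numpy as np

def targeted_inference(inputs, target_rate):
    flat = inputs.reshape(inputs.shape[0], -1)   # (samples, units)
    k = int(target_rate * flat.shape[0])         # entries zeroed per unit
    thresholds = np.sort(flat, axis=0)[k - 1]    # per-unit k-th smallest value
    return np.where(flat > thresholds, flat, 0.0).reshape(inputs.shape)

# Matches the first expected array in the test above.
print(targeted_inference(np.reshape(np.arange(20), (-1, 1, 4)).astype(float), 0.4))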
Example #7
    def test_masked(self):
        model = keras.models.Sequential()
        model.add(
            keras.layers.Masking(
                batch_size=None,
                input_shape=(None, None),
            ))
        model.add(TargetedDropout(
            drop_rate=0.4,
            target_rate=0.6,
        ))
        model.compile(optimizer='adam', loss='mse')
        model_path = os.path.join(
            tempfile.gettempdir(),
            'keras_targeted_dropout_%f.h5' % random.random())
        model.save(model_path)
        model = keras.models.load_model(
            model_path,
            custom_objects={'TargetedDropout': TargetedDropout},
        )
        model.summary()

        inputs = np.array([
            [[1, 5, 2, 3]],
            [[0, 0, 0, 0]],
            [[5, 2, 5, 1]],
            [[2, 9, 4, 3]],
            [[4, 7, 5, 6]],
        ])
        outputs = model.predict(inputs)
        expected = np.array([
            [[0., 0., 0., 0.]],
            [[0., 0., 0., 0.]],
            [[5., 0., 5., 0.]],
            [[0., 9., 0., 0.]],
            [[4., 7., 5., 6.]],
        ])
        self.assertTrue(np.allclose(expected, outputs), (expected, outputs))
Example #8
def model_HAN(embedding_matrix):
    
    inp = Input(shape=(maxlen,))
    x = Embedding(nb_words, embed_size, weights=[embedding_matrix], trainable=False)(inp)
    x = SpatialDropout1D(0.2)(x)
    x = Bidirectional(LSTM(40, return_sequences=True))(x)
    y = Bidirectional(GRU(40, return_sequences=True))(x)
    
    atten_1 = HAN_AttLayer()(x) # skip connect
    atten_2 = HAN_AttLayer()(y)
    avg_pool = GlobalAveragePooling1D()(y)
    max_pool = GlobalMaxPooling1D()(y)
    
    conc = concatenate([atten_1, atten_2, avg_pool, max_pool])
    conc = Dense(16, activation="relu")(conc)
    # conc = Dropout(0.1)(conc)
    conc = TargetedDropout(drop_rate=0.5, target_rate=0.5)(conc)
    outp = Dense(1, activation="sigmoid")(conc)    

    model = Model(inputs=inp, outputs=outp)
    model.compile(loss='binary_crossentropy', optimizer=Adam(), metrics=[f1])
    
    return model
Example #9
    def test_drop_rate(self):
        model = keras.models.Sequential()
        model.add(
            keras.layers.Lambda(
                function=lambda x: TargetedDropout(
                    drop_rate=0.4,
                    target_rate=0.4,
                )(x, training=True),
                input_shape=(None, None, None),
            ))
        model.compile(optimizer='adam', loss='mse')
        model_path = os.path.join(
            tempfile.gettempdir(),
            'keras_targeted_dropout_%f.h5' % random.random())
        model.save(model_path)
        model = keras.models.load_model(
            model_path,
            custom_objects={'TargetedDropout': TargetedDropout},
        )
        inputs = np.random.random((100, 10, 10, 10))
        outputs = model.predict(inputs)
        zero_num = np.sum((outputs == 0.0).astype(keras.backend.floatx()))
        # With drop_rate=0.4 and target_rate=0.4, roughly 0.4 * 0.4 = 16% of
        # the 100 * 10 * 10 * 10 values should be zero while training is forced on.
        actual_rate = zero_num / 100000.0
        self.assertTrue(0.15 < actual_rate < 0.17)