Example #1
    def __init__(self, units):
        super(MyRNN, self).__init__()

        # transform text to embedding representation
        self.embedding = layers.Embedding(hp.total_words,
                                          hp.embedding_len,
                                          input_length=hp.max_sentence_len)

        # two layer rnn
        # normal rnn
        self.rnn = Sequential([
            layers.SimpleRNN(units=units,
                             dropout=0.5,
                             return_sequences=True,
                             unroll=True),
            layers.SimpleRNN(units=units, dropout=0.5, unroll=True)
        ])

        # # lstm rnn
        # self.rnn = Sequential([
        #     layers.LSTM(units=units, dropout=0.5, return_sequences=True, unroll=True),
        #     layers.LSTM(units=units, dropout=0.5, unroll=True)
        # ])

        # # gru rnn
        # self.rnn = Sequential([
        #     layers.GRU(units=units, dropout=0.5, return_sequences=True, unroll=True),
        #     layers.GRU(units=units, dropout=0.5, unroll=True)
        # ])

        self.fc = layers.Dense(1, activation=tf.nn.sigmoid)
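
A minimal completion sketch for Example #1, assuming MyRNN subclasses keras.Model and that hp is a plain namespace of hyperparameters (neither appears in the excerpt); the call method follows the usual embed -> RNN -> dense path:

import types
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, Sequential

# Assumed hyperparameters; the excerpt references an undefined `hp` object.
hp = types.SimpleNamespace(total_words=10000, embedding_len=100, max_sentence_len=80)

class MyRNN(keras.Model):
    def __init__(self, units):
        super(MyRNN, self).__init__()
        # ... embedding, rnn, and fc exactly as in the excerpt above ...

    def call(self, inputs, training=None):
        x = self.embedding(inputs)          # (batch, len) -> (batch, len, embedding_len)
        x = self.rnn(x, training=training)  # last hidden state: (batch, units)
        return self.fc(x)                   # (batch, 1) sigmoid probability

model = MyRNN(units=64)
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
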
Example #2

    def test_generator_dynamic_shapes(self):

        x = [
            'I think juice is great',
            'unknown is the best language since slicedbread',
            'a a a a a a a',
            'matmul',
            'Yaks are also quite nice',
        ]
        y = [1, 0, 0, 1, 1]

        vocab = {
            word: i + 1
            for i, word in enumerate(
                sorted(set(itertools.chain(*[i.split() for i in x]))))
        }

        def data_gen(batch_size=2):
            np.random.seed(0)
            data = list(zip(x, y)) * 10
            np.random.shuffle(data)

            def pack_and_pad(queue):
                # Pad every sentence in this batch to the batch's own max
                # length, so sequence width varies from batch to batch.
                x = [[vocab[j] for j in i[0].split()] for i in queue]
                pad_len = max(len(i) for i in x)
                x = np.array([i + [0] * (pad_len - len(i)) for i in x])
                y = np.array([i[1] for i in queue])
                del queue[:]
                return x, y[:, np.newaxis]

            queue = []
            for i, element in enumerate(data):
                queue.append(element)
                if not (i + 1) % batch_size:
                    yield pack_and_pad(queue)

            if queue:
                # Last partial batch
                yield pack_and_pad(queue)

        model = testing_utils.get_model_from_layers([
            layers_module.Embedding(input_dim=len(vocab) + 1, output_dim=4),
            layers_module.SimpleRNN(units=1),
            layers_module.Activation('sigmoid')
        ], input_shape=(None,))

        model.compile(loss=losses.binary_crossentropy, optimizer='sgd')
        model.fit(data_gen(), epochs=1, steps_per_epoch=5)
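
Worth noting in Example #2: pack_and_pad pads each batch only to that batch's longest sentence, so consecutive batches can have different widths, which is exactly what input_shape=(None,) accommodates. An illustrative check (hypothetical, reusing the x, y, and vocab defined in the test):

gen = data_gen(batch_size=2)
xb1, yb1 = next(gen)
xb2, yb2 = next(gen)
print(xb1.shape, xb2.shape)   # widths can differ, e.g. (2, 7) then (2, 5)
assert xb1.shape[0] == yb1.shape[0] == 2
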
Example #3
    def __init__(self,
                 units,
                 activation='tanh',
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 recurrent_initializer='orthogonal',
                 bias_initializer='zeros',
                 dropout=0.,
                 return_sequences=False,
                 return_state=False,
                 go_backwards=False,
                 stateful=False,
                 num_layers=1,
                 bidirectional=False,
                 **kwargs):
        super(SimpleRNN, self).__init__(**kwargs)
        assert num_layers == 1, "Only a single layer is supported for the CuDNN RNN in Keras"
        self._rnn = layers.SimpleRNN(
            units=units,
            activation=activation,
            use_bias=use_bias,
            kernel_initializer=kernel_initializer,
            recurrent_initializer=recurrent_initializer,
            bias_initializer=bias_initializer,
            dropout=dropout,
            recurrent_dropout=0.,
            return_sequences=return_sequences,
            return_state=return_state,
            go_backwards=go_backwards,
            stateful=stateful,
            unroll=False)
        if bidirectional:
            self._rnn = layers.Bidirectional(
                self._rnn,
                merge_mode='concat',
            )
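
A hypothetical instantiation of Example #3's wrapper; the excerpt omits the enclosing class and its call/forwarding logic, so this sketch assumes the class subclasses tf.keras.layers.Layer and drives the inner _rnn layer directly:

import tensorflow as tf

rnn = SimpleRNN(units=32, return_sequences=True, bidirectional=True)
x = tf.random.normal([4, 10, 8])  # (batch, time, features)
out = rnn._rnn(x)                 # Bidirectional 'concat': (4, 10, 64)
print(out.shape)
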
Example #4

optimizer = keras.optimizers.Adam(learning_rate=0.0002,
                                  beta_1=0.9,
                                  beta_2=0.999,
                                  epsilon=1e-8)
es = keras.callbacks.EarlyStopping(monitor='val_accuracy',
                                   mode='max',
                                   verbose=1,
                                   patience=4)

# # RNN

# In[92]:

model = Sequential(name="RNN")
# NOTE: Embedding's input_dim must be the vocabulary size (max token index + 1),
# not the sequence length; this line assumes X_train.shape[1] happens to match it.
model.add(layers.Embedding(input_dim=X_train.shape[1], output_dim=64))
model.add(layers.SimpleRNN(128))
model.add(layers.Dense(6, activation="softmax"))

# In[93]:

model.compile(optimizer=optimizer,
              loss="categorical_crossentropy",
              metrics=['accuracy'])

# In[94]:

model.fit(X_train,
          Y_train,
          validation_data=(X_test, Y_test),
          callbacks=[es],
          epochs=50)
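
A hypothetical follow-up cell, mirroring the notebook layout, to report test-set performance once early stopping halts training:

# In[95]:

loss, acc = model.evaluate(X_test, Y_test, verbose=0)
print("test loss %.4f, test accuracy %.4f" % (loss, acc))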