Example 1
    def prep_model(self, module_prep_model, oact='sigmoid'):
        # Input embedding and encoding
        model = Sequential()
        N = B.embedding(model,
                        self.emb,
                        self.vocab,
                        self.s0pad,
                        self.s1pad,
                        self.c['inp_e_dropout'],
                        self.c['inp_w_dropout'],
                        add_flags=self.c['e_add_flags'])

        # Sentence-aggregate embeddings
        final_outputs = module_prep_model(model, N, self.s0pad, self.s1pad,
                                          self.c)

        # Measurement

        if self.c['ptscorer'] == '1':
            # special scoring mode just based on the answer
            # (assuming that the question match is carried over to the answer
            # via attention or another mechanism)
            ptscorer = B.cat_ptscorer
            final_outputs = [final_outputs[1]]
        else:
            ptscorer = self.c['ptscorer']

        kwargs = dict()
        if ptscorer == B.mlp_ptscorer:
            kwargs['sum_mode'] = self.c['mlpsum']
            kwargs['Dinit'] = self.c['Dinit']
        if 'f_add' in self.c:
            for inp in self.c['f_add']:
                model.add_input(inp, input_shape=(1, ))  # assumed scalar
            kwargs['extra_inp'] = self.c['f_add']
        model.add_node(name='scoreS',
                       input=ptscorer(model, final_outputs, self.c['Ddim'], N,
                                      self.c['l2reg'], **kwargs),
                       layer=Activation(oact))
        model.add_output(name='score', input='scoreS')
        return model
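For orientation, the snippet below sketches the kind of configuration dictionary that prep_model reads from self.c. The keys are exactly the ones referenced in the method above; the values are illustrative assumptions only, not the defaults of the source project.

# Illustrative config only -- keys mirror the self.c lookups in prep_model,
# values are placeholders.
c = {
    'inp_e_dropout': 0.5,        # dropout applied to the input embeddings
    'inp_w_dropout': 0.0,        # word-level dropout on the input
    'e_add_flags': True,         # append extra token flags to embeddings
    'ptscorer': B.mlp_ptscorer,  # pair-scoring function; '1' scores the answer alone
    'mlpsum': 'sum',             # sum_mode passed to the MLP scorer
    'Dinit': 'glorot_uniform',   # initialisation for the scorer's dense layers
    'Ddim': 1,                   # width multiplier of the scoring MLP
    'l2reg': 1e-4,               # L2 regularisation strength
    # 'f_add': ['extra_feat'],   # optional scalar side inputs (hypothetical name)
}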
Example 2
def test_TensorBoard():
    import shutil
    import tensorflow as tf
    import keras.backend.tensorflow_backend as KTF
    old_session = KTF._get_session()
    filepath = './logs'
    (X_train, y_train), (X_test,
                         y_test) = get_test_data(nb_train=train_samples,
                                                 nb_test=test_samples,
                                                 input_shape=(input_dim, ),
                                                 classification=True,
                                                 nb_class=nb_class)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    # case 1 Sequential wo accuracy
    with tf.Graph().as_default():
        session = tf.Session('')
        KTF._set_session(session)
        model = Sequential()
        model.add(Dense(nb_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(nb_class, activation='softmax'))
        model.compile(loss='categorical_crossentropy', optimizer='sgd')

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]
        model.fit(X_train,
                  y_train,
                  batch_size=batch_size,
                  validation_data=(X_test, y_test),
                  callbacks=cbks,
                  nb_epoch=2)
        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    # case 2 Sequential w accuracy
    with tf.Graph().as_default():
        session = tf.Session('')
        KTF._set_session(session)
        model = Sequential()
        model.add(Dense(nb_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(nb_class, activation='softmax'))
        model.compile(loss='categorical_crossentropy', optimizer='sgd')

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]
        model.fit(X_train,
                  y_train,
                  batch_size=batch_size,
                  show_accuracy=True,
                  validation_data=(X_test, y_test),
                  callbacks=cbks,
                  nb_epoch=2)
        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    # case 3 Graph
    with tf.Graph().as_default():
        session = tf.Session('')
        KTF._set_session(session)
        model = Graph()
        model.add_input(name='X_vars', input_shape=(input_dim, ))

        model.add_node(Dense(nb_hidden, activation="sigmoid"),
                       name='Dense1',
                       input='X_vars')
        model.add_node(Dense(nb_class, activation="softmax"),
                       name='last_dense',
                       input='Dense1')
        model.add_output(name='output', input='last_dense')
        model.compile(optimizer='sgd', loss={'output': 'mse'})

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]
        model.fit({
            'X_vars': X_train,
            'output': y_train
        },
                  batch_size=batch_size,
                  validation_data={
                      'X_vars': X_test,
                      'output': y_test
                  },
                  callbacks=cbks,
                  nb_epoch=2)
        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    KTF._set_session(old_session)
Example 3
def test_TensorBoard():
    import shutil
    import tensorflow as tf
    import keras.backend.tensorflow_backend as KTF
    old_session = KTF._get_session()
    filepath = './logs'
    (X_train, y_train), (X_test, y_test) = get_test_data(nb_train=train_samples,
                                                         nb_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         nb_class=nb_class)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    # case 1 Sequential wo accuracy
    with tf.Graph().as_default():
        session = tf.Session('')
        KTF._set_session(session)
        model = Sequential()
        model.add(Dense(nb_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(nb_class, activation='softmax'))
        model.compile(loss='categorical_crossentropy', optimizer='sgd')

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]
        model.fit(X_train, y_train, batch_size=batch_size,
                  validation_data=(X_test, y_test), callbacks=cbks, nb_epoch=2)
        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    # case 2 Sequential w accuracy
    with tf.Graph().as_default():
        session = tf.Session('')
        KTF._set_session(session)
        model = Sequential()
        model.add(Dense(nb_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(nb_class, activation='softmax'))
        model.compile(loss='categorical_crossentropy', optimizer='sgd')

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]
        model.fit(X_train, y_train, batch_size=batch_size, show_accuracy=True,
                  validation_data=(X_test, y_test), callbacks=cbks, nb_epoch=2)
        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    # case 3 Graph
    with tf.Graph().as_default():
        session = tf.Session('')
        KTF._set_session(session)
        model = Graph()
        model.add_input(name='X_vars', input_shape=(input_dim, ))

        model.add_node(Dense(nb_hidden, activation="sigmoid"),
                       name='Dense1', input='X_vars')
        model.add_node(Dense(nb_class, activation="softmax"),
                       name='last_dense',
                       input='Dense1')
        model.add_output(name='output', input='last_dense')
        model.compile(optimizer='sgd', loss={'output': 'mse'})

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]
        model.fit({'X_vars': X_train, 'output': y_train},
                  batch_size=batch_size,
                  validation_data={'X_vars': X_test, 'output': y_test},
                  callbacks=cbks, nb_epoch=2)
        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    KTF._set_session(old_session)
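The assertions above only check that the log directory exists. A slightly stricter, optional check (a sketch, assuming the conventional TensorFlow event-file naming) would look for the files the callback actually writes:

import glob
import os

def logs_were_written(log_dir='./logs'):
    # The 'events.out.tfevents*' prefix is the usual name used by the
    # TensorFlow summary writer; treat the pattern as an assumption.
    return len(glob.glob(os.path.join(log_dir, 'events.out.tfevents*'))) > 0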
Example 4
def test_TensorBoard():
    import shutil
    import tensorflow as tf
    import keras.backend.tensorflow_backend as KTF
    old_session = KTF.get_session()
    filepath = './logs'
    (X_train, y_train), (X_test,
                         y_test) = get_test_data(nb_train=train_samples,
                                                 nb_test=test_samples,
                                                 input_shape=(input_dim, ),
                                                 classification=True,
                                                 nb_class=nb_class)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)

    def data_generator(train):
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                yield (X_train[i * batch_size:(i + 1) * batch_size],
                       y_train[i * batch_size:(i + 1) * batch_size])
            else:
                yield (X_test[i * batch_size:(i + 1) * batch_size],
                       y_test[i * batch_size:(i + 1) * batch_size])
            i += 1
            i = i % max_batch_index

    def data_generator_graph(train):
        while 1:
            if train:
                yield {'X_vars': X_train, 'output': y_train}
            else:
                yield {'X_vars': X_test, 'output': y_test}

    # case 1 Sequential

    with tf.Graph().as_default():
        session = tf.Session('')
        KTF.set_session(session)
        model = Sequential()
        model.add(Dense(nb_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(nb_class, activation='softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]

        # fit with validation data
        model.fit(X_train,
                  y_train,
                  batch_size=batch_size,
                  validation_data=(X_test, y_test),
                  callbacks=cbks,
                  nb_epoch=2)

        # fit with validation data and accuracy
        model.fit(X_train,
                  y_train,
                  batch_size=batch_size,
                  validation_data=(X_test, y_test),
                  callbacks=cbks,
                  nb_epoch=2)

        # fit generator with validation data
        model.fit_generator(data_generator(True),
                            len(X_train),
                            nb_epoch=2,
                            validation_data=(X_test, y_test),
                            callbacks=cbks)

        # fit generator without validation data
        model.fit_generator(data_generator(True),
                            len(X_train),
                            nb_epoch=2,
                            callbacks=cbks)

        # fit generator with validation data and accuracy
        model.fit_generator(data_generator(True),
                            len(X_train),
                            nb_epoch=2,
                            validation_data=(X_test, y_test),
                            callbacks=cbks)

        # fit generator without validation data and accuracy
        model.fit_generator(data_generator(True),
                            len(X_train),
                            nb_epoch=2,
                            callbacks=cbks)

        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    # case 2 Graph

    with tf.Graph().as_default():
        session = tf.Session('')
        KTF.set_session(session)
        model = Graph()
        model.add_input(name='X_vars', input_shape=(input_dim, ))

        model.add_node(Dense(nb_hidden, activation="sigmoid"),
                       name='Dense1',
                       input='X_vars')
        model.add_node(Dense(nb_class, activation="softmax"),
                       name='last_dense',
                       input='Dense1')
        model.add_output(name='output', input='last_dense')
        model.compile(optimizer='sgd', loss={'output': 'mse'})

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]

        # fit with validation
        model.fit({
            'X_vars': X_train,
            'output': y_train
        },
                  batch_size=batch_size,
                  validation_data={
                      'X_vars': X_test,
                      'output': y_test
                  },
                  callbacks=cbks,
                  nb_epoch=2)

        # fit wo validation
        model.fit({
            'X_vars': X_train,
            'output': y_train
        },
                  batch_size=batch_size,
                  callbacks=cbks,
                  nb_epoch=2)

        # fit generator with validation
        model.fit_generator(data_generator_graph(True),
                            1000,
                            nb_epoch=2,
                            validation_data={
                                'X_vars': X_test,
                                'output': y_test
                            },
                            callbacks=cbks)

        # fit generator wo validation
        model.fit_generator(data_generator_graph(True),
                            1000,
                            nb_epoch=2,
                            callbacks=cbks)

        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    KTF.set_session(old_session)
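As an aside, data_generator_graph above yields the full arrays on every step, so the samples_per_epoch of 1000 passed to fit_generator is somewhat arbitrary. A batched variant, mirroring data_generator, could look like the illustrative sketch below:

def batched_graph_generator(X, y, batch_size, input_name='X_vars', output_name='output'):
    # Yield fixed-size slices so samples_per_epoch can simply be set to len(X).
    i = 0
    max_batch_index = len(X) // batch_size
    while 1:
        sl = slice(i * batch_size, (i + 1) * batch_size)
        yield {input_name: X[sl], output_name: y[sl]}
        i = (i + 1) % max_batch_index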
Example 5
def test_TensorBoard():
    import shutil
    import tensorflow as tf
    import keras.backend.tensorflow_backend as KTF
    old_session = KTF.get_session()
    filepath = './logs'
    (X_train, y_train), (X_test, y_test) = get_test_data(nb_train=train_samples,
                                                         nb_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         nb_class=nb_class)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)

    def data_generator(train):
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                yield (X_train[i * batch_size: (i + 1) * batch_size], y_train[i * batch_size: (i + 1) * batch_size])
            else:
                yield (X_test[i * batch_size: (i + 1) * batch_size], y_test[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index

    def data_generator_graph(train):
        while 1:
            if train:
                yield {'X_vars': X_train, 'output': y_train}
            else:
                yield {'X_vars': X_test, 'output': y_test}

    # case 1 Sequential

    with tf.Graph().as_default():
        session = tf.Session('')
        KTF.set_session(session)
        model = Sequential()
        model.add(Dense(nb_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(nb_class, activation='softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer='sgd',
                      metrics=['accuracy'])

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]

        # fit with validation data
        model.fit(X_train, y_train, batch_size=batch_size,
                  validation_data=(X_test, y_test), callbacks=cbks, nb_epoch=2)

        # fit with validation data and accuracy
        model.fit(X_train, y_train, batch_size=batch_size,
                  validation_data=(X_test, y_test), callbacks=cbks, nb_epoch=2)

        # fit generator with validation data
        model.fit_generator(data_generator(True), len(X_train), nb_epoch=2,
                            validation_data=(X_test, y_test),
                            callbacks=cbks)

        # fit generator without validation data
        model.fit_generator(data_generator(True), len(X_train), nb_epoch=2,
                            callbacks=cbks)

        # fit generator with validation data and accuracy
        model.fit_generator(data_generator(True), len(X_train), nb_epoch=2,
                            validation_data=(X_test, y_test),
                            callbacks=cbks)

        # fit generator without validation data and accuracy
        model.fit_generator(data_generator(True), len(X_train), nb_epoch=2,
                            callbacks=cbks)

        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    # case 2 Graph

    with tf.Graph().as_default():
        session = tf.Session('')
        KTF.set_session(session)
        model = Graph()
        model.add_input(name='X_vars', input_shape=(input_dim,))

        model.add_node(Dense(nb_hidden, activation="sigmoid"),
                       name='Dense1', input='X_vars')
        model.add_node(Dense(nb_class, activation="softmax"),
                       name='last_dense',
                       input='Dense1')
        model.add_output(name='output', input='last_dense')
        model.compile(optimizer='sgd', loss={'output': 'mse'})

        tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
        cbks = [tsb]

        # fit with validation
        model.fit({'X_vars': X_train, 'output': y_train},
                  batch_size=batch_size,
                  validation_data={'X_vars': X_test, 'output': y_test},
                  callbacks=cbks, nb_epoch=2)

        # fit wo validation
        model.fit({'X_vars': X_train, 'output': y_train},
                  batch_size=batch_size,
                  callbacks=cbks, nb_epoch=2)

        # fit generator with validation
        model.fit_generator(data_generator_graph(True), 1000, nb_epoch=2,
                            validation_data={'X_vars': X_test, 'output': y_test},
                            callbacks=cbks)

        # fit generator wo validation
        model.fit_generator(data_generator_graph(True), 1000, nb_epoch=2,
                            callbacks=cbks)

        assert os.path.exists(filepath)
        shutil.rmtree(filepath)

    KTF.set_session(old_session)
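The session bookkeeping repeated in each case (save the old session, install a fresh one, restore it at the end) could be factored into a small context manager. A hypothetical sketch, using the same KTF module imported in the examples above:

from contextlib import contextmanager

@contextmanager
def fresh_tf_session():
    import tensorflow as tf
    import keras.backend.tensorflow_backend as KTF
    old_session = KTF.get_session()
    with tf.Graph().as_default():
        session = tf.Session('')
        KTF.set_session(session)
        try:
            yield session
        finally:
            # Put the previously active session back, as the tests do by hand.
            KTF.set_session(old_session)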
Example 6
# Layer 8
model.add(Dense(10, init='glorot_normal'))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy', optimizer=RMSprop())
model.fit(X_train[:10],
          Y_train[:10],
          batch_size=1,
          nb_epoch=1,
          verbose=1,
          show_accuracy=True)

# V. GoogleNet Model
model = Graph()
model.add_input(name='n00', input_shape=(1, 28, 28))

# layer 1
model.add_node(Convolution2D(64, 1, 1, activation='relu'),
               name='n11',
               input='n00')
model.add_node(Flatten(), name='n11_f', input='n11')

model.add_node(Convolution2D(96, 1, 1, activation='relu'),
               name='n12',
               input='n00')

model.add_node(Convolution2D(16, 1, 1, activation='relu'),
               name='n13',
               input='n00')
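The snippet stops after the 1x1 branches. A typical inception block would continue roughly as sketched below; the layer names, filter sizes, and concatenation step are illustrative assumptions, not the continuation of the original model.

model.add_node(Convolution2D(128, 3, 3, activation='relu', border_mode='same'),
               name='n22', input='n12')
model.add_node(Convolution2D(32, 5, 5, activation='relu', border_mode='same'),
               name='n23', input='n13')
# Concatenate the parallel branches along the channel axis
# (axis 1 for the (1, 28, 28) channels-first input used above).
model.add_node(Activation('linear'), name='inception_1',
               inputs=['n11', 'n22', 'n23'],
               merge_mode='concat', concat_axis=1)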
Example 7
    except: embedding_weights[index,:] = np.random.rand( 1 , embeddings_dim )
le = preprocessing.LabelEncoder()
le.fit(train_labels + test_labels)
train_labels = le.transform(train_labels)
test_labels = le.transform(test_labels)
print("Classes that are considered in the problem : " + repr(le.classes_))

print ("Method = CNN from the paper 'Convolutional Neural Networks for Sentence Classification'")
np.random.seed(0)
nb_filter = embeddings_dim
model = Graph()
model.add_input(name='input', input_shape=(max_sent_len,), dtype=int)
model.add_node(Embedding(max_features, embeddings_dim, input_length=max_sent_len,
                         mask_zero=False, weights=[embedding_weights]),
               name='embedding', input='input')
model.add_node(Dropout(0.25), name='dropout_embedding', input='embedding')
for n_gram in [3, 5, 7]:
    model.add_node(Convolution1D(nb_filter=nb_filter, filter_length=n_gram,
                                 border_mode='valid', activation='relu',
                                 subsample_length=1, input_dim=embeddings_dim,
                                 input_length=max_sent_len),
                   name='conv_' + str(n_gram), input='dropout_embedding')
    model.add_node(MaxPooling1D(pool_length=max_sent_len - n_gram + 1),
                   name='maxpool_' + str(n_gram), input='conv_' + str(n_gram))
    model.add_node(Flatten(), name='flat_' + str(n_gram), input='maxpool_' + str(n_gram))
model.add_node(Dropout(0.25), name='dropout',
               inputs=['flat_' + str(n) for n in [3, 5, 7]])
model.add_node(Dense(1, input_dim=nb_filter * len([3, 5, 7])), name='dense', input='dropout')
model.add_node(Activation('sigmoid'), name='sigmoid', input='dense')
model.add_output(name='output', input='sigmoid')
if num_classes == 2: model.compile(loss={'output': 'binary_crossentropy'}, optimizer='adam')
else: model.compile(loss={'output': 'categorical_crossentropy'}, optimizer='adam') 
model.fit({'input': train_sequences, 'output': train_labels}, batch_size=32, nb_epoch=30)
results = np.array(model.predict({'input': test_sequences}, batch_size=32)['output'])
if num_classes != 2: results = results.argmax(axis=-1)
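A short, illustrative follow-up (not part of the original snippet): scoring the predictions against the encoded test labels, assuming preprocessing above refers to sklearn.preprocessing.

from sklearn.metrics import accuracy_score

if num_classes == 2:
    # Threshold the sigmoid output for the binary case.
    predicted = (results.ravel() > 0.5).astype(int)
else:
    predicted = results  # already argmax'ed above in the multi-class case
print("Test accuracy : %.4f" % accuracy_score(test_labels, predicted))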