Example #1
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)

model = NN_Model(n_epochs=nb_epoch, n_batch=batch_size, val_Freq=1)

model.add(Convolution2D(nb_filters, 1, nb_conv, nb_conv, border_mode='full'))
model.add(Activation('relu'))
model.add(Convolution2D(nb_filters, nb_filters, nb_conv, nb_conv))
model.add(Activation('relu'))
model.add(MaxPooling2D(poolsize=(nb_pool, nb_pool)))
model.add(Drop_out(0.25))

model.add(Flatten())
# after the conv and pooling layers, each spatial dimension of the feature
# map is the original image size divided by the pooling factor, and every
# remaining "pixel" carries nb_filters channels (set by the last Convolution2D)
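# e.g., assuming 28x28 inputs (MNIST-sized), nb_conv=3, nb_filters=32 and
# nb_pool=2: the 'full' then 'valid' convolutions return a 28x28 map, so the
# flattened size is 32 * (28 // 2) * (28 // 2) = 6272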
model.add(FC_layer(nb_filters * (shapex // nb_pool) * (shapey // nb_pool), 128))
model.add(Activation('relu'))
model.add(Drop_out(0.5))

model.add(FC_layer(128, nb_classes))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy',
              optimizer='Adadelta',
              mask=False)

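# the two None arguments presumably fill the mask slots of model.train()
# (Example #3 passes sequence masks there); fixed-size image data needs no mask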
model.train(X_train, None, Y_train, X_test, None, Y_test)
#score = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0)
#print('Test score:', score[0])
#print('Test accuracy:', score[1])
Example #2
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)

model = NN_Model(n_epochs=nb_epoch, n_batch=batch_size, val_Freq=1)

model.add(Convolution2D(nb_filters, 1, nb_conv, nb_conv, border_mode='full'))
model.add(Activation('relu'))
model.add(Convolution2D(nb_filters, nb_filters, nb_conv, nb_conv))
model.add(Activation('relu'))
model.add(MaxPooling2D(poolsize=(nb_pool, nb_pool)))
model.add(Drop_out(0.25))

model.add(Flatten())
# after the conv and pooling layers, each spatial dimension of the feature
# map is the original image size divided by the pooling factor, and every
# remaining "pixel" carries nb_filters channels (set by the last Convolution2D)
model.add(FC_layer(nb_filters * (shapex // nb_pool) * (shapey // nb_pool), 128))
model.add(Activation('relu'))
model.add(Drop_out(0.5))

model.add(FC_layer(128, nb_classes))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy', optimizer='Adadelta', mask=False)

model.train(X_train, None, Y_train, X_test, None, Y_test)
#score = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0)
#print('Test score:', score[0])
#print('Test accuracy:', score[1])

Example #3
#### build the model
print('Initializing model...')

mode = 'tr'

model = NN_Model(n_epochs=n_epochs, n_batch=n_batch, snapshot=snapshot_Freq,
                 sample_Freq=sample_Freq, val_Freq=val_Freq, L1_reg=L1_reg, L2_reg=L2_reg)
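# layer names suggest the data flow: word indices -> dense embeddings ->
# GRU over the sequence -> mean pooling across time steps -> dropout ->
# fully connected layer with softmax over the n_y output classes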
model.add(Embedding(n_words, dim_word))
model.add(Drop_out(0.25))
model.add(GRU(n_u, n_h))
model.add(Drop_out())
model.add(Pool('mean'))
model.add(Drop_out())
model.add(FC_layer(n_h, n_y))
model.add(Activation('softmax'))
model.compile(optimizer=optimizer, loss=loss)



filepath = 'save/review3.pkl'

if mode == 'tr':
    # resume from an earlier snapshot if one was saved
    if os.path.isfile(filepath):
        model.load(filepath)
    print('<training data>')
    seq, seq_mask, targets = prepare_full_data_keras(train[0], train[1], n_maxlen)
    print('<validation data>')
    val, val_mask, val_targets = prepare_full_data_keras(valid[0], valid[1], n_maxlen)

    model.train(seq, seq_mask, targets, val, val_mask, val_targets, verbose)
    model.save(filepath)