Example no. 1
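# NOTE: chunks() is not defined anywhere in this snippet. The helper below is only a
# minimal sketch of what it is assumed to do (split X and Y into parallel lists of
# slices of at most `size` sequences), added so the example is self-contained.
def chunks(X, Y, size):
    batches_X = [X[i:i + size] for i in range(0, len(X), size)]
    batches_Y = [Y[i:i + size] for i in range(0, len(Y), size)]
    return batches_X, batches_Y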
import os

# imports for the Keras 0.x API used in this snippet
# (TimeDistributedDense / 'time_distributed_softmax')
from keras.models import Sequential
from keras.layers.core import Activation, TimeDistributedDense
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM
from keras.optimizers import RMSprop
from keras.callbacks import ModelCheckpoint

# leave out possible long anomalies by dropping the last 3% of the training data
X_train = X_train[:int(len(X_train) * 0.97)]
Y_train = Y_train[:int(len(Y_train) * 0.97)]
# split the training data into chunks of at most 30000 sequences (see the sketch above)
batches_X, batches_Y = chunks(X_train, Y_train, 30000)

print('Building model...')
# Keras 0.x-style model: Embedding -> stacked LSTMs -> per-timestep softmax over max_features_Y outputs
model = Sequential()
model.add(Embedding(max_features_X, embedding_size, mask_zero=True))
for l in range(nb_layers):
    # the first LSTM reads the embedding output; deeper layers read the previous layer's output
    input_dim = embedding_size if l == 0 else hidden_size
    model.add(LSTM(input_dim, hidden_size, return_sequences=True))
# project each timestep onto the output vocabulary and normalise it with a softmax
model.add(TimeDistributedDense(hidden_size, max_features_Y))
model.add(Activation('time_distributed_softmax'))

# resume from a previous checkpoint if one exists
if os.path.exists(fdir + '/weights.hdf5'):
    model.load_weights(fdir + '/weights.hdf5')
    print('Loaded weights from ' + fdir + '/weights.hdf5')
rmsprop = RMSprop(lr=0.0002, rho=0.99, epsilon=1e-8, clipnorm=5)
# pass the configured optimizer object (the string 'rmsprop' would ignore the settings above)
model.compile(loss='categorical_crossentropy', optimizer=rmsprop)

if mode == 'train':
    # save a checkpoint after every epoch, not only the best one
    checkpointer = ModelCheckpoint(filepath=fdir + "/weights.hdf5", verbose=1, save_best_only=False)
    # LossHistory and Sample are project-specific callbacks defined elsewhere in the project
    history = LossHistory()
    sample = Sample()
    
    print("Training...")
    
    for e in range(nb_epoch):
        print("epoch %d" % e)
        # walk the chunks in parallel: batches_Y[i] holds the targets for batches_X[i]
        for i, batch in enumerate(batches_X):
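            # NOTE: the original snippet is truncated at this point. The per-batch step
            # below is only a sketch under that assumption, not the author's code: it
            # runs one fit() pass over each chunk with the checkpoint/history/sampling
            # callbacks created above.
            X_batch, Y_batch = batch, batches_Y[i]
            model.fit(X_batch, Y_batch,
                      batch_size=128,  # placeholder value; the real batch size is not shown
                      nb_epoch=1,
                      callbacks=[checkpointer, history, sample])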