Example #1
        def testf(trainX, trainY):  # header restored from truncation; name inferred from the testf closure returned below
            feed_dict = {stories[0]: trainX[0],
                         queries[0]: trainX[1],
                         stories[1]: trainX[2],
                         queries[1]: trainX[3],
                         answers[0]: trainY[0],
                         answers[1]: trainY[1]}
            return sess.run(loss_op, feed_dict=feed_dict)

        def predictf(trainX):
            feed_dict = {stories[0]: trainX[0],
                         queries[0]: trainX[1],
                         stories[1]: trainX[2],
                         queries[1]: trainX[3]}
            return sess.run(logits, feed_dict=feed_dict)
        
        def savef(filepath):
            return modelsaver.save(sess, filepath)
        
        def restoref(filepath):
            modelsaver.restore(sess, filepath)

    return model(fitf=fitf, testf=testf, predictf=predictf, savef=savef, restoref=restoref)
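The model(...) call above bundles the session-bound closures into a single object whose definition lives elsewhere in the project. A minimal sketch consistent with this call site (the field names come from the keyword arguments above; the namedtuple itself is an assumption, not the author's actual definition):

import collections

# hypothetical container matching the model(fitf=..., testf=..., ...) call above
model = collections.namedtuple(
    'model', ['fitf', 'testf', 'predictf', 'savef', 'restoref'])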

ver, qas, repeat = handlesysargs('en/hn', None, 10)
#embedding_matrix, word_idx = pre_train_embedding(EMBEDDING_SIZE, ver)

def wmethod():
    global word_idx
    return word_idx
evaluate_model(compile_model, ver, qas, pad=1, wmethod='concat', flatten=0, repeat=repeat, E2E=1);
Example #2
    
    # Using last C as W per adjacent weight tying
    # func = lambda x:tf.matmul(x, tf.transpose(embedlayer.get_weights()[0], [1,0]))
    # dl = Lambda(func)(newu)
    
    pred = Activation('softmax')(dl)
    
    model = Model(input=[story_input, query_input], output=[pred])
    
    # opt = Adam(lr=0.001,
    #            beta_1=0.9,
    #            beta_2=0.999,
    #            epsilon=1e-08,
    #            decay=0.0)
    
    opt = SGD(lr=0.0,  # placeholder rate; LearningRateScheduler(step_decay) sets the actual value each epoch
              momentum=0.0,
              decay=0.0,
              nesterov=False)
    
    model.compile(optimizer=opt,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model, [LearningRateScheduler(step_decay)]
    # return model, None
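
step_decay is referenced here but defined elsewhere in the project. As a rough illustration only, a step-decay schedule compatible with LearningRateScheduler might look like the sketch below; the constants are assumptions, not the author's values. Keras calls the function once per epoch and applies the returned value to the SGD optimizer, which is why lr is initialised to 0.0 above.

import math

def step_decay(epoch):
    # hypothetical schedule: start at 0.01 and halve the rate every 25 epochs
    initial_lr, drop, epochs_drop = 0.01, 0.5, 25
    return initial_lr * math.pow(drop, math.floor(epoch / epochs_drop))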

ver = 'en'
qas = None
ver, qas, repeat = handlesysargs(ver, qas)
evaluate_model(compile_model, ver, qas, pad=1, wmethod=None, flatten=0, word=0, repeat=repeat);