Exemplo n.º 1
0
def run(p="../../../data/atis/atis.pkl", wordembdim=100, innerdim=200, lr=0.05, numbats=100, epochs=20, validinter=1, wreg=0.0003, depth=1):
    """Train and evaluate a SimpleSeqTransducer sequence tagger on the ATIS data.

    :param p:           path to the pickled ATIS dataset, relative to this file
    :param wordembdim:  word embedding dimension
    :param innerdim:    recurrent hidden size (replicated ``depth`` times)
    :param lr:          Adagrad learning rate
    :param numbats:     number of minibatches per epoch
    :param epochs:      number of training epochs
    :param validinter:  validate every ``validinter`` epochs
    :param wreg:        L2 regularization weight
    :param depth:       number of stacked recurrent layers
    """
    # Resolve the dataset path relative to this source file.
    p = os.path.join(os.path.dirname(__file__), p)
    # NOTE(review): file handle is never closed; py2-style pickle.load.
    train, test, dics = pickle.load(open(p))
    word2idx = dics["words2idx"]
    table2idx = dics["tables2idx"]
    label2idx = dics["labels2idx"]
    # Reverse mapping (index -> label string) used by atiseval for reporting.
    label2idxrev = {v: k for k, v in label2idx.items()}
    # Transpose the column-wise pickle layout into per-example tuples
    # (presumably (words, tables, labels) per example — confirm against the pickle).
    train = zip(*train)
    test = zip(*test)
    print "%d training examples, %d test examples" % (len(train), len(test))
    #tup2text(train[0], word2idx, table2idx, label2idx)
    # Padding length = longest word sequence across both splits.
    maxlen = 0
    for tup in train + test:
        maxlen = max(len(tup[0]), maxlen)

    # +2 presumably reserves id 0 for padding plus a one-off id shift —
    # TODO confirm against getdatamatrix's encoding.
    numwords = max(word2idx.values()) + 2
    numlabels = max(label2idx.values()) + 2

    # get training data
    traindata = getdatamatrix(train, maxlen, 0).astype("int32")   # tuple column 0: word ids
    traingold = getdatamatrix(train, maxlen, 2).astype("int32")   # tuple column 2: label ids
    trainmask = (traindata > 0).astype("float32")                 # nonzero = real token, 0 = padding

    # test data
    testdata = getdatamatrix(test, maxlen, 0).astype("int32")
    testgold = getdatamatrix(test, maxlen, 2).astype("int32")
    testmask = (testdata > 0).astype("float32")

    # NOTE(review): sanity check — gold scored against itself; should print a
    # perfect score. Leftover debug (see the commented exit()).
    res = atiseval(testgold-1, testgold-1, label2idxrev); print res#; exit()

    # define model
    innerdim = [innerdim] * depth   # one hidden size per stacked layer
    m = SimpleSeqTransducer(indim=numwords, embdim=wordembdim, innerdim=innerdim, outdim=numlabels)
    '''m = StupidAtis(inpembdim = wordembdim, indim = numwords, outdim = numlabels)
    m = StupidAtisNative(inpembdim=wordembdim, indim=numwords, outdim=numlabels)'''
    #m = StupidAtisScanMod(inpembdim=wordembdim, indim=numwords, outdim=numlabels)
    #m = StupidAtisScanModNative(inpembdim=wordembdim, indim=numwords, outdim=numlabels)

    # training
    '''m.train([traindata, trainmask], traingold).adagrad(lr=lr).grad_total_norm(5.0).seq_cross_entropy().l2(wreg)\
        .split_validate(splits=5, random=True).seq_cross_entropy().seq_accuracy().validinter(validinter)\
        .train(numbats, epochs)'''

    # NOTE(review): trainmask is unused here — only the masked (commented-out)
    # variant above consumes it; prediction below does pass testmask.
    m.train([traindata], traingold).adagrad(lr=lr).grad_total_norm(5.0).seq_cross_entropy().l2(wreg)\
        .split_validate(splits=5, random=True).seq_cross_entropy().seq_accuracy().validinter(validinter)\
        .train(numbats, epochs)

    # predict after training
    testpredprobs = m.predict(testdata, testmask)
    # Argmax over the label axis, shifted by -1 (presumably undoing a +1 id
    # offset for padding — consistent with testgold-1 below).
    testpred = np.argmax(testpredprobs, axis=2)-1
    #testpred = testpred * testmask
    #print np.vectorize(lambda x: label2idxrev[x] if x > -1 else " ")(testpred)

    evalres = atiseval(testpred, testgold-1, label2idxrev); print evalres
Exemplo n.º 2
0
    def test_output_shape(self):
        """SimpleSeqTransducer.predict must return one distribution per time
        step, i.e. an array shaped (batch, sequence length, output vocab).

        Fix: the original also generated a ``traingold`` array that was never
        used by the test; the dead local has been removed.
        """
        # settings
        batsize = 10
        seqlen = 5
        invocsize = 50
        inembdim = 50
        innerdim = 11
        outvocsize = 17

        # data: random word ids — values only need to be in-vocabulary
        traindata = np.random.randint(0, invocsize, (batsize, seqlen))

        # model under test
        m = SimpleSeqTransducer(indim=invocsize, embdim=inembdim,
                                innerdim=innerdim, outdim=outvocsize)

        # predict and verify the output tensor shape
        pred = m.predict(traindata)
        self.assertEqual(pred.shape, (batsize, seqlen, outvocsize))
Exemplo n.º 3
0
    def test_output_shape(self):
        """The transducer's predictions are shaped (batch, steps, out-vocab)."""
        # configuration
        n_batch = 10
        n_steps = 5
        in_vocab = 50
        emb_dim = 50
        hidden = 11
        out_vocab = 17

        # random integer data (gold labels are generated but not asserted on)
        inp = np.random.randint(0, in_vocab, (n_batch, n_steps))
        gold = np.random.randint(0, out_vocab, (n_batch, n_steps))

        # build the model under test
        model = SimpleSeqTransducer(indim=in_vocab, embdim=emb_dim,
                                    innerdim=hidden, outdim=out_vocab)

        # verify the predicted tensor shape
        probs = model.predict(inp)
        self.assertEqual(probs.shape, (n_batch, n_steps, out_vocab))
Exemplo n.º 4
0
def run(p="../../../data/atis/atis.pkl",
        wordembdim=100,
        innerdim=200,
        lr=0.05,
        numbats=100,
        epochs=20,
        validinter=1,
        wreg=0.0003,
        depth=1):
    p = os.path.join(os.path.dirname(__file__), p)
    train, test, dics = pickle.load(open(p))
    word2idx = dics["words2idx"]
    table2idx = dics["tables2idx"]
    label2idx = dics["labels2idx"]
    label2idxrev = {v: k for k, v in label2idx.items()}
    train = zip(*train)
    test = zip(*test)
    print "%d training examples, %d test examples" % (len(train), len(test))
    #tup2text(train[0], word2idx, table2idx, label2idx)
    maxlen = 0
    for tup in train + test:
        maxlen = max(len(tup[0]), maxlen)

    numwords = max(word2idx.values()) + 2
    numlabels = max(label2idx.values()) + 2

    # get training data
    traindata = getdatamatrix(train, maxlen, 0).astype("int32")
    traingold = getdatamatrix(train, maxlen, 2).astype("int32")
    trainmask = (traindata > 0).astype("float32")

    # test data
    testdata = getdatamatrix(test, maxlen, 0).astype("int32")
    testgold = getdatamatrix(test, maxlen, 2).astype("int32")
    testmask = (testdata > 0).astype("float32")

    res = atiseval(testgold - 1, testgold - 1, label2idxrev)
    print res  #; exit()

    # define model
    innerdim = [innerdim] * depth
    m = SimpleSeqTransducer(indim=numwords,
                            embdim=wordembdim,
                            innerdim=innerdim,
                            outdim=numlabels)
    '''m = StupidAtis(inpembdim = wordembdim, indim = numwords, outdim = numlabels)
    m = StupidAtisNative(inpembdim=wordembdim, indim=numwords, outdim=numlabels)'''
    #m = StupidAtisScanMod(inpembdim=wordembdim, indim=numwords, outdim=numlabels)
    #m = StupidAtisScanModNative(inpembdim=wordembdim, indim=numwords, outdim=numlabels)

    # training
    '''m.train([traindata, trainmask], traingold).adagrad(lr=lr).grad_total_norm(5.0).seq_cross_entropy().l2(wreg)\
        .split_validate(splits=5, random=True).seq_cross_entropy().seq_accuracy().validinter(validinter)\
        .train(numbats, epochs)'''

    m.train([traindata], traingold).adagrad(lr=lr).grad_total_norm(5.0).seq_cross_entropy().l2(wreg)\
        .split_validate(splits=5, random=True).seq_cross_entropy().seq_accuracy().validinter(validinter)\
        .train(numbats, epochs)

    # predict after training
    testpredprobs = m.predict(testdata, testmask)
    testpred = np.argmax(testpredprobs, axis=2) - 1
    #testpred = testpred * testmask
    #print np.vectorize(lambda x: label2idxrev[x] if x > -1 else " ")(testpred)

    evalres = atiseval(testpred, testgold - 1, label2idxrev)
    print evalres