Example #1
    
    # seed every slot in the batch with the same starting recipe text
    recipes     = [name for _ in range(batchSize)]
    # one sampling temperature per batch slot, from conservative to diverse
    div         = np.linspace(0.2, 1., batchSize)
    startLen    = len(recipes[0])
    
    #create a batch of recipes
    ind         = 0
    epochLoss   = []
    for cInd in range(400): 
#        if (cInd+maxlen) < startLen:
#            recs    = [r[cInd:cInd+maxlen] for r in recipes]
#        else:
#            recs  = [r[-maxlen:] for r in recipes]
        
        #print(maxlen, maxWord,len(mh.char_indices),len(mh.word_indices))
        Xchar,Xword,Xcon,dummy    = helper.getCharAndWordNoState(recipes,contextVec,maxlen,maxWord,mh.char_indices,mh.word_indices,step=1,predict=True)
        # every recipe contributes the same number of sliding windows; keep the
        # division integer so the indices below stay ints (the hard-coded 4
        # presumably equals batchSize)
        newLength   = Xchar.shape[0] // 4

        inds        = [(newLength * (divind + 1)) - 1 for divind in range(batchSize)]
        #helper.checkExampleWords(Xword[inds[1]],mh.vocab)

        # keep only the final window of each recipe in the batch
        Xchar   = Xchar[inds]
        Xword   = Xword[inds]
        Xcon    = Xcon[inds]

        preds       = model.predict_on_batch([Xchar, Xword, Xcon])[0]
        for d, pred in enumerate(preds):
            # sample the next character index, one temperature per batch slot
            next_index  = helper.sample(pred, div[d])
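
helper.sample is called above but not shown. A minimal sketch of the temperature-based sampling it presumably performs (the body below is an assumption; only the call signature comes from the code above):

import numpy as np

def sample(preds, temperature=1.0):
    # assumption: reweight the predicted character distribution by temperature;
    # low temperatures sharpen it, values near 1.0 keep it close to the model output
    preds = np.asarray(preds).astype('float64')
    preds = np.log(preds + 1e-8) / temperature
    exp_preds = np.exp(preds)
    preds = exp_preds / np.sum(exp_preds)
    # draw a single character index from the reweighted distribution
    return int(np.argmax(np.random.multinomial(1, preds, 1)))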
Example #2
    model.add(LSTM(512, return_sequences=False))
    model.add(Dropout(0.2))
    # project onto the character vocabulary and normalise to probabilities
    model.add(Dense(mh.numChars))
    model.add(Activation('softmax'))

    model.compile(loss='categorical_crossentropy', optimizer='rmsprop')

#create a batch of recipes
ind         = 0
print("begin training")
while True: 
    if ind + batchSize > len(recipes):
        ind         = 0
        # note: only recipes is reshuffled here, so the pairing with contextVecs
        # drifts; Example #3 shuffles both in unison instead
        np.random.shuffle(recipes)
    Xcontext    = contextVecs[ind:ind+batchSize]
    recs        = recipes[ind:ind+batchSize]
    recs        = helper.shuffleIngredients(recs)
    
    Xcharacter, Xword, Xcontext, y  = helper.getCharAndWordNoState(recs, Xcontext, maxlen, maxWord, mh.char_indices, mh.word_indices, step=step)

    # one optimisation pass over this batch (old Keras API: nb_epoch, not epochs)
    loss    = model.fit([Xcharacter, Xword, Xcontext], y, nb_epoch=1, batch_size=batchSize)

    ind     = ind+batchSize

    # checkpoint the architecture and weights every 10 batches
    if (ind // batchSize) % 10 == 0:
        print()
        jsonstring  = model.to_json()
        with open("../models/recipeRNN3noState.json", 'w') as f:
            f.write(jsonstring)
        model.save_weights("../models/recipeRNN3noState.h5", overwrite=True)
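
The loop above checkpoints the architecture as JSON and the weights as HDF5. A minimal sketch of reloading such a checkpoint with the standard Keras calls (the compile settings are assumed to match the training script):

from keras.models import model_from_json

with open("../models/recipeRNN3noState.json") as f:
    model = model_from_json(f.read())
model.load_weights("../models/recipeRNN3noState.h5")
# recompile before continuing training or evaluating (settings assumed from above)
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')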
Example #3
    model.add(Dense(mh.numChars))
    model.add(Activation('softmax'))
    
    model.compile(loss='categorical_crossentropy', optimizer='rmsprop')

#create a batch of recipes
ind         = 0
print("begin training")
while True: 
    if ind + batchSize > len(recipes):
        print("new epoch")
        ind         = 0
        # shuffle recipes and their context vectors in unison so pairs stay aligned
        recipes, contextVecs = helper.sameShuffle(recipes, contextVecs)
        contextVecs = np.array(contextVecs)
    Xcontext = contextVecs[ind:ind+batchSize]
    recs = recipes[ind:ind+batchSize]
    recs = helper.shuffleIngredients(recs)
    
    Xcharacter, Xword, Xcontext, y = helper.getCharAndWordNoState(recs,Xcontext,maxlen,maxWord,mh.char_indices,mh.word_indices,step=step)
   

    loss = model.fit([Xcharacter, Xword, Xcontext], y,nb_epoch=1,batch_size=batchSize)

    ind = ind+batchSize

    # checkpoint the architecture and weights every 10 batches
    if (ind // batchSize) % 10 == 0:
        print(ind)
        jsonstring  = model.to_json()
        with open("../models/recipeRNN3noState.json", 'w') as f:
            f.write(jsonstring)
        model.save_weights("../models/recipeRNN3noState.h5", overwrite=True)
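
This version keeps recipes and their context vectors aligned by shuffling them together through helper.sameShuffle, which is not shown. A common way to implement that kind of unison shuffle (the body below is an assumption; only the name and signature come from the call above):

import random

def sameShuffle(a, b):
    # shuffle two parallel sequences with one permutation so a[i] stays paired with b[i]
    paired = list(zip(a, b))
    random.shuffle(paired)
    a_shuffled, b_shuffled = zip(*paired)
    return list(a_shuffled), list(b_shuffled)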