Example #1
model.compile(loss='categorical_crossentropy', optimizer='Adadelta')
print('done compile')
# Standalone Theano function mapping the model's symbolic input to the
# score output captured before the softmax during model construction.
scoring = theano.function(x_test,
                          y_score,
                          allow_input_downcast=True,
                          mode=None)
# Keras 0.x-style training call (nb_epoch / show_accuracy).
history = model.fit([user, Items],
                    y_train,
                    nb_epoch=5,
                    batch_size=1024,
                    verbose=2,
                    show_accuracy=True)

#history = model.train_on_batch([user ,Items] ,y_train,accuracy=True)# nb_epoch=10, batch_size=1024, verbose=2, show_accuracy=True)
print('done training')
#user_test ,Items_test, y_test = load_dataset(r"C:\Users\t-alie\Downloads\movieLens_1M\movielens.userstest100k.centered",r"C:\Users\t-alie\Downloads\movieLens_1M\movielens.itemstest100k",r"C:\Users\t-alie\Downloads\movieLens_1M\movielens.itemstest100k.fakeneg",50781)
y_p = model.predict([user, Items])
# custom_predict runs the inputs through the compiled `scoring` Theano function.
y_pp = model.custom_predict([user, Items], scoring)
print('done score compile')

# Write both prediction sets to the same output file, one value per line.
pfile = open(r"C:\Users\t-alie\Downloads\movieLens_1M\yp", "w")
for y in y_p:
    pfile.write("%s\n" % y)

for y in y_pp:
    pfile.write("%s\n" % y)

pfile.close()

print('done prediction')
#model.save_weights(r'f:\1b.model')
##model.add(Activation('normalization'))
model.add(Reshape(2))
# Capture the symbolic score output and model input before the final softmax,
# so the Theano scoring function below returns pre-softmax scores.
y_score = model.get_output(train=False)
x_test = model.get_input(train=False)
model.add(Activation('softmax'))
##model.add(Merge([userModel, itemModel], mode='sum'))


print('done model construction')
model.compile(loss='categorical_crossentropy', optimizer='Adadelta')
print('done compile')
scoring = theano.function(x_test, y_score, allow_input_downcast=True, mode=None)
history = model.fit([user, Items], y_train, nb_epoch=100, batch_size=2048, verbose=2, show_accuracy=True)

#history = model.train_on_batch([user ,Items] ,y_train,accuracy=True)# nb_epoch=10, batch_size=1024, verbose=2, show_accuracy=True)
print('done training')
user_test, Items_test, y_test = load_dataset(r"C:\Users\t-alie\Downloads\movieLens_1M\movielens.userstest100k.centered",
                                             r"C:\Users\t-alie\Downloads\movieLens_1M\movielens.itemstest100k",
                                             r"C:\Users\t-alie\Downloads\movieLens_1M\movielens.itemstest100k.fakeneg",
                                             50781)
y_p = model.custom_predict([user_test, Items_test], scoring)
#y_pp = model.predict([user_test, Items_test])
pfile = open(r"C:\Users\t-alie\Downloads\movieLens_1M\yp_max", "w")
for y in y_p:
    pfile.write("%s\n" % y)
pfile.close()
#pfile1=open(r"C:\Users\t-alie\Downloads\movieLens_1M\yp1","w")
#for y in y_pp:
#	pfile1.write("%s\n" %y)

#pfile1.close()
print('done prediction')
#model.save_weights(r'f:\1b.model')
#print('done saving')
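
Both runs above pair a compiled Theano function with model.custom_predict, which is not part of stock Keras 0.x. A minimal stand-in, assuming custom_predict simply feeds the numeric inputs to the compiled function, might look like the sketch below (the helper name predict_with_function is hypothetical):

import numpy as np

def predict_with_function(inputs, compiled_fn):
    # Cast each input to a float32 array and evaluate the compiled Theano
    # function on them, returning the raw score matrix.
    arrays = [np.asarray(x, dtype='float32') for x in inputs]
    return compiled_fn(*arrays)

# Usage mirroring the snippet above (user, Items, and scoring come from that code):
# y_pp = predict_with_function([user, Items], scoring)
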
Example #3
##itemModel.add(Reshape(4))
##itemModel.add(Dense(4, 2))
# Symbolic inputs/outputs of the two branch models, used below to build
# Theano functions exposing the learned user and item representations.
itm = itemModel.get_input(train=False)
usr = userModel.get_input(train=False)
itemrep = itemModel.get_output(train=False)
userrep = userModel.get_output(train=False)
model = Sequential()
model.add(Cosine([userModel, itemModel]))  # should output 2 values
#model.add(TimeDistributedDense(300, 1))
##model.add(Activation('normalization'))
model.add(Reshape(2))
# Capture the score output and model input before the softmax, as in Example #1.
y_score = model.get_output(train=False)
x_test = model.get_input(train=False)
model.add(Activation('softmax'))
print("Complie model...")
model.compile(loss='categorical_crossentropy', optimizer='adam')
print("Complie outs...")
outv1 = theano.function([usr], userrep, allow_input_downcast=True, mode=None)
outv2 = theano.function([itm], itemrep, allow_input_downcast=True, mode=None)
print("load W...")
model.load_weights(r'c:\users\t-alie\txtfactorization.model')
print("start predicting...")
df = open(r'c:\users\t-alie\docrep.txt', 'w')
wf = open(r'c:\users\t-alie\wordrep.txt', 'w')
# Dump the learned representation of each training document via the user branch.
for d in range(0, 8982):
    dh = userModel.custom_predict([X_train[d]], outv1)
    df.write("%s\n" % dh)

    #, nb_epoch=nb_epoch, batch_size=batch_size, verbose=1, show_accuracy=True, validation_split=0.1)
#score = model.evaluate(X_test, Y_test, batch_size=batch_size, verbose=1, show_accuracy=True)
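
For symmetry with the document loop above, the item-branch function outv2 can be evaluated the same way to dump word/item representations; this is only a sketch, and the array name items_input is hypothetical, standing in for whatever item features the original script loads:

# Assumed: items_input is an array of item feature rows matching itemModel's input.
for i in range(len(items_input)):
    wh = outv2(items_input[i:i + 1])  # evaluate the item branch on one row
    wf.write("%s\n" % wh)
wf.close()
df.close()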