Example #1
import pickle
import numpy as np

# Training hyperparameters. batch_size, nb_epoch, verbose and
# validation_split are assumed to be defined earlier in the full script;
# only the shuffle/accuracy flags appear in this excerpt.
shuffle = True
show_accuracy = True

MODEL_ROOT = '../models/elephas/'
PREDICTION_ROOT = '../predictions/'

MODEL = 'stop_attention2_noLSTM_batch_{}'.format(batch_size)

print 'Loading data...'
# stop_load_data(), nameToId() and the Akar model module are project helpers
# imported elsewhere in the full script; they are not defined in this excerpt.
(X_train, Y_train), (X_test, Y_test) = stop_load_data()
TEST_ID = '../Data/pkl/img_q_id_test'
TEST_ID_PKL = pickle.load(open(TEST_ID+'.pkl','rb'))
ids = map(nameToId, [TEST_ID_PKL[idx][1] for idx in range(len(TEST_ID_PKL))])

print 'Building model...'
model = Akar.keras_model(1)

#print 'Defining callbacks...'
#checkpoint = ModelCheckpoint('../models/elephas/checkpoint_'+MODEL+'.h5', monitor='val_loss', verbose=0, save_best_only=True, mode='auto')
#earlystopping = EarlyStopping(monitor='val_loss', patience=2, verbose=0)

print 'Start training...'
# Each pass through this loop trains nb_epoch additional epochs; the listed
# values only label the saved checkpoints. After every checkpoint the model is
# run on the test set and the argmax class is mapped to a choice letter.
for epoch in [20, 40, 60, 80, 100, 120, 140, 160, 180, 200]:
    model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch,
              verbose=verbose, callbacks=[], validation_split=validation_split,
              shuffle=shuffle, show_accuracy=show_accuracy)
    model.save_weights(MODEL_ROOT+MODEL+"_{}.h5".format(epoch))

    probs = model.predict(X_test, batch_size=128)

    answers = map(numToC, np.argmax(probs, axis=1).tolist())
    prediction = zip(ids, answers)
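This excerpt leans on two small project helpers that are not shown: nameToId, which strips an image file name down to its numeric id, and numToC, which turns an argmax class index into a multiple-choice letter. A minimal sketch of what they might look like, assuming COCO-style file names and five answer choices (both assumptions, not part of the original source):

# Hypothetical helpers, for illustration only -- the real project versions
# are imported elsewhere and may differ.
def nameToId(name):
    # e.g. 'COCO_val2014_000000123456.jpg' -> '000000123456'
    return name.split('_')[2].split('.')[0]

def numToC(idx):
    # class index -> multiple-choice letter, assuming five choices A-E
    return {0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E'}[idx]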
Example #3
import pickle
import time
import numpy as np
from progressbar import ProgressBar

#===== load picture features =====
# Map each COCO val2014 file name (e.g. 'COCO_val2014_000000123456.jpg') to its
# row index in the memory-mapped VGG feature matrix of shape (40504, 1, 1000).
IM_ID = pickle.load(open('../Data/val2014/ID.pkl','rb'))
IM_ID_DICT = dict()
for num in xrange(len(IM_ID)):
    ID = IM_ID[num].split('_')[2].split('.')[0]
    IM_ID_DICT[ID] = num
mem_shape = (40504, 1, 1000)
mem_image = np.memmap('../Data/val2014/vgg_feats.memmap', dtype='float32', mode='r', shape=mem_shape)
#===== prepare pickList =====
# ID_PKL (the test id list), MODEL_NAME, BATCH_SIZE and the Keras_model_*
# modules are defined or imported earlier in the full script.
pickList = range(0, len(ID_PKL))
numToC = {0:'A', 1:'B', 2:'C', 3:'D', 4:'E'}   # class index -> multiple-choice letter
answers = []

print "start making model..."
#model = Keras_model_noLSTM.keras_model(1)
model = Keras_model_attention_2.keras_model(1)
model.load_weights(MODEL_NAME)
#===== Start testing =====
print "Start testing!"
for epoch in xrange(1):    # single pass; kept for symmetry with the training loop
    tStart = time.time()
    progress = ProgressBar().start()
    for batch_num in xrange(0, int(len(pickList)/BATCH_SIZE)):
        progress.update(int((batch_num+1)/float(len(pickList)/BATCH_SIZE)*100))
        start = batch_num*BATCH_SIZE
        end   = (batch_num+1)*BATCH_SIZE
        # clamp the final batch so it never runs past the end of pickList
        if end > len(pickList):
            start = len(pickList)-BATCH_SIZE
            end = len(pickList)
        batched_image = []
        batched_words = []
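The excerpt breaks off while the batch lists are still empty. Filling them in comes down to looking up each sample's image id in IM_ID_DICT, pulling the matching row out of mem_image, and running the batch through model.predict. A rough sketch of that step, assuming each ID_PKL entry carries a (question, image-file-name) pair like the test pickle in Example #1 and that the model takes image and question inputs as a list (both assumptions):

        # Sketch only -- the layout of ID_PKL entries and the model's input
        # format are assumptions, not part of the original script.
        for idx in pickList[start:end]:
            img_name = ID_PKL[idx][1]                        # e.g. 'COCO_val2014_000000123456.jpg'
            img_id   = img_name.split('_')[2].split('.')[0]  # numeric id used as dict key
            batched_image.append(mem_image[IM_ID_DICT[img_id]][0])  # (1000,) VGG feature vector
            batched_words.append(ID_PKL[idx][0])             # encoded question for this sample
        probs = model.predict([np.array(batched_image), np.array(batched_words)],
                              batch_size=BATCH_SIZE)
        answers += [numToC[c] for c in np.argmax(probs, axis=1)]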