# Example 1
def epoch_loop():
    """Train and validate for config.max_epoch epochs.

    Streams per-epoch losses to the hyperboard agent and appends them to
    a checkpoint record; saves the model every config.savefreq epochs.
    Relies on module-level globals: config, agent, model, train,
    validation, GlobalVariable, ArchDataset, torch, time.
    """
    base_keys = 'model_type, learning_rate, which_text'

    # Descriptors for the two hyperboard loss curves.
    train_params = config.select(base_keys) + {
        'param': 'train_loss',
        'time': time.ctime(),
    }
    val_params = config.select(base_keys) + {
        'param': 'val_loss',
        'time': time.ctime(),
    }
    train_loss_agent = agent.register(train_params.getdict(),
                                      'loss',
                                      overwrite=True)
    val_loss_agent = agent.register(val_params.getdict(),
                                    'loss',
                                    overwrite=True)

    gvar = GlobalVariable()
    train_data = ArchDataset(config.copy(train=True), gvar)
    valid_data = ArchDataset(config.copy(train=False), gvar)

    # Per-epoch loss history, kept alongside run metadata.
    checkpoints = config.select(base_keys) + {
        'train_loss': [],
        'valid_loss': [],
    }
    checkpoints.update(start_time=time.asctime())

    for epoch in range(config.max_epoch):
        model.train()
        loss = train(epoch, train_data)
        agent.append(train_loss_agent, epoch, loss)
        checkpoints.train_loss.append(loss)

        model.eval()
        loss = validation(epoch, valid_data)
        agent.append(val_loss_agent, epoch, loss)
        checkpoints.valid_loss.append(loss)

        # Periodic checkpoint: stamp the end time and persist the model.
        if (epoch + 1) % config.savefreq == 0:
            print('saving model')
            checkpoints.update(end_time=time.asctime())
            torch.save(model, config.checkpoint)
# Example 2
#coding=utf-8

from torch.autograd import Variable
from configure import GlobalVariable
from PIL import Image

import numpy as np
import json
import urllib

# Module-level setup: move the resnet to GPU and load the sentence/image
# metadata index (one JSON record per line).
gvar = GlobalVariable()
gvar.resnet = gvar.resnet.cuda()
# Use a context manager so the file handle is closed deterministically —
# the original bare open() inside the comprehension leaked it.
with open('static/dataset/data_pair.json') as f:
    data_pair = [json.loads(line.strip()) for line in f]
query_num = 10  # number of nearest neighbours returned per query


def handle_sentence(query_sen):
    """Return the query_num records closest to a query sentence embedding.

    Nearest neighbours are ranked by squared L2 distance against the
    precomputed gvar.sentences matrix.

    Args:
        query_sen: sentence embedding comparable against gvar.sentences
            rows (presumably a 1-D vector — TODO confirm against caller).

    Returns:
        (topk, images): the query_num closest data_pair records with
        every field stringified, and their image paths under
        'static/dataset/'.
    """
    # Squared L2 distance to every stored sentence embedding.
    dists = np.square(gvar.sentences - query_sen).sum(1)
    index = dists.argsort()
    # NOTE: the original also computed `dists = dists[index]` here, but the
    # sorted distances were never used — dead code removed.
    topk = [data_pair[index[i]] for i in range(query_num)]
    print(topk)
    images = ['static/dataset/' + topk[i]['imgpath'] for i in range(query_num)]
    # Stringify every field so the result is JSON/template safe.
    for i in range(query_num):
        for key in topk[i]:
            topk[i][key] = str(topk[i][key])
    return topk, images
    # Removed: unreachable lines after return, including a stray
    # unterminated ''' paste artifact that made the module unparsable.
# Example 3
import numpy as np
# Notebook-export artifact: IPython magic call (was `%matplotlib inline`);
# only runs inside an IPython/Jupyter session.
get_ipython().magic('matplotlib inline')

# NOTE(review): `plt` is never imported in this chunk — presumably
# `import matplotlib.pyplot as plt` lives in an earlier cell; confirm.
plt.plot(title_chars_lengths)
#plt.show()

plt.plot(title_words_lengths)
plt.show()


# In[17]:


from configure import GlobalVariable

gvar = GlobalVariable()
# NOTE(review): truncated Python 2 print statement — the attribute name
# after `gvar.` was lost in the export; this line is a syntax error as-is.
print gvar.


# In[101]:


import random
import numpy as np

def getitem(index):
    """Mean sentence embedding for record *index*'s keywords.

    Falls back to a single all-zero 300-d vector when the transform
    yields no word vectors, so the mean is always defined.
    Relies on module-level globals data_pair and gvar.
    """
    keywords = data_pair[index]['keywords']
    embedded = gvar.sentence_transform(keywords)
    if not embedded.shape[0]:
        embedded = np.zeros((1, 300))
    return embedded.mean(0)
# Example 4
# coding: utf-8

# In[1]:

from configure import GlobalVariable
from linecache_light import LineCache

# Project-wide globals plus a line-indexed view of the dataset file.
gvar = GlobalVariable()
data_pair = LineCache('static/dataset/data_pair.json')

# Python 2 print statements: sanity-check the loaded feature shapes.
print gvar.arch_feats.shape
print gvar.sentence_ids.shape, gvar.sentence_ids
print data_pair.num_lines

# In[2]:

import numpy as np
# Precomputed image feature matrix; shape printed below for inspection.
arch_feats = np.load('images.npy')
print arch_feats.shape

from torchvision import transforms
# Preprocessing pipeline: scale shorter side to 100, centre-crop to
# 100x100, convert to tensor, then ImageNet mean/std normalisation.
# NOTE(review): transforms.Scale is the legacy name for
# transforms.Resize in old torchvision versions — confirm the pinned
# torchvision release before upgrading.
img_transform = transforms.Compose([
    transforms.Scale(100),
    transforms.CenterCrop(100),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224,
                                                          0.225]),
])

# In[3]: