Ejemplo n.º 1
0
def testmodel(mymodel, args, cuda_gpu):
    """Extract descriptors for query and database images, rank, and report mAP.

    Query descriptors come from the ground-truth query list; database
    descriptors come from the full dataset loader. Ranking uses
    dot-product similarity.

    Args:
        mymodel: model whose forward pass returns a descriptor of size
            OUTPUT_DIM[args.backbone].
        args: namespace with json_file, height, width, autoaugment,
            valdata_dir and backbone attributes.
        cuda_gpu: bool — run tensors on the GPU when True.
    """
    mytraindata = myDataset(path=args.json_file,
                            height=args.height,
                            width=args.width,
                            autoaugment=args.autoaugment)
    mytrainloader = torch.utils.data.DataLoader(mytraindata,
                                                batch_size=1,
                                                shuffle=False)
    gnd = loadquery(args.valdata_dir)

    # Respect cuda_gpu: the original allocated buffers with .cuda() and sent
    # query batches to the GPU unconditionally, crashing on CPU-only hosts.
    device = torch.device('cuda' if cuda_gpu else 'cpu')

    mymodel.eval()
    with torch.no_grad():
        print('>> Extracting descriptors for query images...')
        qloader = torch.utils.data.DataLoader(ImagesFromList(
            root='',
            images=[i['queryimgid'] for i in gnd],
            imsize=mytraindata.height,
            transform=mytraindata.transform),
                                              batch_size=1,
                                              shuffle=False,
                                              num_workers=0,
                                              pin_memory=True)
        qoolvecs = torch.zeros(OUTPUT_DIM[args.backbone], len(gnd),
                               device=device)
        lenq = len(qloader)
        # `batch` instead of the original `input`, which shadowed a builtin.
        for i, batch in enumerate(qloader):
            out = mymodel(batch.to(device))
            qoolvecs[:, i] = out.data.squeeze()
            if (i + 1) % 10 == 0:
                print('\r>>>> {}/{} done...'.format(i + 1, lenq), end='')
        print('')

        poolvecs = torch.zeros(OUTPUT_DIM[args.backbone],
                               len(mytrainloader), device=device)
        idlist = []
        print('>> Extracting descriptors for database images...')
        for index, data in enumerate(mytrainloader):
            batch_x, batch_y, batch_id = data
            idlist.append(batch_id[0])
            batch_x = batch_x.to(device).float()
            out = mymodel(batch_x)
            poolvecs[:, index] = out
            if (index + 1) % 10 == 0:
                print('\r>>>> {}/{} done...'.format(index + 1,
                                                    len(mytrainloader)),
                      end='')

        vecs = poolvecs.cpu().numpy()
        qvecs = qoolvecs.cpu().numpy()

        # Dot-product similarity: higher is better, so negate before
        # argsort to rank best matches first.
        scores = np.dot(vecs.T, qvecs)
        ranks = np.argsort(-scores, axis=0)
        dataset = args.json_file.split('/')[-1].replace("all.json", "")
        compute_map_and_print(dataset, ranks, gnd, idlist)
Ejemplo n.º 2
0
def testmultibranch(args, cuda_gpu):
    """Evaluate a fixed multi-branch extractor checkpoint on a retrieval set.

    Extracts a descriptor for every database image, selects as queries the
    images whose ids appear in the ground truth, ranks by dot-product
    similarity, and reports mAP.

    Args:
        args: namespace with backbone, classnum, gpu, json_file, height,
            width, autoaugment and valdata_dir attributes. Its train_dir
            is overwritten below.
        cuda_gpu: bool — run tensors on the GPU when True.
    """
    # HACK: overrides whatever checkpoint path the caller set in args.
    args.train_dir = '/mnt/sdb/shibaorong/logs/paris/triplet/usmine/withclass_cluster11/parameter_61.pkl'

    mymodel = builGraph.getModel(args.backbone, args.classnum, args.gpu,
                                 'extractor', cuda_gpu=cuda_gpu, pretrained=False)

    if os.path.exists(args.train_dir):
        # map_location='cpu' so GPU-trained checkpoints also load on
        # CPU-only machines (consistent with testOnlinepair).
        checkpoint = torch.load(args.train_dir, map_location='cpu')
        mymodel.load_state_dict(checkpoint['model_state_dict'])

    mytraindata = myDataset(path=args.json_file, height=args.height, width=args.width,
                            autoaugment=args.autoaugment)
    mytrainloader = torch.utils.data.DataLoader(mytraindata, batch_size=1, shuffle=False)
    gnd = loadquery(args.valdata_dir)

    # Respect cuda_gpu: the original allocated poolvecs with .cuda()
    # unconditionally, crashing on CPU-only hosts.
    device = torch.device('cuda' if cuda_gpu else 'cpu')

    mymodel.eval()
    with torch.no_grad():

        poolvecs = torch.zeros(OUTPUT_DIM[args.backbone], len(mytrainloader),
                               device=device)
        idlist = []
        print('>> Extracting descriptors for database images...')
        for index, data in enumerate(mytrainloader):
            batch_x, batch_y, batch_id = data
            idlist.append(batch_id[0])
            batch_x = batch_x.to(device).float()
            out = mymodel(batch_x)
            poolvecs[:, index] = out
            if (index + 1) % 10 == 0:
                print('\r>>>> {}/{} done...'.format(index + 1, len(mytrainloader)), end='')

        # Queries are the database images whose id appears in the ground
        # truth; reorder gnd to match the order those ids occur in idlist.
        qindexs = np.arange(len(mytrainloader))[np.in1d(idlist, [i['queryimgid'] for i in gnd])]
        newgnd = [idlist[i] for i in qindexs]
        g = [[i['queryimgid'] for i in gnd].index(j) for j in newgnd]
        gnd = [gnd[i] for i in g]

        vecs = poolvecs.cpu().numpy()
        qvecs = vecs[:, qindexs]

        # Dot-product similarity: negate so argsort ranks best matches first.
        scores = np.dot(vecs.T, qvecs)
        ranks = np.argsort(-scores, axis=0)

        dataset = args.json_file.split('/')[-1].replace("all.json", "")
        compute_map_and_print(dataset, ranks, gnd, idlist)
Ejemplo n.º 3
0
    def create_triplet_clusterbased(self, mymodel, ClusterInfo, args):
        """Build (anchor, positive, negative) triplets from cluster assignments.

        Anchors are sampled at random from the dataset; for each anchor the
        remaining images are ranked by descriptor similarity, and a triplet
        is emitted whenever a same-class, same-cluster image directly
        follows a mismatched one in the ranking (the mismatched image
        becomes the negative). Results are stored in ``self.triplet_pool``.

        Args:
            mymodel: model, switched to eval mode here; features themselves
                are taken from ``ClusterInfo``, not from a forward pass.
            ClusterInfo: tuple whose first element maps image id (str) to a
                dict with 'feature' and cluster 'label' entries.
            args: namespace providing ``valdata_dir`` for ``loadquery``.
        """
        print('create_triplet....................')
        iddict, _, _ = ClusterInfo
        # NOTE(review): gnd is only referenced by the commented-out code
        # below — confirm whether this load can be removed.
        gnd = loadquery(args.valdata_dir)
        mymodel.eval()
        with torch.no_grad():

            # Collect each image's precomputed feature and cluster label,
            # aligned with self.filenames / self.ids / self.labels by index.
            featurelist = []
            #pathlist = []
            clusterlabelist = []
            #classlabelist = []

            for i in range(len(self.filenames)):
                ID = str(self.ids[i])
                feature = iddict[ID]['feature']
                featurelist.append(feature)
                clusterlabel = iddict[ID]['label']
                clusterlabelist.append(clusterlabel)

            # Randomly pick self.qsize anchor indices (the alternative,
            # ground-truth-driven selection is kept commented out).
            qindexs = random.sample([i for i in range(len(featurelist))],
                                    self.qsize)
            #qindexs = np.arange(len(self.filenames))[np.in1d(self.filenames, [i['queryimgid'] for i in gnd])]
            '''newgnd = [self.filenames[i] for i in qindexs]
      g = [[i['queryimgid'] for i in gnd].index(j) for j in newgnd]
      gnd = [gnd[i] for i in g]'''
            # vecs: one feature per column; ranks[:, i] orders the whole
            # dataset by descending dot-product similarity to anchor i.
            vecs = np.transpose(np.array(featurelist))
            qvecs = vecs[:, qindexs]
            scores = np.dot(vecs.T, qvecs)
            ranks = np.argsort(-scores, axis=0)

            triplet_pool = []
            for i in range(ranks.shape[1]):
                rank = ranks[:, i]
                aimg = self.filenames[qindexs[i]]
                aclusterlabel = clusterlabelist[qindexs[i]]
                aclasslabel = self.labels[qindexs[i]]
                # mark is True when the previously ranked image mismatched
                # the anchor (different class or cluster).
                mark = False
                '''p=[self.filenames[g] for g in rank]
        po=gnd[i]['ok']
        pos = np.arange(len(p))[np.in1d(p, po)]'''
                # Only the top self.qscale ranked images are considered.
                for j in range(rank.shape[0]):
                    if j >= self.qscale:
                        continue
                    #jmark=pos[j]
                    rindex = rank[j]
                    jpath = self.filenames[rindex]
                    jclusterlabel = clusterlabelist[rindex]
                    jclasslabel = self.labels[rindex]

                    if jclasslabel == aclasslabel and jclusterlabel == aclusterlabel:
                        if mark:
                            # A matching image right after a mismatch: use it
                            # as the positive and the preceding (mismatched,
                            # but higher-ranked) image as the negative.
                            pimg = jpath
                            nimg = self.filenames[rank[j - 1]]
                            triplet_pool.append([(aimg, aclasslabel),
                                                 (pimg, jclasslabel),
                                                 (nimg,
                                                  self.labels[rank[j - 1]])])

                        mark = False

                    else:
                        mark = True
                    '''elif jclasslabel!=aclasslabel and jclusterlabel!=aclusterlabel:
                      mark = True'''

        self.triplet_pool = triplet_pool
Ejemplo n.º 4
0
def testOnlinepair(args, cuda_gpu, type='extractor', similartype='dot'):
    """Evaluate a checkpoint over one or more retrieval datasets.

    For each json file in ``args.json_file``: extract a descriptor per
    image, pick queries by id from the matching ground truth in
    ``args.valdata_dir``, rank database images by the chosen similarity,
    and report mAP.

    Args:
        args: namespace with backbone, classnum, gpu, train_dir, height,
            width, autoaugment; json_file and valdata_dir are parallel
            sequences.
        cuda_gpu: bool — run tensors on the GPU when True.
        type: model head passed through to builGraph.getModel.
        similartype: 'dot' (similarity, best = highest) or 'euclidean'
            (distance, best = lowest).

    Raises:
        ValueError: if similartype is not 'dot' or 'euclidean'.
    """
    mymodel = builGraph.getModel(args.backbone, args.classnum, args.gpu,
                                 type, cuda_gpu=cuda_gpu, pretrained=False)
    if os.path.exists(args.train_dir):
        print(args.train_dir)
        checkpoint = torch.load(args.train_dir, map_location='cpu')
        mymodel.load_state_dict(checkpoint['model_state_dict'])

    # Respect cuda_gpu: the original allocated poolvecs with .cuda()
    # unconditionally, crashing on CPU-only hosts.
    device = torch.device('cuda' if cuda_gpu else 'cpu')

    for index, jfile in enumerate(args.json_file):

        dataset = jfile.split('/')[-1].replace("all.json", "")
        mytraindata = myDataset(path=jfile, height=args.height, width=args.width,
                                autoaugment=args.autoaugment)
        mytrainloader = torch.utils.data.DataLoader(mytraindata, batch_size=1, shuffle=False)
        gnd = loadquery(args.valdata_dir[index])

        mymodel.eval()
        with torch.no_grad():

            poolvecs = torch.zeros(OUTPUT_DIM[args.backbone], len(mytrainloader),
                                   device=device)
            idlist = []
            print('>> Extracting descriptors for {} images...'.format(dataset))
            # batch_idx, not `index`: the original reused `index` here and
            # shadowed the outer dataset-loop variable.
            for batch_idx, data in enumerate(mytrainloader):
                batch_x, batch_y, batch_id = data
                idlist.append(batch_id[0])
                batch_x = batch_x.to(device).float()
                out = mymodel(batch_x)
                poolvecs[:, batch_idx] = out
                if (batch_idx + 1) % 10 == 0:
                    print('\r>>>> {}/{} done...'.format(batch_idx + 1, len(mytrainloader)), end='')

            # Queries are the database images whose id appears in the ground
            # truth; reorder gnd to match the order those ids occur in idlist.
            qindexs = np.arange(len(mytrainloader))[np.in1d(idlist, [i['queryimgid'] for i in gnd])]
            newgnd = [idlist[i] for i in qindexs]
            g = [[i['queryimgid'] for i in gnd].index(j) for j in newgnd]
            gnd = [gnd[i] for i in g]

            vecs = poolvecs.cpu().numpy()
            '''pca = PCA(whiten=True,n_components=1000,random_state=732)
            vecst=pca.fit_transform(np.transpose(vecs))
            vecst=l2n(totensor(vecst))
            vecs=np.transpose(tonumpy(vecst))'''

            qvecs = vecs[:, qindexs]

            # search, rank, and print
            if similartype == 'dot':
                # Similarity: higher is better, negate before argsort.
                scores = np.dot(vecs.T, qvecs)
                ranks = np.argsort(-scores, axis=0)
            elif similartype == 'euclidean':
                # Squared euclidean distance: lower is better, ascending sort.
                dis = np.zeros([vecs.shape[1], qvecs.shape[1]])
                for j in range(qvecs.shape[1]):
                    d = (vecs - np.reshape(qvecs[:, j], (qvecs.shape[0], 1))) ** 2
                    dis[:, j] = np.sum(d, axis=0)
                ranks = np.argsort(dis, axis=0)
            else:
                # Previously fell through to a confusing NameError on `ranks`.
                raise ValueError('unknown similartype: {!r}'.format(similartype))

            compute_map_and_print(dataset, ranks, gnd, idlist)