Example #1
    def reshape(self, bottom, top):
        #sample a category
        categid = np.random.choice(self.numclasses)
        #sample an image for this category
        imid = self.data_percateg[categid]['imids'][np.random.choice(
            len(self.data_percateg[categid]['imids']))]

        img = cv2.imread(self._params.imgpath.format(self.names[imid]))

        #get all possibilities for this category
        start = self.data_percateg[categid]['im_end_index'][imid] + 1
        stop = self.data_percateg[categid]['im_end_index'][imid + 1]
        #pick a box
        idx = np.random.choice(np.arange(start, stop + 1),
                               cfg.TRAIN_SAMPLES_PER_IMG)
        boxid = self.data_percateg[categid]['boxids'][idx]
        boxes = self.boxes[imid][boxid, :] - 1

        instid = self.data_percateg[categid]['instids'][idx]
        #load the gt
        [inst, categories] = sbd.load_gt(self.names[imid])
        masks = np.zeros((idx.size, 1, inst.shape[0], inst.shape[1]))
        for k in range(idx.size):
            masks[k, 0, :, :] = (inst == instid[k] + 1).astype(np.float32)
        categids = categid * np.ones(idx.size)

        #get the blobs
        im_new, spp_boxes, normalized_boxes, categids, masksblob, instance_wts = get_blobs(
            img, boxes.astype(np.float32), categids, masks)

        #save blobs in private dict
        self.myblobs['image'] = im_new.astype(np.float32)
        self.myblobs['normalizedboxes'] = normalized_boxes.astype(np.float32)
        self.myblobs['sppboxes'] = spp_boxes.astype(np.float32)
        self.myblobs['categids'] = categids.astype(np.float32)
        self.myblobs['labels'] = masksblob.astype(np.float32)
        self.myblobs['instance_wts'] = instance_wts.astype(np.float32)

        #and reshape
        for i in range(len(top)):
            top[i].reshape(*(self.myblobs[self.blob_names[i]].shape))
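reshape() above only stages the sampled blobs in self.myblobs and resizes the tops; in a pycaffe Python layer the matching forward() then copies those arrays into top[i].data. Below is a minimal sketch of the surrounding layer skeleton, assuming a hypothetical class name and a setup() that loads the per-category index used above (neither appears in the example):

import caffe

class SampledMaskDataLayer(caffe.Layer):  # hypothetical name
    def setup(self, bottom, top):
        # assumed: parse self.param_str and load self._params, self.names,
        # self.boxes and self.data_percateg here; the real setup is not shown
        self.blob_names = ['image', 'normalizedboxes', 'sppboxes',
                           'categids', 'labels', 'instance_wts']
        self.myblobs = {}

    # reshape(self, bottom, top): as in the example above -- samples a
    # category, an image and boxes, fills self.myblobs and reshapes each top

    def forward(self, bottom, top):
        # copy the blobs staged by reshape() into the output blobs
        for i, name in enumerate(self.blob_names):
            top[i].data[...] = self.myblobs[name]

    def backward(self, top, propagate_down, bottom):
        # data layer: nothing to backpropagate
        pass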
Example #2
  def reshape(self, bottom, top):
    #sample a category
    categid = np.random.choice(self.numclasses)
    #sample an image for this category
    imid = self.data_percateg[categid]['imids'][np.random.choice(len(self.data_percateg[categid]['imids']))]
    
    img = cv2.imread(self._params.imgpath.format(self.names[imid]))

    #get all possibilities for this category
    start = self.data_percateg[categid]['im_end_index'][imid]+1
    stop = self.data_percateg[categid]['im_end_index'][imid+1]
    #pick a box
    idx = np.random.choice(np.arange(start,stop+1), cfg.TRAIN_SAMPLES_PER_IMG)
    boxid = self.data_percateg[categid]['boxids'][idx]
    boxes = self.boxes[imid][boxid,:]-1    

    instid = self.data_percateg[categid]['instids'][idx]
    #load the gt
    [inst, categories] = sbd.load_gt(self.names[imid])
    masks = np.zeros((idx.size, 1,inst.shape[0],inst.shape[1]))
    for k in range(idx.size):
      masks[k,0,:,:] = (inst==instid[k]+1).astype(np.float32)
    categids = categid*np.ones(idx.size)

    #get the blobs
    im_new, spp_boxes, normalized_boxes, categids, masksblob, instance_wts = get_blobs(img, boxes.astype(np.float32), categids, masks)

    #save blobs in private dict
    self.myblobs['image']=im_new.astype(np.float32)
    self.myblobs['normalizedboxes']=normalized_boxes.astype(np.float32)
    self.myblobs['sppboxes']=spp_boxes.astype(np.float32)
    self.myblobs['categids']=categids.astype(np.float32)
    self.myblobs['labels']=masksblob.astype(np.float32)
    self.myblobs['instance_wts']=instance_wts.astype(np.float32)

    #and reshape
    for i in range(len(top)):
      top[i].reshape(*(self.myblobs[self.blob_names[i]].shape))
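Such a layer is attached to a network as a Caffe Python layer with six tops, one per entry of self.blob_names. Below is a minimal NetSpec sketch of that wiring; the module name, layer name, param_str contents and output file are assumptions:

import caffe
from caffe import layers as L

n = caffe.NetSpec()
n.image, n.normalizedboxes, n.sppboxes, n.categids, n.labels, n.instance_wts = L.Python(
    module='sampled_mask_data_layer',          # assumed module/file name
    layer='SampledMaskDataLayer',              # assumed class name
    ntop=6,
    param_str=str(dict(imgpath='JPEGImages/{}.jpg')))  # assumed parameters

# write the generated prototxt for caffe to load
with open('train.prototxt', 'w') as f:
    f.write(str(n.to_proto()))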
Example #3
def get_all_outputs(net, names, dets, imgpath, sppath, regsppath, thresh=0.4, outpath=None, do_eval=True, eval_thresh=[0.5, 0.7]):
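  #run the net on every image, project the predictions to superpixels and
  #(optionally) score them against the SBD ground truth; returns per-threshold
  #AP/precision/recall lists plus the raw overlaps and ground-truth categories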
  numcategs=dets['boxes'].size

  if do_eval:
    #we will accumulate the overlaps and the classes of the gt
    all_ov=[]
    gt = []
    for j in range(numcategs):
      all_ov.append([])


  #a dictionary of times
  times = {}
  times['boxes']=0.
  times['pred']=0.
  times['sp']=0.
  times['ov']=0.
  times['total']=0.
  for i in range(len(names)):
    t1=time.time()
    img = cv2.imread(imgpath.format(names[i]))
    #get all boxes for this image
    boxes_img = np.zeros((0,4))
    cids_img = np.zeros((0,1))
    for j in range(numcategs):
      boxes_img = np.vstack((boxes_img, dets['boxes'][j][i]))
      cids_img = np.vstack((cids_img, j*np.ones((dets['boxes'][j][i].shape[0],1))))
    t2=time.time()
    times['boxes']=times['boxes']+t2-t1


    #get the predictions
    output = get_hypercolumn_prediction(net, img, boxes_img.astype(np.float32), cids_img)
    t3=time.time()
    times['pred']=times['pred']+t3-t2
    #project to sp
    (sp, reg2sp) = sprep.read_sprep(sppath.format(names[i]), regsppath.format(names[i]))
    newreg2sp_all = paste_output_sp(output, boxes_img.astype(np.float32)-1., sp.shape, sp)
    newreg2sp_all = np.squeeze(newreg2sp_all)
    newreg2sp_all = newreg2sp_all>=thresh
    newreg2sp_all = newreg2sp_all.T
    t4=time.time()
    times['sp'] = times['sp']+t4-t3
    #save if needed
    if outpath is not None:
      savemat(outpath.format(names[i]), {'output':output})
    #evaluate
    if do_eval:
      inst, categories = sbd.load_gt(names[i])
      ov = evaluation.compute_overlap_sprep(sp, newreg2sp_all, inst)
      #separate according to categories
      for j in range(numcategs):
        all_ov[j].append(ov[:,cids_img.reshape(-1)==j])
      #append categories
      gt.append(np.squeeze(categories-1))
    t5=time.time()
    times['ov'] = times['ov']+t5-t4
    if i % 100 == 0:
      total = float(i+1)
      print('Doing : {:d}, get boxes:{:.2f} s, get pred:{:.2f} s, get sp:{:.2f} s, get ov:{:.2f} s'.format(
          i, times['boxes']/total, times['pred']/total, times['sp']/total, times['ov']/total))

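  #accumulate per-threshold, per-category metrics: indexed as [threshold][category]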
  ap = [[] for _ in eval_thresh]
  prec = [[] for _ in eval_thresh]
  rec = [[] for _ in eval_thresh]
  for i in range(numcategs):
    print('Evaluating :{:d}'.format(i))

    for t,thr in enumerate(eval_thresh):
      ap_, prec_, rec_ = evaluation.generalized_det_eval_simple(dets['scores'][i].tolist()[0:len(names)], all_ov[i], gt, i, thr)
      ap[t].append(ap_)
      prec[t].append(prec_)
      rec[t].append(rec_)
  return ap, prec, rec, all_ov, gt
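Below is a minimal sketch of driving get_all_outputs end to end; the model files, detection-file layout and all paths are placeholders rather than values from the example:

import caffe
from scipy.io import loadmat

caffe.set_mode_gpu()
net = caffe.Net('hypercolumn_deploy.prototxt', 'hypercolumn.caffemodel', caffe.TEST)

# image ids plus per-category detections ('boxes' and 'scores' object arrays)
names = [line.strip() for line in open('val.txt')]
dets = loadmat('detections.mat')

ap, prec, rec, all_ov, gt = get_all_outputs(
    net, names, dets,
    imgpath='JPEGImages/{}.jpg',   # used as imgpath.format(name) by cv2.imread
    sppath='sp/{}.mat',
    regsppath='reg2sp/{}.mat',
    thresh=0.4, do_eval=True)
print(ap[0])  # per-category AP at the first overlap threshold (0.5)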