Example #1
0
    def __init__(self, args):
        """Load a trained MatNet model plus its Mask R-CNN backbone.

        Args:
            args: parsed CLI namespace. Must provide ``dataset``, ``splitBy``
                and ``model_id``. It is mutated in place: ``imdb_name``,
                ``net_name``, ``tag`` and ``iters`` are overwritten with the
                options saved alongside the model checkpoint.
        """
        # load model
        model_prefix = osp.join(ROOT_DIR, 'output',
                                args.dataset + '_' + args.splitBy,
                                args.model_id)
        tic = time.time()
        # Use a context manager so the info-file handle is closed promptly
        # (the original `json.load(open(...))` leaked the descriptor).
        with open(model_prefix + '.json') as f:
            infos = json.load(f)
        model_path = model_prefix + '.pth'
        self.dataset = args.dataset
        self.model_opt = infos['opt']
        self.word_to_ix = infos['word_to_ix']
        # Invert att_to_ix so predicted attribute indices map back to names.
        self.ix_to_att = {ix: att for att, ix in infos['att_to_ix'].items()}
        self.model = self.load_matnet_model(model_path, self.model_opt)
        print('MatNet [%s_%s\'s %s] loaded in %.2f seconds.' %
              (args.dataset, args.splitBy, args.model_id, time.time() - tic))

        # load mask r-cnn, configured from the options stored with the model
        tic = time.time()
        args.imdb_name = self.model_opt['imdb_name']
        args.net_name = self.model_opt['net_name']
        args.tag = self.model_opt['tag']
        args.iters = self.model_opt['iters']
        self.mrcn = inference.Inference(args)
        self.imdb = self.mrcn.imdb
        print(
            'Mask R-CNN: imdb[%s], tag[%s], id[%s_mask_rcnn_iter_%s] loaded in %.2f seconds.'
            % (args.imdb_name, args.tag, args.net_name, args.iters,
               time.time() - tic))
Example #2
0
def main(args):
    """Attach Mask R-CNN RLE masks to a cached detections file.

    Reads ``cache/detections/<dataset_splitBy>/<args.dets_file_name>``,
    runs ``mrcn.boxes_to_masks`` per image, and writes the detections back
    out (each det gains an ``rle`` field) as ``*_masks.json``.
    """
    # Image Directory
    params = vars(args)
    dataset_splitBy = params['dataset'] + '_' + params['splitBy']
    # BUG FIX: the original `if 'coco' or 'combined' in dataset_splitBy:`
    # is always truthy ('coco' is a non-empty string), so the elif/else
    # branches were unreachable. Test each substring explicitly.
    if 'coco' in dataset_splitBy or 'combined' in dataset_splitBy:
        IMAGE_DIR = 'data/images/mscoco/images/train2014'
    elif 'clef' in dataset_splitBy:
        IMAGE_DIR = 'data/images/saiapr_tc-12'
    else:
        print('No image directory prepared for ', args.dataset)
        sys.exit(0)

    # make save dir
    save_dir = osp.join('cache/detections', dataset_splitBy)
    if not osp.isdir(save_dir):
        os.makedirs(save_dir)
    print(save_dir)

    # get mrcn instance
    mrcn = inference.Inference(args)

    # load detections = [{det_id, box, image_id, category_id, category_name, score}]
    save_dir = osp.join('cache/detections', args.dataset + '_' + args.splitBy)
    # Context manager: the original `json.load(open(...))` leaked the handle.
    with open(osp.join(save_dir, args.dets_file_name)) as f:
        detections = json.load(f)
    # group detections by image_id
    image_to_dets = {}
    for det in detections:
        image_to_dets.setdefault(det['image_id'], []).append(det)

    # run mask rcnn
    for i, image_id in enumerate(image_to_dets.keys()):
        dets = image_to_dets[image_id]

        img_path = osp.join(
            IMAGE_DIR, 'COCO_train2014_' + str(image_id).zfill(12) + '.jpg')
        assert osp.isfile(img_path), img_path

        # boxes are stored as xywh; the model wants xyxy
        boxes = np.array([det['box'] for det in dets], dtype=np.float32)
        boxes = xywh_to_xyxy(boxes)

        labels = [
            mrcn.imdb._class_to_ind[det['category_name']] for det in dets
        ]
        labels = np.array(labels, dtype=np.int32)

        masks, rles = mrcn.boxes_to_masks(img_path, boxes, labels)

        # add rles to det (rles[ix] corresponds to dets[ix])
        for ix, det in enumerate(dets):
            det['rle'] = rles[ix]

        # i + 1 so the counter reads 1/N .. N/N instead of 0/N .. (N-1)/N
        print('%s/%s done.' % (i + 1, len(image_to_dets)))

    # save dets.json = [{det_id, box, image_id, score}]
    # to cache/detections/
    save_path = osp.join(save_dir, args.dets_file_name[:-10] + '_masks.json')
    with open(save_path, 'w') as f:
        json.dump(detections, f)
Example #3
0
def main(args):
  """Add Mask R-CNN RLE masks to cached MAttNet detections.

  Loads matt_dets_<m>_<tid>_<dataset_splitBy>_<top_N>.json from the
  detection cache, computes a mask for every detection via
  mrcn.boxes_to_masks, and writes the augmented list back out as
  matt_mask_<m>_<tid>_<dataset_splitBy>_<top_N>.json.
  """
  # Image Directory
  opts = vars(args)
  dataset_splitBy = opts['dataset'] + '_' + opts['splitBy']
  IMAGE_DIR = 'data/images/mscoco/images/train2014'

  # make save dir
  save_dir = osp.join('cache/detections', dataset_splitBy)
  if not osp.isdir(save_dir):
    os.makedirs(save_dir)
  print(save_dir)

  # get mrcn instance
  mrcn = inference.Inference(args)

  # load detections = [{det_id, box, image_id, category_id, category_name, score}]
  save_dir = osp.join('cache/detections', args.dataset+'_'+args.splitBy)
  dets_file_name = 'matt_dets_%s_%s_%s_%d.json' % (args.m, args.tid, dataset_splitBy, args.top_N)
  with open(osp.join(save_dir, dets_file_name), 'r') as f:
    detections = json.load(f)
  # bucket detections by image
  image_to_dets = {}
  for det in detections:
    image_to_dets.setdefault(det['image_id'], []).append(det)

  # run mask rcnn
  pbar = tqdm(total=len(image_to_dets.keys()), ascii=True, ncols=120)
  for image_id, dets in image_to_dets.items():
    img_path = osp.join(IMAGE_DIR, 'COCO_train2014_'+str(image_id).zfill(12)+'.jpg')
    assert osp.isfile(img_path), img_path

    # cached boxes are xywh; convert to the xyxy form the model expects
    box_arr = xywh_to_xyxy(np.array([d['box'] for d in dets], dtype=np.float32))
    label_arr = np.array(
        [mrcn.imdb._class_to_ind[d['category_name']] for d in dets],
        dtype=np.int32)

    masks, rles = mrcn.boxes_to_masks(img_path, box_arr, label_arr)

    # attach each rle to its detection (same order as dets)
    for det, rle in zip(dets, rles):
      det['rle'] = rle

    pbar.update(1)
  pbar.close()

  # save dets.json = [{det_id, box, image_id, score}]
  # to cache/detections/
  mask_file_name = 'matt_mask_%s_%s_%s_%d.json' % (args.m, args.tid, dataset_splitBy, args.top_N)
  save_path = osp.join(save_dir, mask_file_name)
  with open(save_path, 'w') as f:
    json.dump(detections, f)
Example #4
0
def main(args):
    """Extract Mask R-CNN head features for every image in the dataset.

    For each image listed in cache/prepro/<dataset_splitBy>/data.json,
    runs mrcn.extract_head and writes the 'head' feature map and
    'im_info' to cache/feats/<dataset_splitBy>/mrcn/<net>_<imdb>_<tag>/
    <image_id>.h5.
    """
    dataset_splitBy = args.dataset + '_' + args.splitBy
    feats_root = osp.join('cache/feats/', dataset_splitBy)
    if not osp.isdir(feats_root):
        os.makedirs(feats_root)

    # Image Directory
    if 'coco' in dataset_splitBy:
        IMAGE_DIR = 'data/images/mscoco/images/train2014'
    elif 'clef' in dataset_splitBy:
        IMAGE_DIR = 'data/images/saiapr_tc-12'
    else:
        # fall back to the cropped endovis2017 layout
        IMAGE_DIR = f'/input/data_endovis2017_instances_cropped/{args.splitBy}/images'

    # load dataset
    data_json = osp.join('cache/prepro', dataset_splitBy, 'data.json')
    data_h5 = osp.join('cache/prepro', dataset_splitBy, 'data.h5')
    loader = Loader(data_json, data_h5)
    images = loader.images

    # load mrcn model
    mrcn = inference.Inference(args)
    imdb = mrcn.imdb

    # feats_h5 output directory, keyed by net/imdb/tag
    feats_dir = osp.join(
        'cache/feats', dataset_splitBy, 'mrcn',
        '%s_%s_%s' % (args.net_name, args.imdb_name, args.tag))
    if not osp.isdir(feats_dir):
        os.makedirs(feats_dir)

    # extract
    for idx, image in enumerate(images):
        img_path = osp.join(IMAGE_DIR, image['file_name'])
        feat, im_info = mrcn.extract_head(img_path)
        feat = feat.data.cpu().numpy()

        # write one h5 per image; context manager closes the file on exit
        feat_h5 = osp.join(feats_dir, str(image['image_id']) + '.h5')
        with h5py.File(feat_h5, 'w') as fout:
            fout.create_dataset('head', dtype=np.float32, data=feat)
            fout.create_dataset('im_info', dtype=np.float32, data=im_info)

        if idx % 10 == 0:
            print('%s/%s image_id[%s] size[%s] im_scale[%.2f] writen.' %
                  (idx + 1, len(images), image['image_id'], feat.shape,
                   im_info[0][2]))

    print('Done.')
Example #5
0
def main(args):
    """Dump raw Mask R-CNN proposals for every image of a REFER dataset.

    For each image, stores image_id, rescaled rois, class scores, boxes
    and roi_scores, then pickles the list to
    cache/detections/<dataset_splitBy>/<net>_<imdb>_<tag>_proposals.pkl.
    """

    # Image Directory
    params = vars(args)
    dataset_splitBy = params['dataset'] + '_' + params['splitBy']
    # BUG FIX: the original `if 'coco' or 'combined' in dataset_splitBy:`
    # is always truthy, so the clef branch and the error exit were dead.
    if 'coco' in dataset_splitBy or 'combined' in dataset_splitBy:
        IMAGE_DIR = 'data/images/mscoco/images/train2014'
    elif 'clef' in dataset_splitBy:
        IMAGE_DIR = 'data/images/saiapr_tc-12'
    else:
        print('No image directory prepared for ', args.dataset)
        sys.exit(0)

    # make save dir
    save_dir = osp.join('cache/detections', dataset_splitBy)
    if not osp.isdir(save_dir):
        os.makedirs(save_dir)
    print(save_dir)

    # get mrcn instance
    mrcn = inference.Inference(args)

    # import refer
    from refer import REFER
    data_root, dataset, splitBy = params['data_root'], params[
        'dataset'], params['splitBy']
    refer = REFER(data_root, dataset, splitBy)

    # detect and collect per-image proposal info
    proposals = []
    cnt = 0

    for image_id, image in refer.Imgs.items():
        file_name = image['file_name']
        img_path = osp.join(IMAGE_DIR, file_name)

        # predict
        scores, boxes = mrcn.predict(img_path)

        # rois come back as (n, 5) with a leading batch index column;
        # drop it and undo the network's input rescaling
        rois = mrcn.net._predictions['rois'].data.cpu().numpy(
        )[:, 1:] / mrcn._scale
        cnt += 1
        print('%s/%s done.' % (cnt, len(refer.Imgs)))

        info = {
            'image_id': image_id,
            'rois': rois,
            'scores': scores,
            'boxes': boxes,
            'roi_scores':
            mrcn.net._predictions['__roi_scores'].data.cpu().numpy()
        }
        # free GPU memory between images to keep peak usage bounded
        torch.cuda.empty_cache()

        proposals.append(info)

    # save pickled proposals to cache/detections/
    save_path = osp.join(
        save_dir,
        '%s_%s_%s_proposals.pkl' % (args.net_name, args.imdb_name, args.tag))
    with open(save_path, 'wb') as f:
        pickle.dump(proposals, f)
Example #6
0
def main(args):
    """Run Mask R-CNN detection over a REFER dataset and cache dets.json.

    Each det is {det_id, h5_id, box (xywh), image_id, category_id,
    category_name, score}; the list is saved to
    cache/detections/<dataset_splitBy>/<net>_<imdb>_<tag>_dets.json.
    """

    # Image Directory
    params = vars(args)
    dataset_splitBy = params['dataset'] + '_' + params['splitBy']

    if 'coco' in dataset_splitBy or 'combined' in dataset_splitBy:
        IMAGE_DIR = 'data/images/mscoco/images/train2014'
    elif 'clef' in dataset_splitBy:
        IMAGE_DIR = 'data/images/saiapr_tc-12'
    elif 'sunspot' in dataset_splitBy:
        IMAGE_DIR = 'data/images/SUNRGBD'
    else:
        print('No image directory prepared for ', args.dataset)
        sys.exit(0)

    # make save dir
    save_dir = osp.join('cache/detections', dataset_splitBy)
    if not osp.isdir(save_dir):
        os.makedirs(save_dir)
    print(save_dir)

    # get mrcn instance
    mrcn = inference.Inference(args)
    imdb = mrcn.imdb

    # import refer
    from refer import REFER
    data_root, dataset, splitBy = params['data_root'], params[
        'dataset'], params['splitBy']
    refer = REFER(data_root, dataset, splitBy)
    cat_name_to_cat_ix = {
        category_name: category_id
        for category_id, category_name in refer.Cats.items()
    }

    # detect and prepare dets.json
    dets = []
    det_id = 0
    cnt = 0
    for image_id, image in refer.Imgs.items():
        file_name = image['file_name']
        img_path = osp.join(IMAGE_DIR, file_name)

        # predict; catch the narrowest practical class and report the
        # failing path (the original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit and hid the actual error)
        try:
            scores, boxes = mrcn.predict(img_path)
        except Exception as e:
            print('Error on {}: {}'.format(img_path, e))
            sys.exit(0)

        # get cls_to_dets, class_name -> [xyxysc] (n, 5)
        cls_to_dets, num_dets = cls_to_detections(scores, boxes, imdb,
                                                  args.nms_thresh,
                                                  args.conf_thresh)

        # make sure num_dets > 0 for this image, otherwise we lower down
        # the conf_thresh until something survives NMS
        thresh = args.conf_thresh
        while num_dets == 0:
            thresh = float(thresh) - 0.1
            cls_to_dets, num_dets = cls_to_detections(scores, boxes, imdb,
                                                      args.nms_thresh, thresh)

        # add to dets
        for category_name, detections in cls_to_dets.items():
            # detections is list of (n, 5), [xyxysc]
            for detection in detections.tolist():
                x1, y1, x2, y2, sc = detection
                det = {
                    'det_id': det_id,
                    'h5_id': det_id,  # we make h5_id == det_id
                    # convert xyxy back to xywh for the cache format
                    'box': [x1, y1, x2 - x1 + 1, y2 - y1 + 1],
                    'image_id': image_id,
                    'category_id': cat_name_to_cat_ix[category_name],
                    'category_name': category_name,
                    'score': sc
                }
                dets += [det]
                det_id += 1

        cnt += 1
        print('%s/%s done.' % (cnt, len(refer.Imgs)))

    # save dets.json = [{det_id, box, image_id, score}]
    # to cache/detections/
    save_path = osp.join(
        save_dir,
        '%s_%s_%s_dets.json' % (args.net_name, args.imdb_name, args.tag))
    with open(save_path, 'w') as f:
        json.dump(dets, f)
Example #7
0
def main(args):
    """Cache the top-300 Mask R-CNN detections per image for a REFER dataset.

    Detections are kept with conf_thresh 0 (all survivors of NMS), sorted
    by score per image, truncated to 300, and written to
    cache/detections/<dataset_splitBy>/mrcn_dets_300_<dataset_splitBy>.json.
    """

    # Image Directory
    params = vars(args)
    dataset_splitBy = params['dataset'] + '_' + params['splitBy']
    # BUG FIX: the original `if 'coco' or 'combined' in dataset_splitBy:`
    # is always truthy; the elif/else branches were unreachable.
    if 'coco' in dataset_splitBy or 'combined' in dataset_splitBy:
        IMAGE_DIR = 'data/images/mscoco/images/train2014'
    elif 'clef' in dataset_splitBy:
        IMAGE_DIR = 'data/images/saiapr_tc-12'
    else:
        print('No image directory prepared for ', args.dataset)
        sys.exit(0)

    # make save dir
    save_dir = osp.join('cache/detections', dataset_splitBy)
    if not osp.isdir(save_dir):
        os.makedirs(save_dir)
    print(save_dir)

    # get mrcn instance
    mrcn = inference.Inference(args)
    imdb = mrcn.imdb

    # import refer
    from refer import REFER
    data_root, dataset, splitBy = params['data_root'], params[
        'dataset'], params['splitBy']
    refer = REFER(data_root, dataset, splitBy)
    cat_name_to_cat_ix = {
        category_name: category_id
        for category_id, category_name in refer.Cats.items()
    }

    # detect and prepare dets.json
    dets = []
    det_id = 0
    cnt = 0
    for image_id, image in refer.Imgs.items():
        file_name = image['file_name']
        img_path = osp.join(IMAGE_DIR, file_name)

        # predict; conf_thresh 0 keeps everything that survives NMS
        scores, boxes = mrcn.predict(img_path)
        cls_to_dets, num_dets = cls_to_detections(scores, boxes, imdb,
                                                  args.nms_thresh, 0)

        # flatten per-class detections into one per-image list
        img_det_list = []
        for category_name, detections in cls_to_dets.items():
            # detections is list of (n, 5), [xyxysc]
            for detection in detections.tolist():
                x1, y1, x2, y2, sc = detection
                det = {
                    # convert xyxy to xywh for the cache format
                    'box': [x1, y1, x2 - x1 + 1, y2 - y1 + 1],
                    'category_name': category_name,
                    'score': sc
                }
                img_det_list.append(det)
        # keep the 300 highest-scoring detections for this image
        sorted_img_det_list = sorted(img_det_list,
                                     key=lambda x: x['score'],
                                     reverse=True)
        for det in sorted_img_det_list[:300]:
            dets.append({
                'det_id': det_id,
                'h5_id': det_id,  # we make h5_id == det_id
                'box': det['box'],
                'image_id': image_id,
                'category_id': cat_name_to_cat_ix[det['category_name']],
                'category_name': det['category_name'],
                'score': det['score']
            })
            det_id += 1
        cnt += 1
        print('%s/%s done.' % (cnt, len(refer.Imgs)))

    save_path = osp.join(save_dir, 'mrcn_dets_300_%s.json' % (dataset_splitBy))
    with open(save_path, 'w') as f:
        json.dump(dets, f)
Example #8
0
def main(args):
  """Benchmark Mask R-CNN proposal extraction over the val-split images.

  Runs mrcn.predict on every image referenced by a 'val' ref, pulls the
  rois back to the CPU (so the transfer cost is included in the timing),
  and prints total / per-image wall time. Nothing is saved.
  """

  # Image Directory
  params = vars(args)
  dataset_splitBy = params['dataset'] + '_' + params['splitBy']
  # BUG FIX: the original `if 'coco' or 'combined' in dataset_splitBy:`
  # is always truthy; check each substring explicitly.
  if 'coco' in dataset_splitBy or 'combined' in dataset_splitBy:
    IMAGE_DIR = 'data/images/mscoco/images/train2014'
  elif 'clef' in dataset_splitBy:
    IMAGE_DIR = 'data/images/saiapr_tc-12'
  else:
    print('No image directory prepared for ', args.dataset)
    sys.exit(0)

  # make save dir (kept for parity with the other cache scripts)
  save_dir = osp.join('cache/detections', dataset_splitBy)
  if not osp.isdir(save_dir):
    os.makedirs(save_dir)
  print(save_dir)

  # get mrcn instance
  mrcn = inference.Inference(args)

  # import refer
  from refer import REFER
  data_root, dataset, splitBy = params['data_root'], params['dataset'], params['splitBy']
  refer = REFER(data_root, dataset, splitBy)

  # collect the unique image ids behind the val refs
  val_image_ids = {refer.Refs[ref_id]['image_id'] for ref_id in refer.getRefIds(split='val') if refer.Refs[ref_id]['split'] == 'val'}
  print('val image num:', len(val_image_ids))

  cnt = 0
  start = time()
  for image_id in val_image_ids:
    image = refer.Imgs[image_id]
    file_name = image['file_name']
    img_path = osp.join(IMAGE_DIR, file_name)

    # predict
    scores, boxes = mrcn.predict(img_path)

    # pull rois to CPU (drop the batch-index column, undo input scaling);
    # the result is unused but the transfer is part of the measured cost
    rois = mrcn.net._predictions['rois'].data.cpu().numpy()[:,1:] / mrcn._scale
    cnt += 1
    print('%s/%s done.' % (cnt, len(val_image_ids)))

    # release GPU memory between images so timing isn't skewed by OOM churn
    torch.cuda.empty_cache()

  total_t = time() - start
  avg_t = total_t / len(val_image_ids)
  print('time: %.6f / %.6f = %.6f' % (total_t, len(val_image_ids), avg_t))