Exemplo n.º 1
0
def make_data():
    """Build a tensorpack dataflow over COCO training data and return its iterator."""
    from COCOAllJoints import COCOJoints
    from dataset import Preprocessing
    from tensorpack.dataflow import DataFromList, MapData, BatchData

    joints = COCOJoints()
    train_data, _ = joints.load_data(1)

    # Chain: raw sample list -> per-sample preprocessing -> batches of lists.
    flow = DataFromList(train_data)
    flow = MapData(flow, Preprocessing)
    flow = BatchData(flow, cfg.batch_size, use_list=True)
    flow.reset_state()
    return flow.get_data()
Exemplo n.º 2
0
    def make_data(self):
        """Create the training data iterator, optionally via multi-process dpflow."""
        from COCOAllJoints import COCOJoints
        from dataset import Preprocessing
        from tfflat.data_provider import DataFromList, MultiProcessMapDataZMQ, BatchData, MapData

        joints = COCOJoints()
        train_data, _ = joints.load_data(cfg.min_kps)

        flow = DataFromList(train_data)
        # Parallel preprocessing over ZMQ workers when dpflow is enabled;
        # plain in-process mapping otherwise.
        if cfg.dpflow_enable:
            flow = MultiProcessMapDataZMQ(flow, cfg.nr_dpflows, Preprocessing)
        else:
            flow = MapData(flow, Preprocessing)
        flow = BatchData(flow, cfg.batch_size // cfg.nr_aug)
        flow.reset_state()
        return flow.get_data()
Exemplo n.º 3
0
    def make_data(self):
        """Create the training data iterator.

        Returns a generator yielding batches of preprocessed COCO samples.
        When ``cfg.dpflow_enable`` is set, batches are produced by
        ``cfg.nr_dpflows`` worker processes through dpflow; otherwise a
        single in-process generator is used.
        """
        from COCOAllJoints import COCOJoints
        from dataset import Preprocessing

        d = COCOJoints()
        train_data, _ = d.load_data(cfg.min_kps)

        def dataiter(train_data):
            # Running cursor over the dataset; wraps around endlessly.
            ind = 0
            while True:
                batch_data = []
                for _ in range(cfg.batch_size // cfg.nr_aug):
                    ind += 1
                    if ind > len(train_data): ind %= len(train_data)
                    # BUG FIX: the original indexed with the inner loop
                    # counter (`train_data[i]`), so only the first
                    # batch-worth of samples was ever used; index with the
                    # running cursor instead.
                    data = Preprocessing(train_data[ind - 1])
                    batch_data.append(data)

                # Aggregate: transpose list-of-samples into per-field arrays.
                ret = [
                    np.asarray([sample[k] for sample in batch_data])
                    for k in range(len(batch_data[0]))
                ]
                yield ret

        if not cfg.dpflow_enable:
            return dataiter(train_data)
        else:
            from tfflat.dpflow import provider, receiver

            def mpfunc(id):
                # Give each worker process its own shuffle order.
                np.random.seed(id)
                np.random.shuffle(train_data)
                return dataiter(train_data)

            provider(cfg.nr_dpflows, cfg.proj_name, mpfunc)

            def mpdataiter():
                for data in receiver(cfg.proj_name):
                    yield data

            return mpdataiter()
Exemplo n.º 4
0
    def make_data(self):
        """Create the training data iterator.

        Returns a generator of preprocessed batches. With dpflow enabled,
        preprocessing runs in ZMQ worker processes; otherwise a single
        in-process generator is used.
        """
        from COCOAllJoints import COCOJoints
        from dataset import Preprocessing

        d = COCOJoints()
        train_data, _ = d.load_data(cfg.min_kps)

        def dataiter(train_data):
            # Running cursor over the dataset; wraps around endlessly.
            ind = 0
            while True:
                batch_data = []
                for _ in range(cfg.batch_size // cfg.nr_aug):
                    ind += 1
                    if ind > len(train_data): ind %= len(train_data)
                    # BUG FIX: the original indexed with the inner loop
                    # counter (`train_data[i]`), cycling over only the first
                    # batch-worth of samples; index with the running cursor.
                    data = Preprocessing(train_data[ind - 1])
                    batch_data.append(data)

                # Aggregate: transpose list-of-samples into per-field arrays.
                ret = [
                    np.asarray([sample[k] for sample in batch_data])
                    for k in range(len(batch_data[0]))
                ]
                yield ret

        if not cfg.dpflow_enable:
            return dataiter(train_data)
        else:
            from tfflat.data_provider import DataFromList, MultiProcessMapDataZMQ, BatchData
            # NOTE(review): worker count is hard-coded to 10 here, unlike
            # cfg.nr_dpflows used in the sibling implementation — confirm
            # this is intended.
            dp = MultiProcessMapDataZMQ(DataFromList(train_data), 10,
                                        Preprocessing)
            dp = BatchData(dp, cfg.batch_size // cfg.nr_aug)
            dp.reset_state()
            dataiter = dp.get_data()
            return dataiter
Exemplo n.º 5
0
def test(test_model, logger):
    """Evaluate ``test_model`` on COCO keypoints using person detections.

    Splits the detection list across the configured GPUs, runs inference in
    parallel worker processes, dumps the merged results to ``results.json``
    and reports the standard COCO keypoint metrics.
    """
    eval_gt = COCO(cfg.gt_path)
    import json
    with open(cfg.det_path, 'r') as f:
        dets = json.load(f)

    # Debug switch: restrict evaluation to the first 100 images.
    test_subset = False
    if test_subset:
        eval_gt.imgs = dict(list(eval_gt.imgs.items())[:100])
        anns = dict()
        for i in eval_gt.imgs:
            for j in eval_gt.getAnnIds(i):
                anns[j] = eval_gt.anns[j]
        eval_gt.anns = anns
    dets = [i for i in dets if i['image_id'] in eval_gt.imgs]

    # Keep person detections only (category_id == 1), sorted by image then score.
    dets = [i for i in dets if i['category_id'] == 1]
    dets.sort(key=lambda x: (x['image_id'], x['score']), reverse=True)
    # NOTE(review): hard-coded local dataset path — consider moving into cfg.
    for i in dets:
        i['imgpath'] = '/home/dx/data/coco/val2014/COCO_val2014_000000%06d.jpg' % i['image_id']
    img_num = len(np.unique([i['image_id'] for i in dets]))

    # Debug switch: evaluate on ground-truth boxes instead of detections.
    use_gtboxes = False
    if use_gtboxes:
        d = COCOJoints()
        coco_train_data, coco_test_data = d.load_data()
        coco_test_data.sort(key=lambda x: x['imgid'])
        for i in coco_test_data:
            i['image_id'] = i['imgid']
            i['score'] = 1.
        dets = coco_test_data

    from tfflat.mp_utils import MultiProc
    # Partition `dets` into contiguous per-GPU ranges, never splitting one
    # image's detections across two workers.
    img_start = 0
    ranges = [0]
    images_per_gpu = int(img_num / len(args.gpu_ids.split(','))) + 1
    for run_img in range(img_num):
        img_end = img_start + 1
        while img_end < len(dets) and dets[img_end]['image_id'] == dets[img_start]['image_id']:
            img_end += 1
        if (run_img + 1) % images_per_gpu == 0 or (run_img + 1) == img_num:
            ranges.append(img_end)
        img_start = img_end

    # Runs in a worker process pinned to one GPU. Locals renamed from
    # `id`/`range` to avoid shadowing the builtins.
    def func(proc_id):
        cfg.set_args(args.gpu_ids.split(',')[proc_id])
        tester = Tester(Network(), cfg)
        tester.load_weights(test_model)
        det_range = [ranges[proc_id], ranges[proc_id + 1]]
        return test_net(tester, logger, dets, det_range)

    MultiGPUFunc = MultiProc(len(args.gpu_ids.split(',')), func)
    all_res, dump_results = MultiGPUFunc.work()

    # evaluation
    result_path = osp.join(cfg.output_dir, 'results.json')
    with open(result_path, 'w') as f:
        json.dump(dump_results, f)

    eval_dt = eval_gt.loadRes(result_path)
    cocoEval = COCOeval(eval_gt, eval_dt, iouType='keypoints')

    cocoEval.evaluate()
    cocoEval.accumulate()
    cocoEval.summarize()
Exemplo n.º 6
0
    # Log the output directory and full run configuration before training.
    print("Logs: ", args.logs)
    config.display()
    # Create model
    model = cpn.CPN(mode="training", config=config, model_dir=args.logs)
    # Select weights file to load
    if args.model.lower() == "last":
        # Find last trained weights
        model_path = model.find_last()[1]
    else:
        model_path = args.model
    # Load weights
    print("Loading weights ", model_path)
    model.load_weights(model_path, by_name=True)  #, exclude=exclude)
    config_tf = tf.ConfigProto()
    config_tf.gpu_options.allow_growth = True
    # NOTE(review): the session is created but never assigned or installed as
    # default — confirm this has the intended effect.
    tf.Session(config=config_tf)
    # Training dataset. Use the training set and 35K from the
    # validation set, as in the Mask RCNN paper.
    coco_joints = COCOJoints()
    dataset_train, dataset_val = coco_joints.load_data(min_kps=1)

    # Training: 10 rounds of 10 epochs each, halving the learning rate
    # after every round.
    base_lr = config.LEARNING_RATE
    for i in range(0, 10):
        model.train(dataset_train,
                    dataset_val,
                    learning_rate=base_lr,
                    epochs=10 * (i + 1),
                    layers='all')
        base_lr = base_lr / 2
Exemplo n.º 7
0
                    gaussian_kernel=config.GK7)
        imgs = imgs.astype(np.float32)
        for index_ in range(len(imgs)):
            imgs[index_] = image_preprocessing(imgs[index_], config)

        return_args = [imgs.astype(np.float32),
            heatmaps15.astype(np.float32).transpose(0, 2, 3, 1),
            heatmaps11.astype(np.float32).transpose(0, 2, 3, 1),
            heatmaps9.astype(np.float32).transpose(0, 2, 3, 1),
            heatmaps7.astype(np.float32).transpose(0, 2, 3, 1),
            valids.astype(np.float32)]

        return return_args
    else:
        for index_ in range(len(imgs)):
            imgs[index_] = image_preprocessing(imgs[index_], config)
        return [np.asarray(imgs).astype(np.float32), details]

if __name__ == '__main__':
    # Smoke test: run the preprocessing pipeline on one COCO training
    # sample, then drop into an interactive shell to inspect the result.
    import sys
    for extra_path in ('../data/COCO', '../lib/utils', '../models'):
        sys.path.append(extra_path)
    from COCOAllJoints import COCOJoints
    coco_joints = COCOJoints()
    train, _ = coco_joints.load_data(min_kps=1)
    from models.config import DefaultConfig
    config = DefaultConfig()
    data = preprocessing(train[0], config, stage='train', debug=False)
    from IPython import embed; embed()