Example no. 1
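These snippets reference helpers from the surrounding project. A hedged import preamble they all assume (the project-local module paths are guesses based on a typical Stacked Hourglass Keras layout; adjust to your checkout):

import os

import cv2
import numpy as np
import scipy.io

# project-local helpers (assumed module paths)
from hourglass import HourglassNet
from mpii_datagen import MPIIDataGen
from nyuhand_datagen import NYUHandDataGen
from heatmap_process import post_process_heatmap
from eval_heatmap import cal_heatmap_acc, get_final_pred_kps
from pckh import run_pckh
from view import render_joints, view_predict_hmap
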
def main_inference(model_json, model_weights, num_stack, num_class, imgfile, confth, tiny):
    if tiny:
        xnet = HourglassNet(num_classes=num_class, num_stacks=num_stack, num_channels=128,
                            inres=(192, 192), outres=(48, 48))
    else:
        xnet = HourglassNet(num_classes=num_class, num_stacks=num_stack, num_channels=256,
                            inres=(256, 256), outres=(64, 64))

    xnet.load_model(model_json, model_weights)

    out, scale = xnet.inference_file(imgfile)

    kps = post_process_heatmap(out[0, :, :, :])

    ignore_kps = ['plevis', 'thorax', 'head_top']  # 'plevis' [sic] matches the key spelling from MPIIDataGen.get_kp_keys()
    kp_keys = MPIIDataGen.get_kp_keys()
    mkps = list()
    for i, _kp in enumerate(kps):
        if kp_keys[i] in ignore_kps:
            _conf = 0.0
        else:
            _conf = _kp[2]
        mkps.append((_kp[0] * scale[1] * 4, _kp[1] * scale[0] * 4, _conf))

    cvmat = render_joints(cv2.imread(imgfile), mkps, confth)

    cv2.imshow('frame', cvmat)
    cv2.waitKey()
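
post_process_heatmap is project code not shown in these examples. A minimal sketch of what such a helper typically does, assuming the network output is an (H, W, num_classes) heatmap stack and each keypoint is read off as the per-channel argmax with the peak value as its confidence (the name post_process_heatmap_sketch is hypothetical):

def post_process_heatmap_sketch(heatmap):
    # heatmap: (H, W, C) array, one channel per keypoint
    kps = []
    for c in range(heatmap.shape[-1]):
        hmap = heatmap[:, :, c]
        y, x = np.unravel_index(np.argmax(hmap), hmap.shape)
        kps.append((float(x), float(y), float(hmap[y, x])))  # (x, y, confidence)
    return kps
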
def main_eval(model_json, model_weights, num_stack, num_class, matfile):
    xnet = HourglassNet(num_class, num_stack, (256, 256), (64, 64))

    xnet.load_model(model_json, model_weights)

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    print('val data size', valdata.get_dataset_size())

    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2), dtype=float)  # np.float was removed in NumPy >= 1.24

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size, num_stack, sigma=1, is_shuffle=False, with_meta=True):

        count += batch_size

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        get_final_pred_kps(valkps, out[-1], _meta)

    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh(model_json, matfile)
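
get_final_pred_kps is another project helper. A hedged sketch of the usual implementation, assuming _meta is a batch of per-sample dicts carrying a 'sample_index' into valkps and the 'scale' applied at load time, and that heatmap coordinates map back to image space through the stride-4 input/output ratio (all names here are hypothetical):

def get_final_pred_kps_sketch(valkps, heatmaps, meta_batch, stride=4):
    # heatmaps: (batch, H, W, C) output of the last hourglass stack
    for b, meta in enumerate(meta_batch):
        for k, (x, y, _conf) in enumerate(post_process_heatmap_sketch(heatmaps[b])):
            # scale heatmap-space peaks back to original image coordinates
            valkps[meta['sample_index'], k, 0] = x * meta['scale'][1] * stride
            valkps[meta['sample_index'], k, 1] = y * meta['scale'][0] * stride
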
def main_test():
    xnet = HourglassNet(16, 8, (256, 256), (64, 64))

    xnet.load_model("../../trained_models/hg_s8_b1_sigma1/net_arch.json", "../../trained_models/hg_s8_b1_sigma1/weights_epoch22.h5")

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    total_good, total_fail = 0, 0
    threshold = 0.5

    print('val data size', valdata.get_dataset_size())

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.tt_generator(batch_size, 8, sigma=2, is_shuffle=False, with_meta=True):

        count += batch_size
        if count % (batch_size*100) == 0:
            print(count, 'processed', total_good, total_fail)

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        good, bad = cal_heatmap_acc(out[-1], _meta, threshold)

        total_good += good
        total_fail += bad

    print(total_good, total_fail, threshold, total_good*1.0/(total_good + total_fail))
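
cal_heatmap_acc also lives in the project's eval code. A hedged sketch of the standard heatmap-accuracy check it likely performs, assuming ground-truth joints are available in heatmap space via _meta (the 'tpts' key and the PCK-style size normalization are assumptions):

def cal_heatmap_acc_sketch(pred_hmaps, meta_batch, threshold):
    good, bad = 0, 0
    norm = pred_hmaps.shape[1] / 10.0  # distance normalizer, PCK-style
    for b, meta in enumerate(meta_batch):
        for k, (x, y, _conf) in enumerate(post_process_heatmap_sketch(pred_hmaps[b])):
            gx, gy = meta['tpts'][k][:2]  # ground truth in heatmap coordinates
            if gx > 0 and gy > 0:  # skip joints that are not annotated/visible
                if np.hypot(x - gx, y - gy) < threshold * norm:
                    good += 1
                else:
                    bad += 1
    return good, bad
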
Example no. 4
def main_test():
    xnet = HourglassNet(16, 8, (256, 256), (64, 64))

    xnet.load_model("../../trained_models/hg_s8_b1_v1_adam/net_arch.json",
                    "../../trained_models/hg_s8_b1_v1_adam/weights_epoch22.h5")

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    print('val data size', valdata.get_dataset_size())

    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2), dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size, 8, sigma=2, is_shuffle=False, with_meta=True):

        count += batch_size

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        get_final_pred_kps(valkps, out[-1], _meta)

    matfile = os.path.join("../../trained_models/hg_s8_b1_v1_adam/", 'preds_e22.mat')
    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh('hg_s8_b1_epoch22', matfile)

def main_video(model_json, model_weights, num_stack, num_class, videofile, confth):

    xnet = HourglassNet(num_class, num_stack, (256, 256), (64, 64))
    xnet.load_model(model_json, model_weights)

    cap = cv2.VideoCapture(videofile)
    while cap.isOpened():
        ret, frame = cap.read()
        if ret:
            rgb = frame[:, :, ::-1]  # bgr -> rgb
            out, scale = xnet.inference_rgb(rgb, frame.shape)

            kps = post_process_heatmap(out[0, :, :, :])

            ignore_kps = ['plevis', 'thorax', 'head_top']  # 'plevis' [sic] matches MPIIDataGen's key spelling
            kp_keys = MPIIDataGen.get_kp_keys()
            mkps = list()
            for i, _kp in enumerate(kps):
                if kp_keys[i] in ignore_kps:
                    _conf = 0.0
                else:
                    _conf = _kp[2]
                mkps.append(
                    (_kp[0] * scale[1] * 4, _kp[1] * scale[0] * 4, _conf))

            framejoints = render_joints(frame, mkps, confth)

            cv2.imshow('frame', framejoints)
            if cv2.waitKey(10) & 0xFF == ord('q'):  # quit on 'q'
                break
        else:
            break  # end of stream

    cap.release()
    cv2.destroyAllWindows()
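
The main_* functions above take paths and hyperparameters as arguments and are natural CLI entry points. A minimal argparse wiring sketch for main_video; the flag names are assumptions:

if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--model_json", required=True, help="network architecture json")
    parser.add_argument("--model_weights", required=True, help="trained weights (.h5)")
    parser.add_argument("--num_stack", type=int, default=2, help="number of hourglass stacks")
    parser.add_argument("--input_video", required=True, help="video file to annotate")
    parser.add_argument("--conf_threshold", type=float, default=0.1, help="confidence threshold for rendering joints")
    args = parser.parse_args()

    main_video(args.model_json, args.model_weights, args.num_stack,
               num_class=16, videofile=args.input_video, confth=args.conf_threshold)
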
Example no. 6
def main_eval(model_json, model_weights, num_stack, num_class, matfile, tiny):
    inres = (192, 192) if tiny else (256, 256)
    outres = (48, 48) if tiny else (64, 64)
    num_channels = 128 if tiny else 256

    xnet = HourglassNet(num_classes=num_class,
                        num_stacks=num_stack,
                        num_channels=num_channels,
                        inres=inres,
                        outres=outres)

    xnet.load_model(model_json, model_weights)

    # dataset_path = '/home/tomas_bordac/nyu_croped'
    dataset_path = os.path.join('D:\\', 'nyu_croped')
    valdata = NYUHandDataGen('joint_data.mat',
                             dataset_path,
                             inres=inres,
                             outres=outres,
                             is_train=False)

    print('val data size', valdata.get_dataset_size())

    valkps = np.zeros(shape=(valdata.get_dataset_size(), 11, 2), dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size,
                                                  num_stack,
                                                  sigma=3,
                                                  is_shuffle=False,
                                                  with_meta=True):

        count += batch_size

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        get_final_pred_kps(valkps, out[-1], _meta, outres)

    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh(model_json, matfile)
Example no. 7
def main_test():
    xnet = HourglassNet(16, 8, (256, 256), (64, 64))

    xnet.load_model("../../trained_models/hg_s8_b1/net_arch.json",
                    "../../trained_models/hg_s8_b1/weights_epoch29.h5")

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json",
                          "../../data/mpii/images",
                          inres=(256, 256),
                          outres=(64, 64),
                          is_train=False)

    for _img, _gthmap in valdata.generator(1, 8, sigma=2, is_shuffle=True):
        out = xnet.model.predict(_img)

        scipy.misc.imshow(_img[0, :, :, :])  # removed in SciPy >= 1.2; see the replacement sketch below
        #view_predict_hmap(_gthmap)
        view_predict_hmap(out, show_raw=False)
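
scipy.misc.imshow was removed in SciPy 1.2, so on a modern stack the call above fails. A drop-in replacement using matplotlib (assumed available):

import matplotlib.pyplot as plt

def imshow_compat(img):
    # stand-in for the removed scipy.misc.imshow
    plt.imshow(img)  # matplotlib handles uint8 images and float images in [0, 1]
    plt.axis('off')
    plt.show()
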
Example no. 8
def main_eval(model_path, num_stack, num_class, matfile, tiny):
    inres = (192, 192) if tiny else (256, 256)
    outres = (48, 48) if tiny else (64, 64)
    num_channels = 128 if tiny else 256

    xnet = HourglassNet(num_classes=num_class,
                        num_stacks=num_stack,
                        num_channels=num_channels,
                        inres=inres,
                        outres=outres)

    xnet.load_model(model_path)

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json",
                          "../../data/mpii/images",
                          inres=inres,
                          outres=outres,
                          is_train=False)

    print('val data size', valdata.get_dataset_size())

    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2), dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size,
                                                  num_stack,
                                                  sigma=1,
                                                  is_shuffle=False,
                                                  with_meta=True):

        count += batch_size

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        get_final_pred_kps(valkps, out[-1], _meta, outres)

    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh(model_path, matfile)
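
To sanity-check the saved predictions before (or after) running PCKh, scipy.io.loadmat reads the file straight back; 'preds' is the key savemat wrote above:

preds = scipy.io.loadmat(matfile)['preds']
print(preds.shape)   # (num_val_samples, 16, 2)
print(preds[0])      # first sample's 16 (x, y) keypoints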