def main_eval(model_json, model_weights, num_stack, num_class, matfile):
    """Evaluate a stacked-hourglass model on the MPII validation set.

    Loads the model from ``model_json``/``model_weights``, runs batched
    prediction over the validation generator, writes the predicted keypoints
    to ``matfile`` (key ``'preds'``), and scores them with ``run_pckh``.

    Args:
        model_json: path to the serialized model architecture.
        model_weights: path to the model weights file.
        num_stack: number of hourglass stacks in the model.
        num_class: number of keypoint classes (heatmap channels).
        matfile: output .mat file path for the predicted keypoints.
    """
    xnet = HourglassNet(num_class, num_stack, (256, 256), (64, 64))

    xnet.load_model(model_json, model_weights)

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    # Python 2 'print' statement replaced with print() for consistency with
    # the Python 3 style used elsewhere in this file.
    print('val data size', valdata.get_dataset_size())

    # np.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float is the documented replacement. 16 MPII joints, (x, y) each.
    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2), dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size, num_stack, sigma=1,
                                                  is_shuffle=False, with_meta=True):
        count += batch_size
        # Stop once the dataset has been covered (presumably the generator
        # yields indefinitely — verify against MPIIDataGen.generator).
        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        # Keypoints are decoded from the last (deepest) stack's heatmaps.
        get_final_pred_kps(valkps, out[-1], _meta)

    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh(model_json, matfile)
# Example #2 (score: 0)
def main_test():
    """Evaluate the trained 8-stack hourglass model (epoch 22) on the MPII
    validation set and report PCKh.

    Uses hard-coded architecture/weight paths under
    ``../../trained_models/hg_s8_b1_v1_adam/``, saves predicted keypoints to
    ``preds_e22.mat``, then scores them with ``run_pckh``.
    """
    xnet = HourglassNet(16, 8, (256, 256), (64, 64))

    xnet.load_model("../../trained_models/hg_s8_b1_v1_adam/net_arch.json",
                    "../../trained_models/hg_s8_b1_v1_adam/weights_epoch22.h5")

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    # Python 2 'print' statement replaced with print() for consistency with
    # the Python 3 style used elsewhere in this file.
    print('val data size', valdata.get_dataset_size())

    # np.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float is the documented replacement. 16 MPII joints, (x, y) each.
    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2), dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size, 8, sigma=2,
                                                  is_shuffle=False, with_meta=True):
        count += batch_size
        # Stop once the dataset has been covered (presumably the generator
        # yields indefinitely — verify against MPIIDataGen.generator).
        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        # Keypoints are decoded from the last (deepest) stack's heatmaps.
        get_final_pred_kps(valkps, out[-1], _meta)

    matfile = os.path.join("../../trained_models/hg_s8_b1_v1_adam/", 'preds_e22.mat')
    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh('hg_s8_b1_epoch22', matfile)
# Example #3 (score: 0)
def main_eval(model_json, model_weights, num_stack, num_class, matfile, tiny):
    """Evaluate an hourglass model on the NYU hand validation set.

    Loads the model from ``model_json``/``model_weights``, predicts keypoints
    over the validation generator, saves them to ``matfile`` (key ``'preds'``),
    and scores with ``run_pckh``.

    Args:
        model_json: path to the serialized model architecture.
        model_weights: path to the model weights file.
        num_stack: number of hourglass stacks in the model.
        num_class: number of keypoint classes (heatmap channels).
        matfile: output .mat file path for the predicted keypoints.
        tiny: if truthy, use the reduced-size model configuration.
    """
    # The "tiny" variant halves input/output resolution and feature channels.
    inres = (192, 192) if tiny else (256, 256)
    outres = (48, 48) if tiny else (64, 64)
    num_channels = 128 if tiny else 256  # fixed typo: was 'num_channles'

    xnet = HourglassNet(num_classes=num_class,
                        num_stacks=num_stack,
                        num_channels=num_channels,
                        inres=inres,
                        outres=outres)

    xnet.load_model(model_json, model_weights)

    # dataset_path = '/home/tomas_bordac/nyu_croped'
    dataset_path = os.path.join('D:\\', 'nyu_croped')
    valdata = NYUHandDataGen('joint_data.mat',
                             dataset_path,
                             inres=inres,
                             outres=outres,
                             is_train=False)

    print('val data size', valdata.get_dataset_size())

    # np.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float is the documented replacement. 11 hand joints, (x, y) each.
    valkps = np.zeros(shape=(valdata.get_dataset_size(), 11, 2),
                      dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size,
                                                  num_stack,
                                                  sigma=3,
                                                  is_shuffle=False,
                                                  with_meta=True):
        count += batch_size
        # Stop once the dataset has been covered (presumably the generator
        # yields indefinitely — verify against NYUHandDataGen.generator).
        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        # Keypoints are decoded from the last (deepest) stack's heatmaps.
        get_final_pred_kps(valkps, out[-1], _meta, outres)

    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh(model_json, matfile)
# Example #4 (score: 0)
def main_eval(model_path, num_stack, num_class, matfile, tiny):
    """Evaluate an hourglass model on the MPII validation set.

    Loads the model from ``model_path``, predicts keypoints over the
    validation generator, saves them to ``matfile`` (key ``'preds'``),
    and scores with ``run_pckh``.

    Args:
        model_path: path to the serialized model (architecture + weights).
        num_stack: number of hourglass stacks in the model.
        num_class: number of keypoint classes (heatmap channels).
        matfile: output .mat file path for the predicted keypoints.
        tiny: if truthy, use the reduced-size model configuration.
    """
    # The "tiny" variant halves input/output resolution and feature channels.
    inres = (192, 192) if tiny else (256, 256)
    outres = (48, 48) if tiny else (64, 64)
    num_channels = 128 if tiny else 256  # fixed typo: was 'num_channles'

    xnet = HourglassNet(num_classes=num_class,
                        num_stacks=num_stack,
                        num_channels=num_channels,
                        inres=inres,
                        outres=outres)

    xnet.load_model(model_path)

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json",
                          "../../data/mpii/images",
                          inres=inres,
                          outres=outres,
                          is_train=False)

    print('val data size', valdata.get_dataset_size())

    # np.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float is the documented replacement. 16 MPII joints, (x, y) each.
    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2),
                      dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size,
                                                  num_stack,
                                                  sigma=1,
                                                  is_shuffle=False,
                                                  with_meta=True):
        count += batch_size
        # Stop once the dataset has been covered (presumably the generator
        # yields indefinitely — verify against MPIIDataGen.generator).
        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        # Keypoints are decoded from the last (deepest) stack's heatmaps.
        get_final_pred_kps(valkps, out[-1], _meta, outres)

    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh(model_path, matfile)
# Example #5 (score: 0)
    # NOTE(review): truncated fragment — the enclosing 'def' line is missing
    # from this view (scrape artifact). 'xnet' and 'valdata' are presumably
    # constructed in the missing header; code is left byte-identical.
    # NOTE(review): Python 2 'print' statement and np.float (removed in
    # NumPy 1.24) retained as-is — cannot safely modify a partial block.
    print 'val data size', valdata.get_dataset_size()

    # Preallocate predicted keypoints: one (x, y) pair per 16 MPII joints.
    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2), dtype=np.float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size, 8, sigma=2, is_shuffle=False , with_meta=True):

        count += batch_size

        # Stop once the dataset has been covered (presumably the generator
        # yields indefinitely — verify against the data generator).
        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        # Keypoints are decoded from the last (deepest) stack's heatmaps.
        get_final_pred_kps(valkps, out[-1], _meta)


    # Save predictions and score them with PCKh against the epoch-22 model.
    matfile = os.path.join( "../../trained_models/hg_s8_b1_v1_adam/", 'preds_e22.mat')
    scipy.io.savemat(matfile, mdict={'preds' : valkps})

    run_pckh('hg_s8_b1_epoch22', matfile)


if __name__ == '__main__':
    # Make CUDA enumerate GPUs in PCI bus order and expose only device 2,
    # so the framework picks up exactly one physical GPU.
    os.environ.update({
        "CUDA_DEVICE_ORDER": "PCI_BUS_ID",
        "CUDA_VISIBLE_DEVICES": '2',
    })
    # main_test()  # full evaluation pass disabled; only re-score saved preds
    preds_file = os.path.join("../../trained_models/hg_s8_b1_v1_adam/", 'preds_e22.mat')
    run_pckh('hg_s8_b1_epoch22', preds_file)