Example 1
    def run_eval(self, epoch):
        val_data = MPIIDataGen(jsonfile='../../data/mpii/mpii_annotations.json',
                               imgpath='../../data/mpii/images',
                               inres=self.inres, outres=self.outres, is_train=False)

        total_success, total_fail = 0, 0
        threshold = 0.5

        count = 0
        batch_size = 8

        for _img, _gthmap, _meta in val_data.generator(batch_size, 8, sigma=2,
                                                       is_shuffle=False, with_meta=True):

            count += batch_size
            if count > val_data.get_dataset_size():
                break

            out = self.model.predict(_img)

            # Score only the final stack's heatmaps against the ground-truth metadata.
            suc, bad = cal_heatmap_acc(out[-1], _meta, threshold)

            total_success += suc
            total_fail += bad

        acc = total_success * 1.0 / (total_success + total_fail)

        print('Eval Accuracy ', acc, ' @ Epoch ', epoch)

        with open(os.path.join(self.get_folder_path(), 'val.txt'), 'a+') as xfile:
            xfile.write('Epoch ' + str(epoch) + ':' + str(acc) + '\n')
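In training code like Examples 2 and 3 below, run_eval is driven by a Keras callback at the end of each epoch. A minimal sketch of such a wrapper, assuming EvalCallBack stores the output folder and resolutions passed at construction (the repository's actual class may differ):

import os
from tensorflow.keras.callbacks import Callback


class EvalCallBack(Callback):
    # Assumed skeleton: pairs with the run_eval method shown above.

    def __init__(self, foldpath, inres, outres):
        super().__init__()
        self.foldpath = foldpath
        self.inres = inres
        self.outres = outres

    def get_folder_path(self):
        return self.foldpath

    def on_epoch_end(self, epoch, logs=None):
        # Keras sets self.model before training starts, so run_eval
        # (as defined above) can predict with the live model here.
        self.run_eval(epoch)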
Example 2
    def train(self, batch_size, model_path, epochs):
        train_dataset = MPIIDataGen('../../data/mpii/mpii_annotations.json',
                                    '../../data/mpii/images',
                                    inres=self.inres,
                                    outres=self.outres,
                                    is_train=True)

        train_gen = train_dataset.generator(batch_size=batch_size,
                                            num_stack=self.num_stacks,
                                            sigma=1,
                                            is_shuffle=True,
                                            rot_flag=True,
                                            scale_flag=True,
                                            flip_flag=True)

        csvlogger = CSVLogger(
            os.path.join(
                model_path, "csv_train_" +
                str(datetime.datetime.now().strftime('%H:%M')) + ".csv"))

        checkpoint = EvalCallBack(model_path, self.inres, self.outres)

        xcallbacks = [csvlogger, checkpoint]

        self.model.fit_generator(
            generator=train_gen,
            steps_per_epoch=train_dataset.get_dataset_size() // batch_size,
            epochs=epochs,
            callbacks=xcallbacks)
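A hypothetical driver for the method above; the constructor keywords match Examples 6 and 16, but build_model and the argument values are assumptions about the surrounding class:

# Illustrative usage only; build_model() is assumed to compile self.model.
xnet = HourglassNet(num_classes=16, num_stacks=2, num_channels=256,
                    inres=(256, 256), outres=(64, 64))
xnet.build_model()
xnet.train(batch_size=8, model_path='../../trained_models/hg_s2_b1', epochs=100)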
Example 3
    def train(self, batch_size, model_path, epochs):
        train_dataset = MPIIDataGen(
            "/home/mike/Documents/stacked_hourglass_tf2/data/mpii/mpii_annotations.json",
            "/home/mike/datasets/mpii_human_pose_v1/images",
            inres=self.inres,
            outres=self.outres,
            is_train=True)
        train_gen = train_dataset.generator(batch_size,
                                            self.num_stacks,
                                            sigma=1,
                                            is_shuffle=True,
                                            rot_flag=True,
                                            scale_flag=True,
                                            flip_flag=True)

        # TypeError: expected str, bytes or os.PathLike object, not NoneType
        csvlogger = CSVLogger(
            os.path.join(
                model_path, "csv_train_" +
                str(datetime.datetime.now().strftime('%H:%M')) + ".csv"))
        modelfile = os.path.join(model_path,
                                 'weights_{epoch:02d}_{loss:.2f}.hdf5')

        checkpoint = EvalCallBack(model_path, self.inres, self.outres)

        xcallbacks = [csvlogger, checkpoint]

        # ValueError: Failed to find data adapter that can handle input: <class 'NoneType'>, <class 'NoneType'>
        self.model.fit_generator(
            generator=train_gen,
            steps_per_epoch=train_dataset.get_dataset_size() // batch_size,
            epochs=epochs,
            callbacks=xcallbacks)
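The two exception comments above suggest this variant was captured mid-debugging: os.path.join raises exactly that TypeError when model_path is None, and Keras reports the data-adapter ValueError when fit_generator receives None in place of a generator.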
Example 4

def main_test():
    xnet = HourglassNet(16, 8, (256, 256), (64, 64))

    xnet.load_model("../../trained_models/hg_s8_b1_sigma1/net_arch.json",
                    "../../trained_models/hg_s8_b1_sigma1/weights_epoch22.h5")

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    total_good, total_fail = 0, 0
    threshold = 0.5

    print('val data size', valdata.get_dataset_size())

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.tt_generator(batch_size, 8, sigma=2, is_shuffle=False, with_meta=True):

        count += batch_size
        if count % (batch_size*100) == 0:
            print(count, 'processed', total_good, total_fail)

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        good, bad = cal_heatmap_acc(out[-1], _meta, threshold)

        total_good += good
        total_fail += bad

    print(total_good, total_fail, threshold, total_good * 1.0 / (total_good + total_fail))
Example 5

    def train(self, batch_size, model_path, epochs):
        train_dataset = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                                    inres=self.inres, outres=self.outres, is_train=True)
        train_gen = train_dataset.generator(batch_size, self.num_stacks, sigma=1, is_shuffle=True,
                                            rot_flag=True, scale_flag=True, flip_flag=True)

        csvlogger = CSVLogger(os.path.join(model_path, "csv_train_"+ str(datetime.datetime.now().strftime('%H:%M')) + ".csv"))
        modelfile = os.path.join(model_path, 'weights_{epoch:02d}_{loss:.2f}.hdf5')

        checkpoint = EvalCallBack(model_path)

        xcallbacks = [csvlogger, checkpoint]

        self.model.fit_generator(generator=train_gen, steps_per_epoch=train_dataset.get_dataset_size()//batch_size,
                                 #validation_data=val_gen, validation_steps= val_dataset.get_dataset_size()//batch_size,
                                 epochs=epochs, callbacks=xcallbacks)
Example 6
def main_inference(model_json, model_weights, num_stack, num_class, imgfile, confth, tiny):
    if tiny:
        xnet = HourglassNet(num_classes=16, num_stacks=num_stack, num_channels=128, inres=(192, 192),
                            outres=(48, 48))
    else:
        xnet = HourglassNet(num_classes=16, num_stacks=num_stack, num_channels=256, inres=(256, 256),
                            outres=(64, 64))

    xnet.load_model(model_json, model_weights)

    out, scale = xnet.inference_file(imgfile)

    kps = post_process_heatmap(out[0, :, :, :])

    ignore_kps = ['plevis', 'thorax', 'head_top']
    kp_keys = MPIIDataGen.get_kp_keys()
    mkps = list()
    for i, _kp in enumerate(kps):
        if kp_keys[i] in ignore_kps:
            _conf = 0.0
        else:
            _conf = _kp[2]
        mkps.append((_kp[0] * scale[1] * 4, _kp[1] * scale[0] * 4, _conf))

    cvmat = render_joints(cv2.imread(imgfile), mkps, confth)

    cv2.imshow('frame', cvmat)
    cv2.waitKey()
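A note on the recurring _kp[0] * scale[1] * 4 pattern (also in Examples 7 and 15): the network emits outres heatmaps four times smaller than the inres input (256/64 and 192/48 are both 4), so heatmap coordinates are scaled by that stride and then by the image resize factors. A standalone sketch of the mapping, with an illustrative helper name:

def heatmap_to_image_coords(kp, scale, stride=4):
    # kp: (x, y, confidence) on the outres heatmap grid.
    # scale: (scale_y, scale_x) between the network input and the source image.
    # stride: inres / outres ratio; 4 for both model sizes used here.
    x, y, conf = kp
    return (x * scale[1] * stride, y * scale[0] * stride, conf)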
Example 7

def main_video(model_json, model_weights, num_stack, num_class, videofile,
               confth):

    xnet = HourglassNet(num_class, num_stack, (256, 256), (64, 64))
    xnet.load_model(model_json, model_weights)

    cap = cv2.VideoCapture(videofile)
    while cap.isOpened():
        ret, frame = cap.read()
        if ret:
            rgb = frame[:, :, ::-1]  # bgr -> rgb
            out, scale = xnet.inference_rgb(rgb, frame.shape)

            kps = post_process_heatmap(out[0, :, :, :])

            ignore_kps = ['plevis', 'thorax', 'head_top']
            kp_keys = MPIIDataGen.get_kp_keys()
            mkps = list()
            for i, _kp in enumerate(kps):
                if kp_keys[i] in ignore_kps:
                    _conf = 0.0
                else:
                    _conf = _kp[2]
                mkps.append(
                    (_kp[0] * scale[1] * 4, _kp[1] * scale[0] * 4, _conf))

            framejoints = render_joints(frame, mkps, confth)

            cv2.imshow('frame', framejoints)
            cv2.waitKey(10)
        else:
            # End of stream: stop instead of spinning on failed reads.
            break
    cap.release()
Example 8
def main_test():
    xnet = HourglassNet(16, 8, (256, 256), (64, 64))

    xnet.load_model("../../trained_models/hg_s8_b1/net_arch.json",
                    "../../trained_models/hg_s8_b1/weights_epoch29.h5")

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json",
                          "../../data/mpii/images",
                          inres=(256, 256),
                          outres=(64, 64),
                          is_train=False)

    for _img, _gthmap in valdata.generator(1, 8, sigma=2, is_shuffle=True):
        out = xnet.model.predict(_img)

        scipy.misc.imshow(_img[0, :, :, :])  # note: scipy.misc.imshow was removed in newer SciPy releases
        #view_predict_hmap(_gthmap)
        view_predict_hmap(out, show_raw=False)
Example 9

    def resume_train(self, batch_size, model_json, model_weights, init_epoch, epochs):

        self.load_model(model_json, model_weights)
        self.model.compile(optimizer=RMSprop(lr=5e-4), loss=mean_squared_error, metrics=["accuracy"])

        train_dataset = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                                    inres=self.inres, outres=self.outres, is_train=True)

        train_gen = train_dataset.generator(batch_size, self.num_stacks, sigma=1, is_shuffle=True,
                                            rot_flag=True, scale_flag=True, flip_flag=True)

        model_dir = os.path.dirname(os.path.abspath(model_json))
        print(model_dir, model_json)
        csvlogger = CSVLogger(os.path.join(model_dir, "csv_train_" + str(datetime.datetime.now().strftime('%H:%M')) + ".csv"))

        checkpoint = EvalCallBack(model_dir)

        xcallbacks = [csvlogger, checkpoint]

        self.model.fit_generator(generator=train_gen, steps_per_epoch=train_dataset.get_dataset_size() // batch_size,
                                 initial_epoch=init_epoch, epochs=epochs, callbacks=xcallbacks)
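A hypothetical call resuming from checkpoint files like those in Example 8 (paths and epoch numbers are illustrative):

xnet.resume_train(batch_size=8,
                  model_json='../../trained_models/hg_s8_b1/net_arch.json',
                  model_weights='../../trained_models/hg_s8_b1/weights_epoch29.h5',
                  init_epoch=29, epochs=100)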
Example 10

def main_vis():
    xdata = MPIIDataGen("../../data/mpii/mpii_annotations.json",
                        "../../data/mpii/images",
                        inres=(256, 256),
                        outres=(64, 64),
                        is_train=True)

    count = 0
    for _img, _gthmap, _meta in xdata.tt_generator(1,
                                                   4,
                                                   sigma=1,
                                                   with_meta=True,
                                                   is_shuffle=True,
                                                   rot_flag=True,
                                                   scale_flag=True):
        xgthmap = _gthmap[-1]
        print(_img.dtype)
        #scipy.misc.imshow(_img[0,:,:,:])
        debug_view_gthmap_v2(_img[0, :, :, :], xgthmap[0, :, :, :])

    print('scan done')
Example 11

def main_eval(model_json, model_weights, num_stack, num_class, matfile):
    xnet = HourglassNet(num_class, num_stack, (256, 256), (64, 64))

    xnet.load_model(model_json, model_weights)

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    print('val data size', valdata.get_dataset_size())

    # One (x, y) prediction per keypoint (16) per validation image.
    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2), dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size, num_stack, sigma=1, is_shuffle=False, with_meta=True):

        count += batch_size

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        get_final_pred_kps(valkps, out[-1], _meta)

    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh(model_json, matfile)
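A hypothetical invocation of the evaluator above, reusing the checkpoint paths from Example 8 (the .mat filename is illustrative):

main_eval('../../trained_models/hg_s8_b1/net_arch.json',
          '../../trained_models/hg_s8_b1/weights_epoch29.h5',
          num_stack=8, num_class=16, matfile='preds_e29.mat')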
Example 12
def main_test():
    xnet = HourglassNet(16, 8, (256, 256), (64, 64))

    xnet.load_model("../../trained_models/hg_s8_b1_v1_adam/net_arch.json",
                    "../../trained_models/hg_s8_b1_v1_adam/weights_epoch22.h5")

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    print('val data size', valdata.get_dataset_size())

    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2), dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size, 8, sigma=2, is_shuffle=False, with_meta=True):

        count += batch_size

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        get_final_pred_kps(valkps, out[-1], _meta)

    matfile = os.path.join("../../trained_models/hg_s8_b1_v1_adam/", 'preds_e22.mat')
    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh('hg_s8_b1_epoch22', matfile)
Example 13

def main_check_gt():

    def check_gt_invalid_kps(metalst):
        # Count keypoints whose x, y and visibility (third column) are all positive.
        valid, invalid = 0, 0
        for meta in metalst:
            tpts = meta['tpts']
            for i in range(tpts.shape[0]):
                if tpts[i, 0] > 0 and tpts[i, 1] > 0 and tpts[i, 2] > 0:
                    valid += 1
                else:
                    invalid += 1
        return valid, invalid

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json", "../../data/mpii/images",
                          inres=(256, 256), outres=(64, 64), is_train=False)

    total_valid, total_invalid = 0, 0

    print('val data size', valdata.get_dataset_size())

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.tt_generator(batch_size, 8, sigma=2, is_shuffle=False, with_meta=True):

        count += batch_size
        if count % (batch_size * 100) == 0:
            print(count, 'processed', total_valid, total_invalid)

        if count > valdata.get_dataset_size():
            break

        good, bad = check_gt_invalid_kps(_meta)

        total_valid += good
        total_invalid += bad

    print(total_valid, total_invalid, total_valid * 1.0 / (total_valid + total_invalid))
Example 14
    def run_eval(self, epoch):
        valdata = MPIIDataGen(
            "/home/mike/Documents/stacked_hourglass_tf2/data/mpii/mpii_annotations.json",
            "/home/mike/datasets/mpii_human_pose_v1/images",
            inres=self.inres,
            outres=self.outres,
            is_train=False)

        total_suc, total_fail = 0, 0
        threshold = 0.5

        count = 0
        batch_size = 8
        for _img, _gthmap, _meta in valdata.generator(batch_size,
                                                      8,
                                                      sigma=2,
                                                      is_shuffle=False,
                                                      with_meta=True):

            count += batch_size
            if count > valdata.get_dataset_size():
                break

            out = self.model.predict(_img)

            suc, bad = cal_heatmap_acc(out[-1], _meta, threshold)

            total_suc += suc
            total_fail += bad

        acc = total_suc * 1.0 / (total_fail + total_suc)

        print('Eval Accuracy ', acc, '@ Epoch ', epoch)

        with open(os.path.join(self.get_folder_path(), 'val.txt'),
                  'a+') as xfile:
            xfile.write('Epoch ' + str(epoch) + ':' + str(acc) + '\n')
Example 15
    def infer(self, imgfile, conf_threshold):
        out, scale = self.hgnet.inference_file(imgfile)

        kps = post_process_heatmap(out[0, :, :, :])

        ignore_kps = ['plevis', 'thorax', 'head_top']
        kp_keys = MPIIDataGen.get_kp_keys()
        mkps = list()
        for i, _kp in enumerate(kps):
            if kp_keys[i] in ignore_kps:
                _conf = 0.0
            else:
                _conf = _kp[2]
            mkps.append((_kp[0] * scale[1] * 4, _kp[1] * scale[0] * 4, _conf))

        cvmat = render_joints(cv2.imread(imgfile), mkps, conf_threshold)

        cv2.imshow('frame', cvmat)
        cv2.waitKey()
Example 16
def main_eval(model_path, num_stack, num_class, matfile, tiny):
    inres = (192, 192) if tiny else (256, 256)
    outres = (48, 48) if tiny else (64, 64)
    num_channels = 128 if tiny else 256

    xnet = HourglassNet(num_classes=num_class,
                        num_stacks=num_stack,
                        num_channels=num_channels,
                        inres=inres,
                        outres=outres)

    xnet.load_model(model_path)

    valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json",
                          "../../data/mpii/images",
                          inres=inres,
                          outres=outres,
                          is_train=False)

    print('val data size', valdata.get_dataset_size())

    valkps = np.zeros(shape=(valdata.get_dataset_size(), 16, 2),
                      dtype=float)

    count = 0
    batch_size = 8
    for _img, _gthmap, _meta in valdata.generator(batch_size,
                                                  num_stack,
                                                  sigma=1,
                                                  is_shuffle=False,
                                                  with_meta=True):

        count += batch_size

        if count > valdata.get_dataset_size():
            break

        out = xnet.model.predict(_img)

        get_final_pred_kps(valkps, out[-1], _meta, outres)

    scipy.io.savemat(matfile, mdict={'preds': valkps})

    run_pckh(model_path, matfile)
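A hypothetical command-line wrapper for the evaluator above; the flag names are illustrative, not the repository's actual CLI:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--model_path', required=True)
    parser.add_argument('--num_stack', type=int, default=2)
    parser.add_argument('--num_class', type=int, default=16)
    parser.add_argument('--mat_file', required=True)
    parser.add_argument('--tiny', action='store_true')
    args = parser.parse_args()

    main_eval(args.model_path, args.num_stack, args.num_class,
              args.mat_file, args.tiny)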
Example 17
sys.path.insert(0, '../data_gen')
sys.path.insert(0, '../eval')

from hourglass_lj import Hourglass
from mpii_datagen import MPIIDataGen
from skimage.transform import resize
import numpy as np
import imageio
import matplotlib.pyplot as plt
#%%
mynet = Hourglass(num_classes=16, num_channels=256, num_stacks=2, inres=(256, 256), outres=(64, 64))
mynet.load_model('../../trained_models/lj_model/net_arch.json', '../../trained_models/lj_model/weights_epoch74.h5')
#%%
valdata = MPIIDataGen("../../data/mpii/mpii_annotations.json",
                      "../../data/mpii/images",
                      inres=(256, 256), outres=(64, 64), is_train=False)

val_gen = valdata.generator(8, 8, sigma=1, is_shuffle=False, with_meta=True)

img, htmap, meta = next(val_gen)
img, htmap, meta = next(val_gen)  # advance to the second batch; the first is discarded

# img1 =  imageio.imread('C:/Users/LJ/Stacked_Hourglass_Network_Keras/data/mpii/images/072960618.jpg')
out = mynet.model.predict(img)
#%%
noimg = 7
plt.figure()
plt.imshow(img[noimg, :])
plt.axis('off')
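A follow-on cell in the same style to inspect the prediction; out is the list of per-stack outputs, so out[-1] holds the final stack's heatmaps with shape (batch, 64, 64, 16). The joint index chosen here is illustrative:

#%%
joint_idx = 0  # illustrative choice; one of the 16 MPII joint channels
plt.figure()
plt.imshow(out[-1][noimg, :, :, joint_idx], cmap='hot')
plt.axis('off')
plt.show()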