def get_dataflow(annot_path, img_dir, batch_size):
    """
    Build the tensorpack dataflow that serves batches for training.

    :param annot_path: path to the annotation file
    :param img_dir: path to the images
    :param batch_size: batch size
    :return: dataflow object yielding ([img, mask1, mask2], ground-truth list)
    """
    df = CocoDataFlow((368, 368), annot_path, img_dir)
    df.prepare()

    # Per-sample transformation pipeline, applied in order.
    for transform in (read_img, gen_mask, augment, apply_mask, build_sample):
        df = MapData(df, transform)

    # Parallel prefetching in 4 worker processes, then batching.
    df = PrefetchDataZMQ(df, nr_proc=4)  #df = PrefetchData(df, 2, 1)
    df = BatchData(df, batch_size, use_list=False)

    # Split each batch into (inputs, targets); the (x[3], x[4]) ground-truth
    # pair is repeated 6 times, presumably once per network stage — TODO confirm.
    df = MapData(df, lambda x: ([x[0], x[1], x[2]], [x[3], x[4]] * 6))
    df.reset_state()

    return df
# Ejemplo n.º 2  (scrape artifact: separator between two unrelated code examples)
# 0
                        meta.aug_joints,
                        1,
                        stride=8)

    return [meta, mask_paf, mask_heatmap, pafmap, heatmap]


if __name__ == '__main__':
    # Debug entry point: build a single-process dataflow over the validation
    # set and visualize each sample.  (The unused `batch_size = 10` local was
    # removed — nothing in this block reads it.)
    curr_dir = os.path.dirname(__file__)

    annot_path = os.path.join(
        curr_dir, '../dataset/annotations/pen_keypoints_validation.json')
    img_dir = os.path.abspath(os.path.join(curr_dir, '../dataset/validation/'))

    df = CocoDataFlow(
        (368, 368), COCODataPaths(annot_path, img_dir))  #, select_ids=[1000])
    df.prepare()

    # Per-sample transformation pipeline, applied in order.
    df = MapData(df, read_img)
    df = MapData(df, gen_mask)
    df = MapData(df, augment)
    df = MapData(df, apply_mask)
    df = MapData(df, build_debug_sample)

    # One worker with a small prefetch buffer is enough for debugging.
    df = PrefetchData(df, nr_prefetch=2, nr_proc=1)

    df.reset_state()
    gen = df.get_data()

    for g in gen:
        show_image_mask_center_of_main_person(g)
        #show_image_heatmap_paf(g)