Example #1
def main():
    # Where to look for Cityscapes
    if 'CITYSCAPES_DATASET_INDIA' in os.environ:
        cityscapesPath = os.environ['CITYSCAPES_DATASET_INDIA']
    else:
        cityscapesPath = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..', '..')
    # how to search for all ground truth
    searchFine = os.path.join(cityscapesPath, "gtFine", "*", "*",
                              "*_gt*_polygons.json")
    searchCoarse = os.path.join(cityscapesPath, "gtCoarse", "*", "*",
                                "*_gt*_polygons.json")

    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()
    filesCoarse = glob.glob(searchCoarse)
    filesCoarse.sort()

    # concatenate fine and coarse
    #files = filesFine + filesCoarse
    files = filesFine  # use this line if fine is enough for now.

    # quit if we did not find anything
    if not files:
        printError("Did not find any files. Please consult the README.")

    # a bit verbose
    print("Processing {} annotation files".format(len(files)))

    # iterate through files
    progress = 0
    print("Progress: {:>3} %".format(progress * 100 / len(files)), end=' ')
    for f in files:
        # create the output filename
        dst = f.replace("_polygons.json", "_labelTrainIds.png")

        # do the conversion
        # encoding can be set to
        #     - "ids"      : classes are encoded using the regular label IDs
        #     - "trainIds" : classes are encoded using the training IDs
        #     - "color"    : classes are encoded using the corresponding colors
        try:
            json2labelImg(f, dst, "trainIds")
        except:
            print("Failed to convert: {}".format(f))
            raise

        # status
        progress += 1
        print("\rProgress: {:>3} %".format(progress * 100 / len(files)),
              end=' ')
        sys.stdout.flush()
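These examples are trimmed to main() and omit the module header. A minimal sketch of the imports Example #1 relies on, assuming the packaged cityscapesScripts layout (older, flat-script checkouts import the same names directly):

# Assumed header for Example #1; module paths follow the cityscapesScripts
# package layout and may differ in older, flat-script copies of the tools.
from __future__ import print_function
import glob
import os
import sys

from cityscapesscripts.helpers.csHelpers import printError
from cityscapesscripts.preparation.json2labelImg import json2labelImg

With these in place, main() walks every *_gt*_polygons.json under gtFine and writes a *_labelTrainIds.png next to it.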
Example #2
def main():
    # Where to look for Cityscapes
    if 'CITYSCAPES_DATASET' in os.environ:
        cityscapesPath = os.environ['CITYSCAPES_DATASET']
    else:
        cityscapesPath = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..', '../dataset')
    # how to search for all ground truth
    searchFine = os.path.join(cityscapesPath, "gtFine", "*", "*",
                              "*_gt*_polygons.json")
    searchCoarse = os.path.join(cityscapesPath, "gtCoarse", "*", "*",
                                "*_gt*_polygons.json")
    print("searchFine:" + searchFine)
    print("searchCoarse:" + searchCoarse)
    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()
    filesCoarse = glob.glob(searchCoarse)
    filesCoarse.sort()

    # concatenate fine and coarse
    files = filesFine + filesCoarse
    # files = filesFine # use this line if fine is enough for now.

    # quit if we did not find anything
    if not files:
        printError("Did not find any files. Please consult the README.")

    # a bit verbose
    print("Processing {} annotation files".format(len(files)))

    # iterate through files
    progress = 0
    print("Progress: {:>3} %".format(progress * 100 / len(files)), end=' ')
    for f in files:
        # create the output filename
        # dst = f.replace( "_polygons.json" , "_instanceTrainIds.png" )
        dst = f.replace("_polygons.json", "_instanceIds.png")
        print("Processing file:{}".format(dst))
        # do the conversion
        try:
            # json2instanceImg( f , dst , "trainIds" )
            json2instanceImg(f, dst)
        except:
            print("Failed to convert: {}".format(f))
            raise

        # status
        progress += 1
        print("\rProgress: {:>3} %".format(progress * 100 / len(files)),
              end=' ')
        sys.stdout.flush()
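Example #2 writes *_instanceIds.png images instead of train-ID label images, so it needs json2instanceImg rather than json2labelImg. The assumed import, again following the packaged layout:

# Assumed import for Example #2; the converter lives in the preparation
# subpackage of cityscapesScripts (the path may vary between versions).
from cityscapesscripts.preparation.json2instanceImg import json2instanceImg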
Example #3
def main():
    # Where to look for Cityscapes
    # if 'CITYSCAPES_DATASET' in os.environ:
    #     cityscapesPath = os.environ['CITYSCAPES_DATASET']
    # else:
    #     cityscapesPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),'..','..')
    cityscapesPath = os.path.expanduser('~/exp/datasets/cityscapes/gtFine_trainvaltest')
    # how to search for all ground truth
    searchFine = os.path.join(cityscapesPath, "gtFine", "*", "*",
                              "*_gt*_polygons.json")
    searchCoarse = os.path.join(cityscapesPath, "gtCoarse", "*", "*",
                                "*_gt*_polygons.json")

    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()
    filesCoarse = glob.glob(searchCoarse)
    filesCoarse.sort()

    # concatenate fine and coarse
    files = filesFine + filesCoarse
    # files = filesFine # use this line if fine is enough for now.

    # quit if we did not find anything
    if not files:
        printError("Did not find any files. Please consult the README.")

    # a bit verbose
    print "Processing {} annotation files".format(len(files))

    # iterate through files
    progress = 0
    print "Progress: {:>3} %".format( progress * 100 / len(files) ) ,
    for f in files:
        # create the output filename
        dst = f.replace("_polygons.json", "_labelTrainIds.png")

        # do the conversion
        try:
            json2labelImg(f, dst, "trainIds")
        except:
            print("Failed to convert: {}".format(f))
            raise

        # status
        progress += 1
        print "\rProgress: {:>3} %".format( progress * 100 / len(files) ) ,
        sys.stdout.flush()
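Example #3 hard-codes the dataset root and comments out the environment lookup used by the other examples. A sketch of the equivalent, more portable lookup; the fallback path below is only a placeholder taken from the snippet itself:

# Portable root lookup in the style of Examples #1 and #2; the fallback path
# is a placeholder for wherever gtFine_trainvaltest was extracted.
import os

cityscapesPath = os.environ.get(
    'CITYSCAPES_DATASET',
    os.path.expanduser('~/exp/datasets/cityscapes/gtFine_trainvaltest'))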
Example #4
def main():
    cityscapesPath = "/Users/ht/Documents/dataset/cityscapes/gtFine_trainvaltest"
    # how to search for all ground truth
    searchFine = os.path.join(cityscapesPath, "gtFine", "*", "*",
                              "*_gt*_polygons.json")
    searchCoarse = os.path.join(cityscapesPath, "gtCoarse", "*", "*",
                                "*_gt*_polygons.json")

    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()
    filesCoarse = glob.glob(searchCoarse)
    filesCoarse.sort()

    # concatenate fine and coarse
    #files = filesFine + filesCoarse
    files = filesFine  # use this line if fine is enough for now.

    # quit if we did not find anything
    if not files:
        printError("Did not find any files. Please consult the README.")

    # a bit verbose
    print("Processing {} annotation files".format(len(files)))

    # iterate through files
    progress = 0
    print("Progress: {:>3} %".format(progress * 100 / len(files)), end=' ')
    for f in files:
        # create the output filename
        dst = f.replace("_polygons.json", "_labelTrainIds.png")

        # do the conversion
        try:
            json2labelImg(f, dst, "trainIds")
        except:
            print("Failed to convert: {}".format(f))
            raise

        # status
        progress += 1
        print("\rProgress: {:>3} %".format(progress * 100 / len(files)),
              end=' ')
        sys.stdout.flush()
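Every example derives the output name with a plain string replacement on the annotation path. A quick check of that mapping (the file name below is illustrative only):

# Illustrative check of the filename mapping used in Example #4's loop.
src = "gtFine/train/aachen/aachen_000000_000019_gtFine_polygons.json"
dst = src.replace("_polygons.json", "_labelTrainIds.png")
print(dst)  # gtFine/train/aachen/aachen_000000_000019_gtFine_labelTrainIds.png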
Example #5
def main():
    # Where to look for Cityscapes
    root = '../../../data/cityscape'
    os.environ['CITYSCAPES_DATASET'] = root
    store_path = 'gtFine_allperson'
    label_tochose = 'rider'
    set = 'val'
    gtset_name = 'gtFine'
    print(label_tochose, set, gtset_name, store_path)
    # city="darmstadt"
    # if not os.path.exists(store_path):
    #     os.mkdir(store_path)
    # set_path=os.path.join(store_path,set)
    # if not os.path.exists(set_path):
    #     os.mkdir(set_path)
    # city_path=os.path.join(set_path,city)
    # if not os.path.exists(city_path):
    #     os.mkdir(city_path)

    if 'CITYSCAPES_DATASET' in os.environ:
        cityscapesPath = os.environ['CITYSCAPES_DATASET']
    else:
        cityscapesPath = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..', '..')
    # how to search for all ground truth
    searchFine = os.path.join(cityscapesPath, gtset_name, set, "*",
                              "*_gt*_polygons.json")

    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()

    # concatenate fine and coarse
    files = filesFine
    # files = filesFine # use this line if fine is enough for now.

    # quit if we did not find anything
    if not files:
        printError("Did not find any files. Please consult the README.")

    # a bit verbose
    print("Processing {} annotation files".format(len(files)))

    # iterate through files
    progress = -1
    numIns_all = 0
    numimg_selected = 0
    # print("Progress: {:>3} %".format( progress * 100 / len(files) ), end=' ')
    for i, f in enumerate(files):
        # create the output filename
        progress += 1
        # print("\rProgress: {:>3} %".format( progress * 100 / len(files) ), end=' ')
        print('Processing........', i, len(files), i * 1.0 / len(files))

        # if numIns_all>=1000:
        #     break
        # do the conversion
        try:
            # json2instanceImg( f , dst , "trainIds" )
            print(i, 'image............', numIns_all, 'ins selected now')
            insImg_arr, insIds_arr, Sizes_arr, num_instances = json2instanceArr(
                f, 'trainIds', label_tochose=label_tochose)
            if num_instances == 0:
                # progress += 1
                continue
            numIns_all += num_instances
            numimg_selected += 1
            for instance, id, size in zip(insImg_arr, insIds_arr, Sizes_arr):

                try:
                    dst = f.replace("_gtFine_polygons.json",
                                    "_" + str(id) + ".png")
                    dst = dst.replace("gtFine", store_path)

                    n_last = len(dst.split('/')[-1])
                    parent_dir = dst[:-n_last]
                    pathtodir(parent_dir)

                    imsave(dst, instance)

                except:
                    print("Failed to save: {}".format(id))
                    raise
        except:
            print("Failed to convert: {}".format(f))
            raise

        # status

        sys.stdout.flush()

    print('Ins count all', numIns_all, ',all imgs', len(files),
          ',selected imgs', numimg_selected,
          ',average instances per img selected',
          numIns_all * 1.0 / numimg_selected, ',ave ins per img all',
          numIns_all * 1.0 / len(files))
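Example #5 depends on helpers that are not shown: json2instanceArr (a project-specific variant of json2instanceImg that returns per-instance masks, their IDs, and their sizes), pathtodir, and imsave. A hedged sketch of the two generic helpers, assuming imageio is installed; json2instanceArr itself is not reproduced:

# Assumed stand-ins for Example #5's generic helpers; json2instanceArr is
# project-specific and not reproduced here.
import os

import imageio


def pathtodir(path):
    # Create the output directory chain for an instance image if it is missing.
    os.makedirs(path, exist_ok=True)


def imsave(dst, img):
    # Write a single instance mask to disk.
    imageio.imwrite(dst, img)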