def main():
    """Convert every "*_polygons.json" file in `polygonsjson_path_list` to a
    "*_instanceIds.png" instance-ID image via json2instanceImg.

    Reads the module-level `polygonsjson_path_list`; writes one PNG next to
    each JSON input. Raises ValueError when the list is empty.
    """
    files = polygonsjson_path_list
    # quit if we did not find anything
    if not files:
        raise ValueError("j) Did not find any files!!!")
    # a bit verbose
    print("Processing {} annotation files".format(len(files)))
    # iterate through files, reporting progress in-place on one line
    progress = 0
    # floor division: "{:>3}" expects a whole-number percentage, not a float
    print("Progress: {:>3} %".format(progress * 100 // len(files)), end=' ')
    for f in files:
        # create the output filename
        # NOTE: writes "_instanceIds.png" (raw label ids), not
        # "_instanceTrainIds.png" (train ids).
        dst = f.replace("_polygons.json", "_instanceIds.png")
        # do the conversion; encoding "ids" matches the raw-id output name
        try:
            json2instanceImg(f, dst, "ids")
        except Exception:
            # report the offending file, then re-raise for the full traceback
            print("Failed to convert: {}".format(f))
            raise
        # status
        progress += 1
        print("\rProgress: {:>3} %".format(progress * 100 // len(files)), end=' ')
        sys.stdout.flush()
def main():
    """Find all Cityscapes "*_polygons.json" ground-truth files (fine and
    coarse) and convert each to an "*_instanceTrainIds.png" image via
    json2instanceImg.

    The dataset root comes from the CITYSCAPES_DATASET environment variable
    when set, otherwise from a path relative to this script.
    """
    # Where to look for Cityscapes
    if 'CITYSCAPES_DATASET' in os.environ:
        cityscapesPath = os.environ['CITYSCAPES_DATASET']
    else:
        cityscapesPath = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..', '..')
    # BUG FIX: the original unconditionally overwrote cityscapesPath with a
    # hard-coded '/home/pengsida/Datasets/cityscape', defeating the lookup
    # above; that leftover override is removed.
    # how to search for all ground truth
    searchFine = os.path.join(cityscapesPath, "gtFine", "*", "*", "*_gt*_polygons.json")
    searchCoarse = os.path.join(cityscapesPath, "gtCoarse", "*", "*", "*_gt*_polygons.json")
    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()
    filesCoarse = glob.glob(searchCoarse)
    filesCoarse.sort()
    # concatenate fine and coarse
    files = filesFine + filesCoarse
    # files = filesFine # use this line if fine is enough for now.
    # quit if we did not find anything
    if not files:
        printError("Did not find any files. Please consult the README.")
    # a bit verbose
    print("Processing {} annotation files".format(len(files)))
    # iterate through files
    progress = 0
    # floor division keeps the "{:>3}" column a whole-number percentage
    print("Progress: {:>3} %".format(progress * 100 // len(files)), end=' ')
    for f in files:
        # BUG FIX: the original overwrote f here with a hard-coded debug path
        # (frankfurt_000000_000294), so every iteration converted the same
        # file; the leftover debug line is removed.
        # create the output filename
        dst = f.replace("_polygons.json", "_instanceTrainIds.png")
        # do the conversion
        try:
            json2instanceImg(f, dst, "trainIds")
        except Exception:
            # report the offending file, then re-raise for the full traceback
            print("Failed to convert: {}".format(f))
            raise
        # status
        progress += 1
        print("\rProgress: {:>3} %".format(progress * 100 // len(files)), end=' ')
        sys.stdout.flush()
def augment_data_to_cityscapes(target_dir, split):
    """Render label and instance images for every polygon annotation under
    target_dir/gtFine/<split>/<city>/.

    For each "*_polygons.json" file, writes "*_labelIds.png" (via
    json2labelImg) and "*_instanceIds.png" (via json2instanceImg) next to it.

    Args:
        target_dir: dataset root containing a "gtFine" directory.
        split: subset name, e.g. "train", "val" or "test".
    """
    folder_root = join(target_dir, "gtFine", split)
    # one sub-folder per city
    for folder in [d for d in os.listdir(folder_root)
                   if os.path.isdir(join(folder_root, d))]:
        json_root_dir = join(folder_root, folder)
        for json_file in os.listdir(json_root_dir):
            # BUG FIX: only process the polygon JSON files. The original
            # handled every directory entry and split on ".", which raises
            # ValueError on filenames with more or fewer than one dot and,
            # on a second run, would pick up the PNGs created below and look
            # for nonexistent JSONs (e.g. "x_labelIds.json").
            if not json_file.endswith("_polygons.json"):
                continue
            name, _ = os.path.splitext(json_file)
            # strip the trailing "_polygons" suffix to get the frame id
            base_name = name[:name.rfind("_")]
            json2labelImg(join(json_root_dir, name + ".json"),
                          join(json_root_dir, f"{base_name}_labelIds.png"))
            json2instanceImg(join(json_root_dir, name + ".json"),
                             join(json_root_dir, f"{base_name}_instanceIds.png"))
def main():
    """Find all Cityscapes "*_polygons.json" ground-truth files (fine and
    coarse) and convert each to an "*_instanceTrainIds.png" image via
    json2instanceImg.

    The dataset root comes from the CITYSCAPES_DATASET environment variable
    when set, otherwise from a path relative to this script.
    """
    # Where to look for Cityscapes
    if 'CITYSCAPES_DATASET' in os.environ:
        cityscapesPath = os.environ['CITYSCAPES_DATASET']
    else:
        cityscapesPath = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..', '..')
    # how to search for all ground truth
    searchFine = os.path.join(cityscapesPath, "gtFine", "*", "*", "*_gt*_polygons.json")
    searchCoarse = os.path.join(cityscapesPath, "gtCoarse", "*", "*", "*_gt*_polygons.json")
    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()
    filesCoarse = glob.glob(searchCoarse)
    filesCoarse.sort()
    # concatenate fine and coarse
    files = filesFine + filesCoarse
    # files = filesFine # use this line if fine is enough for now.
    # quit if we did not find anything
    if not files:
        printError("Did not find any files. Please consult the README.")
    # a bit verbose
    print("Processing {} annotation files".format(len(files)))
    # iterate through files
    progress = 0
    # floor division: the original's "/" is float division in Python 3, so
    # the "{:>3}" percent column printed values like 33.333333333333336
    print("Progress: {:>3} %".format(progress * 100 // len(files)), end=' ')
    for f in files:
        # create the output filename
        dst = f.replace("_polygons.json", "_instanceTrainIds.png")
        # do the conversion
        try:
            json2instanceImg(f, dst, "trainIds")
        except Exception:
            # report the offending file, then re-raise for the full traceback
            print("Failed to convert: {}".format(f))
            raise
        # status
        progress += 1
        print("\rProgress: {:>3} %".format(progress * 100 // len(files)), end=' ')
        sys.stdout.flush()
def createTrainIdInstanceImgs(cityscapesPath, fine_fold_name, coarse_fold_name):
    """Find all "*_polygons.json" ground-truth files under the given fine and
    coarse folders of cityscapesPath and convert each to an
    "*_instanceTrainIds.png" image via json2instanceImg.

    Args:
        cityscapesPath: dataset root directory.
        fine_fold_name: name of the fine-annotation folder (e.g. "gtFine").
        coarse_fold_name: name of the coarse-annotation folder (e.g. "gtCoarse").
    """
    # how to search for all ground truth
    searchFine = os.path.join(cityscapesPath, fine_fold_name, "*", "*", "*_gt*_polygons.json")
    searchCoarse = os.path.join(cityscapesPath, coarse_fold_name, "*", "*", "*_gt*_polygons.json")
    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()
    filesCoarse = glob.glob(searchCoarse)
    filesCoarse.sort()
    # concatenate fine and coarse
    files = filesFine + filesCoarse
    # files = filesFine # use this line if fine is enough for now.
    # quit if we did not find anything
    if not files:
        printError("Did not find any files. Please consult the README.")
    # a bit verbose
    print("Processing {} annotation files".format(len(files)))
    # iterate through files
    progress = 0
    # floor division: the original's "/" is float division in Python 3, so
    # the "{:>3}" percent column printed values like 33.333333333333336
    print("Progress: {:>3} %".format(progress * 100 // len(files)), end=' ')
    for f in files:
        # create the output filename
        dst = f.replace("_polygons.json", "_instanceTrainIds.png")
        # do the conversion
        try:
            json2instanceImg(f, dst, "trainIds")
        except Exception:
            # report the offending file, then re-raise for the full traceback
            print("Failed to convert: {}".format(f))
            raise
        # status
        progress += 1
        print("\rProgress: {:>3} %".format(progress * 100 // len(files)), end=' ')
        sys.stdout.flush()