import glob
import os

from tqdm import tqdm


def main(args):

    # how to search for all ground truth
    searchFine = os.path.join(args.datadir, "gtFine", "*", "*",
                              "*_gt*_polygons.json")

    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()

    files = filesFine

    if not files:
        tqdm.write("Did not find any files. Please consult the README.")
        return

    # a bit verbose
    tqdm.write("Processing {} annotation files".format(len(files)))

    # iterate through files
    progress = 0
    tqdm.write("Progress: {:>3} %".format(progress * 100 / len(files)),
               end=' ')

    from multiprocessing import Pool
    import time

    pool = Pool(args.num_workers)
    # results = pool.map(process_pred_gt_pair, pairs)
    results = list(tqdm(pool.imap(process_folder, files), total=len(files)))
    pool.close()
    pool.join()
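# `process_folder` is not defined on this page; each example maps it over the
# *_gt*_polygons.json files found above, so it must accept a single annotation
# path. The real function presumably rasterizes the polygons into label images;
# the self-contained stand-in below only mirrors the call shape and inspects the
# file (everything in it is an assumption, not taken from the snippet).
import json

def process_folder(json_file):
    # open one polygon annotation and count the labelled objects it contains
    with open(json_file) as f:
        annotation = json.load(f)
    return json_file, len(annotation.get("objects", []))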
Example #2
import glob
import os

from tqdm import tqdm


def main(args):
    import sys
    if args.panoptic:
        args.instance = True
    sys.path.append(
        os.path.normpath(
            os.path.join(os.path.dirname(__file__), '..', 'helpers')))
    # how to search for all ground truth
    searchFine = os.path.join(args.datadir, "gtFine", "*", "*",
                              "*_gt*_polygons.json")

    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()

    files = filesFine

    if not files:
        tqdm.write("Did not find any files. Please consult the README.")
        return

    # a bit verbose
    tqdm.write(
        "Processing {} annotation files for Semantic/Instance Segmentation".
        format(len(files)))

    # iterate through files
    progress = 0
    tqdm.write("Progress: {:>3} %".format(progress * 100 / len(files)),
               end=' ')

    from multiprocessing import Pool
    import time

    pool = Pool(args.num_workers)
    # results = pool.map(process_pred_gt_pair, pairs)
    results = list(tqdm(pool.imap(process_folder, files), total=len(files)))
    pool.close()
    pool.join()

    if args.panoptic:
        from cityscape_panoptic_gt import panoptic_converter
        for split in ['train', 'val']:

            tqdm.write("Panoptic Segmentation {} split".format(split))
            folder_name = os.path.join(args.datadir, 'gtFine')
            output_folder = os.path.join(folder_name, split + "_panoptic")
            os.makedirs(output_folder, exist_ok=True)
            out_file = os.path.join(folder_name, split + "_panoptic.json")
            panoptic_converter(args.num_workers,
                               os.path.join(folder_name, split), output_folder,
                               out_file)
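# The conversion above assumes the Cityscapes-style layout implied by the glob
# pattern, i.e. <datadir>/gtFine/<split>/<city>/<name>_gt*_polygons.json, and
# the panoptic step then writes <split>_panoptic/ plus <split>_panoptic.json
# next to the split folders. A quick, read-only sanity check of that layout
# (the datadir value below is a placeholder, not from the source):
import glob
import os

datadir = "/path/to/dataset"  # hypothetical dataset root
pattern = os.path.join(datadir, "gtFine", "*", "*", "*_gt*_polygons.json")
matches = glob.glob(pattern)
print("found {} annotation files".format(len(matches)))
for split in ("train", "val"):
    n = sum(os.sep + split + os.sep in m for m in matches)
    print("  {}: {} files".format(split, n))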
Example #3
import glob
import os

import pandas as pd
from tqdm import tqdm


def main(args):
    import sys
    if args.panoptic:
        args.instance = True
    sys.path.append(
        os.path.normpath(
            os.path.join(os.path.dirname(__file__), '..', 'helpers')))
    # how to search for all ground truth
    searchFine = os.path.join(args.datadir, "gtFine", "*", "*",
                              "*_gt*_polygons.json")

    # search files
    filesFine = glob.glob(searchFine)
    filesFine.sort()

    files = []  # filled below, depending on the domain-adaptation mode

    # for semi-supervised domain adaptation, convert only the selected images
    filesnew_semisup = []
    filesnewunsup = []
    if args.semisup_da:
        d_strat = list(
            pd.read_csv(
                './domain_adaptation/target/semi-supervised/selected_samples.csv',
                header=None)[0])
        d_strat = [
            "/".join(
                filenew.replace("_labellevel3Ids.png", "").split("/")[-3:])
            for filenew in d_strat
        ]
        print(d_strat)
        for fileold in filesFine:
            if "val/" not in fileold:
                searchstr = "/".join(
                    fileold.replace("_polygons.json", "").split("/")[-3:])
                if searchstr in d_strat:
                    print(searchstr)
                    filesnew_semisup.append(fileold)
            else:
                filesnew_semisup.append(fileold)
        files = filesnew_semisup
    # for unsupervised or weakly-supervised domain adaptation, convert only the val images
    elif args.unsup_da or args.weaksup_da:
        for fileold in filesFine:
            if "val/" in fileold:
                filesnewunsup.append(fileold)
        files = filesnewunsup
    else:
        files = filesFine
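    # Note on the matching above (illustrative file names only, not from the
    # source): both the CSV entries and the annotation paths are reduced to
    # their last three path components with the file suffix stripped, e.g.
    #   .../gtFine/train/cityA/frame01_gtFine_labellevel3Ids.png -> "train/cityA/frame01_gtFine"
    #   .../gtFine/train/cityA/frame01_gtFine_polygons.json      -> "train/cityA/frame01_gtFine"
    # so each selected label image picks out its corresponding polygon file;
    # in the semi-supervised branch every "val/" annotation is kept regardless
    # of the CSV.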

    #print('args.semisup_da', args.semisup_da, len(files))
    if not files:
        tqdm.write("Did not find any files. Please consult the README.")
        return

    # a bit verbose
    tqdm.write(
        "Processing {} annotation files for Semantic/Instance Segmentation".
        format(len(files)))

    # iterate through files
    progress = 0
    tqdm.write("Progress: {:>3} %".format(progress * 100 / len(files)),
               end=' ')

    from multiprocessing import Pool
    import time

    pool = Pool(args.num_workers)
    # results = pool.map(process_pred_gt_pair, pairs)
    results = list(tqdm(pool.imap(process_folder, files), total=len(files)))
    pool.close()
    pool.join()

    if args.panoptic:
        from cityscape_panoptic_gt import panoptic_converter
        for split in ['train', 'val']:

            tqdm.write("Panoptic Segmentation {} split".format(split))
            folder_name = os.path.join(args.datadir, 'gtFine')
            output_folder = os.path.join(folder_name, split + "_panoptic")
            os.makedirs(output_folder, exist_ok=True)
            out_file = os.path.join(folder_name, split + "_panoptic.json")
            panoptic_converter(args.num_workers,
                               os.path.join(folder_name, split), output_folder,
                               out_file)
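# None of the examples define the `args` namespace they receive. The parser
# below is reconstructed only from the attributes the code reads; every
# argument name, flag spelling and default is an assumption.
import argparse


def parse_args():
    parser = argparse.ArgumentParser(
        description="Prepare ground-truth label images")
    parser.add_argument("--datadir", required=True,
                        help="dataset root containing the gtFine folder")
    parser.add_argument("--num-workers", dest="num_workers", type=int,
                        default=4, help="worker processes for the conversion pool")
    parser.add_argument("--instance", action="store_true",
                        help="also generate instance ground truth")
    parser.add_argument("--panoptic", action="store_true",
                        help="also generate panoptic ground truth (implies --instance)")
    parser.add_argument("--semisup-da", dest="semisup_da", action="store_true",
                        help="convert only the samples listed in selected_samples.csv plus val")
    parser.add_argument("--unsup-da", dest="unsup_da", action="store_true",
                        help="convert only the val split")
    parser.add_argument("--weaksup-da", dest="weaksup_da", action="store_true",
                        help="convert only the val split")
    return parser.parse_args()


# Typical invocation (hypothetical):
#   main(parse_args())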