Example 1
def parse_args(args=None):
    """Parse the command line into a ``PredictAndEvalArgs`` instance.

    :param args: optional argv-style list; ``None`` uses ``sys.argv``.
    :return: the parsed ``PredictAndEvalArgs`` root argument.
    """
    parser = PAIArgumentParser()

    # General/shared parameters
    parser.add_argument('--version', action='version', version='%(prog)s v' + __version__)
    parser.add_root_argument("args", PredictAndEvalArgs)

    parsed = parser.parse_args(args=args)
    return parsed.args
Example 2
def main():
    """CLI entry point: parse ``PredictArgs`` from argv and run prediction."""
    parser = PAIArgumentParser()
    parser.add_argument("--version", action="version",
                        version="%(prog)s v" + __version__)
    # flat=True exposes the root dataclass fields directly on the CLI.
    parser.add_root_argument("root", PredictArgs, flat=True)

    parsed = parser.parse_args()
    run(parsed.root)
Example 3
def parse_args(args=None):
    """Parse the command line into Calamari trainer parameters.

    :param args: optional argv-style list; ``None`` uses ``sys.argv``.
    :return: the parsed trainer parameter object.
    """
    # Imported lazily to avoid the scenario import cost at module load.
    from calamari_ocr.ocr.scenario import CalamariScenario

    parser = PAIArgumentParser()
    parser.add_argument("--version", action="version", version="%(prog)s v" + __version__)

    # The scenario defaults define both the parameter class and its defaults.
    defaults = CalamariScenario.default_trainer_params()
    parser.add_root_argument("trainer", type(defaults), default=defaults)

    return parser.parse_args(args).trainer
Example 4
def parse_args(args=None):
    """Build the trainer-parameter argument parser and return the result.

    :param args: optional argv-style list; ``None`` uses ``sys.argv``.
    :return: the parsed trainer parameter object.
    """
    parser = PAIArgumentParser()
    parser.add_argument('--version',
                        action='version',
                        version='%(prog)s v' + __version__)

    # Lazy import: the scenario module is only needed to obtain defaults.
    from calamari_ocr.ocr.scenario import CalamariScenario
    trainer_defaults = CalamariScenario.default_trainer_params()
    parser.add_root_argument('trainer',
                             trainer_defaults.__class__,
                             default=trainer_defaults)

    parsed = parser.parse_args(args)
    return parsed.trainer
Example 5
def main(args=None):
    """Compute and print line/character statistics over a Calamari dataset.

    Loads every sample of the dataset described on the command line,
    estimates rendered line widths (image aspect ratio scaled to
    ``--line_height`` plus ``--pad`` on both sides) and per-character
    frequencies, then prints and returns the aggregate statistics dict.

    :param args: optional argv-style list; ``None`` uses ``sys.argv``.
    :return: dict of aggregate statistics (averages, extrema, totals,
             char frequency table, codec size).
    """
    from collections import Counter

    parser = PAIArgumentParser()
    parser.add_argument('--version', action='version', version='%(prog)s v' + __version__)
    parser.add_root_argument("args", Args)
    parser.add_argument("--line_height", type=int, default=48,
                        help="The line height")
    parser.add_argument("--pad", type=int, default=16,
                        help="Padding (left right) of the line")

    args = parser.parse_args(args=args)

    data: CalamariDataGeneratorParams = args.args.data
    gen = data.create(PipelineMode.EVALUATION)

    logger.info(f"Loading {len(data)} files")
    images, texts, metas = list(zip(
        *map(lambda s: (s.inputs, s.targets, s.meta), tqdm_wrapper(gen.generate(), progress_bar=True, total=len(gen)))))

    # NOTE(review): "widths" skips None/degenerate images while the char
    # statistics include every text line, so av_px_per_char mixes two
    # populations. Preserved as-is — confirm whether that is intended.
    statistics = {
        "n_lines": len(images),
        "chars": [len(c) for c in texts],
        "widths": [img.shape[1] / img.shape[0] * args.line_height + 2 * args.pad for img in images
                   if img is not None and img.shape[0] > 0 and img.shape[1] > 0],
        "total_line_width": 0,
        # Counter replaces the original hand-rolled counting loop (which also
        # iterated an unused `image` variable); cast back to a plain dict so
        # the printed/returned structure is unchanged.
        "char_counts": dict(Counter(c for text in texts for c in text)),
    }

    statistics["av_line_width"] = np.average(statistics["widths"])
    statistics["max_line_width"] = np.max(statistics["widths"])
    statistics["min_line_width"] = np.min(statistics["widths"])
    statistics["total_line_width"] = np.sum(statistics["widths"])

    statistics["av_chars"] = np.average(statistics["chars"])
    statistics["max_chars"] = np.max(statistics["chars"])
    statistics["min_chars"] = np.min(statistics["chars"])
    statistics["total_chars"] = np.sum(statistics["chars"])

    statistics["av_px_per_char"] = statistics["av_line_width"] / statistics["av_chars"]
    statistics["codec_size"] = len(statistics["char_counts"])

    # Drop the raw per-line lists; only the aggregates above are reported.
    del statistics["chars"]
    del statistics["widths"]

    print(statistics)
    return statistics
Example 6
def main(args=None):
    """Visualize (or, with --no_plot, just consume) samples of a Calamari
    data pipeline.

    Builds a pipeline from the ``DataWrapper`` CLI arguments, optionally
    applies augmentation, preloading, and sample selection, then plots the
    lines in an ``n_rows`` x ``n_cols`` matplotlib grid.

    :param args: optional argv-style list; ``None`` uses ``sys.argv``.
    """
    parser = PAIArgumentParser()
    parser.add_argument('--version', action='version', version='%(prog)s v' + __version__)

    # Grid layout and an optional explicit subset of sample indices to show.
    parser.add_argument("--n_cols", type=int, default=1)
    parser.add_argument("--n_rows", type=int, default=5)
    parser.add_argument("--select", type=int, nargs="+", default=[])

    parser.add_argument("--preload", action='store_true', help='Simulate preloading')
    parser.add_argument("--as_validation", action='store_true', help="Access as validation instead of training data.")
    parser.add_argument("--n_augmentations", type=float, default=0)
    parser.add_argument("--no_plot", action='store_true', help='This parameter is for testing only')

    parser.add_root_argument("data", DataWrapper)
    args = parser.parse_args(args=args)

    data_wrapper: DataWrapper = args.data
    data_params = data_wrapper.data
    # Run preprocessing serially and remove the final sample-preparation
    # step so the pipeline yields displayable (image, text) samples.
    data_params.pre_proc.run_parallel = False
    data_params.pre_proc.erase_all(PrepareSampleProcessorParams)
    # Propagate the requested augmentation count to every augmentation stage.
    for p in data_params.pre_proc.processors_of_type(AugmentationProcessorParams):
        p.n_augmentations = args.n_augmentations
    # Re-run dataclass post-init so the mutated params are re-initialized.
    data_params.__post_init__()
    data_wrapper.pipeline.mode = PipelineMode.EVALUATION if args.as_validation else PipelineMode.TRAINING
    data_wrapper.gen.prepare_for_mode(data_wrapper.pipeline.mode)

    data = Data(data_params)
    if len(args.select) == 0:
        # No explicit selection: iterate every sample in generator order.
        args.select = list(range(len(data_wrapper.gen)))
    else:
        try:
            data_wrapper.gen.select(args.select)
        except NotImplementedError:
            # Some generators cannot subset; continue with the full set.
            logger.warning(f"Selecting is not supported for a data generator of type {type(data_wrapper.gen)}. "
                           f"Resuming without selection.")
    data_pipeline = data.create_pipeline(data_wrapper.pipeline, data_wrapper.gen)
    if args.preload:
        data_pipeline = data_pipeline.as_preloaded()

    if args.no_plot:
        # Test mode: drain the pipeline without any plotting, then stop.
        with data_pipeline as dataset:
            list(zip(args.select, dataset.generate_input_samples(auto_repeat=False)))
        return

    import matplotlib.pyplot as plt
    f, ax = plt.subplots(args.n_rows, args.n_cols, sharey='all')
    row, col = 0, 0
    with data_pipeline as dataset:
        for i, (id, sample) in enumerate(zip(args.select, dataset.generate_input_samples(auto_repeat=False))):
            line, text, params = sample.inputs, sample.targets, sample.meta
            # With a single column plt.subplots returns a 1-D axes array,
            # so it must be indexed by row only.
            if args.n_cols == 1:
                ax[row].imshow(line.transpose())
                ax[row].set_title("ID: {}\n{}".format(id, text))
            else:
                ax[row, col].imshow(line.transpose())
                ax[row, col].set_title("ID: {}\n{}".format(id, text))

            # Fill the grid column by column; when it is full (or the data
            # is exhausted) show the figure and start a fresh grid.
            row += 1
            if row == args.n_rows:
                row = 0
                col += 1

            if col == args.n_cols or i == len(dataset) - 1:
                plt.show()
                f, ax = plt.subplots(args.n_rows, args.n_cols, sharey='all')
                row, col = 0, 0

    plt.show()
Example 7
import json

from examples.structures.hierarchical import Parent
from paiargparse import PAIArgumentParser

if __name__ == "__main__":
    # Demo: one positional argument, one required option, and a
    # hierarchical dataclass root argument; print everything as JSON.
    parser = PAIArgumentParser()
    parser.add_argument("positional_arg", type=str, help="A positional arg")
    parser.add_argument("--required_arg",
                        type=int,
                        help="This parameter must be specified",
                        required=True)
    parser.add_root_argument("root", Parent)

    args = parser.parse_args()
    as_dict = vars(args)
    # The root dataclass is not JSON-serializable directly; convert it first.
    as_dict["root"] = as_dict["root"].to_dict()
    print(json.dumps(as_dict))