def _main(_):
    """Entry point: parse CLI flags and average the listed checkpoints.

    Args:
        _: Unused positional argument (absl-style app entry signature).
    """
    # define and parse program flags (no --config_file support for this tool)
    arg_parser = flags_core.define_flags(FLAG_LIST, with_config_file=False)
    args, remaining_argv = flags_core.intelligent_parse_flags(
        FLAG_LIST, arg_parser)
    flags_core.verbose_flags(FLAG_LIST, args, remaining_argv)
    # "checkpoints" may be a comma-separated/nested list; flatten before use
    average_checkpoints(
        checkpoints=flatten_string_list(args["checkpoints"]),
        output_path=args["output_path"])
def _main(_):
    """Entry point: parse CLI flags, build a metric, and score hypotheses.

    Args:
        _: Unused positional argument (absl-style app entry signature).
    """
    # define and parse program flags (no --config_file support for this tool)
    arg_parser = flags_core.define_flags(FLAG_LIST, with_config_file=False)
    args, remaining_argv = flags_core.intelligent_parse_flags(
        FLAG_LIST, arg_parser)
    flags_core.verbose_flags(FLAG_LIST, args, remaining_argv)
    # evaluate hypothesis file against the reference file with the chosen metric
    metric = build_metric(args)
    evaluate(metric, args["hypo_file"], args["ref_file"])
def _main(_):
    """Entry point: parse flags (plus experiment-specific flags) and run.

    Args:
        _: Unused positional argument (absl-style app entry signature).

    Raises:
        ValueError: If no experiment entry class is provided via
            ``entry``/``entry.class``.
    """
    # define and parse program flags
    arg_parser = flags_core.define_flags(FLAG_LIST)
    args, remaining_argv = flags_core.intelligent_parse_flags(
        FLAG_LIST, arg_parser, _pre_load_args)
    # let the selected experiment class register and parse its own extra flags
    args, remaining_argv = flags_core.extend_define_and_parse(
        BaseExperiment.REGISTRY_NAME, args, remaining_argv)
    if args["entry.class"] is None:
        raise ValueError("Must provide entry/entry.class.")
    run_experiment(args, remaining_argv)
def _main(_):
    """Entry point: parse CLI flags and build a vocabulary from a corpus.

    Args:
        _: Unused positional argument (absl-style app entry signature).
    """
    # define and parse program flags (no --config_file support for this tool)
    arg_parser = flags_core.define_flags(FLAG_LIST, with_config_file=False)
    args, remaining_argv = flags_core.intelligent_parse_flags(
        FLAG_LIST, arg_parser)
    flags_core.verbose_flags(FLAG_LIST, args, remaining_argv)
    generate_vocab(
        input=args["input"],
        output=args["output"],
        min_frequency=args["min_frequency"],
        max_vocab_size=args["max_vocab_size"],
        lowercase=args["lowercase"],
        extra_slots=args["extra_slots"])
def _main(_):
    """Entry point: parse flags, build dataset and feature extractor, run.

    Args:
        _: Unused positional argument (absl-style app entry signature).

    Raises:
        ValueError: If no dataset is configured via the parsed flags.
    """
    # define and parse program flags (--config_file is supported here)
    arg_parser = flags_core.define_flags(FLAG_LIST, with_config_file=True)
    args, remaining_argv = flags_core.intelligent_parse_flags(
        FLAG_LIST, arg_parser)
    flags_core.verbose_flags(FLAG_LIST, args, remaining_argv)
    dataset = build_dataset(args)
    feature_extractor = build_feature_extractor(args)
    if dataset is None:
        raise ValueError("dataset must be provided.")
    main(dataset, feature_extractor)
def _main(_):
    """Entry point: parse flags, then tokenize an input file line by line.

    Each input line is normalized (optional lowercasing / punctuation
    removal) and tokenized, and the result is written to the output file.

    Args:
        _: Unused positional argument (absl-style app entry signature).
    """
    # define and parse program flags (no --config_file support for this tool)
    arg_parser = flags_core.define_flags(FLAG_LIST, with_config_file=False)
    args, remaining_argv = flags_core.intelligent_parse_flags(
        FLAG_LIST, arg_parser)
    flags_core.verbose_flags(FLAG_LIST, args, remaining_argv)
    tokenizer = build_tokenizer(args)
    # gfile handles both local paths and remote filesystems (e.g. GCS/HDFS)
    with tf.io.gfile.GFile(args["input"]) as fp:
        with tf.io.gfile.GFile(args["output"], "w") as fw:
            for line in fp:
                line = lowercase_and_remove_punctuations(
                    tokenizer.language, line.strip(),
                    args["lowercase"], args["remove_punctuation"])
                fw.write(tokenizer.tokenize(line, return_str=True) + "\n")
def _main(_):
    """Entry point: parse flags and extract transcripts/translations.

    Args:
        _: Unused positional argument (absl-style app entry signature).

    Raises:
        ValueError: If no dataset is configured via the parsed flags.
    """
    # define and parse program flags (--config_file is supported here)
    arg_parser = flags_core.define_flags(FLAG_LIST, with_config_file=True)
    args, remaining_argv = flags_core.intelligent_parse_flags(
        FLAG_LIST, arg_parser)
    flags_core.verbose_flags(FLAG_LIST, args, remaining_argv)
    dataset = build_dataset(args)
    if dataset is None:
        raise ValueError("dataset must be provided.")
    main(dataset=dataset,
         output_transcript_file=args["output_transcript_file"],
         output_translation_file=args["output_translation_file"])
def _main(_):
    """Entry point: parse flags and run sharded dataset preprocessing.

    Args:
        _: Unused positional argument (absl-style app entry signature).

    Raises:
        ValueError: If no dataset is configured via the parsed flags.
    """
    # define and parse program flags (--config_file is supported here)
    arg_parser = flags_core.define_flags(FLAG_LIST, with_config_file=True)
    args, remaining_argv = flags_core.intelligent_parse_flags(
        FLAG_LIST, arg_parser)
    flags_core.verbose_flags(FLAG_LIST, args, remaining_argv)
    task = build_task(args)
    dataset = build_dataset(args)
    if dataset is None:
        raise ValueError("dataset must be provided.")
    # this worker handles shards [output_range_begin, output_range_end)
    # out of num_output_shards, as processor `processor_id` of num_processors
    main(processor_id=args["processor_id"],
         num_processors=args["num_processors"],
         num_output_shards=args["num_output_shards"],
         output_range_begin=args["output_range_begin"],
         output_range_end=args["output_range_end"],
         output_template=args["output_template"],
         progressbar=args["progressbar"],
         dataset=dataset,
         task=task)