def main(_argv):
    model_configs = maybe_load_yaml(DEFAULT_EVAL_CONFIGS)
    # load flags from config file
    model_configs = load_from_config_path(FLAGS.config_paths, model_configs)
    # replace parameters in configs_file with tf FLAGS
    model_configs = update_eval_model_configs(model_configs, FLAGS)
    model_configs = deep_merge_dict(model_configs, ModelConfigs.load(FLAGS.model_dir))
    # re-apply FLAGS so they take precedence over configs restored from model_dir
    model_configs = update_eval_model_configs(model_configs, FLAGS)
    runner = EvalExperiment(model_configs=model_configs)
    runner.run()
def main(_argv):
    # load flags from config file
    model_configs = load_from_config_path(FLAGS.config_paths)
    # replace parameters in configs_file with tf FLAGS
    model_configs = update_configs_from_flags(model_configs, FLAGS, EVAL_ARGS.keys())
    model_configs = deep_merge_dict(model_configs, ModelConfigs.load(FLAGS.model_dir))
    model_configs = update_configs_from_flags(model_configs, FLAGS, EVAL_ARGS.keys())
    runner = EvalExperiment(model_configs=model_configs)
    runner.run()
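# deep_merge_dict is assumed here (its definition is not part of this excerpt)
# to perform a recursive dictionary merge in which values from the second
# argument take precedence; a minimal sketch of that behavior, under a
# hypothetical name so it does not clash with the real helper:
def deep_merge_dict_sketch(base, override):
    merged = dict(base)
    for key, value in override.items():
        if isinstance(merged.get(key), dict) and isinstance(value, dict):
            # recurse into nested dicts instead of replacing them wholesale
            merged[key] = deep_merge_dict_sketch(merged[key], value)
        else:
            # scalars and new keys: the override value wins
            merged[key] = value
    return merged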
def main(_argv):
    model_configs = maybe_load_yaml(DEFAULT_INFER_CONFIGS)
    # load flags from config file
    model_configs = load_from_config_path(FLAGS.config_paths, model_configs)
    # replace parameters in configs_file with tf FLAGS
    model_configs = update_infer_model_configs(model_configs, FLAGS)
    model_dirs = FLAGS.model_dir.strip().split(",")
    if len(model_dirs) == 1:
        # single model: merge in the configs saved with the checkpoint,
        # then re-apply FLAGS so they take precedence
        model_configs = deep_merge_dict(model_configs, ModelConfigs.load(model_dirs[0]))
        model_configs = update_infer_model_configs(model_configs, FLAGS)
        runner = InferExperiment(model_configs=model_configs)
    else:
        # multiple model dirs: ensemble decoding
        runner = EnsembleExperiment(model_configs=model_configs,
                                    model_dirs=model_dirs,
                                    weight_scheme=FLAGS.weight_scheme)
    runner.run()
def main(_argv):
    # load flags from config file
    model_configs = load_from_config_path(FLAGS.config_paths)
    # replace parameters in configs_file with tf FLAGS
    model_configs = update_configs_from_flags(model_configs, FLAGS, INFER_ARGS.keys())
    model_dirs = FLAGS.model_dir.strip().split(",")
    ip, port = FLAGS.server_address.split(":")
    addr = (ip, int(port))
    server = TranslateServer(addr, TranslateRequestHandler)
    server.init_experiment(model_configs=model_configs,
                           model_dirs=model_dirs,
                           weight_scheme=FLAGS.weight_scheme)
    server.serve_forever()
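# TranslateServer is constructed with (addr, RequestHandlerClass) and driven by
# serve_forever(), the same pattern used by Python's socketserver servers; the
# sketch below only illustrates that pattern with the standard library, since
# the real TranslateServer / TranslateRequestHandler and their wire protocol
# are project-specific and not shown here.
import socketserver

class _EchoHandler(socketserver.StreamRequestHandler):
    def handle(self):
        # read a single line from the client and echo it back
        self.wfile.write(self.rfile.readline())

def _serve_echo(host="127.0.0.1", port=8080):
    with socketserver.TCPServer((host, port), _EchoHandler) as server:
        server.serve_forever()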
def main(_argv): model_configs = maybe_load_yaml(DEFAULT_INFER_CONFIGS) # load flags from config file model_configs = load_from_config_path(FLAGS.config_paths, model_configs) # replace parameters in configs_file with tf FLAGS model_configs = update_infer_model_configs(model_configs, FLAGS) model_dirs = FLAGS.model_dir.strip().split(",") if len(model_dirs) == 1: model_configs = deep_merge_dict(model_configs, ModelConfigs.load(model_dirs[0])) runner = InferExperiment(model_configs=model_configs) else: runner = EnsembleExperiment(model_configs=model_configs, model_dirs=model_dirs, weight_scheme=FLAGS.weight_scheme) runner.run()
def main(_argv):
    # load flags from config file
    model_configs = load_from_config_path(FLAGS.config_paths)
    # replace parameters in configs_file with tf FLAGS
    model_configs = update_configs_from_flags(model_configs, FLAGS, TRAIN_ARGS.keys())
    model_dir = model_configs["model_dir"]
    if not gfile.Exists(model_dir):
        gfile.MakeDirs(model_dir)
    if "CUDA_VISIBLE_DEVICES" not in os.environ.keys():
        raise OSError("need CUDA_VISIBLE_DEVICES environment variable")
    tf.logging.info("CUDA_VISIBLE_DEVICES={}".format(os.environ["CUDA_VISIBLE_DEVICES"]))
    training_runner = TrainingExperiment(model_configs=model_configs)
    training_runner.run()
def main(_argv):
    model_configs = maybe_load_yaml(DEFAULT_TRAIN_CONFIGS)
    # load flags from config file
    model_configs = load_from_config_path(FLAGS.config_paths, model_configs)
    # replace parameters in configs_file with tf FLAGS
    model_configs = update_train_model_configs(model_configs, FLAGS)
    model_dir = model_configs["model_dir"]
    if not gfile.Exists(model_dir):
        gfile.MakeDirs(model_dir)
    if "CUDA_VISIBLE_DEVICES" not in os.environ.keys():
        raise OSError("need CUDA_VISIBLE_DEVICES environment variable")
    tf.logging.info("CUDA_VISIBLE_DEVICES={}".format(os.environ["CUDA_VISIBLE_DEVICES"]))
    training_runner = TrainingExperiment(model_configs=model_configs)
    training_runner.run()
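# A sketch of how a main() like the ones above is typically wired up in the
# TF 1.x style this code uses; the flag names mirror the FLAGS accessed above
# (config_paths, model_dir), but these exact definitions and help strings are
# assumptions, not taken from the source.
import tensorflow as tf

tf.flags.DEFINE_string("config_paths", "", "comma-separated list of YAML config files")
tf.flags.DEFINE_string("model_dir", "models", "directory holding checkpoints and saved configs")
FLAGS = tf.flags.FLAGS

if __name__ == "__main__":
    tf.logging.set_verbosity(tf.logging.INFO)
    tf.app.run(main=main)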