def parse_config(
    config_file: Optional[IO],
    entrypoint: Optional[List[str]],
    overrides: Iterable[str],
    volumes: Iterable[str],
) -> Dict[str, Any]:
    """Assemble a configuration dictionary from a file plus CLI arguments.

    Args:
        config_file: Optional open file containing a YAML configuration; it is
            closed after reading.
        entrypoint: Optional entrypoint command, applied only when the merged
            config does not already define one.
        overrides: "key=value" strings layered on top of the file config;
            dotted keys address nested fields.
        volumes: "host_path:container_path" strings appended as bind mounts.

    Returns:
        The merged configuration dictionary.

    Raises:
        ValueError: If an override lacks "=" or a volume lacks ":".
    """
    config = {}  # type: Dict[str, Any]
    if config_file:
        with config_file:
            # yaml.safe_load() returns None for an empty document; fall back
            # to an empty dict so the merging logic below can assume a dict.
            config = yaml.safe_load(config_file) or {}

    for config_arg in overrides:
        if "=" not in config_arg:
            raise ValueError("Could not read configuration option '{}'\n\n"
                             "Expecting:\n{}".format(config_arg, CONFIG_DESC))

        key, value = config_arg.split("=", maxsplit=1)  # type: Tuple[str, Any]

        # Separate values if a comma exists. Use yaml.safe_load() to cast
        # the value(s) to the type YAML would use, e.g., "4" -> 4.
        if "," in value:
            value = [yaml.safe_load(v) for v in value.split(",")]
        else:
            value = yaml.safe_load(value)

        # Certain configurations keys are expected to have list values.
        # Convert a single value to a singleton list if needed.
        if key in _CONFIG_PATHS_COERCE_TO_LIST:
            value = [value]

        # TODO(#2703): Consider using full JSONPath spec instead of dot
        # notation.
        config = _set_nested_config(config, key.split("."), value)

    for volume_arg in volumes:
        if ":" not in volume_arg:
            raise ValueError("Could not read volume option '{}'\n\n"
                             "Expecting:\n{}".format(volume_arg, VOLUME_DESC))

        # Split only on the first ":" so container paths may contain colons.
        host_path, container_path = volume_arg.split(":", maxsplit=1)
        bind_mounts = config.setdefault("bind_mounts", [])
        bind_mounts.append({
            "host_path": host_path,
            "container_path": container_path,
        })

    # Use the entrypoint command line argument if an entrypoint has not already
    # been defined by previous settings.
    if not config.get("entrypoint") and entrypoint:
        config["entrypoint"] = entrypoint

    return config
def test_non_root_experiment(auth: Authentication, tmp_path: pathlib.Path) -> None:
    """Run a short no-op experiment as an unprivileged (nobody/nogroup) user."""
    nobody = create_linked_user(65534, "nobody", 65534, "nogroup")
    with logged_in_user(nobody):
        with open(conf.fixtures_path("no_op/model_def.py")) as model_def_file:
            model_def_source = model_def_file.read()
        with open(conf.fixtures_path("no_op/single-one-short-step.yaml")) as config_file:
            experiment_config = yaml.safe_load(config_file)

        # Use a user-owned path to ensure shared_fs uses the container_path and not host_path.
        with non_tmp_shared_fs_path() as host_path:
            experiment_config["checkpoint_storage"] = {
                "type": "shared_fs",
                "host_path": host_path,
            }

            # Call `det --version` in a startup hook to ensure that det is on the PATH.
            tree_contents = {
                "startup-hook.sh": "det --version || exit 77",
                "const.yaml": yaml.dump(experiment_config),  # type: ignore
                "model_def.py": model_def_source,
            }
            with FileTree(tmp_path, tree_contents) as tree:
                exp.run_basic_test(str(tree.joinpath("const.yaml")), str(tree), None)
def _parse_config_file_or_exit(config_file: io.FileIO) -> Dict:
    """Parse an experiment configuration file, exiting on invalid content.

    Args:
        config_file: An open file handle containing a YAML experiment config;
            it is always closed before returning.

    Returns:
        The parsed configuration as a dict.

    Exits:
        With status 1 if the file is empty or does not parse to a mapping.
    """
    # Use a context manager so the handle is closed even if YAML parsing
    # raises (the original only closed it on the success path).
    with config_file:
        experiment_config = yaml.safe_load(config_file.read())
    if not experiment_config or not isinstance(experiment_config, dict):
        # .name remains readable on a closed file object.
        print("Error: invalid experiment config file {}".format(
            config_file.name))
        sys.exit(1)
    return experiment_config
def all_cases() -> Iterator["str"]:
    """Yield "<relative-path>::<case-name>" ids for every YAML schema case."""
    for dirpath, _, filenames in os.walk(CASES_ROOT):
        for filename in filenames:
            if not filename.endswith(".yaml"):
                continue
            full_path = os.path.join(dirpath, filename)
            with open(full_path) as fh:
                loaded_cases = yaml.safe_load(fh)
            # The display path is the same for every case in this file.
            rel_path = os.path.relpath(full_path, CASES_ROOT)
            for case in loaded_cases:
                yield rel_path + "::" + case["name"]
def preview_search(args: Namespace) -> None:
    """Dry-run an experiment's hyperparameter search on the master and print
    a per-trial breakdown of the operations the searcher would schedule.

    Exits with status 1 if the config has no "searcher" section.
    """
    experiment_config = yaml.safe_load(args.config_file.read())
    args.config_file.close()

    if "searcher" not in experiment_config:
        print("Experiment configuration must have 'searcher' section")
        sys.exit(1)
    # The response maps an operation-sequence string (space-separated codes)
    # to the number of trials that would execute that sequence.
    r = api.post(args.master, "searcher/preview", body=experiment_config)
    j = r.json()

    def to_full_name(kind: str) -> str:
        # Operation codes: "<N>R"/"<N>B"/"<N>E" train N records/batches/epochs;
        # "V" is a validation and "C" a checkpoint.
        if kind[-1] == "R":
            return "train {} records".format(kind[:-1])
        if kind[-1] == "B":
            return "train {} batch(es)".format(kind[:-1])
        if kind[-1] == "E":
            return "train {} epoch(s)".format(kind[:-1])
        elif kind == "V":
            return "validation"
        elif kind == "C":
            return "checkpoint"
        else:
            raise ValueError("unexpected kind: {}".format(kind))

    def render_sequence(sequence: List[str]) -> str:
        # Run-length encode the operation sequence into a readable summary,
        # e.g. "300 x train 100 batch(es), 1 x validation".
        if not sequence:
            return "N/A"
        instructions = []
        current = sequence[0]
        count = 0
        for k in sequence:
            if k != current:
                instructions.append("{} x {}".format(count, to_full_name(current)))
                current = k
                count = 1
            else:
                count += 1
        instructions.append("{} x {}".format(count, to_full_name(current)))
        return ", ".join(instructions)

    headers = ["Trials", "Breakdown"]
    values = [(count, render_sequence(operations.split()))
              for operations, count in j["results"].items()]

    print(colored("Using search configuration:", "green"))
    # NOTE(review): yaml.YAML() is the ruamel.yaml API, while safe_load above
    # also exists in PyYAML — presumably this module imports ruamel's yaml;
    # confirm against the file's imports.
    yml = yaml.YAML()
    yml.indent(mapping=2, sequence=4, offset=2)
    yml.dump(experiment_config["searcher"], sys.stdout)
    print()
    print("This search will create a total of {} trial(s).".format(
        sum(j["results"].values())))
    print(tabulate.tabulate(values, headers, tablefmt="presto"), flush=False)
def load_config(config_path: str) -> Any:
    """Read and parse the YAML file at ``config_path``."""
    with open(config_path) as config_stream:
        return yaml.safe_load(config_stream)
def test_schemas(test_case: str) -> None:
    """Run every case in the file referenced by a "<file>::<name>" test id."""
    cases_file, case_name = test_case.split("::", 1)
    path = os.path.join(CASES_ROOT, cases_file)
    with open(path) as fh:
        loaded_cases = yaml.safe_load(fh)
    for case in loaded_cases:
        Case(**case).run()
def test_v1() -> None:
    """Run every schema test case found in all case files."""
    for path in cases_files():
        with open(path) as fh:
            for case in yaml.safe_load(fh):
                Case(**case).run()
def set_template(args: Namespace) -> None:
    """Create or replace the named template on the master from a YAML file."""
    with args.template_file as template_stream:
        body = yaml.safe_load(template_stream)
    api.put(args.master, path="templates/" + args.template_name, body=body)
    print(colored("Set template {}".format(args.template_name), "green"))
def _parse_config(field: Any) -> Any:
    """Decode a base64-encoded YAML config field and pretty-print it."""
    decoded = base64.b64decode(field)
    parsed = yaml.safe_load(decoded)
    return yaml.safe_dump(parsed, default_flow_style=False)