Example #1
import yaml

# YAMLParametersLoader comes from the vistautils library (import path assumed).
from vistautils.parameters import YAMLParametersLoader


def test_interpolating_nested_parameters(tmp_path):
    included_params = {
        # Key uses - and _ to test that both work when finding params to interpolate.
        "hello": {
            "world": {
                "foo-foo_foo": "meep"
            }
        },
        "same_file": "moo %hello.world.foo-foo_foo% moo",
        "nested": {
            "interpolate_me_nested": "%hello.world.foo-foo_foo% nested"
        },
    }
    included_params_path = tmp_path / "included.params"
    with open(included_params_path, "w") as included_params_out:
        yaml.dump(included_params, included_params_out)

    reloaded_included_params = YAMLParametersLoader().load(
        included_params_path)

    # check nested interpolation works within the same file
    assert reloaded_included_params.string("same_file") == "moo meep moo"
    # check interpolation works when the parameter being interpolated is not top-level
    assert (reloaded_included_params.string("nested.interpolate_me_nested") ==
            "meep nested")

    including_params = {
        "_includes": ["included.params"],
        "interpolate_me": "lala %hello.world.foo-foo_foo% lala",
    }

    including_params_path = tmp_path / "including.params"
    with open(including_params_path, "w") as including_params_out:
        yaml.dump(including_params, including_params_out)

    loaded_params = YAMLParametersLoader().load(including_params_path)

    # check nested interpolation works across files
    assert loaded_params.string("interpolate_me") == "lala meep lala"
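
The test above exercises vistautils' %dotted.path% interpolation both within a single file and across an _includes boundary. As a point of reference, a minimal sketch of the same mechanism used outside a test; the file name and keys here are illustrative assumptions, not taken from the example:

import yaml

from vistautils.parameters import YAMLParametersLoader


def load_with_interpolation(tmp_path):
    # Illustrative params file: "greeting" pulls in "names.user" via %...% interpolation.
    params_path = tmp_path / "demo.params"
    with open(params_path, "w") as out:
        yaml.dump({"names": {"user": "adam"}, "greeting": "hello %names.user%"}, out)
    params = YAMLParametersLoader().load(params_path)
    return params.string("greeting")  # expected to resolve to "hello adam"
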
Example #2
def main(cluster_params: Parameters, job_param_file: Path) -> None:
    runner = SlurmPythonRunner.from_parameters(cluster_params)
    job_params = YAMLParametersLoader().load(job_param_file)
    entry_point = job_params.string("entry_point")
    memory = MemoryAmount.parse(job_params.string("memory"))
    runner.run_entry_point(
        entry_point_name=entry_point,
        param_file=job_param_file,
        partition=cluster_params.string("partition"),
        working_directory=job_params.optional_creatable_directory(
            "working_directory") or Path(os.getcwd()),
        num_gpus=job_params.integer("num_gpus",
                                    default=0,
                                    valid_range=Range.at_least(0)),
        num_cpus=job_params.integer("num_cpus",
                                    default=1,
                                    valid_range=Range.at_least(1)),
        job_name=job_params.string("job_name", default=entry_point),
        memory_request=memory,
        echo_template=cluster_params.boolean("echo_template", default=False),
        slurm_script_path=job_params.optional_creatable_file(
            "slurm_script_path"),
    )
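
For context, the job parameter file consumed by this main is plain YAML whose keys mirror the lookups above (entry_point, memory, num_gpus, num_cpus, job_name, plus the optional working_directory and slurm_script_path). A minimal sketch of writing one; the values are placeholders, and the memory string format is an assumption about what MemoryAmount.parse accepts:

import yaml

# Keys follow the job_params lookups in Example #2; values are illustrative only.
job_params = {
    "entry_point": "my_package.my_experiment",  # hypothetical entry point module
    "memory": "8G",  # string handed to MemoryAmount.parse (format assumed)
    "num_gpus": 0,
    "num_cpus": 2,
    "job_name": "demo-job",
}
with open("job.params", "w") as out:
    yaml.dump(job_params, out)
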
Example #3
def main(params: Parameters):
    viz = SituationVisualizer()
    # try to get the root output directory for rendering an experiment
    adam_root = params.existing_directory("adam_root")
    root_output_directory = params.optional_creatable_directory(
        "experiment_group_dir")
    if root_output_directory is not None:
        m9_experiments_dir = adam_root / "parameters" / "experiments" / "m9"
        param_files: List[Path] = []

        if params.boolean("include_objects"):
            param_files.append(m9_experiments_dir / "objects.params")

        if params.boolean("include_attributes"):
            param_files.append(m9_experiments_dir / "attributes.params")

        if params.boolean("include_relations"):
            param_files.append(m9_experiments_dir / "relations.params")

        if params.boolean("include_events"):
            param_files.append(m9_experiments_dir / "events.params")

        # This activates a special "debug" curriculum,
        # which is meant to be edited in the code by a developer to do fine-grained debugging.
        if params.boolean("include_debug", default=False):
            param_files.append(m9_experiments_dir / "debug.params")

        # loop over all experiment params files
        for param_file in param_files:
            experiment_params = YAMLParametersLoader().load(param_file)
            if "curriculum" in experiment_params:
                # get the experiment curriculum list (if there is one)
                curriculum = curriculum_from_params(experiment_params)[0]
                directory_name = experiment_params.string(
                    "experiment") + "/renders"
                if not os.path.isdir(root_output_directory / directory_name):
                    os.mkdir(root_output_directory / directory_name)
                for instance_group in curriculum:
                    try:
                        make_scenes(
                            params,
                            [instance_group],
                            root_output_directory / directory_name,
                            viz,
                        )
                    except RuntimeError as err:
                        print(f"uncaught exception: {err}")

    else:
        # render phase 1 scenes:
        root_output_directory = params.optional_creatable_directory(
            "screenshot_directory")
        assert root_output_directory is not None
        if not os.path.isdir(root_output_directory):
            os.mkdir(root_output_directory)
        for idx, instance_group in enumerate(
                build_curriculum(None, None,
                                 GAILA_PHASE_1_LANGUAGE_GENERATOR)):
            # do any filtering here
            if instance_group.name() in EXCLUDED_CURRICULA:
                continue
            directory_name = f"{idx:03}-{instance_group.name()}"
            if not os.path.isdir(root_output_directory / directory_name):
                os.mkdir(root_output_directory /
                         directory_name)  # type: ignore

            # then call some function from make_scenes.py to run the curriculum
            make_scenes(params, [instance_group],
                        root_output_directory / directory_name, viz)
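
The main above branches on experiment_group_dir: when it is given, the m9 curricula selected by the include_* flags are rendered under it; otherwise the phase 1 curriculum is rendered into screenshot_directory. A rough sketch of driving the first branch directly, assuming vistautils' Parameters.from_mapping and using placeholder paths:

from vistautils.parameters import Parameters

# Placeholder paths; keys mirror the lookups in Example #3.
main(
    Parameters.from_mapping(
        {
            "adam_root": "/path/to/adam",
            "experiment_group_dir": "/path/to/renders",
            "include_objects": True,
            "include_attributes": False,
            "include_relations": False,
            "include_events": False,
        }
    )
)
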
Example #4
def main(params: Parameters):
    adam_root = params.existing_directory("adam_root")
    m13_experiments_dir = adam_root / "parameters" / "experiments" / "m13"
    use_pegasus = params.boolean("use_pegasus", default=False)
    if use_pegasus:
        initialize_vista_pegasus_wrapper(params)

    param_files: List[Path] = []

    if params.boolean("include_objects", default=True):
        param_files.append(m13_experiments_dir / "objects.params")

    if params.boolean("include_imprecise_size", default=True):
        param_files.append(m13_experiments_dir / "imprecise_size.params")

    if params.boolean("include_imprecise_temporal", default=True):
        param_files.append(m13_experiments_dir / "imprecise_temporal.params")

    if params.boolean("include_subtle_verb", default=True):
        param_files.append(m13_experiments_dir / "subtle_verb.params")

    if params.boolean("include_object_restrictions", default=True):
        param_files.append(m13_experiments_dir / "object_restrictions.params")

    if params.boolean("include_functionally_defined_objects", default=True):
        param_files.append(m13_experiments_dir / "functionally_defined_objects.params")

    if params.boolean("include_relations", default=True):
        param_files.append(m13_experiments_dir / "relations.params")

    if params.boolean("include_generics", default=True):
        param_files.append(m13_experiments_dir / "generics.params")

    if params.boolean("include_verbs_with_dynamic_prepositions", default=True):
        param_files.append(
            m13_experiments_dir / "events_with_dynamic_prepositions.params"
        )

    if params.boolean("include_m9_complete", default=False):
        param_files.append(m13_experiments_dir / "m9_complete.params")

    if params.boolean("include_m13_complete", default=False):
        param_files.append(m13_experiments_dir / "m13_complete.params")

    if params.boolean("include_m13_shuffled", default=False):
        param_files.append(m13_experiments_dir / "m13_shuffled.params")

    # This activates a special "debug" curriculum,
    # which is meant to be edited in the code by a developer to do fine-grained debugging.
    if params.boolean("include_debug", default=False):
        param_files.append(m13_experiments_dir / "debug.params")

    # If any of the param files is missing, bail out early instead of making the
    # user wait for the error.
    for param_file in param_files:
        if not param_file.exists():
            raise RuntimeError(f"Expected param file {param_file} does not exist")

    for param_file in param_files:
        logging.info("Running %s", param_file)
        experiment_params = YAMLParametersLoader().load(param_file)
        if not use_pegasus:
            log_experiment_entry_point(experiment_params)
        else:
            experiment_name = Locator(experiment_params.string("experiment"))
            experiment_params = experiment_params.unify(
                {
                    "experiment_group_dir": directory_for(experiment_name) / "output",
                    "hypothesis_log_dir": directory_for(experiment_name) / "hypotheses",
                    # State pickles will go under experiment_name/learner_state
                    "learner_logging_path": directory_for(experiment_name),
                    "log_learner_state": True,
                    "resume_from_latest_logged_state": True,
                    "log_hypothesis_every_n_steps": params.integer(
                        "save_state_every_n_steps"
                    ),
                    "debug_learner_pickling": params.boolean(
                        "debug_learner_pickling", default=False
                    ),
                }
            )

            run_python_on_parameters(
                experiment_name, log_experiment_script, experiment_params, depends_on=[]
            )

    if use_pegasus:
        write_workflow_description()
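
Note that when use_pegasus is true, the unified parameters pull save_state_every_n_steps from the top-level params with no default, so a Pegasus run has to supply it (and initialize_vista_pegasus_wrapper will need whatever workflow parameters it expects, which are not shown here). A minimal sketch of such a params file; the values are illustrative assumptions:

import yaml

# Keys mirror the lookups in Example #4; values are placeholders.
pegasus_params = {
    "adam_root": "/path/to/adam",
    "use_pegasus": True,
    "save_state_every_n_steps": 100,
    # Curricula not listed keep the defaults shown above.
    "include_m13_complete": True,
}
with open("run_m13.params", "w") as out:
    yaml.dump(pegasus_params, out)
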