def _real_parameters_only_entry_point(main_method: Callable[[Parameters], None],
                                      usage_message: Optional[str] = None,
                                      *,
                                      parameters: Optional[Parameters] = None,
                                      program_name: Optional[str] = None,
                                      args: Sequence[str]) -> None:
    if not program_name:
        # Get original script name for use in the usage message.
        import __main__ as main  # pylint:disable=import-outside-toplevel

        program_name = os.path.basename(main.__file__)

    arg_parser = ArgumentParser(prog=program_name, description=usage_message)
    if not parameters:
        arg_parser.add_argument("param_file", type=Path)
    arg_parser.add_argument("-p", action="append", nargs=2, required=False)

    parsed_args = arg_parser.parse_args(args)

    if parameters is not None:
        params = parameters
    else:
        params = YAMLParametersLoader().load(parsed_args.param_file)
    if parsed_args.p:
        params = params.unify(params.from_key_value_pairs(parsed_args.p))
    configure_logging_from(params)
    log.info("Ran with parameters:\n%s", params)
    main_method(params)
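To make the flow above concrete, here is a minimal sketch of what the entry point does by hand: load a YAML parameter file and unify it with "-p"-style key/value overrides. The parameter keys and values are hypothetical illustrations, and the import path is assumed to be vistautils.parameters.

from vistautils.parameters import YAMLParametersLoader

# Hypothetical parameter contents standing in for a param file on disk.
params = YAMLParametersLoader().load_string("job_name: demo\ninclude_debug: false\n")
# Equivalent of passing "-p job_name overridden" on the command line:
params = params.unify(params.from_key_value_pairs([("job_name", "overridden")]))
print(params.string("job_name"))  # expected to print "overridden"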
Example #2
    def test_environmental_variable_interpolation(self):
        loader = YAMLParametersLoader()
        os.environ["___TEST_PARAMETERS___"] = "foo"
        os.environ["___TEST_CLASHING_PARAM___"] = "bar"
        loaded_params = loader.load_string(ENV_VAR_INTERPOLATION_INPUT)

        reference_params = Parameters.from_mapping(
            yaml.safe_load(ENV_VAR_INTERPOLATION_REFERENCE))

        self.assertEqual(reference_params, loaded_params)
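The ENV_VAR_INTERPOLATION_INPUT and ENV_VAR_INTERPOLATION_REFERENCE constants are not reproduced on this page. As hypothetical stand-ins only, and assuming environment variables are referenced with the same %name% syntax used by the parameter-interpolation tests below, they might look roughly like:

# Hypothetical stand-ins, not the actual constants from the test module.
ENV_VAR_INTERPOLATION_INPUT = 'interpolated_param: "%___TEST_PARAMETERS___%"\n'
ENV_VAR_INTERPOLATION_REFERENCE = 'interpolated_param: "foo"\n'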
Example #3
def main(params: Parameters):
    adam_root = params.existing_directory("adam_root")
    m13_experiments_dir = adam_root / "parameters" / "experiments" / "m13"

    param_files: List[Path] = []

    if params.boolean("include_objects", default=True):
        param_files.append(m13_experiments_dir / "objects.params")

    if params.boolean("include_imprecise_size", default=True):
        param_files.append(m13_experiments_dir / "imprecise_size.params")

    if params.boolean("include_imprecise_temporal", default=True):
        param_files.append(m13_experiments_dir / "imprecise_temporal.params")

    if params.boolean("include_subtle_verb", default=True):
        param_files.append(m13_experiments_dir / "subtle_verb.params")

    if params.boolean("include_object_restrictions", default=True):
        param_files.append(m13_experiments_dir / "object_restrictions.params")

    if params.boolean("include_functionally_defined_objects", default=True):
        param_files.append(m13_experiments_dir /
                           "functionally_defined_objects.params")

    if params.boolean("include_relations", default=True):
        param_files.append(m13_experiments_dir / "relations.params")

    if params.boolean("include_generics", default=True):
        param_files.append(m13_experiments_dir / "generics.params")

    if params.boolean("include_verbs_with_dynamic_prepositions", default=True):
        param_files.append(m13_experiments_dir /
                           "events_with_dynamic_prepositions.params")

    if params.boolean("include_m9_complete", default=False):
        param_files.append(m13_experiments_dir / "m9_complete.params")

    if params.boolean("include_m13_complete", default=False):
        param_files.append(m13_experiments_dir / "m13_complete.params")

    if params.boolean("include_m13_shuffled", default=False):
        param_files.append(m13_experiments_dir / "m13_shuffled.params")

    # This activates a special "debug" curriculum,
    # which is meant to be edited in the code by a developer to do fine-grained debugging.
    if params.boolean("include_debug", default=False):
        param_files.append(m13_experiments_dir / "debug.params")

    # If any of the param files don't exist, bail out earlier instead of making the user
    # wait for the error.
    for param_file in param_files:
        if not param_file.exists():
            raise RuntimeError(
                f"Expected param file {param_file} does not exist")

    for param_file in param_files:
        logging.info("Running %s", param_file)
        experiment_params = YAMLParametersLoader().load(param_file)
        log_experiment_entry_point(experiment_params)
Example #4
def test_exception_when_interpolating_unknown_param(tmp_path) -> None:
    parameters = {"hello": "world", "interpolate_me": "%unknown_param%"}
    params_file = tmp_path / "tmp.params"
    with open(params_file, "w") as out:
        yaml.dump(parameters, out)
    with pytest.raises(Exception):
        YAMLParametersLoader().load(params_file)
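For contrast with the failure case above, a hedged sketch of the same pattern when the referenced parameter does exist, mirroring the same-file interpolation exercised in the nested-interpolation test later on this page. The function and file names are hypothetical.

import yaml
from vistautils.parameters import YAMLParametersLoader

def interpolation_success_sketch(tmp_path) -> None:
    # Hypothetical counterpart to the failing test: "%hello%" refers to a key
    # that is actually defined, so loading should succeed.
    params_file = tmp_path / "ok.params"
    with open(params_file, "w") as out:
        yaml.dump({"hello": "world", "interpolate_me": "%hello%"}, out)
    loaded = YAMLParametersLoader().load(params_file)
    assert loaded.string("interpolate_me") == "world"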
Example #5
def main(params: Parameters):
    adam_root = params.existing_directory("adam_root")
    m9_experiments_dir = adam_root / "parameters" / "experiments" / "m9"
    param_files: List[Path] = []

    if params.boolean("include_objects", default=True):
        param_files.append(m9_experiments_dir / "objects.params")

    if params.boolean("include_attributes", default=True):
        param_files.append(m9_experiments_dir / "attributes.params")

    if params.boolean("include_relations", default=True):
        param_files.append(m9_experiments_dir / "relations.params")

    if params.boolean("include_events", default=True):
        param_files.append(m9_experiments_dir / "events.params")

    # This activates a special "debug" curriculum,
    # which is meant to be edited in the code by a developer to do fine-grained debugging.
    if params.boolean("include_debug", default=False):
        param_files.append(m9_experiments_dir / "debug.params")

    # If any of the param files don't exist, bail out earlier instead of making the user
    # wait for the error.
    for param_file in param_files:
        if not param_file.exists():
            raise RuntimeError(
                f"Expected param file {param_file} does not exist")

    for param_file in param_files:
        logging.info("Running %s", param_file)
        experiment_params = YAMLParametersLoader().load(param_file)
        log_experiment_entry_point(experiment_params)
Example #6
    def test_double_context_fail(self):
        # cannot specify both deprecated context argument and new included_context argument
        with self.assertRaises(ParameterError):
            YAMLParametersLoader().load(
                f='foo: "foo"',
                context=Parameters.empty(),
                included_context=Parameters.empty(),
            )
Example #7
    def test_interpolation(self):
        context = Parameters.from_mapping(
            yaml.safe_load(self.WRITING_REFERENCE))
        loader = YAMLParametersLoader()
        self.assertEqual(
            loader._interpolate(
                Parameters.from_mapping(
                    yaml.safe_load(self.MULTIPLE_INTERPOLATION_REFERENCE)),
                context,
            )._data,
            immutabledict([
                ("pear", "raspberry"),
                ("banana", "raspberry"),
                ("apple", "raspberry"),
                ("the_ultimate_fruit", "raspberry"),
            ]),
        )
        self.assertEqual(
            loader._interpolate(
                Parameters.from_mapping(
                    yaml.safe_load(
                        self.MULTIPLE_INTERPOLATION_REFERENCE_NEEDING_CONTEXT)
                ),
                context,
            )._data,
            immutabledict([
                ("pear", "raspberry/world"),
                ("banana", "raspberry/world"),
                ("apple", "raspberry/world"),
                ("the_ultimate_fruit", "raspberry/world"),
                # the actual pair ("hello", "world") should not be present
            ]),
        )
        self.assertEqual(
            loader._interpolate(
                Parameters.from_mapping(
                    yaml.safe_load(self.NESTED_INTERPOLATION)),
                context,
            ).as_nested_dicts(),
            {
                "key": 2,
                "key2": "fooo",
                "key3": {
                    "lalala": "fooo",
                    "meep": 2,
                    "list": [1, 2, 3]
                },
            },
        )

        with self.assertRaisesRegex(
                ParameterInterpolationError,
                r"These interpolated parameters form at least one graph cycle that must be fixed: "
                r"\('b', 'c'\)",
        ):
            loader._interpolate(
                Parameters.from_mapping(
                    yaml.safe_load('a: "%b%"\nb: "%c%"\nc: "%b%"')),
                context,
            )
Example #8
def main(params: Parameters):
    adam_root = params.existing_directory("adam_root")
    experiments_dir = adam_root / "parameters" / "experiments"
    param_file = experiments_dir / "object_restrictions.params"

    if not param_file.exists():
        raise RuntimeError(f"Expected param file {param_file} does not exist")

    logging.info("Running %s", param_file)
    experiment_params = YAMLParametersLoader().load(param_file)
    log_experiment_entry_point(experiment_params)
Example #9
    def test_inclusion(self):
        loader = YAMLParametersLoader()
        test_root_dir = Path(tempfile.mkdtemp())
        # we want test inclusion across different directories
        test_nested_dir = test_root_dir / "nested"
        test_nested_dir.mkdir(exist_ok=True, parents=True)
        input_file = test_nested_dir / "input.params"
        input_file.write_text(INCLUSION_INPUT_FIRST_FILE, encoding="utf-8")

        included_file = test_root_dir / "include_one_level_up.params"
        included_file.write_text(INCLUSION_INPUT_PARENT, encoding="utf-8")

        grandparent_file = test_root_dir / "include_same_dir.params"
        grandparent_file.write_text(INCLUSION_INPUT_GRANDPARENT,
                                    encoding="utf-8")

        params = loader.load(input_file)
        shutil.rmtree(test_root_dir)

        self.assertEqual(INCLUSION_REFERENCE, dict(params.as_nested_dicts()))
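The INCLUSION_INPUT_* and INCLUSION_REFERENCE constants are not shown on this page, but the "_includes" mechanism they exercise also appears in the nested-interpolation test below. A small, self-contained sketch of that mechanism, with hypothetical file names and keys:

import yaml
from vistautils.parameters import YAMLParametersLoader

def inclusion_sketch(tmp_path) -> None:
    # Hypothetical included file defining one parameter.
    (tmp_path / "base.params").write_text("shared_key: shared_value\n", encoding="utf-8")
    # Hypothetical including file; "_includes" paths are resolved relative to it.
    including = {"_includes": ["base.params"], "own_key": "own_value"}
    including_path = tmp_path / "main.params"
    with open(including_path, "w") as out:
        yaml.dump(including, out)
    params = YAMLParametersLoader().load(including_path)
    assert params.string("shared_key") == "shared_value"
    assert params.string("own_key") == "own_value"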
Example #10
def test_binary_source_sink_from_params(tmp_path: Path) -> None:
    sink_params_text = f"""
output:
   type: zip
   path: {tmp_path / "output.zip"}
    """
    sink_params = YAMLParametersLoader().load_string(sink_params_text)
    with byte_key_value_sink_from_params(sink_params) as sink:
        sink.put("hello", "world".encode("utf-8"))
        sink.put("goodbye", "fred".encode("utf-8"))
    source_params_text = f"""
    altinput:
       type: zip
       path: {tmp_path / "output.zip"}
        """
    source_params = YAMLParametersLoader().load_string(source_params_text)

    # we test specifying an alternate namespace
    with byte_key_value_source_from_params(
            source_params, input_namespace="altinput") as source:
        assert source["hello"].decode("utf-8") == "world"
        assert source["goodbye"].decode("utf-8") == "fred"
Example #11
def test_directory_byte_key_value_sink(tmp_path: Path):
    output_dir = tmp_path / "output"
    output_dir.mkdir()
    sink_params_txt = f"""
output:
   type: file-map
   path: {output_dir}
    """
    sink_params = YAMLParametersLoader().load_string(sink_params_txt)
    with byte_key_value_sink_from_params(sink_params) as dir_sink:
        dir_sink.put("foo", b"bar")
        dir_sink.put("hello", b"world")

    source_params_txt = f"""
input:
   type: _doc_id_binary_source_from_params
   path: {output_dir / "_index"}
    """
    source_params = YAMLParametersLoader().load_string(source_params_txt)
    with byte_key_value_linear_source_from_params(source_params) as dir_source:
        assert dir_source["foo"] == b"bar"
        assert dir_source["hello"] == b"world"
Example #12
    def test_boolean(self):
        params = YAMLParametersLoader().load_string("""
            true_param : true
            false_param : false
            non_boolean_param: 'Fred'
        """)

        self.assertTrue(params.boolean("true_param"))
        self.assertFalse(params.boolean("false_param"))
        with self.assertRaises(ParameterError):
            params.boolean("non_boolean_param")

        self.assertTrue(params.boolean("not-appearing", default=True))
        # test with a False-y default
        self.assertFalse(params.boolean("not-appearing", default=False))
Example #13
def test_char_source_sink_from_params(tmp_path: Path) -> None:
    sink_params_text = f"""
output:
   type: zip
   path: {tmp_path / "output.zip"}
    """
    sink_params = YAMLParametersLoader().load_string(sink_params_text)
    with char_key_value_sink_from_params(sink_params) as sink:
        sink.put("hello", "world")
        sink.put("goodbye", "fred")
    source_params_text = f"""
    altinput:
       type: zip
       path: {tmp_path / "output.zip"}
        """
    source_params = YAMLParametersLoader().load_string(source_params_text)

    reference = {"hello": "world", "goodbye": "fred"}

    # we test specifying an alternate namespace
    with char_key_value_source_from_params(
            source_params, input_namespace="altinput") as source:
        for k, v in source.items():
            assert reference[k] == v
Example #14
def main(cluster_params: Parameters, job_param_file: Path) -> None:
    runner = SlurmPythonRunner.from_parameters(cluster_params)
    job_params = YAMLParametersLoader().load(job_param_file)
    entry_point = job_params.string("entry_point")
    memory = MemoryAmount.parse(job_params.string("memory"))
    runner.run_entry_point(
        entry_point_name=entry_point,
        param_file=job_param_file,
        partition=cluster_params.string("partition"),
        working_directory=job_params.optional_creatable_directory(
            "working_directory") or Path(os.getcwd()),
        num_gpus=job_params.integer("num_gpus",
                                    default=0,
                                    valid_range=Range.at_least(0)),
        num_cpus=job_params.integer("num_cpus",
                                    default=1,
                                    valid_range=Range.at_least(1)),
        job_name=job_params.string("job_name", default=entry_point),
        memory_request=memory,
        echo_template=cluster_params.boolean("echo_template", default=False),
        slurm_script_path=job_params.optional_creatable_file(
            "slurm_script_path"),
    )
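As a reference for the keys read above, a hedged sketch of what a job parameter file might contain. The values are placeholders, and the memory format assumes MemoryAmount.parse accepts strings of the form shown.

# Hypothetical job parameter file for the SLURM runner above (values are placeholders).
EXAMPLE_JOB_PARAMS = """
entry_point: my_package.my_module   # module run via "python -m" by the generated script
memory: 4G                          # parsed with MemoryAmount.parse (format assumed)
num_gpus: 0
num_cpus: 1
job_name: demo-job
working_directory: /tmp/demo-job
"""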
Example #15
def main(params: Parameters):
    adam_root = params.existing_directory("adam_root")
    m6_experiments_dir = adam_root / "parameters" / "experiments" / "m6"
    param_files = [
        m6_experiments_dir / "each-object-by-itself.pursuit.params",
        m6_experiments_dir / "pursuit-single-noise.params",
        m6_experiments_dir / "static-prepositions.params",
        m6_experiments_dir / "pursuit-double-noise.params",
    ]

    # If any of the param files don't exist, bail out earlier instead of making the user
    # wait for the error.
    for param_file in param_files:
        if not param_file.exists():
            raise RuntimeError(
                f"Expected param file {param_file} does not exist")

    for param_file in param_files:
        logging.info("Running %s", param_file)
        experiment_params = YAMLParametersLoader().load(param_file)
        log_experiment_entry_point(experiment_params)
Example #16
def main(params: Parameters):
    for experiment, curriculum, learner in [
        (
            "generics-with-learner",
            "actions-and-generics-curriculum",
            "integrated-learner-recognizer",
        ),
        (
            "generics-without-learner",
            "actions-and-generics-curriculum",
            "integrated-learner-recognizer-without-generics",
        ),
    ]:
        logging.info("Running %s", experiment)
        setup_specifications = YAMLParametersLoader().load_string(
            CONFIGURATION_STRING.format(
                experiment=experiment, learner=learner, curriculum=curriculum
            )
        )
        experiment_params = params.unify(setup_specifications)
        print("Configuration specifications: \n", experiment_params)
        log_experiment_entry_point(experiment_params)
Example #17
def test_interpolating_nested_parameters(tmp_path):
    included_params = {
        # - and _ to test they work when finding params to interpolate.
        "hello": {
            "world": {
                "foo-foo_foo": "meep"
            }
        },
        "same_file": "moo %hello.world.foo-foo_foo% moo",
        "nested": {
            "interpolate_me_nested": "%hello.world.foo-foo_foo% nested"
        },
    }
    included_params_path = tmp_path / "included.params"
    with open(included_params_path, "w") as included_params_out:
        yaml.dump(included_params, included_params_out)

    reloaded_included_params = YAMLParametersLoader().load(
        included_params_path)

    # check nested interpolation works within the same file
    assert reloaded_included_params.string("same_file") == "moo meep moo"
    # check interpolation works when the parameter being interpolated is not top-level
    assert (reloaded_included_params.string("nested.interpolate_me_nested") ==
            "meep nested")

    including_params = {
        "_includes": ["included.params"],
        "interpolate_me": "lala %hello.world.foo-foo_foo% lala",
    }

    including_params_path = tmp_path / "including.params"
    with open(including_params_path, "w") as including_params_out:
        yaml.dump(including_params, including_params_out)

    loaded_params = YAMLParametersLoader().load(including_params_path)

    # check nested interpolation works across files
    assert loaded_params.string("interpolate_me") == "lala meep lala"
Example #18
def main(params: Parameters):
    viz = SituationVisualizer()
    # try to get the directory for rendering for an experiment
    adam_root = params.existing_directory("adam_root")
    root_output_directory = params.optional_creatable_directory(
        "experiment_group_dir")
    if root_output_directory is not None:
        m9_experiments_dir = adam_root / "parameters" / "experiments" / "m9"
        param_files: List[Path] = []

        if params.boolean("include_objects"):
            param_files.append(m9_experiments_dir / "objects.params")

        if params.boolean("include_attributes"):
            param_files.append(m9_experiments_dir / "attributes.params")

        if params.boolean("include_relations"):
            param_files.append(m9_experiments_dir / "relations.params")

        if params.boolean("include_events"):
            param_files.append(m9_experiments_dir / "events.params")

        # This activates a special "debug" curriculum,
        # which is meant to be edited in the code by a developer to do fine-grained debugging.
        if params.boolean("include_debug", default=False):
            param_files.append(m9_experiments_dir / "debug.params")

        # loop over all experiment params files
        for param_file in param_files:
            experiment_params = YAMLParametersLoader().load(param_file)
            if "curriculum" in experiment_params:
                # get the experiment curriculum list (if there is one)

                curriculum = curriculum_from_params(experiment_params)[0]
                directory_name = experiment_params.string(
                    "experiment") + "/renders"
                if not os.path.isdir(root_output_directory / directory_name):
                    os.mkdir(root_output_directory / directory_name)
                for instance_group in curriculum:
                    try:
                        make_scenes(
                            params,
                            [instance_group],
                            root_output_directory / directory_name,
                            viz,
                        )
                    except RuntimeError as err:
                        print(f"uncaught exception: {err}")

    else:
        # render phase 1 scenes:
        root_output_directory = params.optional_creatable_directory(
            "screenshot_directory")
        assert root_output_directory is not None
        if not os.path.isdir(root_output_directory):
            os.mkdir(root_output_directory)
        for idx, instance_group in enumerate(
                build_curriculum(None, None,
                                 GAILA_PHASE_1_LANGUAGE_GENERATOR)):
            # do any filtering here
            if instance_group.name() in EXCLUDED_CURRICULA:
                continue
            directory_name = f"{idx:03}-{instance_group.name()}"
            if not os.path.isdir(root_output_directory / directory_name):
                os.mkdir(root_output_directory /
                         directory_name)  # type: ignore

            # then call some function from make_scenes.py to run the curriculum
            make_scenes(params, [instance_group],
                        root_output_directory / directory_name, viz)
Example #19
def get_parameters():
    parser = argparse.ArgumentParser()
    parser.add_argument("parameter_filename", type=Path)
    args = parser.parse_args()
    return YAMLParametersLoader().load(args.parameter_filename)
Example #20
def main(params: Parameters):
    adam_root = params.existing_directory("adam_root")
    m13_experiments_dir = adam_root / "parameters" / "experiments" / "m13"
    use_pegasus = params.boolean("use_pegasus", default=False)
    if use_pegasus:
        initialize_vista_pegasus_wrapper(params)

    param_files: List[Path] = []

    if params.boolean("include_objects", default=True):
        param_files.append(m13_experiments_dir / "objects.params")

    if params.boolean("include_imprecise_size", default=True):
        param_files.append(m13_experiments_dir / "imprecise_size.params")

    if params.boolean("include_imprecise_temporal", default=True):
        param_files.append(m13_experiments_dir / "imprecise_temporal.params")

    if params.boolean("include_subtle_verb", default=True):
        param_files.append(m13_experiments_dir / "subtle_verb.params")

    if params.boolean("include_object_restrictions", default=True):
        param_files.append(m13_experiments_dir / "object_restrictions.params")

    if params.boolean("include_functionally_defined_objects", default=True):
        param_files.append(m13_experiments_dir / "functionally_defined_objects.params")

    if params.boolean("include_relations", default=True):
        param_files.append(m13_experiments_dir / "relations.params")

    if params.boolean("include_generics", default=True):
        param_files.append(m13_experiments_dir / "generics.params")

    if params.boolean("include_verbs_with_dynamic_prepositions", default=True):
        param_files.append(
            m13_experiments_dir / "events_with_dynamic_prepositions.params"
        )

    if params.boolean("include_m9_complete", default=False):
        param_files.append(m13_experiments_dir / "m9_complete.params")

    if params.boolean("include_m13_complete", default=False):
        param_files.append(m13_experiments_dir / "m13_complete.params")

    if params.boolean("include_m13_shuffled", default=False):
        param_files.append(m13_experiments_dir / "m13_shuffled.params")

    # This activates a special "debug" curriculum,
    # which is meant to be edited in the code by a developer to do fine-grained debugging.
    if params.boolean("include_debug", default=False):
        param_files.append(m13_experiments_dir / "debug.params")

    # If any of the param files don't exist, bail out earlier instead of making the user
    # wait for the error.
    for param_file in param_files:
        if not param_file.exists():
            raise RuntimeError(f"Expected param file {param_file} does not exist")

    for param_file in param_files:
        logging.info("Running %s", param_file)
        experiment_params = YAMLParametersLoader().load(param_file)
        if not use_pegasus:
            log_experiment_entry_point(experiment_params)
        else:
            experiment_name = Locator(experiment_params.string("experiment"))
            experiment_params = experiment_params.unify(
                {
                    "experiment_group_dir": directory_for(experiment_name) / "output",
                    "hypothesis_log_dir": directory_for(experiment_name) / "hypotheses",
                    # State pickles will go under experiment_name/learner_state
                    "learner_logging_path": directory_for(experiment_name),
                    "log_learner_state": True,
                    "resume_from_latest_logged_state": True,
                    "log_hypothesis_every_n_steps": params.integer(
                        "save_state_every_n_steps"
                    ),
                    "debug_learner_pickling": params.boolean(
                        "debug_learner_pickling", default=False
                    ),
                }
            )

            run_python_on_parameters(
                experiment_name, log_experiment_script, experiment_params, depends_on=[]
            )

    if use_pegasus:
        write_workflow_description()
Example #21
# but conda expects PS1 (the prompt variable) to be set.
if [[ -z ${{PS1+x}} ]]
  then
    export PS1=""
fi

{conda_lines}
{spack_lines}

cd {working_directory}
python -m {entry_point} {param_file}
"""

if __name__ == "__main__":
    parser = argparse.ArgumentParser(  # pylint:disable=invalid-name
        description="Run a Python script on SLURM")
    parser.add_argument(
        "cluster_parameters",
        type=Path,
        help="Param file with general information about the SLURM cluster",
    )
    parser.add_argument("job_parameters",
                        type=Path,
                        help="Param file with job-specific parameters")
    args = parser.parse_args()  # pylint:disable=invalid-name

    main(
        cluster_params=YAMLParametersLoader().load(args.cluster_parameters),
        job_param_file=args.job_parameters,
    )