def test_example(tmpdir):
    # temporarily change working directory to be able to run the files "as is"
    cwd = os.getcwd()
    os.chdir(docs_src_folder)
    info_yaml = yaml_load_file("gaussian.yaml")
    info_yaml.pop(_output_prefix)
    globals_example = {}
    exec(
        open(os.path.join(docs_src_folder, "create_info.py")).read(),
        globals_example)
    try:
        assert is_equal_info(info_yaml, globals_example["info"]), (
            "Inconsistent info between yaml and insteractive.")
        exec(
            open(os.path.join(docs_src_folder, "load_info.py")).read(),
            globals_example)
        globals_example["info_from_yaml"].pop(_output_prefix)
        assert is_equal_info(info_yaml, globals_example["info_from_yaml"]), (
            "Inconsistent info between interactive and *loaded* yaml.")
        # Run the chain -- constant seed so results are the same!
        globals_example["info"]["sampler"]["mcmc"] = (
            globals_example["info"]["sampler"]["mcmc"] or {})
        globals_example["info"]["sampler"]["mcmc"].update({"seed": 0})
        exec(
            open(os.path.join(docs_src_folder, "run.py")).read(),
            globals_example)
        # Analyze and plot -- capture print output
        stream = StringIO()
        with stdout_redirector(stream):
            exec(
                open(os.path.join(docs_src_folder, "analyze.py")).read(),
                globals_example)
        # Comparing text output
        out_filename = "analyze_out.txt"
        contents = "".join(
            open(os.path.join(docs_src_folder, out_filename)).readlines())
        # The endswith guarantees that getdist messages and warnings are ignored
        assert stream.getvalue().replace("\n", "").replace(" ", "").endswith(
            contents.replace("\n", "").replace(" ", "")), (
                "Text output does not coincide:\nwas\n%s\nand " % contents +
                "now it's\n%sstream.getvalue()" % stream.getvalue())
        # Comparing plot
        plot_filename = "example_quickstart_plot.png"
        test_filename = tmpdir.join(plot_filename)
        globals_example["gdplot"].export(str(test_filename))
        print("Plot created at '%s'" % str(test_filename))
        test_img = imread(str(test_filename)).astype(float)
        docs_img = imread(os.path.join(docs_img_folder,
                                       plot_filename)).astype(float)
        npixels = test_img.shape[0] * test_img.shape[1]
        assert (
            np.count_nonzero(test_img == docs_img) /
            (4 * npixels) >= pixel_tolerance), (
                "Images are too different. Maybe GetDist conventions changed?")
    finally:
        # Back to the working directory of the tests, just in case
        os.chdir(cwd)
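
# Note: `stdout_redirector`, used above and below, is assumed to behave like
# contextlib.redirect_stdout, i.e. it captures print() output into the given
# stream. A minimal, hypothetical stand-in (not necessarily Cobaya's helper):
from contextlib import redirect_stdout

stdout_redirector = redirect_stdout
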
def test_example():
    # temporarily change working directory to be able to run the files "as is"
    cwd = os.getcwd()
    try:
        os.chdir(docs_src_folder)
        info_yaml = yaml_load_file("gaussian.yaml")
        info_yaml.pop("output")
        globals_example = {}
        exec(
            open(os.path.join(docs_src_folder, "create_info.py")).read(),
            globals_example)
        assert is_equal_info(info_yaml, globals_example["info"]), (
            "Inconsistent info between yaml and interactive.")
        exec(
            open(os.path.join(docs_src_folder, "load_info.py")).read(),
            globals_example)
        globals_example["info_from_yaml"].pop("output")
        assert is_equal_info(info_yaml, globals_example["info_from_yaml"]), (
            "Inconsistent info between interactive and *loaded* yaml.")
        # Run the chain -- constant seed so results are the same!
        globals_example["info"]["sampler"]["mcmc"] = (
            globals_example["info"]["sampler"]["mcmc"] or {})
        exec(
            open(os.path.join(docs_src_folder, "run.py")).read(),
            globals_example)
        # Run the minimizer -- output doesn't matter. Just checking that it does not fail
        exec(
            open(os.path.join(docs_src_folder, "run_min.py")).read(),
            globals_example)
        # Analyze and plot -- capture print output
        stream = StringIO()
        with stdout_redirector(stream):
            exec(
                open(os.path.join(docs_src_folder, "analyze.py")).read(),
                globals_example)
        # Checking results
        mean, covmat = [
            globals_example["info"]["likelihood"]["gaussian_mixture"][x]
            for x in ["means", "covs"]
        ]
        assert (KL_norm(m1=mean,
                        S1=covmat,
                        m2=globals_example["mean"],
                        S2=globals_example["covmat"]) <= KL_tolerance
                ), ("Sampling appears not to have worked too well. Run again?")
    finally:
        # Back to the working directory of the tests, just in case
        os.chdir(cwd)
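
For reference, the final assertion bounds the divergence between the input Gaussian and the one recovered from the chain. Assuming KL_norm computes the standard Kullback-Leibler divergence between two multivariate Gaussians (Cobaya's helper may use a different convention or symmetrization), a minimal sketch is:

import numpy as np

def gaussian_kl(m1, S1, m2, S2):
    # KL( N(m1, S1) || N(m2, S2) ) for multivariate normal distributions.
    m1, m2 = np.atleast_1d(m1), np.atleast_1d(m2)
    S1, S2 = np.atleast_2d(S1), np.atleast_2d(S2)
    dim = len(m1)
    S2inv = np.linalg.inv(S2)
    diff = m2 - m1
    return 0.5 * (np.trace(S2inv @ S1) + diff @ S2inv @ diff - dim
                  + np.log(np.linalg.det(S2) / np.linalg.det(S1)))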
Example #3
    def dump_info(self, input_info, full_info):
        """
        Saves the info in the chain folder twice:
           - the input info.
           - idem, populated with the modules' defaults.

        If resuming a sample, checks first that old and new infos are consistent.
        """
        # trim known params of each likelihood: for internal use only
        full_info_trimmed = deepcopy(full_info)
        for lik_info in full_info_trimmed.get(_likelihood, {}).values():
            if hasattr(lik_info, "pop"):
                lik_info.pop(_params, None)
        try:
            # We will test the old info against the dumped+loaded new info.
            # This is because we can't actually check if python objects are the same as before.
            old_info = yaml_load_file(self.file_full)
            new_info = yaml_load(yaml_dump(full_info_trimmed))
            if not is_equal_info(old_info, new_info, strict=False):
                self.log.error(
                    "Old and new sample information not compatible! "
                    "Resuming not possible!")
                raise HandledException
        except IOError:
            # There was no previous chain
            pass
        # We write the new one anyway (maybe updated debug, resuming...)
        for f, info in [(self.file_input, input_info),
                        (self.file_full, full_info_trimmed)]:
            with open(f, "w") as f_out:
                try:
                    f_out.write(yaml_dump(info))
                except OutputError as e:
                    self.log.error(e.message)
                    raise HandledException
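
The trimming loop at the top of dump_info strips each likelihood's internal "params" block before anything is compared or written. A small, self-contained illustration of its effect, using a hypothetical info dict:

from copy import deepcopy

full_info = {"likelihood": {"gaussian": {"params": {"mean": 0.0}, "stds": 1.0}}}
trimmed = deepcopy(full_info)
for lik_info in trimmed.get("likelihood", {}).values():
    if hasattr(lik_info, "pop"):
        lik_info.pop("params", None)
assert trimmed == {"likelihood": {"gaussian": {"stds": 1.0}}}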
Example #4
File: output.py Project: rancesol/cobaya
    def dump_info(self, input_info, updated_info, check_compatible=True):
        """
        Saves the info in the chain folder twice:
           - the input info.
           - idem, populated with the modules' defaults.

        If resuming a sample, checks first that old and new infos are consistent.
        """
        # trim known params of each likelihood: for internal use only
        updated_info_trimmed = deepcopy_where_possible(updated_info)
        for lik_info in updated_info_trimmed.get(_likelihood, {}).values():
            if hasattr(lik_info, "pop"):
                lik_info.pop(_params, None)
        if check_compatible:
            try:
                # We will test the old info against the dumped+loaded new info.
                # This is because we can't actually check if python objects do change
                old_info = self.reload_updated_info()
                new_info = yaml_load(yaml_dump(updated_info_trimmed))
                ignore_blocks = []
                if list(new_info.get(_sampler, [None]))[0] == "minimize":
                    ignore_blocks = [_sampler]
                if not is_equal_info(old_info, new_info, strict=False,
                                     ignore_blocks=ignore_blocks):
                    # HACK!!! NEEDS TO BE FIXED
                    if list(updated_info.get(_sampler, [None]))[0] == "minimize":
                        raise LoggedError(
                            self.log, "Old and new sample information not compatible! "
                            "At this moment it is not possible to 'force' deletion of "
                            "and old 'minimize' run. Please delete it by hand. "
                            "We are working on fixing this very soon!")
                    raise LoggedError(
                        self.log, "Old and new sample information not compatible! "
                        "Resuming not possible!")
            except IOError:
                # There was no previous chain
                pass
        # We write the new one anyway (maybe updated debug, resuming...)
        for f, info in [(self.file_input, input_info),
                        (self.file_updated, updated_info_trimmed)]:
            if not info:
                continue
            with open(f, "w") as f_out:
                try:
                    f_out.write(yaml_dump(info))
                except OutputError as e:
                    raise LoggedError(self.log, str(e))
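
# Note on the ignore_blocks logic above: it makes the compatibility check skip
# the "sampler" block when the new run is a minimizer. Conceptually, a
# hypothetical simplification (not Cobaya's actual is_equal_info) would be:
def equal_ignoring(old, new, ignore_blocks=()):
    # Drop the listed top-level blocks from both infos, then compare the rest.
    def strip(info):
        return {k: v for k, v in info.items() if k not in ignore_blocks}
    return strip(old) == strip(new)

# e.g. equal_ignoring(old_info, new_info, ignore_blocks=["sampler"])
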
def test_cosmo_docs_basic():
    flag = True
    for theo in ["camb", "classy"]:
        info_new = create_input(preset=preset_pre + theo)
        info_yaml_new = yaml_dump(info_new)
        file_path = os.path.join(path, file_pre + theo + ".yaml")
        with open(file_path) as docs_file:
            info_yaml_docs = "".join(docs_file.readlines())
        info_docs = yaml_load(info_yaml_docs)
        if not is_equal_info(
                info_new, info_docs, strict=True, print_not_log=True):
            with open(file_path, "w") as docs_file:
                docs_file.write(info_yaml_new)
            flag = False
            print("OLD:\n%s" % info_yaml_docs)
            print("----------------------------------------")
            print("NEW:\n%s" % info_yaml_new)
    assert flag, ("Differences in example input file. "
                  "Files have been re-generated; check out your git diff.")
Example #6
def check_sampler_info(info_old: Optional[SamplersDict],
                       info_new: SamplersDict,
                       is_resuming=False):
    """
    Checks compatibility between the new sampler info and that of a pre-existing run.

    Done separately from `Output.check_compatible_and_dump` because there may be
    multiple samplers mentioned in an `updated.yaml` file, e.g. `MCMC` + `Minimize`.
    """
    logger_sampler = get_logger(__name__)
    if not info_old:
        return
    # TODO: restore this at some point: just append minimize info to the old one
    # There is old info, but the new one is Minimizer and the old one is not
    # if (len(info_old) == 1 and list(info_old) != ["minimize"] and
    #      list(info_new) == ["minimize"]):
    #     # In-place append of old+new --> new
    #     aux = info_new.pop("minimize")
    #     info_new.update(info_old)
    #     info_new.update({"minimize": aux})
    #     info_old = {}
    #     keep_old = {}
    if list(info_old) != list(info_new) and list(info_new) == ["minimize"]:
        return
    if list(info_old) == list(info_new):
        # Restore some selected old values for some classes
        keep_old = get_preferred_old_values({"sampler": info_old})
        info_new = recursive_update(info_new, keep_old.get("sampler", {}))
    if not is_equal_info({"sampler": info_old}, {"sampler": info_new},
                         strict=False):
        if is_resuming:
            raise LoggedError(
                logger_sampler,
                "Old and new Sampler information not compatible! "
                "Resuming not possible!")
        else:
            raise LoggedError(
                logger_sampler,
                "Found old Sampler information which is not compatible "
                "with the new one. Delete the previous output manually, "
                "or automatically with either "
                "'-f', '--force', 'force: True'")
Example #7
    def check_and_dump_info(self, input_info, updated_info, check_compatible=True,
                            cache_old=False, use_cache_old=False, ignore_blocks=()):
        """
        Saves the info in the chain folder twice:
           - the input info.
           - idem, populated with the components' defaults.

        If resuming a sample, checks first that old and new infos and versions are
        consistent.
        """
        # trim known params of each likelihood: for internal use only
        self.check_lock()
        updated_info_trimmed = deepcopy_where_possible(updated_info)
        updated_info_trimmed["version"] = get_version()
        for like_info in updated_info_trimmed.get("likelihood", {}).values():
            (like_info or {}).pop("params", None)
        if check_compatible:
            # We will test the old info against the dumped+loaded new info.
            # This is because we can't actually check if python objects do change
            try:
                old_info = self.reload_updated_info(cache=cache_old,
                                                    use_cache=use_cache_old)
            except InputImportError:
                # for example, when there's a dynamically generated class that cannot
                # be found by the yaml loader (could use yaml loader that ignores them)
                old_info = None
            if old_info:
                # use consistent yaml read-in types
                # TODO: could probably just compare full infos here, with externals?
                #  for the moment cautiously keeping old behaviour
                old_info = yaml_load(yaml_dump(old_info))  # type: ignore
                new_info = yaml_load(yaml_dump(updated_info_trimmed))
                if not is_equal_info(old_info, new_info, strict=False,
                                     ignore_blocks=list(ignore_blocks) + [
                                         "output"]):
                    raise LoggedError(
                        self.log, "Old and new run information not compatible! "
                                  "Resuming not possible!")
                # Deal with version comparison separately:
                # - If not specified now, take the one used in resume info
                # - If specified both now and before, check that the new one is not older
                # (For Cobaya's own version, prefer new one always)
                old_version = old_info.get("version")
                new_version = new_info.get("version")
                if isinstance(old_version, str) and isinstance(new_version, str):
                    if version.parse(old_version) > version.parse(new_version):
                        raise LoggedError(
                            self.log, "You are trying to resume a run performed with a "
                                      "newer version of Cobaya: %r (you are using %r). "
                                      "Please, update your Cobaya installation.",
                            old_version, new_version)
                for k in set(kinds).intersection(updated_info):
                    if k in ignore_blocks or updated_info[k] is None:
                        continue
                    for c in updated_info[k]:
                        new_version = updated_info[k][c].get("version")
                        old_version = old_info[k][c].get("version")  # type: ignore
                        if new_version is None:
                            updated_info[k][c]["version"] = old_version
                            updated_info_trimmed[k][c]["version"] = old_version
                        elif old_version is not None:
                            cls = get_resolved_class(
                                c, k, None_if_not_found=True,
                                class_name=updated_info[k][c].get("class"))
                            if cls and cls.compare_versions(
                                    old_version, new_version, equal=False):
                                raise LoggedError(
                                    self.log, "You have requested version %r for "
                                              "%s:%s, but you are trying to resume a "
                                              "run that used a newer version: %r.",
                                    new_version, k, c, old_version)
        # If resuming, we don't want to do *partial* dumps
        if ignore_blocks and self.is_resuming():
            return
        # Work on a copy of the input info, since we are updating the prefix
        # (the updated one is already a copy)
        if input_info is not None:
            input_info = deepcopy_where_possible(input_info)
        # Write the new one
        for f, info in [(self.file_input, input_info),
                        (self.file_updated, updated_info_trimmed)]:
            if info:
                for k in ignore_blocks:
                    info.pop(k, None)
                info.pop("debug", None)
                info.pop("force", None)
                info.pop("resume", None)
                # make sure the dumped output_prefix only contains the file prefix,
                # not the folder, since it's already been placed inside it
                info["output"] = self.updated_prefix()
                with open(f, "w", encoding="utf-8") as f_out:
                    try:
                        f_out.write(yaml_dump(sort_cosmetic(info)))
                    except OutputError as e:
                        raise LoggedError(self.log, str(e))
        if updated_info_trimmed and has_non_yaml_reproducible(updated_info_trimmed):
            try:
                import dill
            except ImportError:
                self.mpi_info('Install "dill" to save reproducible options file.')
            else:
                import pickle
                try:
                    with open(self.dump_file_updated, 'wb') as f:
                        dill.dump(sort_cosmetic(updated_info_trimmed), f,
                                  pickle.HIGHEST_PROTOCOL)
                except pickle.PicklingError as e:
                    os.remove(self.dump_file_updated)
                    self.mpi_info('Options file cannot be pickled %s', e)
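
The dill fallback at the end is there because an updated info containing non-YAML-reproducible pieces (for instance an external Python likelihood) cannot be fully written as plain YAML. A minimal sketch of that idea, with a hypothetical file name and options dict, assuming dill is installed:

import pickle
import dill

options = {"likelihood": {"external": lambda x=0: -0.5 * x ** 2}}
# dill, unlike plain pickle, can serialize lambdas and locally defined functions.
with open("updated_options.dill.pkl", "wb") as f:
    dill.dump(options, f, pickle.HIGHEST_PROTOCOL)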
Example #8
File: output.py Project: yufdu/cobaya
    def check_and_dump_info(self,
                            input_info,
                            updated_info,
                            check_compatible=True,
                            cache_old=False,
                            use_cache_old=False,
                            ignore_blocks=()):
        """
        Saves the info in the chain folder twice:
           - the input info.
           - idem, populated with the components' defaults.

        If resuming a sample, checks first that old and new infos and versions are
        consistent.
        """
        # trim known params of each likelihood: for internal use only
        updated_info_trimmed = deepcopy_where_possible(updated_info)
        updated_info_trimmed[_version] = __version__
        for like_info in updated_info_trimmed.get(kinds.likelihood,
                                                  {}).values():
            (like_info or {}).pop(_params, None)
        if check_compatible:
            # We will test the old info against the dumped+loaded new info.
            # This is because we can't actually check if python objects do change
            old_info = self.reload_updated_info(cache=cache_old,
                                                use_cache=use_cache_old)
            if old_info:
                new_info = yaml_load(yaml_dump(updated_info_trimmed))
                if not is_equal_info(
                        old_info,
                        new_info,
                        strict=False,
                        ignore_blocks=list(ignore_blocks) + [_output_prefix]):
                    raise LoggedError(
                        self.log,
                        "Old and new run information not compatible! "
                        "Resuming not possible!")
                # Deal with version comparison separately:
                # - If not specified now, take the one used in resume info
                # - If specified both now and before, check that the new one is not older
                # (For Cobaya's own version, prefer new one always)
                old_version = old_info.get(_version, None)
                new_version = new_info.get(_version, None)
                if old_version:
                    if version.parse(old_version) > version.parse(new_version):
                        raise LoggedError(
                            self.log,
                            "You are trying to resume a run performed with a "
                            "newer version of Cobaya: %r (you are using %r). "
                            "Please, update your Cobaya installation.",
                            old_version, new_version)
                for k in (kind for kind in kinds if kind in updated_info):
                    if k in ignore_blocks:
                        continue
                    for c in updated_info[k]:
                        new_version = updated_info[k][c].get(_version)
                        old_version = old_info[k][c].get(_version)
                        if new_version is None:
                            updated_info[k][c][_version] = old_version
                            updated_info_trimmed[k][c][_version] = old_version
                        elif old_version is not None:
                            cls = get_class(c, k, None_if_not_found=True)
                            if cls and cls.compare_versions(
                                    old_version, new_version, equal=False):
                                raise LoggedError(
                                    self.log,
                                    "You have requested version %r for "
                                    "%s:%s, but you are trying to resume a "
                                    "run that used a newer version: %r.",
                                    new_version, k, c, old_version)
        # If resuming, we don't want to do *partial* dumps
        if ignore_blocks and self.is_resuming():
            return
        # Work on a copy of the input info, since we are updating the prefix
        # (the updated one is already a copy)
        if input_info is not None:
            input_info = deepcopy_where_possible(input_info)
        # Write the new one
        for f, info in [(self.file_input, input_info),
                        (self.file_updated, updated_info_trimmed)]:
            if info:
                for k in ignore_blocks:
                    info.pop(k, None)
                info.pop(_debug, None)
                info.pop(_force, None)
                info.pop(_resume, None)
                # make sure the dumped output_prefix only contains the file prefix,
                # not the folder, since it's already been placed inside it
                info[_output_prefix] = self.updated_prefix()
                with open(f, "w", encoding="utf-8") as f_out:
                    try:
                        f_out.write(yaml_dump(sort_cosmetic(info)))
                    except OutputError as e:
                        raise LoggedError(self.log, str(e))