Example #1
class TestModels:
    classes = get_concrete(Model)
    concrete = []
    num_start = 100

    @classmethod
    def setup_class(cls):
        # Instantiate every concrete model once and attach the matching dummy dataset
        cls.pk_data = DummyPowerSpectrum_SDSS_DR12_Z061_NGC()
        cls.xi_data = DummyCorrelationFunction_SDSS_DR12_Z061_NGC()
        for c in cls.classes:
            model = c()
            if isinstance(model, PowerSpectrumFit):
                model.set_data(cls.pk_data.get_data())
            elif isinstance(model, CorrelationFunctionFit):
                model.set_data(cls.xi_data.get_data())
            cls.concrete.append(model)

    def test_pk_nonnan_likelihood_with_default_param_values(self):
        for c in self.concrete:
            if isinstance(c, PowerSpectrumFit):
                params = c.get_defaults()
                posterior = c.get_posterior(params)
                assert np.isfinite(posterior), f"Model {str(c)} at params {params} gave posterior {posterior}"

    def test_pk_random_starting_point_doesnt_fail(self):
        for c in self.concrete:
            if isinstance(c, PowerSpectrumFit):
                np.random.seed(0)
                for i in range(self.num_start):
                    params = c.get_raw_start()
                    posterior = c.get_posterior(params)
                    assert np.isfinite(posterior), f"Model {str(c)} at params {params} gave posterior {posterior}"

    def test_xi_nonnan_likelihood_with_default_param_values(self):
        for c in self.concrete:
            if isinstance(c, CorrelationFunctionFit):
                params = c.get_defaults()
                posterior = c.get_posterior(params)
                assert np.isfinite(posterior), f"Model {str(c)} at params {params} gave posterior {posterior}"

    def test_xi_random_starting_point_doesnt_fail(self):
        for c in self.concrete:
            if isinstance(c, CorrelationFunctionFit):
                np.random.seed(0)
                for i in range(self.num_start):
                    params = c.get_raw_start()
                    posterior = c.get_posterior(params)
                    assert np.isfinite(posterior), f"Model {str(c)} at params {params} gave posterior {posterior}"
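Both test classes in these examples rely on a `get_concrete` helper to enumerate every non-abstract subclass of a base class. The repository ships its own implementation; the sketch below only illustrates the idea, and uses a hypothetical name so it is not mistaken for the real helper.

import inspect


def get_concrete_sketch(base_class):
    """Recursively collect all non-abstract subclasses of base_class (illustrative only)."""
    found = []
    for sub in base_class.__subclasses__():
        if not inspect.isabstract(sub):
            found.append(sub)
        found.extend(get_concrete_sketch(sub))  # Also walk subclasses of subclasses
    return found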
Example #2
class TestDataset:
    classes = get_concrete(Dataset)
    concrete = []

    @classmethod
    def setup_class(cls):
        cls.concrete = [c() for c in cls.classes]

    def test_all_datasets_define_cosmology(self):
        for c in self.concrete:
            datas = c.get_data()
            for data in datas:
                keys = list(data.keys())
                assert "cosmology" in keys, "Data should have a cosmology key!"
                cosmology = data["cosmology"]
                assert isinstance(cosmology, dict)
                required_keys = ["z", "om", "h0", "ns", "ob"]
                for r in required_keys:
                    assert r in cosmology.keys(), f"Cosmology should have key {r}, but has keys {list(cosmology.keys())}"

    def test_all_power_spectrum_datasets(self):
        for c in self.concrete:
            if isinstance(c, PowerSpectrum):
                datas = c.get_data()
                for data in datas:
                    required_keys = ["name", "ks_output", "ks", "pk", "ks_input", "w_scale", "w_transform", "w_pk"]
                    computed_keys = ["w_mask", "num_mocks", "icov", "cov"]
                    for r in required_keys:
                        assert r in data.keys(), f"Power spectrum data needs to have key {r}"
                    for r in computed_keys:
                        assert r in data.keys(), f"Power spectrum data should have computed key {r}"

    def test_all_correlation_function_datasets(self):
        for c in self.concrete:
            if isinstance(c, CorrelationFunction):
                datas = c.get_data()
                for data in datas:
                    required_keys = ["name", "dist", "xi0"]  # "xi", "xi2", "xi4" would be good to have in the future
                    computed_keys = ["num_mocks", "icov", "cov"]
                    for r in required_keys:
                        assert r in data.keys(), f"Correlation function data needs to have key {r}"
                    for r in computed_keys:
                        assert r in data.keys(), f"Correlation function data should have computed key {r}"
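For reference, a hypothetical data dictionary that would satisfy the cosmology check above could be shaped as follows; the numbers are placeholders, not values read from a real dataset.

example_data = {
    "cosmology": {
        "z": 0.61,      # effective redshift of the sample
        "om": 0.31,     # matter density
        "h0": 0.676,    # dimensionless Hubble parameter
        "ns": 0.97,     # scalar spectral index
        "ob": 0.04814,  # baryon density
    },
    # Power spectrum datasets additionally carry "name", "ks_output", "ks", "pk",
    # "ks_input", "w_scale", "w_transform" and "w_pk", plus the computed keys
    # "w_mask", "num_mocks", "icov" and "cov" checked above.
}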
Example #3
    parser = argparse.ArgumentParser()
    parser.add_argument("--model", type=str, default=None)
    parser.add_argument("--redshift", type=float, default=0.51)
    parser.add_argument("--om", type=float, default=0.31)
    parser.add_argument("--h0", type=float, default=0.676)
    parser.add_argument("--ob", type=float, default=0.04814)
    parser.add_argument("--ns", type=float, default=0.97)
    parser.add_argument("--mnu", type=float, default=0.0)
    parser.add_argument("--reconsmoothscale", type=float, default=21.21)
    args = parser.parse_args()

    assert (
        args.model is not None
    ), "This file is invoked by generate.py and requires you to pass in a model name, redshift, om, h0, ob, ns, mnu and reconsmoothscale"
    assert len(get_concrete(Model)) > 0, "get_concrete(Model) reports no subclasses. Send Sam an email, imports are funky."

    # Find the right model
    model = [c() for c in get_concrete(Model) if args.model == c.__name__][0]
    logging.info(f"Model found is {model}")
    model.set_cosmology(
        {
            "z": args.redshift,
            "h0": args.h0,
            "om": args.om,
            "ob": args.ob,
            "ns": args.ns,
            "mnu": args.mnu,
            "reconsmoothscale": args.reconsmoothscale,
        },
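The single-line lookup `[c() for c in get_concrete(Model) if args.model == c.__name__][0]` raises a bare IndexError when no class matches the supplied name. A slightly more defensive variant, shown purely as a sketch and not as the repository's code, is:

# Sketch of a more explicit lookup: fail with a readable message if the name is wrong
matches = [c for c in get_concrete(Model) if c.__name__ == args.model]
assert len(matches) == 1, f"Expected exactly one Model subclass named {args.model}, found {len(matches)}"
model = matches[0]()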
Example #4
    return cs


if __name__ == "__main__":
    logging.basicConfig(
        level=logging.INFO,
        format="[%(levelname)7s |%(funcName)20s]   %(message)s")

    # Set up command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-r", "--refresh", action="store_true", default=False)
    parser.add_argument("--desi", action="store_true", default=False)
    args = parser.parse_args()

    # All dataset classes, excluding DESI ones unless --desi is passed (their
    # pregenerated data is not currently included in the repo)
    base_names = [
        c for c in get_concrete(Dataset)
        if "DESI" not in c.__name__ or args.desi
    ]
    base_datasets = [c() for c in base_names]  # One instance of each dataset class

    # For each dataset, check nredshift_bins and nsmooth_types. Duplicate the dataset classes so that each combination
    # of redshift_bins and smooth_types is present.
    ndatasets = [[d.nredshift_bins, d.nsmoothtypes] for d in base_datasets]
    datasets = []
    for (c, variants) in zip(base_names, ndatasets):
        # Stops Barry from needing pregenerated stuff for DESI data that is not currently included in the repo
        if "DESI" not in c.__name__ or args.desi:
            if variants[0] > 1:
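The comment above describes duplicating each dataset class over its redshift bins and smoothing types. One way such a duplication could be expressed is sketched below; the keyword names `redshift_bin` and `smoothtype` are assumptions for illustration and may not match the constructor signature the repository actually uses.

# Illustrative sketch only: one instance per (redshift bin, smooth type) combination
datasets_sketch = []
for c, (nbins, nsmooth) in zip(base_names, ndatasets):
    for bin_index in range(nbins):
        for smooth_index in range(nsmooth):
            datasets_sketch.append(c(redshift_bin=bin_index + 1, smoothtype=smooth_index + 1))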
Example #5
if __name__ == "__main__":
    logging.basicConfig(
        level=logging.INFO,
        format="[%(levelname)7s |%(funcName)20s]   %(message)s")

    # Set up command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-r", "--refresh", action="store_true", default=False)
    args = parser.parse_args()

    # This should be run on a HPC for the PTGenerator side of things.
    assert not is_local(), "Please run this on your HPC system"

    # Every concrete dataset except the dummy test datasets
    datasets = [
        c() for c in get_concrete(Dataset) if "Dummy" not in c.__name__
    ]

    cosmologies = get_cosmologies(datasets)
    logging.info(f"Have {len(cosmologies)} cosmologies")

    # Ensure all cosmologies exist
    for c in cosmologies:
        logging.info(f"Ensuring cosmology {c} is generated")
        generator = CambGenerator(om_resolution=101,
                                  h0_resolution=1,
                                  h0=c["h0"],
                                  ob=c["ob"],
                                  ns=c["ns"],
                                  redshift=c["z"])
        generator.load_data(can_generate=True)
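`get_cosmologies` is defined elsewhere in the repository. A plausible sketch of what it needs to do for the loop above to work, namely collect the distinct cosmology dictionaries attached to each dataset, is shown below as an assumption rather than the actual implementation.

def get_cosmologies_sketch(datasets):
    """Collect the unique cosmology dicts carried by the given datasets (illustrative only)."""
    cosmologies = []
    for dataset in datasets:
        for data in dataset.get_data():
            cosmology = data["cosmology"]
            if cosmology not in cosmologies:  # dict equality deduplicates repeated cosmologies
                cosmologies.append(cosmology)
    return cosmologies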