Example #1
def test_bad_loader(tmpdirec):
    fl = tmpdirec / "test-npy"

    with open(fl, "w") as ff:
        ff.write("some-text")

    with pytest.raises(LoadError):
        yaml.load(f"!npy {fl}")
Example #2
def test_custom_unit(c):
    s = dump(c)
    with pytest.warns(u.UnitsWarning, match=f"'{c!s}' did not parse") as w:
        cy = load(s)
    assert len(w) == 1
    assert isinstance(cy, u.UnrecognizedUnit)
    assert str(cy) == str(c)

    with u.add_enabled_units(c):
        cy2 = load(s)
        assert cy2 is c
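Here `c` is supplied by parametrization with a unit that astropy does not recognize. A minimal sketch of a fixture that could provide such a unit follows; the unit name and definition are illustrative, not taken from the astropy test suite.

@pytest.fixture
def c():
    # A custom unit unknown to the default registry, so round-tripping it
    # through yaml warns and comes back as an UnrecognizedUnit.
    return u.def_unit("bakers_dozen", 13 * u.one)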
Example #3
def test_custom_unit(c):
    s = dump(c)
    with catch_warnings() as w:
        cy = load(s)
    assert len(w) == 1
    assert f"'{c!s}' did not parse" in str(w[0].message)
    assert isinstance(cy, u.UnrecognizedUnit)
    assert str(cy) == str(c)

    with u.add_enabled_units(c):
        with catch_warnings() as w2:
            cy2 = load(s)
        assert len(w2) == 0

        assert cy2 is c
Example #4
    def fromfile(cls, fh, verify=True):
        """Create a header from a yaml-encoded 'header' extension."""
        from astropy.io.misc import yaml

        data = fh['header'][()]
        items = yaml.load(data)
        return cls(**items, mutable=False, verify=verify)
Example #5
File: yaml.py  Project: lpsinger/astropy
def from_yaml(yml, *, cosmology=None):
    """Load `~astropy.cosmology.Cosmology` from :mod:`yaml` object.

    Parameters
    ----------
    yml : str
        :mod:`yaml` representation of |Cosmology| object
    cosmology : str, `~astropy.cosmology.Cosmology` class, or None (optional, keyword-only)
        The expected cosmology class (or string name thereof). This argument
        is only checked for correctness if not `None`.

    Returns
    -------
    `~astropy.cosmology.Cosmology` subclass instance

    Raises
    ------
    TypeError
        If the |Cosmology| object loaded from ``yml`` is not an instance of
        the ``cosmology`` (and ``cosmology`` is not `None`).
    """
    with u.add_enabled_units(cu):
        cosmo = load(yml)

    # Check argument `cosmology`, if not None
    # This kwarg is required for compatibility with |Cosmology.from_format|
    if isinstance(cosmology, str):
        cosmology = _COSMOLOGY_CLASSES[cosmology]
    if cosmology is not None and not isinstance(cosmo, cosmology):
        raise TypeError(f"cosmology {cosmo} is not an {cosmology} instance.")

    return cosmo
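A minimal round-trip sketch for this loader, assuming the yaml string is produced by `astropy.io.misc.yaml.dump` as in example #18 and using the built-in Planck18 cosmology purely as an illustration:

from astropy.cosmology import Planck18
from astropy.io.misc.yaml import dump

yml = dump(Planck18)                               # serialize the cosmology to a yaml string
cosmo = from_yaml(yml, cosmology="FlatLambdaCDM")  # load and verify the expected class
assert cosmo == Planck18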
Example #6
def loadyaml(filename):
    from astropy.io.misc import yaml as ayaml
    # Read yaml
    with open(filename, 'r') as infile:
        data = ayaml.load(infile)
    # Return
    return data
Example #7
    def from_yaml(cls, yaml_file: str | dict) -> Observatory:
        """Instantiate an Observatory from a compatible YAML config file."""
        if isinstance(yaml_file, (str, Path)):
            with open(yaml_file) as fl:
                data = yaml.load(fl)
        elif isinstance(yaml_file, collections.abc.Mapping):
            data = yaml_file
        else:
            raise ValueError(
                "yaml_file must be a string filepath or a raw dict from such a file."
            )

        # Mask out some antennas if a max_antpos is set in the YAML
        max_antpos = data.pop("max_antpos", np.inf * un.m)
        antpos = data.pop("antpos")
        _n = len(antpos)

        antpos = antpos[np.sum(np.square(antpos), axis=1) < max_antpos**2]

        if max_antpos < np.inf * un.m:
            logger.info(
                f"Removed {_n - len(antpos)} antennas using given max_antpos={max_antpos} m."
            )

        # If we get only East and North coords, add zeros for the UP direction.
        if antpos.shape[1] == 2:
            antpos = np.hstack((antpos, np.zeros((len(antpos), 1))))

        _beam = data.pop("beam")
        kind = _beam.pop("class")
        _beam = getattr(beam, kind)(**_beam)

        return cls(antpos=antpos, beam=_beam, **data)
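The method pops three keys from the config: `antpos`, an optional `max_antpos` cutoff, and a `beam` block whose `class` entry names a class in the `beam` module. A hedged sketch of an equivalent in-memory mapping follows; the beam class name and its parameters are assumptions, not taken from the snippet above.

# Hypothetical configuration mapping for Observatory.from_yaml.
config = {
    # ENU antenna positions in metres (three columns, so no zero-padding is needed).
    "antpos": np.array([[0.0, 0.0, 0.0], [14.0, 0.0, 0.0], [0.0, 14.0, 0.0]]) * un.m,
    # Antennas farther than this from the origin are dropped.
    "max_antpos": 100 * un.m,
    # Assumed beam specification: "class" selects a class from the beam module.
    "beam": {"class": "GaussianBeam", "frequency": 150 * un.MHz, "dish_size": 14 * un.m},
}
# observatory = Observatory.from_yaml(config)  # accepted directly as a Mapping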
Example #8
def test_timedelta():
    t = _get_time()
    dt = t - t + 0.1234556 * u.s
    dty = load(dump(dt))

    assert type(dt) is type(dty)
    for attr in ('shape', 'jd1', 'jd2', 'format', 'scale'):
        assert np.all(getattr(dt, attr) == getattr(dty, attr))
Example #9
def test_skycoord(frame):

    c = SkyCoord([[1, 2], [3, 4]], [[5, 6], [7, 8]],
                 unit='deg', frame=frame,
                 obstime=Time('2016-01-02'),
                 location=EarthLocation(1000, 2000, 3000, unit=u.km))
    cy = load(dump(c))
    compare_coord(c, cy)
Example #10
def observation_config(tmpdirec, observatory_config):
    with open(path.join(example_configs, "observation_hera.yml")) as fl:
        observation = yaml.load(fl)

    observation["observatory"] = observatory_config

    with open(path.join(tmpdirec, "observation.yml"), "w") as fl:
        yaml.dump(observation, fl)

    return path.join(tmpdirec, "observation.yml")
Example #11
def test_pickle_loader(tmpdirec):
    pkl = tmpdirec / "test-pkl.pkl"

    obj = {"an": "object"}
    with open(pkl, "wb") as fl:
        pickle.dump(obj, fl)

    d = yaml.load(f"!pkl {pkl}")

    assert d == obj
Example #12
def sensitivity_config(tmpdirec, observation_config):
    with open(path.join(example_configs, "sensitivity_hera.yml")) as fl:
        sensitivity = yaml.load(fl)

    sensitivity["observation"] = observation_config

    with open(path.join(tmpdirec, "sensitivity.yml"), "w") as fl:
        yaml.dump(sensitivity, fl)

    return path.join(tmpdirec, "sensitivity.yml")
Example #13
def _load_yaml(yaml_file):
    if isinstance(yaml_file, str):
        with open(yaml_file) as fl:
            data = yaml.load(fl)
    elif isinstance(yaml_file, Mapping):
        data = yaml_file
    else:
        raise ValueError(
            "yaml_file must be a string filepath or a raw dict from such a file."
        )
    return data
Example #14
def test_npz_loader(tmpdirec):
    npz = tmpdirec / "test-npz.npz"

    obj = {"an": np.linspace(0, 1, 10), "b": np.zeros(10)}

    np.savez(npz, **obj)

    d = yaml.load(f"!npz {npz}")

    for k, v in d.items():
        assert k in obj
        assert np.allclose(v, obj[k])
Example #15
def read_instrument_packs():
    """Read all instrument packs into the 'packs' variable."""
    for pack in glob.glob(resource_filename("pahfit", "packs/instrument/*.yaml")):
        try:
            with open(pack) as fd:
                p = yaml.load(fd)
        except IOError as e:
            raise PAHFITPackError("Error reading instrument pack file\n"
                                  f"\t{pack}\n\t{repr(e)}")
        else:
            # splitext is safer than rstrip('.yaml'), which would also strip any
            # trailing 'y'/'a'/'m'/'l' characters from the pack name.
            telescope = os.path.splitext(os.path.basename(pack))[0]
            packs[telescope] = p
Example #16
def load_params(file: str):
    file = u.sanitise_file_ext(file, '.yaml')

    u.debug_print(2, 'Loading parameter file from ' + str(file))

    if os.path.isfile(file):
        with open(file) as f:
            p = yaml.load(f)
    else:
        p = None
        u.debug_print(1, 'No parameter file found at',
                      str(file) + ', returning None.')
    return p
Example #17
def from_yaml(yml):
    """Load `~astropy.cosmology.Cosmology` from :mod:`yaml` object.

    Parameters
    ----------
    yml : str
        :mod:`yaml` representation of |Cosmology| object

    Returns
    -------
    `~astropy.cosmology.Cosmology` subclass instance
    """
    with u.add_enabled_units(cu):
        return load(yml)
Example #18
def test_yaml_constructor():
    """Test :func:`~astropy.cosmology.io.yaml.yaml_constructor`."""
    # test function `constructor`
    constructor = yaml_constructor(FlatLambdaCDM)
    assert callable(constructor)

    # it's too hard to manually construct a node, so we only test dump/load
    # this is also a good round-trip test
    yml = dump(Planck18)
    with u.add_enabled_units(cu):  # needed for redshift units
        cosmo = load(yml)
    assert isinstance(cosmo, FlatLambdaCDM)
    assert cosmo == Planck18
    assert cosmo.meta == Planck18.meta
Example #19
def calc_sense(
    configfile,
    array_file,
    direc,
    fname,
    thermal,
    samplevar,
    write_significance,
    plot,
    plot_title,
    prefix,
):
    """Calculate the sensitivity of an array.

    This is the primary command of 21cmSense, and can be run independently for a
    complete sensitivity calculation.
    """
    # If given an array-file, overwrite the "observation" parameter
    # in the config with the pickled array file, which has already
    # calculated the uv_coverage, hopefully.
    if array_file is not None:
        with open(configfile) as fl:
            cfg = yaml.load(fl)
        cfg["observation"] = path.abspath(array_file)

        configfile = tempfile.mktemp()
        with open(configfile, "w") as fl:
            yaml.dump(cfg, fl)

    sensitivity = sense.PowerSpectrum.from_yaml(configfile)
    logger.info(
        f"Used {len(sensitivity.k1d)} bins between "
        f"{sensitivity.k1d.min()} and {sensitivity.k1d.max()}"
    )
    sensitivity.write(filename=fname, thermal=thermal, sample=samplevar, prefix=prefix)

    if write_significance:
        sig = sensitivity.calculate_significance(thermal=thermal, sample=samplevar)
        logger.info(f"Significance of detection: {sig}")

    if plot and HAVE_MPL:
        fig = sensitivity.plot_sense_1d(thermal=thermal, sample=samplevar)
        if plot_title:
            plt.title(plot_title)
        prefix + "_" if prefix else ""
        fig.savefig(
            f"{prefix}{sensitivity.foreground_model}_"
            f"{sensitivity.observation.frequency:.3f}.png"
        )
Example #20
    def open_summary(self, summary_file):
        with open(summary_file) as f:
            summary = yaml.load(f)
            # summary = yaml.full_load(f)
        self.z = summary['redshift']
        self.galaxy_name = summary['galaxyname']
        self.data = np.array(summary['transitions']['Ly alpha']['data'])
        self.errs = np.array(summary['transitions']['Ly alpha']['errs'])
        self.wave = np.array(summary['transitions']['Ly alpha']['wave'])
        self.mask = np.array(summary['transitions']['Ly alpha']['mask'])
        try:
            self._cfp = summary['transitions']['Ly alpha']['cont_fit_params']
        except KeyError:
            self._cfp = summary['transitions']['Ly alpha'][
                'continuum_fit_params']
        self._cont = self.wave * self._cfp['slope'] + self._cfp['intercept']
Example #21
    def _loadCorsikaParametersFile(self, filename):
        '''
        Load CORSIKA parameters from a given file (filename not None),
        or from the default parameter file provided in the
        data directory (filename is None).
        '''
        if filename is not None:
            # User provided file.
            self._corsikaParametersFile = filename
        else:
            # Default file from data directory.
            self._corsikaParametersFile = io.getDataFile(
                'corsika', 'corsika_parameters.yml')
        self._logger.debug('Loading CORSIKA parameters from file {}'.format(
            self._corsikaParametersFile))
        with open(self._corsikaParametersFile, 'r') as f:
            self._corsikaParameters = yaml.load(f)
Example #22
def sensitivity_config_defined_p21(tmpdirec, observation_config,
                                   sensitivity_config):
    with open(sensitivity_config) as fl:
        sensitivity = yaml.load(fl)

    pfile = path.join(
        example_configs,
        "../py21cmsense/data/ps_no_halos_nf0.521457_z9.50_useTs0_zetaX-1.0e+00_200_400Mpc_v2",
    )

    sensitivity["observation"] = observation_config

    with open(path.join(tmpdirec, "sensitivity_with_p21.yml"), "w") as fl:
        dump(sensitivity, fl)

        fl.write(f"p21: !txt {pfile}\n")

    return path.join(tmpdirec, "sensitivity_with_p21.yml")
Example #23
def test_ndarray_subclasses(c):
    cy = load(dump(c))

    assert np.all(c == cy)
    assert c.shape == cy.shape
    assert type(c) is type(cy)

    cc = 'C_CONTIGUOUS'
    fc = 'F_CONTIGUOUS'
    if c.flags[cc] or c.flags[fc]:
        assert c.flags[cc] == cy.flags[cc]
        assert c.flags[fc] == cy.flags[fc]
    else:
        # Original was not contiguous but round-trip version
        # should be c-contig.
        assert cy.flags[cc]

    if hasattr(c, 'unit'):
        assert c.unit == cy.unit
Example #24
def run_pipeline(config_file, jobfile, apus_args=None):
    jobkey = os.path.splitext(os.path.basename(jobfile))[0]
    logger = logging.getLogger(jobkey)
    logger.info("run pipeline {}".format(jobkey))

    # load config file
    with open(config_file, 'r') as fo:
        logger.info("use config file {}".format(config_file))
        # logger.info("\n\n{}\n".format(fo.read()))
        fo.seek(0)
        config = yaml.load(fo)

    # create jobdir
    workdir = config['workdir']
    jobdir = os.path.join(workdir, jobkey)
    if os.path.exists(jobdir):
        logger.warning("use existing jobdir {}".format(jobdir))
    else:
        os.makedirs(jobdir)
        logger.info("create job inputs dir {}".format(jobdir))

    config['jobdir'] = jobdir
    config['jobfile'] = jobfile
    config['jobkey'] = jobkey
    config['skymask_dir'] = jobdir + ".skymask"
    config['bpmask_dir'] = jobdir + ".bpmask"

    logger.info("arguments passed to Apus {}".format(apus_args))
    logger.propagate = False
    # call apus
    apuscore.bootstrap(
        dict(
            jobkey=jobkey,
            jobdir=jobdir,
            logdir=config['logdir'],
            env_overrides={
                'path_prefix': config['astromatic_prefix'],
                'tmpdir': config['tmpdir']
            },
            tlist=pipeline.get_tlist(config),
        ), apus_args)
Example #25
def collectDataFromYamlOrDict(inYaml, inDict, allowEmpty=False):
    '''
    Collect input data that can be given either as a dict
    or as a yaml file.

    Parameters
    ----------
    inYaml: str
        Name of the Yaml file.
    inDict: dict
        Data as dict.
    allowEmpty: bool
        If True, an error won't be raised in case both yaml and dict are None.

    Returns
    -------
    data: dict
        Data as dict.
    '''
    _logger = logging.getLogger(__name__)

    if inYaml is not None:
        if inDict is not None:
            _logger.warning(
                'Both inDict and inYaml were given - inYaml will be used')
        with open(inYaml) as file:
            data = yaml.load(file)
        return data
    elif inDict is not None:
        return dict(inDict)
    else:
        msg = 'configData has not been provided (neither by yaml file nor by dict)'
        if allowEmpty:
            _logger.warning(msg)
            return None
        else:
            _logger.error(msg)
            raise InvalidConfigData(msg)
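A brief usage sketch for the helper above; the config key is hypothetical.

# Passing only a dict returns a copy of it; passing both would log a warning
# and prefer the yaml file; passing neither raises InvalidConfigData unless
# allowEmpty is True.
data = collectDataFromYamlOrDict(inYaml=None, inDict={'site': 'North'}, allowEmpty=False)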
Example #26
    def from_yaml(cls, yaml_file):
        """Construct an :class:`Observation` from a YAML file."""
        if isinstance(yaml_file, str):
            with open(yaml_file) as fl:
                data = yaml.load(fl)
        elif isinstance(yaml_file, collections.abc.Mapping):
            data = yaml_file
        else:
            raise ValueError(
                "yaml_file must be a string filepath or a raw dict from such a file."
            )

        if (
            isinstance(data["observatory"], str)
            and isinstance(yaml_file, str)
            and not path.isabs(data["observatory"])
        ):
            data["observatory"] = path.join(
                path.dirname(yaml_file), data["observatory"]
            )

        observatory = obs.Observatory.from_yaml(data.pop("observatory"))
        return cls(observatory=observatory, **data)
Example #27
def test_unit(c):
    cy = load(dump(c))
    if isinstance(c, u.CompositeUnit):
        assert c == cy
    else:
        assert c is cy
Example #28
def test_numpy_types(c):
    cy = load(dump(c))
    assert c == cy
Example #29
def test_serialized_column():
    sc = SerializedColumn({'name': 'hello', 'other': 1, 'other2': 2.0})
    scy = load(dump(sc))

    assert sc == scy
Example #30
def test_time():
    t = _get_time()
    ty = load(dump(t))
    compare_time(t, ty)