Example #1
0
def remove_filters_from_files(catfile, physgrid, obsgrid, outbase, rm_filters):

    # remove the requested filters from the catalog file
    cat = Table.read(catfile)
    for cfilter in rm_filters:
        colname = "{}_rate".format(cfilter)
        if colname in cat.colnames:
            cat.remove_column(colname)
        else:
            print("{} not in catalog file".format(colname))
    cat.write("{}_cat.fits".format(outbase), overwrite=True)

    # get the sed grid and process
    g0 = FileSEDGrid(physgrid, backend="cache")
    filters = g0.header["filters"].split(" ")
    shortfilters = [(cfilter.split("_"))[-1].lower() for cfilter in filters]
    nlamb = []
    nfilters = []
    rindxs = []
    for csfilter, clamb, cfilter in zip(shortfilters, g0.lamb, filters):
        if csfilter not in rm_filters:
            nlamb.append(clamb)
            nfilters.append(cfilter)
        else:
            rindxs.append(shortfilters.index(csfilter))
    nseds = np.delete(g0.seds, rindxs, 1)

    print("orig filters: {}".format(" ".join(filters)))
    print(" new filters: {}".format(" ".join(nfilters)))

    g = SpectralGrid(np.array(nlamb),
                     seds=nseds,
                     grid=g0.grid,
                     backend="memory")
    g.grid.header["filters"] = " ".join(nfilters)
    g.writeHDF("{}_sed.grid.hd5".format(outbase))

    # get and process the observation model
    obsgrid = noisemodel.get_noisemodelcat(obsgrid)
    with tables.open_file("{}_noisemodel.grid.hd5".format(outbase),
                          "w") as outfile:
        outfile.create_array(outfile.root, "bias",
                             np.delete(obsgrid.root.bias, rindxs, 1))
        outfile.create_array(outfile.root, "error",
                             np.delete(obsgrid.root.error, rindxs, 1))
        outfile.create_array(
            outfile.root,
            "completeness",
            np.delete(obsgrid.root.completeness, rindxs, 1),
        )
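
# Hedged usage sketch for the function above. The file names and filter string
# are placeholders (not files shipped with this snippet); rm_filters entries
# must be the short, lowercase filter names because the function compares them
# against cfilter.split("_")[-1].lower(), and the catalog is assumed to have
# matching "<filter>_rate" columns.
remove_filters_from_files(
    "phat_small_cat.fits",        # photometry catalog (assumed name)
    "phat_seds.grid.hd5",         # physics (SED) model grid (assumed name)
    "phat_noisemodel.grid.hd5",   # observation/noise model grid (assumed name)
    "phat_small_nof275w",         # prefix for the three output files
    ["f275w"],                    # filters to remove (short lowercase names)
)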
Example #2
0
def remove_filters_from_files(catfile,
                              physgrid,
                              obsgrid,
                              outbase,
                              rm_filters):

    # remove the requested filters from the catalog file
    cat = Table.read(catfile)
    for cfilter in rm_filters:
        colname = '{}_rate'.format(cfilter)
        if colname in cat.colnames:
            cat.remove_column(colname)
        else:
            print('{} not in catalog file'.format(colname))
    cat.write('{}_cat.fits'.format(outbase), overwrite=True)

    # get the sed grid and process
    g0 = FileSEDGrid(physgrid, backend='cache')
    filters = g0.header['filters'].split(' ')
    shortfilters = [(cfilter.split('_'))[-1].lower() for cfilter in filters]
    nlamb = []
    nfilters = []
    rindxs = []
    for csfilter, clamb, cfilter in zip(shortfilters, g0.lamb, filters):
        if csfilter not in rm_filters:
            nlamb.append(clamb)
            nfilters.append(cfilter)
        else:
            rindxs.append(shortfilters.index(csfilter))
    nseds = np.delete(g0.seds, rindxs, 1)

    print('orig filters: {}'.format(' '.join(filters)))
    print(' new filters: {}'.format(' '.join(nfilters)))

    g = SpectralGrid(np.array(nlamb), seds=nseds,
                     grid=g0.grid, backend='memory')
    g.grid.header['filters'] = ' '.join(nfilters)
    g.writeHDF('{}_sed.grid.hd5'.format(outbase))

    # get and process the observation model
    obsgrid = noisemodel.get_noisemodelcat(obsgrid)
    with tables.open_file('{}_noisemodel.grid.hd5'.format(outbase), 'w') \
            as outfile:
        outfile.create_array(outfile.root, 'bias',
                             np.delete(obsgrid.root.bias, rindxs, 1))
        outfile.create_array(outfile.root, 'error',
                             np.delete(obsgrid.root.error, rindxs, 1))
        outfile.create_array(outfile.root, 'completeness',
                             np.delete(obsgrid.root.completeness, rindxs, 1))
Example #3
0
    def test_make_extinguished_sed_grid(self):
        """
        Generate the extinguished SED grid using a cached version of the
        spectral grid with priors and compare the result to a cached version.
        """

        g_pspec = SpectralGrid(self.priors_fname_cache, backend="memory")

        # generate the SED grid by integrating the filter response functions
        #   effect of dust extinction applied before filter integration
        #   also computes the dust priors as weights
        seds_fname = tempfile.NamedTemporaryFile(suffix=".hd5").name
        infoname = tempfile.NamedTemporaryFile(suffix=".asdf").name
        (seds_fname, g) = make_extinguished_sed_grid(
            "test",
            g_pspec,
            self.settings.filters,
            seds_fname=seds_fname,
            extLaw=self.settings.extLaw,
            av=self.settings.avs,
            rv=self.settings.rvs,
            fA=self.settings.fAs,
            rv_prior_model=self.settings.rv_prior_model,
            av_prior_model=self.settings.av_prior_model,
            fA_prior_model=self.settings.fA_prior_model,
            add_spectral_properties_kwargs=self.settings.add_spectral_properties_kwargs,
            info_fname=infoname,
        )

        # compare the new to the cached version
        compare_hdf5(self.seds_fname_cache, seds_fname)
Example #4
0
def test_make_extinguished_sed_grid():

    # download the needed files
    priors_fname = download_rename("beast_example_phat_spec_w_priors.grid.hd5")
    filter_fname = download_rename("filters.hd5")

    # download cached version of sed grid
    seds_fname_cache = download_rename("beast_example_phat_seds.grid.hd5")

    ################
    # generate the same extinguished SED grid from the code

    # Add in the filters
    filters = [
        "HST_WFC3_F275W",
        "HST_WFC3_F336W",
        "HST_ACS_WFC_F475W",
        "HST_ACS_WFC_F814W",
        "HST_WFC3_F110W",
        "HST_WFC3_F160W",
    ]
    add_spectral_properties_kwargs = dict(filternames=filters)

    g_pspec = SpectralGrid(priors_fname, backend="memory")

    # generate the SED grid by integrating the filter response functions
    #   effect of dust extinction applied before filter integration
    #   also computes the dust priors as weights
    seds_fname = "/tmp/beast_example_phat_sed.grid.hd5"
    seds_fname, g_seds = make_extinguished_sed_grid(
        "test",
        g_pspec,
        filters,
        seds_fname=seds_fname,
        filterLib=filter_fname,
        extLaw=extinction.Gordon16_RvFALaw(),
        av=[0.0, 10.055, 1.0],
        rv=[2.0, 6.0, 1.0],
        fA=[0.0, 1.0, 0.25],
        av_prior_model={"name": "flat"},
        rv_prior_model={"name": "flat"},
        fA_prior_model={"name": "flat"},
        add_spectral_properties_kwargs=add_spectral_properties_kwargs,
    )

    # compare the new to the cached version
    compare_hdf5(seds_fname_cache, seds_fname)
Example #5
0
def test_splinter_noisemodel(frac_unc):

    # make super simplified model SED grid
    lamb = np.linspace(1000.0, 4000, 4)
    seds = np.logspace(-4, -3, 4)[None, :] * np.array([1, 1.5])[:, None]

    modelsedgrid = SpectralGrid(
        lamb=lamb, seds=seds, grid=[1], backend="memory"  # dummy input for now
    )

    # make splinter noisemodel
    noise_fname = "/tmp/splinter_example_noisemodel_{:.2f}.grid.hd5".format(frac_unc)
    make_splinter_noise_model(noise_fname, modelsedgrid, frac_unc=frac_unc)

    # read entire noisemodel back in
    noisemodel = h5py.File(noise_fname, "r")

    # read the estimated sigma and check if close to manually computed errors
    sigma = noisemodel["error"]

    np.testing.assert_allclose(sigma, frac_unc * seds)
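
# The test above takes frac_unc as an argument, which suggests it is driven by
# a pytest parametrize decorator in the original source. A plausible, assumed
# driver is sketched below; the specific frac_unc values are illustrative.
import pytest


@pytest.mark.parametrize("frac_unc", [0.10, 0.20])
def test_splinter_noisemodel_param(frac_unc):
    # delegate to the test body defined above for each fractional uncertainty
    test_splinter_noisemodel(frac_unc)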
Example #6
0
    def test_add_stellar_priors_to_spectral_grid(self):
        """
        Add the stellar priors to the a cached spectral grid and compare
        it to the cached version.
        """
        specgrid = SpectralGrid(self.spec_fname_cache, backend="memory")

        priors_fname = tempfile.NamedTemporaryFile(suffix=".hd5").name
        infoname = tempfile.NamedTemporaryFile(suffix=".asdf").name
        priors_fname, g = add_stellar_priors(
            "test",
            specgrid,
            priors_fname=priors_fname,
            age_prior_model=self.settings.age_prior_model,
            mass_prior_model=self.settings.mass_prior_model,
            met_prior_model=self.settings.met_prior_model,
            distance_prior_model=self.settings.distance_prior_model,
            info_fname=infoname,
        )

        # compare the new to the cached version
        compare_hdf5(self.priors_fname_cache, priors_fname)
Example #7
0
def test_add_stellar_priors_to_spectral_grid():

    # download the needed files
    gspec_fname = download_rename("beast_example_phat_spec_grid.hd5")

    # download cached version of spectral grid with priors
    priors_fname_cache = download_rename(
        "beast_example_phat_spec_w_priors.grid.hd5")

    ###############
    # generate the spectral grid with stellar priors from the code

    gspec_fname = "/tmp/beast_example_phat_spec_grid.hd5"
    specgrid = SpectralGrid(gspec_fname, backend="memory")

    priors_fname = "/tmp/beast_example_phat_spec_w_priors.grid.hd5"
    priors_fname, g = add_stellar_priors("test",
                                         specgrid,
                                         priors_fname=priors_fname)

    # compare the new to the cached version
    compare_hdf5(priors_fname_cache, priors_fname)
Example #8
0
        def gen_subgrid(i, sub_name):
            sub_g_pspec = SpectralGrid(sub_name)
            sub_seds_fname = "{}seds.gridsub{}.hd5".format(file_prefix, i)

            # generate the SED grid by integrating the filter response functions
            #   effect of dust extinction applied before filter integration
            #   also computes the dust priors as weights
            (sub_seds_fname, sub_g_seds) = make_extinguished_sed_grid(
                datamodel.project,
                sub_g_pspec,
                datamodel.filters,
                extLaw=datamodel.extLaw,
                av=datamodel.avs,
                rv=datamodel.rvs,
                fA=datamodel.fAs,
                rv_prior_model=datamodel.rv_prior_model,
                av_prior_model=datamodel.av_prior_model,
                fA_prior_model=datamodel.fA_prior_model,
                add_spectral_properties_kwargs=extra_kwargs,
                seds_fname=sub_seds_fname,
            )

            return sub_seds_fname
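
        # Hedged sketch of how gen_subgrid might be driven in the enclosing
        # scope: loop over the spectral-grid subfiles and collect the SED
        # subgrid names it returns. sub_spec_fnames and n_subgrids are
        # illustrative assumptions; file_prefix is the same variable already
        # used inside the function body above.
        sub_spec_fnames = [
            "{}spec.gridsub{}.hd5".format(file_prefix, i)
            for i in range(n_subgrids)
        ]
        sub_seds_fnames = [
            gen_subgrid(i, sub_name) for i, sub_name in enumerate(sub_spec_fnames)
        ]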
Example #9
0
def remove_filters_from_files(
    catfile,
    physgrid=None,
    obsgrid=None,
    outbase=None,
    physgrid_outfile=None,
    rm_filters=None,
    beast_filt=None,
):
    """
    Remove filters from catalog, physics grid, and/or obsmodel grid.  This has
    two primary use cases:

    1. When making simulated observations, you want to test how your fit quality
       changes with different combinations of filters.  In that case, put in
       files for both `physgrid` and `obsgrid`.  Set `rm_filters` to the
       filter(s) you wish to remove, and they will be removed both from those
       and from the catalog file.  The three new files will be output with the
       name prefix set in `outbase`.

    2. When running the BEAST, you have a master physics model grid with all
       filters present in the survey, but some fields don't have observations in
       all of those filters.  In that case, put the master grid in `physgrid`
       and set `rm_filters` to None.  The catalog will be used to determine the
       filters to remove (if any).  `obsgrid` should be left as None, because in
       this use case, the obsmodel grid has not yet been generated.  The output
       physics model grid will be named using the filename in `physgrid_outfile`
       (if given) or with the prefix in `outbase`.


    Parameters
    ----------
    catfile : string
        file name of photometry catalog

    physgrid : string (default=None)
        If set, remove filters from this physics model grid

    obsgrid : string (default=None)
        If set, remove filters from this obsmodel grid

    outbase : string (default=None)
        Path+file to prepend to all output file names.  Useful for case 1 above.

    physgrid_outfile : string (default=None)
        Path+name of the output physics model grid.  Useful for case 2 above.

    rm_filters : string or list of strings (default=None)
        If set, these are the filters to remove from all of the files.  If not
        set, only the filters present in catfile will be retained in physgrid
        and/or obsgrid.

    beast_filt : list of strings
        Sometimes there is ambiguity in the filter name (e.g., the grid has
        both HST_ACS_WFC_F475W and HST_WFC3_F475W, and the filter name is
        F475W).  Set this to the BEAST filter name to resolve any
        ambiguities.  For example, ['HST_WFC3_F475W', 'HST_WFC3_F814W'] ensures
        that these are the names used for F475W and F814W.

    """

    # read in the photometry catalog
    cat = Table.read(catfile)

    # if rm_filters set, remove the requested filters from the catalog
    if rm_filters is not None:
        for cfilter in np.atleast_1d(rm_filters):
            colname = "{}_rate".format(cfilter)
            if colname.upper() in cat.colnames:
                cat.remove_column(colname.upper())
            elif colname.lower() in cat.colnames:
                cat.remove_column(colname.lower())
            else:
                print("{} not in catalog file".format(colname))
        cat.write("{}_cat.fits".format(outbase), overwrite=True)

    # if rm_filters not set, extract the filter names that are present
    if rm_filters is None:
        cat_filters = [f[:-5].upper() for f in cat.colnames if f[-4:].lower() == "rate"]

    # if beast_filt is set, make a list of the short versions
    if beast_filt is not None:
        beast_filt_short = [(f.split("_"))[-1].upper() for f in beast_filt]

    # if physgrid set, process the SED grid
    if physgrid is not None:

        # read in the sed grid
        g0 = FileSEDGrid(physgrid, backend="cache")

        # extract info
        filters = g0.header["filters"].split(" ")
        shortfilters = [(cfilter.split("_"))[-1].upper() for cfilter in filters]
        rindxs = []
        rgridcols = []

        # loop through filters and determine what needs deleting
        for csfilter, cfilter in zip(shortfilters, filters):

            # --------------------------
            # if the user chose the filters to remove
            if rm_filters is not None:

                # if the current filter is in the list of filters to remove
                if csfilter in np.atleast_1d(rm_filters):

                    # if there's a list of BEAST instrument+filter references
                    if beast_filt is not None:

                        # if the current filter is in the list of BEAST references
                        if csfilter in beast_filt_short:

                            # if it's the same instrument, delete it
                            # (if it's not the same instrument, keep it)
                            if beast_filt[beast_filt_short.index(csfilter)] == cfilter:
                                rindxs.append(filters.index(cfilter))
                                for grid_col in g0.grid.colnames:
                                    if cfilter in grid_col:
                                        rgridcols.append(grid_col)

                        # if the current filter isn't in the BEAST ref list, delete it
                        else:
                            rindxs.append(filters.index(cfilter))
                            for grid_col in g0.grid.colnames:
                                if cfilter in grid_col:
                                    rgridcols.append(grid_col)

                    # if there isn't a list of BEAST refs, delete it
                    else:
                        rindxs.append(filters.index(cfilter))
                        for grid_col in g0.grid.colnames:
                            if cfilter in grid_col:
                                rgridcols.append(grid_col)

            # --------------------------
            # if the removed filters are determined from the catalog file
            if rm_filters is None:

                # if the current filter is present in the catalog filters
                if csfilter in cat_filters:

                    # if there's a list of BEAST instrument+filter references
                    # (if there isn't a list of BEAST refs, keep it)
                    if beast_filt is not None:

                        # if the current filter is in the list of BEAST references
                        # (if the current filter isn't in the BEAST ref list, keep it)
                        if csfilter in beast_filt_short:

                            # if it's not the same instrument, delete it
                            # (if it's the same instrument, keep it)
                            if beast_filt[beast_filt_short.index(csfilter)] != cfilter:
                                rindxs.append(filters.index(cfilter))
                                for grid_col in g0.grid.colnames:
                                    if cfilter in grid_col:
                                        rgridcols.append(grid_col)

                # if the current filter isn't in the catalog filters, delete it
                else:
                    rindxs.append(filters.index(cfilter))
                    for grid_col in g0.grid.colnames:
                        if cfilter in grid_col:
                            rgridcols.append(grid_col)

        # delete column(s)
        nseds = np.delete(g0.seds, rindxs, 1)
        nlamb = np.delete(g0.lamb, rindxs, 0)
        nfilters = np.delete(filters, rindxs, 0)
        for rcol in rgridcols:
            g0.grid.delCol(rcol)

        print("orig filters: {}".format(" ".join(filters)))
        print(" new filters: {}".format(" ".join(nfilters)))

        # save the modified grid
        g = SpectralGrid(np.array(nlamb), seds=nseds, grid=g0.grid, backend="memory")
        g.grid.header["filters"] = " ".join(nfilters)
        if physgrid_outfile is not None:
            g.writeHDF(physgrid_outfile)
        elif outbase is not None:
            g.writeHDF("{}_seds.grid.hd5".format(outbase))
        else:
            raise ValueError("Need to set either outbase or physgrid_outfile")

    # if obsgrid set, process the observation model
    if obsgrid is not None:
        obsgrid = noisemodel.get_noisemodelcat(obsgrid)
        with tables.open_file("{}_noisemodel.grid.hd5".format(outbase), "w") as outfile:
            outfile.create_array(
                outfile.root, "bias", np.delete(obsgrid["bias"], rindxs, 1)
            )
            outfile.create_array(
                outfile.root, "error", np.delete(obsgrid["error"], rindxs, 1)
            )
            outfile.create_array(
                outfile.root,
                "completeness",
                np.delete(obsgrid["completeness"], rindxs, 1),
            )
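
# Hedged usage sketches for the two cases described in the docstring above.
# All file names are placeholders, not real data files.

# Case 1: simulated observations -- remove F275W from the catalog, the physics
# grid, and the obsmodel grid, writing all three with a common prefix.
remove_filters_from_files(
    "sim_cat.fits",
    physgrid="master_seds.grid.hd5",
    obsgrid="master_noisemodel.grid.hd5",
    outbase="sim_nof275w",
    rm_filters="F275W",
)

# Case 2: trim a master physics grid down to the filters present in a field's
# catalog (rm_filters=None, so the catalog columns decide), resolving the
# F475W ambiguity to the WFC3 version via beast_filt.
remove_filters_from_files(
    "field12_cat.fits",
    physgrid="master_seds.grid.hd5",
    physgrid_outfile="field12_seds.grid.hd5",
    beast_filt=["HST_WFC3_F475W"],
)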
Example #10
0
def make_extinguished_grid(
    spec_grid,
    filter_names,
    extLaw,
    avs,
    rvs,
    fAs=None,
    av_prior_model={"name": "flat"},
    rv_prior_model={"name": "flat"},
    fA_prior_model={"name": "flat"},
    chunksize=0,
    add_spectral_properties_kwargs=None,
    absflux_cov=False,
    filterLib=None,
):
    """
    Extinguish spectra and extract an SEDGrid through given series of filters
    (all wavelengths in stellar SEDs and filter response functions are assumed
    to be in Angstroms)

    Parameters
    ----------
    spec_grid: string or grid.SpectralGrid
        if a string, spec_grid is the filename of the grid file with the
        stellar spectra; the least intrusive backend is used to load it
        ('hdf' if possible, 'cache' otherwise)

        if not a string, the corresponding SpectralGrid instance is expected
        (backend already set up)

    filter_names: list
        list of filter names according to the filter lib

    avs: sequence
        Av values to iterate over

    av_prior_model: list
        list including prior model name and parameters

    rvs: sequence
        Rv values to iterate over

    rv_prior_model: list
        list including prior model name and parameters

    fAs: sequence (optional)
        f_A values to iterate over
        f_A can be omitted if the extinction law does not use it or allows
        fixed values

    fA_prior_model: list
        list including prior model name and parameters

    chunksize: int, optional (default=0)
        number of extinction model variations to generate per iteration;
        each chunk therefore contains len(spec_grid) * chunksize models.
        If chunksize <= 0, all models are returned at once.

    filterLib:  str
        full filename to the filter library hd5 file

    add_spectral_properties_kwargs: dict
        keyword arguments to call :func:`add_spectral_properties` at each
        iteration to add model properties from the spectra into the grid
        property table

    absflux_cov: boolean
        set to calculate the absflux covariance matrices for each model
        (can be very slow!!!  But it is the right thing to do)

    Yields
    ------
    g: grid.SpectralGrid
        grid of reddened SEDs and models for each chunk
    """
    # Check inputs
    # ============
    # get the stellar grid (no dust yet)
    # if string is provided try to load the most memory efficient backend
    # otherwise use a cache-type backend (load only when needed)
    if isinstance(spec_grid, str):
        ext = spec_grid.split(".")[-1]
        if ext in ["hdf", "hd5", "hdf5"]:
            g0 = SpectralGrid(spec_grid, backend="hdf")
        else:
            g0 = SpectralGrid(spec_grid, backend="cache")
    else:
        helpers.type_checker("spec_grid", spec_grid, SpectralGrid)
        g0 = spec_grid

    # Tag fA usage
    if fAs is None:
        with_fA = False
    else:
        with_fA = True

    # get the min/max R(V) values necessary for the grid point definition
    min_Rv = min(rvs)
    max_Rv = max(rvs)

    # Create the sampling mesh
    # ========================
    # basically the dot product from all input 1d vectors
    # set up iteration over the full dust parameter grid
    if with_fA:
        dustpriors = PriorWeightsDust(avs, av_prior_model, rvs, rv_prior_model,
                                      fAs, fA_prior_model)

        it = np.nditer(np.ix_(avs, rvs, fAs))
        niter = np.size(avs) * np.size(rvs) * np.size(fAs)
        npts, pts = _make_dust_fA_valid_points_generator(it, min_Rv, max_Rv)

        # Pet the user
        print("""number of initially requested points = {0:d}
              number of valid points = {1:d} (based on restrictions in R(V)
                 versus f_A plane)
              """.format(niter, npts))

        if npts == 0:
            raise AttributeError("No valid points")
    else:
        dustpriors = PriorWeightsDust(avs, av_prior_model, rvs, rv_prior_model,
                                      [1.0], fA_prior_model)

        it = np.nditer(np.ix_(avs, rvs))
        npts = np.size(avs) * np.size(rvs)
        pts = ((float(ak), float(rk)) for ak, rk in it)

    # Generate the Grid
    # =================
    N0 = len(g0.grid)
    N = N0 * npts

    if chunksize <= 0:
        print("Generating a final grid of {0:d} points".format(N))
    else:
        print("Generating a final grid of {0:d} points in {1:d}" +
              " pieces".format(N, int(float(N0) / chunksize + 1.0)))

    if chunksize <= 0:
        chunksize = npts

    if add_spectral_properties_kwargs is not None:
        nameformat = add_spectral_properties_kwargs.pop("nameformat",
                                                        "{0:s}") + "_wd"

    for chunk_pts in helpers.chunks(pts, chunksize):
        # iter over chunks of models

        # setup chunk outputs
        cols = {"Av": np.empty(N, dtype=float), "Rv": np.empty(N, dtype=float)}

        if with_fA:
            cols["Rv_A"] = np.empty(N, dtype=float)
            cols["f_A"] = np.empty(N, dtype=float)

        keys = list(g0.keys())
        for key in keys:
            cols[key] = np.empty(N, dtype=float)

        n_filters = len(filter_names)
        _seds = np.empty((N, n_filters), dtype=float)
        if absflux_cov:
            n_offdiag = ((n_filters**2) - n_filters) // 2
            _cov_diag = np.empty((N, n_filters), dtype=float)
            _cov_offdiag = np.empty((N, n_offdiag), dtype=float)

        for count, pt in enumerate(tqdm(chunk_pts, desc="SED grid")):

            if with_fA:
                Av, Rv, f_A = pt
                dust_prior_weight = dustpriors.get_weight(Av, Rv, f_A)
                Rv_MW = extLaw.get_Rv_A(Rv, f_A)
                r = g0.applyExtinctionLaw(extLaw,
                                          Av=Av,
                                          Rv=Rv,
                                          f_A=f_A,
                                          inplace=False)
                # add extra "spectral bands" if requested
                if add_spectral_properties_kwargs is not None:
                    r = add_spectral_properties(
                        r,
                        nameformat=nameformat,
                        filterLib=filterLib,
                        **add_spectral_properties_kwargs)
                temp_results = r.getSEDs(filter_names, filterLib=filterLib)
                # adding the dust parameters to the models
                cols["Av"][N0 * count:N0 * (count + 1)] = Av
                cols["Rv"][N0 * count:N0 * (count + 1)] = Rv
                cols["f_A"][N0 * count:N0 * (count + 1)] = f_A
                cols["Rv_A"][N0 * count:N0 * (count + 1)] = Rv_MW

            else:
                Av, Rv = pt
                dust_prior_weight = dustpriors.get_weight(Av, Rv, 1.0)
                r = g0.applyExtinctionLaw(extLaw, Av=Av, Rv=Rv, inplace=False)

                if add_spectral_properties_kwargs is not None:
                    r = add_spectral_properties(
                        r,
                        nameformat=nameformat,
                        filterLib=filterLib,
                        **add_spectral_properties_kwargs)
                temp_results = r.getSEDs(filter_names, filterLib=filterLib)
                # adding the dust parameters to the models
                cols["Av"][N0 * count:N0 * (count + 1)] = Av
                cols["Rv"][N0 * count:N0 * (count + 1)] = Rv

            # get new attributes if exist
            for key in list(temp_results.grid.keys()):
                if key not in keys:
                    k1 = N0 * count
                    k2 = N0 * (count + 1)
                    cols.setdefault(key, np.empty(
                        N, dtype=float))[k1:k2] = temp_results.grid[key]

            # compute the fractional absflux covariance matrices
            if absflux_cov:
                absflux_covmats = calc_absflux_cov_matrices(
                    r, temp_results, filter_names)
                _cov_diag[N0 * count:N0 * (count + 1)] = absflux_covmats[0]
                _cov_offdiag[N0 * count:N0 * (count + 1)] = absflux_covmats[1]

            # assign the extinguished SEDs to the output object
            _seds[N0 * count:N0 * (count + 1)] = temp_results.seds[:]

            # copy the rest of the parameters
            for key in keys:
                cols[key][N0 * count:N0 * (count + 1)] = g0.grid[key]

            # multiply existing prior weights by the dust prior weight
            cols["weight"][N0 * count:N0 * (count + 1)] *= dust_prior_weight
            cols["prior_weight"][N0 * count:N0 *
                                 (count + 1)] *= dust_prior_weight

            if count == 0:
                cols["lamb"] = temp_results.lamb[:]

        _lamb = cols.pop("lamb")

        # free the memory of temp_results
        # del temp_results
        # del tempgrid

        # Ship
        if absflux_cov:
            g = SpectralGrid(
                _lamb,
                seds=_seds,
                cov_diag=_cov_diag,
                cov_offdiag=_cov_offdiag,
                grid=Table(cols),
                backend="memory",
            )
        else:
            g = SpectralGrid(_lamb,
                             seds=_seds,
                             grid=Table(cols),
                             backend="memory")

        g.grid.header["filters"] = " ".join(filter_names)

        yield g
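
# Hedged sketch of consuming the chunked generator above: each yielded
# SpectralGrid chunk is appended to a single output file, following the
# write(..., append=True) pattern used by make_spectral_grid later in these
# examples. g_pspec (a spectral grid with priors), filters, and the dust
# parameter vectors are illustrative assumptions.
extgrid_chunks = make_extinguished_grid(
    g_pspec,
    filters,
    extinction.Gordon16_RvFALaw(),
    np.linspace(0.0, 2.0, 5),          # Av values to iterate over
    np.linspace(2.0, 6.0, 5),          # Rv values to iterate over
    fAs=np.linspace(0.0, 1.0, 5),      # f_A values to iterate over
    chunksize=5000,
)
for gk in extgrid_chunks:
    gk.write("/tmp/example_extinguished_seds.grid.hd5", append=True)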
Example #11
0
def apply_distance_grid(specgrid, distances, redshift=0):
    """
    Distances are applied to the spectral grid by copying the grid and
    applying a scaling factor.

    Parameters
    ----------

    specgrid: grid.SpectralGrid object
        spectral grid to transform

    distances: list of float
        Distances at which models should be shifted
        0 means absolute magnitude.
        Expecting pc units

    redshift: float
        Redshift to which wavelengths should be shifted
        Default is 0 (rest frame)
    """
    g0 = specgrid

    # Current length of the grid
    N0 = len(g0.grid)
    N = N0 * len(distances)

    # Make singleton list if a single distance is given
    if not hasattr(distances, "__iter__"):
        _distances = [distances]
    else:
        _distances = distances

    # Add a distance column (filled in for every model, whether one or many
    # distances are given)
    cols = {}
    cols["distance"] = np.empty(N, dtype=float)

    # Existing columns
    keys0 = list(g0.keys())
    for key in keys0:
        cols[key] = np.empty(N, dtype=float)

    n_sed_points = g0.seds.shape[1]
    new_seds = np.empty((N, n_sed_points), dtype=float)

    for count, distance in enumerate(tqdm(_distances, desc="Distance grid")):

        # The range where the current distance points will live
        distance_slice = slice(N0 * count, N0 * (count + 1))

        # The seds default to 10 pc.
        # Therefore, scale them with (d / (10 pc))**(-2).
        distance_pc = distance.to(units.pc).value
        new_seds[distance_slice, :] = g0.seds / (0.1 * distance_pc)**2

        # Fill in the distance in the distance column
        cols["distance"][distance_slice] = distance_pc

        # Copy the old columns
        for key in keys0:
            cols[key][distance_slice] = g0.grid[key]

    # apply redshift
    g0.lamb = g0.lamb * (1.0 + redshift)

    # New object
    g = SpectralGrid(g0.lamb,
                     seds=new_seds,
                     grid=Table(cols),
                     backend="memory")
    return g
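
# Hedged usage sketch for apply_distance_grid: the distances must carry
# astropy length units because the loop above calls distance.to(units.pc).
# The input spectral grid file name and the distance values are placeholders.
from astropy import units

specgrid = SpectralGrid("/tmp/example_spec_grid.hd5", backend="memory")
distances = [10.0, 50.0, 100.0] * units.kpc   # three hypothetical distances
g_dist = apply_distance_grid(specgrid, distances, redshift=0.0)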
Example #12
0
def trim_models(
    sedgrid,
    sedgrid_noisemodel,
    obsdata,
    sed_outname,
    noisemodel_outname,
    sigma_fac=3.0,
    n_detected=4,
    inFlux=True,
    trunchen=False,
):
    """
    For a given set of observations, there will be models that are so
    bright or faint that they will always have ~0 probability of fitting
    the data.  This program trims those models out of the SED grid
    so that time is not spent calculating model points that are always
    zero probability.

    Parameters
    ----------
    sedgrid: grid.SEDgrid instance
        model grid

    sedgrid_noisemodel: beast noisemodel instance
        noise model data

    obsdata: Observation object instance
        observation catalog

    sed_outname: str
        name for output sed file

    noisemodel_outname: str
        name for output noisemodel file

    sigma_fac: float
        factor for trimming the upper and lower range of grid so that
        the model range cuts off sigma_fac above and below the brightest
        and faintest models, respectively (default: 3.)

    n_detected: int
        minimum number of bands where ASTs yielded a detection for
        a given model, if fewer detections than n_detected this model
        gets eliminated (default: 4)

    inFlux: boolean
        if true data are in fluxes (default: True)

    trunchen: boolean
        if true use the trunchen noise model (default: False)
    """
    # Store the brightest and faintest fluxes in each band (for data and ASTs)
    n_filters = len(obsdata.filters)
    min_data = np.zeros(n_filters)
    max_data = np.zeros(n_filters)
    min_models = np.zeros(n_filters)
    max_models = np.zeros(n_filters)
    for k, filtername in enumerate(obsdata.filters):
        sfiltname = obsdata.data.resolve_alias(filtername)
        if inFlux:
            min_data[k] = np.amin(obsdata.data[sfiltname] *
                                  obsdata.vega_flux[k])
            max_data[k] = np.amax(obsdata.data[sfiltname] *
                                  obsdata.vega_flux[k])
        else:
            min_data[k] = np.amin(10**(-0.4 * obsdata.data[sfiltname]) *
                                  obsdata.vega_flux[k])
            max_data[k] = np.amax(10**(-0.4 * obsdata.data[sfiltname]) *
                                  obsdata.vega_flux[k])

        min_models[k] = np.amin(sedgrid.seds[:, k])
        max_models[k] = np.amax(sedgrid.seds[:, k])

    # first remove all models that have any band with fluxes below the
    #    faintest ASTs run
    # when the noisemodel was computed, models with fluxes below the
    #    faintest ASTs were tagged with a negative error/uncertainty
    # identify the models that have been detected in enough bands
    #   the idea here is that if the ASTs are not measured that means
    #   that *none* were recovered and this implies
    #   that no model with these values would be recovered and thus the
    #   probability should always be zero
    model_unc = sedgrid_noisemodel.root.error[:]
    above_ast = model_unc > 0
    sum_above_ast = np.sum(above_ast, axis=1)
    indxs, = np.where(sum_above_ast >= n_detected)

    # cache the noisemodel values
    model_bias = sedgrid_noisemodel.root.bias[:]
    model_unc = np.fabs(sedgrid_noisemodel.root.error[:])
    model_compl = sedgrid_noisemodel.root.completeness[:]
    if trunchen:
        model_q_norm = sedgrid_noisemodel.root.q_norm[:]
        model_icov_diag = sedgrid_noisemodel.root.icov_diag[:]
        model_icov_offdiag = sedgrid_noisemodel.root.icov_offdiag[:]

    if len(indxs) <= 0:
        raise ValueError("no models are brighter than the minimum ASTs run")

    n_ast_indxs = len(indxs)

    # Find models with fluxes (with margin) between faintest and brightest data
    for k in range(n_filters):
        print("working on filter # = ", k)

        # Get upper and lower values for the models given the noise model
        #  sigma_fac defaults to 3.
        model_val = sedgrid.seds[indxs, k] + model_bias[indxs, k]
        model_down = model_val - sigma_fac * model_unc[indxs, k]
        model_up = model_val + sigma_fac * model_unc[indxs, k]

        nindxs, = np.where((model_up >= min_data[k])
                           & (model_down <= max_data[k]))
        if len(nindxs) > 0:
            indxs = indxs[nindxs]

    if len(indxs) == 0:
        raise ValueError("no models that are within the data range")

    print("number of original models = ", len(sedgrid.seds[:, 0]))
    print("number of ast trimmed models = ", n_ast_indxs)
    print("number of trimmed models = ", len(indxs))

    # Save the grid
    print("Writing trimmed sedgrid to disk into {0:s}".format(sed_outname))
    cols = {}
    for key in list(sedgrid.grid.keys()):
        cols[key] = sedgrid.grid[key][indxs]

    # New column to save the index of the model in the full grid
    cols["fullgrid_idx"] = indxs.astype(int)
    g = SpectralGrid(sedgrid.lamb,
                     seds=sedgrid.seds[indxs],
                     grid=Table(cols),
                     backend="memory")
    filternames = obsdata.filters
    g.grid.header["filters"] = " ".join(filternames)

    # trimmed grid name
    g.writeHDF(sed_outname)

    # save the trimmed noise model
    print("Writing trimmed noisemodel to disk into {0:s}".format(
        noisemodel_outname))
    with tables.open_file(noisemodel_outname, "w") as outfile:
        outfile.create_array(outfile.root, "bias", model_bias[indxs])
        outfile.create_array(outfile.root, "error", model_unc[indxs])
        outfile.create_array(outfile.root, "completeness", model_compl[indxs])
        if trunchen:
            outfile.create_array(outfile.root, "q_norm", model_q_norm[indxs])
            outfile.create_array(outfile.root, "icov_diag",
                                 model_icov_diag[indxs])
            outfile.create_array(outfile.root, "icov_offdiag",
                                 model_icov_offdiag[indxs])
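
# Hedged sketch of calling trim_models; every input below is a placeholder.
# The noise model is passed as an open PyTables handle because the function
# reads sedgrid_noisemodel.root.bias / .error / .completeness directly, and
# obsdata stands in for the observation catalog object built elsewhere in the
# pipeline (not defined in this snippet).
import tables

modelsedgrid = SpectralGrid("project_seds.grid.hd5", backend="memory")
with tables.open_file("project_noisemodel.grid.hd5", "r") as noisemodel_hdf:
    trim_models(
        modelsedgrid,
        noisemodel_hdf,
        obsdata,
        "project_seds_trim.grid.hd5",
        "project_noisemodel_trim.grid.hd5",
        sigma_fac=3.0,
        n_detected=4,
    )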
Example #13
0
def add_stellar_priors(
    project,
    specgrid,
    distance_prior_model={"name": "flat"},
    age_prior_model={"name": "flat"},
    mass_prior_model={"name": "kroupa"},
    met_prior_model={"name": "flat"},
    verbose=True,
    priors_fname=None,
    info_fname=None,
    **kwargs,
):
    """
    make_priors -- compute the weights for the stellar priors

    Parameters
    ----------
    project: str
        project name

    specgrid: SpectralGrid object
        spectral grid to transform

    distance_prior_model: dict
        dict including prior model name and parameters

    age_prior_model: dict
        dict including prior model name and parameters

    mass_prior_model: dict
        dict including prior model name and parameters

    met_prior_model: dict
        dict including prior model name and parameters

    priors_fname: str
        full filename to which to save the spectral grid with priors

    info_fname : str
        Set to specify the filename to save beast info to, otherwise
        saved to project/project_beast_info.asdf

    Returns
    -------
    fname: str
       name of saved file

    g: SpectralGrid object
        spectral grid to transform
    """
    if priors_fname is None:
        priors_fname = "%s/%s_spec_w_priors.grid.hd5" % (project, project)
    if not os.path.isfile(priors_fname):

        if verbose:
            print("Make Prior Weights")

        compute_distance_age_mass_metallicity_weights(
            specgrid.grid,
            distance_prior_model=distance_prior_model,
            age_prior_model=age_prior_model,
            mass_prior_model=mass_prior_model,
            met_prior_model=met_prior_model,
            **kwargs,
        )

        # write to disk
        if hasattr(specgrid, "write"):
            specgrid.write(priors_fname)
        else:
            for gk in specgrid:
                gk.write(priors_fname, append=True)

    # save info to the beast info file
    info = {
        "distance_prior_model": distance_prior_model,
        "age_prior_model": age_prior_model,
        "mass_prior_model": mass_prior_model,
        "met_prior_model": met_prior_model,
    }
    if info_fname is None:
        info_fname = f"{project}/{project}_beast_info.asdf"
    add_to_beast_info_file(info_fname, info)

    # read the spectral grid back in from file (possibly not needed; needs checking)
    g = SpectralGrid(priors_fname, backend="memory")

    return (priors_fname, g)
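
# Hedged usage sketch for add_stellar_priors, mirroring the prior-model dicts
# shown in the signature defaults; the project name and file paths are
# placeholders for illustration.
specgrid = SpectralGrid("myproject/myproject_spec_grid.hd5", backend="memory")
priors_fname, g_priors = add_stellar_priors(
    "myproject",
    specgrid,
    age_prior_model={"name": "flat"},
    mass_prior_model={"name": "kroupa"},
    met_prior_model={"name": "flat"},
    distance_prior_model={"name": "flat"},
    priors_fname="myproject/myproject_spec_w_priors.grid.hd5",
)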
Example #14
0
def make_spectral_grid(
    project,
    oiso,
    osl=None,
    bounds={},
    verbose=True,
    spec_fname=None,
    distance=10,
    distance_unit=units.pc,
    redshift=0.0,
    filterLib=None,
    add_spectral_properties_kwargs=None,
    extLaw=None,
    **kwargs,
):
    """
    Generate the spectral grid from the stellar parameters by interpolating
    the isochrones and producing spectra in physical units.

    Parameters
    ----------
    project: str
        project name

    oiso: isochrone.Isochrone object
        set of isochrones to use

    osl: stellib.Stellib object
        Spectral library to use (default stellib.Kurucz)

    distance: float or list of float
        distances at which models should be shifted, specified as a
        single number or as [min, max, step]

        0 means absolute magnitude.

    distance_unit: astropy length unit or mag
        distances will be evenly spaced in this unit
        therefore, specifying a distance grid in mag units will lead to
        a log grid

    redshift: float
        Redshift to which wavelengths should be shifted
        Default is 0 (rest frame)

    spec_fname: str
        full filename to save the spectral grid into

    filterLib:  str
        full filename to the filter library hd5 file

    extLaw: extinction.ExtLaw (default=None)
        if set, only save the spectrum for the wavelengths over which the
        extinction law is valid

    add_spectral_properties_kwargs: dict
        keyword arguments to call :func:`add_spectral_properties`
        to add model properties from the spectra into the grid property table

    Returns
    -------
    fname: str
       name of saved file

    g: grid.SpectralGrid object
        spectral grid to transform
    """
    if spec_fname is None:
        spec_fname = "%s/%s_spec_grid.hd5" % (project, project)

    # remove the isochrone points with logL=-9.999
    oiso.data = oiso[oiso["logL"] > -9]

    if not os.path.isfile(spec_fname):
        osl = osl or stellib.Kurucz()

        # filter extrapolations of the grid with given sensitivities in
        # logg and logT
        if "dlogT" not in bounds:
            bounds["dlogT"] = 0.1
        if "dlogg" not in bounds:
            bounds["dlogg"] = 0.3

        # make the spectral grid
        if verbose:
            print("Make spectra")
        g = creategrid.gen_spectral_grid_from_stellib_given_points(
            osl, oiso.data, bounds=bounds)

        # Construct the distances array. Turn single value into
        # 1-element list if single distance is given.
        _distance = np.atleast_1d(distance)
        if len(_distance) == 3:
            mindist, maxdist, stepdist = _distance
            distances = np.arange(mindist, maxdist + stepdist, stepdist)
        elif len(_distance) == 1:
            distances = np.array(_distance)
        else:
            raise ValueError(
                "distance needs to be (min, max, step) or single number")

        # calculate the distances in pc
        if distance_unit == units.mag:
            distances = np.power(10, distances / 5.0 + 1) * units.pc
        else:
            distances = (distances * distance_unit).to(units.pc)

        print("applying {} distances".format(len(distances)))

        if verbose:
            print(
                "Adding spectral properties:",
                add_spectral_properties_kwargs is not None,
            )
        if add_spectral_properties_kwargs is not None:
            nameformat = (
                add_spectral_properties_kwargs.pop("nameformat", "{0:s}") +
                "_nd")

        # Apply the distances to the stars. Seds already at 10 pc, need
        # multiplication by the square of the ratio to this distance.
        # TODO: Applying the distances might have to happen in chunks
        # for larger grids.
        def apply_distance_and_spectral_props(g):
            # distance
            g = creategrid.apply_distance_grid(g, distances, redshift=redshift)

            # spectral props
            if add_spectral_properties_kwargs is not None:
                g = creategrid.add_spectral_properties(
                    g,
                    nameformat=nameformat,
                    filterLib=filterLib,
                    **add_spectral_properties_kwargs,
                )

            # extinction
            if extLaw is not None:

                ext_law_range_A = 1e4 / np.array(extLaw.x_range)
                valid_lambda = np.where((g.lamb > np.min(ext_law_range_A))
                                        &
                                        (g.lamb < np.max(ext_law_range_A)))[0]

                g.lamb = g.lamb[valid_lambda]
                g.seds = g.seds[:, valid_lambda]

            return g

        # Perform the extensions defined above and write to disk
        if hasattr(g, "write"):
            g = apply_distance_and_spectral_props(g)
            g.write(spec_fname)
        else:
            for gk in g:
                gk = apply_distance_and_spectral_props(gk)
                gk.write(spec_fname, append=True)

    g = SpectralGrid(spec_fname, backend="memory")

    return (spec_fname, g)
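
# Hedged sketch of calling make_spectral_grid with a [min, max, step] distance
# grid in magnitudes (which the code above converts to a log-spaced grid in
# pc). oiso is the isochrone object produced earlier in the pipeline and is a
# placeholder here; the project name is also illustrative.
spec_fname, g_spec = make_spectral_grid(
    "myproject",
    oiso,
    osl=stellib.Kurucz(),        # spectral library, as referenced in the code above
    distance=[24.0, 25.0, 0.5],  # distance moduli: min, max, step
    distance_unit=units.mag,
    redshift=0.0,
)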
Example #15
0
def add_stellar_priors(project, specgrid,
                       distance_prior_model={'name': 'flat'},
                       age_prior_model={'name': 'flat'},
                       mass_prior_model={'name': 'kroupa'},
                       met_prior_model={'name': 'flat'},
                       verbose=True,
                       priors_fname=None,
                       **kwargs):
    """
    make_priors -- compute the weights for the stellar priors

    Parameters
    ----------
    project: str
        project name

    specgrid: SpectralGrid object
        spectral grid to transform

    distance_prior_model: dict
        dict including prior model name and parameters

    age_prior_model: dict
        dict including prior model name and parameters

    mass_prior_model: dict
        dict including prior model name and parameters

    met_prior_model: dict
        dict including prior model name and parameters

    priors_fname: str
        full filename to which to save the spectral grid with priors

    Returns
    -------
    fname: str
       name of saved file

    g: SpectralGrid object
        spectral grid to transform
    """
    if priors_fname is None:
        priors_fname = "%s/%s_spec_w_priors.grid.hd5" % (project, project)
    if not os.path.isfile(priors_fname):

        if verbose:
            print("Make Prior Weights")

        compute_distance_age_mass_metallicity_weights(
            specgrid.grid,
            distance_prior_model=distance_prior_model,
            age_prior_model=age_prior_model,
            mass_prior_model=mass_prior_model,
            met_prior_model=met_prior_model,
            **kwargs)

        # write to disk
        if hasattr(specgrid, "write"):
            specgrid.write(priors_fname)
        else:
            for gk in specgrid:
                gk.write(priors_fname, append=True)

    g = SpectralGrid(priors_fname, backend="memory")

    return (priors_fname, g)