Example #1
def get_flux_noise_1sigma(detid, mask=False):

    """ For a detid get the the f50_1sigma value

    No need to mask the flux limits if you are using the
    curated catalog which is already masked. This avoid a
    few good sources on the edges of masks that get flagged
    with a high flux limit value.
    """

    global config, det_table

    sncut = 1

    sel_det = det_table["detectid"] == detid
    shotid = det_table["shotid"][sel_det][0]
    ifuslot = det_table["ifuslot"][sel_det][0]

    det_table_here = det_table[sel_det]

    datevobs = str(shotid)[0:8] + "v" + str(shotid)[8:11]

    fn = op.join(config.flim_dir, datevobs + "_sensitivity_cube.h5")
    if mask:
        mask_fn = op.join(config.flimmask, datevobs + "_mask.h5")
        sscube = SensitivityCubeHDF5Container(fn, mask_filename=mask_fn, flim_model="hdr1")
    else:
        sscube = SensitivityCubeHDF5Container(fn, flim_model="hdr1")
    scube = sscube.extract_ifu_sensitivity_cube("ifuslot_{}".format(ifuslot))
    flim = scube.get_f50(
        det_table_here["ra"], det_table_here["dec"], det_table_here["wave"], sncut
    )
    sscube.close()

    return flim[0]
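A minimal usage sketch (not from the source): the import paths below are how I'd expect to pull these names from hetdex_api, and the detectid and det_table contents are hypothetical.

import os.path as op

from hetdex_api.config import HDRconfig
from hetdex_api.flux_limits.hdf5_sensitivity_cubes import SensitivityCubeHDF5Container

config = HDRconfig()  # assumed global read by get_flux_noise_1sigma
# det_table is assumed to be an astropy Table with "detectid", "shotid",
# "ifuslot", "ra", "dec" and "wave" columns, e.g. a curated detection catalog
flim_1sigma = get_flux_noise_1sigma(2100191119)  # hypothetical detectid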
Example #2
def return_completeness_from_shots(shots, fluxes, lambda_low, lambda_high,
                                   sncut, rescale_50=None):
    """
    Return the completeness versus flux,
    averaged over a range of shots

    """

    bin_edges = linspace(0, 1e-16, 1000)
    bins = 0.5*(bin_edges[:-1] + bin_edges[1:])
    nbinned = zeros(len(bin_edges) - 1)

    cube_comp_all = []
    for shot in shots:
        fn, fnmask = return_sensitivity_hdf_path(shot, return_mask_fn=True)

        with SensitivityCubeHDF5Container(fn, mask_filename=fnmask) as hdf:
            cube_compl, tnbinned = hdf.return_shot_completeness(fluxes*1e-17, lambda_low,
                                                                lambda_high, sncut,
                                                                bin_edges=bin_edges)
            # rescale the curve so the 50% completeness flux lands at rescale_50
            if rescale_50:
                f50 = interp(0.5, cube_compl/cube_compl[-1], fluxes)
                cube_compl = interp(fluxes, rescale_50*fluxes/f50, cube_compl)

            nbinned += tnbinned

        cube_comp_all.append(cube_compl)

    return mean(cube_comp_all, axis=0), bins, nbinned
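A hedged call sketch: the bare linspace/zeros/interp/mean calls above suggest numpy names imported at module level, and the shotids here are hypothetical.

from numpy import linspace, zeros, interp, mean

fluxes = linspace(0.1, 10.0, 100)    # in 1e-17 erg/s/cm^2, matching the *1e-17 scaling above
shots = [20190105012, 20190105013]   # hypothetical shotids
compl, bins, nbinned = return_completeness_from_shots(shots, fluxes,
                                                      4500., 4600., 5.5)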
Example #3
@pytest.fixture  # assumed: the yield/teardown pattern below marks this as a pytest fixture
def hdf5_container(tmpdir):
    """ HDF5 container we can write test data to """
    filename = tmpdir.join("test.h5").strpath
    hdcon = SensitivityCubeHDF5Container(filename, mode="w")

    # yield the open container to the test, then close the file on teardown
    yield hdcon
    hdcon.close()
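For context, a sketch of a test consuming this fixture (the assertion is illustrative; h5file is the underlying PyTables handle also used in Example #10):

def test_container_is_open(hdf5_container):
    # pytest injects the yielded container under the fixture's name
    assert hdf5_container.h5file.isopen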
Example #4
def test_flim_model(datadir):
    """
    Test that the flux limit model is 
    being passed to the sensitivity cubes
    """
    # ifuslot_063
    filename = datadir.join("test_hdf.h5").strpath
    hdcon1 = SensitivityCubeHDF5Container(filename, flim_model="hdr1")
    hdcon2 = SensitivityCubeHDF5Container(filename, flim_model="hdr2pt1")

    scube1 = hdcon1.extract_ifu_sensitivity_cube("ifuslot_063")
    scube2 = hdcon2.extract_ifu_sensitivity_cube("ifuslot_063")

    s1 = scube1.get_f50(161.4201, 50.8822, 3470.0, 5.5)
    s2 = scube2.get_f50(161.4201, 50.8822, 3470.0, 5.5)

    print(s1)
    # flux limits computed with different flim models should differ
    assert abs(s1 - s2) > 1e-19
Example #5
@pytest.mark.parametrize("use_with", [True, False])  # assumed: likely source of use_with
def test_hdf5_create_and_write(tmpdir, use_with):
    """
    Test creating and writing an HDF5 file, closing it
    either via a with statement or an explicit close()
    """

    filename = tmpdir.join("test.h5").strpath

    # Test with statement
    if use_with:
        with SensitivityCubeHDF5Container(filename, mode="w"):
            pass
    else:
        # Test explicitly closing
        hdcon = SensitivityCubeHDF5Container(filename, mode="w")
        hdcon.close()

    # Can we open it again?
    hdcon2 = SensitivityCubeHDF5Container(filename, mode="r")
    hdcon2.close()
Example #6
def get_regions_from_flim(shotid):
    """ Return a RectangleSkyRegion for each IFU in a shot's sensitivity cubes """

    date = str(shotid)[0:8]
    obs = str(shotid)[8:]
    datevobs = str(date) + "v" + str(obs).zfill(3)
    shotid = int(str(date) + str(obs).zfill(3))

    hdf_filename, mask_fn = return_sensitivity_hdf_path(datevobs,
                                                        release="hdr2.1",
                                                        return_mask_fn=True)

    hdfcont = SensitivityCubeHDF5Container(
        filename=hdf_filename,
        aper_corr=1.0,
        flim_model="hdr2pt1",
        mask_filename=mask_fn,
    )

    ifu_name_list = []
    ifu_ra = []
    ifu_dec = []
    ifuregions = []

    for ifu_name, tscube in hdfcont.itercubes():
        shape = tscube.sigmas.shape
        ra, dec, lambda_ = tscube.wcs.all_pix2world(shape[2] / 2.0,
                                                    shape[1] / 2.0,
                                                    shape[0] / 2.0, 0)
        pa = -tscube.header["CROTA2"]
        ifu_name_list.append(ifu_name)
        coord = SkyCoord(ra, dec, unit="deg")
        ifu_ra.append(ra)
        ifu_dec.append(dec)
        ifuregions.append(
            RectangleSkyRegion(
                center=coord,
                width=1.0 * u.arcmin,
                height=1.0 * u.arcmin,
                angle=pa * u.deg,
            ))
    hdfcont.close()

    return ifuregions
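A sketch of one way to consume the output, assuming the regions package (the source of RectangleSkyRegion) is available; the shotid and output filename are hypothetical.

from regions import Regions

ifuregions = get_regions_from_flim(20190105013)  # hypothetical shotid
Regions(ifuregions).write("ifus_20190105v013.reg", format="ds9")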
Example #7
except:  # snippet truncated upstream: the matching try block is not shown
    wave_slice = 4540.
    
datevobs = str(date) + 'v' + str(obs).zfill(3)
shotid = int(str(date) + str(obs).zfill(3))

survey = Survey()

shot_coords = survey.coords[survey.shotid == shotid][0]

seeing = survey.fwhm_virus[survey.shotid == shotid][0]
tp = survey.response_4540[survey.shotid == shotid][0]

hdf_filename_hdr2pt1, mask_fn = return_sensitivity_hdf_path(datevobs, release="hdr2.1", return_mask_fn=True)

hdfcont_hdr2 = SensitivityCubeHDF5Container(filename=hdf_filename_hdr2pt1, aper_corr=1.0,
                                            flim_model="hdr2pt1",  mask_filename=mask_fn)

#overplot detections
#detects = Detections(curated_version='2.1.3')
#sel_shot = detects.shotid == shotid
#detects_shot = detects[sel_shot]

version = '2.1.3'

config = HDRconfig()
catfile = op.join(config.detect_dir, 'catalogs', 'source_catalog_' + version + '.fits')
source_table = Table.read(catfile)
source_table_shot = source_table[source_table['shotid'] == shotid]

del source_table
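A hedged sketch of how the wave_slice and hdfcont_hdr2 set up above might be used next, following the extract/get_f50 pattern of Examples #1 and #4; the IFU slot is hypothetical.

scube = hdfcont_hdr2.extract_ifu_sensitivity_cube("ifuslot_063")  # hypothetical IFU
f50_at_slice = scube.get_f50(shot_coords.ra.deg, shot_coords.dec.deg,
                             wave_slice, 5.5)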
Example #8
def generate_sencube_hdf(datevshot,
                         ra,
                         dec,
                         pa,
                         fplane_output_dir,
                         nx,
                         ny,
                         nz,
                         ifusize,
                         skip_ifus=[
                             "000", "600", "555", "601", "602", "603", "604",
                             "610", "611", "612", "613", "614", "615", "616"
                         ],
                         hdf_filename=None):
    """
    Generate an empty real or mock sensitivity HDF5 container, 
    with the proper astrometry in the cubes. Real containters
    are of the SensitivityCubeHDF5Container class and are 
    written to a file. The mock containers are of
    HDF5MockContainer class and do not have a real HDF5
    file - useful for simulations. 

    Parameters
    ----------
    datevshot : str
        the 8 digit YYYYMMDDvSSS date
        of the shot and the shot, used to get the
        correct focal plane file
    ra, dec, pa : float
        the astrometry of the shot
    fplane_output_dir : str
        directory to output fplane files to
    hdf_filename : str (optional)
        if passed, generate a real
        SensitivityCubeHDF5Container
        with this filename. If None
        generate a mock container.
    ifusize : float
        size of x,y of IFU in arcsec
    skip_ifus : list (optional)
        the IFUSLOTS to skip

    Returns
    -------
    hdfcont : SensitivityCubeHDF5Container or HDF5MockContainer
       a real or mock sensivity cube container depending on
       the ``hdf_filename`` parameter
    """
    if hdf_filename:
        hdfcont = SensitivityCubeHDF5Container(hdf_filename, mode="w")
    else:
        hdfcont = HDF5MockContainer()

    # Generate the shot astrometry
    rot = 360.0 - (pa + 90.)
    tp = TangentPlane(ra, dec, rot)

    date = datevshot[:8]
    fplane_bn = "{:s}_fplane.txt".format(date)
    fplane_fn = join(fplane_output_dir, fplane_bn)

    # fetch the focal plane file if it's not already on disk
    if not isfile(fplane_fn):
        get_fplane(fplane_fn, datestr=str(date))

    fplane = FPlane(fplane_fn)

    for ifuslot, ifu in iteritems(fplane.difus_ifuslot):

        if ifuslot in skip_ifus:
            continue

        ifuslot_str = "ifuslot_" + ifuslot
        # Note x, y swapped in focal fplane
        ra_ifu, dec_ifu = tp.xy2raDec(ifu.y, ifu.x)
        scube = create_sensitivity_cube_from_astrom(ra_ifu.item(),
                                                    dec_ifu.item(), pa, nx, ny,
                                                    nz, ifusize)
        hdfcont.add_sensitivity_cube(datevshot, ifuslot_str, scube)

    return hdfcont
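A hedged call sketch: the cube dimensions (31, 31, 1036) and the 52 arcsec ifusize mirror the call in Example #9 below; the datevshot, astrometry and output directory are illustrative.

# No hdf_filename, so this returns an HDF5MockContainer (useful for simulations);
# fplane_output_dir must be writable, since a focal plane file may be fetched
mock_hdf = generate_sencube_hdf("20190105v013", 150.0, 2.0, 100.0,
                                "./fplanes", 31, 31, 1036, 52.)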
Example #9
def generate_sources_to_simulate(args=None):
    """
    Generate source simulation inputs
    spread evenly over the sensitivity cube
    pixels, from an input sensitivity cube. Not
    suitable as a random catalogue, as not uniform
    in redshift - good for probing the detection
    efficiency etc. over redshift though

    """

    parser = ArgumentParser(
        description=
        "Generate inputs for source simulations, uniform in x,y,z datacube coords"
    )
    parser.add_argument("--nsplit-start",
                        default=0,
                        type=int,
                        help="What number to start the file split labeling at")
    parser.add_argument("--ifusize",
                        help="Size of IFU, in arcsec",
                        default=52.)
    parser.add_argument("--nperifu",
                        help="Number of sources to add per IFU",
                        default=1000,
                        type=int)
    parser.add_argument("--flimcut",
                        help="Don't simulate source above this",
                        type=float,
                        default=1e-15)
    parser.add_argument("--fix-flux",
                        help="If not None, set all sources to this flux value",
                        default=None,
                        type=float)
    parser.add_argument("--frac-range",
                        default=[0.2, 2.0],
                        nargs=2,
                        type=float,
                        help="The range in flux as a fraction of flim")
    parser.add_argument("--nsplit",
                        default=1,
                        help="Number of jobs per line in .run file")
    parser.add_argument("--nmax",
                        default=1000,
                        help="Maximum number of sources in one sim",
                        type=int)
    parser.add_argument(
        "filelist",
        help="Ascii file with list of sensitivity HDF5 files or date shots")
    parser.add_argument("outdir", help="Directory for output files")
    opts = parser.parse_args(args=args)

    survey_obj = None
    tables = []
    with open(opts.filelist, "r") as fp:
        for line in fp:
            input_ = line.strip()
            if ".h5" in input_:
                print("Assuming {:s} is an HDF5 file".format(input_))
                field = input_.replace("_sensitivity_cube.h5", "")
                sencube_hdf = SensitivityCubeHDF5Container(input_)
                add_flim = True
            else:
                print("Assuming {:s} is date shot".format(input_))
                field = input_
                add_flim = False

                if survey_obj is None:
                    survey_obj = Survey("hdr2")

                shot = survey_obj[survey_obj.datevobs == field]
                sencube_hdf = generate_sencube_hdf(shot.datevobs[0],
                                                   shot.ra[0], shot.dec[0],
                                                   shot.pa[0], opts.outdir, 31,
                                                   31, 1036, opts.ifusize)

            # build the input table for this shot; this must sit inside
            # the loop so every line of the file list is processed
            ttable = rdz_flux_from_hdf_cubes(sencube_hdf,
                                             add_flim=add_flim,
                                             minfrac=opts.frac_range[0],
                                             maxfrac=opts.frac_range[1],
                                             nperifu=opts.nperifu)
            ttable["field"] = field
            tables.append(ttable)

    table = vstack(tables)
    table["id"] = range(len(table))

    if opts.fix_flux:
        table["flux"] = opts.fix_flux

    if add_flim:
        table = table[table["flim"] < opts.flimcut]
        print("After flimcut left with {:d} sources!".format(len(table)))

    #table.write("full_input_{:d}.txt".format(opts.nsplit_start), format="ascii.commented_header")

    # Split into IFUS and fields
    unique_ifus = unique(table["ifuslot"])
    unique_fields = unique(table["field"])

    # Second part of the filename
    fn2s = []
    split_into_ifus(table,
                    unique_fields,
                    unique_ifus,
                    opts.outdir,
                    NMAX=opts.nmax,
                    nsplit_start=opts.nsplit_start,
                    nsplit=opts.nsplit)
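A hedged invocation sketch; the file list and output directory names are hypothetical.

# shots.txt: one sensitivity HDF5 path or date shot (e.g. 20190105v013) per line
generate_sources_to_simulate(args=["--nperifu", "500", "shots.txt", "sim_inputs"])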
Example #10
def return_biwt_cmd(args=None):
    """
    Command line tool to return the biweight
    of the flux limit over a wavelength range
    from HDF5 file(s)

    """

    # Parse the arguments
    parser = argparse.ArgumentParser(
        description="Compute biweight flux limits from HDF5 file")
    parser.add_argument("--wlrange",
                        nargs=2,
                        default=[4500, 4600],
                        type=int,
                        help="Wavelength range to compute median over")
    parser.add_argument("--hist",
                        action="store_true",
                        help="Plot histograms of flux limit")
    parser.add_argument(
        "--nkeep",
        type=int,
        default=10000,
        help="To exclude edge pixels, only the nkeep deepest pixels"
        " are considered (default 10000)")
    parser.add_argument("--hist_all",
                        action="store_true",
                        help="Plot histograms flux limit for all inputs")
    parser.add_argument("--fout",
                        default=None,
                        type=str,
                        help="Ascii file to save results to")
    parser.add_argument("--fn-shot-average",
                        default=None,
                        type=str,
                        help="Ascii file to append shot average flim to")
    parser.add_argument("files",
                        help="HDF container(s) of sensitivity cubes",
                        nargs='+')
    opts = parser.parse_args(args=args)

    print("Using wavelengths {:f} to {:f} AA".format(*opts.wlrange))

    # Loop over the files producing median and percentiles
    biwt_ls = []
    ifu = []
    biwt_vars = []
    dateshot = []

    for fn in opts.files:

        # Open the HDF5 container
        with SensitivityCubeHDF5Container(fn) as hdfcont:

            # Loop over shots
            shots_groups = hdfcont.h5file.list_nodes(hdfcont.h5file.root)
            for shot_group in shots_groups:
                str_datevshot = shot_group._v_name
                flims_shot = []

                # Loop over sensitivity cubes
                for ifu_name, scube in hdfcont.itercubes(
                        datevshot=str_datevshot):
                    flims = return_flattened_wlrange(scube, opts.wlrange[0],
                                                     opts.wlrange[1])
                    flims = flims[isfinite(flims)]
                    flims.sort()

                    # Compute statistics and save numbers
                    # from this cube
                    flims_shot.extend(flims[:opts.nkeep])
                    biwt_ls.append(biweight_location(flims[:opts.nkeep]))
                    biwt_vars.append(biweight_midvariance(flims[:opts.nkeep]))

                    # Save IFU and shot info
                    ifu.append(ifu_name.strip("ifuslot_"))
                    dateshot.append(str_datevshot.strip("/virus_"))

                if opts.fn_shot_average:
                    # use fout so the loop variable fn isn't shadowed
                    with open(opts.fn_shot_average, 'a') as fout:
                        fout.write("{:s} {:e} \n".format(
                            str_datevshot.strip("/virus_"),
                            biweight_location(flims_shot)))

    table = Table(
        [dateshot, ifu, biwt_ls, sqrt(biwt_vars)],
        names=["dateshot", "ifu", "biwt_loc", "sqrt_biwt_mdvar"])

    # Output or save
    if opts.fout:
        table.write(opts.fout)
    else:
        table.pprint(max_lines=-1)
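A hedged invocation sketch; the input container name follows the *_sensitivity_cube.h5 pattern seen above, and the output name is hypothetical (.ecsv so astropy can infer the ascii format).

return_biwt_cmd(args=["--wlrange", "4500", "4600",
                      "--fout", "biwt_flims.ecsv",
                      "20190105v013_sensitivity_cube.h5"])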