Example #1
0
def return_astropy_table(Source_dict,
                         fiberweights=False,
                         return_fiber_info=False):
    """Return an astropy Table built from a source dictionary.

    Parameters
    ----------
    Source_dict : dict
        Nested dict keyed by source ID, then shotid. Each value is a
        sequence: [spec, spec_err, weights, fiber_weights, fiber_info,
        flags], where flags is (meteor_flag, gal_flag, amp_flag, flag)
        or None. Trailing entries may be absent in older dictionaries.
    fiberweights : bool
        If True, include the per-fiber weights column.
    return_fiber_info : bool
        If True, include the fiber_info column.

    Returns
    -------
    astropy.table.Table
        One row per (ID, shotid) combination that has at least one
        finite spectral value.
    """

    id_arr = []
    shotid_arr = []
    wave_arr = []
    spec_arr = []
    spec_err_arr = []
    weights_arr = []
    fiber_weights_arr = []
    fiber_info_arr = []
    gal_flag_arr = []
    meteor_flag_arr = []
    amp_flag_arr = []
    flag_arr = []

    # loop over every ID/observation combo:
    for ID in Source_dict.keys():

        for shotid in Source_dict[ID].keys():
            # rectified wavelength grid: 1036 bins of 2 AA from 3470 AA
            wave_rect = 2.0 * np.arange(1036) + 3470.0
            spec = Source_dict[ID][shotid][0]
            spec_err = Source_dict[ID][shotid][1]
            weights = Source_dict[ID][shotid][2]
            if fiberweights:
                fiber_weights = Source_dict[ID][shotid][3]

            # fiber info used to make masks; may be absent
            try:
                fiber_info = Source_dict[ID][shotid][4]
            except (IndexError, KeyError, TypeError):
                fiber_info = None

            # flag tuple may be absent as well; treat missing as "all good"
            try:
                flags = Source_dict[ID][shotid][5]
            except (IndexError, KeyError, TypeError):
                flags = None

            if flags is None:
                amp_flag = True
                gal_flag = True
                meteor_flag = True
                flag = True
            else:
                meteor_flag, gal_flag, amp_flag, flag = flags

            # only keep rows that carry at least one finite spectral value
            sel = np.isfinite(spec)

            if np.sum(sel) > 0:
                id_arr.append(ID)
                shotid_arr.append(shotid)
                wave_arr.append(wave_rect)
                spec_arr.append(spec)
                spec_err_arr.append(spec_err)
                weights_arr.append(weights)
                if fiberweights:
                    fiber_weights_arr.append(fiber_weights)
                fiber_info_arr.append(fiber_info)
                flag_arr.append(flag)
                amp_flag_arr.append(amp_flag)
                meteor_flag_arr.append(meteor_flag)
                gal_flag_arr.append(gal_flag)

    output = Table()
    # flux density unit: 1e-17 erg / s / cm^2 / AA
    fluxden_u = 1e-17 * u.erg * u.s ** (-1) * u.cm ** (-2) * u.AA ** (-1)

    output.add_column(Column(id_arr, name="ID"))
    output.add_column(Column(shotid_arr, name="shotid"))
    output.add_column(Column(wave_arr, unit=u.AA, name="wavelength"))
    output.add_column(Column(spec_arr, unit=fluxden_u, name="spec"))
    output.add_column(Column(spec_err_arr, unit=fluxden_u, name="spec_err"))
    output.add_column(Column(weights_arr, name="weights"))
    output.add_column(Column(flag_arr, name='flag', dtype=int))
    output.add_column(Column(gal_flag_arr, name='gal_flag', dtype=int))
    output.add_column(Column(amp_flag_arr, name='amp_flag', dtype=int))
    output.add_column(Column(meteor_flag_arr, name='meteor_flag', dtype=int))

    if fiberweights:
        output.add_column(Column(fiber_weights_arr, name="fiber_weights"))
    if return_fiber_info:
        output.add_column(Column(fiber_info_arr, name="fiber_info"))

    return output
Example #2
0
    def __init__(
        self,
        coords=None,
        detectid=None,
        survey="hdr2.1",
        aperture=3.0 * u.arcsec,
        cutout_size=5.0 * u.arcmin,
        zoom=3,
    ):
        """Build the interactive source-inspection GUI and display it.

        Parameters
        ----------
        coords : SkyCoord, optional
            position to center on; takes precedence over ``detectid``
        detectid : int, optional
            detection ID to look up coordinates for
        survey : str
            data release name, e.g. "hdr2.1"
        aperture : astropy Quantity
            extraction aperture radius
        cutout_size : astropy Quantity
            size of the image cutout
        zoom : int
            initial zoom level for the image widget
        """

        self.survey = survey.lower()

        self.detectid = detectid
        self.aperture = aperture
        self.cutout_size = cutout_size
        self.zoom = zoom

        config = HDRconfig(survey=survey)

        # detection catalog H5 stays open for the lifetime of the widget
        self.fileh5dets = tb.open_file(config.detecth5, "r")
        self.catlib = catalogs.CatalogLibrary()

        # precedence: explicit coords > detectid > hard-coded default source
        if coords:
            self.coords = coords
            # sentinel detectid used when only coordinates are given
            self.detectid = 1000000000
        elif detectid:
            self.detectid = detectid
            self.update_det_coords()
        else:
            self.coords = SkyCoord(191.663132 * u.deg,
                                   50.712696 * u.deg,
                                   frame="icrs")
            self.detectid = 2101848640

        # initialize the image widget from astrowidgets
        self.imw = ImageWidget(image_width=600, image_height=600)

        self.survey_widget = widgets.Dropdown(
            options=["HDR1", "HDR2", "HDR2.1"],
            value=self.survey.upper(),
            layout=Layout(width="10%"),
        )

        # detectid entry box; bounds cover valid HDR detection IDs
        self.detectbox = widgets.BoundedIntText(
            value=self.detectid,
            min=1000000000,
            max=3000000000,
            step=1,
            description="DetectID:",
            disabled=False,
        )
        self.im_ra = widgets.FloatText(
            value=self.coords.ra.value,
            description="RA (deg):",
            layout=Layout(width="20%"),
        )
        self.im_dec = widgets.FloatText(
            value=self.coords.dec.value,
            description="DEC (deg):",
            layout=Layout(width="20%"),
        )

        # action buttons; handlers are wired up at the bottom of __init__
        self.pan_to_coords = widgets.Button(description="Pan to coords",
                                            disabled=False,
                                            button_style="success")
        self.marking_button = widgets.Button(description="Mark Sources",
                                             button_style="success")
        self.reset_marking_button = widgets.Button(description="Reset",
                                                   button_style="success")
        self.extract_button = widgets.Button(description="Extract Object",
                                             button_style="success")

        self.marker_table_output = widgets.Output(
            layout={"border": "1px solid black"})
        #        self.spec_output = widgets.Output(layout={'border': '1px solid black'})

        self.spec_output = widgets.Tab(description="Extracted Spectra:",
                                       layout={"border": "1px solid black"})
        self.textimpath = widgets.Text(description="Source: ",
                                       value="",
                                       layout=Layout(width="90%"))

        # layout: top row of controls, image on the left, marker/spec panels
        # on the right, free-form output box along the bottom
        self.topbox = widgets.HBox([
            self.survey_widget,
            self.detectbox,
            self.im_ra,
            self.im_dec,
            self.pan_to_coords,
        ])
        self.leftbox = widgets.VBox([self.imw, self.textimpath],
                                    layout=Layout(width="800px"))
        self.rightbox = widgets.VBox(
            [
                widgets.HBox([
                    self.marking_button,
                    self.reset_marking_button,
                    self.extract_button,
                ]),
                self.marker_table_output,
                self.spec_output,
            ],
            layout=Layout(width="800px"),
        )

        self.bottombox = widgets.Output(layout={"border": "1px solid black"})

        # load_image must run before display so the widget has content
        self.load_image()

        self.all_box = widgets.VBox([
            self.topbox,
            widgets.HBox([self.leftbox, self.rightbox]),
            #self.spec_output,
            self.bottombox
        ])
        display(self.all_box)
        # wire up event handlers after everything is displayed
        self.detectbox.observe(self.on_det_change)
        self.pan_to_coords.on_click(self.pan_to_coords_click)
        self.marking_button.on_click(self.marking_on_click)
        self.reset_marking_button.on_click(self.reset_marking_on_click)
        self.extract_button.on_click(self.extract_on_click)
        self.survey_widget.observe(self.on_survey_change)
Example #3
0
def return_flux_limit_model(flim_model, cache_sim_interp=True, verbose=False):
    """
    Return the noise -> 50% completeness
    scaling and a function for the
    completeness curves.

    Parameters
    ----------
    flim_model : str
        name of the flux limit model; falsy values select the default
    cache_sim_interp : bool
        cache (and reuse) the SimulationInterpolator between calls
    verbose : bool
        print the model name being used

    Returns
    -------
    f50_from_noise : callable
        maps (noise, lambda_, sncut[, linewidth]) to 50% completeness flux
    sinterp : SimulationInterpolator
        interpolator over the simulated completeness curves
    interp_sigmas : bool
        whether to interpolate over the sigma grid (model property)
    """

    # old models for legacy support
    if flim_model in ["hdr1", "hdr2pt1"]:
        if verbose:
            print("Using flim model: {:s}".format(flim_model))
        return return_flux_limit_model_old(flim_model)

    models = {
        "one_sigma_nearest_pixel":
        ModelInfo("curves_v1", [1.0],
                  None,
                  False,
                  False,
                  snlow=0.999999,
                  snhigh=1.000001),
        "one_sigma_interpolate":
        ModelInfo("curves_v1", [1.0],
                  None,
                  True,
                  False,
                  snlow=0.999999,
                  snhigh=1.000001),
        "hdr2pt1pt1":
        ModelInfo(
            "curves_v1",
            [2.76096687e-03, 2.09732448e-02, 7.21681512e-02, 3.36040017e+00],
            None, False, False),
        "hdr2pt1pt3":
        ModelInfo(
            "curves_v1",
            [6.90111625e-04, 5.99169372e-02, 2.92352510e-01, 1.74348070e+00],
            None, False, False),
        "v1":
        ModelInfo("curves_v1", [
            -8.80650683e-02, 2.03488098e+00, -1.73733048e+01, 6.56038443e+01,
            -8.84158092e+01
        ], None, False, True),
        "v1.1":
        ModelInfo("curves_v1", [
            -8.80650683e-02, 2.03488098e+00, -1.73733048e+01, 6.56038443e+01,
            -8.84158092e+01
        ], None, True, True),
        "v2":
        ModelInfo("curves_v1", [1.0, 0.0], [
            -1.59395767e-14, 3.10388106e-10, -2.26855051e-06, 7.38507004e-03,
            -8.06953973e+00
        ],
                  False,
                  True,
                  lw_scaling=linewidth_f50_scaling_v1)
    }

    default = "v2"

    if not flim_model:
        flim_model = default

    model = models[flim_model]
    if verbose:
        print("Using flim model: {:s}".format(flim_model))

    # reuse the cached interpolator when the model matches, otherwise
    # (re)build it from the simulation completeness files
    if flim_model_cache.cached_model == flim_model and cache_sim_interp:
        sinterp = flim_model_cache.cached_sim_interp
    else:
        conf = HDRconfig()
        fdir = conf.flim_sim_completeness
        fdir = join(fdir, model.snfile_version)
        sinterp = SimulationInterpolator(fdir,
                                         model.dont_interp_to_zero,
                                         snmode=False,
                                         verbose=verbose)

    # save model in cache
    if cache_sim_interp:
        flim_model_cache.cached_model = flim_model
        flim_model_cache.cached_sim_interp = sinterp

    def f50_from_noise(noise, lambda_, sncut, linewidth=None):
        """
        Return the 50% completeness
        flux given noise and S/N cut.

        Parameters
        ----------
        noise : float
            the noise from the
            sensitivity cubes
        sncut : float
            the signal to noise
            cut to assume
        linewidth : float
            the linewidth in A,
            only used with supported
            models

        Returns
        -------
        f50s : array
           the fluxes at 50%
           completeness
        """
        # scalar sncut: direct comparison against the model's valid range.
        # If sncut is an array the truth test raises ValueError, so fall
        # back to elementwise checks against the generic 4.5-7.5 range.
        try:
            if sncut < model.snlow or sncut > model.snhigh:
                print("WARNING: model {:s} not calibrated for this S/N range".
                      format(flim_model))
        except ValueError:
            if any(sncut < 4.5) or any(sncut > 7.5):
                print("WARNING: model {:s} not calibrated for this S/N range".
                      format(flim_model))

        # values > 998 are sentinels for bad/masked pixels
        bad = noise > 998

        if model.wavepoly:
            noise = noise * polyval(model.wavepoly, lambda_)

        if linewidth is not None:
            if model.lw_scaling is not None:
                lw_scale = model.lw_scaling(linewidth, sncut)
            else:
                raise NoLineWidthModel(
                    "Linewidth dependence not available for this flim model")

            noise = noise * lw_scale

        snmult = polyval(model.snpoly, sncut)
        f50 = snmult * noise

        # keep bad values unscaled (scalar input lacks item assignment)
        try:
            f50[bad] = 999
        except TypeError:
            if bad:
                f50 = 999

        return f50

    return f50_from_noise, sinterp, model.interp_sigmas
Example #4
0
    def __init__(self,
                 datevshot,
                 release=None,
                 flim_model=None,
                 rad=3.5,
                 ffsky=False,
                 wavenpix=3,
                 d25scale=3.0,
                 verbose=False,
                 sclean_bad=True,
                 log_level="WARNING"):
        """Set up sensitivity computation for a single shot.

        Parameters
        ----------
        datevshot : str
            shot identifier of the form YYYYMMDDvOBS, e.g. "20190101v017"
        release : str, optional
            data release; defaults to the latest HDR release in the config
        flim_model : str, optional
            flux limit model name passed to return_flux_limit_model
        rad : float
            extraction radius (arcsec)
        ffsky : bool
            use full-frame sky subtraction
        wavenpix : int
            number of wavelength pixels for extraction
        d25scale : float
            galaxy mask scaling in units of D25
        verbose : bool
            deprecated; use log_level instead
        sclean_bad : bool
            flag bad values from sky cleaning
        log_level : str
            logging level name for the ShotSensitivity logger
        """
        import warnings

        self.conf = HDRconfig()
        self.extractor = Extract()
        self.shotid = int(datevshot.replace("v", ""))
        self.date = datevshot[:8]
        self.rad = rad
        self.ffsky = ffsky
        self.wavenpix = wavenpix
        self.sclean_bad = sclean_bad

        logger = logging.getLogger(name="ShotSensitivity")
        logger.setLevel(log_level)

        if verbose:
            # deprecated option: warn (don't raise, which would also make
            # the setLevel below unreachable) and honour the old behaviour
            warnings.warn(
                "Using verbose is deprecated, set log_level instead",
                DeprecationWarning)
            logger.setLevel("DEBUG")

        logger.info("shotid: {:d}".format(self.shotid))

        if not release:
            self.release = self.conf.LATEST_HDR_NAME
        else:
            self.release = release

        logger.info("Data release: {:s}".format(self.release))
        self.survey = Survey(survey=self.release)

        # Set up flux limit model
        self.f50_from_noise, self.sinterp, interp_sigmas \
                                       = return_flux_limit_model(flim_model,
                                                                 cache_sim_interp=False,
                                                                 verbose=verbose)

        # Generate astrometry for this shot
        survey_sel = (self.survey.shotid == self.shotid)
        self.shot_pa = self.survey.pa[survey_sel][0]
        self.shot_ra = self.survey.ra[survey_sel][0]
        self.shot_dec = self.survey.dec[survey_sel][0]
        rot = 360.0 - (self.shot_pa + 90.)
        self.tp = TangentPlane(self.shot_ra, self.shot_dec, rot)

        # Set up masking
        logger.info("Using d25scale {:f}".format(d25scale))
        self.setup_mask(d25scale)

        # Set up spectral extraction; use the resolved release so the hdr1
        # branch is also taken when release defaults to hdr1 via the config
        if self.release == "hdr1":
            fwhm = self.survey.fwhm_moffat[survey_sel][0]
        else:
            fwhm = self.survey.fwhm_virus[survey_sel][0]

        logger.info("Using Moffat PSF with FWHM {:f}".format(fwhm))
        self.moffat = self.extractor.moffat_psf(fwhm, 3. * rad, 0.25)
        self.extractor.load_shot(self.shotid, fibers=True, survey=self.release)

        # Set up the focal plane astrometry
        fplane_table = self.extractor.shoth5.root.Astrometry.fplane

        # Bit of a hack to avoid changing pyhetdex: FPlane wants a file,
        # so dump the table to a temporary fplane file and parse it back
        with NamedTemporaryFile(mode='w') as tpf:
            for row in fplane_table.iterrows():
                tpf.write(
                    "{:03d} {:8.5f} {:8.5f} {:03d} {:03d} {:03d} {:8.5f} {:8.5f}\n"
                    .format(row['ifuslot'], row['fpx'], row['fpy'],
                            row['specid'], row['specslot'], row['ifuid'],
                            row['ifurot'], row['platesc']))
            tpf.seek(0)
            self.fplane = FPlane(tpf.name)
def main(argv=None):
    """Create the HDF5 FiberIndex file for a survey.

    Either ingests each shot's FiberIndex table from per-shot H5 files
    (optionally restricted to a single month), or, with --merge, appends
    all mfi*h5 files in the current directory into one output file.
    """
    # Call initial parser from init_utils
    parser = ap.ArgumentParser(description="""Create HDF5 Astrometry file.""",
                               add_help=True)

    parser.add_argument(
        "-sdir",
        "--shotdir",
        help="""Directory for shot H5 files to ingest""",
        type=str,
        default="/scratch/03946/hetdex/hdr3/reduction/data",
    )

    parser.add_argument(
        "-sl",
        "--shotlist",
        help="""Text file of DATE OBS list""",
        type=str,
        default="hdr3.shotlist",
    )

    parser.add_argument(
        "-of",
        "--outfilename",
        type=str,
        help="""Relative or absolute path for output HDF5
                        file.""",
        default=None,
    )

    parser.add_argument("-survey", "--survey", type=str, default="hdr3")

    parser.add_argument(
        "-m",
        "--month",
        type=int,
        default=None,
        help="""Create FiberIndex for a single month""",
    )

    parser.add_argument(
        "--merge",
        "-merge",
        help="""Boolean trigger to merge all 2*.fits files in cwd""",
        default=False,
        required=False,
        action="store_true",
    )

    args = parser.parse_args(argv)
    args.log = setup_logging()

    fileh = tb.open_file(args.outfilename,
                         mode="w",
                         title=args.survey.upper() + " Fiber Index file ")

    shotlist = Table.read(args.shotlist,
                          format="ascii.no_header",
                          names=["date", "obs"])

    tableFibers = fileh.create_table(
        fileh.root,
        "FiberIndex",
        VIRUSFiberIndex,
        "Survey Fiber Coord Info",
        expectedrows=300000000,
    )

    # merge mode: concatenate previously built per-month files and exit
    if args.merge:
        files = glob.glob("mfi*h5")
        for file in files:
            args.log.info("Appending detect H5 file: %s" % file)
            fileh_i = tb.open_file(file, "r")
            tableFibers_i = fileh_i.root.FiberIndex.read()
            tableFibers.append(tableFibers_i)

        tableFibers.cols.healpix.create_csindex()
        tableFibers.cols.ra.create_csindex()
        tableFibers.cols.shotid.create_csindex()
        tableFibers.flush()
        fileh.close()
        args.log.info("Completed {}".format(args.outfilename))
        sys.exit()

    # set up HEALPIX options
    Nside = 2**15

    config = HDRconfig(survey=args.survey)

    badshot = np.loadtxt(config.badshot, dtype=int)

    if args.month is not None:
        args.log.info("Working on month {}".format(args.month))
        # if working on a single month downselect
        shotlist["month"] = np.array(shotlist["date"] / 100, dtype=int)
        sel_month = shotlist["month"] == args.month
        shotlist = shotlist[sel_month]

    for shotrow in shotlist:
        datevshot = str(shotrow["date"]) + "v" + str(shotrow["obs"]).zfill(3)
        shotid = int(str(shotrow["date"]) + str(shotrow["obs"]).zfill(3))

        date = shotrow["date"]

        try:
            args.log.info("Ingesting %s" % datevshot)
            file_obs = tb.open_file(op.join(args.shotdir, datevshot + ".h5"),
                                    "r")
            tableFibers_i = file_obs.root.Data.FiberIndex

            for row_i in tableFibers_i:

                row_main = tableFibers.row

                for col in tableFibers_i.colnames:
                    row_main[col] = row_i[col]

                fiberid = row_i["fiber_id"]

                try:
                    row_main["healpix"] = hp.ang2pix(Nside,
                                                     row_i["ra"],
                                                     row_i["dec"],
                                                     lonlat=True)
                except Exception:
                    # bad coordinates: store sentinel healpix value
                    row_main["healpix"] = 0

                row_main["shotid"] = shotid
                row_main["date"] = date
                row_main["datevobs"] = datevshot

                # parse IDs from fixed positions in the fiber_id string
                row_main["specid"] = fiberid[20:23]
                row_main["ifuslot"] = fiberid[24:27]
                row_main["ifuid"] = fiberid[28:31]
                row_main["amp"] = fiberid[32:34]
                row_main.append()

            file_obs.close()

        except Exception:
            # known-bad shots are expected to fail; only report the rest
            if shotid in badshot:
                pass
            else:
                args.log.error("could not ingest %s" % datevshot)

    tableFibers.cols.healpix.create_csindex()
    tableFibers.cols.ra.create_csindex()
    tableFibers.cols.shotid.create_csindex()
    tableFibers.flush()
    fileh.close()
    args.log.info("Completed {}".format(args.outfilename))
Example #6
0
                      "/work/03946/hetdex/hdr2.1/detect/image_db",
                      "/work/03261/polonius/hdr2.1/detect/image_db"
                      ],
                 }
#
# add paths from hetdex_api to search (place in first position)
#
for v in DICT_DB_PATHS.keys():
    try:
        # keys are 10x the release number: 20 -> "hdr2", 21 -> "hdr2.1"
        release_number = v/10.0
        if v % 10 == 0:
            release_string = "hdr{:d}".format(int(release_number))
        else:
            release_string = "hdr{:2.1f}".format(release_number)

        DICT_DB_PATHS[v].insert(0,op.join(HDRconfig(survey=release_string).elix_dir))
    except Exception:
        # release not configured in this hetdex_api install; skip it
        continue


def get_elixer_report_db_path(detectid,report_type="report"):
    """
    Return the top (first found) path to database file based on the detectid (assumes the HDR version is part of the
    prefix, i.e. HDR1 files are 1000*, HDR2 are 2000*, and so on)
    :param detectid:
    :param report_type: choose one of "report" (normal ELiXer report image) [default]
                               "nei" (ELiXer neighborhood image)
                               "mini" (ELiXer mini-report image for phone app)
    :return: None or database filename
    """
Example #7
0
def main(argv=None):
    '''Append calibrated fiber arrays from calfits files to a shot H5 file.'''
    # Call initial parser from init_utils
    parser = ap.ArgumentParser(description="""Create HDF5 file.""",
                               add_help=True)

    parser.add_argument("-d", "--date",
                        help='''Date, e.g., 20170321, YYYYMMDD''',
                        type=str, default=None)

    parser.add_argument("-o", "--observation",
                        help='''Observation number, "00000007" or "7"''',
                        type=str, default=None)

    parser.add_argument("-r", "--rootdir",
                        help='''Root Directory for Reductions''',
                        type=str, default='/data/00115/gebhardt/calfits/')

    parser.add_argument('-of', '--outfilename', type=str,
                        help='''Relative or absolute path for output HDF5                          
                        file.''', default=None)

    parser.add_argument("-survey", "--survey", help='''{hdr1, hdr2, hdr2.1}''',
                        type=str, default='hdr2.1')

    args = parser.parse_args(argv)
    args.log = setup_logging()

    calfiles = get_cal_files(args)

    datestr = '%sv%03d' % (args.date, int(args.observation))

    shotid = int(str(args.date) + str(args.observation).zfill(3))

    # check if shotid is in badlist; bad shots only warn on missing calfits
    config = HDRconfig(args.survey)
    badshots = np.loadtxt(config.badshot, dtype=int)

    badshotflag = False

    if shotid in badshots:
        badshotflag = True

    if len(calfiles) == 0:
        if badshotflag:
            args.log.warning("No calfits file to append for %s" % datestr)
        else:
            args.log.error("No calfits file to append for %s" % datestr)

        sys.exit('Exiting cal append script for %s' % datestr)

    if op.exists(args.outfilename):
        fileh = tb.open_file(args.outfilename, 'a')
    else:
        args.log.error('Problem opening : ' + args.outfilename)
        sys.exit('Exiting Script')

    args.log.info('Appending calibrated fiber arrays to ' + args.outfilename)

    fibtable = fileh.root.Data.Fibers

    for calfile in calfiles:

        # IFU identifier is at a fixed position in the calfile path
        multi  = calfile[49:60]
        try:
            cal_table = get_cal_table(calfile)
        except Exception:
            # log BEFORE continuing, otherwise the error is never reported
            args.log.error('Could not ingest calfile: %s' % calfile)
            continue

        args.log.info('Working on IFU ' + multi )
        for amp_i in ['LL','LU','RL','RU']:

            multiframe_i = 'multi_'+ multi + '_' + amp_i

            # pytables in-kernel query over the multiframe column
            for fibrow in fibtable.where('multiframe == multiframe_i'):

                # match on exposure number, multiframe and fiber index
                idx = (cal_table['expnum'] == fibrow['expnum']) * (cal_table['multiframe'] == fibrow['multiframe'].decode()) * (cal_table['fibidx'] == fibrow['fibidx'])

                if np.sum(idx) >= 1:
                    fibrow['calfib']  = cal_table['calfib'][idx]
                    fibrow['calfibe'] = cal_table['calfibe'][idx]
                    fibrow['calfib_counts'] = cal_table['calfib_counts'][idx]
                    fibrow['calfibe_counts'] = cal_table['calfibe_counts'][idx]
                    fibrow['spec_fullsky_sub'] = cal_table['spec_fullsky_sub'][idx]
                    fibrow.update()
                #else:
                   # args.log.warning("No fiber match for %s" % fibrow['fiber_id'])

    args.log.info('Flushing and closing H5 file')
    fibtable.flush()
    fileh.close()