Code example #1
File: plot_sfd.py Project: wkerzendorf/dustmaps
def main():
    w, h = (2056, 1024)
    l_0 = 0.

    # Create a grid of coordinates
    print('Creating grid of coordinates...')
    l = np.linspace(-180. + l_0, 180. + l_0, 2 * w)
    b = np.linspace(-90., 90., 2 * h + 2)
    b = b[1:-1]
    l, b = np.meshgrid(l, b)

    l += (np.random.random(l.shape) - 0.5) * 360. / (2. * w)
    b += (np.random.random(l.shape) - 0.5) * 180. / (2. * h)

    coords = SkyCoord(l * u.deg, b * u.deg, frame='galactic')

    # Set up SFD query object
    print('Loading SFD map...')
    sfd = SFDQuery()

    print('Querying map...')
    ebv = sfd.query(coords)

    # Convert the output array to a PIL image and save
    print('Saving image...')
    img = numpy2pil(ebv[::-1, ::-1], 0., 1.5)
    img = img.resize((w, h), resample=PIL.Image.LANCZOS)
    fname = 'sfd.png'
    img.save(fname)

    return 0
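Note: these listings are excerpts and omit their module headers. A minimal import block that the example above appears to assume might look like this (a sketch; numpy2pil is a helper defined elsewhere in plot_sfd.py that scales a float array into an 8-bit grayscale PIL image between the given bounds):

# Imports assumed by the excerpt above (a sketch; the original file may differ).
import numpy as np
import PIL.Image
import astropy.units as u
from astropy.coordinates import SkyCoord
from dustmaps.sfd import SFDQuery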
Code example #2
File: extinction.py Project: rudolffu/qsofitmore
def getebv(ra, dec, mapname='planck', mode='local'):
    """
    Query the dust map with "dustmaps" to get the line-of-sight
    E(B-V) value for a given object.

    Parameters
    ----------
        ra, dec : float
            Coordinates of the object in degrees (ICRS frame).
        mapname : str
            One of ['sfd', 'planck']. Other maps are
            available in "dustmaps" but not implemented here:
            ['bayestar', 'iphas', 'marshall', 'chen2014',
            'lenz2017', 'pg2010', 'leike_ensslin_2019', 'leike2020']
            Default: 'planck'.
        mode : str
            One of ['local', 'web']. Applicable only when
            mapname == 'sfd'. When 'local', query the local map
            on disk. When 'web', query the web server.
            Default: 'local'.
    """
    coord = SkyCoord(ra=ra * u.deg, dec=dec * u.deg, frame='icrs')
    if mapname.lower() == 'planck':
        from dustmaps.planck import PlanckQuery
        planck = PlanckQuery()
        ebv = planck(coord)
    elif mapname.lower() == 'sfd' and mode == 'local':
        from dustmaps.sfd import SFDQuery
        sfd = SFDQuery()
        ebv = sfd(coord)
    elif mapname.lower() == 'sfd' and mode == 'web':
        from dustmaps.sfd import SFDWebQuery
        sfd = SFDWebQuery()
        ebv = sfd(coord)
    return ebv
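The local-map branches above only work once the map data are on disk. A one-time setup sketch, assuming a writable data directory of your choice (the path below is an arbitrary example):

from dustmaps.config import config
config['data_dir'] = '/path/to/dustmaps_data'  # arbitrary example path

import dustmaps.sfd
import dustmaps.planck
dustmaps.sfd.fetch()     # one-time download of the SFD map data
dustmaps.planck.fetch()  # one-time download of the Planck map data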
Code example #3
File: reddening.py Project: jls713/gaia_dr2_spectro
    def __init__(self, with_extinction_maps=False):

        self.redd_maps = {}
        with open(dir_path + 'extinction/extinction_coeffs_2017.dat') as f:
            self.R_V_grid = np.fromstring(f.readline(),
                                          dtype=np.float64,
                                          sep=' ')
            for l in f.readlines():
                spl = l.split(" ", 1)
                self.redd_maps[spl[0]] = np.fromstring(spl[1],
                                                       dtype=np.float64,
                                                       sep=' ')

        self.interp_maps = {
            n: interp1d(self.R_V_grid, self.redd_maps[n])
            for n in self.redd_maps
        }

        self.R_G = np.zeros((75, len(self.R_V_grid)))
        with open(dir_path + 'extinction/extinction_coeffs_G_2017.dat') as f:
            self.logTeffgrid = np.fromstring(f.readline(),
                                             dtype=np.float64,
                                             sep=' ')
            for n, l in enumerate(f.readlines()):
                self.R_G[n] = np.fromstring(l, dtype=np.float64, sep=' ')

        self.interp_G_maps = interp2d(self.R_V_grid, self.logTeffgrid,
                                      self.R_G)

        if (with_extinction_maps):
            self.sfd = SFDQuery()
            self.bayestar = BayestarQuery(max_samples=10)
            self.marshall = MarshallQuery()
            self.drimmel = mwdust.Drimmel03(filter='Landolt V')
Code example #4
    def sbsextinction(self):

        #ra and dec variables set from class attributes for ease

        ra = self.ra
        dec = self.dec

        #astropy skycoord called, units and frame set

        coords = SkyCoord(ra, dec, unit='deg', frame='icrs')

        #sfd map chosen

        sfd = SFDQuery()

        #E(B-V) for each star loaded into array from map

        sfdred = sfd(coords)

        #corrections from table 6 of Schlafly and Finkbeiner(2011) made

        jext = sfdred * 0.709
        hext = sfdred * 0.449
        kext = sfdred * 0.302

        #extinction corrections carried out on class attributes

        self.jmag = self.jmag - jext
        self.hmag = self.hmag - hext
        self.kmag = self.kmag - kext

        print('UKIRT Extinction corrections done')

    #method to carry out star by star extinction on object using SFD dustmap. Takes object target as input
Code example #5
File: data_handling.py Project: gilettejr/Masters
    def sbsextinction_on_frame(self, target):

        #attributes set as variables for ease

        ra = target.ra
        dec = target.dec

        #astropy skycoord called, units and frame set

        coords = SkyCoord(ra, dec, unit='deg', frame='icrs')

        #sfd map chosen

        sfd = SFDQuery()

        #E(B-V) for each star loaded into array from map

        sfdred = sfd(coords)

        #corrections from table 6 of Schlafly and Finkbeiner(2011) made

        jext = sfdred * 0.709
        hext = sfdred * 0.449
        kext = sfdred * 0.302

        #extinction corrections carried out on class attributes

        target.jmag = target.jmag - jext
        target.hmag = target.hmag - hext
        target.kmag = target.kmag - kext
Code example #6
def dust(l, b, distance, plot=False, max_samples=2, mode='median', model='bayes'):
    if model == 'sfd':
        c = SkyCoord(l, b,
                frame='galactic')
        sfd = SFDQuery()
        dust = sfd(c)

    if model == 'bayes':
        c = SkyCoord(l, b,
                distance = distance,
                frame='galactic')
        bayes = BayestarQuery(max_samples=max_samples)
        dust = bayes(c, mode=mode)
    if model == 'iphas':
        c = SkyCoord(l, b,
                distance = distance,
                frame='galactic')
        iphas = IPHASQuery()
        dust = iphas(c, mode=mode)

    if model == 'marshall':
        c = SkyCoord(l, b,
                distance = distance,
                frame='galactic')
        marshall = MarshallQuery()
        dust = marshall(c)

    if model == 'chen':
        c = SkyCoord(l, b,
                distance = distance,
                frame='galactic')
        chen = Chen2014Query()
        dust = chen(c)
    #cNoDist = SkyCoord(l, b,
    #        frame='galactic')
    #bayesDustNoDist = bayes(cNoDist, mode=mode)

    #!!!!! Do something else than setting it equal to 0 !!!!!
    #if len(bayesDust) > 1: bayesDust[np.isnan(bayesDust)] = 0.0

    if plot:
        # NOTE: the plotting block in the source referenced variables that are
        # never defined in this function (bayesDustNoDist, bayesDust,
        # magsMatched, B_RedCoeff); this version plots the reddening that was
        # actually computed above.
        fig, ax = plt.subplots(figsize=(5, 2.5))
        vals = np.atleast_1d(dust)
        ax.hist(np.log10(vals[vals > 0]), bins=100, log=True, histtype='step')
        ax.set_xlabel('%s dust attenuation' % model)
        ax.set_xlim(-4, 0.0)
        plt.tight_layout()
    return dust
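A usage sketch for the function above: l and b must carry astropy angular units, and the 3-D maps (Bayestar, IPHAS, Marshall, Chen 2014) additionally need a distance and coordinates inside their footprints. The coordinate values here are arbitrary examples:

import astropy.units as u

# 2-D query against SFD (the distance argument is ignored by that branch)
ebv_2d = dust(90. * u.deg, 10. * u.deg, distance=None, model='sfd')

# 3-D query against Bayestar at 1 kpc (keep coordinates inside its footprint)
ebv_3d = dust(90. * u.deg, 10. * u.deg, distance=1. * u.kpc,
              model='bayes', mode='median')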
Code example #7
def getDustMap(nside=64):
    """
    Method to estimate the dust map 

    Parameters
    ---------------
    nside: int, opt
      healpix nside parameters

    Returns
    -----------
    dustmap: pandas df
    with the following columns: healpixID, RA, Dec, ebvofMW

    """

    # get the total number of pixels
    npixels = hp.nside2npix(nside)

    # pixels = hp.get_all_neighbours(nside, 0.0, 0.0, nest=True, lonlat=Tru)

    # get the (RA, Dec) of the pixel centers
    vec = hp.pix2ang(nside, range(npixels), nest=True, lonlat=True)

    dustmap = pd.DataFrame(range(npixels), columns=['healpixID'])
    dustmap['RA'] = vec[0]
    dustmap['Dec'] = vec[1]

    coords = SkyCoord(vec[0], vec[1], unit='deg')
    try:
        sfd = SFDQuery()
    except Exception as err:
        from dustmaps.config import config
        config['data_dir'] = 'dustmaps'
        import dustmaps.sfd
        dustmaps.sfd.fetch()
    sfd = SFDQuery()
    ebvofMW = sfd(coords)

    dustmap['ebvofMW'] = ebvofMW
    dustmap['npixels'] = npixels

    return dustmap
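A usage sketch, assuming healpy, pandas and dustmaps are installed and the SFD data have already been fetched:

dustmap = getDustMap(nside=64)
print(dustmap[['healpixID', 'RA', 'Dec', 'ebvofMW']].head())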
Code example #8
    def generateLightcurve(self,
                           p_c,
                           p_o,
                           partial_QC_df,
                           QC_columns,
                           do_extinction=False):

        cyan_partial_QC_df = partial_QC_df.query('%s == "c"' %
                                                 QC_columns['qc_filters'])
        orange_partial_QC_df = partial_QC_df.query('%s == "o"' %
                                                   QC_columns['qc_filters'])

        phase_c = cyan_partial_QC_df[QC_columns['qc_time']] - self.expl_epoch
        phase_o = orange_partial_QC_df[QC_columns['qc_time']] - self.expl_epoch

        mag_c = abs2app(p_c(phase_c), self.redshift)
        mag_o = abs2app(p_o(phase_o), self.redshift)

        if do_extinction:

            dustmaps_path = os.path.dirname(os.path.abspath(dustmaps.__file__))
            # 			print(dustmaps_path)

            if not os.path.exists(os.path.join(dustmaps_path, "DustMaps/")):
                config["data_dir"] = "{pathToSource}/DustMaps/".format(
                    pathToSource=dustmaps_path)
                #Downloading dustmap for galactic extinction from Schlafly & Finkbeiner (SFD; 2011)
                dustmaps.sfd.fetch()
            else:
                config["data_dir"] = "{pathToSource}/DustMaps/".format(
                    pathToSource=dustmaps_path)

            #Setting the extraction query for the SFD Dustmap
            coords = SkyCoord(self.ra * u.deg, self.dec * u.deg, frame='icrs')
            extinction_map = SFDQuery()
            ebv = extinction_map(coords)

            # Schlafly & Finkbeiner (2011) A/E(B-V) coefficients for the
            # Pan-STARRS1 g, r, i bands (their Table 6, R_V = 3.1)
            A_g = ebv * 3.172
            A_r = ebv * 2.271
            A_i = ebv * 1.682

            A_c = (A_g + A_r) / 2.
            A_o = (A_r + A_i) / 2.

        else:

            A_c = 0.0
            A_o = 0.0

        self.timeline_c = phase_c + self.expl_epoch
        self.timeline_o = phase_o + self.expl_epoch
        self.mag_c = mag_c + A_c
        self.mag_o = mag_o + A_o
        self.extinction_c = A_c
        self.extinction_o = A_o
Code example #9
def get_dustmaps_dust(ra, dec, web=True, **kwargs):
    "Use https://github.com/gregreen/dustmaps"
    
    from dustmaps.sfd import SFDQuery, SFDWebQuery
    from astropy.coordinates import SkyCoord
    
    coords = SkyCoord(ra, dec, unit='deg', frame='icrs')
    
    if web:
        sfd = SFDWebQuery()
    else:
        sfd = SFDQuery()
        
    ebv = sfd(coords)
    return ebv
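A usage sketch; the inputs may be scalars or arrays of degrees. The web query needs network access, while web=False needs the SFD data downloaded beforehand (the coordinates below are arbitrary examples):

ebv = get_dustmaps_dust([10.68, 56.75], [41.27, 24.12], web=True)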
Code example #10
File: catalog.py Project: conornally/Messier33
 def extinction_correct(self, overwrite=False):
     """
     INPUT:  overwrite (True) filter mag column with corrected magnitudes
     FUNC:   using SFDQuery finds E(B-V) for each ra,dec in catalog and 
             corrects the filter magnitudes using this value
     """
     Messier33.info("*Correcting Dust Extinction\n")
     coords = SkyCoord(self['ra'], self['dec'], unit=u.deg)
     ebv = SFDQuery()(coords)
     for band in self.bands:
         Messier33.info(
             "**R_%s=%.4f (%s)\n" %
             (band, self.config.Rv3_1[band], self.config.Rv3_1['ref']))
         o = self[band] - (self.config.Rv3_1[band] * ebv)
         self.append(o, key="%so" % band)
         self.indices["d%so" % band] = self.indices["d%s" % band]
         if (overwrite): self.delete(band)
     self.history.append("Extinction Correction in bands %s" % self.bands)
Code example #11
def main():
    c0 = SkyCoord.from_name('orion a', frame='galactic')
    print(c0)

    # l = np.arange(c0.l.deg - 5., c0.l.deg + 5., 0.05)
    # b = np.arange(c0.b.deg - 5., c0.b.deg + 5., 0.05)

    l0, b0 = (37., -16.)
    l = np.arange(l0 - 5., l0 + 5., 0.05)
    b = np.arange(b0 - 5., b0 + 5., 0.05)
    l, b = np.meshgrid(l, b)
    coords = SkyCoord(l * units.deg,
                      b * units.deg,
                      distance=1. * units.kpc,
                      frame='galactic')

    sfd = SFDQuery()
    Av_sfd = 2.742 * sfd(coords)

    planck = PlanckQuery()
    Av_planck = 3.1 * planck(coords)

    bayestar = BayestarQuery(max_samples=1)
    Av_bayestar = 2.742 * bayestar(coords)

    fig = plt.figure(figsize=(12, 4), dpi=150)

    for k, (Av, title) in enumerate([(Av_sfd, 'SFD'), (Av_planck, 'Planck'),
                                     (Av_bayestar, 'Bayestar')]):
        ax = fig.add_subplot(1, 3, k + 1)
        ax.imshow(np.sqrt(Av)[::, ::-1],
                  vmin=0.,
                  vmax=2.,
                  origin='lower',
                  interpolation='nearest',
                  cmap='binary',
                  aspect='equal')
        ax.axis('off')
        ax.set_title(title)

    fig.subplots_adjust(wspace=0., hspace=0.)
    plt.savefig('comparison.png', dpi=150)

    return 0
Code example #12
File: dust.py Project: astrocatalogs/faststars
def do_dust(catalog):
    task_str = catalog.get_current_task_str()

    # Set preferred names, calculate some columns based on imported data,
    # sanitize some fields
    keys = list(catalog.entries.keys())

    check_dustmaps(catalog.get_current_task_repo())

    sfd = SFDQuery()

    for oname in pbar(keys, task_str):
        # Some events may be merged in cleanup process, skip them if
        # non-existent.
        try:
            name = catalog.add_entry(oname)
        except Exception:
            catalog.log.warning(
                '"{}" was not found, suggests merge occurred in cleanup '
                'process.'.format(oname))
            continue

        if (FASTSTARS.RA not in catalog.entries[name]
                or FASTSTARS.DEC not in catalog.entries[name]):
            continue
        else:
            Mname = name
            Mradec = str(
                catalog.entries[name][FASTSTARS.RA][0]['value']) + str(
                    catalog.entries[name][FASTSTARS.DEC][0]['value'])
            Mdist = '-1'
            c = coord(Mradec, unit=(un.hourangle, un.deg), frame='icrs')
            reddening = sfd(c)
            source = catalog.entries[name].add_source(
                bibcode='1998ApJ...500..525S')
            catalog.entries[name].add_quantity(FASTSTARS.EBV,
                                               str(reddening),
                                               source,
                                               upperlimit=True,
                                               derived=True)

    catalog.journal_entries()
    return
Code example #13
def getEBV_effect(rightAscension, declination):
    '''
    Generates E(B-V), i.e. the magnitude of the extinction effect, for the
    given sky coordinates.

    Parameters
    ----------
    rightAscension: angular distance of a particular point measured eastward along the equator, in degrees
    declination: angular distance of a point north or south of the celestial equator, in degrees

    source: https://en.wikipedia.org/wiki/International_Celestial_Reference_System
    '''
    rt_As = rightAscension * au.degree
    dec = declination * au.degree
    sk_loc = SkyCoord(rt_As, dec, frame='icrs')
    sfd = SFDQuery()
    # sfd() returns reddening in a unit that is similar to magnitudes of E(B-V).
    ebv = sfd(sk_loc)
    return ebv
Code example #14
def deredden_spectra(wave, coords):
    """
    Apply S&F 2011 Extinction from SFD Map
    https://iopscience.iop.org/article/10.1088/0004-637X/737/2/103#apj398709t6

    Paramters
    ---------
    wave      array
        wavelength to apply correction
    coords    SkyCoord object
        sky coordinates
    """
    Rv = 3.1
    corr_SF2011 = 2.742  # Landolt V
    
    sfd = SFDQuery()
    ebv = sfd(coords)
    Av = corr_SF2011 * ebv
    ext = extinction.fitzpatrick99(np.array(wave, dtype=np.double), Av, Rv)

    deredden = 10**(0.4*np.array(ext))

    return deredden
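Since the function returns a multiplicative correction factor, dereddening a spectrum is a simple product. A sketch with arbitrary example coordinates (flux_obs stands in for a hypothetical observed-flux array on the same wavelength grid):

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord

wave = np.linspace(3500., 5500., 1036)  # wavelength grid in Angstroms
coords = SkyCoord(150.0 * u.deg, 2.2 * u.deg, frame='icrs')

corr = deredden_spectra(wave, coords)   # one correction factor per wavelength
# flux_dered = flux_obs * corr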
Code example #15
    def ext_corr(self):

        galaxy_data = self.galaxy_data

        # attributes set as variables for ease

        ra = galaxy_data.RA
        dec = galaxy_data.DEC

        # astropy skycoord called, units and frame set

        coords = SkyCoord(ra, dec, unit='deg')

        # sfd map chosen

        sfd = SFDQuery()

        # E(B-V) for each star loaded into array from map

        sfdred = sfd(coords)
        # print(sfdred)
        # corrections from table 6 of Schlafly and Finkbeiner(2011) made

        jext = sfdred * 0.709
        hext = sfdred * 0.449
        kext = sfdred * 0.302

        # extinction corrections carried out on class attributes

        galaxy_data.jmag = galaxy_data.jmag - jext

        galaxy_data.hmag = galaxy_data.hmag - hext
        galaxy_data.kmag = galaxy_data.kmag - kext

        self.galaxy_data = galaxy_data

        print('UKIRT extinction corrections done')
Code example #16
def extinction_corr(catalog, bands):

    # Extinction coefficients for HSC filters for conversion from E(B-V) to extinction, A_filter.
    # Numbers provided by Masayuki Tanaka (NAOJ).
    #
    # Band, A_filter/E(B-V)
    extinctionCoeffs_HSC = {
        "g": 3.240,
        "r": 2.276,
        "i": 1.633,
        "z": 1.263,
        "y": 1.075,
        "HSC-G": 3.240,
        "HSC-R": 2.276,
        "HSC-I": 1.633,
        "HSC-Z": 1.263,
        "HSC-Y": 1.075,
        "NB0387": 4.007,
        "NB0816": 1.458,
        "NB0921": 1.187,
    }

    bands = list(bands)
    sfd = SFDQuery()
    coord_string_ra = 'coord_ra_'+str(bands[0])
    coord_string_dec = 'coord_dec_'+str(bands[0])
    coords = SkyCoord(catalog[coord_string_ra], catalog[coord_string_dec])
    ebvValues = sfd(coords)
    extinction_dict = {'E(B-V)': ebvValues}

    # Create a dict with the extinction values for each band (and E(B-V), too):
    for band in bands:
        coeff_name = 'A_'+str(band)
        extinction_dict[coeff_name] = ebvValues*extinctionCoeffs_HSC[band]

    return extinction_dict
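A usage sketch with a hypothetical two-row catalog; the function only needs coord_ra_<band> / coord_dec_<band> columns with angular units for the first band in the list:

import astropy.units as u
from astropy.table import QTable

catalog = QTable({'coord_ra_g': [150.1, 150.2] * u.deg,
                  'coord_dec_g': [2.1, 2.3] * u.deg})
ext = extinction_corr(catalog, bands=['g'])
print(ext['E(B-V)'], ext['A_g'])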
Code example #17
def test_sdss_halpha(fname,
                     fluxkey,
                     magkey,
                     magerrprefix,
                     bands,
                     scatter_density=True,
                     zkey="z",
                     xmin=-17,
                     xmax=-13,
                     output=None,
                     bands_ebv=None,
                     wline=6562.8 * u.AA):
    bands_ebv = ["G", "I"] if bands_ebv is None else bands_ebv
    ############################################################################
    # Reading transmission curves for S-PLUS
    filters_dir = os.path.join(os.getcwd(), "filter_curves-master")
    filenames = sorted([os.path.join(filters_dir, _) for _ in os.listdir( \
                        filters_dir)])
    filternames = [os.path.split(_)[1].replace(".dat", "") for _ in filenames]
    filternames = [
        _.replace("F0", "F").replace("JAVA", "") for _ in filternames
    ]
    filternames = [_.replace("SDSS", "").upper() for _ in filternames]
    fcurves = [np.loadtxt(f) for f in filenames]
    wcurves = [curve[:, 0] * u.AA for curve in fcurves]
    trans = [curve[:, 1] for curve in fcurves]
    wcurves = dict(zip(filternames, wcurves))
    trans = dict(zip(filternames, trans))
    halpha = EmLine3Filters(wline, [_.upper() for _ in bands], wcurves, trans)
    ############################################################################
    # Reading catalog and remove objects out of z-range
    wdir = os.path.join(context.home_dir, "catalogs")
    filename = os.path.join(wdir, fname)
    table = Table.read(filename)
    if zkey in table.colnames:
        table = table[table[zkey] < 0.02]
    ############################################################################
    # Cleaning table against large errors in magnitudes
    magerrs = np.array([
        table["{}{}{}".format(magerrprefix, band, magkey)].data
        for band in bands
    ])
    idx = np.where(np.nanmax(magerrs, axis=0) < 0.2)[0]
    table = table[idx]
    ############################################################################
    # Estimating the extinction
    gi = np.array(
        [table["{}{}".format(band, magkey)].data for band in bands_ebv])
    g_i = gi[0] - gi[1]
    ebvs = np.clip(0.206 * np.power(g_i, 1.68) - 0.0457, 0, np.inf)
    Rv = 4.1
    Avs = Rv * ebvs
    corr = np.array([
        extinction.remove(
            extinction.calzetti00(np.atleast_1d(wline), Av, Rv)[0], 1)
        for Av in Avs
    ])
    ############################################################################
    # Correcting observations for Galactic extinction
    coords = SkyCoord(table["ra"] * u.degree, table["dec"] * u.degree)
    sfd = SFDQuery()
    ebv_mw = sfd(coords)
    Rv_mw = 3.1
    Av_mw = Rv_mw * ebv_mw
    corr_mw = np.array([
        extinction.remove(
            extinction.ccm89(halpha.wpiv, Av, Rv_mw)[0], np.ones(3))
        for Av in Av_mw
    ]).T
    ############################################################################
    # Calculating H-alpha
    mags = np.array(
        [table["{}{}".format(band, magkey)].data for band in bands])
    flam = mag2flux(mags, halpha.wpiv) * corr_mw
    flux_halpha_nii = halpha.flux_3F(flam)
    log_halpha = np.where(g_i <= 0.5,
                          0.989 * np.log10(flux_halpha_nii.value) - 0.193,
                          0.954 * np.log10(flux_halpha_nii.value) - 0.753)
    halpha_sdss = np.log10((table[fluxkey]) * np.power(10., -17))
    halpha_splus = log_halpha + np.log10(corr)
    ############################################################################
    # Selecting only regions within limits of flux
    idx = np.where(
        np.isfinite(halpha_sdss * halpha_splus) & (halpha_sdss > xmin)
        & (halpha_splus < xmax))
    halpha_sdss = halpha_sdss[idx]
    halpha_splus = halpha_splus[idx]
    ############################################################################
    fig = plt.figure()
    if scatter_density:
        ax = fig.add_subplot(1, 1, 1, projection='scatter_density')
        density = ax.scatter_density(halpha_sdss, halpha_splus, cmap="magma_r")
        fig.colorbar(density, label='Number of points per pixel')

    else:
        ax = fig.add_subplot(1, 1, 1)
        label = "mag_key={}".format(magkey)
        counts, xedges, yedges, im = ax.hist2d(halpha_sdss,
                                               halpha_splus,
                                               bins=(50, 50),
                                               cmap="bone_r")
        plt.legend(title=label)
        fig.colorbar(im, ax=ax)
    ax.set_aspect("equal")
    plt.xlim(xmin, xmax)
    plt.ylim(xmin, xmax)
    plt.plot(np.linspace(xmin, xmax, 100), np.linspace(xmin, xmax, 100), "--r")
    plt.xlabel(r"$\log$ H$\alpha$ (erg / s / cm$^2$) -- SDSS")
    plt.ylabel(r"$\log$ H$\alpha$ (erg / s / cm$^2$) -- SPLUS")
    axins = inset_axes(ax,
                       width="80%",
                       height="80%",
                       bbox_to_anchor=(.65, .05, .38, .38),
                       bbox_transform=ax.transAxes,
                       loc=3)
    diff = halpha_splus - halpha_sdss
    median = np.nanmedian(diff)
    std = np.nanstd(diff)
    title = "med: {:.2f} std: {:.2f}".format(median, std)
    axins.hist(diff, bins=20)
    axins.set_title(title)
    plt.tight_layout()
    if "output" is not None:
        plt.savefig(
            output,
            dpi=250,
        )
    plt.show()
Code example #18
# NOTE: this excerpt begins mid-statement; the opening of the `with` block
# (the file-open call and its path) is not shown in the source listing.
        'r') as f:
    catalogue = np.array(f['a'])
z = catalogue[0]
ra = catalogue[1]
dec = catalogue[2]
d_file = '/mnt/ddnfs/data_users/cxkttwl/ICL/wget_data/'
#d_file = '/mnt/ddnfs/data_users/cxkttwl/ICL/data/sky_sub_img/' # add sky information

load = '/mnt/ddnfs/data_users/cxkttwl/ICL/data/'
band = ['r', 'g', 'i', 'u', 'z']
l_wave = np.array([6166, 4686, 7480, 3551, 8932])
mag_add = np.array([0, 0, 0, -0.04, 0.02])
zopt = np.array([22.5, 22.5, 22.5, 22.46, 22.52])
sb_lim = np.array([24.5, 25, 24, 24.35, 22.9])
Rv = 3.1
sfd = SFDQuery()


def mask_A(band_id, z_set, ra_set, dec_set):

    kk = int(band_id)  # np.int is removed in recent NumPy versions
    Nz = len(z_set)
    param_A = 'default_mask_A.sex'
    out_cat = 'default_mask_A.param'
    out_load_A = '/mnt/ddnfs/data_users/cxkttwl/PC/A_mask_%d_cpus.cat' % rank
    ## size test
    r_res = 1.  # 2.8 for larger R setting
    for q in range(Nz):
        z_g = z_set[q]
        ra_g = ra_set[q]
        dec_g = dec_set[q]
Code example #19
def get_MW_EBV(args):
    '''
    This function reads the input file to obtain the coordinates for
    the source objects. It is called only when ISM_correct_coords is not None,
    since the only thing that depends on coordinates is the Milky Way dust
    correction.

    The columns for coordinates should be named "C1" and "C2" if the sources
    are not in the Skelton Catalog. For sources in Skelton, coordinates are
    optional (since they will be automatically read from the Skelton catalog).

    Note: The program will assume degrees; this can be manually
    changed here if the user wishes.

    Parameters
    ----------
    args : class
        The args class is carried from function to function with information
        from command line input and config.py

    Returns
    -------
    E(B-V) for Milky Way : 1D array
    '''
    F = Table.read(args.filename, format='ascii')
    Fcols = F.colnames
    nobj = len(F['Field'])
    try:
        C1 = F['C1']
        C2 = F['C2']
    except KeyError:
        # Skelton catalogs
        fields = ['aegis', 'cosmos', 'goodsn', 'goodss', 'uds']
        name_base = '_3dhst.v4.1.cat.FITS'
        if F['Field'][0].lower() in fields:
            for fd in F['Field']:
                #Make sure the input isn't a mix of Skelton and non-Skelton
                assert fd.lower() in fields, "%s not in Skelton" % (fd)
            C1, C2 = np.zeros(nobj), np.zeros(nobj)
            field_dict = {}
            for field in fields:
                field_dict[field] = fits.open(
                    op.join('3dhst_catalogs', field + name_base))[1]
            for i, datum in enumerate(F):
                # assumes first element is field name and second is ID
                loc = datum[0].lower()
                C1[i] = field_dict[loc].data['ra'][int(datum[1]) - 1]
                C2[i] = field_dict[loc].data['dec'][int(datum[1]) - 1]
            args.ISM_correct_coords = 'FK5'  #Skelton coordinates are RA and Dec
        else:
            print("No coordinates given and no match to Skelton Catalog")
            return np.array([np.nan] * nobj)

    from dustmaps.sfd import SFDQuery
    from astropy.coordinates import SkyCoord
    coords = SkyCoord(C1,
                      C2,
                      unit='deg',
                      frame=args.ISM_correct_coords.lower())
    sfd = SFDQuery()
    ebv = sfd(coords)
    return ebv
Code example #20
    def __init__(self,
                 starname,
                 ra,
                 dec,
                 g_id=None,
                 plx=None,
                 plx_e=None,
                 rad=None,
                 rad_e=None,
                 temp=None,
                 temp_e=None,
                 lum=None,
                 lum_e=None,
                 dist=None,
                 dist_e=None,
                 Av=None,
                 offline=False,
                 mag_dict=None,
                 verbose=True,
                 ignore=None):
        """See class docstring."""
        # MISC
        self.verbose = verbose
        self.offline = offline

        # Star stuff
        self.starname = starname
        self.ra_dec_to_deg(ra, dec)

        c = random.choice(self.colors)

        display_star_init(self, c)

        if verbose:
            if plx is not None:
                StarWarning('Parallax', 0).warn()
            if rad is not None:
                StarWarning('Radius', 0).warn()
            if temp is not None:
                StarWarning('Temperature', 0).warn()
            if lum is not None:
                StarWarning('Luminosity', 0).warn()
            if mag_dict is not None:
                StarWarning('Magnitudes', 0).warn()

        self.get_plx = True if plx is None else False
        self.get_dist = True if dist is None and plx is None else False
        self.get_rad = True if rad is None else False
        self.get_temp = True if temp is None else False
        self.get_lum = True if lum is None else False
        self.get_mags = True if mag_dict is None else False
        self.get_logg = False  # This is set to True after self.estimate_logg

        self.g_id = g_id

        # Lookup archival magnitudes, radius, temperature, luminosity
        # and parallax
        lookup = self.get_rad + self.get_temp + self.get_plx \
            + self.get_mags + self.get_dist

        if lookup:
            if not offline:
                if verbose:
                    print(
                        colored('\t\t*** LOOKING UP ARCHIVAL INFORMATION ***',
                                c))
                lib = Librarian(starname,
                                self.ra,
                                self.dec,
                                g_id=self.g_id,
                                mags=self.get_mags,
                                ignore=ignore)
                self.g_id = lib.g_id
                self.tic = lib.tic
                self.kic = lib.kic
            else:
                print(colored('\t\t*** ARCHIVAL LOOKUP OVERRIDDEN ***', c))
                if self.get_mags:
                    StarWarning('', 1).__raise__()
                lib = None
                self.tic = False
                self.kic = False

            # [plx, plx_e, dist, dist_e, rad, rad_e, temp, temp_e, lum, lum_e]
            libouts = extract_from_lib(lib)

            if self.get_plx:
                self.plx = libouts[0]
                self.plx_e = libouts[1]
            else:
                self.plx = plx
                self.plx_e = plx_e

            if self.get_dist:
                self.dist = libouts[2]
                self.dist_e = libouts[3]
            elif dist is not None:
                self.dist = dist
                self.dist_e = dist_e
            else:
                self.calculate_distance()

            if self.get_rad:
                self.rad = libouts[4]
                self.rad_e = libouts[5]
            else:
                self.rad = rad
                self.rad_e = rad_e

            if self.get_temp:
                self.temp = libouts[6]
                self.temp_e = libouts[7]
            else:
                self.temp = temp
                self.temp_e = temp_e

            if self.get_lum:
                self.lum = libouts[8]
                self.lum_e = libouts[9]
            else:
                self.lum = lum
                self.lum_e = lum_e

        if self.get_mags:
            self.used_filters = lib.used_filters
            self.mags = lib.mags
            self.mag_errs = lib.mag_errs
        else:
            filters = []
            self.used_filters = np.zeros(self.filter_names.shape[0])
            self.mags = np.zeros(self.filter_names.shape[0])
            self.mag_errs = np.zeros(self.filter_names.shape[0])
            for k in mag_dict.keys():
                filt_idx = np.where(k == self.filter_names)[0]
                self.used_filters[filt_idx] = 1
                self.mags[filt_idx] = mag_dict[k][0]
                self.mag_errs[filt_idx] = mag_dict[k][1]

                filters.append(k)
        self.filter_mask = np.where(self.used_filters == 1)[0]

        # Get max Av
        if Av is None:
            sfd = SFDQuery()
            coords = SkyCoord(self.ra,
                              self.dec,
                              unit=(u.deg, u.deg),
                              frame='icrs')
            ebv = sfd(coords)
            self.Av = ebv * 2.742
        else:
            self.Av = Av
        # Get the wavelength and fluxes of the retrieved magnitudes.
        wave, flux, flux_er, bandpass = extract_info(
            self.mags[self.filter_mask], self.mag_errs[self.filter_mask],
            self.filter_names[self.filter_mask])

        self.wave = np.zeros(self.filter_names.shape[0])
        self.flux = np.zeros(self.filter_names.shape[0])
        self.flux_er = np.zeros(self.filter_names.shape[0])
        self.bandpass = np.zeros(self.filter_names.shape[0])

        for k in wave.keys():
            filt_idx = np.where(k == self.filter_names)[0]
            self.wave[filt_idx] = wave[k]
            self.flux[filt_idx] = flux[k]
            self.flux_er[filt_idx] = flux_er[k]
            self.bandpass[filt_idx] = bandpass[k]

        rel_er = self.flux_er[self.filter_mask] / self.flux[self.filter_mask]
        mx_rel_er = rel_er.max() + 0.1
        upper = self.flux_er[self.filter_mask] == 0
        flx = self.flux[self.filter_mask][upper]
        for i, f in zip(self.filter_mask[upper], flx):
            self.flux_er[i] = mx_rel_er * f

        # self.calculate_distance()
        c = random.choice(self.colors)
        display_star_fin(self, c)
        c = random.choice(self.colors)
        self.print_mags(c)
Code example #21
    def __call__(self, obs, display=False, time_display=0):
        """ Simulation of the light curve
        We use multiprocessing (one band per process) to increase speed

        Parameters
        ----------
        obs: array
         array of observations
        display: bool, opt
         to display LC as they are generated (default: False)
        time_display: float, opt
         time persistency of the displayed window (default: 0 sec)

        Returns
        -------
        astropy table with:
        columns: band, flux, fluxerr, snr_m5, flux_e, zp, zpsys, time
        metadata: SNID, RA, Dec, DayMax, X1, Color, z
        """

        RA = np.mean(obs[self.RACol])
        Dec = np.mean(obs[self.DecCol])
        pixRA = np.mean(obs['pixRA'])
        pixDec = np.mean(obs['pixDec'])
        pixID = np.unique(obs['healpixID']).item()
        dL = -1

        # get ebvofMW from dust maps
        ebvofMW = self.sn_parameters['ebvofMW']
        if ebvofMW < 0.:
            # in that case ebvofMW value is taken from a map
            coords = SkyCoord(pixRA, pixDec, unit='deg')
            try:
                sfd = SFDQuery()
            except Exception as err:
                from dustmaps.config import config
                config['data_dir'] = 'dustmaps'
                import dustmaps.sfd
                dustmaps.sfd.fetch()

            sfd = SFDQuery()
            ebvofMW = sfd(coords)

        # start timer
        ti = SNTimer(time.time())
        # Are there observations with the filters?
        goodFilters = np.in1d(obs[self.filterCol],
                              np.array([b for b in 'grizy']))

        if len(obs[goodFilters]) == 0:
            # NOTE: `season` is not defined in this excerpt
            return [self.nosim(RA, Dec, pixRA, pixDec, pixID, season, ti, -1)]

        tab_tot = self.lcFast(obs, ebvofMW, self.gen_parameters)
        """
        # apply dust correction here
        tab_tot = self.dust_corrections(tab_tot, pixRA, pixDec)
        """
        ptime = ti.finish(time.time())['ptime'].item()
        self.premeta.update(
            dict(
                zip([
                    'RA', 'Dec', 'pixRA', 'pixDec', 'healpixID', 'dL', 'ptime',
                    'status'
                ], [RA, Dec, pixRA, pixDec, pixID, dL, ptime, 1])))
        """
        ii = tab_tot['band'] == 'LSST::z'

        sel = tab_tot[ii]
        for io, row in sel.iterrows():
            print(row[['phase', 'z', 'band', 'flux',
                       'old_flux', 'fluxerr', 'old_fluxerr']].values)
        """
        list_tables = self.transform(tab_tot)

        # if the user chooses to display the results...
        if display:
            for table_lc in list_tables:
                self.plotLC(
                    table_lc['time', 'band', 'flux', 'fluxerr', 'zp', 'zpsys'],
                    time_display)

        return list_tables
Code example #22
    def __init__(
        self,
        survey=LATEST_HDR_NAME,
        catalog_type="lines",
        curated_version=None,
        loadtable=True,
    ):
        """
        Initialize the detection catalog class for a given data release

        Input
        -----
        survey : string
            Data release you would like to load, i.e., 'hdr2','HDR1'
            This is case insensitive.
        catalog_type : string
            Catalog to load up. Either 'lines', 'continuum' or 'broad'.
            Default is 'lines'.
        loadtable : bool
           Boolean flag to load all detection table info upon initialization.
           For example, if you just want to grab a spectrum this isn't needed.
        
        """
        survey_options = ["hdr1", "hdr2", "hdr2.1"]
        catalog_type_options = ["lines", "continuum", "broad"]

        if survey.lower() not in survey_options:
            print("survey not in survey options")
            print(survey_options)
            return None

        if catalog_type.lower() not in catalog_type_options:
            print("catalog_type not in catalog_type options")
            print(catalog_type_options)
            return None

        # store to class
        if curated_version is not None:
            self.version = curated_version
            self.loadtable = False
            self.survey = "hdr" + curated_version[0:3]
        else:
            self.version = None
            self.survey = survey
            self.loadtable = loadtable

        global config
        config = HDRconfig(survey=self.survey)

        if catalog_type == "lines":
            self.filename = config.detecth5
        elif catalog_type == "continuum":
            self.filename = config.contsourceh5
        elif catalog_type == "broad":
            try:
                self.filename = config.detectbroadh5
            except:
                print("Could not locate broad line catalog")

        self.hdfile = tb.open_file(self.filename, mode="r")

        # store to class
        if curated_version is not None:
            self.version = curated_version
            self.loadtable = False
            self.survey = "hdr" + curated_version[0:3]
        else:
            self.survey = survey
            self.loadtable = loadtable

        if self.version is not None:

            try:
                catfile = op.join(config.detect_dir, "catalogs",
                                  "detect_hdr" + self.version + ".fits")
                det_table = Table.read(catfile)

                for col in det_table.colnames:
                    if isinstance(det_table[col][0], str):
                        setattr(self, col,
                                np.array(det_table[col]).astype(str))
                    else:
                        setattr(self, col, np.array(det_table[col]))

                self.vis_class = -1 * np.ones(np.size(self.detectid))

            except:
                print("Could not open curated catalog version: " +
                      self.version)
                return None

        elif self.loadtable:
            colnames = self.hdfile.root.Detections.colnames
            for name in colnames:
                if isinstance(
                        getattr(self.hdfile.root.Detections.cols, name)[0],
                        np.bytes_):
                    setattr(
                        self,
                        name,
                        getattr(self.hdfile.root.Detections.cols,
                                name)[:].astype(str),
                    )
                else:
                    setattr(self, name,
                            getattr(self.hdfile.root.Detections.cols, name)[:])
            if self.survey == "hdr2.1":
                # Fix fluxes and continuum values for aperture corrections
                wave = self.hdfile.root.Detections.cols.wave[:]
                apcor = self.hdfile.root.Spectra.cols.apcor[:]
                wave_spec = self.hdfile.root.Spectra.cols.wave1d[:]

                apcor_array = np.ones_like(wave)
                for idx in np.arange(0, np.size(wave)):
                    sel_apcor = np.where(wave_spec[idx, :] > wave[idx])[0][0]
                    apcor_array[idx] = apcor[idx, sel_apcor]
                self.apcor = apcor_array

                if catalog_type == "lines":

                    # remove E(B-V)=0.02 screen extinction
                    fix = get_2pt1_extinction_fix()

                    self.flux /= fix(wave)
                    self.flux_err /= fix(wave)
                    self.continuum /= fix(wave)
                    self.continuum_err /= fix(wave)

                    # store observed flux values in new columns
                    self.flux_obs = self.flux.copy()
                    self.flux_err_obs = self.flux_err.copy()
                    self.continuum_obs = self.continuum.copy()
                    self.continuum_err_obs = self.continuum_err.copy()

                    # apply extinction to observed values to get
                    # dust corrected values
                    # Apply S&F 2011 Extinction correction from SFD Map
                    # https://iopscience.iop.org/article/10.1088/0004-637X/737/2/103#apj398709t6

                    self.coords = SkyCoord(self.ra * u.degree,
                                           self.dec * u.degree,
                                           frame="icrs")

                    sfd = SFDQuery()
                    self.ebv = sfd(self.coords)
                    Rv = 3.1
                    corr_SF2011 = 2.742  # Landolt V
                    ext = []

                    self.Av = corr_SF2011 * self.ebv

                    for index in np.arange(np.size(self.detectid)):
                        src_wave = np.array([np.double(self.wave[index])])
                        ext_i = extinction.fitzpatrick99(
                            src_wave, self.Av[index], Rv)[0]
                        ext.append(ext_i)

                    deredden = 10**(0.4 * np.array(ext))

                    self.flux *= deredden
                    self.flux_err *= deredden
                    self.continuum *= deredden
                    self.continuum_err *= deredden

            # add in the elixer probabilties and associated info:
            if self.survey == "hdr1" and catalog_type == "lines":

                self.hdfile_elix = tb.open_file(config.elixerh5, mode="r")
                colnames2 = self.hdfile_elix.root.Classifications.colnames
                for name2 in colnames2:
                    if name2 == "detectid":
                        setattr(
                            self,
                            "detectid_elix",
                            self.hdfile_elix.root.Classifications.cols.
                            detectid[:],
                        )
                    else:
                        if isinstance(
                                getattr(
                                    self.hdfile_elix.root.Classifications.cols,
                                    name2)[0],
                                np.bytes_,
                        ):
                            setattr(
                                self,
                                name2,
                                getattr(
                                    self.hdfile_elix.root.Classifications.cols,
                                    name2)[:].astype(str),
                            )
                        else:
                            setattr(
                                self,
                                name2,
                                getattr(
                                    self.hdfile_elix.root.Classifications.cols,
                                    name2)[:],
                            )
            else:

                # add elixer info if node exists
                try:
                    colnames = self.hdfile.root.Elixer.colnames
                    for name in colnames:
                        if name == "detectid":
                            continue
                        if isinstance(
                                getattr(self.hdfile.root.Elixer.cols, name)[0],
                                np.bytes_):
                            setattr(
                                self,
                                name,
                                getattr(self.hdfile.root.Elixer.cols,
                                        name)[:].astype(str),
                            )
                        else:
                            setattr(
                                self,
                                name,
                                getattr(self.hdfile.root.Elixer.cols, name)[:],
                            )
                    self.gmag = self.mag_sdss_g
                    # NOTE: likely a bug in the source: gmag_err is assigned
                    # the magnitude column rather than its error column
                    self.gmag_err = self.mag_sdss_g
                except:
                    print("No Elixer table found")

            # also assign a field and some QA identifiers
            self.field = np.chararray(np.size(self.detectid), 12, unicode=True)
            self.fwhm = np.zeros(np.size(self.detectid))
            if self.survey == "hdr1":
                self.fluxlimit_4550 = np.zeros(np.size(self.detectid))
            else:
                self.fluxlimit_4540 = np.zeros(np.size(self.detectid))

            self.throughput = np.zeros(np.size(self.detectid))
            self.n_ifu = np.zeros(np.size(self.detectid), dtype=int)

            S = Survey(self.survey)

            for index, shot in enumerate(S.shotid):
                ix = np.where(self.shotid == shot)
                self.field[ix] = S.field[index].astype(str)
                # NOTE: python2 to python3 strings now unicode
                if self.survey == "hdr1":
                    self.fwhm[ix] = S.fwhm_moffat[index]
                    self.fluxlimit_4550[ix] = S.fluxlimit_4550[index]
                else:
                    self.fwhm[ix] = S.fwhm_virus[index]
                try:
                    self.fluxlimit_4540[ix] = S.fluxlimit_4540[index]
                except:
                    pass
                self.throughput[ix] = S.response_4540[index]
                self.n_ifu[ix] = S.n_ifu[index]

                # assign a vis_class field for future classification
                # -2 = ignore (bad detectid, shot)
                # -1 = no assignment
                # 0 = artifact
                # 1 = OII emitter
                # 2 = LAE emitter
                # 3 = star
                # 4 = nearby galaxies (HBeta, OIII usually)
                # 5 = other line
            # close the survey HDF5 file
            S.close()

            self.vis_class = -1 * np.ones(np.size(self.detectid))

            if self.survey == "hdr1":
                self.add_hetdex_gmag(loadpickle=True, picklefile=config.gmags)

            if self.survey == "hdr1":
                if PYTHON_MAJOR_VERSION < 3:
                    self.plae_poii_hetdex_gmag = np.array(
                        pickle.load(open(config.plae_poii_hetdex_gmag, "rb")))
                else:
                    self.plae_poii_hetdex_gmag = np.array(
                        pickle.load(open(config.plae_poii_hetdex_gmag, "rb"),
                                    encoding="bytes"))

        else:
            # just get coordinates, wavelength and detectid
            self.detectid = self.hdfile.root.Detections.cols.detectid[:]
            self.ra = self.hdfile.root.Detections.cols.ra[:]
            self.dec = self.hdfile.root.Detections.cols.dec[:]
            self.wave = self.hdfile.root.Detections.cols.wave[:]

        # set the SkyCoords
        self.coords = SkyCoord(self.ra * u.degree,
                               self.dec * u.degree,
                               frame="icrs")
Code example #23
def dust(l, b):
    c = SkyCoord(l, b, frame='galactic')
    sfd = SFDQuery()
    dust = sfd(c)
    return dust
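A minimal usage sketch; as with the other examples, l and b must carry angular units:

import astropy.units as u

ebv = dust(30. * u.deg, 5. * u.deg)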
Code example #24
    def main(self):

        hpx_path = "%s/%s" % (self.options.healpix_dir, self.options.healpix_file)

        # If you specify a telescope that's not in the default list, you must provide the rest of the information
        detector = None
        is_error = False
        is_custom_percentile = False
        custom_percentile = None

        # Check the inputs for errors...
        if self.options.telescope_abbreviation not in self.telescope_mapping.keys():
            print("Running for custom telescope. Checking required parameters...")

            if self.options.telescope_abbreviation == "":
                is_error = True
                print("For custom telescope, `telescope_abbreviation` is required!")

            if self.options.telescope_name == "":
                is_error = True
                print("For custom telescope, `telescope_name` is required!")

            if self.options.detector_width_deg <= 0.0:
                is_error = True
                print("For custom telescope, `detector_width_deg` is required, and must be > 0!")

            if self.options.detector_height_deg <= 0.0:
                is_error = True
                print("For custom telescope, `detector_height_deg` is required, and must be > 0!")

            if not is_error:
                detector = Detector(self.options.telescope_name, self.options.detector_width_deg,
                                    self.options.detector_height_deg)
        else:
            detector = self.telescope_mapping[self.options.telescope_abbreviation]

        if self.options.percentile > 0.9:
            is_error = True
            print("User-defined percentile must be <= 0.90")
        elif self.options.percentile > 0.0:
            is_custom_percentile = True
            custom_percentile = self.options.percentile

        if is_error:
            print("Exiting...")
            return 1

        print("\n\nTelescope: `%s -- %s`, width: %s [deg]; height %s [deg]" % (self.options.telescope_abbreviation,
                                                                               detector.name, detector.deg_width,
                                                                               detector.deg_height))
        fov_area = (detector.deg_width * detector.deg_height)
        print("%s FOV area: %s" % (detector.name, fov_area))

        print("Loading base cartography...")
        base_cartography = None
        with open('%s/%s_base_cartography.pkl' % (self.options.working_dir, self.options.gw_id), 'rb') as handle:
            base_cartography = pickle.load(handle)

        # print("Loading sql pixel map...")
        # sql_pixel_map = None
        # with open('%s/%s_sql_pixel_map.pkl' % (self.options.working_dir, self.options.gw_id), 'rb') as handle:
        # 	sql_pixel_map = pickle.load(handle)

        # print("Loading sql cartography...")
        # sql_tile_cartography = None
        # with open('%s/%s_sql_cartography.pkl' % (self.options.working_dir, self.options.gw_id), 'rb') as handle:
        # 	sql_tile_cartography = pickle.load(handle)

        # print("Loading galaxy query...")
        # query = None
        # with open('%s/%s_query.pkl' % (self.options.working_dir, self.options.gw_id), 'rb') as handle:
        # 	query = pickle.load(handle)

        if not self.options.skip_completeness:
            print("Loading sql multipolygon...")
            sql_poly = None
            with open('%s/%s_sql_poly.pkl' % (self.options.working_dir, self.options.gw_id), 'rb') as handle:
                sql_poly = pickle.load(handle)

            # print("Loading contained galaxies...")
            # contained_galaxies = None
            # with open('%s/%s_contained_galaxies.pkl' % (self.options.working_dir, self.options.gw_id), 'rb') as handle:
            # 	contained_galaxies = pickle.load(handle)

            # for g in (sorted(contained_galaxies, key=lambda x: x.relative_prob, reverse=True))[:99]:
            # 	print(g.relative_prob)

            print("Loading redistributed cartography...")
            redistributed_cartography = None
            with open('%s/%s_redstributed_cartography.pkl' % (self.options.working_dir, self.options.gw_id),
                      'rb') as handle:
                redistributed_cartography = pickle.load(handle)

        # print("Loading observed tiles file...")
        # observed_tiles = {
        # 	"S190728q":{"Thacher":{"ut190728":[]},
        # 				"Swope":{"ut190728":[]}
        # 			   }
        # }

        # running_prob = 0
        # unique_pixels = []
        # # Thacher
        # thacher_width = 2048*0.609/3600. #arcseconds -> deg
        # thacher_height = 2048*0.609/3600.
        # with open('%s/ut190727_28_Thacher_observed.txt' % self.options.working_dir,'r') as csvfile:
        # 	csvreader = csv.reader(csvfile, delimiter=',',skipinitialspace=True)

        # 	for row in csvreader:
        # 		name = row[0]
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), thacher_width, thacher_height, redistributed_cartography.unpacked_healpix.nside)
        # 		t.field_name = name

        # 		t_prob = t.enclosed_pixel_indices
        # 		for ti in t_prob:
        # 			if ti not in unique_pixels:
        # 				unique_pixels.append(ti)

        # 		t.net_prob = np.sum(redistributed_cartography.unpacked_healpix.prob[t_prob])
        # 		running_prob += t.net_prob
        # 		print("%s - net prob: %s" % (name, t.net_prob))
        # 		observed_tiles["S190728q"]["Thacher"]["ut190728"].append(t)

        # print("\n")
        # swope_width = 4096*0.435/3600. #arcseconds -> deg
        # swope_height = 4112*0.435/3600.
        # with open('%s/ut190727_28_Swope_observed.txt' % self.options.working_dir,'r') as csvfile:
        # 	csvreader = csv.reader(csvfile, delimiter=',',skipinitialspace=True)

        # 	for row in csvreader:
        # 		name = row[0]
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), swope_width, swope_height, redistributed_cartography.unpacked_healpix.nside)
        # 		t.field_name = name

        # 		t_prob = t.enclosed_pixel_indices

        # 		for ti in t_prob:
        # 			if ti not in unique_pixels:
        # 				unique_pixels.append(ti)

        # 		t.net_prob = np.sum(redistributed_cartography.unpacked_healpix.prob[t_prob])
        # 		running_prob += t.net_prob
        # 		print("%s - net prob: %s" % (name, t.net_prob))
        # 		observed_tiles["S190728q"]["Swope"]["ut190728"].append(t)

        # tile_set = [
        # 	("Thacher_0728",observed_tiles["S190728q"]["Thacher"]["ut190728"],('royalblue','None')),
        # 	("Swope_0728",observed_tiles["S190728q"]["Swope"]["ut190728"],('green','None')),
        # ]
        # print("Total non-corrected prob captured by observed Thacher+Swope tiles: %s" % running_prob)
        # print("Total corrected prob captured by observed Thacher+Swope tiles: %s" % np.sum(redistributed_cartography.unpacked_healpix.prob[np.asarray(unique_pixels)]))

        print("\n\nBuilding MWE...")
        config["data_dir"] = "./DataIngestion"

        print("Initializing MWE n128 pix...")
        f99_r = 2.285
        nside128 = 128
        nside128_npix = hp.nside2npix(nside128)
        nside128_pixels = []
        nside128_RA = []
        nside128_DEC = []

        mw_pixels = []
        for i in range(nside128_npix):
            pe = Pixel_Element(i, nside128, 0.0)
            nside128_pixels.append(pe)

            theta, phi = hp.pix2ang(pe.nside, pe.index)
            dec = np.degrees(0.5 * np.pi - theta)
            ra = np.degrees(phi)

            nside128_RA.append(ra)
            nside128_DEC.append(dec)

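        # Query SFD E(B-V) at every nside=128 pixel center and flag pixels whose
        # implied R-band extinction, E(B-V) * f99_r, reaches 0.5 mag as MW-obscured.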
        c1 = coord.SkyCoord(nside128_RA, nside128_DEC, unit=(u.deg, u.deg))
        sfd = SFDQuery()
        ebv1 = sfd(c1)
        for i, e in enumerate(ebv1):
            if e * f99_r >= 0.5:
                mw_pixels.append(nside128_pixels[i])

        tiles_to_plot = None
        pixels_to_plot = None
        sql_poly_to_plot = None
        if not self.options.skip_completeness:
            tiles_to_plot = redistributed_cartography.tiles
            pixels_to_plot = redistributed_cartography.unpacked_healpix.pixels_90
            sql_poly_to_plot = sql_poly
        else:
            tiles_to_plot = base_cartography.tiles
            pixels_to_plot = base_cartography.unpacked_healpix.pixels_90

        ra_tiles = []
        dec_tiles = []
        for i, t in enumerate(tiles_to_plot):
            field_name = "%s%s%sE%s" % (self.options.telescope_abbreviation,
                                        str(self.options.event_number).zfill(3),
                                        self.options.schedule_designation,
                                        str(i + 1).zfill(5))

            t.field_name = field_name
            # ra_tiles.append(t.coord.ra.degree)
            # dec_tiles.append(t.coord.dec.degree)
            ra_tiles.append(t.ra_deg)
            dec_tiles.append(t.dec_deg)

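        # Re-query SFD at the tile centers and keep only tiles that clear the
        # Milky Way extinction cut (A_R < 0.5 mag) and the declination cut.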
        c = coord.SkyCoord(ra_tiles, dec_tiles, unit=(u.deg, u.deg))
        sfd = SFDQuery()
        ebv = sfd(c)

        new_tiles = []
        for i, e in enumerate(ebv):
            t = tiles_to_plot[i]
            # if e*f99_r < 0.5 and t.coord.dec.degree > -30.0:
            if e * f99_r < 0.5 and t.dec_deg < 30.0:
                new_tiles.append(t)
        # if t.coord.dec.degree > -30.0:
        # 	t.mwe = e
        # 	new_tiles.append(t)

        print(len(new_tiles))
        prob = 0.0
        for t in new_tiles:
            prob += t.net_prob

        print("Enclosed prob = %s" % prob)

        def GetSexigesimalString(c):
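            """Format a SkyCoord as sexagesimal (HH:MM:SS.ss, +/-DD:MM:SS.ss) strings."""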
            ra = c.ra.hms
            dec = c.dec.dms

            ra_string = "%02d:%02d:%05.2f" % (ra[0], ra[1], ra[2])
            if dec[0] >= 0:
                dec_string = "+%02d:%02d:%05.2f" % (dec[0], np.abs(dec[1]), np.abs(dec[2]))
            else:
                dec_string = "%03d:%02d:%05.2f" % (dec[0], np.abs(dec[1]), np.abs(dec[2]))

            # Python distinguishes -0.0 from +0.0. If the degrees term is -0.0 (the object
            # lies less than 1 deg south of the equator), the integer formatter drops the
            # negative sign, so build the string explicitly in that case.
            if c.dec < 0.0 and dec[0] == 0.0:
                dec_string = "-00:%02d:%05.2f" % (np.abs(dec[1]), np.abs(dec[2]))
            return (ra_string, dec_string)

        with open('%s/%s_%s_MWE_below_30_AA.txt' % (self.options.working_dir, self.options.gw_id, detector.name),
                  'w') as csvfile:

            csvwriter = csv.writer(csvfile)

            cols = []
            cols.append('# FieldName')
            cols.append('FieldRA')
            cols.append('FieldDec')
            cols.append('Telescope')
            cols.append('Filter')
            cols.append('ExpTime')
            cols.append('Priority')
            cols.append('Status')
            # cols.append('A_R')
            csvwriter.writerow(cols)

            for i, st in enumerate(new_tiles):
                # coord_str = GetSexigesimalString(st.coord)
                c = coord.SkyCoord(st.ra_deg, st.dec_deg, unit=(u.deg, u.deg))
                coord_str = GetSexigesimalString(c)

                cols = []

                cols.append(st.field_name)
                cols.append(coord_str[0])
                cols.append(coord_str[1])
                cols.append(detector.name)
                cols.append(self.options.filter)
                cols.append(self.options.exp_time)
                cols.append(st.net_prob)
                cols.append('False')
                # cols.append(st.mwe)
                csvwriter.writerow(cols)

            print("Done")

        # return 0;
        # raise("Stop!")

        # swope_width = 4096*0.435/3600. #arcseconds -> deg
        # swope_height = 4112*0.435/3600.

        # thacher_width = 2048*0.609/3600. #arcseconds -> deg
        # thacher_height = 2048*0.609/3600.

        # nickel_width = 2048*0.368/3600. #arcseconds -> deg
        # nickel_height = 2048*0.368/3600.

        # andicam_ccd_width = 1024*0.371/3600. #arcseconds -> deg
        # andicam_ccd_height = 1024*0.371/3600.

        # swift_uvot_width = 2048*0.502/3600. #arcseconds -> deg
        # swift_uvot_height = 2048*0.502/3600.

        # saguaro_width = 2.26 #deg
        # saguaro_height = 2.26

        # observed_tiles = {
        # 	"S190425z":{"Swope":{"ut190425":[], "ut190428":[], "ut190429":[]},
        # 				"Thacher":{"ut190425":[]},
        # 				"ANDICAM":{"ut190425":[], "ut190428":[]},
        # 				"Nickel":{"ut190425":[], "ut190429":[]},
        # 				"Swift":{"ut190425":[]},
        # 				"SAGUARO":{"ut190425":[]}
        # 			   }
        # }

        # # Just do S190425z for now...
        # # SWOPE
        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190425_Swope_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), swope_width, swope_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["Swope"]["ut190425"]).append(t)

        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190428_Swope_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), swope_width, swope_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["Swope"]["ut190428"]).append(t)

        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190429_Swope_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), swope_width, swope_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["Swope"]["ut190429"]).append(t)

        # # THACHER
        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190425_Thacher_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), thacher_width, thacher_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["Thacher"]["ut190425"]).append(t)

        # # ANDICAM
        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190425_ANDICAM_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), andicam_ccd_width, andicam_ccd_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["ANDICAM"]["ut190425"]).append(t)

        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190428_ANDICAM_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), andicam_ccd_width, andicam_ccd_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["ANDICAM"]["ut190428"]).append(t)

        # # NICKEL
        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190425_Nickel_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), nickel_width, nickel_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["Nickel"]["ut190425"]).append(t)

        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190429_Nickel_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), nickel_width, nickel_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["Nickel"]["ut190429"]).append(t)

        # # Swift
        # with open('../O3_Alerts/GW190408/GW190425_2/S190425z_ut190425_Swift_Tiles_Observed.csv','r') as csvfile:

        # 	csvreader = csv.reader(csvfile, delimiter=' ',skipinitialspace=True)
        # 	next(csvreader)

        # 	for row in csvreader:
        # 		t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.deg, u.deg)), swift_uvot_width, swift_uvot_height, redistributed_cartography.unpacked_healpix.nside)
        # 		(observed_tiles["S190425z"]["Swift"]["ut190425"]).append(t)

        # for k1,v1 in observed_tiles.items():
        # 	print("%s" % k1)

        # 	for k2,v2 in observed_tiles[k1].items():
        # 		print("\t%s" % k2)

        # 		for k3,v3 in observed_tiles[k1][k2].items():
        # 			print("\t\t%s - # of tiles: %s" % (k3, len(observed_tiles[k1][k2][k3])))

        # 	print("\n")

        # tile_set = [
        # 	("Swope_0425",observed_tiles["S190425z"]["Swope"]["ut190425"],('r','None')),
        # 	("Swope_0428",observed_tiles["S190425z"]["Swope"]["ut190428"],('r','None')),
        # 	("Swope_0429",observed_tiles["S190425z"]["Swope"]["ut190429"],('r','None')),

        # 	("Thacher_0425",observed_tiles["S190425z"]["Thacher"]["ut190425"],('royalblue','None')),

        # 	("ANDICAM_0425",observed_tiles["S190425z"]["ANDICAM"]["ut190425"],('forestgreen','None')),
        # 	("ANDICAM_0428",observed_tiles["S190425z"]["ANDICAM"]["ut190428"],('forestgreen','None')),

        # 	("Nickel_0425",observed_tiles["S190425z"]["Nickel"]["ut190425"],('mediumorchid','None')),
        # 	("Nickel_0429",observed_tiles["S190425z"]["Nickel"]["ut190429"],('mediumorchid','None')),

        # 	("Swift_0425",observed_tiles["S190425z"]["Swift"]["ut190425"],('k','None'))
        # ]

        # # In case you want to plot contours...
        # print("Computing contours for '%s'...\n" % base_cartography.unpacked_healpix.file_name)
        # base_cartography.unpacked_healpix.compute_contours()

        # print("Computing contours for '%s'...\n" % redistributed_cartography.unpacked_healpix.file_name)
        # redistributed_cartography.unpacked_healpix.compute_contours()

        # # Thacher
        # thacher_tiles = []
        # with open('%s/S190521g_AA_THACHER_Tiles.txt' % self.options.working_dir,'r') as csvfile:

        #     csvreader = csv.reader(csvfile, delimiter=',')
        #     next(csvreader)

        #     for row in csvreader:
        #         t = Tile(coord.SkyCoord(row[1], row[2], unit=(u.hour, u.deg)), detector.deg_width, detector.deg_height,
        #                  redistributed_cartography.unpacked_healpix.nside)
        #         t.field_name = row[0]
        #         t.net_prob = np.sum(redistributed_cartography.unpacked_healpix.prob[t.enclosed_pixel_indices])

        #         print("Tile #: %s; Net prob: %0.4f" % (t.field_name, t.net_prob))

        #         thacher_tiles.append(t)

        # above_30 = []
        # for t in thacher_tiles:
        # 	if t.coord.dec.degree > -30.0:
        # 		above_30.append(t)

        # sorted_tiles = sorted(above_30, key=lambda t: t.net_prob, reverse=True)
        # print("Number of northern tiles: %s" % len(sorted_tiles))

        # top_200 = sorted_tiles[0:200]  # slice end is exclusive; [0:199] would keep only 199 tiles
        # top_200_prob = np.sum([t.net_prob for t in top_200])
        # print("Prob of northern, top 200: %s" % top_200_prob)

        # top_200_area = fov_area*200
        # print("Area of northern, top 200: %s" % top_200_area)

        # with open('%s/S190521g_AA_THACHER_Tiles_Northern_200.txt' % self.options.working_dir,'w') as csvfile:

        # 	csvwriter = csv.writer(csvfile)

        # 	cols = []
        # 	cols.append('# FieldName')
        # 	cols.append('FieldRA')
        # 	cols.append('FieldDec')
        # 	cols.append('Telescope')
        # 	cols.append('Filter')
        # 	cols.append('ExpTime')
        # 	cols.append('Priority')
        # 	cols.append('Status')
        # 	csvwriter.writerow(cols)

        # 	for i, st in enumerate(top_200):

        # 		coord_str = GetSexigesimalString(st.coord)

        # 		cols = []

        # 		cols.append(st.field_name)
        # 		cols.append(coord_str[0])
        # 		cols.append(coord_str[1])
        # 		cols.append(detector.name)
        # 		cols.append(self.options.filter)
        # 		cols.append(self.options.exp_time)
        # 		print(st.net_prob)
        # 		cols.append(st.net_prob)
        # 		cols.append('False')
        # 		csvwriter.writerow(cols)

        # 	print("Done")

        # test = []
        # for t in base_cartography.tiles:
        # 	test.append(t.polygon)

        # # print("here")
        # # print(list(test[0].exterior.coords))
        # # print("\n\n")

        # test2 = unary_union(test)
        # # print(type(test2))

        # eps = 0.00001
        # test3 = test2.buffer(eps, 1, join_style=JOIN_STYLE.mitre).buffer(-eps, 1, join_style=JOIN_STYLE.mitre)

        # plot_probability_map("%s/%s_SQL_Pixels" % (self.options.working_dir, self.options.gw_id),
        # 			 pixels_filled=base_cartography.unpacked_healpix.pixels_90,
        # 			 # tiles=base_cartography.tiles,
        # 			 # pixels_empty=base_cartography.unpacked_healpix.pixels_90,
        # 			 colormap=plt.cm.viridis,
        # 			 linear_rings=test3)

        # pixels_90 = [Pixel_Element(i90, redistributed_cartography.unpacked_healpix.nside,
        # 							redistributed_cartography.unpacked_healpix.prob[i90])
        # 							for i90 in redistributed_cartography.unpacked_healpix.indices_of_90]

        plot_probability_map(
            "%s/%s_%s_Redistributed_90th_Tiles" % (self.options.working_dir, self.options.gw_id, detector.name),
            # pixels_filled=redistributed_cartography.unpacked_healpix.pixels_90,
            pixels_filled=pixels_to_plot,
            # galaxies=contained_galaxies,
            galaxies=mw_pixels,
            tiles=new_tiles,
            # tiles=redistributed_cartography.tiles,
            # tiles=base_cartography.tiles,
            # sql_poly=sql_poly,
            # sql_poly=sql_poly_to_plot,
            # linear_rings=sql_poly.polygon,
            # healpix_obj_for_contours=base_cartography.unpacked_healpix,
            # tile_set=tile_set
            )
Code example #26
File: dustmap_script.py Project: PhD-Misc/Random
# Imports needed to run this excerpt (inferred from the calls below)
from astropy.coordinates import SkyCoord
from astropy import units
from dustmaps.bayestar import BayestarQuery
from dustmaps.chen2014 import Chen2014Query
from dustmaps.iphas import IPHASQuery
from dustmaps.marshall import MarshallQuery
from dustmaps.sfd import SFDQuery
from dustmaps.lenz2017 import Lenz2017Query
from dustmaps.bh import BHQuery

# source_ra, source_dec and distance are defined earlier in the original script
#distance = 2.3
#print(source_ra, source_dec, distance)

print('Estimating E(B-V) using dust maps:')

coords = SkyCoord(source_ra, source_dec, distance=distance*units.kpc, frame='icrs')
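# The 3D maps below (Bayestar, Chen 2014, IPHAS, Marshall) use the supplied
# distance; the 2D maps (SFD, Lenz 2017, Burstein & Heiles) ignore it and
# return the full integrated line-of-sight value.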

bayestar = BayestarQuery(max_samples=5, version='bayestar2017')
print('## Bayestar (3D):', bayestar(coords, mode='median'))

chen = Chen2014Query()
print('## Chen 2014 (3D):', chen(coords))

iphas = IPHASQuery()
print('## IPHAS (3D):', iphas(coords, mode='median'))

marshal = MarshallQuery()
print('## Marshall 2006 (3D):', marshal(coords))

sfdebv = SFDQuery()
print('## SFD 1998/2011 (2D):', sfdebv(coords))

lenz = Lenz2017Query()
print('## Lenz 2017 (2D):', lenz(coords))


bh = BHQuery()
print('## Burstein 1982 (2D):', bh(coords))

print('\n WARNING: REMEMBER TO USE APPROPRIATE BAND CONVERSION FACTORS.\nSEE http://iopscience.iop.org/article/10.1088/0004-637X/737/2/103/meta#apj398709t6')
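
A minimal sketch of applying such band conversion factors (the coefficients
below are the commonly quoted Schlafly & Finkbeiner 2011, R_V = 3.1 values for
the SDSS bands -- treat them as assumptions and verify against the table linked
above before use):

from dustmaps.sfd import SFDQuery
from astropy.coordinates import SkyCoord
import astropy.units as u

# A_band = R_band * E(B-V). Coefficients assumed from Schlafly & Finkbeiner
# (2011), Table 6 (R_V = 3.1); check the linked table before trusting them.
BAND_COEFFS = {'u': 4.239, 'g': 3.303, 'r': 2.285, 'i': 1.698, 'z': 1.263}

def extinction_in_band(ra_deg, dec_deg, band='r'):
    """Return the SFD-based extinction A_band (in mag) toward (ra, dec)."""
    coord = SkyCoord(ra=ra_deg * u.deg, dec=dec_deg * u.deg, frame='icrs')
    ebv = SFDQuery()(coord)
    return BAND_COEFFS[band] * ebv

# Example: extinction_in_band(180.0, 30.0, 'g')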