Example 1
# Imports assumed for this standalone test snippet (module paths inferred from the calls below)
import os
import numpy as np
from pkg_resources import resource_filename
from astropy.table import Table
from astropy.io import fits
from astropy import units
from astropath import localization


def test_healpix_nuniq():
    hpix_file = os.path.join(resource_filename('astropath', 'tests'), 'files',
                             'FRB201123_hpix_uniform.fits.gz')
    hpix = Table.read(hpix_file)
    header = fits.open(hpix_file)[1].header

    nside = 2**header['MOCORDER']  # NSIDE implied by the MOC order of the NUNIQ map
    localiz = dict(type='healpix',
                   healpix_data=hpix,
                   healpix_nside=nside,
                   healpix_ordering='NUNIQ',
                   healpix_coord='C')

    assert localization.vet_localization(localiz)

    # Approximate center of the localization (deg)
    cent_ra = 263.6671241047224
    cent_dec = -50.76756723228885

    # Calculate L_wx
    box_hwidth = 60.
    step_size = 1.
    ngrid = int(np.round(2 * box_hwidth / step_size))
    x = np.linspace(-box_hwidth, box_hwidth, ngrid)
    xcoord, ycoord = np.meshgrid(x, x)
    ra = cent_ra + xcoord / 3600. / np.cos(cent_dec * units.deg).value
    dec = cent_dec + ycoord / 3600.

    L_wx = localization.calc_LWx(ra, dec, localiz)
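
The test stops after evaluating the localization on the grid. A minimal follow-up check, assuming calc_LWx returns one likelihood value per grid point (i.e. an array with the same shape as the input ra/dec arrays), might look like:

# Sketch of a sanity check; assumes L_wx matches the grid shape
assert L_wx.shape == ra.shape
# The highest-weight grid cell should fall near the nominal center used above
imax = np.unravel_index(np.argmax(L_wx), L_wx.shape)
print(ra[imax], dec[imax])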
Example 2
# Imports assumed for this standalone snippet (module paths inferred from the calls below)
from astropy.table import Table
from astropy.io import fits
from astropath import localization


def localization_from_hpfile(hpix_file:str) -> dict:
    """ Generate a localization dict from
    a healpix file

    Args:
        hpix_file (str): Path to the healpix FITS file

    Returns:
        dict: localization dict
            - type (str)
            - healpix_data (np.ndarray) : probability density values
            - healpix_nside (int) : NSIDE
            - healpix_ordering (str) : healpix ordering
            - healpix_coord (str) : coordinate frame ('C' = celestial)
    """

    # Read
    hpix = Table.read(hpix_file)
    header = fits.open(hpix_file)[1].header

    nside = 2**header['MOCORDER']  # NSIDE implied by the MOC order of the NUNIQ map
    localiz = dict(type='healpix',
                   healpix_data=hpix, 
                   healpix_nside=nside,
                   healpix_ordering='NUNIQ',
                   healpix_coord='C')

    # Vet                
    assert localization.vet_localization(localiz)
    
    return localiz
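
A hypothetical call to this helper; the file name below is simply the healpix file from Example 1, and any NUNIQ-ordered healpix localization file should work the same way:

# Hypothetical usage of localization_from_hpfile
localiz = localization_from_hpfile('FRB201123_hpix_uniform.fits.gz')
print(localiz['healpix_nside'], localiz['healpix_ordering'])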
    
Example 3
# Imports assumed for this standalone test snippet (module paths inferred from the calls below)
import os
import numpy as np
from pkg_resources import resource_filename
from astropy.io import fits
from astropy import units
from astropy.wcs import WCS
from astropath import localization


def test_wcs():
    # Load up the localization
    lfile = os.path.join(resource_filename('astropath', 'tests'), 'files',
                         'mask_frb201123_localization.fits.gz')
    hdul = fits.open(lfile)
    data = hdul[0].data
    header = hdul[0].header
    wcs = WCS(header)

    # Normalize
    data /= np.sum(data)

    localiz = dict(type='wcs', wcs_data=data, wcs_WCS=wcs)
    assert localization.vet_localization(localiz)

    # Approx center
    in_region = np.where(data > 0.)
    coord = wcs.pixel_to_world(in_region[1], in_region[0])
    cent_ra = np.mean(coord.ra.deg)
    cent_dec = np.mean(coord.dec.deg)

    # Calculate L_wx
    box_hwidth = 60.
    step_size = 1.
    ngrid = int(np.round(2 * box_hwidth / step_size))
    x = np.linspace(-box_hwidth, box_hwidth, ngrid)
    xcoord, ycoord = np.meshgrid(x, x)
    ra = cent_ra + xcoord / 3600. / np.cos(cent_dec * units.deg).value
    dec = cent_dec + ycoord / 3600.

    L_wx = localization.calc_LWx(ra, dec, localiz)
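
Examples 1 and 3 build the same evaluation grid around an approximate center. A hypothetical helper (not part of astropath) that factors out that construction, reusing the numpy/astropy imports above, could look like:

def build_radec_grid(cent_ra, cent_dec, box_hwidth=60., step_size=1.):
    """Build an RA/Dec meshgrid of half-width box_hwidth (arcsec) around a center.

    RA offsets are scaled by 1/cos(dec) so the grid is square on the sky.
    """
    ngrid = int(np.round(2 * box_hwidth / step_size))
    x = np.linspace(-box_hwidth, box_hwidth, ngrid)
    xcoord, ycoord = np.meshgrid(x, x)
    ra = cent_ra + xcoord / 3600. / np.cos(cent_dec * units.deg).value
    dec = cent_dec + ycoord / 3600.
    return ra, dec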
Example 4
    def init_localization(self, ltype: str, **kwargs):
        """Ingests the localization information

        Args:
            ltype (str): Localization type [eellipse, wcs, healpix]
            kwargs: Other parameters defining the localization.
                These depend on the ltype; see localization.py
        """
        self.localiz = dict(type=ltype)
        self.localiz.update(kwargs)
        # Vet
        assert localization.vet_localization(
            self.localiz), 'Bad candidate prior input'
        logging.info("Localization is ready!")
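
Hypothetical usage of this method; path_obj stands in for an already-constructed instance of whatever class defines init_localization (the class itself is not shown in this example):

from astropy.coordinates import SkyCoord

# path_obj is a hypothetical, already-constructed instance
frb_coord = SkyCoord('21h44m25.255s -40d54m00.10s', frame='icrs')
path_obj.init_localization('eellipse',
                           center_coord=frb_coord,
                           eellipse=dict(a=0.1, b=0.1, theta=0.))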
Example 5
# Imports assumed for this standalone test snippet (module paths inferred from the calls below)
import os
import numpy as np
import pandas
from pkg_resources import resource_filename
from astropy.coordinates import SkyCoord
from astropath import localization, priors, bayesian


def test_error_ellipse():
    # This follows the FRB example Notebook

    # Set up localization
    frb_coord = SkyCoord('21h44m25.255s -40d54m00.10s', frame='icrs')
    eellipse = dict(a=0.1, b=0.1, theta=0.)
    localiz = dict(type='eellipse', center_coord=frb_coord, eellipse=eellipse)
    assert localization.vet_localization(localiz)

    # Candidates
    cand_file = os.path.join(resource_filename('astropath', 'data'),
                             'frb_example', 'frb180924_candidates.csv')
    candidates = pandas.read_csv(cand_file, index_col=0)
    c_coords = SkyCoord(ra=candidates.ra, dec=candidates.dec, unit='deg')

    # Priors
    theta_prior = dict(PDF='exp', max=6., scale=1.)
    cand_prior = dict(P_O_method='inverse', P_U=0., name='Adopted')
    assert priors.vet_theta_prior(theta_prior)
    assert priors.vet_cand_prior(cand_prior, candidates)

    # Raw priors
    raw_prior_Oi = priors.raw_prior_Oi(cand_prior['P_O_method'],
                                       candidates.half_light.values,
                                       mag=candidates.VLT_FORS2_g.values)
    candidates['P_O_raw'] = raw_prior_Oi
    # Normalize
    candidates['P_O'] = priors.renorm_priors(candidates.P_O_raw.values,
                                             cand_prior['P_U'])

    # P(x|O)
    p_xOi = bayesian.px_Oi_fixedgrid(
        30.,  # box radius for grid, in arcsec
        localiz,
        c_coords,
        candidates.half_light.values,
        theta_prior,
        step_size=0.02)
    candidates['p_xO'] = p_xOi

    # p(x)
    p_x = np.sum(candidates.P_O * candidates.p_xO)

    # Posteriors
    P_Oix = candidates.P_O * p_xOi / p_x
    candidates['P_Ox'] = P_Oix

    # Test
    assert np.isclose(candidates['P_Ox'].max(), 0.98951951218604)
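
Because P_U is set to 0 above, p_x carries no unseen-host term and the candidate posteriors must sum to unity; a sketch of that extra check (not part of the original test):

# With P_U = 0, sum(P_O * p_xO) equals p_x, so the posteriors sum to 1
assert np.isclose(np.sum(candidates['P_Ox']), 1.)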