Example #1
def est_dP_auto(params, conv_beam=False, RSD=True, max_beam=False):

    zz = params['zz']

    kh_edgs = params['kh_edgs']
    dkh = kh_edgs[1:] - kh_edgs[:-1]
    if params['logk']:
        kh = kh_edgs[:-1] * ((kh_edgs[1:] / kh_edgs[:-1])**0.5)
    else:
        kh = kh_edgs[:-1] + ((kh_edgs[1:] - kh_edgs[:-1]) * 0.5)

    mu_edgs = params['mu_edgs']
    dmu = mu_edgs[1:] - mu_edgs[:-1]
    mu = mu_edgs[:-1] + (dmu * 0.5)

    b_HI = params['b_HI']
    pkhi = np.mean(Pk_HI(kh, zz, mu, b_HI=b_HI, RSD=RSD), axis=0)
    pkn, B, Vbin = Pk_N(kh, zz, params, mu, max_beam=max_beam)

    if conv_beam:
        pkhi *= B
    else:
        B[B == 0] = np.inf
        pkn = pkn / B

    S = params['S_area']
    S = S * (np.pi / 180.)**2.
    r0 = (cosmo.comoving_distance(np.mean(zz)) * cosmo.h).value
    k_area = 2. * np.pi / (r0**2 * S)**0.5  # min k_perp
    k_para = kh[:, None] * mu[None, :]
    k_perp = (kh[:, None]**2 - k_para**2)**0.5
    pkhi[k_perp < k_area] = 0.

    rmin = (cosmo.comoving_distance(zz.min()) * cosmo.h).value
    rmax = (cosmo.comoving_distance(zz.max()) * cosmo.h).value
    dr = rmax - rmin
    k_fband = 2. * np.pi / dr  #0.03
    #pkhi[k_para < k_fband] = 0.

    if params['k_fgcut'] is not None:
        pkhi[k_para < params['k_fgcut']] = 0.

    pkt = (pkn + pkhi)
    pkt[pkt == 0] = np.inf
    snr = pkhi / pkt

    k2dkdmu = kh[:, None]**2 * dkh[:, None] * dmu[None, :]
    dpk2pk = (0.5 * np.sum(
        (snr)**2. * k2dkdmu / (2. * np.pi)**2., axis=1) * Vbin)**(0.5)
    dpk2pk[dpk2pk == 0] = np.inf
    dpk2pk = 1. / dpk2pk

    pkhi1d = np.sum(pkhi * dmu, axis=1)
    pkn1d = np.sum(pkn * dmu, axis=1)

    #pk0 = np.mean(Pk1D_HI(kh, zz), axis=0)
    pk0 = np.sum(np.mean(Pk_HI(kh, zz, mu, b_HI=b_HI, RSD=RSD), axis=0) * dmu,
                 axis=1)

    return kh, pkhi1d, pkn1d, dpk2pk, pk0
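
For reference, the error bar assembled in the last block above (snr, k2dkdmu, dpk2pk) is the standard mode-counting estimate; per k bin it evaluates

\[
\frac{\sigma_P}{P_{\rm HI}}(k) = \left[\frac{V_{\rm bin}}{2}\sum_{\mu}\left(\frac{P_{\rm HI}(k,\mu)}{P_{\rm HI}(k,\mu)+P_N(k,\mu)}\right)^{2}\frac{k^{2}\,\Delta k\,\Delta\mu}{(2\pi)^{2}}\right]^{-1/2},
\]

with the modes zeroed by the k_perp and foreground cuts dropping out of the sum.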
Example #2
def redshift_division(zmax, cosmo, unit, cosmosim):
    exp = np.floor(np.log10(np.abs(unit))).astype(int)

    # Redshift bins for SNeIa
    zr = np.linspace(0, zmax, 1000)
    # middle redshift & distance
    zmid = np.linspace((zr[1] - zr[0]) / 2, zr[-2] + (zr[-1] - zr[-2]) / 2,
                       999)
    if exp == 21:  # simulation in [kpc]
        dist_zr = (cosmo.comoving_distance(zr)).to_value('kpc')
        # Comoving distance between redshifts
        dist_bet = [
            cd.comoving_distance(zr[j + 1], zr[j], **cosmosim) * 1e3
            for j in range(len(zr) - 1)
        ]
    elif exp == 23:  # simulation in [Mpc]
        dist_zr = (cosmo.comoving_distance(zr)).to_value('Mpc')
        # Comoving distance between redshifts
        dist_bet = [
            cd.comoving_distance(zr[j + 1], zr[j], **cosmosim)
            for j in range(len(zr) - 1)
        ]
    else:
        raise Exception("Don't know this unit ->", exp)
    return zmid, dist_zr, dist_bet
Example #3
def comoving_bins(z_min, z_max, n_bins):
    """Create bins equally spaced in comoving distance.

    Assumes a Planck2015 cosmology.

    Parameters
    ----------
    z_min : `float`
        Minimum redshift.
    z_max : `float`
        Maximum redshift.
    n_bins : `int`
        Number of redshift bins to create.

    Returns
    -------
    bin_edges : `numpy.ndarray`, (n_bins + 1,)
        Redshift bin edges.
    """
    cov_min = Planck15.comoving_distance(z_min).value
    cov_max = Planck15.comoving_distance(z_max).value
    cov_edges = np.linspace(cov_min, cov_max, n_bins + 1)

    tmp_edges = []
    for cov_edge in cov_edges:
        tmp_edges.append(
            z_at_value(Planck15.comoving_distance, cov_edge * u.Mpc))
    return np.array(tmp_edges)
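
A minimal usage sketch of comoving_bins (the values are illustrative), assuming the same imports the snippet relies on: numpy as np, astropy.units as u, and Planck15 / z_at_value from astropy.cosmology.

import numpy as np
import astropy.units as u
from astropy.cosmology import Planck15, z_at_value

edges = comoving_bins(z_min=0.1, z_max=1.0, n_bins=4)
# Five redshift edges whose comoving distances are equally spaced, so the
# bins are narrower in redshift at low z and wider at high z.
print(edges)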
Example #4
def distance_spherical(v_0, v_1):
#-----------------------------------------------------------#
# Comoving separation between two points, each given as
# (angle_1 [deg], angle_2 [deg], redshift): the redshifts set
# the comoving distances, and the law of cosines combines them
# with the angular separation on the sky.
#-----------------------------------------------------------#
	d_1 = model.comoving_distance(v_0[2]).value
	d_2 = model.comoving_distance(v_1[2]).value
	aa = np.sin(np.deg2rad(v_0[0]))*np.sin(np.deg2rad(v_1[0]))
	bb = aa*np.cos(np.deg2rad(90. - v_0[1]) - np.deg2rad(90. - v_1[1]))
	cc = bb + (np.cos(np.deg2rad(v_0[0]))*np.cos(np.deg2rad(v_1[0])))
	dd = 2.*d_1*d_2*cc
	ee = d_1*d_1 + d_2*d_2
	return np.sqrt(ee - dd)
Example #5
    def freq2distance(self, freq1, freq2=1420.4):
        '''
        Convert a pair of frequencies to the comoving distance in Mpc/h
        bounded by them. The default for the second frequency is the 21 cm
        rest frequency, 1420.4 MHz, i.e. freq2 corresponds to z2=0.
        '''
        freq_21 = 1420.4
        z1 = freq_21 / freq1 - 1
        z2 = freq_21 / freq2 - 1
        distance1 = cosmo.comoving_distance(z=z1).value * cosmo.h
        distance2 = cosmo.comoving_distance(z=z2).value * cosmo.h
        return distance1 - distance2
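
As a usage note, a hypothetical call (assuming `survey` is an instance of the class this method belongs to) would return the comoving depth of a frequency band:

# depth = survey.freq2distance(150.0, 160.0)
# gives the comoving line-of-sight depth, in Mpc/h, of a 150-160 MHz band
# (21 cm redshifts of roughly 8.5 down to 7.9). With the default
# freq2=1420.4 MHz (z2 = 0), the result is simply the full comoving
# distance to the redshift of freq1, times h.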
Example #6
    def test_exact_weights(self):
        """Test that the correct pair summary values are computed.
        """
        ids = np.arange(5)
        decs = np.zeros(5)
        ras = np.linspace(0, 500, 5) / 3600
        redshifts = np.full(5, 2.0)
        catalog = {"id": ids, "ra": ras, "dec": decs, "redshift": redshifts}
        pm = pair_maker.PairMaker(self.r_mins, self.r_maxes, self.z_min,
                                  self.z_max)
        output = pm.run(catalog, catalog)

        rs = Planck15.comoving_distance(2.0).value * np.radians(ras)
        weights = pair_maker.distance_weight(rs)
        for r_min, r_max in zip(self.r_mins, self.r_maxes):
            scale_name = "Mpc%.2ft%.2f" % (r_min, r_max)

            self.assertEqual(output.iloc[0]["ref_id"], ids[0])
            self.assertEqual(output.iloc[0]["redshift"], redshifts[0])

            tmp_weights = weights[np.logical_and(rs > r_min, rs < r_max)]
            self.assertEqual(output.iloc[0]["%s_count" % scale_name],
                             len(tmp_weights))
            self.assertAlmostEqual(output.iloc[0]["%s_weight" % scale_name],
                                   tmp_weights.sum())
Example #7
def calc_detection_prob(m1, m2, z_merge):

    ## constants that reflect LIGO design sensitivity
    d_L8 = 1  ## in Gpc
    M_8 = 10  ## in Msun

    SNR_thresh = 8

    ## approximate typical SNR from Fishbach et al. 2018
    M_chirp = (m1 * m2)**(3. / 5) / (m1 + m2)**(1. / 5)
    d_C = cosmo.comoving_distance(z_merge).to(u.Gpc).value
    d_L = (1 + z_merge) * d_C

    rho_0 = 8 * (M_chirp * (1 + z_merge) / M_8)**(
        5. / 6) * d_L8 / d_L  ## this is the "typical/optimal" SNR
    if (rho_0 < SNR_thresh): return 0

    ## sample omega according to distribution for omega via inverse CDF method
    dist_size = 10000
    sample_size = 1000
    P_omega_dist = P_omega(np.linspace(0, 1, dist_size))
    inv_P_omega = interpolate.interp1d(P_omega_dist,
                                       np.linspace(0, 1, dist_size),
                                       fill_value="extrapolate")
    omega = inv_P_omega(np.random.uniform(0, 1, sample_size))

    ## find the true SNRs given sky location
    rho = omega * rho_0
    accept_SNR_num = len(rho[np.where(rho >= SNR_thresh)])

    p_det = accept_SNR_num / sample_size

    return p_det
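
The omega draw above is a generic inverse-CDF sample: tabulate the CDF, build its inverse by swapping the interpolation axes, and feed it uniform deviates. A self-contained illustration with a toy CDF (all names below are illustrative, not taken from the snippet):

import numpy as np
from scipy import interpolate

# Toy CDF: F(x) = x**2 on [0, 1], i.e. pdf(x) = 2x.
x = np.linspace(0, 1, 10000)
cdf = x**2

# Invert by interpolating x as a function of F(x).
inv_cdf = interpolate.interp1d(cdf, x, fill_value="extrapolate")

samples = inv_cdf(np.random.uniform(0, 1, 100000))
# The sample mean should approach the analytic mean of pdf 2x, which is 2/3.
print(samples.mean())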
Example #8
def verify_update(sky_obj):
    """
    Check that the frequencies/redshifts/distances are consistent.
    If healpix, check that Npix/indices/Nside are consistent
    """

    if sky_obj.data is not None:
        Nside = sky_obj.Nside
        indices = sky_obj.indices
        Npix = sky_obj.Npix

        if Npix == 12 * Nside ** 2:
            assert all(indices == np.arange(Npix))
        else:
            assert Npix == indices.size
    if sky_obj.freqs is not None:
        freqs = sky_obj.freqs
        sky_obj.Nfreqs
        sky_obj.Z_array
        sky_obj.r_mpc

        Zcheck = 1420.0 / freqs - 1.0
        Rcheck = Planck15.comoving_distance(Zcheck).to("Mpc").value
        assert all(Zcheck == sky_obj.Z_array)
        assert all(Rcheck == sky_obj.r_mpc)
    assert len(sky_obj._updated) == 0  # The _updated list should be cleared
Example #9
def pg(u,k,z, 
       zp,ap_perp,ap_para,fps8,gamma,b1s8,b2s8,s8,vp,Nshot):    

    x = (cosmo.comoving_distance(z)/cosmo.comoving_distance(zp)).value-1
    
    az_perp = ap_perp + (ap_para-ap_perp)*x
    az_para = ap_para + 2*(ap_para-ap_perp)*x
    
    b1 = b1s8/s8 + 0.29*((1+z)**2-(1+zp)**2)
    b2 = b2s8/s8
    
    omega_ratio = ((1+z)/(1+zp))**3 * (az_para/ap_para)**2 * ((cosmo.H(zp)/cosmo.H(z)).value)**2
    fz = fps8/s8*omega_ratio**gamma

    
    vz = (cosmo.H(zp)/cosmo.H(z)).value * az_para/ap_para * (1+z)/(1+zp) * vp
    
    bs2 = -4/7*(b1-1)
    b3nl = 32/315*(b1-1)
    
    pgdd = b1**2*pdd(k) + 2*b1*b2*pb2d(k) + 2*bs2*b1*pbs2d(k) + 2*b3nl*pb3nl(k) + b2**2*pb22(k)

    pgdv = b1*pdv(k) + b2*pb2v(k) + bs2*pbs2d(k) + b3nl*pb3nl(k)

    dfog = (1+(k*u*vz)**2/2)**(-2)

    par1 = b1**3
    par2 = []
    for m,n in itertools.product(range(3),range(3)):
        if A[m,n] is not None:
            par2.append(u**(2*m)*(fz/b1)**n*A[m,n](k))
    AA = par1*np.sum(par2,axis=0)

    par1 = b1**4
    par2 = []
    for m,a,b in itertools.product(range(4),range(2),range(2)):
        if B[m,a,b] is not None:
            par2.append(u**(2*m)*(-fz/b1)**(a+b)*B[m,a,b](k))
    BB = par1*np.sum(par2,axis=0)
    
    pgz = dfog * (pgdd + 2*fz*u**2*pgdv + fz**2*u**4*pvv(k) + AA + BB)
    
    return pgz
Example #10
def CovMatCalculator(z1s, z2s, cthetas, lmax=15):
    ch1s = cosmo.comoving_distance(z1s).value
    ch2s = cosmo.comoving_distance(z2s).value

    def PVCovMatIntegrand(k, chi1, chi2, ctheta):
        sumand = 0
        for l in range(lmax):
            term = (2. * l + 1.) * jprimel(k * chi1, l) * jprimel(
                k * chi2, l) * Pl(ctheta, l)
            sumand += term
            print(l, term)
        return PofK(k) * sumand

    return np.asarray([
        integrate.quad(PVCovMatIntegrand, 10**-6., 1.0, args=(a, b, c))[0]
        for a, b, c in zip(ch1s, ch2s, cthetas)
    ])
Example #11
def Pk2D_N(k_para,
           k_perp,
           z,
           params,
           bcast=True,
           return_beam=False,
           max_beam=False):

    z = np.array(z).flatten()
    z0 = np.mean(z)
    rr = (cosmo.comoving_distance(z0) * cosmo.h).value
    rv = (const.c.to('km/s') * (1 + z0)**2. / cosmo.H(z0) * cosmo.h).value

    q = k_perp * rr
    y = k_para * rv

    freq0 = 1420. / (1. + z.max())

    S = params['S_area']
    S = S * (np.pi / 180.)**2.
    r0 = (cosmo.comoving_distance(z0) * cosmo.h).value
    rmin = (cosmo.comoving_distance(z.min()) * cosmo.h).value
    rmax = (cosmo.comoving_distance(z.max()) * cosmo.h).value
    dr = rmax - rmin
    Vbin = S * (r0**2) * dr

    #k_area = 2.* np.pi / (r0**2 * S)**0.5 # min k_perp

    _r = CN(q,
            y,
            freq0,
            params,
            bcast=bcast,
            return_beam=return_beam,
            max_beam=max_beam)
    if return_beam:
        pkn, B = _r
        #B = B * np.exp(0.5 * k_perp**2/k_area**2)
        pkn = pkn * Vbin
        return pkn, Vbin, B
    else:
        pkn = _r
        pkn = pkn * Vbin
        return pkn, Vbin
Example #12
    def test_create_bin_edges(self):
        """Test that all binning types produce predictable results.
        """
        pdf_linear = pdf_maker.PDFMaker(self.z_min, self.z_max, 10, "linear")
        test_linear = np.linspace(self.z_min, self.z_max, 11)
        self.assertEqual(pdf_linear.z_min, self.z_min)
        self.assertEqual(pdf_linear.z_max, self.z_max)
        self.assertEqual(pdf_linear.bins, 10)
        self.assertEqual(pdf_linear.binning_type, "linear")
        for pdf_edge, test_edge in zip(pdf_linear.bin_edges, test_linear):
            self.assertAlmostEqual(pdf_edge, test_edge)

        log_z_min = np.log(1 + self.z_min)
        log_z_max = np.log(1 + self.z_max)
        pdf_log = pdf_maker.PDFMaker(self.z_min, self.z_max, 10, "log")
        test_log = np.linspace(log_z_min, log_z_max, 11)
        self.assertEqual(pdf_log.z_min, self.z_min)
        self.assertEqual(pdf_log.z_max, self.z_max)
        self.assertEqual(pdf_log.bins, 10)
        self.assertEqual(pdf_log.binning_type, "log")
        for pdf_edge, test_edge in zip(np.log(1 + pdf_log.bin_edges),
                                       test_log):
            self.assertAlmostEqual(pdf_edge, test_edge)

        cov_z_min = Planck15.comoving_distance(self.z_min).value
        cov_z_max = Planck15.comoving_distance(self.z_max).value
        pdf_cov = pdf_maker.PDFMaker(self.z_min, self.z_max, 10, "comoving")
        test_cov = np.linspace(cov_z_min, cov_z_max, 11)
        self.assertEqual(pdf_cov.z_min, self.z_min)
        self.assertEqual(pdf_cov.z_max, self.z_max)
        self.assertEqual(pdf_cov.bins, 10)
        self.assertEqual(pdf_cov.binning_type, "comoving")
        comoving_edges = Planck15.comoving_distance(pdf_cov.bin_edges).value
        for pdf_edge, test_edge in zip(comoving_edges, test_cov):
            self.assertAlmostEqual(pdf_edge / test_edge - 1, 0, places=6)

        pdf_custom = pdf_maker.PDFMaker(self.z_min,
                                        self.z_max,
                                        np.linspace(1.1, 2.2, 11),
                                        "linear")
        self.assertEqual(pdf_custom.z_min, 1.1)
        self.assertEqual(pdf_custom.z_max, 2.2)
        self.assertEqual(pdf_custom.bins, 10)
        self.assertEqual(pdf_custom.binning_type, "custom")
Example #13
    def __call__(self, m1, m2, chi_1, chi_2, z_or_dist, st, et):
        """Run the waveform creator.

        Create phenomd waveforms in the amplitude frequency domain.

        **Warning**: Binary parameters have to be one of three shapes: scalar, len-1 array,
        or array of len MAX. All scalar quantities or len-1 arrays are cast to len-MAX arrays.
        If arrays of different lengths (len>1) are given, a ValueError will be raised.

        Arguments:
            m1 (float or 1D array of floats): Mass 1 in Solar Masses. (>0.0)
            m2 (float or 1D array of floats): Mass 2 in Solar Masses. (>0.0)
            chi_1 (float or 1D array of floats): dimensionless spin of mass 1
                aligned to orbital angular momentum. Default is None (not 0.0). [-1.0, 1.0]
            chi_2 (float or 1D array of floats): dimensionless spin of mass 2
                aligned to orbital angular momentum. Default is None (not 0.0). [-1.0, 1.0]
            z_or_dist (float or 1D array of floats): Distance measure to the binary.
                This can take three forms: redshift (dimensionless, *default*),
                luminosity distance (Mpc), comoving_distance (Mpc).
                The type used must be specified in 'dist_type' parameter. (>0.0)
            st (float or 1D array of floats): Start time of waveform in years before
                end of the merger phase. This is determined using 1 PN order. (>0.0)
            et (float or 1D array of floats): End time of waveform in years before
                end of the merger phase. This is determined using 1 PN order. (>0.0)

        """
        # cast binary inputs to same shape
        self._broadcast_and_set_attrs(locals())

        # based on distance inputs, need to find redshift and luminosity distance.
        if self.dist_type == "redshift":
            self.z = self.z_or_dist
            self.dist = cosmo.luminosity_distance(self.z).value

        elif self.dist_type == "luminosity_distance":
            z_in = np.logspace(-3, 3, 10000)
            lum_dis = cosmo.luminosity_distance(z_in).value

            self.dist = self.z_or_dist
            self.z = np.interp(self.dist, lum_dis, z_in)

        elif self.dist_type == "comoving_distance":
            z_in = np.logspace(-3, 3, 10000)
            lum_dis = cosmo.luminosity_distance(z_in).value
            com_dis = cosmo.comoving_distance(z_in).value

            comoving_distance = self.z_or_dist
            self.z = np.interp(comoving_distance, com_dis, z_in)
            self.dist = np.interp(comoving_distance, com_dis, lum_dis)

        self.length = len(self.m1)
        self._sanity_check()
        self._create_waveforms()
        return self
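
The two distance branches above invert astropy's distance functions by tabulating them on a redshift grid and calling np.interp. A self-contained round-trip check of that pattern (Planck15 stands in for whatever cosmo object the class actually uses):

import numpy as np
from astropy.cosmology import Planck15 as cosmo

z_in = np.logspace(-3, 3, 10000)
com_dis = cosmo.comoving_distance(z_in).value    # Mpc
lum_dis = cosmo.luminosity_distance(z_in).value  # Mpc

z_true = 1.5
d_c = cosmo.comoving_distance(z_true).value
z_rec = np.interp(d_c, com_dis, z_in)    # comoving distance -> redshift
d_l = np.interp(d_c, com_dis, lum_dis)   # comoving -> luminosity distance
print(z_rec, d_l / cosmo.luminosity_distance(z_true).value)  # ~1.5, ~1.0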
Example #14
def p(l,k,z,
     zp,ap_perp,ap_para,fps8,gamma,b1s8,b2s8,s8,vp,Nshot):   

    x = (cosmo.comoving_distance(z)/cosmo.comoving_distance(zp)).value-1 
    az_perp = ap_perp + (ap_para-ap_perp)*x
    az_para = ap_para + 2*(ap_para-ap_perp)*x
    
    a = az_perp**(2/3)*az_para**(1/3)
    FAP = az_para/az_perp
    eps = FAP**(1/3)-1
    
    uu = np.linspace(-1,1,101)
    u = uu[1:]
    du = np.diff(uu)
    
    Ll = legendre(l)
    
    pp = []
    
    for i,ui in enumerate(u):
    
        k1 = k*(1+eps)/a*(1+ui**2*((1+eps)**(-6)-1))**(1/2)
        u1 = ui/(1+eps)**3*(1+ui**2*((1+eps)**(-6)-1))**(1/2)  
    
        pgz =  pg(u1,k1,z, 
           zp,ap_perp,ap_para,fps8,gamma,b1s8,b2s8,s8,vp,Nshot)    
    
        
        pu = pgz*Ll(ui)
        pp.append(pu)

    pp = np.array(pp)
    pp = np.sum(pp*du[:,None],axis=0)
    
    pl = (2*l+1)/(2*az_para*az_perp**2) * pp
    
    if l==0:
        return pl+Nshot
    else:
        return pl
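
The loop over u above is a Riemann-sum Legendre projection, P_l(k) = (2l+1)/2 * integral du P(k,u) L_l(u), before the Alcock-Paczynski volume factor. That projection can be sanity-checked on the Kaiser form, whose monopole has a closed expression (all names below are illustrative, not from the snippet):

import numpy as np
from scipy.special import legendre

b, f = 2.0, 0.8

def kaiser_pk_mu(u):
    # Kaiser P(k, mu) / P_lin, used here only as a stand-in for pg()
    return (b + f * u**2)**2

uu = np.linspace(-1, 1, 10001)
u = 0.5 * (uu[1:] + uu[:-1])  # midpoints
du = np.diff(uu)

L0 = legendre(0)
P0 = (2 * 0 + 1) / 2. * np.sum(kaiser_pk_mu(u) * L0(u) * du)
print(P0, b**2 + 2 * b * f / 3. + f**2 / 5.)  # should agree closely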
Example #15
def fixColumns(TableName):
    upperCaseCols(TableName)
    if 'RA' in TableName.colnames:
        good = 1
    elif 'RAJ2000' in TableName.colnames:
        TableName.rename_column('RAJ2000', 'RA')
    else:
        print('Could not find RA column')

    if 'DEC' in TableName.colnames:
        good = 1
    elif 'DECLINATION' in TableName.colnames:
        TableName.rename_column('DECLINATION', 'DEC')
    elif 'DEJ2000' in TableName.colnames:
        TableName.rename_column('DEJ2000', 'DEC')
    else:
        print('Could not find dec column')

    if 'DISTANCE' in TableName.colnames:
        good = 1
    elif 'DIST' in TableName.colnames:
        TableName.rename_column('DIST', 'DISTANCE')
    elif 'Z' in TableName.colnames:
        TableName.add_column(
            Column(Planck15.comoving_distance(TableName['Z']),
                   name='DISTANCE'))
        TableName.remove_column('Z')
    elif 'REDSHIFT' in TableName.colnames:
        TableName.add_column(
            Column(Planck15.comoving_distance(TableName['REDSHIFT']),
                   name='DISTANCE'))
        TableName.remove_column('REDSHIFT')
    else:
        TableName['DISTANCE'] = 100 * 1e6 * u.pc  # 100 Mpc
        #.add_column(Column((TableName['ra']/TableName['ra']) * 100 * 10e6 * u.pc, name = 'distance'))
        print(
            'Could not find distance or redshift column, placed all data at 100 Mpc'
        )
Example #16
def CHI(q, y, z):

    z = np.array([
        z,
    ]).flatten()
    zi = np.mean(z)

    r = cosmo.comoving_distance(zi) * cosmo.h
    rnu = const.c.to('km/s') * (1 + zi)**2 / cosmo.H(zi) * cosmo.h

    k_para = y / rnu.value
    k_perp = q / r.value

    return Pk2D_HI(k_para, k_perp, zi)[0]
Example #17
def get_cut_box(box, numin=150., numax=161.15):
    box, N, zstart, zend, L = get_sim_21cmfast(box)

    # GENERATE FREQUENCIES AND CUT THE BOX
    box = box[:, :, ::-1]  # reverse last axis
    d = np.linspace(Planck15.comoving_distance(zstart),
                    Planck15.comoving_distance(zend), N)

    _z = np.linspace(zstart, zend, N)
    print(_z, Planck15.comoving_distance(_z))
    dspline = spline(Planck15.comoving_distance(_z), _z)
    z = dspline(d[::-1])
    #    z = np.linspace(zend, zstart, N)
    nu = 1420. / (z + 1)

    mask = np.logical_and(nu > numin, nu < numax)

    box = box[:, :, mask]
    z = z[mask]
    d = d[mask].value
    nu = nu[mask]

    return box, N, L * Planck15.h, d * Planck15.h, nu, z
Example #18
def spherical_to_cartesian(theta, phi, z_val):
#-----------------------------------------------------------#
# Converts spherical coordinates to Cartesian coordinates.
# Input:
#   theta: polar angle - float, degrees
#     phi: azimuthal angle - float, degrees
#   z_val: redshift - float
# Output:
#   numpy array: [x, y, z]
#-----------------------------------------------------------#
	radial = model.comoving_distance(z_val).value
	phi    = 90. - phi
	xx     = radial*np.cos(np.deg2rad(theta))*np.sin(np.deg2rad(phi))
	yy     = radial*np.sin(np.deg2rad(theta))*np.sin(np.deg2rad(phi))
	zz     = radial*np.cos(np.deg2rad(phi))
	return [xx, yy, zz]
Example #19
def z_to_mpc(redshift):
    """
    Convert a redshift to a comoving distance

    Parameters
    ----------
    redshift : float

    Returns
    -------
    Comoving distance of redshift in Mpc

    """

    if redshift <= 1e-10:
        return 0 * u.Mpc
    else:
        return cosmo.comoving_distance(redshift)
Example #20
    def __init__(self,TwoMRSMaps=None,sim=True, pthresh=3.e-3, zslices = 16, smoothing=0.0, m=3, zmax=3.):
        self.HESEMaps=TwoMRSMaps
        self.sim=sim
        self.pthresh = pthresh
        self.obs = ICPSObservations()
        self.perf = ICPSPerformance('DiscoSens.txt')
        self.NU, self.NL, self.NO, self.SU, self.SL, self.SO = self.obs.GetHSCounts(pthresh)
        self.PTDisc=False
        self.Det={}
        if sim:
            print('Total Median Hotspots above threshold p value ', pthresh, ':',
                  (self.NU + self.NL + self.SU + self.SL) / 2, 'N:S',
                  (self.NU + self.NL) / 2, (self.SU + self.SL) / 2)
            print('Total Observed Hotspots', self.NO + self.SO)
            self.Det["IsotropicHS"]=Detector(Name="IceCubePSSkyMap", EvList=IsoGenerator((self.NU+self.NL+self.SU+self.SL)/2, self.NU+self.NL, self.SU+self.SL))
        self.c_evWeights={}
        self.c_evWeights["IsotropicHS"]=TCanvas()
        self.h_injPattern={}
        self.h_injPattern["IsotropicHS"]=None

        self.TwoMRSMapsSum={}
        self.TwoMRSMapsSum["IsotropicHS"]=self.HESEMapsSumf("IsotropicHS")
        self.CRSet={}
        self.CRSet["IsotropicHS"]=[]
        self.sigWsum={}
        self.TomoMaps={}
        self.Zslices = zslices
        self.Zarr = np.linspace(0., 0.15, zslices)
        
        for i in range(len(self.Zarr)-1):
            print(PF(), 'Loading map in range ', self.Zarr[i], ' to ', self.Zarr[i+1])
            self.TomoMaps[self.Zarr[i]] = TwoMRSMap(Make2MRSMap(self.Zarr[i],  self.Zarr[i+1], 16), "SourceDist", smoothing)
        self.Nsrc = 0
        self.Ndensity = 0.
        self.M = m
        self.TotDiffuseFlux=0.
        self.Fluxes=[]
        self.Zmax=zmax
        self.scaler = N0Scaler(self.M, self.Zmax)
        self.DCMRmax=cosmo.comoving_distance(self.Zmax).value
        if self.M:
            self.Evoprobx = np.linspace(0, self.Zmax, 1500)
            self.Evoproby = np.power(1.+self.Evoprobx, self.M)*np.power(self.Evoprobx, 2.)
            self.backpolate = InterpolatedUnivariateSpline(self.Evoproby, self.Evoprobx)
Example #21
def cube2healpix(cube, cube_res, cube_freq, hpx_nside):
    """
    Tile and grid a 21 cm simulation cube to a full-sky HEALPix map.

    The projection assumes Planck15 cosmology.

    Parameters
    ----------
    cube: numpy.ndarray
        Simulation cube, 3-dimensional
    cube_res: float
        Resolution of the cube in Mpc.
    cube_freq: float
        Observed frequency matching the cube's cosmology in MHz.
    hpx_nside: integer
        NSIDE of the output HEALPix image. Must be a valid NSIDE for HEALPix,
        i.e. 2**i for integer i.

    """
    cube_shape = cube.shape

    # Determine the radial comoving distance dc to the "Universe" shell at the
    # observed frequency.
    f21 = 1420.40575177  # MHz
    z21 = f21 / cube_freq - 1
    dc = Cosmo.comoving_distance(z21).value

    # Get the vector coordinates (vx, vy, vz) of the HEALPix pixels.
    vx, vy, vz = hp.pix2vec(hpx_nside, np.arange(hp.nside2npix(hpx_nside)))

    # Translate vector coordinates to comoving coordinates and determine the
    # corresponding cube indexes (xi, yi, zi). For faster operation, we will
    # use the mod function to determine the nearest neighboring pixels and
    # just grab the data points from those pixels instead of doing linear
    # interpolation. This sets the origin of the projecting shell to pixel
    # (x, y, z) = (0, 0, 0).
    xi = np.mod(np.around(vx * dc / cube_res).astype(int), cube_shape[0])
    yi = np.mod(np.around(vy * dc / cube_res).astype(int), cube_shape[1])
    zi = np.mod(np.around(vz * dc / cube_res).astype(int), cube_shape[2])
    return cube[xi, yi, zi]
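
A hypothetical call, assuming healpy is importable as hp, Cosmo is an astropy cosmology (e.g. Planck15) as the function requires, and using a toy cube with illustrative values:

import numpy as np
import healpy as hp
from astropy.cosmology import Planck15 as Cosmo

cube = np.random.standard_normal((128, 128, 128))   # toy 21 cm cube
hpx_map = cube2healpix(cube, cube_res=1000 / 128, cube_freq=150.0,
                       hpx_nside=256)
print(hpx_map.shape, hp.nside2npix(256))  # one value per HEALPix pixel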
Example #22
def cube2healpix(cube, cube_res, cube_freq, hpx_nside):
    """
    Tile and grid a 21 cm simulation cube to a full-sky HEALPix map.

    The projection assumes Planck15 cosmology.

    Parameters
    ----------
    cube: numpy.ndarray
        Simulation cube, 3-dimensional
    cube_res: float
        Resolution of the cube in Mpc.
    cube_freq: float
        Observed frequency matching the cube's cosmology in MHz.
    hpx_nside: integer
        NSIDE of the output HEALPix image. Must be a valid NSIDE for HEALPix,
        i.e. 2**i for integer i.

    """
    cube_shape = cube.shape

    # Determine the radial comoving distance dc to the "Universe" shell at the
    # observed frequency.
    f21 = 1420.40575177  # MHz
    z21 = f21 / cube_freq - 1
    dc = Cosmo.comoving_distance(z21).value

    # Get the vector coordinates (vx, vy, vz) of the HEALPix pixels.
    vx, vy, vz = hp.pix2vec(hpx_nside, np.arange(hp.nside2npix(hpx_nside)))

    # Translate vector coordinates to comoving coordinates and determine the
    # corresponding cube indexes (xi, yi, zi). For faster operation, we will
    # use the mod function to determine the nearest neighboring pixels and
    # just grab the data points from those pixels instead of doing linear
    # interpolation. This sets the origin of the projecting shell to pixel
    # (x, y, z) = (0, 0, 0).
    xi = np.mod(np.around(vx * dc / cube_res).astype(int), cube_shape[0])
    yi = np.mod(np.around(vy * dc / cube_res).astype(int), cube_shape[1])
    zi = np.mod(np.around(vz * dc / cube_res).astype(int), cube_shape[2])
    return cube[xi, yi, zi]
Example #23
def writeDists(o):
    """ Writes bias and dn/dz files for
        redmagic like sample.
    Eli Rykoff says

    Density is ~constant comoving density 1.5e-3 h^3 Mpc^-3, sigma_z/(1+z)
    ~0.01, and bias is 2-2.5ish.
    """

    zmin=0.01
    zmax=1.2
    Nz=1000
    rho_comoving=1.5e-3
    #Nz shaping
    zshape=1.0

    d=o.outpath
    fn=open (d+"/Nz.txt",'w')
    fb=open (d+"/bz.txt",'w')
    pi=np.pi
    for z in np.linspace(zmin, zmax,Nz):
            fb.write("%g %g\n"%(z,2.2))
            ## for density, need to convert Mpc/h into n/sqdeg/dz
            ## c over H(z) for Mpc/h units
            coHz=(const.c/co.H(z)).to(u.Mpc).value*co.h
            # radius in Mpc/h
            r=co.comoving_distance(z).to("Mpc").value*co.h
            #volume of 1sq * dz
            # 4pir^2 * (1deg/rad)**2 * dr/dz
            # hMpc^3 per dz per 1sqd
            vrat=r**2 * (pi/180.)**2  * coHz
            dens=rho_comoving*vrat
            ## shape distribution to avoid sharp cut
            if (z>zshape):
                sup=(z-zshape)/(zmax-zshape)
                dens*=np.exp(-10*sup**2)
            fn.write("%g %g\n"%(z,dens))
Example #24
def writeDists(o):
    """ Writes bias and dn/dz files for
        redmagic like sample.
    Eli Rykoff says

    Density is ~constant comoving density 1.5e-3 h^3 Mpc^-3, sigma_z/(1+z)
    ~0.01, and bias is 2-2.5ish.
    """

    zmin = 0.01
    zmax = 1.2
    Nz = 1000
    rho_comoving = 1.5e-3
    #Nz shaping
    zshape = 1.0

    d = o.outpath
    fn = open(d + "/Nz.txt", 'w')
    fb = open(d + "/bz.txt", 'w')
    pi = np.pi
    for z in np.linspace(zmin, zmax, Nz):
        fb.write("%g %g\n" % (z, 2.2))
        ## for density, need to convert Mpc/h into n/sqdeg/dz
        ## c over H(z) for Mpc/h units
        coHz = (const.c / co.H(z)).to(u.Mpc).value * co.h
        # radius in Mpc/h
        r = co.comoving_distance(z).to("Mpc").value * co.h
        #volume of 1sq * dz
        # 4pir^2 * (1deg/rad)**2 * dr/dz
        # hMpc^3 per dz per 1sqd
        vrat = r**2 * (pi / 180.)**2 * coHz
        dens = rho_comoving * vrat
        ## shape distribution to avoid sharp cut
        if (z > zshape):
            sup = (z - zshape) / (zmax - zshape)
            dens *= np.exp(-10 * sup**2)
        fn.write("%g %g\n" % (z, dens))
Example #25
import yt
import numpy as np
from astropy.cosmology import Planck15 as cosmo
from scipy.interpolate import InterpolatedUnivariateSpline
from astropy import units as u
from astropy.coordinates import SkyCoord
from optparse import OptionParser
import gc

inv_comoving_distance = InterpolatedUnivariateSpline(
    cosmo.comoving_distance(np.linspace(0, 0.6, 1000)).value,
    np.linspace(0, 0.6, 1000))
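# The spline above tabulates comoving_distance on 0 < z < 0.6 and swaps the
# axes, so inv_comoving_distance(D_C in Mpc) returns the corresponding
# redshift; it is only trustworthy inside that tabulated range (z up to 0.6).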

usage = 'usage: %prog [options]'
parser = OptionParser(usage)

parser.add_option("-x",
                  "--xcenter",
                  action="store",
                  type="float",
                  default=3000.0,
                  dest="XC",
                  help="Xcoord of the Center")
parser.add_option("-y",
                  "--ycenter",
                  action="store",
                  type="float",
                  default=3000.0,
                  dest="YC",
                  help="Ycoord of the Center")
parser.add_option("-z",
Example #26
Ncells = [int(round(11 / 15 * 1000)), int(round(4 / 15 * 1000))]
smooth_fac = 0.5

Sigma, x, y = projected_surface_density(HaloPosLC,
                                        M200,
                                        Ncells,
                                        smooth=True,
                                        smooth_fac=0.8,
                                        neighbour_no=4)

print(np.max(Sigma), np.mean(Sigma))
###############################################################################
# PLOT
xticks_Mpc = np.array([1, 2, 3, 4, 5]) * 1e3 * u.Mpc
z = np.array([0.0, 0.5, 1, 1.5, 2])
xticks_z = cosmo.comoving_distance(z).to_value('Mpc')

fig = plt.figure(figsize=(15, 4))
ax1 = fig.add_subplot(111)
ax1.imshow(
    (Sigma).T,  #extent=[x.min(), x.max(), y.min(), y.max()],
    cmap='jet',
    origin='lower')
#ax1.scatter(HaloPosLC[:, 0], HaloPosLC[:, 1], marker='.', s=1, c='w')
#ax1.scatter(SrcPosLC1[:, 0], SrcPosLC1[:, 1], marker='.', s=1, c='r')
#ax1.scatter(SrcPosLC2[:, 0], SrcPosLC2[:, 1], marker='.', s=1, c='r')
#ax1.scatter(SrcPosLC3[:, 0], SrcPosLC3[:, 1], marker='.', s=1, c='r')
#ax1.set_facecolor('k')
#ax1.set_xlim(0, 5500)
#ax1.set_ylim(-30, 30)
#ax2 = ax1.twiny()
Example #27
try:
    seed = int(args['-i'])
except:
    seed = 42  ## Default argument

## How to: collect complete array from distribution amongst allocated cores.
## data    = numpy.concatenate(catalog.comm.allgather(catalog['Position'].compute()), axis=0)

redshift = 0.3

home_dir = os.environ['HOME']
scratch_dir = os.environ['SCRATCH']

mock_output = 'Scratch'
output_dirs = {'Home': home_dir, 'Scratch': scratch_dir}

midchi = pycosmo.comoving_distance([redshift])  ##  Mpc.
midchi *= pycosmo.h  ##  Mpc / h.
midchi = midchi.value

cosmo = cosmology.Planck15
Plin = cosmology.LinearPower(cosmo, redshift, transfer='EisensteinHu')

b1 = 1.0e+0

res = 'hi'
chunks = 100
interlaced = False

lores = {'nbar': 1.0e-4, 'boxsize': 1.0e+3, 'fftsize': 128}
hires = {'nbar': 1.0e-4, 'boxsize': 1.5e+3, 'fftsize': 128}
Example #28
#	z 	= zsp[sp_vals]
#	ra 	= cat.field('RAJ2000')[sp_vals]
#	dec 	= cat.field('DEJ2000')[sp_vals]
#rich 	= cat.field('RL*')[sp_vals]
else:
    z = z_sp[:]  # otherwise keep all clusters
    for i in range(len(z)):
        if np.isnan(z_sp[i]):  # locate non-spectroscopic
            z[i] = z_ph[i]  # replace with estimated z
    ra = cat.field('RAJ2000')
    dec = cat.field('DEJ2000')
    #rich 	= cat.field('RL*')

#keep	= np.loadtxt('/mnt/scratch-lustre/cbevingt/vectors/{}/subset_{}.dat'.format(name,name)).astype(int)
c = SkyCoord(ra * u.deg, dec * u.deg,
             cosmo.comoving_distance(z))  # intialized coordinates
#c 	= c[keep].galactic
c = c.galactic  # convert to galactic
l = c.l.value
b = c.b.value
""" stacking parameters """

v = np.loadtxt('/mnt/scratch-lustre/cbevingt/vectors/{}/ells_{}.dat'.format(
    name, name))
theta = v[:, 1]
N = len(theta)  # total number of clusters to potentially stack
res = 160  # size of map section for each cluster in pixels (res x res)
index = np.arange(
    N)  # define an index for naming files for each oriented map section
""" stacking function """
Example #29
def eventRate(N, z1, z2):
    return (period * extrapolation_rate * (1. / (z2 - z1)) * 4. * np.pi * N *
            3.064e-7 * cosmo.comoving_distance((z1 + z2) / 2.).value**2.)
Example #30
def u2kperp(u,z):
    return u*2.*pi/pl15.comoving_distance(z).value
Example #31
def X(f):
    z=f21/f-1
    return pl15.comoving_distance(z).value
Example #32
def Dc2(z1,z2):
    Dcz1 = (p15.comoving_distance(z1).value*p15.h)
    Dcz2 = (p15.comoving_distance(z2).value*p15.h)
    res = Dcz2-Dcz1+1e-8
    return res
Example #33
def Dc(z):
    res = p15.comoving_distance(z).value*p15.h
    return res
Example #34
from opstats.utils import MWA_FREQ_EOR_ALL_80KHZ as F
from opstats.utils import F21


# Simulation parameters
nf = F.size
nside = 4096
cube_res = 1000 / 128
cube_size = 128

cube_files = ['/data6/piyanat/models/21cm/cubes/interpolated/'
              'interp_delta_21cm_l128_{:.3f}MHz.npy'
              .format(f) for f in F]

dc = Cosmo.comoving_distance(F21 / (F * 1e6) - 1).value

vx0, vy0, vz0 = hp.pix2vec(nside, 0)  # North Pole vector
zi0 = np.mod(np.around(vz0 * dc / cube_res).astype(int), cube_size)

# We will concatenate zi0[j] slices, so that pixels (j, 0, 0) of
# the cube match central l.o.s. pixels on the sine projected lightcone.
trad_lightcone = np.empty((nf, cube_size, cube_size))
for j in range(nf):
    cube = np.load(cube_files[j])
    cube -= cube.mean()
    trad_lightcone[j] = cube[:, :, zi0[j]].T

# Save out the traditional lightcone
trad_lightcone_x = np.arange(-64, 64) * 1000 / 128
trad_lightcone_da = xr.DataArray(
Example #35
def do_cleanup(catalog):
    """Task to cleanup catalog before final write."""
    task_str = catalog.get_current_task_str()

    # Set preferred names, calculate some columns based on imported data,
    # sanitize some fields
    keys = catalog.entries.copy().keys()

    cleanupcnt = 0
    for oname in pbar(keys, task_str):
        name = catalog.add_entry(oname)

        # Set the preferred name, switching to that name if name changed.
        name = catalog.entries[name].set_preferred_name()

        aliases = catalog.entries[name].get_aliases()
        catalog.entries[name].set_first_max_light()

        if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = ['MLS', 'SSS', 'CSS', 'GRB ']
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:2])):
                        discoverdate = ('/'.join([
                            '20' + alias.replace(prefix, '')[:2],
                            alias.replace(prefix, '')[2:4],
                            alias.replace(prefix, '')[4:6]
                        ]))
                        if catalog.args.verbose:
                            tprint('Added discoverdate from name [' + alias +
                                   ']: ' + discoverdate)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            TIDALDISRUPTION.DISCOVER_DATE,
                            discoverdate,
                            source,
                            derived=True)
                        break
                if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]:
                    break
        if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = [
                'ASASSN-', 'PS1-', 'PS1', 'PS', 'iPTF', 'PTF', 'SCP-', 'SNLS-',
                'SPIRITS', 'LSQ', 'DES', 'SNHiTS', 'Gaia', 'GND', 'GNW', 'GSD',
                'GSW', 'EGS', 'COS', 'OGLE', 'HST'
            ]
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:2]) and
                            is_number(alias.replace(prefix, '')[:1])):
                        discoverdate = '20' + alias.replace(prefix, '')[:2]
                        if catalog.args.verbose:
                            tprint('Added discoverdate from name [' + alias +
                                   ']: ' + discoverdate)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            TIDALDISRUPTION.DISCOVER_DATE,
                            discoverdate,
                            source,
                            derived=True)
                        break
                if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]:
                    break
        if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = ['SNF']
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:4])):
                        discoverdate = ('/'.join([
                            alias.replace(prefix, '')[:4],
                            alias.replace(prefix, '')[4:6],
                            alias.replace(prefix, '')[6:8]
                        ]))
                        if catalog.args.verbose:
                            tprint('Added discoverdate from name [' + alias +
                                   ']: ' + discoverdate)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            TIDALDISRUPTION.DISCOVER_DATE,
                            discoverdate,
                            source,
                            derived=True)
                        break
                if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]:
                    break
        if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = ['PTFS', 'SNSDF']
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:2])):
                        discoverdate = ('/'.join([
                            '20' + alias.replace(prefix, '')[:2],
                            alias.replace(prefix, '')[2:4]
                        ]))
                        if catalog.args.verbose:
                            tprint('Added discoverdate from name [' + alias +
                                   ']: ' + discoverdate)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            TIDALDISRUPTION.DISCOVER_DATE,
                            discoverdate,
                            source,
                            derived=True)
                        break
                if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]:
                    break
        if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = ['AT', 'SN', 'OGLE-', 'SM ', 'KSN-']
            for alias in aliases:
                for prefix in prefixes:
                    if alias.startswith(prefix):
                        year = re.findall(r'\d+', alias)
                        if len(year) == 1:
                            year = year[0]
                        else:
                            continue
                        if alias.replace(prefix, '').index(year) != 0:
                            continue
                        if (year and is_number(year) and '.' not in year and
                                len(year) <= 4):
                            discoverdate = year
                            if catalog.args.verbose:
                                tprint('Added discoverdate from name [' +
                                       alias + ']: ' + discoverdate)
                            source = catalog.entries[name].add_self_source()
                            catalog.entries[name].add_quantity(
                                TIDALDISRUPTION.DISCOVER_DATE,
                                discoverdate,
                                source,
                                derived=True)
                            break
                if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]:
                    break

        if (TIDALDISRUPTION.RA not in catalog.entries[name] or
                TIDALDISRUPTION.DEC not in catalog.entries[name]):
            prefixes = [
                'PSN J', 'MASJ', 'CSS', 'SSS', 'MASTER OT J', 'HST J', 'TCP J',
                'MACS J', '2MASS J', 'EQ J', 'CRTS J', 'SMT J'
            ]
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:6])):
                        noprefix = alias.split(':')[-1].replace(
                            prefix, '').replace('.', '')
                        decsign = '+' if '+' in noprefix else '-'
                        noprefix = noprefix.replace('+', '|').replace('-', '|')
                        nops = noprefix.split('|')
                        if len(nops) < 2:
                            continue
                        rastr = nops[0]
                        decstr = nops[1]
                        ra = ':'.join([rastr[:2], rastr[2:4], rastr[4:6]]) + \
                            ('.' + rastr[6:] if len(rastr) > 6 else '')
                        dec = (decsign + ':'.join(
                            [decstr[:2], decstr[2:4], decstr[4:6]]) +
                            ('.' + decstr[6:] if len(decstr) > 6 else ''))
                        if catalog.args.verbose:
                            tprint('Added ra/dec from name: ' + ra + ' ' + dec)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            TIDALDISRUPTION.RA, ra, source, derived=True)
                        catalog.entries[name].add_quantity(
                            TIDALDISRUPTION.DEC, dec, source, derived=True)
                        break
                if TIDALDISRUPTION.RA in catalog.entries[name]:
                    break

        no_host = (TIDALDISRUPTION.HOST not in catalog.entries[name] or
                   not any([
                       x[QUANTITY.VALUE] == 'Milky Way'
                       for x in catalog.entries[name][TIDALDISRUPTION.HOST]
                   ]))
        if (TIDALDISRUPTION.RA in catalog.entries[name] and
                TIDALDISRUPTION.DEC in catalog.entries[name] and no_host):
            from astroquery.irsa_dust import IrsaDust
            if name not in catalog.extinctions_dict:
                try:
                    ra_dec = (catalog.entries[name][TIDALDISRUPTION.RA][0][
                        QUANTITY.VALUE] + " " + catalog.entries[name][
                            TIDALDISRUPTION.DEC][0][QUANTITY.VALUE])
                    result = IrsaDust.get_query_table(ra_dec, section='ebv')
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    warnings.warn("Coordinate lookup for " + name +
                                  " failed in IRSA.")
                else:
                    ebv = result['ext SandF mean'][0]
                    ebverr = result['ext SandF std'][0]
                    catalog.extinctions_dict[name] = [ebv, ebverr]
            if name in catalog.extinctions_dict:
                sources = uniq_cdl([
                    catalog.entries[name].add_self_source(),
                    catalog.entries[name]
                    .add_source(bibcode='2011ApJ...737..103S')
                ])
                (catalog.entries[name].add_quantity(
                    TIDALDISRUPTION.EBV,
                    str(catalog.extinctions_dict[name][0]),
                    sources,
                    e_value=str(catalog.extinctions_dict[name][1]),
                    derived=True))
        if ((TIDALDISRUPTION.HOST in catalog.entries[name] and
             (TIDALDISRUPTION.HOST_RA not in catalog.entries[name] or
              TIDALDISRUPTION.HOST_DEC not in catalog.entries[name]))):
            for host in catalog.entries[name][TIDALDISRUPTION.HOST]:
                alias = host[QUANTITY.VALUE]
                if ' J' in alias and is_number(alias.split(' J')[-1][:6]):
                    noprefix = alias.split(' J')[-1].split(':')[-1].replace(
                        '.', '')
                    decsign = '+' if '+' in noprefix else '-'
                    noprefix = noprefix.replace('+', '|').replace('-', '|')
                    nops = noprefix.split('|')
                    if len(nops) < 2:
                        continue
                    rastr = nops[0]
                    decstr = nops[1]
                    hostra = (':'.join([rastr[:2], rastr[2:4], rastr[4:6]]) +
                              ('.' + rastr[6:] if len(rastr) > 6 else ''))
                    hostdec = decsign + ':'.join([
                        decstr[:2], decstr[2:4], decstr[4:6]
                    ]) + ('.' + decstr[6:] if len(decstr) > 6 else '')
                    if catalog.args.verbose:
                        tprint('Added hostra/hostdec from name: ' + hostra +
                               ' ' + hostdec)
                    source = catalog.entries[name].add_self_source()
                    catalog.entries[name].add_quantity(
                        TIDALDISRUPTION.HOST_RA, hostra, source, derived=True)
                    catalog.entries[name].add_quantity(
                        TIDALDISRUPTION.HOST_DEC,
                        hostdec,
                        source,
                        derived=True)
                    break
                if TIDALDISRUPTION.HOST_RA in catalog.entries[name]:
                    break

        if (TIDALDISRUPTION.REDSHIFT not in catalog.entries[name] and
                TIDALDISRUPTION.VELOCITY in catalog.entries[name]):
            # Find the "best" velocity to use for this
            bestsig = 0
            for hv in catalog.entries[name][TIDALDISRUPTION.VELOCITY]:
                sig = get_sig_digits(hv[QUANTITY.VALUE])
                if sig > bestsig:
                    besthv = hv[QUANTITY.VALUE]
                    bestsrc = hv['source']
                    bestsig = sig
            if bestsig > 0 and is_number(besthv):
                voc = float(besthv) * 1.e5 / CLIGHT
                source = catalog.entries[name].add_self_source()
                sources = uniq_cdl([source] + bestsrc.split(','))
                (catalog.entries[name].add_quantity(
                    TIDALDISRUPTION.REDSHIFT,
                    pretty_num(
                        sqrt((1. + voc) / (1. - voc)) - 1., sig=bestsig),
                    sources,
                    kind='heliocentric',
                    derived=True))
        if (TIDALDISRUPTION.REDSHIFT not in catalog.entries[name] and
                len(catalog.nedd_dict) > 0 and
                TIDALDISRUPTION.HOST in catalog.entries[name]):
            reference = "NED-D"
            refurl = "http://ned.ipac.caltech.edu/Library/Distances/"
            for host in catalog.entries[name][TIDALDISRUPTION.HOST]:
                if host[QUANTITY.VALUE] in catalog.nedd_dict:
                    source = catalog.entries[name].add_source(
                        bibcode='2016A&A...594A..13P')
                    secondarysource = catalog.entries[name].add_source(
                        name=reference, url=refurl, secondary=True)
                    meddist = statistics.median(catalog.nedd_dict[host[
                        QUANTITY.VALUE]])
                    redz = z_at_value(cosmo.comoving_distance,
                                      float(meddist) * un.Mpc)
                    redshift = pretty_num(
                        redz, sig=get_sig_digits(str(meddist)))
                    catalog.entries[name].add_quantity(
                        TIDALDISRUPTION.REDSHIFT,
                        redshift,
                        uniq_cdl([source, secondarysource]),
                        kind='host',
                        derived=True)
        if (TIDALDISRUPTION.MAX_ABS_MAG not in catalog.entries[name] and
                TIDALDISRUPTION.MAX_APP_MAG in catalog.entries[name] and
                TIDALDISRUPTION.LUM_DIST in catalog.entries[name]):
            # Find the "best" distance to use for this
            bestsig = 0
            for ld in catalog.entries[name][TIDALDISRUPTION.LUM_DIST]:
                sig = get_sig_digits(ld[QUANTITY.VALUE])
                if sig > bestsig:
                    bestld = ld[QUANTITY.VALUE]
                    bestsrc = ld['source']
                    bestsig = sig
            if bestsig > 0 and is_number(bestld) and float(bestld) > 0.:
                source = catalog.entries[name].add_self_source()
                sources = uniq_cdl([source] + bestsrc.split(','))
                bestldz = z_at_value(cosmo.luminosity_distance,
                                     float(bestld) * un.Mpc)
                pnum = (float(catalog.entries[name][
                    TIDALDISRUPTION.MAX_APP_MAG][0][QUANTITY.VALUE]) - 5.0 *
                    (log10(float(bestld) * 1.0e6) - 1.0
                     ) + 2.5 * log10(1.0 + bestldz))
                pnum = pretty_num(pnum, sig=bestsig)
                catalog.entries[name].add_quantity(
                    TIDALDISRUPTION.MAX_ABS_MAG, pnum, sources, derived=True)
        if TIDALDISRUPTION.REDSHIFT in catalog.entries[name]:
            # Find the "best" redshift to use for this
            bestz, bestkind, bestsig, bestsrc = catalog.entries[
                name].get_best_redshift()
            if bestsig > 0:
                try:
                    bestz = float(bestz)
                except Exception:
                    print(catalog.entries[name])
                    raise
                if TIDALDISRUPTION.VELOCITY not in catalog.entries[name]:
                    source = catalog.entries[name].add_self_source()
                    # FIX: what's happening here?!
                    pnum = CLIGHT / KM * \
                        ((bestz + 1.)**2. - 1.) / ((bestz + 1.)**2. + 1.)
                    pnum = pretty_num(pnum, sig=bestsig)
                    catalog.entries[name].add_quantity(
                        TIDALDISRUPTION.VELOCITY,
                        pnum,
                        source,
                        kind=PREF_KINDS[bestkind],
                        derived=True)
                if bestz > 0.:
                    from astropy.cosmology import Planck15 as cosmo
                    if TIDALDISRUPTION.LUM_DIST not in catalog.entries[name]:
                        dl = cosmo.luminosity_distance(bestz)
                        sources = [
                            catalog.entries[name].add_self_source(),
                            catalog.entries[name]
                            .add_source(bibcode='2016A&A...594A..13P')
                        ]
                        sources = uniq_cdl(sources + bestsrc.split(','))
                        catalog.entries[name].add_quantity(
                            TIDALDISRUPTION.LUM_DIST,
                            pretty_num(
                                dl.value, sig=bestsig),
                            sources,
                            kind=PREF_KINDS[bestkind],
                            derived=True)
                        if (TIDALDISRUPTION.MAX_ABS_MAG not in
                                catalog.entries[name] and
                                TIDALDISRUPTION.MAX_APP_MAG in
                                catalog.entries[name]):
                            source = catalog.entries[name].add_self_source()
                            pnum = pretty_num(
                                float(catalog.entries[name][
                                    TIDALDISRUPTION.MAX_APP_MAG][0][
                                        QUANTITY.VALUE]) - 5.0 *
                                (log10(dl.to('pc').value) - 1.0
                                 ) + 2.5 * log10(1.0 + bestz),
                                sig=bestsig + 1)
                            catalog.entries[name].add_quantity(
                                TIDALDISRUPTION.MAX_ABS_MAG,
                                pnum,
                                sources,
                                derived=True)
                    if TIDALDISRUPTION.COMOVING_DIST not in catalog.entries[
                            name]:
                        cd = cosmo.comoving_distance(bestz)
                        sources = [
                            catalog.entries[name].add_self_source(),
                            catalog.entries[name]
                            .add_source(bibcode='2016A&A...594A..13P')
                        ]
                        sources = uniq_cdl(sources + bestsrc.split(','))
                        catalog.entries[name].add_quantity(
                            TIDALDISRUPTION.COMOVING_DIST,
                            pretty_num(
                                cd.value, sig=bestsig),
                            sources,
                            derived=True)
        if all([
                x in catalog.entries[name]
                for x in [
                    TIDALDISRUPTION.RA, TIDALDISRUPTION.DEC,
                    TIDALDISRUPTION.HOST_RA, TIDALDISRUPTION.HOST_DEC
                ]
        ]):
            # For now just using first coordinates that appear in entry
            try:
                c1 = coord(
                    ra=catalog.entries[name][TIDALDISRUPTION.RA][0][
                        QUANTITY.VALUE],
                    dec=catalog.entries[name][TIDALDISRUPTION.DEC][0][
                        QUANTITY.VALUE],
                    unit=(un.hourangle, un.deg))
                c2 = coord(
                    ra=catalog.entries[name][TIDALDISRUPTION.HOST_RA][0][
                        QUANTITY.VALUE],
                    dec=catalog.entries[name][TIDALDISRUPTION.HOST_DEC][0][
                        QUANTITY.VALUE],
                    unit=(un.hourangle, un.deg))
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception:
                pass
            else:
                sources = uniq_cdl(
                    [catalog.entries[name].add_self_source()] + catalog.
                    entries[name][TIDALDISRUPTION.RA][0]['source'].split(',') +
                    catalog.entries[name][TIDALDISRUPTION.DEC][0]['source'].
                    split(',') + catalog.entries[name][TIDALDISRUPTION.HOST_RA]
                    [0]['source'].split(',') + catalog.entries[name][
                        TIDALDISRUPTION.HOST_DEC][0]['source'].split(','))
                if 'hostoffsetang' not in catalog.entries[name]:
                    hosa = Decimal(
                        hypot(c1.ra.degree - c2.ra.degree, c1.dec.degree -
                              c2.dec.degree))
                    hosa = pretty_num(hosa * Decimal(3600.))
                    catalog.entries[name].add_quantity(
                        TIDALDISRUPTION.HOST_OFFSET_ANG,
                        hosa,
                        sources,
                        derived=True,
                        u_value='arcseconds')
                if (TIDALDISRUPTION.COMOVING_DIST in catalog.entries[name] and
                        TIDALDISRUPTION.REDSHIFT in catalog.entries[name] and
                        TIDALDISRUPTION.HOST_OFFSET_DIST not in
                        catalog.entries[name]):
                    offsetsig = get_sig_digits(catalog.entries[name][
                        TIDALDISRUPTION.HOST_OFFSET_ANG][0][QUANTITY.VALUE])
                    sources = uniq_cdl(
                        sources.split(',') + (catalog.entries[name][
                            TIDALDISRUPTION.COMOVING_DIST][0]['source']).
                        split(',') + (catalog.entries[name][
                            TIDALDISRUPTION.REDSHIFT][0]['source']).split(','))
                    (catalog.entries[name].add_quantity(
                        TIDALDISRUPTION.HOST_OFFSET_DIST,
                        pretty_num(
                            float(catalog.entries[name][
                                TIDALDISRUPTION.HOST_OFFSET_ANG][0][
                                QUANTITY.VALUE]) / 3600. * (pi / 180.) *
                            float(catalog.entries[name][
                                TIDALDISRUPTION.COMOVING_DIST][0][
                                    QUANTITY.VALUE]) * 1000. /
                            (1.0 + float(catalog.entries[name][
                                TIDALDISRUPTION.REDSHIFT][0][QUANTITY.VALUE])),
                            sig=offsetsig),
                        sources))

        catalog.entries[name].sanitize()
        catalog.journal_entries(bury=True, final=True, gz=True)
        cleanupcnt = cleanupcnt + 1
        if catalog.args.travis and cleanupcnt % 1000 == 0:
            break

    catalog.save_caches()

    return
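
Two of the derived quantities above use the special-relativistic Doppler relations: the heliocentric redshift from a velocity, z = sqrt((1 + v/c) / (1 - v/c)) - 1, and, in the block carrying the FIX comment, the velocity from a redshift, v = c * ((1 + z)^2 - 1) / ((1 + z)^2 + 1). A small self-contained round-trip check that the two are inverses:

import numpy as np

c_km_s = 299792.458
v = 15000.0                                 # km/s, heliocentric
beta = v / c_km_s
z = np.sqrt((1. + beta) / (1. - beta)) - 1.
v_back = c_km_s * ((z + 1.)**2 - 1.) / ((z + 1.)**2 + 1.)
print(z, v_back)                            # v_back recovers 15000 km/s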
Example #36
def u2kperp(u,z):
    return u*2.*pi/pl15.comoving_distance(z).value
Example #37
        #print 5/0
        wpoisson = np.sqrt(dd_data) / dr_data * float(Nr1) / float(Nd1)

        # "cross correlation" version of formula in Mo Jing and Boerner 1992
        # the geometric mean of the two is purely empirical...
        ng = np.sqrt(np.shape(data2RA)[0] * np.shape(data1RA)[0])
        w_mjb = np.sqrt(dd_data + dd_data**2. * 4. /
                        ng) / dr_data * float(Nr1) / float(Nd1)

        data2RA_sel = data2file['RA'][:][allinds]
        data2DEC_sel = data2file['DEC'][:][allinds]
        data2Z_sel = data2file['Z'][:][allinds]

        h0 = LCDM.H0 / (100 * u.km / u.s / u.Mpc)
        zmean = data2Z_sel.mean()
        R = (LCDM.comoving_distance(zmean) / (u.Mpc / h0))

        thmin = (Smin / R) * 180. / np.pi
        thmax = (Smax / R) * 180. / np.pi
        #print(thmin)
        #print(np.logspace(-3,0,16,endpoint=True))
        b = np.logspace(np.log10(thmin),
                        np.log10(thmax),
                        nbins + 1,
                        endpoint=True)

        theta = 0.5 * (b[1:] + b[:-1])

        s = np.radians(theta.value) * R

        theta_low = 3600. * np.logspace(
Example #38
def X(f):
    z=f21/f-1
    return pl15.comoving_distance(z).value