Example No. 1
def mk_mock_srch(radecfile, nzdictfile, Nsph, simul_cosmo):

    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    radecarr = h5_arr(radecfile, "good_pts")
    with open(nzdictfile) as f:
        nzdict = json.load(f)

    Nrands = radecarr.shape[0]
    Narrs = Nsph // Nrands  # integer division (Python 3)
    remain = Nsph % Nrands

    radecz = np.zeros((Nsph, 3))

    for i in range(Narrs):

        start = Nrands * i
        stop = Nrands * (i + 1)
        radecz[start:stop, :2] = radecarr[:, :]

    endchunk = Nrands * (Narrs)
    radecz[endchunk:, :2] = radecarr[:remain, :]

    rad = np.arange(1.0, 67.0, 5.0)
    zlo = nzdict["zlo"]
    zhi = nzdict["zhi"]

    # One independent copy per radius; len(rad) * [radecz] would alias the
    # same array, so each pass of the loop would overwrite the redshifts.
    radeczlist = [radecz.copy() for _ in rad]

    for r_i, r in enumerate(rad):

        dis_near = Distance(comv(zlo).value + r, u.Mpc)
        dis_far = Distance(comv(zhi).value - r, u.Mpc)

        z_a = dis_near.compute_z(cosmology=cosmo)

        z_b = dis_far.compute_z(cosmology=cosmo)

        randz = (z_a ** 3 +
                 (z_b ** 3 - z_a ** 3) * np.random.rand(Nsph)) ** (1. / 3.)

        radeczlist[r_i][:, 2] = randz[:]

        arr2h5(
            radeczlist[r_i], "{0}/{1}/mocks/mock_srch_pts.hdf5".format(
                os.path.dirname(radecfile), simul_cosmo),
            "radecz_{0}".format(str(r_i * 5 + 1)))
Example No. 2
    def __Redshift__(self, verbose=False):
        
        ZType = 0

        # Scraping the redshift
        for line in self.gcn[5:]:
            try:
                tempZ = re.findall(r"(z\s?=|redshift.of.about)\s?([0-9.]+)\s?\+?\/?\-?\s?([0-9.]+)?", line)[0][1:]
                if "mag" not in line:
                    ZType = 1
                if verbose == 3:
                    print("Redshift is adopted from \n'{}'".format(line))

            except IndexError:
                try:
                    tempZ = re.findall(r"estimate is ([0-9.]+)\s?\+?\/?\-?\s?([0-9.]+)?\s?(Mpc|kpc)", line)[0]
                    ZType = 2
                    if verbose == 3:
                        print("Redshift is adopted from \n'{}'".format(line))
                except IndexError:
                    pass

        
        # Rearrange the redshift   
        if ZType == 1:
            if tempZ[0][-1] == ".":
                z = tempZ[0][:-1]
            else:
                z = tempZ[0]
                
            if tempZ[1] == "":
                zerr = 1/10**len(str(tempZ[0]).split(".")[1])
            elif tempZ[1][-1] == ".":
                zerr = tempZ[1][:-1]
            else:
                zerr = tempZ[1]
                
            self.Redshift = ['{:.4f}'.format(float(z)), float(zerr)]
        elif ZType == 2:
            if tempZ[2] == 'Mpc':
                unit = u.Mpc
            elif tempZ[2] == 'kpc':
                unit = u.kpc
            d0 = Distance(float(tempZ[0]), unit=unit)
            z0 = d0.compute_z()
            derr1 = Distance(float(tempZ[0])+float(tempZ[1]), unit=unit)
            zerr1 = derr1.compute_z()
            derr2 = Distance(float(tempZ[0])-float(tempZ[1]), unit=unit)
            zerr2 = derr2.compute_z()
            zerr = max(abs(zerr1-z0), abs(z0-zerr2))
            self.Redshift = ['{:.4f}'.format(z0), zerr]
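
A quick check of the first pattern above against a representative GCN-style sentence; the sample line is invented for illustration:

import re

pattern = r"(z\s?=|redshift.of.about)\s?([0-9.]+)\s?\+?\/?\-?\s?([0-9.]+)?"
line = "We measure a redshift of z = 0.4245 +/- 0.0005 for the host galaxy."
value, err = re.findall(pattern, line)[0][1:]
print(value, err)  # '0.4245' '0.0005' -> redshift and its uncertainty
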
Example No. 3
def mk_mock_srch(radecfile, nzdictfile, Nsph, simul_cosmo):

    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    radecarr = h5_arr(radecfile, "good_pts")
    with open(nzdictfile) as f:
        nzdict = json.load(f)

    Nrands = radecarr.shape[0]
    Narrs = Nsph // Nrands  # integer division (Python 3)
    remain = Nsph % Nrands

    radecz = np.zeros((Nsph, 3))

    for i in range(Narrs):

        start = Nrands * i
        stop = Nrands * (i + 1)
        radecz[start:stop, :2] = radecarr[:, :]

    endchunk = Nrands * (Narrs)
    radecz[endchunk:, :2] = radecarr[:remain, :]

    rad = np.arange(1.0, 67.0, 5.0)
    zlo = nzdict["zlo"]
    zhi = nzdict["zhi"]

    # One independent copy per radius; len(rad) * [radecz] would alias the
    # same array, so each pass of the loop would overwrite the redshifts.
    radeczlist = [radecz.copy() for _ in rad]

    for r_i, r in enumerate(rad):

        # comv() returns a Quantity; take .value before adding the bare float
        dis_near = Distance(comv(zlo).value + r, u.Mpc)
        dis_far = Distance(comv(zhi).value - r, u.Mpc)

        z_a = dis_near.compute_z(cosmology=cosmo)

        z_b = dis_far.compute_z(cosmology=cosmo)

        randz = (z_a ** 3 +
                 (z_b ** 3 - z_a ** 3) * np.random.rand(Nsph)) ** (1. / 3.)

        radeczlist[r_i][:, 2] = randz[:]

        arr2h5(
            radeczlist[r_i], "{0}/{1}/mocks/mock_srch_pts.hdf5".format(
                os.path.dirname(radecfile), simul_cosmo),
            "radecz_{0}".format(str(r_i * 5 + 1)))
Example No. 4
def get_pc_per_arcsec(distance, cosmo=WMAP9):
    """Convert a distance into the projected physical scale on the sky.

    Args:
        distance: float
            Distance in Mpc
        cosmo: astropy.cosmology
            Cosmology. Defaults to WMAP9; if None is passed, the
            default_cosmology from astropy is used instead.

    Returns:
        pc_per_arcsec: astropy.units.Quantity
            Conversion in parsec per arcsecond

    """
    from astropy.cosmology import default_cosmology
    from astropy.coordinates import Distance
    from astropy import units as u

    # Use default cosmology from astropy
    if cosmo is None:
        cosmo = default_cosmology.get()

    # Use astropy units
    dist = Distance(distance, u.Mpc)
    # get the corresponding redshift
    redshift = dist.compute_z(cosmo)
    # And now the proper conversion
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(redshift)
    return kpc_per_arcmin.to(u.pc / u.arcsec)
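
A short usage sketch for the function above; the 50 Mpc distance is an arbitrary illustrative value:

scale = get_pc_per_arcsec(50.0)  # WMAP9 by default
print(scale)  # on the order of a few hundred pc / arcsec at this distance
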
Example No. 5
def test_distances_scipy():
    """
    The distance-related tests that require scipy due to the cosmology
    module needing scipy integration routines
    """
    import pytest
    import numpy.testing as npt
    from astropy import units as u
    from astropy.coordinates import Distance
    from astropy.cosmology import WMAP5

    # try different ways to initialize a Distance
    d4 = Distance(z=0.23)  # uses default cosmology - as of writing, WMAP7
    npt.assert_allclose(d4.z, 0.23, rtol=1e-8)

    d5 = Distance(z=0.23, cosmology=WMAP5)
    npt.assert_allclose(d5.compute_z(WMAP5), 0.23, rtol=1e-8)

    d6 = Distance(z=0.23, cosmology=WMAP5, unit=u.km)
    npt.assert_allclose(d6.value, 3.5417046898762366e+22)

    with pytest.raises(ValueError):
        Distance(cosmology=WMAP5, unit=u.km)

    with pytest.raises(ValueError):
        Distance()

    # vectors!  regression test for #11949
    d4 = Distance(z=[0.23, 0.45])  # as of writing, Planck18
    npt.assert_allclose(d4.z, [0.23, 0.45], rtol=1e-8)
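
The round-trip these assertions rely on, as a standalone sketch (Planck18 is an arbitrary choice; any astropy cosmology realization works):

from astropy.coordinates import Distance
from astropy.cosmology import Planck18

d = Distance(z=0.23, cosmology=Planck18)  # redshift -> luminosity distance
z = d.compute_z(cosmology=Planck18)       # inverse: distance -> redshift
print(z)                                  # recovers ~0.23 to inversion tolerance
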
Example No. 6
def dist_to_z(D):
    # D is an astropy Distance; the unbound call is equivalent to
    # D.compute_z(cosmology=cosmo), with cosmo taken from module scope.
    return Distance.compute_z(D, cosmo)
Example No. 7
    def main(self):

        is_error = False

        # Parameter checks
        if self.options.gw_id == "":
            is_error = True
            print("GWID is required.")

        if self.options.healpix_file == "":
            is_error = True
            print("Healpix file is required.")

        if is_error:
            print("Exiting...")
            return 1

        formatted_healpix_dir = self.options.healpix_dir
        if "{GWID}" in formatted_healpix_dir:
            formatted_healpix_dir = formatted_healpix_dir.replace(
                "{GWID}", self.options.gw_id)

        formatted_model_output_dir = self.options.model_output_dir
        if "{GWID}" in formatted_model_output_dir:
            formatted_model_output_dir = formatted_model_output_dir.replace(
                "{GWID}", self.options.gw_id)

        hpx_path = "%s/%s" % (formatted_healpix_dir, self.options.healpix_file)
        model_path = "../Models"

        model_files = []
        for file in os.listdir(model_path):
            if file.endswith(".dat"):
                model_files.append("%s/%s" % (model_path, file))

        if len(model_files) <= 0:
            is_error = True
            print("There are no models to process.")

        # Check if the above files exist...
        if not os.path.exists(hpx_path):
            is_error = True
            print("Healpix file `%s` does not exist." % hpx_path)

        if is_error:
            print("Exiting...")
            return 1

        # CONVENIENCE DICTIONARIES
        # Band abbreviation, band_id mapping
        band_mapping_new = {
            "sdss_g": "SDSS g",
            "sdss_r": "SDSS r",
            "sdss_i": "SDSS i",
            "Clear": "Clear"
        }

        reverse_band_mapping_new = {
            "SDSS g": "sdss_g",
            "SDSS r": "sdss_r",
            "SDSS i": "sdss_i",
            "Clear": "Clear"
        }

        detector_mapping = {
            "s": "SWOPE",
            "t": "THACHER",
            "a": "ANDICAM",
            "n": "NICKEL",
            "m": "MOSFIRE",
            "k": "KAIT",
            "si": "SINISTRO"
        }

        # LOADING NSIDE 128 SKY PIXELS AND EBV INFORMATION
        print("\nLoading NSIDE 128 pixels...")
        nside128 = 128
        N128_dict = None
        with open('N128_dict.pkl', 'rb') as handle:
            N128_dict = pickle.load(handle)
        del handle

        print("\nLoading existing EBV...")
        ebv = None
        with open('ebv.pkl', 'rb') as handle:
            ebv = pickle.load(handle)

        models = {}
        for mf in model_files:
            model_table = Table.read(mf, format='ascii.ecsv')
            mask = model_table['time'] > 0.0

            time = np.asarray(model_table['time'][mask])
            g = np.asarray(model_table['sdss_g'][mask])
            r = np.asarray(model_table['sdss_r'][mask])
            i = np.asarray(model_table['sdss_i'][mask])
            clear = np.asarray(model_table['Clear'][mask])

            model_props = model_table.meta['comment']
            mass = float(re.findall("\d+\.\d+", model_props[1])[0])
            velocity = float(re.findall("\d+\.\d+", model_props[2])[0])

            factor = float(re.findall("(\d+\.?\d?)[e+\-]", model_props[3])[0])
            exp = float(re.findall("[e+\-](-+\d+\.?\d+)", model_props[3])[0])
            Xlan = factor * 10**(exp)

            base_name = os.path.basename(mf)
            print("Loading `%s`" % base_name)
            models[(base_name, mass, velocity, Xlan)] = {
                'time': time,
                'sdss_g': g,
                'sdss_r': r,
                'sdss_i': i,
                'Clear': clear
            }

        # Get Map ID
        print("\nLoading Healpix Map...")
        healpix_map_select = "SELECT id, NSIDE FROM HealpixMap WHERE GWID = '%s' and Filename = '%s'"
        healpix_map_id = int(
            query_db([
                healpix_map_select %
                (self.options.gw_id, self.options.healpix_file)
            ])[0][0][0])
        healpix_map_nside = int(
            query_db([
                healpix_map_select %
                (self.options.gw_id, self.options.healpix_file)
            ])[0][0][1])

        # Get Bands
        print("\nLoading Configured Bands...")
        band_select = "SELECT id, Name, F99_Coefficient FROM Band"
        bands = query_db([band_select])[0]
        band_dict_by_name = {}
        band_dict_by_id = {}
        for b in bands:
            b_id = int(b[0])
            b_name = b[1]
            b_coeff = float(b[2])

            band_dict_by_name[b_name] = (b_id, b_name, b_coeff)
            band_dict_by_id[b_id] = (b_id, b_name, b_coeff)

        print("\nRetrieving distinct, imaged map pixels")
        map_pixel_select = '''
        SELECT 
            DISTINCT hp.id, 
            hp.HealpixMap_id, 
            hp.Pixel_Index, 
            hp.Prob, 
            hp.Distmu, 
            hp.Distsigma, 
            hp.Distnorm, 
            hp.Mean, 
            hp.Stddev, 
            hp.Norm, 
            sp.Pixel_Index as N128_Pixel_Index 
        FROM 
            HealpixPixel hp 
        JOIN ObservedTile_HealpixPixel ot_hp on ot_hp.HealpixPixel_id = hp.id 
        JOIN ObservedTile ot on ot.id = ot_hp.ObservedTile_id 
        JOIN SkyPixel sp on sp.id = hp.N128_SkyPixel_id 
        WHERE
            ot.HealpixMap_id = %s and 
            ot.Mag_Lim IS NOT NULL 
        '''

        q = map_pixel_select % healpix_map_id
        map_pixels = query_db([q])[0]
        print("Retrieved %s map pixels..." % len(map_pixels))

        # Initialize map pix dict for later access
        map_pixel_dict_new = OrderedDict()

        # map_pixel_dict_old = OrderedDict()
        class Pixel_Synopsis():
            def __init__(self, mean_dist, dist_sigma, prob_2D, pixel_index,
                         N128_index, pix_ebv, z):
                self.mean_dist = mean_dist
                self.dist_sigma = dist_sigma
                self.prob_2D = prob_2D
                self.pixel_index = pixel_index
                self.N128_index = N128_index
                self.pix_ebv = pix_ebv
                self.z = z

                self.distance_arr = None
                self.distance_modulus_arr = None
                self.distance_probs = None

                # From the tiles that contain this pixel
                # band:value
                self.measured_bands = []
                self.lim_mags = OrderedDict()
                self.delta_mjds = OrderedDict()

                # From the model (only select the bands that have been imaged)
                self.A_lambda = OrderedDict()  # band:value

                # model:arr
                self.model_observer_time_arr_new = OrderedDict()
                # self.model_observer_time_arr_old = None

                # Computed based on model + tile + pixel info
                # band:value
                # self.app_mag_model = {} # x-val
                # self.app_mag_model_prob = {} # y-val

                # Final calculation
                # model:band:value
                self.best_integrated_probs = OrderedDict()

            def __str__(self):
                return str(self.__dict__)

        count_bad_pixels = 0
        for p in map_pixels:
            mean_dist = float(p[7])
            dist_sigma = float(p[8])
            prob_2D = float(p[3])
            pixel_index = int(p[2])
            N128_pixel_index = int(p[10])
            pix_ebv = ebv[N128_pixel_index]

            d = Distance(mean_dist, u.Mpc)
            if mean_dist == 0.0:
                # distance did not converge for this pixel. pass...
                print("Bad Index: %s" % pixel_index)
                count_bad_pixels += 1
                continue

            z = d.compute_z(cosmology=cosmo)

            p_new = Pixel_Synopsis(mean_dist, dist_sigma, prob_2D, pixel_index,
                                   N128_pixel_index, pix_ebv, z)

            min_dist = mean_dist - 5.0 * dist_sigma
            if min_dist <= 0.0:
                min_dist = 0.001
            max_dist = mean_dist + 5.0 * dist_sigma

            distance_arr = np.linspace(min_dist, max_dist, 100)
            distance_modulus_arr = 5.0 * np.log10(distance_arr * 1e+6) - 5.0
            distance_probs = 1.0 / np.sqrt(
                2.0 * np.pi * dist_sigma**2) * np.exp(
                    -1.0 * (distance_arr - mean_dist)**2 / (2 * dist_sigma**2))

            p_new.distance_arr = distance_arr
            p_new.distance_modulus_arr = distance_modulus_arr
            p_new.distance_probs = distance_probs

            map_pixel_dict_new[pixel_index] = p_new

        print("\nMap pixel dict complete. %s bad pixels." % count_bad_pixels)

        # Get Detectors
        detectors = []
        print("\nLoading Swope...")
        detector_select = "SELECT id, Name, Deg_width, Deg_width, Deg_radius, Area, MinDec, MaxDec FROM Detector WHERE `Name`='%s'"
        dr = query_db([detector_select % 'SWOPE'])[0][0]
        swope = Detector(dr[1],
                         float(dr[2]),
                         float(dr[2]),
                         detector_id=int(dr[0]))
        detectors.append(swope)

        print("\nLoading Thacher...")
        dr = query_db([detector_select % 'THACHER'])[0][0]
        thacher = Detector(dr[1],
                           float(dr[2]),
                           float(dr[2]),
                           detector_id=int(dr[0]))
        detectors.append(thacher)

        print("\nLoading Nickel...")
        dr = query_db([detector_select % 'NICKEL'])[0][0]
        nickel = Detector(dr[1],
                          float(dr[2]),
                          float(dr[2]),
                          detector_id=int(dr[0]))
        detectors.append(nickel)

        print("\nLoading KAIT...")
        dr = query_db([detector_select % 'KAIT'])[0][0]
        kait = Detector(dr[1],
                        float(dr[2]),
                        float(dr[2]),
                        detector_id=int(dr[0]))
        detectors.append(kait)

        print("\nLoading SINISTRO...")
        dr = query_db([detector_select % 'SINISTRO'])[0][0]
        sinistro = Detector(dr[1],
                            float(dr[2]),
                            float(dr[2]),
                            detector_id=int(dr[0]))
        detectors.append(sinistro)

        # Get and instantiate all observed tiles
        observed_tile_select = '''
            SELECT 
                id,
                Detector_id, 
                FieldName, 
                RA, 
                _Dec, 
                EBV, 
                N128_SkyPixel_id, 
                Band_id, 
                MJD, 
                Exp_Time, 
                Mag_Lim, 
                HealpixMap_id 
            FROM
                ObservedTile 
            WHERE
                HealpixMap_id = %s and 
                Detector_id = %s and 
                Mag_Lim IS NOT NULL 
        '''

        observed_tiles = []

        print("\nLoading Swope's Observed Tiles...")
        ot_result = query_db(
            [observed_tile_select % (healpix_map_id, swope.id)])[0]
        for ot in ot_result:
            t = Tile(float(ot[3]),
                     float(ot[4]),
                     swope.deg_width,
                     swope.deg_height,
                     healpix_map_nside,
                     tile_id=int(ot[0]))
            t.field_name = ot[2]
            t.mjd = float(ot[8])
            t.mag_lim = float(ot[10])
            t.band_id = int(ot[7])

            observed_tiles.append(t)
        print("Loaded %s %s tiles..." % (len(ot_result), swope.name))

        print("\nLoading Nickel's Observed Tiles...")
        ot_result = query_db(
            [observed_tile_select % (healpix_map_id, nickel.id)])[0]
        for ot in ot_result:
            t = Tile(float(ot[3]),
                     float(ot[4]),
                     nickel.deg_width,
                     nickel.deg_height,
                     healpix_map_nside,
                     tile_id=int(ot[0]))
            t.field_name = ot[2]
            t.mjd = float(ot[8])
            t.mag_lim = float(ot[10])
            t.band_id = int(ot[7])

            observed_tiles.append(t)
        print("Loaded %s %s tiles..." % (len(ot_result), nickel.name))

        print("\nLoading Thacher's Observed Tiles...")
        ot_result = query_db(
            [observed_tile_select % (healpix_map_id, thacher.id)])[0]
        for ot in ot_result:
            t = Tile(float(ot[3]),
                     float(ot[4]),
                     thacher.deg_width,
                     thacher.deg_height,
                     healpix_map_nside,
                     tile_id=int(ot[0]))
            t.field_name = ot[2]
            t.mjd = float(ot[8])
            t.mag_lim = float(ot[10])
            t.band_id = int(ot[7])

            observed_tiles.append(t)
        print("Loaded %s %s tiles..." % (len(ot_result), thacher.name))

        print("\nLoading KAIT's Observed Tiles...")
        ot_result = query_db(
            [observed_tile_select % (healpix_map_id, kait.id)])[0]
        for ot in ot_result:
            t = Tile(float(ot[3]),
                     float(ot[4]),
                     kait.deg_width,
                     kait.deg_height,
                     healpix_map_nside,
                     tile_id=int(ot[0]))
            t.field_name = ot[2]
            t.mjd = float(ot[8])
            t.mag_lim = float(ot[10])
            t.band_id = int(ot[7])
            observed_tiles.append(t)
        print("Loaded %s %s tiles..." % (len(ot_result), kait.name))

        print("\nLoading SINISTRO's Observed Tiles...")
        ot_result = query_db(
            [observed_tile_select % (healpix_map_id, sinistro.id)])[0]
        for ot in ot_result:
            t = Tile(float(ot[3]),
                     float(ot[4]),
                     sinistro.deg_width,
                     sinistro.deg_height,
                     healpix_map_nside,
                     tile_id=int(ot[0]))
            t.field_name = ot[2]
            t.mjd = float(ot[8])
            t.mag_lim = float(ot[10])
            t.band_id = int(ot[7])
            observed_tiles.append(t)
        print("Loaded %s %s tiles..." % (len(ot_result), sinistro.name))

        print("Getting Detector-Band pairs...")
        detector_band_result = query_db([
            '''SELECT 
                DISTINCT d.Name as Detector, b.Name as Band
            FROM Detector d 
            JOIN ObservedTile ot on ot.Detector_id = d.id
            JOIN Band b on b.id = ot.Band_id
            WHERE ot.HealpixMap_id = %s''' % healpix_map_id
        ])

        print("\nUpdating pixel `delta_mjds` and `lim_mags`...")
        # For each tile:
        # 	we want the MJD of observation, and add that to the list of a pixels' MJD collection.
        # 	we want the limiting mag, add that to the list of a pixel's lim mag collection
        for t in observed_tiles:
            pix_indices = t.enclosed_pixel_indices

            for i in pix_indices:

                # get band from id...
                band = band_dict_by_id[t.band_id]
                band_name = band[1]

                # Some pixels are omitted because their distance information did not converge
                if i not in map_pixel_dict_new:
                    continue

                pix_synopsis_new = map_pixel_dict_new[i]

                if band_name not in pix_synopsis_new.measured_bands:
                    pix_synopsis_new.measured_bands.append(band_name)
                    pix_synopsis_new.delta_mjds[band_name] = {}
                    pix_synopsis_new.lim_mags[band_name] = {}

                pix_synopsis_new.delta_mjds[band_name][t.mjd] = (t.mjd -
                                                                 GW190814_t_0)
                pix_synopsis_new.lim_mags[band_name][t.mjd] = (t.mag_lim)

        print("\nInitializing %s models..." % len(models))

        print("\nUpdating pixel `A_lambda` and `model_observer_time_arr`...")
        # Set pixel `model_observer_time_arr` and `A_lambda`
        for pix_index, pix_synopsis in map_pixel_dict_new.items():

            for model_param_tuple, model_dict in models.items():
                for model_col in model_dict.keys():
                    if model_col in band_mapping_new:

                        band = band_dict_by_name[band_mapping_new[model_col]]
                        band_id = band[0]
                        band_name = band[1]
                        band_coeff = band[2]

                        if band_name in pix_synopsis.measured_bands:
                            if band_name not in pix_synopsis.A_lambda:
                                pix_a_lambda = pix_synopsis.pix_ebv * band_coeff
                                pix_synopsis.A_lambda[band_name] = pix_a_lambda

                time_dilation = 1.0 + pix_synopsis.z
                pix_synopsis.model_observer_time_arr_new[
                    model_param_tuple] = model_dict["time"] * time_dilation

        # NEW Do the calculation...
        print("\nIntegrating total model probs...")
        count = 0
        for pix_index, pix_synopsis in map_pixel_dict_new.items():

            for band in pix_synopsis.measured_bands:

                for model_param_tuple, model_dict in models.items():

                    model_abs_mags = model_dict[reverse_band_mapping_new[band]]
                    pixel_delta_mjd = pix_synopsis.delta_mjds[band]

                    for i, (mjd,
                            delta_mjd) in enumerate(pixel_delta_mjd.items()):

                        # get the corresponding abs mag for the time of the observation
                        pix_abs_mag = np.interp(
                            delta_mjd, pix_synopsis.
                            model_observer_time_arr_new[model_param_tuple],
                            model_abs_mags)
                        # print("pix_abs_mag: %s" % pix_abs_mag)

                        # compute the distribution in apparent mag
                        pix_app_mag = np.asarray(pix_synopsis.distance_modulus_arr) + pix_abs_mag + \
                                      pix_synopsis.A_lambda[band]

                        # re-normalize this distribution to sum to the pixel 2D prob
                        # SIMPS (y, x)
                        app_mag_norm = simps(pix_synopsis.distance_probs,
                                             pix_app_mag)
                        renorm_pix_app_mag_prob = np.asarray(
                            (pix_synopsis.prob_2D / app_mag_norm) *
                            pix_synopsis.distance_probs)

                        # Integrate the app mag distribution from arbitrarily bright to the limiting magnitude
                        f_interp = lambda x: np.interp(
                            x, pix_app_mag, renorm_pix_app_mag_prob)
                        lower_bound = np.min(pix_app_mag)
                        upper_bound = pix_synopsis.lim_mags[band][mjd]

                        if model_param_tuple not in pix_synopsis.best_integrated_probs:
                            pix_synopsis.best_integrated_probs[
                                model_param_tuple] = {}

                        # Default the entry to 0.0; setdefault replaces the
                        # bare try/except around the nested dict access.
                        pix_synopsis.best_integrated_probs[
                            model_param_tuple].setdefault(band, {})[mjd] = 0.0

                        prob_to_detect = quad(f_interp, lower_bound,
                                              upper_bound)[0]

                        if not np.isnan(
                                prob_to_detect) and prob_to_detect >= 0.0:
                            pix_synopsis.best_integrated_probs[
                                model_param_tuple][band][mjd] = prob_to_detect

            count += 1
            if count % 1000 == 0:
                print("Processed: %s" % count)

        # NEW
        # Finally, get the highest valued integration, and sum
        running_sums = {}  # model:band:value
        # pixels_to_plot = {} # model:band:pixel

        for pix_index, pix_synopsis in map_pixel_dict_new.items():
            for band in pix_synopsis.measured_bands:
                for model_param_tuple, model_dict in models.items():

                    pix_max = 0.0
                    if model_param_tuple not in running_sums:
                        running_sums[model_param_tuple] = {}

                    if band not in running_sums[model_param_tuple]:
                        running_sums[model_param_tuple][band] = 0.0

                    probs = []
                    for mjd, integrated_prob in pix_synopsis.best_integrated_probs[
                            model_param_tuple][band].items():
                        probs.append(integrated_prob)

                    pix_max = np.max(probs)
                    running_sums[model_param_tuple][band] += pix_max

        # pixels_to_plot[] append(Pixel_Element(pix_index, healpix_map_nside, pix_max))

        for model_param_tuple, band_dict in running_sums.items():
            print(
                "\nIntegrated prob to detect model by band `%s` (%s, %s, %s)" %
                model_param_tuple)

            for band, running_sum in band_dict.items():
                print("\t%s: %s" % (band, running_sum))

        ## Additional calculation -- for every pixel, just get the highest prob
        running_sums2 = {}
        for model_param_tuple, model_dict in models.items():

            running_sums2[model_param_tuple] = 0.0

            for pix_index, pix_synopsis in map_pixel_dict_new.items():

                pix_max = 0.0
                probs = []

                for band in pix_synopsis.measured_bands:
                    for mjd, integrated_prob in pix_synopsis.best_integrated_probs[
                            model_param_tuple][band].items():
                        probs.append(integrated_prob)

                pix_max = np.max(probs)
                running_sums2[model_param_tuple] += pix_max

        # Build ascii.ecsv formatted output
        cols = ['Model', 'Mass', 'Velocity', 'X_lan', 'Prob']
        dtype = ['S40', 'f8', 'f8', 'f8', 'f8']
        meta = [
            '{key} = {value}'.format(key=db[0], value=db[1])
            for db in detector_band_result[0]
        ]
        result_table = Table(dtype=dtype, names=cols)
        result_table.meta['comment'] = meta

        for model_param_tuple, prob in running_sums2.items():
            print(
                "\nCombined Integrated prob to detect model `%s` (%s, %s, %s)"
                % model_param_tuple)
            print("\t%s" % prob)
            result_table.add_row([
                model_param_tuple[0], model_param_tuple[1],
                model_param_tuple[2], model_param_tuple[3], prob
            ])

        result_table.write("%s/Detection_Results.prob" %
                           formatted_model_output_dir,
                           overwrite=True,
                           format='ascii.ecsv')
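
The core per-pixel step in main above, isolated as a hedged sketch: a distance-modulus grid turns a model absolute magnitude into an apparent-magnitude distribution, which is renormalized to the pixel's 2D probability and integrated up to the limiting magnitude. All numbers are invented stand-ins, and simpson is the current scipy name for the simps call used above:

import numpy as np
from scipy.integrate import simpson, quad

mean_dist, dist_sigma = 267.0, 52.0   # Mpc (stand-in values)
prob_2D = 1.2e-4                      # pixel's 2D localization probability
abs_mag, a_lambda = -16.0, 0.05       # model magnitude and extinction
lim_mag = 21.5                        # tile limiting magnitude

distance_arr = np.linspace(mean_dist - 5 * dist_sigma,
                           mean_dist + 5 * dist_sigma, 100)
distance_arr = distance_arr[distance_arr > 0]
dist_mod = 5.0 * np.log10(distance_arr * 1e6) - 5.0
dist_probs = np.exp(-0.5 * ((distance_arr - mean_dist) / dist_sigma) ** 2) \
             / np.sqrt(2.0 * np.pi * dist_sigma ** 2)

app_mag = dist_mod + abs_mag + a_lambda       # apparent-magnitude grid
norm = simpson(dist_probs, x=app_mag)         # normalize over the grid
renorm_probs = (prob_2D / norm) * dist_probs  # scale to the 2D prob

prob_detect = quad(lambda m: np.interp(m, app_mag, renorm_probs),
                   app_mag.min(), lim_mag)[0]
print(prob_detect)
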
Example No. 8
def dist_to_z(D):
    # Same helper as Example No. 6: convert an astropy Distance to a
    # redshift using the module-level cosmology.
    return Distance.compute_z(D, cosmo)
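
A hedged usage note for this helper; the module-level cosmo and the 400 Mpc value are assumptions for illustration:

from astropy import units as u
from astropy.coordinates import Distance
from astropy.cosmology import Planck13

cosmo = Planck13                       # assumed module-level cosmology
z = dist_to_z(Distance(400.0, u.Mpc))  # the unbound-call form used above
print(z)                               # roughly 0.09 for Planck13
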
Example No. 9
def vpf(dat_dir, Nsph, simul_cosmo, rad):

    # Grab the data coordinates
    gals = h5_arr("./dat/out/{0}/{1}/gals_cart_coords.hdf5".
                      format(dat_dir, simul_cosmo), "cart_pts")

    # Get details about the redshift interval being considered
    nbar_dict = json.load(open("./dat/out/{0}/{1}/nbar_zrange.json".
                                   format(dat_dir, simul_cosmo)))
    zlo = nbar_dict["zlo"]
    zhi = nbar_dict["zhi"]

    # Get the search points
    good_pts = h5_arr("./dat/out/{0}/srch_radec.hdf5".format(dat_dir), "good_pts")
    bad_pts = h5_arr("./dat/out/{0}/veto.hdf5".format(dat_dir),
                     "bad_pts")

    # Set angular radius of effective area around bad points
    bad_r = np.arccos(1.0 - (np.pi * 9.8544099e-05) / (2 * 180 ** 2))
    bad_r_deg = np.rad2deg(bad_r)

    # Set the cosmology with h free
    # Here the cosmology is based on WMAP (for first MultiDark simulation)
    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    # Build the trees

    # galaxy tree
    gal_baum = cKDTree(gals)

    # tree of bad points (angular coordinates on unit sphere)
    bad_xyz = radec2xyz(bad_pts)
    veto_baum = cKDTree(bad_xyz)

    # Initialise final output arrays
#    rad = np.arange(1.0, 67.0, 5.0)  doing it one radius at a time
#    P_0 = np.zeros(rad.shape)

    # No. of spheres and norm
#     Nsph_arr = Nsph * np.array(4 * [0.01] + 4 * [0.1] + 4 * [1.0])
#     norm = 1. / Nsph_arr
#    norm = 1. / Nsph

    rand_i = 0

    for r_i, r in enumerate(rad):

        # start the count of successful voids
        count = 0

        # Custom zrange for sphere size
        dis_near = Distance(comv(zlo).value + r, u.Mpc)
        dis_far = Distance(comv(zhi).value - r, u.Mpc)

        z_a = dis_near.compute_z(cosmology=cosmo)

        z_b = dis_far.compute_z(cosmology=cosmo)

        for i in range(Nsph):  # _arr[r_i]):

            # compensate for finite length of mask file
            rand_i = rand_i % 999999

            radec = good_pts[rand_i, :]

            rang = Angle(radec[0], u.deg)
            decang = Angle(radec[1], u.deg)

            randz = (z_a ** 3 +
                     (z_b ** 3 - z_a ** 3) * np.random.rand(1)[0]) ** (1. / 3.)
            dis = Distance(comv(randz), u.Mpc)

            # SkyCoord replaces ICRSCoordinates, which modern astropy removed
            coord = SkyCoord(ra=rang, dec=decang, distance=dis)

            sph_cen = np.array([coord.cartesian.x.value,
                                coord.cartesian.y.value,
                                coord.cartesian.z.value])

            nn = gal_baum.query(sph_cen)

            print "rad: ", r, ", sphere: ", i

            if not nn[0] < r:

                # add instance to probability count
                count += 1

                # record quality of sphere using spline values for intersection
                # with bad points

                # Get radius of circular projection of sphere
                R = np.arcsin(r / np.sqrt(np.sum(sph_cen[:] ** 2)))

                # Get coordinates of circle centre on unit sphere
                crc_cen = radec2xyz(radec)[0]

                # Compute tree search radius from Cosine rule
                # (include points extending beyond sphere edge to account for
                # finite area around bad points)
                l_srch = np.sqrt(2. - 2. * np.cos(R))

                # Run search
                pierce_l = veto_baum.query_ball_point(crc_cen, l_srch)

                bad_vol = 0.

                R = np.degrees(R)  # need in degrees for bad_vol computation

                for pt in pierce_l:

                    pt_ang = bad_pts[pt]
                    dis = np.degrees(central_angle(pt_ang, radec))
                    l = dis / R

                    bad_vol += 1.5 * (bad_r_deg / R) ** 2 \
                                   * np.sqrt(1.0 - l ** 2)

                f_r = open("./dat/out/{0}/{1}/vpf_out/volfrac_{2}.dat".
                               format(dat_dir, simul_cosmo, r),
                           'a')
                f_r.write("{0}\n".format(bad_vol))
                f_r.close()

            rand_i += 1
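
The function above counts empty spheres but never emits the void probability itself; the commented-out normalization suggests the intended final step, sketched here as an assumption:

# After the loop over Nsph spheres at a given radius r:
P_0 = count / float(Nsph)  # fraction of empty spheres = VPF at radius r
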
Example No. 10
def vpf(dat_dir, Nsph, simul_cosmo, rad):

    # Grab the data coordinates
    gals = h5_arr(
        "./dat/out/{0}/{1}/gals_cart_coords.hdf5".format(dat_dir, simul_cosmo),
        "cart_pts")

    # Get details about the redshift interval being considered
    nbar_dict = json.load(
        open("./dat/out/{0}/{1}/nbar_zrange.json".format(dat_dir,
                                                         simul_cosmo)))
    zlo = nbar_dict["zlo"]
    zhi = nbar_dict["zhi"]

    # Get the search points
    good_pts = h5_arr("./dat/out/{0}/srch_radec.hdf5".format(dat_dir),
                      "good_pts")
    bad_pts = h5_arr("./dat/out/{0}/veto.hdf5".format(dat_dir), "bad_pts")

    # Set angular radius of effective area around bad points
    bad_r = np.arccos(1.0 - (np.pi * 9.8544099e-05) / (2 * 180**2))
    bad_r_deg = np.rad2deg(bad_r)

    # Set the cosmology with h free
    # Here the cosmology is based on WMAP (for first MultiDark simulation)
    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    # Build the trees

    # galaxy tree
    gal_baum = cKDTree(gals)

    # tree of bad points (angular coordinates on unit sphere)
    bad_xyz = radec2xyz(bad_pts)
    veto_baum = cKDTree(bad_xyz)

    # Initialise final output arrays
    #    rad = np.arange(1.0, 67.0, 5.0)  doing it one radius at a time
    #    P_0 = np.zeros(rad.shape)

    # No. of spheres and norm
    #     Nsph_arr = Nsph * np.array(4 * [0.01] + 4 * [0.1] + 4 * [1.0])
    #     norm = 1. / Nsph_arr
    #    norm = 1. / Nsph

    rand_i = 0

    for r_i, r in enumerate(rad):

        # start the count of successful voids
        count = 0

        # Custom zrange for sphere size
        dis_near = Distance(comv(zlo).value + r, u.Mpc)
        dis_far = Distance(comv(zhi).value - r, u.Mpc)

        z_a = dis_near.compute_z(cosmology=cosmo)

        z_b = dis_far.compute_z(cosmology=cosmo)

        for i in range(Nsph):  # _arr[r_i]):

            # compensate for finite length of mask file
            rand_i = rand_i % 999999

            radec = good_pts[rand_i, :]

            rang = Angle(radec[0], u.deg)
            decang = Angle(radec[1], u.deg)

            randz = (z_a ** 3 +
                     (z_b ** 3 - z_a ** 3) * np.random.rand(1)[0]) ** (1. / 3.)
            dis = Distance(comv(randz), u.Mpc)

            # SkyCoord replaces ICRSCoordinates, which modern astropy removed
            coord = SkyCoord(ra=rang, dec=decang, distance=dis)

            sph_cen = np.array([coord.cartesian.x.value,
                                coord.cartesian.y.value,
                                coord.cartesian.z.value])

            nn = gal_baum.query(sph_cen)

            print "rad: ", r, ", sphere: ", i

            if not nn[0] < r:

                # add instance to probability count
                count += 1

                # record quality of sphere using spline values for intersection
                # with bad points

                # Get radius of circular projection of sphere
                R = np.arcsin(r / np.sqrt(np.sum(sph_cen[:]**2)))

                # Get coordinates of circle centre on unit sphere
                crc_cen = radec2xyz(radec)[0]

                # Compute tree search radius from Cosine rule
                # (include points extending beyond sphere edge to account for
                # finite area around bad points)
                l_srch = np.sqrt(2. - 2. * np.cos(R))

                # Run search
                pierce_l = veto_baum.query_ball_point(crc_cen, l_srch)

                bad_vol = 0.

                R = np.degrees(R)  # need in degrees for bad_vol computation

                for pt in pierce_l:

                    pt_ang = bad_pts[pt]
                    dis = np.degrees(central_angle(pt_ang, radec))
                    l = dis / R

                    bad_vol += 1.5 * (bad_r_deg / R) ** 2 \
                                   * np.sqrt(1.0 - l ** 2)

                with open(
                        "./dat/out/{0}/{1}/vpf_out/volfrac_{2}.dat".format(
                            dat_dir, simul_cosmo, r), 'a') as f_r:
                    f_r.write("{0}\n".format(bad_vol))

            rand_i += 1