def make_physical(self, refinement=1, pad=5, order=2, clear_obs=True):
        r"""Project the maps and weights into physical coordinates
        refinement allows the physical bins to be e.g. =2 times finer
        pad puts a number of padded pixels on all sides of the physical vol.
        the clear_obs flag deletes the obs. coord maps
        """

        self.phys_map1 = bh.repackage_kiyo(
            pg.physical_grid(self.map1,
                             refinement=refinement,
                             pad=pad,
                             order=order))

        self.phys_map2 = bh.repackage_kiyo(
            pg.physical_grid(self.map2,
                             refinement=refinement,
                             pad=pad,
                             order=order))

        self.phys_noise_inv1 = bh.repackage_kiyo(
            pg.physical_grid(self.noise_inv1,
                             refinement=refinement,
                             pad=pad,
                             order=order))

        self.phys_noise_inv2 = bh.repackage_kiyo(
            pg.physical_grid(self.noise_inv2,
                             refinement=refinement,
                             pad=pad,
                             order=order))

        if clear_obs:
            del self.map1, self.map2, self.noise_inv1, self.noise_inv2
            gc.collect()

        return
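
# A minimal usage sketch, assuming a hypothetical `pair` object that carries
# map1/map2 and noise_inv1/noise_inv2 as algebra vectors in observation
# coordinates (the attribute layout make_physical expects):
#
#     pair.make_physical(refinement=2, pad=5, order=1)
#     print pair.phys_map1.shape, pair.phys_noise_inv1.shape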
def find_avg_fsky(map_key, tack_on=None, refinement=2, pad=5, order=1):
    """Take all the pairs that enter the autopower, open their weight files and
    find the fsky for each data treatment
    In a pipeline
        fsky = find_avg_fsky(self.params["map_key"],
                            tack_on=self.params["tack_on"],
                            refinement=self.params["refinement"],
                            pad=self.params["pad"],
                            order=self.params["order"])
    """
    fsky = {}
    datapath_db = dp.DataPath()

    map_cases = datapath_db.fileset_cases(map_key, "pair;type;treatment")

    # this pair permutation is essentially verbatim from the real autopower
    unique_pairs = dp.GBTauto_cross_pairs(map_cases['pair'],
                                          map_cases['pair'],
                                          cross_sym="_with_")

    treatment_list = map_cases['treatment']

    for treatment in treatment_list:
        fsky_list = []
        for item in unique_pairs:
            dbkeydict = {}
            mapset0 = (map_key, item[0], treatment)
            mapset1 = (map_key, item[1], treatment)
            dbkeydict['noiseinv1_key'] = "%s:%s;noise_inv;%s" % mapset0
            dbkeydict['noiseinv2_key'] = "%s:%s;noise_inv;%s" % mapset1
            files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                                     datapath_db=datapath_db,
                                                     tack_on=tack_on)

            print files['noiseinv1_key'], files['noiseinv2_key']
            weight1 = algebra.make_vect(algebra.load(files['noiseinv1_key']))
            weight2 = algebra.make_vect(algebra.load(files['noiseinv2_key']))

            physweight1 = bh.repackage_kiyo(
                pg.physical_grid(weight1,
                                 refinement=refinement,
                                 pad=pad,
                                 order=order))

            physweight2 = bh.repackage_kiyo(
                pg.physical_grid(weight2,
                                 refinement=refinement,
                                 pad=pad,
                                 order=order))

            # the physical-coordinate variant (using physweight1/2 above) is
            # kept commented out for reference:
            #pair_fsky = np.sum(physweight1 * physweight2)**2
            #pair_fsky /= np.sum(physweight1**2 * physweight2**2)
            #pair_fsky /= float(physweight1.size)
            pair_fsky = np.sum(weight1 * weight2)**2
            pair_fsky /= np.sum(weight1**2 * weight2**2)
            pair_fsky /= float(weight1.size)
            print "volume factor in noise weight: ", pair_fsky
            fsky_list.append(pair_fsky)

        # average over pairs so each treatment maps to a single fsky
        fsky[treatment] = np.mean(fsky_list)

    return fsky
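
# The fsky expression above is an effective volume fraction: it equals 1 for
# uniform weights and shrinks as the weight concentrates. A small
# self-contained numpy check on synthetic (toy) weights, not pipeline data:
import numpy as np

w1 = np.ones((8, 8, 8))
w2 = np.ones((8, 8, 8))
# uniform weights: sum(w1*w2)**2 / (sum(w1**2 * w2**2) * N) == 1
print np.sum(w1 * w2)**2 / np.sum(w1**2 * w2**2) / float(w1.size)  # 1.0

w1[4:, :, :] = 0.  # now the weights cover only half the volume
w2[4:, :, :] = 0.
print np.sum(w1 * w2)**2 / np.sum(w1**2 * w2**2) / float(w1.size)  # 0.5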
def wrap_find_weight(filename, regenerate=False):
    """Find the weight for `filename`: recompute it if regenerate is True,
    otherwise go through the memoized (disk-cached) version."""
    if regenerate:
        retval = find_weight(filename)
    else:
        retval = memoize_find_weight(filename)

    return batch_handler.repackage_kiyo(retval)
def wrap_find_weight(filename, regenerate=False, calc_diagnal=False):
    """As above, but calc_diagnal switches to the diagonal-noise variants
    (find_weight_re_diagnal and its memoized form)."""
    if not calc_diagnal:
        if regenerate:
            retval = find_weight(filename)
        else:
            retval = memoize_find_weight(filename)
    else:
        if regenerate:
            retval = find_weight_re_diagnal(filename)
        else:
            retval = memoize_find_weight_re_diagnal(filename)

    return batch_handler.repackage_kiyo(retval)
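
# memoize_find_weight (and the _re_diagnal variants) are assumed to be
# disk-cached versions of the corresponding functions. A minimal sketch of
# that kind of wrapper, using shelve with a hypothetical cache file rather
# than the pipeline's actual batch_handler machinery:
import shelve

def memoize_to_shelve(func, cachefile="memoize_cache.shelve"):
    """Cache func(filename) results on disk, keyed by filename."""
    def wrapper(filename):
        cache = shelve.open(cachefile)
        try:
            if filename not in cache:
                cache[filename] = func(filename)
            return cache[filename]
        finally:
            cache.close()
    return wrapper

# hypothetical construction of the memoized variant used above:
# memoize_find_weight = memoize_to_shelve(find_weight)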
def get_cached_physical(filename, refinement=2, pad=5, order=1):
    basename = filename.split(".")[0]
    phys_cachename = basename + "_physical.npy"
    chksum_cachename = basename + ".md5"
    print phys_cachename, chksum_cachename

    curr_chksum = ft.hashfile(filename)
    # ALSO CHECK IF THE PARAMS CHANGED!

    # try to read an existing checksum; any failure marks the cache stale
    chksum_not_changed = False
    try:
        chkfile = open(chksum_cachename, "r")
        old_chksum = chkfile.read()
        chkfile.close()
        if old_chksum == curr_chksum:
            chksum_not_changed = True
    except IOError:
        chksum_not_changed = False

    if os.path.isfile(phys_cachename) and chksum_not_changed:
        print "using the cached file: " + phys_cachename
        ret_data = algebra.make_vect(algebra.load(phys_cachename))
    else:
        print "writing a physical cache for: " + filename
        # calculate the physical coordinate box
        obs_map = algebra.make_vect(algebra.load(filename))
        ret_data = bh.repackage_kiyo(pg.physical_grid(obs_map,
                                           refinement=refinement,
                                           pad=pad, order=order))
        algebra.save(phys_cachename, ret_data)

        # save the new checksum
        chkfile = open(chksum_cachename, "w")
        chkfile.write(curr_chksum)
        chkfile.close()

    return ret_data
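
# ft.hashfile is assumed to return a hex digest of the file contents; for
# reference, a chunked md5 using only the standard library would look like:
import hashlib

def hashfile_sketch(filename, blocksize=65536):
    """Chunked md5 of a file; a stand-in for ft.hashfile (assumption)."""
    hasher = hashlib.md5()
    fileobj = open(filename, "rb")
    buf = fileobj.read(blocksize)
    while len(buf) > 0:
        hasher.update(buf)
        buf = fileobj.read(blocksize)
    fileobj.close()
    return hasher.hexdigest()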
def calculate_mixing(weight_file1, weight_file2, bins, xspec_fileout,
                     mixing_fileout,
                     unitless=False, refinement=2, pad=5, order=1,
                     window='blackman', zero_pad=False, identity_test=False):
    print "loading the weights and converting to physical coordinates"
    weight1_obs = algebra.make_vect(algebra.load(weight_file1))
    weight1 = bh.repackage_kiyo(pg.physical_grid(
                                weight1_obs,
                                refinement=refinement,
                                pad=pad, order=order))

    weight2_obs = algebra.make_vect(algebra.load(weight_file2))
    weight2 = bh.repackage_kiyo(pg.physical_grid(
                                weight2_obs,
                                refinement=refinement,
                                pad=pad, order=order))

    if window:
        window_function = fftutil.window_nd(weight1.shape, name=window)
        weight1 *= window_function
        weight2 *= window_function

    print "calculating the cross-power of the spatial weighting functions"
    arr1 = algebra.ones_like(weight1)
    arr2 = algebra.ones_like(weight2)

    # no window applied here (applied above)
    xspec = pe.cross_power_est(weight1, weight2, arr1, arr2,
                               window=None, nonorm=True)

    # for each point in the cube, find |k|, k_perp, k_parallel
    # TODO: speed this up by using one direct numpy call (not limiting)
    k_mag_arr = binning.radius_array(xspec)
    k_perp_arr = binning.radius_array(xspec, zero_axes=[0])
    k_parallel_arr = binning.radius_array(xspec, zero_axes=[1, 2])

    if unitless:
        xspec = pe.make_unitless(xspec, radius_arr=k_mag_arr)

    # NOTE: assuming lowest k bin has only one point in 3D k-space
    # could make this floor of dimensions divided by 2 also
    center_3d = np.transpose(np.transpose(np.where(k_mag_arr == 0.))[0])

    # In the estimator, we divide by sum(w1 * w2) to remove most of the effect
    # of the weighting. The mixing matrix here can be thought of as a
    # correction to that diagonal-only estimate.
    leakage_ratio = xspec[center_3d[0], center_3d[1], center_3d[2]] / \
                    np.sum(weight1 * weight2)
    print "power leakage ratio: %10.5g" % leakage_ratio

    xspec /= np.sum(weight1 * weight2)

    print "partitioning the 3D kspace up into the 2D k bins"
    (kflat, ret_indices) = bin_indices_2d(k_perp_arr, k_parallel_arr,
                                          bins, bins)

    # perform a test where the window function is a delta function at the
    # origin so that the mixing matrix is identity
    if identity_test:
        xspec = algebra.zeros_like(xspec)
        xspec[center_3d[0], center_3d[1], center_3d[2]] = 1.

    # now save the window cross-power for downstream pooled users
    algebra.save(xspec_fileout, xspec)

    runlist = []
    for bin_index in range(kflat.shape[0]):
        bin_3d = ret_indices[repr(bin_index)]
        if bin_3d is not None:
            runlist.append((xspec_fileout, bin_index, bins, bin_3d, center_3d))

    # leave a few cores free, but always keep at least one worker
    pool = multiprocessing.Pool(processes=max(1, multiprocessing.cpu_count() - 4))
    # the longest runs get pushed to the end; randomize for better job packing
    random.shuffle(runlist)
    results = pool.map(sum_window, runlist)
    #gnuplot_single_slice(runlist[0])  # for troubleshooting

    # now save the results for post-processing
    params = {"unitless": unitless, "refinement": refinement, "pad": pad,
              "order": order, "window": window, "zero_pad": zero_pad,
              "identity_test": identity_test, "weight_file1": weight_file1,
              "weight_file2": weight_file2, "bins": bins}

    outshelve = shelve.open(mixing_fileout, "n")
    outshelve["params"] = params        # parameters for this run
    outshelve["weight1"] = weight1      # weight map 1
    outshelve["weight2"] = weight2      # weight map 2
    outshelve["xspec"] = xspec          # copy of the weight spectra
    outshelve["kflat"] = kflat          # 2D k bin vector
    outshelve["bins_3d"] = ret_indices  # indices to k3d for a 2d k bin
    outshelve["results"] = results      # mixing matrix columns
    outshelve.close()
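
# A driver sketch for calculate_mixing; the file names are placeholders and
# the bin edges are illustrative, not pipeline defaults:
import numpy as np

bins = np.logspace(np.log10(0.003), np.log10(2.5), num=35, endpoint=True)
calculate_mixing("weight_A_with_B.npy", "weight_B_with_A.npy", bins,
                 "xspec_AB.npy", "mixing_AB.shelve",
                 unitless=True, refinement=2, pad=5, order=1,
                 window='blackman')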