Example #1
    def load_pairs(self):
        r"""load the set of map/noise pairs specified by keys handed to the
        database. This sets up operations on the quadratic product
            Q = map1^T noise_inv1 B noise_inv2 map2
        """
        par = self.params
        (self.pairlist, pairdict) = dp.cross_maps(par['map1'], par['map2'],
                                             par['noise_inv1'],
                                             par['noise_inv2'],
                                             noise_inv_suffix=";noise_weight",
                                             verbose=False,
                                             tack_on=self.tack_on_input,
                                             db_to_use=self.datapath_db)

        for pairitem in self.pairlist:
            pdict = pairdict[pairitem]
            print "-" * 80
            dp.print_dictionary(pdict, sys.stdout,
                                key_list=['map1', 'noise_inv1',
                                          'map2', 'noise_inv2'])

            map1 = algebra.make_vect(algebra.load(pdict['map1']))
            map2 = algebra.make_vect(algebra.load(pdict['map2']))
            if par['simfile'] is not None:
                print "adding %s with multiplier %s" % (par['simfile'],
                                                        par['sim_multiplier'])

                sim = algebra.make_vect(algebra.load(par['simfile']))
                sim *= par['sim_multiplier']
                print sim.shape, map1.shape
            else:
                sim = algebra.zeros_like(map1)

            noise_inv1 = algebra.make_vect(algebra.load(pdict['noise_inv1']))
            noise_inv2 = algebra.make_vect(algebra.load(pdict['noise_inv2']))

            pair = map_pair.MapPair(map1 + sim, map2 + sim,
                                    noise_inv1, noise_inv2,
                                    self.freq_list)

            pair.set_names(pdict['tag1'], pdict['tag2'])

            pair.params = self.params
            self.pairs[pairitem] = pair

            if par['subtract_inputmap_from_sim'] or \
               par['subtract_sim_from_inputmap']:
                if par['subtract_inputmap_from_sim']:
                    pair_parallel_track = map_pair.MapPair(map1, map2,
                                                  noise_inv1, noise_inv2,
                                                  self.freq_list)

                if par['subtract_sim_from_inputmap']:
                    pair_parallel_track = map_pair.MapPair(sim, sim,
                                                  noise_inv1, noise_inv2,
                                                  self.freq_list)

                pair_parallel_track.set_names(pdict['tag1'], pdict['tag2'])
                pair_parallel_track.params = self.params
                self.pairs_parallel_track[pairitem] = pair_parallel_track
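For intuition, when the noise inverses are diagonal and B is the identity, the quadratic product in the docstring collapses to a weighted elementwise sum. A minimal numpy sketch with toy shapes (not project code):

import numpy as np

# toy maps and diagonal N^-1 weights, shape (freq, ra, dec)
map1 = np.random.randn(4, 8, 8)
map2 = np.random.randn(4, 8, 8)
noise_inv1 = np.random.rand(4, 8, 8)
noise_inv2 = np.random.rand(4, 8, 8)

# Q = map1^T noise_inv1 B noise_inv2 map2 with diagonal weights and B = I
q = np.sum(map1 * noise_inv1 * noise_inv2 * map2)
print q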
Example #2
def add_sim_radio():
    """script: go through a list of simulations and add those to a selected map
    """
    root_file = "/mnt/raid-project/gmrt/eswitzer/wiggleZ/"
    radio_file = root_file + "modetest_combined_maps/combined_41-73_cleaned_clean_15.npy"
    root_sim = "/mnt/raid-project/gmrt/calinliv/wiggleZ/simulations/test100/"
    root_out = root_file + "simulations_plus_data/"
    radio_data = algebra.make_vect(algebra.load(radio_file))

    for simindex in range(1,101):
        simname = root_sim + "simulated_signal_map_" + \
                  repr(simindex)+"_with_beam.npy"
        filename = root_out + "simulated_signal_plusdata_map_" + \
                   repr(simindex)+"_with_beam.npy"
        simoutname = root_out + "simulated_signal_map_" + \
                   repr(simindex)+"_with_beam.npy"

        sim_data = algebra.make_vect(algebra.load(simname))
        sim_data /= 1000.
        outmap = copy.deepcopy(radio_data)
        outmap += sim_data

        algebra.save(filename, outmap)
        algebra.save(simoutname, sim_data)

        print filename
Example #3
def generate_proc_sim(input_file, weightfile, output_file,
                      meansub=False, degrade=False):
    r"""make the maps with various combinations of beam conv/meansub"""
    print "%s -> %s (beam, etc.)" % (input_file, output_file)
    simmap = algebra.make_vect(algebra.load(input_file))

    if degrade:
        print "performing common resolution convolution"
        beam_data = sp.array([0.316148488246, 0.306805630985, 0.293729620792,
                 0.281176247549, 0.270856788455, 0.26745856078,
                 0.258910010848, 0.249188429031])
        freq_data = sp.array([695, 725, 755, 785, 815, 845, 875, 905],
                             dtype=float)
        freq_data *= 1.0e6
        beam_diff = sp.sqrt(max(1.1 * beam_data) ** 2 - (beam_data) ** 2)
        common_resolution = beam.GaussianBeam(beam_diff, freq_data)
        # Convolve to a common resolution.
        simmap = common_resolution.apply(simmap)

    if meansub:
        print "performing mean subtraction"
        noise_inv = algebra.make_vect(algebra.load(weightfile))
        means = sp.sum(sp.sum(noise_inv * simmap, -1), -1)
        means /= sp.sum(sp.sum(noise_inv, -1), -1)
        means.shape += (1, 1)
        simmap -= means
        # the weights will be zero in some places
        simmap[noise_inv < 1.e-20] = 0.

    # extra sanity?
    simmap[np.isinf(simmap)] = 0.
    simmap[np.isnan(simmap)] = 0.

    print "saving to" + output_file
    algebra.save(output_file, simmap)
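The mean subtraction above removes the inverse-noise-weighted spatial mean from each frequency slice; a self-contained numpy sketch of the same operation (toy shapes, no project dependencies):

import numpy as np

simmap = np.random.randn(4, 16, 16)    # (freq, ra, dec)
noise_inv = np.random.rand(4, 16, 16)  # diagonal N^-1 weights

# weighted mean over (ra, dec) for each frequency slice
means = np.sum(np.sum(noise_inv * simmap, -1), -1)
means /= np.sum(np.sum(noise_inv, -1), -1)
simmap -= means[:, np.newaxis, np.newaxis]
simmap[noise_inv < 1.e-20] = 0.        # zero out pixels with no weight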
def generate_windows(window="blackman"):
    datapath_db = data_paths.DataPath()
    # first generate a window for the full physical volume
    filename = datapath_db.fetch('simideal_15hr_physical', intend_read=True,
                                 pick='1')
    print filename
    pcube = algebra.make_vect(algebra.load(filename))
    pwindow = algebra.make_vect(fftutil.window_nd(pcube.shape, name=window),
                                axis_names=('freq', 'ra', 'dec'))
    pwindow.copy_axis_info(pcube)
    print pwindow.shape
    algebra.save("physical_window.npy", pwindow)

    # now generate one for the observed region and project onto the physical
    # volume.
    filename = datapath_db.fetch('simideal_15hr_beam', intend_read=True,
                                 pick='1')
    print filename
    ocube = algebra.make_vect(algebra.load(filename))
    owindow = algebra.make_vect(fftutil.window_nd(ocube.shape, name=window),
                                axis_names=('freq', 'ra', 'dec'))
    owindow.copy_axis_info(ocube)
    print owindow.shape
    print owindow.axes
    algebra.save("observed_window.npy", owindow)
    pwindow = physical_gridding.physical_grid(owindow, refinement=2)
    print pwindow.shape
    algebra.save("observed_window_physreg.npy", pwindow)
Example #5
def plot_difference(filename1, filename2, title, sigmarange=6., sigmacut=None,
                    transverse=False, outputdir="./", multiplier=1000.,
                    logscale=False, fractional=False,
                    ignore=None, diff_filename="./difference.npy"):
    """make movies of the difference of two maps (assuming same dimensions)"""
    map1 = algebra.make_vect(algebra.load(filename1))
    map2 = algebra.make_vect(algebra.load(filename2))

    if fractional:
        difftitle = "fractional diff."
        dmap = (map1 - map2) / map1 * 100.
    else:
        difftitle = "difference"
        dmap = map1 - map2

    algebra.save(diff_filename, dmap)

    make_cube_movie(diff_filename,
                       difftitle, cube_frame_dir, sigmarange=6.,
                       sigmacut=sigmacut, outputdir=outputdir, ignore=ignore,
                       multiplier=multiplier, transverse=transverse,
                       logscale=False)

    make_cube_movie(filename1,
                       title, cube_frame_dir, sigmarange=sigmarange,
                       sigmacut=sigmacut, outputdir=outputdir, ignore=ignore,
                       multiplier=multiplier, transverse=transverse,
                       logscale=logscale, filetag_suffix="_1")

    make_cube_movie(filename2,
                       title, cube_frame_dir, sigmarange=sigmarange,
                       sigmacut=sigmacut, outputdir=outputdir, ignore=ignore,
                       multiplier=multiplier, transverse=transverse,
                       logscale=logscale, filetag_suffix="_2")
Example #6
def add_manual_mask(source_key, cut_freq_list=None,
                    signal_name='map', noise_inv_name='noise_inv',
                    weight_name='weight', divider_token=";"):
    r"""
    `source_key` is the file db key for the maps to combine
    `signal_name` is the tag in the file db entry for the signal maps
    `noise_inv_name` is the tag in the file db entry for the N^-1 weights
    `weight_name` is the tag in the file db entry for the weights to write out
    `divider_token` is the token that divides the map section name
            from the data type e.g. "A_with_B;noise_inv"
    """
    datapath_db = data_paths.DataPath()
    source_fdb = datapath_db.fetch(source_key, silent=True)
    source_fdict = source_fdb[1]

    # accumulate all the files to combine
    noise_inv_keys = {}
    weight_keys = {}
    signal_keys = {}
    for filekey in source_fdb[0]:
        if divider_token in filekey:
            data_type = filekey.split(divider_token)[1]
            map_section = filekey.split(divider_token)[0]

            if data_type == signal_name:
                signal_keys[map_section] = source_fdict[filekey]

            if data_type == noise_inv_name:
                noise_inv_keys[map_section] = source_fdict[filekey]

            if data_type == weight_name:
                weight_keys[map_section] = source_fdict[filekey]

    for mapkey in signal_keys:
        signal_file = signal_keys[mapkey]
        noise_inv_file = noise_inv_keys[mapkey]
        weight_file = weight_keys[mapkey]
        print "loading pair: %s %s -> %s" % \
                (signal_file, noise_inv_file, weight_file)
        signal_map = algebra.make_vect(algebra.load(signal_file))
        weightmap = algebra.make_vect(algebra.load(noise_inv_file))

        # set the new weights to zero where the N^-1 is small
        # or the signal map is inf or nan
        weightmap[np.isnan(weightmap)] = 0.
        weightmap[np.isinf(weightmap)] = 0.
        weightmap[np.isnan(signal_map)] = 0.
        weightmap[np.isinf(signal_map)] = 0.
        weightmap[weightmap < 1.e-20] = 0.

        if cut_freq_list is not None:
            for cutindex in cut_freq_list:
                weightmap[cutindex, :, :] = 0.

        # could also determine the filename here, outside of the database
        #outputdir = datapath_db.fetch_parent(source_key, return_path=True)
        #weight_out = "%s/%s" % (outputdir, source_key)
        algebra.compressed_array_summary(weightmap, "new weight map")
        algebra.save(weight_file, weightmap)
Example #7
def map_pair_cal(uncal_maplist, uncal_weightlist, calfactor_outlist,
                 dirtymap_inlist, dirtymap_outlist,
                 convolve=True, factorizable_noise=True,
                 sub_weighted_mean=True, freq_list=range(256)):

    map1file = uncal_maplist.pop(0)
    weight1file = uncal_weightlist.pop(0)
    calfactor_outlist.pop(0)
    dirtymap_out0 = dirtymap_outlist.pop(0)
    dirtymap_in0 = dirtymap_inlist.pop(0)

    # do nothing to the reference map
    ref_dirtymap = algebra.make_vect(algebra.load(dirtymap_in0))
    algebra.save(dirtymap_out0, ref_dirtymap)

    # load maps into pairs
    svdout = shelve.open("correlation_pairs.shelve")
    for map2file, weight2file, calfactor_outfile, \
        dirty_infile, dirty_outfile in zip(uncal_maplist, \
            uncal_weightlist, calfactor_outlist,
            dirtymap_inlist, dirtymap_outlist):

        print map1file, weight1file, map2file, weight2file

        pair = map_pair.MapPair(map1file, map2file,
                                weight1file, weight2file,
                                freq_list, avoid_db=True)

        if factorizable_noise:
            pair.make_noise_factorizable()

        if sub_weighted_mean:
            pair.subtract_weighted_mean()

        if convolve:
            pair.degrade_resolution()

        (corr, counts) = pair.correlate()
        svd_info = ce.get_freq_svd_modes(corr, len(freq_list))
        svdout[map2file] = svd_info

        # write out the left right and cal factors
        leftmode = svd_info[1][0]
        rightmode = svd_info[2][0]
        calfactor = leftmode/rightmode

        facout = open(calfactor_outfile, "w")
        for outvals in zip(leftmode, rightmode, calfactor):
            facout.write("%10.15g %10.15g %10.15g\n" % outvals)

        facout.close()

        newmap = algebra.make_vect(algebra.load(dirty_infile))
        newmap[freq_list, :, :] *= calfactor[:,np.newaxis,np.newaxis]
        algebra.save(dirty_outfile, newmap)
        print dirty_outfile

    svdout.close()
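ce.get_freq_svd_modes is project code; the calibration factor it feeds is the ratio of the first left and right singular vectors of the frequency-frequency correlation matrix. A minimal sketch with numpy.linalg.svd on a toy matrix:

import numpy as np

corr = np.random.randn(8, 8)       # toy freq-freq cross-correlation
u, s, vt = np.linalg.svd(corr)
leftmode = u[:, 0]                 # first left singular vector
rightmode = vt[0, :]               # first right singular vector
calfactor = leftmode / rightmode   # per-frequency gain ratio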
Example #8
def divide_iqu_map(source_dict=None, target_dict=None, map_dict=None):
    if source_dict is not None:
        iqu        = algebra.make_vect(algebra.load(source_dict['map']))
        iqu_weight = algebra.make_vect(algebra.load(source_dict['weight']))
    elif map_dict is not None:
        iqu        = algebra.make_vect(map_dict['map'])
        iqu_weight = algebra.make_vect(map_dict['weight'])
    else:
        # failing loudly here avoids a NameError on iqu below
        raise ValueError("can not find iqu map")

    nfreq = iqu.shape[0]/3

    imap = algebra.make_vect(iqu[ 0*nfreq : 1*nfreq, ...])
    qmap = algebra.make_vect(iqu[ 1*nfreq : 2*nfreq, ...])
    umap = algebra.make_vect(iqu[ 2*nfreq : 3*nfreq, ...])

    imap.info = iqu.info
    qmap.info = iqu.info
    umap.info = iqu.info

    imap.copy_axis_info(iqu)
    qmap.copy_axis_info(iqu)
    umap.copy_axis_info(iqu)

    imap_weight = algebra.make_vect(iqu_weight[ 0*nfreq : 1*nfreq, ...])
    qmap_weight = algebra.make_vect(iqu_weight[ 1*nfreq : 2*nfreq, ...])
    umap_weight = algebra.make_vect(iqu_weight[ 2*nfreq : 3*nfreq, ...])

    imap_weight.info = iqu_weight.info
    qmap_weight.info = iqu_weight.info
    umap_weight.info = iqu_weight.info

    imap_weight.copy_axis_info(iqu_weight)
    qmap_weight.copy_axis_info(iqu_weight)
    umap_weight.copy_axis_info(iqu_weight)

    if target_dict != None:
        algebra.save(target_dict['imap'], imap)
        algebra.save(target_dict['qmap'], qmap)
        algebra.save(target_dict['umap'], umap)

        algebra.save(target_dict['imap_weight'], imap_weight)
        algebra.save(target_dict['qmap_weight'], qmap_weight)
        algebra.save(target_dict['umap_weight'], umap_weight)
    else:
        map_dict = {}
        map_dict['imap'] = imap
        map_dict['qmap'] = qmap
        map_dict['umap'] = umap
        map_dict['imap_weight'] = imap_weight
        map_dict['qmap_weight'] = qmap_weight
        map_dict['umap_weight'] = umap_weight
        return map_dict
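The slicing above assumes the I, Q, and U cubes are stacked along the frequency axis; numpy's split expresses the same cut compactly (a sketch on a plain array, without the .info and axis bookkeeping):

import numpy as np

iqu = np.random.randn(12, 8, 8)    # 3 * nfreq planes stacked on axis 0
imap, qmap, umap = np.split(iqu, 3, axis=0)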
Example #9
def getmap(imap_fname, nmap_fname, mmap_fname=None, half=None):
    """
    get the matrix of intensity map and noise map
    """
    #in_root = params['input_root']

    imap = algebra.load(imap_fname)
    imap = algebra.make_vect(imap)

    if half!=None:
        imap = getmap_halfz(imap, half)
    #print "--The neam value for imap is:",imap.flatten().mean(),"--"
    #imap = imap - imap.flatten().mean()
    if imap.axes != ('freq', 'ra', 'dec') :
        raise ce.DataError('AXES ERROR!')

    try:
        nmap = algebra.load(nmap_fname)
        nmap = algebra.make_vect(nmap)

        if half!=None:
            nmap = getmap_halfz(nmap, half)

        bad = nmap<1.e-5*nmap.flatten().max()
        nmap[bad] = 0.
        non0 = nmap.nonzero()
        #imap[non0] = imap[non0]/nmap[non0]
    except IOError:
        print 'NO Noise File :: Set Noise to One'
        nmap = algebra.info_array(sp.ones(imap.shape))
        nmap.axes = imap.axes
        nmap = algebra.make_vect(nmap)
    nmap.info = imap.info
    if nmap.axes != ('freq', 'ra', 'dec') :
        raise ce.DataError('AXES ERROR!')

    if mmap_fname != None:
        try:
            mmap = algebra.load(mmap_fname)
            mmap = algebra.make_vect(mmap)
            if half!=None:
                mmap = getmap_halfz(mmap, half)
        except IOError:
            print 'NO Mock File :: Make it!'
            mmap = algebra.info_array(
                2.*np.random.rand(imap.shape[0],imap.shape[1], imap.shape[2])-0.5)
            mmap.axes = imap.axes
            mmap = algebra.make_vect(mmap)
        
        return imap, nmap, mmap
    else:
        return imap, nmap
Example #10
    def __init__(self, map1, map2, noise_inv1, noise_inv2, freq,
                 input_filenames=False, conv_factor=1.1):
        r"""
        arguments: map1, map2, noise_inv1, noise_inv2, freq
        conv_factor is the factor by which to multiply the largest beam
        in the convolution to a common resolution
        """
        if input_filenames:
            self.map1 = algebra.make_vect(algebra.load(map1))
            self.map2 = algebra.make_vect(algebra.load(map2))
            if noise_inv1:
                print "loading noise1 file: " + noise_inv1
                self.noise_inv1 = algebra.make_vect(algebra.load(noise_inv1))
            else:
                print "WARNING: map1 has unity weight; no file given"
                self.noise_inv1 = algebra.ones_like(self.map1)

            if noise_inv2:
                print "loading noise2 file: " + noise_inv2
                self.noise_inv2 = algebra.make_vect(algebra.load(noise_inv2))
            else:
                print "WARNING: map2 has unity weight; no file given"
                self.noise_inv2 = algebra.ones_like(self.map2)

        else:
            self.map1 = map1
            self.map2 = map2
            self.noise_inv1 = noise_inv1
            self.noise_inv2 = noise_inv2

        self.freq = freq
        self.conv_factor = conv_factor

        # maps in physical coordinates (derived)
        self.phys_map1 = None
        self.phys_map2 = None
        self.phys_noise_inv1 = None
        self.phys_noise_inv2 = None

        # give infinite noise to masked bands
        self.sanitize()

        # Set attributes.
        self.left_modes = 0
        self.right_modes = 0
        # For saving, to keep track of each mapname.
        self.map1_name = ''
        self.map2_name = ''
        # Which section [A, B, C, D...] the maps is from.
        self.map1_code = ''
        self.map2_code = ''
Example #11
def mktmp(rgn_i,rgn_j,rgn_k,srgn_i1,srgn_i2,srgn_j1,srgn_j2,srgn_k1,srgn_k2,outfilename):
    """Write to disk a file representing an empty matrix of given dimensions. Also write an identically
    shaped array of booleans, which are true if the index points to the subregion.
    rgn_i/j/k  : the dimensions of the full region to be simulated
        srgn_i/j/k : the dimensions of the deep integration subregion
    outfilename: the name of the file to be created
    """


    regiontype = np.zeros((rgn_i,rgn_j,rgn_k), bool)

    array = np.zeros((rgn_i,rgn_j,rgn_k))

    for i in range(0,rgn_i):
        for j in range(0,rgn_j):
            for k in range(0,rgn_k):
                if (i>=(srgn_i1-1) and i<=(srgn_i2-1) and
                        j>=(srgn_j1-1) and j<=(srgn_j2-1) and
                        k>=(srgn_k1-1) and k<=(srgn_k2-1)):
                    regiontype[i,j,k]=True
                else:
                    regiontype[i,j,k]=False

    region=algebra.info_array(array)
    regiontypename = 'bool' + outfilename
    np.save(regiontypename, regiontype)
    algebra.save(outfilename,region)
    print "done"
    template_map = algebra.make_vect(algebra.load(outfilename))
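Since the subregion is a contiguous box, the triple loop above can be replaced by a single slice assignment; an equivalent numpy sketch with toy dimensions:

import numpy as np

rgn_i = rgn_j = rgn_k = 32         # full-region dimensions (toy values)
srgn_i1, srgn_i2 = 8, 16           # 1-based inclusive subregion bounds
srgn_j1, srgn_j2 = 8, 16
srgn_k1, srgn_k2 = 8, 16

regiontype = np.zeros((rgn_i, rgn_j, rgn_k), bool)
regiontype[srgn_i1 - 1:srgn_i2,
           srgn_j1 - 1:srgn_j2,
           srgn_k1 - 1:srgn_k2] = True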
Example #12
def ReadMeta(data_path):
    '''return  freq ra dec'''
    data = algebra.make_vect(algebra.load(data_path))
    freq = data.get_axis('freq')
    ra = data.get_axis('ra')
    dec = data.get_axis('dec')
    return freq,ra,dec
Example #13
    def fetch_multi(self,
                    data_obj,
                    db_token="db:",
                    silent=False,
                    intend_read=False):
        r"""Handle various sorts of file pointers/data
        if `data_obj`
            is an array, return a deep copy of it
            is a string:
                if it begins with "db:" -- string after db is a db key
                otherwise assume it is a file and try to open it
        """
        if isinstance(data_obj, str):
            if data_obj[0:len(db_token)] == db_token:
                db_key = data_obj[len(db_token):]
                filename = self.fetch(db_key,
                                      intend_read=intend_read,
                                      silent=silent)
            else:
                filename = data_obj
                prefix = "non-db filename "
                ft.path_properties(filename,
                                   intend_read=intend_read,
                                   is_file=True,
                                   prefix=prefix,
                                   silent=silent)

            ret_data = algebra.make_vect(algebra.load(filename))
        else:
            ret_data = copy.deepcopy(data_obj)

        return ret_data
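A usage sketch covering the three branches (the database key and filename here are hypothetical):

# db-prefixed string: looked up in the file database
data = datapath_db.fetch_multi("db:some_map_key")

# plain string: treated as a path on disk
data = datapath_db.fetch_multi("/tmp/some_map.npy")

# anything else (e.g. an array): returned as a deep copy
data = datapath_db.fetch_multi(data)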
Example #14
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        self.params = params_dict
        if parameter_file:
            self.params = parse_ini.parse(parameter_file,
                                          params_init,
                                          prefix=prefix)

        self.output_file = self.params['output_file']
        self.delta_temp_file = self.params['delta_temp_file']
        self.total_integration = self.params['total_integration']
        self.weight_map = algebra.make_vect(
            algebra.load(self.params['weight_file']))

        self.max_stdev = self.params['max_stdev']

        # set the random seed
        if (self.params['seed'] < 0):
            # The usual seed is not fine enough for parallel jobs
            randsource = open("/dev/random", "rb")
            self.seed = struct.unpack("I", randsource.read(4))[0]
            #self.seed = abs(long(outfile_physical.__hash__()))
        else:
            self.seed = self.params['seed']

        random.seed(self.seed)
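Reading four bytes from /dev/random yields a 32-bit unsigned seed; os.urandom is a portable equivalent of the same trick (a standalone sketch, independent of the class above):

import os
import random
import struct

seed = struct.unpack("I", os.urandom(4))[0]
random.seed(seed)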
Example #15
def repackage_pickle_as_shelve(pklfile, shelvefile):
    """Take pickled output from Liviu's code and combine the data from various
    sources into a common shelve file. [script, not production]
    """
    print pklfile
    f = open(pklfile, "r")
    F = cPickle.load(f)
    f.close()

    # Setting axis info after pickling.
    map_file = F.params["input_root"] + "sec_A_15hr_41-73_clean_map_I.npy"
    exMap = algebra.make_vect(algebra.load(map_file))
    for Pair in F.Pairs:
        Pair.Map1.info = exMap.info
        Pair.Map2.info = exMap.info
        Pair.Noise_inv1.info = exMap.info
        Pair.Noise_inv2.info = exMap.info

    for corrindex in range(6):
        shelvename = shelvefile + "_" + repr(corrindex) + ".shelve"
        corr_shelve = shelve.open(shelvename)
        print shelvename
        corr_shelve["corr"] = F.Pairs[corrindex].corr
        corr_shelve["counts"] = F.Pairs[corrindex].counts
        corr_shelve["freq_axis"] = F.Pairs[corrindex].Map1.get_axis("freq")
        corr_shelve["params"] = F.params
        corr_shelve.close()
Example #16
    def process_noise_inv(self, filename, regenerate=True):
        r"""buffer reading the noise inverse files for speed and also
        save to a file in the intermediate output path.

        If the cached file exists as an intermediate product, load it else
        produce it.
        """
        if filename not in self.noisefiledict:
            basename = filename.split("/")[-1].split(".npy")[0]
            filename_diag = "%s/%s_diag.npy" % (self.output_root, basename)
            exists = os.access(filename_diag, os.F_OK)
            if exists and not regenerate:
                print "loading pre-diagonalized noise: " + filename_diag
                self.noisefiledict[filename] = algebra.make_vect(algebra.load(filename_diag))
            else:
                print "loading noise: " + filename
                # TODO: have this be smarter about reading various noise cov
                # inputs
                noise_inv = algebra.make_mat(algebra.open_memmap(filename, mode="r"))
                self.noisefiledict[filename] = noise_inv.mat_diag()
                # self.noisefiledict[filename] = algebra.make_vect(
                #                               algebra.load(filename))
                algebra.save(filename_diag, self.noisefiledict[filename])

        return copy.deepcopy(self.noisefiledict[filename])
Example #17
    def fetch_multi(self, data_obj, db_token="db:", silent=False,
                    intend_read=True):
        r"""Handle various sorts of file pointers/data
        if `data_obj`
            is an array, return a deep copy of it
            is a string:
                if it begins with "db:" -- string after db is a db key
                otherwise assume it is a file and try to open it
        """
        if isinstance(data_obj, str):
            if data_obj[0:len(db_token)] == db_token:
                db_key = data_obj[len(db_token):]
                filename = self.fetch(db_key, intend_read=intend_read,
                                      silent=silent)
            else:
                filename = data_obj
                prefix = "non-db filename "
                ft.path_properties(filename, intend_read=intend_read,
                                   is_file=True,
                                   prefix=prefix, silent=silent)

            ret_data = algebra.make_vect(algebra.load(filename))
        else:
            ret_data = copy.deepcopy(data_obj)

        return ret_data
Example #18
def ReadMeta(data_path):
    """return  freq ra dec"""
    data = algebra.make_vect(algebra.load(data_path))
    freq = data.get_axis("freq")
    ra = data.get_axis("ra")
    dec = data.get_axis("dec")
    return freq, ra, dec
Example #19
    def execute_assembledir(self):
        # link the weights through to the simulation directory
        for (weight_file_in, weight_file_out) in \
                zip(self.input_weight_maps, self.output_weight_maps):
            os.symlink(weight_file_in, weight_file_out)
            os.symlink(weight_file_in + ".meta", weight_file_out + ".meta")

        signalfile = self.output_root + self.output_signal
        signalmap = algebra.make_vect(algebra.load(signalfile))
        signalmap *= self.multiplier

        # now load the signal simulation add thermal noise and save
        for (thermal_file, mapfile) in \
                zip(self.output_thermal, self.output_maps):
            thermalmap = algebra.make_vect(algebra.load(thermal_file))
            algebra.save(mapfile, signalmap + thermalmap)
Example #20
    def produce_delta_map(self, optical_file, optical_selection_file):
        map_optical = algebra.make_vect(algebra.load(optical_file))
        map_nbar = algebra.make_vect(algebra.load(optical_selection_file))

        old_settings = np.seterr(invalid="ignore", under="ignore")
        map_delta = map_optical / map_nbar - 1.
        np.seterr(**old_settings)

        # TODO: also consider setting the nbar to zero outside of galaxies?
        map_delta[np.isinf(map_delta)] = 0.
        map_delta[np.isnan(map_delta)] = 0.
        # if e.g. nbar is zero, then set the point as if there were no galaxies
        # downstream, nbar=0 should coincide with zero weight anyway
        #map_delta[np.isinf(map_delta)] = -1.
        #map_delta[np.isnan(map_delta)] = -1.

        return map_delta
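The conversion is just delta = map / nbar - 1 with undefined points zeroed; a self-contained numpy sketch:

import numpy as np

counts = np.array([0., 2., 4.])
nbar = np.array([0., 2., 2.])

old_settings = np.seterr(invalid="ignore", divide="ignore")
delta = counts / nbar - 1.
np.seterr(**old_settings)

delta[np.isnan(delta)] = 0.
delta[np.isinf(delta)] = 0.
print delta    # [ 0.  0.  1.]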
Example #21
	def process_map(self, imap_fname, nmap_fname, mock_fname=None):
		params = self.params
		out_root = params['output_root']
		in_root = params['input_root']
		
		imap = algebra.load(in_root + imap_fname)
		imap = algebra.make_vect(imap)
		print imap.flatten().mean()
		imap = imap - imap.flatten().mean()
		if imap.axes != ('freq', 'ra', 'dec') :
			raise ce.DataError('AXES ERROR!')

		try:
			nmap = algebra.load(in_root + nmap_fname)
			nmap = algebra.make_vect(nmap)

			bad = nmap<1.e-5*nmap.flatten().max()
			nmap[bad] = 0.
			non0 = nmap.nonzero()
			#imap[non0] = imap[non0]/nmap[non0]
		except IOError:
			print 'NO Noise File :: Set Noise to One'
			nmap = algebra.info_array(sp.ones(imap.shape))
			nmap.axes = imap.axes
			nmap = algebra.make_vect(nmap)
		nmap.info = imap.info
		if nmap.axes != ('freq', 'ra', 'dec') :
			raise ce.DataError('AXES ERROR!')

		if mock_fname != None:
			mmap = algebra.info_array(
				2.*np.random.rand(imap.shape[0],imap.shape[1], imap.shape[2])-0.5)
			mmap.axes = imap.axes
			mmap = algebra.make_vect(mmap)
			box, nbox, mbox = self.fill(imap, nmap, mmap)
			pkrm_nfname = out_root + 'fftbox_' +  mock_fname
			algebra.save(pkrm_nfname, mbox)
		else:
			box, nbox = self.fill(imap, nmap)

		pkrm_fname = out_root + 'fftbox_' + imap_fname
		algebra.save(pkrm_fname, box)

		pkrm_nfname = out_root + 'fftbox_' +  nmap_fname
		algebra.save(pkrm_nfname, nbox)
Example #22
def map_pair_cal(
    uncal_maplist,
    uncal_weightlist,
    calfactor_outlist,
    dirtymap_inlist,
    dirtymap_outlist,
    reference_mapfile,
    reference_weightfile,
    sub_weighted_mean=True,
    freq_list=range(256),
):

    reference_map = algebra.make_vect(algebra.load(reference_mapfile))
    reference_weight = algebra.make_vect(algebra.load(reference_weightfile))

    reference_map = remove_mean(reference_map, reference_weight)

    # load maps into pairs
    for mapfile, weightfile, calfactor_outfile, dirty_infile, dirty_outfile in zip(
        uncal_maplist, uncal_weightlist, calfactor_outlist, dirtymap_inlist, dirtymap_outlist
    ):

        print mapfile, weightfile

        session_map = algebra.make_vect(algebra.load(mapfile))
        session_weight = algebra.make_vect(algebra.load(weightfile))

        session_map = remove_mean(session_map, session_weight)

        calfactor = template_fit(session_map, reference_map, session_weight)

        newmap = algebra.make_vect(algebra.load(dirty_infile))
        newmap[freq_list, :, :] /= calfactor[:, np.newaxis, np.newaxis]
        algebra.save(dirty_outfile, newmap)
        print dirty_outfile

        # optional test by applying the factor to the maps
        # session_map[freq_list, :, :] /= calfactor[:, np.newaxis, np.newaxis]
        # calfactor = template_fit(session_map, reference_map, session_weight)

        facout = open(calfactor_outfile, "w")
        for outvals in calfactor:
            facout.write("%10.15g\n" % outvals)

        facout.close()
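template_fit is project code; one plausible reading is a per-frequency weighted least-squares gain fit, sketched below (an assumption about its behavior, not the project's implementation):

import numpy as np

def template_fit_sketch(session_map, reference_map, weight):
    # gain g per frequency slice minimizing sum w * (session - g * ref)^2
    num = np.sum(np.sum(weight * session_map * reference_map, -1), -1)
    den = np.sum(np.sum(weight * reference_map ** 2, -1), -1)
    return num / den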
Example #23
	def execute(self, nprocesses=1):
		params = self.params

		# Make parent directory and write parameter file.
		kiyopy.utils.mkparents(params['output_root'])
		parse_ini.write_params(params, params['output_root']+'params.ini',prefix='pk_')
		in_root = params['input_root']
		out_root = params['output_root']
		mid = params['mid']
		last = params['last']
		all_out_fname_list = []
		all_in_fname_list = []
		
		#### Process ####
		pol_str = params['polarizations'][0]
		#hr_str = params['hr'][0]
		for hr_str, ii in zip(params['hr'],range(len(params['hr']))):
			end = pol_str
			if len(last)!=0:
				end = end + last[ii]
			imap_fname = in_root + hr_str + mid[0] + end + '.npy'
			imap = algebra.load(imap_fname)
			imap = algebra.make_vect(imap)
			if imap.axes != ('freq', 'ra', 'dec') :
				raise ce.DataError('AXES ERROR!')

			nmap_fname = in_root + hr_str + mid[1] + end + '.npy'
			nmap = algebra.load(nmap_fname)
			nmap = algebra.make_vect(nmap)

			#inverse noise weight
			print 'Inverse Noise Weight... Map:' + hr_str[:-1]
			self.weight(imap, nmap, 
				out_root+hr_str+'wt_cleaned_clean_map_'+end+'.png')

			dmap_fname = out_root + 'wt_' + hr_str + mid[0] + end + '.npy'
			algebra.save(dmap_fname, imap)
			all_out_fname_list.append(
				kiyopy.utils.abbreviate_file_path(dmap_fname))

			nmap_fname = out_root + 'wt_' + hr_str + mid[1] + end + '.npy'
			algebra.save(nmap_fname, nmap)
			all_out_fname_list.append(
				kiyopy.utils.abbreviate_file_path(nmap_fname))

		return 0
Example #24
    def __init__(self, parameter_file_or_dict=None, feedback=2):

        # Call the base_single init.
        base_single.BaseSingle.__init__(self, parameter_file_or_dict,
                                        feedback)
        # Read in the calibration file.
        map_file_name = self.params['map_file']
        self.Map = algebra.load(map_file_name)
        self.Map = algebra.make_vect(self.Map)
Example #25
def template_map_axes(filename):
    """Open a numpy array map and extract its axis/etc. information
    """
    print "using the volume template file: " + filename
    template_map = algebra.make_vect(algebra.load(filename))
    freq_axis = template_map.get_axis('freq')
    ra_axis = template_map.get_axis('ra')
    dec_axis = template_map.get_axis('dec')
    return (freq_axis, ra_axis, dec_axis, template_map.shape, template_map)
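Typical use (the filename here is hypothetical):

(freq_axis, ra_axis, dec_axis, shape, template_map) = \
    template_map_axes("some_map.npy")
print shape, freq_axis[0], freq_axis[-1]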
Example #27
	def process_map(self, imap_fname, nmap_fname, ii, mock_fname=None):
		params = self.params
		sigma = params['sigma']
		mu = params['mu']
		out_root = params['output_root']
		in_root = params['input_root']
		
		imap = algebra.load(in_root + imap_fname)
		imap = algebra.make_vect(imap)
		#print imap.flatten().mean()
		imap = imap - imap.flatten().mean()
		if imap.axes != ('freq', 'ra', 'dec') :
			raise ce.DataError('AXES ERROR!')

		print ' :: Set Noise to Gaussian'
		np.random.seed()
		nmap = algebra.info_array(
			sigma*np.random.randn(imap.shape[0],imap.shape[1], imap.shape[2])+mu)
		nmap.axes = imap.axes
		nmap = algebra.make_vect(nmap)
		nmap.info = imap.info
		if nmap.axes != ('freq', 'ra', 'dec') :
			raise ce.DataError('AXES ERROR!')

		## add noise to map ##
		imap = imap + nmap
		non0 = nmap.nonzero()
		nmap[non0] = (1./sigma)**2

		#if mock_fname != None:
		#	mmap = algebra.info_array(
		#		2.*np.random.randn(imap.shape[0],imap.shape[1], imap.shape[2])-0.5)
		#	mmap.axes = imap.axes
		#	mmap = algebra.make_vect(mmap)
		#	box, nbox, mbox = self.fill(imap, nmap, mmap)
		#	pkrm_nfname = out_root + 'fftbox_' +  mock_fname
		#	algebra.save(pkrm_nfname, mbox)
		#else:
		#	box, nbox = self.fill(imap, nmap)

		hr = params['hr']
		mid = params['mid']
		last = params['last']
		pol_str = params['polarizations'][0]
		end = pol_str
		if len(last)!=0:
			end = end + last[ii]
		end = end + '_' + str(ii)
		imap_fname = hr[ii] + mid[0] + end + '.npy'
		nmap_fname = hr[ii] + mid[1] + end + '.npy'

		pkrm_fname = out_root + imap_fname
		algebra.save(pkrm_fname, imap)

		pkrm_nfname = out_root + nmap_fname
		algebra.save(pkrm_nfname, nmap)
Example #28
def add_sim_to_data(simkey, datakey, replace=False):
    datapath_db = data_paths.DataPath()

    mapA_file = datapath_db.fetch(datakey + ":A;clean_map", intend_read=True)
    mapB_file = datapath_db.fetch(datakey + ":B;clean_map", intend_read=True)
    mapC_file = datapath_db.fetch(datakey + ":C;clean_map", intend_read=True)
    mapD_file = datapath_db.fetch(datakey + ":D;clean_map", intend_read=True)
    simfile = datapath_db.fetch(simkey + ":1", intend_read=True)

    simmap = algebra.make_vect(algebra.load(simfile))

    mapset = [mapA_file, mapB_file, mapC_file, mapD_file]
    for mapfile in mapset:
        print mapfile, simfile
        origmap = algebra.make_vect(algebra.load(mapfile))
        if replace:
            algebra.save(mapfile, simmap)
        else:
            algebra.save(mapfile, origmap + simmap)
Example #30
    def load_ext_pairs(self, index, map1name, map2name, noise1name,
                       noise2name):
        r"""Load the external datasets (which improve cleaning)
        """
        par = self.params
        (self.pairlist_ext,
         pairdict) = dp.cross_maps(map1name,
                                   map2name,
                                   noise1name,
                                   noise2name,
                                   noise_inv_suffix=";noise_weight",
                                   verbose=False,
                                   db_to_use=self.datapath_db)
        # probably not wanted for external maps:
        #                                    tack_on=self.tack_on_input,

        self.pairs_ext[index] = {}
        for pairitem in self.pairlist_ext:
            pdict = pairdict[pairitem]
            print "-" * 80
            print "loading ext %s pair %s" % (index, pairitem)
            dp.print_dictionary(
                pdict,
                sys.stdout,
                key_list=['map1', 'noise_inv1', 'map2', 'noise_inv2'])

            map1 = algebra.make_vect(algebra.load(pdict['map1']))
            map2 = algebra.make_vect(algebra.load(pdict['map2']))

            noise_inv1 = algebra.make_vect(algebra.load(pdict['noise_inv1']))
            noise_inv2 = algebra.make_vect(algebra.load(pdict['noise_inv2']))

            pair = map_pair.MapPair(map1,
                                    map2,
                                    noise_inv1,
                                    noise_inv2,
                                    self.freq_list,
                                    conv_factor=self.conv_factor)

            pair.set_names(pdict['tag1'], pdict['tag2'])

            pair.params = self.params
            self.pairs_ext[index][pairitem] = pair
Example #31
def find_weight(filename):
    r"""rather than read the full noise_inv and find its diagonal, cache the
    diagonal values.

    Note that the .info does not get shelved (class needs to be made
    serializable). Return the info separately.
    """
    noise_inv_diag = algebra.make_vect(algebra.load(filename))

    return noise_inv_diag, noise_inv_diag.info
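Because the .info is returned separately, the caller re-attaches it; a sketch of the round trip (hypothetical filename):

noise_diag, info = find_weight("noise_inv_diag.npy")
weight = algebra.make_vect(noise_diag)
weight.info = info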
Example #32
def template_map_axes(filename=None):
    if filename is None:
        root_template = '/mnt/raid-project/gmrt/calinliv/wiggleZ/corr/test/'
        filename = root_template + \
                   'sec_A_15hr_41-73_cleaned_clean_map_I_with_B.npy'

    template_map = algebra.make_vect(algebra.load(filename))
    freq_axis = template_map.get_axis('freq')
    ra_axis = template_map.get_axis('ra')
    dec_axis = template_map.get_axis('dec')
    return (freq_axis, ra_axis, dec_axis, template_map.shape, template_map)
Example #33
def process_optical_to_delta(optical_file, optical_selection_file, outfile):
    print "-" * 80
    print "in: " + optical_file
    print "nbar: " + optical_selection_file
    print "out: " + outfile
    map_opt = algebra.make_vect(algebra.load(optical_file))
    map_nbar = algebra.make_vect(algebra.load(optical_selection_file))

    # convert to delta-overdensity
    map_opt = map_opt / map_nbar - 1.
    #algebra.compressed_array_summary(map_opt, "opt after conversion to delta")

    # set the NaNs and infs to zero in data and weights
    nan_array = np.isnan(map_opt)
    map_opt[nan_array] = 0.
    map_nbar[nan_array] = 0.
    inf_array = np.isinf(map_opt)
    map_opt[inf_array] = 0.
    map_nbar[inf_array] = 0.

    algebra.save(outfile, map_opt)
Example #34
def test_scheme(template_file, sim_filename1, sim_filename2):
    r"""look at some differences between maps"""
    template_map = algebra.make_vect(algebra.load(template_file))
    gbtsim1 = realize_simulation(template_map, scenario="streaming", seed=5489, refinement=1.0)
    gbtsim2 = realize_simulation(template_map, seed=5489, refinement=1.0)

    sim_map1 = algebra.make_vect(gbtsim1, axis_names=("freq", "ra", "dec"))
    sim_map2 = algebra.make_vect(gbtsim2, axis_names=("freq", "ra", "dec"))
    sim_map1.copy_axis_info(template_map)
    sim_map2.copy_axis_info(template_map)
    algebra.save(sim_filename1, sim_map1)
    algebra.save(sim_filename2, sim_map2)
Example #35
    def load_pairs(self, regenerate=True):
        r"""load the set of map/noise pairs specified by keys handed to the
        database. This sets up operations on the quadratic product
            Q = map1^T noise_inv1 B noise_inv2 map2
        """
        par = self.params
        (self.pairlist, pairdict) = dp.cross_maps(
            par["map1"], par["map2"], par["noise_inv1"], par["noise_inv2"], verbose=False
        )

        for pairitem in self.pairlist:
            pdict = pairdict[pairitem]
            print "-" * 80
            dp.print_dictionary(pdict, sys.stdout, key_list=["map1", "noise_inv1", "map2", "noise_inv2"])

            map1 = algebra.make_vect(algebra.load(pdict["map1"]))
            map2 = algebra.make_vect(algebra.load(pdict["map2"]))
            sim = algebra.make_vect(algebra.load(par["simfile"]))

            if not par["no_weights"]:
                noise_inv1 = self.process_noise_inv(pdict["noise_inv1"], regenerate=regenerate)

                noise_inv2 = self.process_noise_inv(pdict["noise_inv2"], regenerate=regenerate)
            else:
                noise_inv1 = algebra.ones_like(map1)
                noise_inv2 = algebra.ones_like(map2)

            pair = map_pair.MapPair(map1 + sim, map2 + sim, noise_inv1, noise_inv2, self.freq_list)

            pair.set_names(pdict["tag1"], pdict["tag2"])
            pair.lags = self.lags
            pair.params = self.params
            self.pairs[pairitem] = pair

            pair_nosim = map_pair.MapPair(map1, map2, noise_inv1, noise_inv2, self.freq_list)

            pair_nosim.set_names(pdict["tag1"], pdict["tag2"])
            pair_nosim.lags = self.lags
            pair_nosim.params = self.params
            self.pairs_nosim[pairitem] = pair_nosim
Example #37
def calculate_xspec_file(cube1_file, cube2_file, bins,
                    weight1_file=None, weight2_file=None,
                    truncate=False, window="blackman",
                    return_3d=False, unitless=True):

    cube1 = algebra.make_vect(algebra.load(cube1_file))
    cube2 = algebra.make_vect(algebra.load(cube2_file))

    if weight1_file is None:
        weight1 = algebra.ones_like(cube1)
    else:
        weight1 = algebra.make_vect(algebra.load(weight1_file))

    if weight2_file is None:
        weight2 = algebra.ones_like(cube2)
    else:
        weight2 = algebra.make_vect(algebra.load(weight2_file))

    print cube1.shape, cube2.shape, weight1.shape, weight2.shape
    return calculate_xspec(cube1, cube2, weight1, weight2, bins=bins,
                           window=window, unitless=unitless,
                           truncate=truncate, return_3d=return_3d)
Example #39
def find_weight(filename):
    r"""rather than read the full noise_inv and find its diagonal, cache the
    diagonal values.

    Note that the .info does not get shelved (class needs to be made
    serializable). Return the info separately.
    """
    #print "loading noise: " + filename
    #noise_inv = algebra.make_mat(algebra.open_memmap(filename, mode='r'))
    #noise_inv_diag = noise_inv.mat_diag()
    # if optimal map:
    noise_inv_diag = algebra.make_vect(algebra.load(filename))

    return noise_inv_diag, noise_inv_diag.info
Example #40
    def execute(self, processes):
        file_list_1 = self.datapath_db.fetch(self.params['map_key_1'],
                                             tack_on=self.params["tack_on_1"],
                                             silent=True)

        file_list_2 = self.datapath_db.fetch(self.params['map_key_2'],
                                             tack_on=self.params["tack_on_2"],
                                             silent=True)

        file_list_out = self.datapath_db.fetch(
            self.params['map_key_out'],
            tack_on=self.params["tack_on_out"],
            silent=True)

        for file_key in file_list_1[0]:
            infile = file_list_1[1][file_key]
            subfile = file_list_2[1][file_key]
            outfile = file_list_out[1][file_key]

            rootdir = "/".join(outfile.split("/")[0:-1])
            if len(rootdir) > 0 and rootdir != ".":
                if not os.path.isdir(rootdir):
                    print "print_multicolumn: making dir " + rootdir
                    os.mkdir(rootdir)

            print "input: ", infile
            print "out: ", outfile

            if "map" in file_key:
                print "minus: ", subfile
                inmap = algebra.make_vect(algebra.load(infile))
                submap = algebra.make_vect(algebra.load(subfile))
                print inmap.shape, submap.shape
                algebra.save(outfile, inmap - submap)
            else:
                shutil.copy2(infile, outfile)
                shutil.copy2(infile + ".meta", outfile + ".meta")
Example #41
def get_cached_physical(filename, refinement=2, pad=5, order=1):
    basename = filename.split(".")[0]
    phys_cachename = basename + "_physical.npy"
    chksum_cachename = basename + ".md5"
    print phys_cachename, chksum_cachename

    curr_chksum = ft.hashfile(filename)
    # ALSO CHECK IF THE PARAMS CHANGED!

    # try to get an existing checksum; default to False so a missing or
    # stale checksum file forces a cache rebuild
    chksum_not_changed = False
    try:
        chkfile = open(chksum_cachename, "r")
        old_chksum = chkfile.read()
        chkfile.close()
        if old_chksum == curr_chksum:
            chksum_not_changed = True
    except IOError:
        pass

    if os.path.isfile(phys_cachename) and chksum_not_changed:
        print "using the cached file: " + phys_cachename
        ret_data = algebra.make_vect(algebra.load(phys_cachename))
    else:
        print "writing a physical cache for: " + filename
        # calculate the physical coordinate box
        obs_map = algebra.make_vect(algebra.load(filename))
        ret_data = bh.repackage_kiyo(pg.physical_grid(obs_map,
                                           refinement=refinement,
                                           pad=pad, order=order))
        algebra.save(phys_cachename, ret_data)

        # save the new checksum
        chkfile = open(chksum_cachename, "w")
        chkfile.write(curr_chksum)
        chkfile.close()

    return ret_data
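ft.hashfile is project code; the checksum it supplies can be sketched with hashlib (a standalone, assumption-level equivalent):

import hashlib

def hashfile_sketch(filename, blocksize=65536):
    # md5 of the file contents, read in blocks to bound memory use
    hasher = hashlib.md5()
    infile = open(filename, "rb")
    block = infile.read(blocksize)
    while len(block) > 0:
        hasher.update(block)
        block = infile.read(blocksize)
    infile.close()
    return hasher.hexdigest()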
Example #43
def generate_delta_sim(input_file, output_file):
    r"""make the map with the temperature divided out (delta)"""
    print "reading %s -> %s (dividing by T_b(z))" % (input_file, output_file)

    simmap = algebra.make_vect(algebra.load(input_file))
    freq_axis = simmap.get_axis('freq') / 1.e6
    z_axis = units.nu21 / freq_axis - 1.0

    simobj = corr21cm.Corr21cm()
    T_b = simobj.T_b(z_axis)*1e-3

    simmap /= T_b[:, np.newaxis, np.newaxis]

    print "saving to" + output_file
    algebra.save(output_file, simmap)
Example #44
    def execute(self, nprocesses):
        params = self.params
        # Make parent directory and write parameter file.
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=prefix)
        in_root = params['input_root']
        # Figure out what the band names are.
        bands = params['bands']
        if not bands:
            # discover bands from the files of the first polarization
            pol_str = params['polarizations'][0]
            map_files = glob.glob(in_root + pol_str + "_*.npy")
            bands = []
            root_len = len(in_root)
            for file_name in map_files:
                bands.append(file_name[root_len:-4])
        # Loop over polarizations.
        for pol_str in params['polarizations']:
            # Read in all the maps to be glued.
            maps = []
            for band in bands:
                band_map_fname = (in_root + pol_str + "_" + repr(band) +
                                  '.npy')
                if self.feedback > 1:
                    print "Read using map: " + band_map_fname
                if params['mat_diag']:
                    if self.feedback > 1:
                        print "Treating as a matrix, getting diagonal."
                    band_map = al.open_memmap(band_map_fname, mode='r')
                    band_map = al.make_mat(band_map)
                    band_map = band_map.mat_diag()
                else:
                    band_map = al.load(band_map_fname)
                    band_map = al.make_vect(band_map)
                if band_map.axes != ('freq', 'ra', 'dec'):
                    msg = ("Expected maps to have axes ('freq',"
                           "'ra', 'dec'), but it has axes: " +
                           str(band_map.axes))
                    raise ce.DataError(msg)
                maps.append(band_map)
            # Now glue them together.
            out_map = glue(maps)
            out_fname = (params['output_root'] + pol_str + "_" + "all" +
                         '.npy')
            if self.feedback > 1:
                print "Writing glued map to: " + out_fname
            al.save(out_fname, out_map)
Example #45
    def __init__(self, parameter_file_or_dict=None, feedback=2):

        # Call the base_single init.
        base_single.BaseSingle.__init__(self, parameter_file_or_dict,
                                        feedback)
        # Read in the map files.
        map_fnames_start = (self.params['map_input_root']
                            + self.params['map_type'])
        self.maps = []
        for band in self.params['map_bands']:
            this_band_maps = []
            for pol in self.params['map_polarizations']:
                map_file_name = (map_fnames_start + pol + '_' + str(band)
                                 + '.npy')
                map = algebra.load(map_file_name)
                map = algebra.make_vect(map)
                this_band_maps.append(map)
            self.maps.append(this_band_maps)
Example #46
    def __init__(self, parameter_file_or_dict=None, feedback=2) :
        # Read the parameter file, store in dictionary named parameters.
        self.params = parse_ini.parse(parameter_file_or_dict, params_init, 
                                      prefix=prefix, feedback=feedback)
        self.feedback = feedback

        # Read in the map files.
        map_fnames_start = (self.params['map_input_root']
                            + self.params['map_type'])
        self.maps = []
        for band in self.params['map_bands']:
            this_band_maps = []
            for pol in self.params['map_polarizations']:
                map_file_name = (map_fnames_start + pol + '_' + str(band)
                                 + '.npy')
                map = algebra.load(map_file_name)
                map = algebra.make_vect(map)
                this_band_maps.append(map)
            self.maps.append(this_band_maps)
Example #47
def repair_shelve_files(batch_param, ini_prefix, params_default, param_prefix):
    """Add missing information to shelves"""
    filelist = make_shelve_names(batch_param)
    for (index, filename, multiplier, cross_power) in filelist:
        print "repairing: " + filename
        directory = "/".join(filename.split("/")[0:-1]) + "/"
        run_index = re.findall(r'\d+', index)[0]
        ini_file = directory + ini_prefix + run_index + ".ini"
        print ini_file
        params = parse_ini.parse(ini_file, params_default,
                             prefix=param_prefix, feedback=10)

        radio_file1 = params['radio_root1'] + params['radio_data_file1']
        map_radio1 = algebra.make_vect(algebra.load(radio_file1))

        corr_data = shelve.open(filename + ".shelve")
        corr_data["params"] = params
        corr_data["freq_axis"] = map_radio1.get_axis('freq')
        corr_data.close()
Example #48
def convert(map_file, history_file=None):
    """Main function."""

    map = algebra.load(map_file)
    map = algebra.make_vect(map)

    if map.axes != ('freq', 'ra', 'dec'):
        raise NotImplementedError("Exepected input map to be organized "
                                  "('freq', 'ra', 'dec').")

    new_shape = map.shape[1:] + (map.shape[0], )

    # Make the output file name, assuming the input file name ends in .npy.
    # This is a hack and someone should fix it sometime.
    out_fname = map_file.split('/')[-1][:-4] + '.fits'

    Map_fits = data_map.DataMap(sp.rollaxis(map, 0, 3))
    # Set axis names
    Map_fits.set_field('CTYPE3', 'FREQ--HZ', (), '32A')
    Map_fits.set_field('CTYPE1', 'RA---DEG', (), '32A')
    Map_fits.set_field('CTYPE2', 'DEC--DEG', (), '32A')
    # Copy frequency axis (now the third axis not the first).
    Map_fits.set_field('CRVAL3', map.info['freq_centre'], (), 'D')
    Map_fits.set_field('CRPIX3', new_shape[2] // 2 + 1, (), 'D')
    Map_fits.set_field('CDELT3', map.info['freq_delta'], (), 'D')
    # Set the other two axes.
    Map_fits.set_field('CRVAL1', map.info['ra_centre'], (), 'D')
    Map_fits.set_field('CRPIX1', new_shape[0] // 2 + 1, (), 'D')
    Map_fits.set_field('CDELT1', map.info['ra_delta'], (), 'D')
    Map_fits.set_field('CRVAL2', map.info['dec_centre'], (), 'D')
    Map_fits.set_field('CRPIX2', new_shape[1] // 2 + 1, (), 'D')
    Map_fits.set_field('CDELT2', map.info['dec_delta'], (), 'D')

    # Copy the file history if provided.
    if history_file is not None:
        history = hist.read(history_file)
        history.add("Converted map to fits.", ("File name: " + out_fname, ))
        Map_fits.history = history

    # Verify contents and write out.
    Map_fits.verify()
    fits_map.write(Map_fits, out_fname)
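A minimal usage sketch for convert(); the file names are hypothetical, and the
input is assumed to be an algebra .npy map organized as ('freq', 'ra', 'dec'):

    # writes 15hr_41-73_clean_map_I.fits in the working directory
    convert("15hr_41-73_clean_map_I.npy")

    # optionally carry the processing history along
    convert("15hr_41-73_clean_map_I.npy",
            history_file="15hr_41-73_clean_map_I.hist")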
Example #49
0
def sum_window(argt):
    """A given bin in 2D k-space (labelled by bin_index_2d) is the sum over a
    "washer" in 3D k-space, a band in k_parallel and an annulus in k_x, k_y.
    Let all of the 3D bins in k_space be indexed by bin_3d. The window function
    is centered at k_3d=0, and let these indices defining the center of the 3d
    volume be given in center_3d.
    TODO: replace this with NlogN convolution
    TODO: implement 0-padded roll instead of np.roll, algebra.roll_zeropad()
    """
    (filename, bin_index_2d, k_2d, bin_3d, center_3d) = argt
    # load the cross-power of the weighting functions
    xspec = algebra.make_vect(algebra.load(filename))
    windowsum = algebra.zeros_like(xspec)

    num_3dbins_in_2dbin = bin_3d.shape[0]

    print "%d: summing over %d bins" % (bin_index_2d, num_3dbins_in_2dbin)

    for bin_3dind in range(num_3dbins_in_2dbin):
        # TODO: is this sign right, does it matter?
        off = bin_3d[bin_3dind] - center_3d
        #print off
        windowsum += np.roll(np.roll(np.roll(xspec, off[0], axis=0),
                                     off[1],
                                     axis=1),
                             off[2],
                             axis=2)

    k_perp_arr = binning.radius_array(xspec, zero_axes=[0])
    k_parallel_arr = binning.radius_array(xspec, zero_axes=[1, 2])
    kx_2d = copy.deepcopy(k_2d)
    ky_2d = copy.deepcopy(k_2d)
    counts_histo_2d, binavg_2d = binning.bin_an_array_2d(
        windowsum, k_perp_arr, k_parallel_arr, kx_2d, ky_2d)

    return (bin_index_2d, counts_histo_2d, binavg_2d)
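The second TODO above calls for a zero-padded roll, so that window power
shifted past the edge of the volume is dropped rather than wrapped around.
A minimal numpy sketch of such a helper (algebra.roll_zeropad() is assumed,
not yet implemented; this version handles one axis per call, like np.roll):

    import numpy as np

    def roll_zeropad(arr, shift, axis):
        """Like np.roll, but vacated entries are filled with zeros."""
        if shift == 0:
            return arr.copy()
        result = np.zeros_like(arr)
        dst = [slice(None)] * arr.ndim
        src = [slice(None)] * arr.ndim
        if shift > 0:
            dst[axis] = slice(shift, None)
            src[axis] = slice(None, -shift)
        else:
            dst[axis] = slice(None, shift)
            src[axis] = slice(-shift, None)
        result[tuple(dst)] = arr[tuple(src)]
        return result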
Example #50
0
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        self.params = params_dict
        self.datapath_db = data_paths.DataPath()

        if parameter_file:
            self.params = parse_ini.parse(parameter_file, binwigglezparams_init,
                                          prefix=binwigglezprefix)

        # gather names of the input catalogs
        self.infile_data = self.datapath_db.fetch(self.params['infile_data'],
                                             intend_read=True,
                                             purpose="WiggleZ data catalog")

        self.infile_mock = self.datapath_db.fetch(self.params['infile_mock'],
                                             intend_read=True,
                                             purpose="WiggleZ mock catalog",
                                             silent=True)

        # gather names of all the output files
        self.outfile_data = self.datapath_db.fetch(self.params['outfile_data'],
                                              intend_write=True,
                                              purpose="binned WiggleZ data")

        self.outfile_delta_data = self.datapath_db.fetch(\
                                        self.params['outfile_deltadata'],
                                        intend_write=True,
                                        purpose="binned WiggleZ data delta")

        self.outfile_selection = \
                     self.datapath_db.fetch(self.params['outfile_selection'],
                                        intend_write=True,
                                        purpose="WiggleZ selection function")

        self.outfile_separable = \
                     self.datapath_db.fetch(self.params['outfile_separable'],
                                        intend_write=True,
                                        purpose="WiggleZ separable sel func")

        self.outfile_mock = self.datapath_db.fetch(self.params['outfile_mock'],
                                        intend_write=True,
                                        purpose="WiggleZ binned mock",
                                        silent=True)

        self.outfile_delta_mock = self.datapath_db.fetch(\
                                        self.params['outfile_deltamock'],
                                        intend_write=True,
                                        purpose="WiggleZ binned delta mock",
                                        silent=True)

        # gather axis information from the template file
        self.template_map = \
            algebra.make_vect(algebra.load(self.params['template_file']))

        self.freq_axis = self.template_map.get_axis('freq')
        self.ra_axis = self.template_map.get_axis('ra')
        self.dec_axis = self.template_map.get_axis('dec')

        # placeholders for data products
        self.realmap_binning = None
        self.selection_function = None
        self.separable_selection = None
Example #51
0
def map_pair_cal(uncal_maplist,
                 uncal_weightlist,
                 calfactor_outlist,
                 dirtymap_inlist,
                 dirtymap_outlist,
                 convolve=True,
                 factorizable_noise=True,
                 sub_weighted_mean=True,
                 freq_list=range(256)):

    map1file = uncal_maplist.pop(0)
    weight1file = uncal_weightlist.pop(0)
    calfactor_outlist.pop(0)
    dirtymap_out0 = dirtymap_outlist.pop(0)
    dirtymap_in0 = dirtymap_inlist.pop(0)

    # do nothing to the reference map
    ref_dirtymap = algebra.make_vect(algebra.load(dirtymap_in0))
    algebra.save(dirtymap_out0, ref_dirtymap)

    # load maps into pairs
    svdout = shelve.open("correlation_pairs.shelve")
    for map2file, weight2file, calfactor_outfile, \
        dirty_infile, dirty_outfile in zip(uncal_maplist, \
            uncal_weightlist, calfactor_outlist,
            dirtymap_inlist, dirtymap_outlist):

        print map1file, weight1file, map2file, weight2file

        pair = map_pair.MapPair(map1file,
                                map2file,
                                weight1file,
                                weight2file,
                                freq_list,
                                avoid_db=True)

        if factorizable_noise:
            pair.make_noise_factorizable()

        if sub_weighted_mean:
            pair.subtract_weighted_mean()

        if convolve:
            pair.degrade_resolution()

        (corr, counts) = pair.correlate()
        svd_info = ce.get_freq_svd_modes(corr, len(freq_list))
        svdout[map2file] = svd_info

        # write out the left right and cal factors
        leftmode = svd_info[1][0]
        rightmode = svd_info[2][0]
        calfactor = leftmode / rightmode

        facout = open(calfactor_outfile, "w")
        for outvals in zip(leftmode, rightmode, calfactor):
            facout.write("%10.15g %10.15g %10.15g\n" % outvals)

        facout.close()

        newmap = algebra.make_vect(algebra.load(dirty_infile))
        newmap[freq_list, :, :] *= calfactor[:, np.newaxis, np.newaxis]
        algebra.save(dirty_outfile, newmap)
        print dirty_outfile

    svdout.close()
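The calibration factor written above is the per-frequency ratio of the leading
SVD mode amplitudes of the two maps, so multiplying the target map by it
rescales its frequency slices toward the reference. A toy sketch of that
per-frequency rescaling (the arrays here are illustrative only):

    import numpy as np

    leftmode = np.array([1.0, 1.1, 0.9])     # mode amplitude, reference map
    rightmode = np.array([0.5, 0.55, 0.45])  # mode amplitude, target map
    calfactor = leftmode / rightmode         # per-frequency gain correction

    cube = np.ones((3, 4, 4))                # toy (freq, ra, dec) map
    cube *= calfactor[:, np.newaxis, np.newaxis]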
Example #52
0
def add_manual_mask(source_key,
                    cut_freq_list=None,
                    signal_name='map',
                    noise_inv_name='noise_inv',
                    weight_name='weight',
                    divider_token=";"):
    r"""
    `source_key` is the file db key for the maps to combine
    `signal_name` is the tag in the file db entry for the signal maps
    `noise_inv_name` is the tag in the file db entry for the N^-1 weights
    `weight_name` is the tag in the file db entry for the weights to write out
    `divider_token` is the token that divides the map section name
            from the data type e.g. "A_with_B;noise_inv"
    """
    datapath_db = data_paths.DataPath()
    source_fdb = datapath_db.fetch(source_key, silent=True)
    source_fdict = source_fdb[1]

    # accumulate all the files to combine
    noise_inv_keys = {}
    weight_keys = {}
    signal_keys = {}
    for filekey in source_fdb[0]:
        if divider_token in filekey:
            data_type = filekey.split(divider_token)[1]
            map_section = filekey.split(divider_token)[0]

            if data_type == signal_name:
                signal_keys[map_section] = source_fdict[filekey]

            if data_type == noise_inv_name:
                noise_inv_keys[map_section] = source_fdict[filekey]

            if data_type == weight_name:
                weight_keys[map_section] = source_fdict[filekey]

    for mapkey in signal_keys:
        signal_file = signal_keys[mapkey]
        noise_inv_file = noise_inv_keys[mapkey]
        weight_file = weight_keys[mapkey]
        print "loading pair: %s %s -> %s" % \
                (signal_file, noise_inv_file, weight_file)
        signal_map = algebra.make_vect(algebra.load(signal_file))
        weightmap = algebra.make_vect(algebra.load(noise_inv_file))

        # set the new weights to zero where the N^-1 is small
        # or the signal map is inf or nan
        weightmap[np.isnan(weightmap)] = 0.
        weightmap[np.isinf(weightmap)] = 0.
        weightmap[np.isnan(signal_map)] = 0.
        weightmap[np.isinf(signal_map)] = 0.
        weightmap[weightmap < 1.e-20] = 0.

        if cut_freq_list is not None:
            for cutindex in cut_freq_list:
                weightmap[cutindex, :, :] = 0.

        # could also determine the filename here, outside of the database
        #outputdir = datapath_db.fetch_parent(source_key, return_path=True)
        #weight_out = "%s/%s" % (outputdir, source_key)
        algebra.compressed_array_summary(weightmap, "new weight map")
        algebra.save(weight_file, weightmap)
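For reference, the divider_token convention splits a file-db key into its map
section and data type; a one-line sketch of the parsing used above:

    filekey = "A_with_B;noise_inv"           # example key from the docstring
    map_section, data_type = filekey.split(";")
    # map_section == "A_with_B", data_type == "noise_inv"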
Example #53
0
def combine_maps_driver(inputmap_dict, inputweight_dict, output_dict,
                        fullcov=False, datapath_db=None):
    r"""Combine a list of weights, maps specified by their database keys
    """
    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    signal_list = []
    weight_list = []
    for mapkey in inputmap_dict:
        signalfile = inputmap_dict[mapkey]
        weightfile = inputweight_dict[mapkey]
        print "loading pair: %s %s" % (signalfile, weightfile)
        signal_list.append(algebra.make_vect(algebra.load(signalfile)))

        if fullcov:
            raw_weight = algebra.make_mat(
                            algebra.open_memmap(weightfile))
            raw_weight = raw_weight.mat_diag()
        else:
            raw_weight = algebra.make_vect(algebra.load(weightfile))

        # zero out any messy stuff
        raw_weight[raw_weight < 1.e-20] = 0.
        raw_weight[np.isnan(raw_weight)] = 0.
        raw_weight[np.isinf(raw_weight)] = 0.
        weight_list.append(raw_weight)

    prodmap = []
    for mapind in range(0, len(signal_list)):
        prodmap.append(signal_list[mapind] * weight_list[mapind])

    print "CHECK THESE: %d %d %d" % (len(signal_list), len(weight_list),
                                     len(prodmap))

    cumulative_product = algebra.zeros_like(prodmap[0])
    cumulative_weight = algebra.zeros_like(prodmap[0])
    for mapind in range(0, len(signal_list)):
        cumulative_product += prodmap[mapind]
        cumulative_weight += weight_list[mapind]

    algebra.compressed_array_summary(cumulative_weight, "weight map")
    algebra.compressed_array_summary(cumulative_product, "product map")

    newmap = cumulative_product / cumulative_weight

    cumulative_weight[cumulative_weight < 1.e-20] = 0.
    cumulative_product[cumulative_weight < 1.e-20] = 0.

    # if the new map is nan or inf, set it and the weights to zero
    nan_array = np.isnan(newmap)
    newmap[nan_array] = 0.
    cumulative_product[nan_array] = 0.
    cumulative_weight[nan_array] = 0.
    inf_array = np.isinf(newmap)
    newmap[inf_array] = 0.
    cumulative_product[inf_array] = 0.
    cumulative_weight[inf_array] = 0.
    algebra.compressed_array_summary(newmap, "new map")
    algebra.compressed_array_summary(cumulative_product, "final map * weight")
    algebra.compressed_array_summary(cumulative_weight, "final weight map")

    print output_dict
    algebra.save(output_dict['map'], newmap)
    algebra.save(output_dict['product'], cumulative_product)
    algebra.save(output_dict['weight'], cumulative_weight)
    algebra.save(output_dict['ones'], algebra.ones_like(newmap))
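The combination above is a per-pixel inverse-noise-weighted mean,
newmap = sum_i(w_i * m_i) / sum_i(w_i), with the weights zeroed wherever they
are tiny or non-finite. A self-contained toy version of the same arithmetic:

    import numpy as np

    maps = [np.array([1.0, 2.0]), np.array([3.0, 2.0])]
    weights = [np.array([1.0, 0.0]), np.array([1.0, 4.0])]

    num = sum(m * w for (m, w) in zip(maps, weights))
    den = sum(weights)
    newmap = np.where(den > 0, num / np.where(den > 0, den, 1.), 0.)
    # newmap == [2., 2.]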
Example #54
0
def save_1D_corr(mode_number, high, save):
    '''Collapse the pair correlations to 1D and plot them; paths and plot
    ranges are hard-coded.'''
    # Load pickle file.
    #    file_name = "/mnt/raid-project/gmrt/eswitzer/wiggleZ/modetest/73_ABCD_all_%d_modes_real3map/New_Slices_object.pkl" % (mode_number)
    file_name = "/mnt/raid-project/gmrt/calinliv/wiggleZ/corr/doctest/New_Slices_object.pkl"
    f = open(file_name, "r")
    F = cPickle.load(f)
    f.close()

    # Setting axis info after pickling.
    map_file = F.params["input_root"] + "sec_A_15hr_41-73_clean_map_I.npy"
    exMap = algebra.make_vect(algebra.load(map_file))
    for Pair in F.Pairs:
        Pair.Map1.info = exMap.info
        Pair.Map2.info = exMap.info
        Pair.Noise_inv1.info = exMap.info
        Pair.Noise_inv2.info = exMap.info

    ########## Getting 1D thing. #####################################
    out_list = []
    d1_list = []
    for i in range(0, 6):
        # The corr to use.
        corr = F.Pairs[i].corr
        # The lags used
        lags = sp.array(F.params['lags'])
        real_lags = copy.deepcopy(lags)
        real_lags[0] = 0
        real_lags[1:] -= sp.diff(lags) / 2.0
        # The range selected in ini file.
        frange = F.params['freq']
        # The corresponding real frequencies for that range.
        realrange = [F.Pairs[i].Map1.get_axis('freq')[f] for f in frange]
        # The 2D correlation.
        out = fs.rebin_corr_freq_lag(corr,
                                     realrange,
                                     nfbins=200,
                                     weights=F.Pairs[i].counts,
                                     return_fbins=True)
        out_list.append(out[0])
        #plt.figure()
        #plt.imshow(out[0])
        #plt.colorbar()
        # the 1D correlation.
        d1 = fs.collapse_correlation_1D(out[0], out[2], real_lags, out[1])
        d1_list.append(copy.deepcopy(d1[0]))
        #    plt.figure()
        #    plt.plot(d1[2], d1[0],'.')
        x_axis = d1[2][1]

    matrixx = []
    for d in d1_list:
        matrixx.append(d.tolist())

    matrixx = sp.array(matrixx)
    print matrixx

    vals = []
    std = []
    for i in range(0, matrixx.shape[1]):
        # Get the sqrt to get mK.
        vals.append(
            sp.mean(sp.sign(matrixx[:, i]) * sp.sqrt(abs(matrixx[:, i]))))
        std.append(sp.std(
            sp.sign(matrixx[:, i]) * sp.sqrt(abs(matrixx[:, i]))))

    vals = sp.array(vals)
    std = sp.array(std)

    print
    print
    for i in range(0, matrixx.shape[0]):
        print sp.sign(matrixx[i, :]) * sp.sqrt(abs(matrixx[i, :]))

    # Set plot up for log log axes.
    plt.figure()
    ax = plt.gca()
    ax.set_yscale("log")
    ax.set_xscale("log")
    # Plot positives.
    t_inds = vals >= 0
    n_inds = vals < 0
    plt.plot(x_axis[t_inds], vals[t_inds] * 1000,
             'b.')  # take out *1000 for simmaps.
    plt.plot(x_axis[t_inds], (vals[t_inds] + std[t_inds]) * 1000, 'g_')
    plt.plot(x_axis[t_inds], (vals[t_inds] - std[t_inds]) * 1000, 'g_')
    # Plot negatives.
    neg_vals = -1 * vals
    plt.plot(x_axis[n_inds], neg_vals[n_inds] * 1000,
             'r.')  # take out *1000 for simmaps.
    plt.plot(x_axis[n_inds], (neg_vals[n_inds] + std[n_inds]) * 1000, 'g_')
    plt.plot(x_axis[n_inds], (neg_vals[n_inds] - std[n_inds]) * 1000, 'g_')
    #plt.axis([1, 100, 0.01, 500.0])
    plt.xlabel('lag (Mpc/h)')
    plt.ylabel('correlation (mK)')

    # Overplot a power-law model, sqrt(((rb + r) / r0) ** -1.8),
    # normalized to 0.15 at the first lag.
    t_lags = sp.arange(0.1, 100, 0.1)
    r0 = 5.5
    rb = 7.0
    t = (sp.sqrt(((rb + t_lags) / r0)**(-1.8)))
    t = t * 0.15 / t[0]
    f = plt.plot(t_lags, t, marker='None', color='k', linestyle='-')

    if high:
        plt.axis([0.9, 100, 0.0001, 1.0])
    else:
        plt.axis([0.9, 100, 0.001, 1.0])

    if save:
        name = "/cita/h/home-2/calinliv/Desktop/figures/check/1D_corr_map_%d_doctest.png" % (
            mode_number)
        #f = open(name, "w")
        plt.savefig(name)
Example #55
0
def make_autocorr(filename, identifier=None,
                  thousand_multiplier=True, multiplier=1.):
    """Same as above but for autocorrs in NewSlices pickle objects.
    filename is the full path to the file and should inlude the .pkl ending.
    wrap the plot correlation class which reads correlation object shelve
    files; uses new binning methods in freq-slices. Note that correlations are
    converted into units of mK.
    """
    output = {}
    # Load the New_Slices_object.pkl
    pkl_handle = open(filename, "r")
    print filename
    pkl_obj = cPickle.load(pkl_handle)
    pkl_handle.close()

    # Setting axis info after pickling. Make sure to use a map with the proper
    # info set.
    map_file = "/mnt/raid-project/gmrt/calinliv/wiggleZ/maps/" + \
                   "sec_A_15hr_41-73_clean_map_I.npy"
    orig_map = algebra.make_vect(algebra.load(map_file))
    for pair in pkl_obj.pairs:
        pair.Map1.info = orig_map.info
        pair.Map2.info = orig_map.info
        pair.Noise_inv1.info = orig_map.info
        pair.Noise_inv2.info = orig_map.info

    # 3D->2D->1D
    corr_2d_list = []
    corr_1d_list = []
    for i in range(0, len(pkl_obj.pairs)):
        # The corr to use.
        corr = pkl_obj.pairs[i].corr
        if (multiplier != 1.):
            print "WARNING: using a multiplier of: " + repr(multiplier)
        corr *= multiplier
        # The lags used
        lags = sp.array(pkl_obj.params['lags'])
        real_lags = copy.deepcopy(lags)
        real_lags[0] = 0
        real_lags[1:] -= sp.diff(lags) / 2.0
        # The range selected in ini file.
        frange = pkl_obj.params['freq']
        # The corresponding real frequencies for that range.
        realrange = [pkl_obj.pairs[i].Map1.get_axis('freq')[f] for f in frange]
        # The 2D correlation.
        corr_2d = ce.rebin_corr_freq_lag(corr, realrange, nfbins=200,
                             weights=pkl_obj.pairs[i].counts, return_fbins=True)
        corr_2d_list.append(corr_2d[0])
        # The 1D correlation.
        corr_1d = ce.collapse_correlation_1d(corr_2d[0], corr_2d[2],
                                             real_lags, corr_2d[1])
        corr_1d_list.append(copy.deepcopy(corr_1d[0]))
        # The values for x_left, x_centre, x_right.
        x_axis = corr_1d[2]

    # Put the 1D correlations into a matrix to be averaged easily.
    matrix_1d = []
    for corr_1d in corr_1d_list:
        matrix_1d.append(corr_1d.tolist())

    matrix_1d = sp.array(matrix_1d)

    # Get the average 1D corr and its sample variance.
    vals = []
    std = []
    for i in range(0, matrix_1d.shape[1]):
        # Get the sqrt to get mK.
        vals.append(sp.mean(sp.sign(matrix_1d[:, i]) * \
                    sp.sqrt(abs(matrix_1d[:, i]))))

        std.append(sp.std(sp.sign(matrix_1d[:, i]) * \
                   sp.sqrt(abs(matrix_1d[:, i]))))

    vals = sp.array(vals)
    std = sp.array(std)

    # Go from K to mK if True. If data is already in mK, then make it False.
    if thousand_multiplier:
        vals *= 1000.
        std *= 1000.

    # Build output dictionary.
    output["run_params"] = pkl_obj.params
    output["lags"] = pkl_obj.params["lags"]
    output["real_lags"] = real_lags
    # uncomment these only if you need them in the shelve file; makes it huge
    #output["corr"] = corr # Not supported for 6 pairs
    #output["corr_counts"] = corr_counts # not supported for 6 pairs.
    output["freq"] = pkl_obj.params["freq"]
    output["freq_axis"] = pkl_obj.pairs[0].Map1.get_axis('freq')
    output["corr1D"] = vals
    output["corr1D_std"] = std
#    output["corr1D_weights"] = corr_1d[1]
#    output["corr1D_lags"] = corr_1d[2]   # This is now the x-axis
    output["x_axis"] = x_axis
#    There are six of these now (one per pair), so a single entry is ambiguous.
#    output["corr2D"] = correlation_2d
#    output["corr2D_weights"] = corr_2d[1]   # Same as above.
    output["corr2D_fbins"] = corr_2d[2]  # Ok. Bins are the same for each pair.

    if identifier:
        return (identifier, output)

    return output
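The mK conversion used above preserves the sign of the correlation while
taking the square root of its magnitude, val = sign(x) * sqrt(|x|); a short
toy check of the arithmetic:

    import numpy as np

    corr = np.array([4.0e-6, -9.0e-6])            # correlation in K^2
    vals = np.sign(corr) * np.sqrt(np.abs(corr))  # amplitude in K, sign kept
    vals *= 1000.                                 # K -> mK: [2., -3.]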
Example #56
0
import pylab
import pyfits
import sys
from numpy import *
import core.algebra as al
import scipy

filename1 = sys.argv[1]

filename2 = filename1.split('.')[0]

array = al.load(filename1)
array = al.make_vect(array)

#print array
#   creates a 3D array with indices (freq, ra, dec)

ras = array.get_axis('ra')
decs = array.get_axis('dec')
freqs = array.get_axis('freq')
freqs = freqs / 1e6

for slice, freq in enumerate(freqs):
    nancut = (array[slice] < 10e10) & (array[slice] != NaN)
    cut = (array[slice] > 3.0 * array[slice][nancut].std())
    array[slice][cut] = 3.0 * array[slice][nancut].std()
    cut = (array[slice] < -3.0 * array[slice][nancut].std())
    array[slice][cut] = -3.0 * array[slice][nancut].std()

    #   Need to rotate array[slice] because axes were flipped
    new_array = scipy.transpose(array[slice])
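The two cuts above clamp each slice to plus or minus three standard deviations
of its finite pixels; np.clip expresses the same clamp in one call (a sketch
of the idea only, since the script recomputes the std after the first cut):

    import numpy as np

    data = np.array([0.1, -0.2, 5.0, 0.05])
    bound = 3.0 * data.std()
    clipped = np.clip(data, -bound, bound)  # out-of-range values are clamped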
Example #57
0
    def execute(self):
        '''Clean the maps of foregrounds, save the results, and get the
        autocorrelation.'''

        params = self.params
        freq_list = sp.array(params['freq_list'], dtype=int)
        lags = sp.array(params['lags'])

        # Write parameter file.
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=prefix)

        # Get the map data from file as well as the noise inverse.
        if len(params['file_middles']) == 1:
            fmid_name = params['file_middles'][0]
            params['file_middles'] = (fmid_name, fmid_name)

        if len(params['file_middles']) >= 2:
            # Deal with multiple files.
            num_maps = len(params['file_middles'])
            maps = []
            noise_invs = []

            # Load all maps and noises once.
            for map_index in range(0, num_maps):
                map_file = (params['input_root'] +
                            params['file_middles'][map_index] +
                            params['input_end_map'])

                print "Loading map %d of %d." % (map_index + 1, num_maps)

                map_in = algebra.make_vect(algebra.load(map_file))

                maps.append(map_in)
                if not params["no_weights"]:
                    noise_file = (params['input_root'] +
                                  params['file_middles'][map_index] +
                                  params['input_end_noise'])

                    print "Loading noise %d of %d." % (map_index + 1, num_maps)

                    noise_inv = algebra.make_mat(
                        algebra.open_memmap(noise_file, mode='r'))

                    noise_inv = noise_inv.mat_diag()
                else:
                    noise_inv = algebra.ones_like(map_in)

                noise_invs.append(noise_inv)

            pairs = []
            # Make pairs with deepcopies to not make mutability mistakes.
            for map1_index in range(0, num_maps):
                for map2_index in range(0, num_maps):
                    if (map2_index > map1_index):
                        map1 = copy.deepcopy(maps[map1_index])
                        map2 = copy.deepcopy(maps[map2_index])
                        noise_inv1 = copy.deepcopy(noise_invs[map1_index])
                        noise_inv2 = copy.deepcopy(noise_invs[map2_index])

                        pair = map_pair.MapPair(map1, map2, noise_inv1,
                                                noise_inv2, freq_list)

                        pair.lags = lags
                        pair.params = params

                        # Keep track of the names of maps in pairs so
                        # it knows what to save later.
                        pair.set_names(params['file_middles'][map1_index],
                                       params['file_middles'][map2_index])
                        pairs.append(pair)

            num_map_pairs = len(pairs)
            print "%d map pairs created from %d maps." % (len(pairs), num_maps)

        # Hold a reference in self.
        self.pairs = pairs

        # Get maps/ noise inv ready for running.
        if params["convolve"]:
            for pair in pairs:
                pair.degrade_resolution()

        if params['factorizable_noise']:
            for pair in pairs:
                pair.make_noise_factorizable()

        if params['sub_weighted_mean']:
            for pair in pairs:
                pair.subtract_weighted_mean()

        self.pairs = pairs
        # Since correlating takes so long, you can skip this first correlation
        # if you already have the SVDs (that is all it is really for, and it
        # is the same no matter how many modes you want to subtract).
        # Note: map_pairs will have nothing saved in 'fore_corr' if you
        # skip this correlation.
        if not params['skip_fore_corr']:
            # Correlate the maps with multiprocessing. Note that the
            # correlations are saved to file separately then loaded in
            # together because that's (one way) how multiprocessing works.
            fore_pairs = []
            processes_list = []
            for pair_index in range(0, num_map_pairs):
                # Calls 1 multiproc (which governs the correlating) for each
                # pair on a new CPU so you can have all pairs working at once.
                multi = multiprocessing.Process(target=multiproc,
                                                args=([
                                                    pairs[pair_index],
                                                    params['output_root'],
                                                    pair_index, False
                                                ]))

                processes_list.append(multi)

                multi.start()

            # Waits for all correlations to finish before continuing.
            while True in [multi.is_alive() for multi in processes_list]:
                print "processing"
                time.sleep(5)

            # just to be safe
            time.sleep(1)

            # more concise call, but multiprocessing does not behave well with
            # complex objects
            #runlist = [(pair_index,
            #            params['output_root'],
            #            False) for
            #            pair_index in range(0, num_map_pairs)]
            #pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
            #pool.map(self.multiproc, runlist)

            # Load the correlations and save them to each pair. The pairs that
            # got passed to multiproc are not the same ones as ones in
            # self.pairs, so this must be done to have actual values.
            print "Loading map pairs back into program."
            file_name = params['output_root']
            file_name += "map_pair_for_freq_slices_fore_corr_"

            for count in range(0, num_map_pairs):
                print "Loading correlation for pair %d" % (count)
                pickle_handle = open(file_name + str(count) + ".pkl", "r")
                correlate_results = cPickle.load(pickle_handle)
                pairs[count].fore_corr = correlate_results[0]
                pairs[count].fore_counts = correlate_results[1]
                fore_pairs.append(pairs[count])
                pickle_handle.close()

            self.fore_pairs = copy.deepcopy(fore_pairs)
            # With this, you do not need fore_pairs anymore.
            self.pairs = copy.deepcopy(fore_pairs)

            pairs = self.pairs

            # Get foregrounds.

            # svd_info_list keeps track of all of the modes of all maps in
            # all pairs. This means if you want to subtract a different number
            # of modes for the same maps/noises/frequencies, you have the modes
            # already saved and do not need to run the first correlation again.
            svd_info_list = []
            for pair in pairs:
                vals, modes1, modes2 = cf.get_freq_svd_modes(
                    pair.fore_corr, len(freq_list))
                pair.vals = vals

                # Save ALL of the modes for reference.
                pair.all_modes1 = modes1
                pair.all_modes2 = modes2
                svd_info = (vals, modes1, modes2)
                svd_info_list.append(svd_info)

                # Save only the modes you want to subtract.
                n_modes = params['modes']
                pair.modes1 = modes1[:n_modes]
                pair.modes2 = modes2[:n_modes]

            self.svd_info_list = svd_info_list
            self.pairs = pairs

            if params['save_svd_info']:
                ft.save_pickle(self.svd_info_list, params['svd_file'])
        else:
            # The first correlation and svd has been skipped.
            # This means you already have the modes so you can just load
            # them from file.
            self.svd_info_list = ft.load_pickle(params['svd_file'])
            # Set the svd info to the pairs.
            for i in range(0, len(pairs)):
                svd_info = self.svd_info_list[i]
                pairs[i].vals = svd_info[0]
                pairs[i].all_modes1 = svd_info[1]
                pairs[i].all_modes2 = svd_info[2]
                n_modes = params['modes']
                pairs[i].modes1 = svd_info[1][:n_modes]
                pairs[i].modes2 = svd_info[2][:n_modes]

            self.pairs = pairs

        # Subtract foregrounds.
        for pair_index in range(0, len(pairs)):
            pairs[pair_index].subtract_frequency_modes(
                pairs[pair_index].modes1, pairs[pair_index].modes2)

        # Save cleaned clean maps, cleaned noises, and modes.
        self.save_data(save_maps=params['save_maps'],
                       save_noises=params['save_noises'],
                       save_modes=params['save_modes'])

        # Finish if this was just first pass.
        if params['first_pass_only']:
            self.pairs = pairs
            return

        # Correlate the cleaned maps.
        # Here we could calculate the power spectrum instead eventually.
        temp_pair_list = []
        processes_list = []
        for pair_index in range(0, num_map_pairs):
            multi = multiprocessing.Process(target=multiproc,
                                            args=([
                                                pairs[pair_index],
                                                params['output_root'],
                                                pair_index, True
                                            ]))

            processes_list.append(multi)
            multi.start()

        while True in [multi.is_alive() for multi in processes_list]:
            print "processing"
            time.sleep(5)

        # just to be safe
        time.sleep(1)

        # would really rather use the implementation below, except
        # multiprocessing does not behave well with complex objects
        #runlist = [(pairs[pair_index],
        #            params['output_root'],
        #            pair_index, True) for
        #            pair_index in range(0, num_map_pairs)]

        #pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
        #pool.map(multiproc, runlist)

        print "Loading map pairs back into program."
        file_name = params['output_root']
        file_name += "map_pair_for_freq_slices_corr_"

        for count in range(0, num_map_pairs):
            print "Loading correlation for pair %d" % (count)
            pickle_handle = open(file_name + str(count) + ".pkl", "r")
            correlate_results = cPickle.load(pickle_handle)
            pairs[count].corr = correlate_results[0]
            pairs[count].counts = correlate_results[1]
            temp_pair_list.append(pairs[count])
            pickle_handle.close()

        self.pairs = copy.deepcopy(temp_pair_list)

        # Get the average correlation and its standard deviation.
        corr_list = []
        for pair in self.pairs:
            corr_list.append(pair.corr)

        self.corr_final, self.corr_std = cf.get_corr_and_std_3d(corr_list)

        if params['pickle_slices']:
            ft.save_pickle(self, self.params['output_root'] + \
                                 'New_Slices_object.pkl')

        return
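The cleaning step above removes the leading frequency-frequency SVD modes
from each map. A minimal numpy sketch of projecting one mode out of a toy
(freq, pixel) array (illustration only, not the pipeline's map_pair
implementation):

    import numpy as np

    nfreq, npix = 8, 100
    cube = np.random.randn(nfreq, npix)

    # frequency-frequency covariance and its SVD
    corr = np.dot(cube, cube.T) / npix
    u_modes, svals, vt_modes = np.linalg.svd(corr)

    # amplitude of the leading frequency mode in every pixel, then remove it
    mode = u_modes[:, 0]
    amplitudes = np.dot(mode, cube)
    cube_clean = cube - np.outer(mode, amplitudes)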
Example #58
0
                 0.258910010848, 0.249188429031])
    freq_data = sp.array([695, 725, 755, 785, 815, 845, 875, 905],
                             dtype=float)
    freq_data *= 1.0e6

    beamobj = beam.GaussianBeam(beam_data, freq_data)
    array_beam = beamobj.apply(array)

    algebra.save(filename, array_beam)
    outputdir = "/cita/d/www/home/eswitzer/movies/"
    pc.make_cube_movie(filename, "Temperature (mK)", pc.cube_frame_dir,
                        sigmarange=3., outputdir=outputdir, multiplier=1000.,
                        transverse=False, filetag_suffix="_trial")


if __name__ == '__main__':
    template_file = '/mnt/raid-project/gmrt/eswitzer/GBT/simulations/15hr_oldmap_ideal/sim_000.npy'
    template_map = algebra.make_vect(algebra.load(template_file))

    #simobj = corr21cm.Corr21cm.like_kiyo_map(template_map)
    #(gbtsim, gbtphys, physdim) = simobj.get_kiyo_field_physical(refinement=2)
    #save_and_plot(gbtsim, template_map, "skysim.npy")

    syncobj = foregroundsck.Synchrotron.like_kiyo_map(template_map)
    sync_field = syncobj.getfield() * 0.001
    save_and_plot(sync_field, template_map, "synsim.npy")

    ptsrcobj = pointsource.DiMatteo.like_kiyo_map(template_map)
    ptsrc_field = ptsrcobj.getfield()
    save_and_plot(ptsrc_field, template_map, "pssim.npy")
Example #59
0
        # Iterating over an n-dimensional array yields sub-arrays along the
        # first axis; each one is equivalent to data[i, :, :] in this case.
        for data_slice in data:

            # The formatting string indicates that I'm writing out
            # the values in left-justified columns 7 characters in width
            # with 2 decimal places.
            np.savetxt(outfile, data_slice, fmt='%-7.2f')

            # Writing out a break to indicate different slices...
            outfile.write('# New slice\n')


if __name__ == "__main__":
    if len(sys.argv) == 2:
        # Argument should just be a .npy file.
        array = algebra.load(sys.argv[1])
        out_fname = sys.argv[1].split('/')[-1][:-4] + '.txt'
        tofile(out_fname, array)
    elif len(sys.argv) == 3 and sys.argv[1] == str("diag"):
        # Second argument should be a .npy file that should be interpreted as a
        # matrix and we want to save the diagonal.
        mat = algebra.open_memmap(sys.argv[2])
        mat = algebra.make_mat(mat)
        array = mat.mat_diag()
        out_fname = sys.argv[2].split('/')[-1][:-4] + '.txt'
        tofile(out_fname, array)
    else:
        print("Usage : python alg2txt.py [input file] or"
              " python alg2txt.py diag [input file]")