Example #1
def generate_proc_sim(input_file, weightfile, output_file,
                      meansub=False, degrade=False):
    r"""make the maps with various combinations of beam conv/meansub"""
    print "%s -> %s (beam, etc.)" % (input_file, output_file)
    simmap = algebra.make_vect(algebra.load(input_file))

    if degrade:
        print "performing common resolution convolution"
        beam_data = sp.array([0.316148488246, 0.306805630985, 0.293729620792,
                 0.281176247549, 0.270856788455, 0.26745856078,
                 0.258910010848, 0.249188429031])
        freq_data = sp.array([695, 725, 755, 785, 815, 845, 875, 905],
                             dtype=float)
        freq_data *= 1.0e6
        beam_diff = sp.sqrt(max(1.1 * beam_data) ** 2 - (beam_data) ** 2)
        common_resolution = beam.GaussianBeam(beam_diff, freq_data)
        # Convolve to a common resolution.
        simmap = common_resolution.apply(simmap)

    if meansub:
        print "performing mean subtraction"
        noise_inv = algebra.make_vect(algebra.load(weightfile))
        means = sp.sum(sp.sum(noise_inv * simmap, -1), -1)
        means /= sp.sum(sp.sum(noise_inv, -1), -1)
        means.shape += (1, 1)
        simmap -= means
        # the weights will be zero in some places
        simmap[noise_inv < 1.e-20] = 0.

    # extra sanity?
    simmap[np.isinf(simmap)] = 0.
    simmap[np.isnan(simmap)] = 0.

    print "saving to" + output_file
    algebra.save(output_file, simmap)
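
A minimal call sketch for the function above; the .npy filenames are hypothetical and both optional steps are switched on:

# hypothetical inputs: a raw simulation cube and a matching N^-1 weight map
generate_proc_sim("sim_raw.npy", "noise_weight.npy", "sim_processed.npy",
                  meansub=True, degrade=True)
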
Example #2
    def process_noise_inv(self, filename, regenerate=True):
        r"""buffer reading the noise inverse files for speed and also
        save to a file in the intermediate output path.

        If the cached file exists as an intermediate product, load it else
        produce it.
        """
        if filename not in self.noisefiledict:
            basename = filename.split("/")[-1].split(".npy")[0]
            filename_diag = "%s/%s_diag.npy" % (self.output_root, basename)
            exists = os.access(filename_diag, os.F_OK)
            if exists and not regenerate:
                print "loading pre-diagonalized noise: " + filename_diag
                self.noisefiledict[filename] = algebra.make_vect(algebra.load(filename_diag))
            else:
                print "loading noise: " + filename
                # TODO: have this be smarter about reading various noise cov
                # inputs
                noise_inv = algebra.make_mat(algebra.open_memmap(filename, mode="r"))
                self.noisefiledict[filename] = noise_inv.mat_diag()
                # self.noisefiledict[filename] = algebra.make_vect(
                #                               algebra.load(filename))
                algebra.save(filename_diag, self.noisefiledict[filename])

        return copy.deepcopy(self.noisefiledict[filename])
def map_pair_cal(uncal_maplist, uncal_weightlist, calfactor_outlist,
                 dirtymap_inlist, dirtymap_outlist,
                 convolve=True, factorizable_noise=False,
                 sub_weighted_mean=True, freq_list=range(256)):

    map1file = reference_clean
    weight1file = reference_weight
    #map1file = uncal_maplist.pop(0)
    #weight1file = uncal_weightlist.pop(0)
    #calfactor_outlist.pop(0)
    #dirtymap_out0 = dirtymap_outlist.pop(0)
    #dirtymap_in0 = dirtymap_inlist.pop(0)

    # do nothing to the reference map
    #ref_dirtymap = algebra.make_vect(algebra.load(dirtymap_in0))
    #algebra.save(dirtymap_out0, ref_dirtymap)

    # load maps into pairs
    svdout = shelve.open("correlation_pairs_v2.shelve")
    for map2file, weight2file, calfactor_outfile, \
        dirty_infile, dirty_outfile in zip(uncal_maplist, \
            uncal_weightlist, calfactor_outlist,
            dirtymap_inlist, dirtymap_outlist):

        print map1file, weight1file, map2file, weight2file

        pair = map_pair.MapPair(map1file, map2file,
                                weight1file, weight2file,
                                freq_list, avoid_db=True)

        if factorizable_noise:
            pair.make_noise_factorizable()

        if sub_weighted_mean:
            pair.subtract_weighted_mean()

        if convolve:
            pair.degrade_resolution()

        (corr, counts) = pair.correlate()
        svd_info = ce.get_freq_svd_modes(corr, len(freq_list))
        svdout[map2file] = svd_info

        # write out the left right and cal factors
        leftmode = svd_info[1][0]
        rightmode = svd_info[2][0]
        calfactor = leftmode/rightmode

        facout = open(calfactor_outfile, "w")
        for outvals in zip(leftmode, rightmode, calfactor):
            facout.write("%10.15g %10.15g %10.15g\n" % outvals)

        facout.close()

        newmap = algebra.make_vect(algebra.load(dirty_infile))
        newmap[freq_list, :, :] *= calfactor[:,np.newaxis,np.newaxis]
        algebra.save(dirty_outfile, newmap)
        print dirty_outfile

    svdout.close()
Example #4
def plot_difference(filename1, filename2, title, sigmarange=6., sigmacut=None,
                    transverse=False, outputdir="./", multiplier=1000.,
                    logscale=False, fractional=False,
                    ignore=None, diff_filename="./difference.npy"):
    """make movies of the difference of two maps (assuming same dimensions)"""
    map1 = algebra.make_vect(algebra.load(filename1))
    map2 = algebra.make_vect(algebra.load(filename2))

    if fractional:
        difftitle = "fractional diff."
        dmap = (map1 - map2) / map1 * 100.
    else:
        difftitle = "difference"
        dmap = map1 - map2

    algebra.save(diff_filename, dmap)

    make_cube_movie(diff_filename,
                       difftitle, cube_frame_dir, sigmarange=6.,
                       sigmacut=sigmacut, outputdir=outputdir, ignore=ignore,
                       multiplier=multiplier, transverse=transverse,
                       logscale=False)

    make_cube_movie(filename1,
                       title, cube_frame_dir, sigmarange=sigmarange,
                       sigmacut=sigmacut, outputdir=outputdir, ignore=ignore,
                       multiplier=multiplier, transverse=transverse,
                       logscale=logscale, filetag_suffix="_1")

    make_cube_movie(filename2,
                       title, cube_frame_dir, sigmarange=sigmarange,
                       sigmacut=sigmacut, outputdir=outputdir, ignore=ignore,
                       multiplier=multiplier, transverse=transverse,
                       logscale=logscale, filetag_suffix="_2")
    def setUp(self) :
        # Read in just to figure out the band structure.
        this_test_file = 'testdata/testfile_guppi_rotated.fits'
        Reader = fitsGBT.Reader(this_test_file, feedback=0)
        Blocks = Reader.read((0,),())
        bands = ()
        for Data in Blocks:
            n_chan = Data.dims[3]
            Data.calc_freq()
            freq = Data.freq
            delta = abs(sp.mean(sp.diff(freq)))
            centre = freq[n_chan//2]
            band = int(centre/1e6)
            bands += (band,)
            map = sp.zeros((n_chan, 15, 11))
            map = algebra.make_vect(map, axis_names=('freq', 'ra', 'dec'))
            map.set_axis_info('freq', centre, -delta)
            map.set_axis_info('ra', 218, -0.2)
            map.set_axis_info('dec', 2, 0.2)
            algebra.save('./testout_clean_map_I_' + str(band) + '.npy', map)

        self.params = {'sm_input_root' : 'testdata/',
                       'sm_file_middles' : ("testfile",),
                       'sm_input_end' : "_guppi_rotated.fits",
                       'sm_output_root' : "./testout_",
                       'sm_output_end' : "_sub.fits",
                       'sm_solve_for_gain' : True,
                       'sm_gain_output_end' : 'gain.pickle',
                       'sm_map_input_root' : './testout_',
                       'sm_map_type' : 'clean_map_',
                       'sm_map_polarizations' : ('I',),
                       'sm_map_bands' : bands
                       }
Example #6
def add_sim_radio():
    """script: go through a list of simulations and add those to a selected map
    """
    root_file = "/mnt/raid-project/gmrt/eswitzer/wiggleZ/"
    radio_file = root_file + "modetest_combined_maps/combined_41-73_cleaned_clean_15.npy"
    root_sim = "/mnt/raid-project/gmrt/calinliv/wiggleZ/simulations/test100/"
    root_out = root_file + "simulations_plus_data/"
    radio_data = algebra.make_vect(algebra.load(radio_file))

    for simindex in range(1,101):
        simname = root_sim + "simulated_signal_map_" + \
                  repr(simindex)+"_with_beam.npy"
        filename = root_out + "simulated_signal_plusdata_map_" + \
                   repr(simindex)+"_with_beam.npy"
        simoutname = root_out + "simulated_signal_map_" + \
                   repr(simindex)+"_with_beam.npy"

        sim_data = algebra.make_vect(algebra.load(simname))
        sim_data /= 1000.
        outmap = copy.deepcopy(radio_data)
        outmap += sim_data

        algebra.save(filename, outmap)
        algebra.save(simoutname, sim_data)

        print filename
    def setUp(self):
        # Read in just to figure out the band structure.
        this_test_file = 'testdata/testfile_guppi_rotated.fits'
        Reader = fitsGBT.Reader(this_test_file, feedback=0)
        Blocks = Reader.read((0, ), ())
        bands = ()
        for Data in Blocks:
            n_chan = Data.dims[3]
            Data.calc_freq()
            freq = Data.freq
            delta = abs(sp.mean(sp.diff(freq)))
            centre = freq[n_chan // 2]
            band = int(centre / 1e6)
            bands += (band, )
            map = sp.zeros((n_chan, 15, 11))
            map = algebra.make_vect(map, axis_names=('freq', 'ra', 'dec'))
            map.set_axis_info('freq', centre, -delta)
            map.set_axis_info('ra', 218, -0.2)
            map.set_axis_info('dec', 2, 0.2)
            algebra.save('./testout_clean_map_I_' + str(band) + '.npy', map)

        self.params = {
            'sm_input_root': 'testdata/',
            'sm_file_middles': ("testfile", ),
            'sm_input_end': "_guppi_rotated.fits",
            'sm_output_root': "./testout_",
            'sm_output_end': "_sub.fits",
            'sm_solve_for_gain': True,
            'sm_gain_output_end': 'gain.pickle',
            'sm_map_input_root': './testout_',
            'sm_map_type': 'clean_map_',
            'sm_map_polarizations': ('I', ),
            'sm_map_bands': bands
        }
Example #8
def mktmp(rgn_i,rgn_j,rgn_k,srgn_i1,srgn_i2,srgn_j1,srgn_j2,srgn_k1,srgn_k2,outfilename):
    """Write to disk a file representing an empty matrix of given dimensions. Also write an identically
    shaped array of booleans, which are true if the index points to the subregion.
    rgn_i/j/k  : the dimensions of the full region to be simulated
    srgn_i/j/k : the dimensions of the deep integration subregion
    outfilename: the name of the file to be created
    """


    regiontype = np.zeros((rgn_i,rgn_j,rgn_k), bool)

    array = np.zeros((rgn_i,rgn_j,rgn_k))

    for i in range(0, rgn_i):
        for j in range(0, rgn_j):
            for k in range(0, rgn_k):
                # flag voxels that fall inside the deep-integration subregion
                if (i >= (srgn_i1 - 1) and i <= (srgn_i2 - 1) and
                        j >= (srgn_j1 - 1) and j <= (srgn_j2 - 1) and
                        k >= (srgn_k1 - 1) and k <= (srgn_k2 - 1)):
                    regiontype[i, j, k] = True
                else:
                    regiontype[i, j, k] = False

    region=algebra.info_array(array)
    regiontypename = 'bool' + outfilename
    np.save(regiontypename, regiontype)
    algebra.save(outfilename,region)
    print "done"
    template_map = algebra.make_vect(algebra.load(outfilename))
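
A usage sketch with hypothetical dimensions: a 64x64x64 full region whose deep-integration subregion spans indices 16 through 48 on each axis. The function also writes a boolean mask prefixed with "bool":

# writes "template_region.npy" plus the mask "booltemplate_region.npy"
mktmp(64, 64, 64, 16, 48, 16, 48, 16, 48, "template_region.npy")
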
Example #9
    def process_noise_inv(self, filename, regenerate=True):
        r"""buffer reading the noise inverse files for speed and also
        save to a file in the intermediate output path.

        If the cached file exists as an intermediate product, load it else
        produce it.
        """
        if filename not in self.noisefiledict:
            basename = filename.split("/")[-1].split(".npy")[0]
            filename_diag = "%s/%s_diag.npy" % \
                           (self.output_root, basename)
            exists = os.access(filename_diag, os.F_OK)
            if exists and not regenerate:
                print "loading pre-diagonalized noise: " + filename_diag
                self.noisefiledict[filename] = algebra.make_vect(
                    algebra.load(filename_diag))
            else:
                print "loading noise: " + filename
                # TODO: have this be smarter about reading various noise cov
                # inputs
                noise_inv = algebra.make_mat(
                    algebra.open_memmap(filename, mode='r'))
                self.noisefiledict[filename] = noise_inv.mat_diag()
                #self.noisefiledict[filename] = algebra.make_vect(
                #                               algebra.load(filename))
                algebra.save(filename_diag, self.noisefiledict[filename])

        return copy.deepcopy(self.noisefiledict[filename])
Example #10
def mklink_for_mappair(source_dict, target):
    r"""to make a link from target direction to the source_dic direction.
    """
    if not os.path.exists(target):
        os.mkdir(target)
    for map_pair in source_dict.keys():
        for key in ['map1', 'map2']:
            fileroot = source_dict[map_pair][key]
            filename = fileroot.split('/')[-1]
            if not os.path.exists(target+filename):
                print target+filename
                os.symlink(fileroot, target + filename)
                os.symlink(fileroot+'.meta', target+filename+'.meta')

            source_dict[map_pair][key] = target + filename

        for key in ['noise_inv1', 'noise_inv2']:
            fileroot = source_dict[map_pair][key]
            filename = fileroot.split('/')[-1]
            filename = filename.replace('inv', 'weight')
            if not os.path.exists(target+filename):
                #os.symlink(fileroot, target + filename)
                noise_inv_diag, info = find_weight_re_diagnal(fileroot)
                algebra.save(target+filename, noise_inv_diag)

            source_dict[map_pair][key] = target + filename

    return source_dict
Example #11
def noise_inv_to_weight(noiseinvlist_in, weightlist_in):
    for (noiseinv_item, weight_item) in zip(noiseinvlist_in, weightlist_in):
        print noiseinv_item, weight_item
        noise_inv = algebra.make_mat(
            algebra.open_memmap(noiseinv_item, mode='r'))
        noise_inv_diag = noise_inv.mat_diag()
        algebra.save(weight_item, noise_inv_diag)
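
A call sketch for the function above; the N^-1 matrix files and output weight files listed here are hypothetical:

# hypothetical per-section noise inverse matrices and output weight maps
noiseinv_list = ["secA_noise_inv_I.npy", "secB_noise_inv_I.npy"]
weight_list = ["secA_noise_weight_I.npy", "secB_noise_weight_I.npy"]
noise_inv_to_weight(noiseinv_list, weight_list)
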
Example #12
def add_manual_mask(source_key, cut_freq_list=None,
                    signal_name='map', noise_inv_name='noise_inv',
                    weight_name='weight', divider_token=";"):
    r"""
    `source_key` is the file db key for the maps to combine
    `signal_name` is the tag in the file db entry for the signal maps
    `noise_inv_name` is the tag in the file db entry for the N^-1 weights
    `weight_name` is the tag in the file db entry for the weights to write out
    `divider_token` is the token that divides the map section name
            from the data type e.g. "A_with_B;noise_inv"
    """
    datapath_db = data_paths.DataPath()
    source_fdb = datapath_db.fetch(source_key, silent=True)
    source_fdict = source_fdb[1]

    # accumulate all the files to combine
    noise_inv_keys = {}
    weight_keys = {}
    signal_keys = {}
    for filekey in source_fdb[0]:
        if divider_token in filekey:
            data_type = filekey.split(divider_token)[1]
            map_section = filekey.split(divider_token)[0]

            if data_type == signal_name:
                signal_keys[map_section] = source_fdict[filekey]

            if data_type == noise_inv_name:
                noise_inv_keys[map_section] = source_fdict[filekey]

            if data_type == weight_name:
                weight_keys[map_section] = source_fdict[filekey]

    for mapkey in signal_keys:
        signal_file = signal_keys[mapkey]
        noise_inv_file = noise_inv_keys[mapkey]
        weight_file = weight_keys[mapkey]
        print "loading pair: %s %s -> %s" % \
                (signal_file, noise_inv_file, weight_file)
        signal_map = algebra.make_vect(algebra.load(signal_file))
        weightmap = algebra.make_vect(algebra.load(noise_inv_file))

        # set the new weights to zero where the N^-1 is small
        # or the signal map is inf or nan
        weightmap[np.isnan(weightmap)] = 0.
        weightmap[np.isinf(weightmap)] = 0.
        weightmap[np.isnan(signal_map)] = 0.
        weightmap[np.isinf(signal_map)] = 0.
        weightmap[weightmap < 1.e-20] = 0.

        if cut_freq_list is not None:
            for cutindex in cut_freq_list:
                weightmap[cutindex, :, :] = 0.

        # could also determine the filename here, outside of the database
        #outputdir = datapath_db.fetch_parent(source_key, return_path=True)
        #weight_out = "%s/%s" % (outputdir, source_key)
        algebra.compressed_array_summary(weightmap, "new weight map")
        algebra.save(weight_file, weightmap)
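
A call sketch, assuming the file database has a key whose sections carry ";map", ";noise_inv" and ";weight" entries; the key name and channel list here are hypothetical:

# hypothetical db key; zero the weights in two contaminated channels
add_manual_mask("GBT_15hr_map", cut_freq_list=[14, 15])
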
Example #13
def extend_iqu_map(source_dict=None, target_dict=None, map_dict=None):
    if source_dict != None:
        imap = algebra.make_vect(algebra.load(source_dict['imap']))
        qmap = algebra.make_vect(algebra.load(source_dict['qmap']))
        umap = algebra.make_vect(algebra.load(source_dict['umap']))

        if source_dict.has_key('imap_weight'):
            imap_weight = algebra.make_vect(algebra.load(source_dict['imap_weight']))
            qmap_weight = algebra.make_vect(algebra.load(source_dict['qmap_weight']))
            umap_weight = algebra.make_vect(algebra.load(source_dict['umap_weight']))
        elif source_dict.has_key('imap_inv'):
            imap_weight, info = find_weight_re_diagnal(source_dict['imap_inv'])
            qmap_weight, info = find_weight_re_diagnal(source_dict['qmap_inv'])
            umap_weight, info = find_weight_re_diagnal(source_dict['umap_inv'])
        else:
            print 'Warning: no weight'
            imap_weight = algebra.ones_like(imap)
            qmap_weight = algebra.ones_like(imap)
            umap_weight = algebra.ones_like(imap)
    elif map_dict != None:
        imap = map_dict['imap']
        qmap = map_dict['qmap']
        umap = map_dict['umap']

        if 'imap_weight' in map_dict.keys():
            imap_weight = map_dict['imap_weight']
            qmap_weight = map_dict['qmap_weight']
            umap_weight = map_dict['umap_weight']
        else:
            print 'Warning: no weight'
            imap_weight = algebra.ones_like(imap)
            qmap_weight = algebra.ones_like(imap)
            umap_weight = algebra.ones_like(imap)
    else:
        print "Error: Can not find I Q U maps"
        exit()

    iqu = algebra.info_array(imap.tolist() + qmap.tolist() + umap.tolist())
    iqu = algebra.make_vect(iqu)
    iqu.info = imap.info
    iqu.copy_axis_info(imap)

    iqu_weight = algebra.info_array(imap_weight.tolist() + 
                                    qmap_weight.tolist() + 
                                    umap_weight.tolist())
    iqu_weight = algebra.make_vect(iqu_weight)
    iqu_weight.info = imap_weight.info
    iqu_weight.copy_axis_info(imap_weight)

    if target_dict != None:
        algebra.save(target_dict['map'], iqu)
        algebra.save(target_dict['weight'], iqu_weight)
    else:
        map_dict = {}
        map_dict['map']    = iqu
        map_dict['weight'] = iqu_weight
        return map_dict
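
A call sketch using per-polarization files on disk (all paths hypothetical); the stacked IQU map and weight are written to the target paths:

# hypothetical single-polarization maps and weights
source = {'imap': 'map_I.npy', 'qmap': 'map_Q.npy', 'umap': 'map_U.npy',
          'imap_weight': 'weight_I.npy',
          'qmap_weight': 'weight_Q.npy',
          'umap_weight': 'weight_U.npy'}
target = {'map': 'map_IQU.npy', 'weight': 'weight_IQU.npy'}
extend_iqu_map(source_dict=source, target_dict=target)
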
Example #14
	def process_map(self, imap_fname, nmap_fname, ii, mock_fname=None):
		params = self.params
		sigma = params['sigma']
		mu = params['mu']
		out_root = params['output_root']
		in_root = params['input_root']
		
		imap = algebra.load(in_root + imap_fname)
		imap = algebra.make_vect(imap)
		#print imap.flatten().mean()
		imap = imap - imap.flatten().mean()
		if imap.axes != ('freq', 'ra', 'dec') :
			raise ce.DataError('AXES ERROR!')

		print ' :: Set Noise to Gaussian'
		np.random.seed()
		nmap = algebra.info_array(
			sigma*np.random.randn(imap.shape[0],imap.shape[1], imap.shape[2])+mu)
		nmap.axes = imap.axes
		nmap = algebra.make_vect(nmap)
		nmap.info = imap.info
		if nmap.axes != ('freq', 'ra', 'dec') :
			raise ce.DataError('AXES ERROR!')

		## add noise to map ##
		imap = imap + nmap
		non0 = nmap.nonzero()
		nmap[non0] = (1./sigma)**2

		#if mock_fname != None:
		#	mmap = algebra.info_array(
		#		2.*np.random.randn(imap.shape[0],imap.shape[1], imap.shape[2])-0.5)
		#	mmap.axes = imap.axes
		#	mmap = algebra.make_vect(mmap)
		#	box, nbox, mbox = self.fill(imap, nmap, mmap)
		#	pkrm_nfname = out_root + 'fftbox_' +  mock_fname
		#	algebra.save(pkrm_nfname, mbox)
		#else:
		#	box, nbox = self.fill(imap, nmap)

		hr = params['hr']
		mid = params['mid']
		last = params['last']
		pol_str = params['polarizations'][0]
		end = pol_str
		if len(last)!=0:
			end = end + last[ii]
		end = end + '_' + str(ii)
		imap_fname = hr[ii] + mid[0] + end + '.npy'
		nmap_fname = hr[ii] + mid[1] + end + '.npy'

		pkrm_fname = out_root + imap_fname
		algebra.save(pkrm_fname, imap)

		pkrm_nfname = out_root + nmap_fname
		algebra.save(pkrm_nfname, nmap)
Example #15
def noise_inv_to_weight(noiseinvlist_in, weightlist_in):
    print reference_noise_inv, reference_weight
    noise_inv = algebra.make_mat(algebra.open_memmap(reference_noise_inv, mode='r'))
    noise_inv_diag = noise_inv.mat_diag()
    algebra.save(reference_weight, noise_inv_diag)
    print "done with reference weights"

    for (noiseinv_item, weight_item) in zip(noiseinvlist_in, weightlist_in):
        print noiseinv_item, weight_item
        noise_inv = algebra.make_mat(algebra.open_memmap(noiseinv_item, mode='r'))
        noise_inv_diag = noise_inv.mat_diag()
        algebra.save(weight_item, noise_inv_diag)
def test_scheme(template_file, sim_filename1, sim_filename2):
    r"""look at some differences between maps"""
    template_map = algebra.make_vect(algebra.load(template_file))
    gbtsim1 = realize_simulation(template_map, scenario="streaming", seed=5489, refinement=1.0)
    gbtsim2 = realize_simulation(template_map, seed=5489, refinement=1.0)

    sim_map1 = algebra.make_vect(gbtsim1, axis_names=("freq", "ra", "dec"))
    sim_map2 = algebra.make_vect(gbtsim2, axis_names=("freq", "ra", "dec"))
    sim_map1.copy_axis_info(template_map)
    sim_map2.copy_axis_info(template_map)
    algebra.save(sim_filename1, sim_map1)
    algebra.save(sim_filename2, sim_map2)
def convert_noiseinv_to_weight(mapkey):
    datapath_db = dp.DataPath()
    filedb = datapath_db.fetch(mapkey)[1]
    map_cases = datapath_db.fileset_cases(mapkey, "section;maptype")

    for section in map_cases['section']:
        noiseinv_file = filedb[section + ";noise_inv"]
        noiseweight_file = filedb[section + ";noise_weight"]
        print noiseinv_file, noiseweight_file

        noise_inv = algebra.make_mat(algebra.open_memmap(noiseinv_file, mode='r'))
        noise_inv_diag = noise_inv.mat_diag()
        algebra.save(noiseweight_file, noise_inv_diag)
Example #18
    def realmap(self):
        """bin the real WiggleZ catalog"""
        self.realmap_binning = bin_catalog_file(self.infile_data,
                                                self.freq_axis,
                                                self.ra_axis, self.dec_axis,
                                                skip_header=1)

        map_wigglez = algebra.make_vect(self.realmap_binning,
                                        axis_names=('freq', 'ra', 'dec'))

        map_wigglez.copy_axis_info(self.template_map)
        algebra.save(self.outfile_data, map_wigglez)

        return
def map_pair_cal(uncal_maplist, uncal_weightlist, in_path, out_path,
                 convolve=False, factorizable_noise=True,
                 sub_weighted_mean=True):

    # load maps into pairs
    for mapname, noisename in zip(uncal_maplist, uncal_weightlist):
        pair = map_pair.MapPair(map1, map2,
                                noise_inv1, noise_inv2,
                                self.freq_list)

        pair.set_names(pdict['tag1'], pdict['tag2'])

        pair.lags = self.lags
        pair.params = self.params
        self.pairs[pairitem] = pair

        (corr, counts) = pair.correlate(pair.lags, speedup=True)
        svd_info = ce.get_freq_svd_modes(corr, len(self.freq_list))
        leftmode = svd_info[1][0]
        rightmode = svd_info[2][0]

    # write out the maps and noise
        algebra.save(map1_file, pair.map1)
        algebra.save(map2_file, pair.map2)
        algebra.save(noise_inv1_file, pair.noise_inv1)
        algebra.save(noise_inv2_file, pair.noise_inv2)
Example #20
def test_scheme(template_file, sim_filename1, sim_filename2):
    r"""look at some differences between maps"""
    template_map = algebra.make_vect(algebra.load(template_file))
    gbtsim1 = realize_simulation(template_map,
                                 scenario='streaming',
                                 seed=5489,
                                 refinement=1.)
    gbtsim2 = realize_simulation(template_map, seed=5489, refinement=1.)

    sim_map1 = algebra.make_vect(gbtsim1, axis_names=('freq', 'ra', 'dec'))
    sim_map2 = algebra.make_vect(gbtsim2, axis_names=('freq', 'ra', 'dec'))
    sim_map1.copy_axis_info(template_map)
    sim_map2.copy_axis_info(template_map)
    algebra.save(sim_filename1, sim_map1)
    algebra.save(sim_filename2, sim_map2)
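
A call sketch; the template map sets the axes for both realizations, and the output names are hypothetical:

# hypothetical template cube and output simulation files
test_scheme("template_map.npy", "sim_streaming.npy", "sim_default.npy")
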
Example #21
def generate_delta_sim(input_file, output_file):
    r"""make the map with the temperature divided out (delta)"""
    print "reading %s -> %s (dividing by T_b(z))" % (input_file, output_file)

    simmap = algebra.make_vect(algebra.load(input_file))
    freq_axis = simmap.get_axis('freq') / 1.e6
    z_axis = units.nu21 / freq_axis - 1.0

    simobj = corr21cm.Corr21cm()
    T_b = simobj.T_b(z_axis)*1e-3

    simmap /= T_b[:, np.newaxis, np.newaxis]

    print "saving to" + output_file
    algebra.save(output_file, simmap)
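
A call sketch; the input cube must carry a 'freq' axis so T_b(z) can be evaluated, and both filenames are hypothetical:

# hypothetical temperature cube in, overdensity (delta) cube out
generate_delta_sim("sim_temperature.npy", "sim_delta.npy")
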
Example #22
def diag_noise(source_dict, target):
    if not os.path.exists(target):
        os.mkdir(target)
    for map_pair in source_dict.keys():
        fileroot = source_dict[map_pair]
        filename = fileroot.split('/')[-1]
        filename = filename.replace('inv', 'weight')
        if not os.path.exists(target+filename):
            #os.symlink(fileroot, target + filename)
            noise_inv_diag, info = find_weight_re_diagnal(fileroot)
            algebra.save(target+filename, noise_inv_diag)

        source_dict[map_pair] = target + filename

    return source_dict
Example #23
 def setUp(self):
     # Make a map that the Covariance gets its axis info from
     map = sp.zeros((5, 10, 8), dtype=float)
     map = algebra.make_vect(map, axis_names=('freq', 'ra', 'dec'))
     map.set_axis_info("freq", 800.0e6, 2.0e6)
     dec = 10
     map.set_axis_info("ra", 215., 0.1 / sp.cos(dec * sp.pi / 180))
     map.set_axis_info("dec", dec, 0.1)
     algebra.save("tmp_mapfile.npy", map)
     # Set up some parameters to pass.
     self.params = {
         "cv_map_file": "tmp_mapfile.npy",
         "cv_unit_system": "deg-freq",
         "cv_out_file_name": "testout_covariance.npy"
     }
Example #24
 def setUp(self) :
     # Make a map that the Covariance gets its axis info from
     map = sp.zeros((5, 10, 8), dtype=float)
     map = algebra.make_vect(map, axis_names=('freq', 'ra', 'dec'))
     map.set_axis_info("freq", 800.0e6, 2.0e6)
     dec = 10
     map.set_axis_info("ra", 215., 0.1/sp.cos(dec*sp.pi/180))
     map.set_axis_info("dec", dec, 0.1)
     algebra.save("tmp_mapfile.npy", map)
     # Set up some parameters to pass.
     self.params = {
         "cv_map_file" : "tmp_mapfile.npy",
         "cv_unit_system" : "deg-freq", 
         "cv_out_file_name" : "testout_covariance.npy"
         }
Example #25
    def delta(self):
        """find the overdensity using a separable selection function"""
        delta_data = self.produce_delta_map(self.outfile_data,
                                            self.outfile_separable)

        algebra.save(self.outfile_delta_data, delta_data)

        for mockindex in self.outfile_mock[0]:
            print "mock delta", mockindex
            mockinfile = self.outfile_mock[1][mockindex]
            mockoutfile = self.outfile_delta_mock[1][mockindex]

            delta_mock = self.produce_delta_map(mockinfile,
                                                self.outfile_separable)

            algebra.save(mockoutfile, delta_mock)
Example #26
def inv_diag_noise(source_dict, target):
    if not os.path.exists(target):
        os.mkdir(target)
    for map_pair in source_dict.keys():
        fileroot = source_dict[map_pair]
        filename = fileroot.split('/')[-1]
        filename = filename.replace('diag', 'weight')

        noise_diag = algebra.make_vect(algebra.load(fileroot))
        noise_diag[noise_diag==0] = np.inf
        noise_inv_diag = 1./noise_diag
        algebra.save(target+filename, noise_inv_diag)

        source_dict[map_pair] = target + filename

    return source_dict
Example #27
    def execute_assembledir(self):
        # link the weights through to the simulation directory
        for (weight_file_in, weight_file_out) in \
                zip(self.input_weight_maps, self.output_weight_maps):
            os.symlink(weight_file_in, weight_file_out)
            os.symlink(weight_file_in + ".meta", weight_file_out + ".meta")

        signalfile = self.output_root + self.output_signal
        signalmap = algebra.make_vect(algebra.load(signalfile))
        signalmap *= self.multiplier

        # now load the signal simulation add thermal noise and save
        for (thermal_file, mapfile) in \
                zip(self.output_thermal, self.output_maps):
            thermalmap = algebra.make_vect(algebra.load(thermal_file))
            algebra.save(mapfile, signalmap + thermalmap)
Example #28
 def execute(self, nprocesses):
     params = self.params
     # Make parent directory and write parameter file.
     kiyopy.utils.mkparents(params['output_root'])
     parse_ini.write_params(params,
                            params['output_root'] + 'params.ini',
                            prefix=prefix)
     in_root = params['input_root']
     # Figure out what the band names are.
     bands = params['bands']
     if not bands:
         map_files = glob.glob(in_root + pol_str + "_*.npy")
         bands = []
         root_len = len(in_root)
         for file_name in map_files:
             bands.append(file_name[root_len:-4])
     # Loop over polarizations.
     for pol_str in params['polarizations']:
         # Read in all the maps to be glued.
         maps = []
         for band in bands:
             band_map_fname = (in_root + pol_str + "_" + repr(band) +
                               '.npy')
             if self.feedback > 1:
                 print "Read using map: " + band_map_fname
             if params['mat_diag']:
                 if self.feedback > 1:
                     print "Treating as a matrix, getting diagonal."
                 band_map = al.open_memmap(band_map_fname, mode='r')
                 band_map = al.make_mat(band_map)
                 band_map = band_map.mat_diag()
             else:
                 band_map = al.load(band_map_fname)
                 band_map = al.make_vect(band_map)
             if band_map.axes != ('freq', 'ra', 'dec'):
                 msg = ("Expeced maps to have axes ('freq',"
                        "'ra', 'dec'), but it has axes: " +
                        str(band_map.axes))
                 raise ce.DataError(msg)
             maps.append(band_map)
         # Now glue them together.
         out_map = glue(maps)
         out_fname = (params['output_root'] + pol_str + "_" + "all" +
                      '.npy')
         if self.feedback > 1:
             print "Writing glued map to: " + out_fname
         al.save(out_fname, out_map)
Example #29
 def execute(self, nprocesses):
     params = self.params
     # Make parent directory and write parameter file.
     kiyopy.utils.mkparents(params['output_root'])
     parse_ini.write_params(params, params['output_root'] + 'params.ini',
                            prefix=prefix)
     in_root = params['input_root']        
     # Figure out what the band names are.
     bands = params['bands']
     if not bands:
         map_files = glob.glob(in_root + pol_str + "_*.npy")
         bands = []
         root_len = len(in_root)
         for file_name in map_files:
             bands.append(file_name[root_len:-4])
     # Loop over polarizations.
     for pol_str in params['polarizations']:
         # Read in all the maps to be glued.
         maps = []
         for band in bands:
             band_map_fname = (in_root + pol_str + "_" +
                           repr(band) + '.npy')
             if self.feedback > 1:
                 print "Read using map: " + band_map_fname
             if params['mat_diag']:
                 if self.feedback > 1:
                     print "Treating as a matrix, getting diagonal."
                 band_map = al.open_memmap(band_map_fname, mode='r')
                 band_map = al.make_mat(band_map)
                 band_map = band_map.mat_diag()
             else:
                 band_map = al.load(band_map_fname)
                 band_map = al.make_vect(band_map)
             if band_map.axes != ('freq', 'ra', 'dec') :
                 msg = ("Expeced maps to have axes ('freq',"
                        "'ra', 'dec'), but it has axes: "
                        + str(band_map.axes))
                 raise ce.DataError(msg)
             maps.append(band_map)
         # Now glue them together.
         out_map = glue(maps)
         out_fname = (params['output_root']
                      + pol_str + "_" + "all" + '.npy')
         if self.feedback > 1:
             print "Writing glued map to: " + out_fname
         al.save(out_fname, out_map)
Example #30
def map_pair_cal(
    uncal_maplist,
    uncal_weightlist,
    calfactor_outlist,
    dirtymap_inlist,
    dirtymap_outlist,
    reference_mapfile,
    reference_weightfile,
    sub_weighted_mean=True,
    freq_list=range(256),
):

    reference_map = algebra.make_vect(algebra.load(reference_mapfile))
    reference_weight = algebra.make_vect(algebra.load(reference_weightfile))

    reference_map = remove_mean(reference_map, reference_weight)

    # load maps into pairs
    for mapfile, weightfile, calfactor_outfile, dirty_infile, dirty_outfile in zip(
        uncal_maplist, uncal_weightlist, calfactor_outlist, dirtymap_inlist, dirtymap_outlist
    ):

        print mapfile, weightfile

        session_map = algebra.make_vect(algebra.load(mapfile))
        session_weight = algebra.make_vect(algebra.load(weightfile))

        session_map = remove_mean(session_map, session_weight)

        calfactor = template_fit(session_map, reference_map, session_weight)

        newmap = algebra.make_vect(algebra.load(dirty_infile))
        newmap[freq_list, :, :] /= calfactor[:, np.newaxis, np.newaxis]
        algebra.save(dirty_outfile, newmap)
        print dirty_outfile

        # optional test by applying the factor to the maps
        # session_map[freq_list, :, :] /= calfactor[:, np.newaxis, np.newaxis]
        # calfactor = template_fit(session_map, reference_map, session_weight)

        facout = open(calfactor_outfile, "w")
        for outvals in calfactor:
            facout.write("%10.15g\n" % outvals)

        facout.close()
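
A call sketch for a single session calibrated against a reference map; every path here is hypothetical, and the lists would normally hold one entry per session:

# hypothetical single-session inputs and outputs
map_pair_cal(["sess1_map.npy"], ["sess1_weight.npy"],
             ["sess1_calfactor.txt"],
             ["sess1_dirty.npy"], ["sess1_dirty_cal.npy"],
             "reference_map.npy", "reference_weight.npy")
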
Example #31
	def execute(self, nprocesses=1):
		params = self.params

		# Make parent directory and write parameter file.
		kiyopy.utils.mkparents(params['output_root'])
		parse_ini.write_params(params, params['output_root']+'params.ini',prefix='pk_')
		in_root = params['input_root']
		out_root = params['output_root']
		mid = params['mid']
		last = params['last']
		all_out_fname_list = []
		all_in_fname_list = []
		
		#### Process ####
		pol_str = params['polarizations'][0]
		#hr_str = params['hr'][0]
		for hr_str, ii in zip(params['hr'],range(len(params['hr']))):
			end = pol_str
			if len(last)!=0:
				end = end + last[ii]
			imap_fname = in_root + hr_str + mid[0] + end + '.npy'
			imap = algebra.load(imap_fname)
			imap = algebra.make_vect(imap)
			if imap.axes != ('freq', 'ra', 'dec') :
				raise ce.DataError('AXES ERROR!')

			nmap_fname = in_root + hr_str + mid[1] + end + '.npy'
			nmap = algebra.load(nmap_fname)
			nmap = algebra.make_vect(nmap)

			# inverse noise weight
			print 'Inverse Noise Weight... Map:' + hr_str[:-1]
			self.weight(imap, nmap, 
				out_root+hr_str+'wt_cleaned_clean_map_'+end+'.png')

			dmap_fname = out_root + 'wt_' + hr_str + mid[0] + end + '.npy'
			algebra.save(dmap_fname, imap)
			all_out_fname_list.append(
				kiyopy.utils.abbreviate_file_path(dmap_fname))

			nmap_fname = out_root + 'wt_' + hr_str + mid[1] + end + '.npy'
			algebra.save(nmap_fname, nmap)
			all_out_fname_list.append(
				kiyopy.utils.abbreviate_file_path(nmap_fname))

		return 0
Example #32
def save_and_plot(array, template, filename):
    array = algebra.make_vect(array, axis_names=('freq', 'ra', 'dec'))
    array.copy_axis_info(template)

    beam_data = sp.array([0.316148488246, 0.306805630985, 0.293729620792,
                 0.281176247549, 0.270856788455, 0.26745856078,
                 0.258910010848, 0.249188429031])
    freq_data = sp.array([695, 725, 755, 785, 815, 845, 875, 905],
                             dtype=float)
    freq_data *= 1.0e6

    beamobj = beam.GaussianBeam(beam_data, freq_data)
    array_beam = beamobj.apply(array)

    algebra.save(filename, array_beam)
    outputdir = "/cita/d/www/home/eswitzer/movies/"
    pc.make_cube_movie(filename, "Temperature (mK)", pc.cube_frame_dir,
                        sigmarange=3., outputdir=outputdir, multiplier=1000.,
                        transverse=False, filetag_suffix="_trial")
Example #33
def extract(in_dir, out_dir) :
    """Searches for noise_inv files, extracts the diagonal and writes it out.
    """
    
    files = glob.glob(in_dir + '/*noise_inv*.npy')
    for file_path in files:
        if 'noise_inv_diag' in file_path:
            continue
        file_name = file_path[len(in_dir):]
        print file_name
        parts = file_name.split('noise_inv')
        if len(parts) != 2:
            raise RuntimeError("'noise_inv' appears in file name more than"
                               " once.  Wasn't prepared for this.")
        out_path = out_dir + '/' + parts[0] + 'noise_inv_diag' + parts[1]
        mat = al.open_memmap(file_path, 'r')
        mat = al.make_mat(mat)
        mat_diag = mat.mat_diag()
        al.save(out_path, mat_diag)
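
A call sketch; both directories are hypothetical, and any file matching *noise_inv*.npy in the input directory gets a *noise_inv_diag* counterpart written to the output directory:

# hypothetical directories of N^-1 matrices and diagonal outputs
extract("./maps_with_noise_inv", "./maps_with_noise_inv_diag")
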
Example #34
def add_sim_to_data(simkey, datakey, replace=False):
    datapath_db = data_paths.DataPath()

    mapA_file = datapath_db.fetch(datakey + ":A;clean_map", intend_read=True)
    mapB_file = datapath_db.fetch(datakey + ":B;clean_map", intend_read=True)
    mapC_file = datapath_db.fetch(datakey + ":C;clean_map", intend_read=True)
    mapD_file = datapath_db.fetch(datakey + ":D;clean_map", intend_read=True)
    simfile = datapath_db.fetch(simkey + ":1", intend_read=True)

    simmap = algebra.make_vect(algebra.load(simfile))

    mapset = [mapA_file, mapB_file, mapC_file, mapD_file]
    for mapfile in mapset:
        print mapfile, simfile
        origmap = algebra.make_vect(algebra.load(mapfile))
        if replace:
            algebra.save(mapfile, simmap)
        else:
            algebra.save(mapfile, origmap + simmap)
Example #35
def add_sim_to_data(simkey, datakey, replace=False):
    datapath_db = data_paths.DataPath()

    mapA_file = datapath_db.fetch(datakey + ":A;clean_map", intend_read=True)
    mapB_file = datapath_db.fetch(datakey + ":B;clean_map", intend_read=True)
    mapC_file = datapath_db.fetch(datakey + ":C;clean_map", intend_read=True)
    mapD_file = datapath_db.fetch(datakey + ":D;clean_map", intend_read=True)
    simfile = datapath_db.fetch(simkey + ":1", intend_read=True)

    simmap = algebra.make_vect(algebra.load(simfile))

    mapset = [mapA_file, mapB_file, mapC_file, mapD_file]
    for mapfile in mapset:
        print mapfile, simfile
        origmap = algebra.make_vect(algebra.load(mapfile))
        if replace:
            algebra.save(mapfile, simmap)
        else:
            algebra.save(mapfile, origmap + simmap)
Example #36
    def separable(self):
        # now assume separability of the selection function
        spatial_selection = np.sum(self.selection_function, axis=0)

        freq_selection = np.apply_over_axes(np.sum,
                                            self.selection_function, [1, 2])

        self.separable_selection = (freq_selection * spatial_selection)

        self.separable_selection /= np.sum(freq_selection.flatten())

        map_wigglez_separable = algebra.make_vect(self.separable_selection,
                                                  axis_names=('freq', 'ra', 'dec'))

        map_wigglez_separable.copy_axis_info(self.template_map)

        algebra.save(self.outfile_separable, map_wigglez_separable)

        return
Example #37
def process_optical_to_delta(optical_file, optical_selection_file, outfile):
    print "-" * 80
    print "in: " + optical_file
    print "nbar: " + optical_selection_file
    print "out: " + outfile
    map_opt = algebra.make_vect(algebra.load(optical_file))
    map_nbar = algebra.make_vect(algebra.load(optical_selection_file))

    # convert to delta-overdensity
    map_opt = map_opt / map_nbar - 1.
    #algebra.compressed_array_summary(map_opt, "opt after conversion to delta")

    # set the NaNs and infs to zero in data and weights
    nan_array = np.isnan(map_opt)
    map_opt[nan_array] = 0.
    map_nbar[nan_array] = 0.
    inf_array = np.isinf(map_opt)
    map_opt[inf_array] = 0.
    map_nbar[inf_array] = 0.

    algebra.save(outfile, map_opt)
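
A call sketch with hypothetical filenames; the binned catalog is divided by the selection function and written out as an overdensity map:

# hypothetical binned optical catalog, selection function, and delta output
process_optical_to_delta("wigglez_binned.npy", "wigglez_selection.npy",
                         "wigglez_delta.npy")
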
Example #38
    def execute(self, processes):
        freq_list = self.weight_map.get_axis("freq")
        delta_freq = np.roll(freq_list, 1) - freq_list
        bandwidth = delta_freq[1]
        # this may be too picky
        assert bandwidth == np.mean(delta_freq[1:]), "bad freq. axis"
        print "assuming uniform bandwidth: ", bandwidth

        integration_time = self.total_integration * 3600.

        freq_weight = np.apply_over_axes(np.sum, self.weight_map, [1, 2])
        best_freq = np.argmax(freq_weight.flatten())
        print "best freq: ", freq_list[best_freq]

        # normalize such that sum of the best slice is the total integration
        # time
        norm = integration_time / np.sum(self.weight_map[best_freq, :, :])
        self.weight_map *= norm

        sys_temp = self.noise_model(freq_list[best_freq] / 1.e6)
        print "system temp at best: ", sys_temp

        #print self.weight_map[best_freq, :, :]
        delta_temp_map = sys_temp / np.sqrt(bandwidth * self.weight_map)
        delta_temp_map[np.isinf(delta_temp_map)] = self.max_stdev

        # print the delta T for the best slice in mK
        #print delta_temp_map[best_freq, :, :] * 1000.

        algebra.save(self.delta_temp_file, delta_temp_map)

        thermal_noise = np.random.randn(*delta_temp_map.shape)
        thermal_noise *= delta_temp_map
        thermal_noise = algebra.make_vect(thermal_noise,
                                          axis_names=('freq', 'ra', 'dec'))
        thermal_noise.copy_axis_info(delta_temp_map)

        #print thermal_noise[best_freq, :, :] * 1000.

        algebra.save(self.output_file, thermal_noise)
Example #39
def make_modetest_combined_sim():
    """combine output simulated maps from a mode subtraction test"""
    modedir = "/mnt/raid-project/gmrt/eswitzer/wiggleZ/modetest/"
    outdir = "/mnt/raid-project/gmrt/eswitzer/wiggleZ/modetest_combined_maps_0_50/"
    dirprefix = "73_ABCD_all_"
    data_dirsuffix = "_modes_sim_maponly/"
    cov_dirsuffix = "_modes_real_maponly/"
    for run_index in range(0,55,5):
        fullpath_data = modedir + dirprefix + repr(run_index) + data_dirsuffix
        fullpath_cov = modedir + dirprefix + repr(run_index) + cov_dirsuffix
        print fullpath_data, fullpath_cov
        fourway_split = make_fourway_list(fullpath_data, fullpath_cov)
        (map_out, weights_out, prodmap_out) = combine_maps(fourway_split)

        filename = outdir + "combined_sim_41-73_cleaned_clean_" + \
                   repr(run_index) + ".npy"
        algebra.save(filename, map_out)

        filename = outdir + "combined_sim_41-73_cleaned_noise_inv_" + \
                   repr(run_index) + ".npy"
        algebra.save(filename, weights_out)

        filename = outdir + "combined_sim_41-73_cleaned_product_" + \
                   repr(run_index) + ".npy"
        algebra.save(filename, prodmap_out)
Example #40
def generate_windows(window="blackman"):
    datapath_db = data_paths.DataPath()
    # first generate a window for the full physical volume
    filename = datapath_db.fetch('simideal_15hr_physical', intend_read=True,
                                 pick='1')
    print filename
    pcube = algebra.make_vect(algebra.load(filename))
    pwindow = algebra.make_vect(fftutil.window_nd(pcube.shape, name=window),
                                axis_names=('freq', 'ra', 'dec'))
    pwindow.copy_axis_info(pcube)
    print pwindow.shape
    algebra.save("physical_window.npy", pwindow)

    # now generate one for the observed region and project onto the physical
    # volume.
    filename = datapath_db.fetch('simideal_15hr_beam', intend_read=True,
                                 pick='1')
    print filename
    ocube = algebra.make_vect(algebra.load(filename))
    owindow = algebra.make_vect(fftutil.window_nd(ocube.shape, name=window),
                                axis_names=('freq', 'ra', 'dec'))
    owindow.copy_axis_info(ocube)
    print owindow.shape
    print owindow.axes
    algebra.save("observed_window.npy", owindow)
    pwindow = physical_gridding.physical_grid(owindow, refinement=2)
    print pwindow.shape
    algebra.save("observed_window_physreg.npy", pwindow)
Example #41
def complete_wigglez_regions(target_sample=0.25,
                             multiplier=16,
                             search_start=16):
    """15hr:
    ([214.00001499999999, 222.99998500000001], [3.0000000000000001e-06,
    3.9999989999999999], [676383691.31904757, 946937167.84666669])

    22hr:
    ([322.00003099999998, 329.99996900000002], [-1.9999990000000001,
    1.9999979999999999], [676383691.31904757, 946937167.84666669])

    1hr
    ([9.0000020000000003, 15.999999000000001], [-1.9999979999999999, 1.999997],
    [676383691.31904757, 946937167.84666669])
    """

    #print "proposed 15hr field dimensions"
    template_15hr = find_map_region(223.,
                                    214.,
                                    0.,
                                    4,
                                    target_sample=target_sample,
                                    multiplier=multiplier,
                                    search_start=search_start,
                                    exact_freq=False,
                                    max_freq=676383691.31904757 / 1.e6,
                                    min_freq=946937167.84666669 / 1.e6,
                                    n_freq=512)
    find_map_dimensions(template_15hr)
    algebra.save("templates/wigglez_15hr_complete.npy", template_15hr)

    #print "proposed 22hr field dimensions"
    template_22hr = find_map_region(330.,
                                    322.,
                                    -2.,
                                    2.,
                                    target_sample=target_sample,
                                    multiplier=multiplier,
                                    search_start=search_start,
                                    exact_freq=False,
                                    max_freq=676383691.31904757 / 1.e6,
                                    min_freq=946937167.84666669 / 1.e6,
                                    n_freq=512)
    find_map_dimensions(template_22hr)
    algebra.save("templates/wigglez_22hr_complete.npy", template_22hr)

    #print "proposed 1hr field dimensions"
    template_1hr = find_map_region(16.,
                                   9.,
                                   -2.,
                                   2.,
                                   target_sample=target_sample,
                                   multiplier=multiplier,
                                   search_start=search_start,
                                   exact_freq=False,
                                   max_freq=676383691.31904757 / 1.e6,
                                   min_freq=946937167.84666669 / 1.e6,
                                   n_freq=512)
    find_map_dimensions(template_1hr)
    algebra.save("templates/wigglez_1hr_complete.npy", template_1hr)
Example #42
def data_binning(h5py_file, num_bins):
    file = h5py.File(h5py_file)
    # reading data from h5py
    posx = file['Positions']['x'][:]
    posy = file['Positions']['y'][:]
    posz = file['Positions']['z'][:]
    HI_mass = file['HI_Masses']['HI_Masses'][:]
    # creating info for binning function
    xedges = np.linspace(0, posx.max(), num_bins)
    yedges = np.linspace(0, posy.max(), num_bins)
    zedges = np.linspace(0, posz.max(), num_bins)
    sample = np.zeros((len(posx), 4))
    sample[:, 0] = posx
    sample[:, 1] = posy
    sample[:, 2] = posz
    sample[:, 3] = HI_mass
    # calling binning function
    z = binning.histogram3d_weights(sample, xedges, yedges, zedges)
    # info for make_cube_movie.py
    x_delta = posx.max()/num_bins
    y_delta = posy.max()/num_bins
    z_delta = posz.max()/num_bins
    x_centre = posx.max()/2.
    y_centre = posy.max()/2.
    z_centre = posz.max()/2.
    info = {'ra_delta': x_delta,
            'dec_delta': y_delta,
            'dec_centre': y_centre,
            'axes': ('freq', 'ra', 'dec'),
            'ra_centre': x_centre,
            'freq_centre': z_centre,
            'freq_delta': z_delta,
            'type': 'vect'}
    # save data for make_cube_movie.py
    map = algebra.make_vect(z, axis_names=('ra', 'dec', 'freq'))
    map.info = info
    save_file = open("/tmp/mufma/data/HI_prelim_40_bins_weights.npy", "w")
    algebra.save(save_file, map)
    save_file.close()
Example #43
def map_pair_cal(uncal_maplist, uncal_weightlist, calfactor_outlist,
                 dirtymap_inlist, dirtymap_outlist,
                 reference_mapfile, reference_weightfile,
                 sub_weighted_mean=True, freq_list=range(256)):

    reference_map = algebra.make_vect(algebra.load(reference_mapfile))
    reference_weight = algebra.make_vect(algebra.load(reference_weightfile))

    reference_map = remove_mean(reference_map, reference_weight)

    # load maps into pairs
    for mapfile, weightfile, calfactor_outfile, \
        dirty_infile, dirty_outfile in zip(uncal_maplist, \
            uncal_weightlist, calfactor_outlist,
            dirtymap_inlist, dirtymap_outlist):

        print mapfile, weightfile

        session_map = algebra.make_vect(algebra.load(mapfile))
        session_weight = algebra.make_vect(algebra.load(weightfile))

        session_map = remove_mean(session_map, session_weight)

        calfactor = template_fit(session_map, reference_map, session_weight)

        newmap = algebra.make_vect(algebra.load(dirty_infile))
        newmap[freq_list, :, :] /= calfactor[:, np.newaxis, np.newaxis]
        algebra.save(dirty_outfile, newmap)
        print dirty_outfile

        # optional test by applying the factor to the maps
        #session_map[freq_list, :, :] /= calfactor[:, np.newaxis, np.newaxis]
        #calfactor = template_fit(session_map, reference_map, session_weight)

        facout = open(calfactor_outfile, "w")
        for outvals in calfactor:
            facout.write("%10.15g\n" % outvals)

        facout.close()
Example #44
    def selection(self):
        """bin the mock catalogs"""
        self.selection_function = np.zeros(self.template_map.shape)

        for mockindex in self.infile_mock[0]:
            print mockindex
            mockfile = self.infile_mock[1][mockindex]
            mock_binning = bin_catalog_file(mockfile, self.freq_axis,
                                            self.ra_axis, self.dec_axis,
                                            skip_header=1, mock=True)

            self.selection_function += mock_binning

            # if this binned mock catalog should be saved
            if mockindex in self.outfile_mock[0]:
                print "mock", self.outfile_mock[1][mockindex]
                map_wigglez_mock = algebra.make_vect(mock_binning,
                                    axis_names=('freq', 'ra', 'dec'))

                map_wigglez_mock.copy_axis_info(self.template_map)

                algebra.save(self.outfile_mock[1][mockindex], map_wigglez_mock)

        # adding the real map back to the selection function is a kludge which
        # ensures the selection function is not zero where there is real data
        # (limit of insufficient mocks)
        self.selection_function += self.realmap_binning
        self.selection_function /= float(len(self.infile_mock[0]) + 1)
        print np.mean(self.selection_function)

        map_wigglez_selection = algebra.make_vect(self.selection_function,
                                                  axis_names=('freq', 'ra', 'dec'))

        map_wigglez_selection.copy_axis_info(self.template_map)

        algebra.save(self.outfile_selection, map_wigglez_selection)

        return
Example #45
def make_individual():
    fourway_split = make_fourway_list("/mnt/raid-project/gmrt/calinliv/wiggleZ/corr/84_ABCD_all_15_modes/",
                                      "/mnt/raid-project/gmrt/calinliv/wiggleZ/corr/84_ABCD_all_15_modes/",
                                      map_middle = "_22hr_41-84_cleaned_clean_map_I_with_",
                                      cov_middle = "_22hr_41-84_cleaned_noise_inv_I_with_")
    (map_out, weights_out, prodmap_out) = combine_maps(fourway_split)
    algebra.save("combined_22hr_41-84_cleaned_clean.npy", map_out)
    algebra.save("combined_22hr_41-84_cleaned_noise_inv.npy", weights_out)
    algebra.save("combined_22hr_41-84_cleaned_product.npy", prodmap_out)
Example #46
    def execute(self, processes):
        file_list_1 = self.datapath_db.fetch(self.params['map_key_1'],
                                             tack_on=self.params["tack_on_1"],
                                             silent=True)

        file_list_2 = self.datapath_db.fetch(self.params['map_key_2'],
                                             tack_on=self.params["tack_on_2"],
                                             silent=True)

        file_list_out = self.datapath_db.fetch(
            self.params['map_key_out'],
            tack_on=self.params["tack_on_out"],
            silent=True)

        for file_key in file_list_1[0]:
            infile = file_list_1[1][file_key]
            subfile = file_list_2[1][file_key]
            outfile = file_list_out[1][file_key]

            rootdir = "/".join(outfile.split("/")[0:-1])
            if len(rootdir) > 0 and rootdir != ".":
                if not os.path.isdir(rootdir):
                    print "print_multicolumn: making dir " + rootdir
                    os.mkdir(rootdir)

            print "input: ", infile
            print "out: ", outfile

            if "map" in file_key:
                print "minus: ", subfile
                inmap = algebra.make_vect(algebra.load(infile))
                submap = algebra.make_vect(algebra.load(subfile))
                print inmap.shape, submap.shape
                algebra.save(outfile, inmap - submap)
            else:
                shutil.copy2(infile, outfile)
                shutil.copy2(infile + ".meta", outfile + ".meta")
Example #47
def get_cached_physical(filename, refinement=2, pad=5, order=1):
    basename = filename.split(".")[0]
    phys_cachename = basename + "_physical.npy"
    chksum_cachename = basename + ".md5"
    print phys_cachename, chksum_cachename

    curr_chksum = ft.hashfile(filename)
    # ALSO CHECK IF THE PARAMS CHANGED!

    # try to get an existing checksum; assume the cache is stale by default
    chksum_not_changed = False
    try:
        chkfile = open(chksum_cachename, "r")
        old_chksum = chkfile.read()
        chkfile.close()
        if old_chksum == curr_chksum:
            chksum_not_changed = True
    except IOError:
        chksum_not_changed = False

    if os.path.isfile(phys_cachename) and chksum_not_changed:
        print "using the cached file: " + phys_cachename
        ret_data = algebra.make_vect(algebra.load(phys_cachename))
    else:
        print "writing a physical cache for: " + filename
        # calculate the physical coordinate box
        obs_map = algebra.make_vect(algebra.load(filename))
        ret_data = bh.repackage_kiyo(pg.physical_grid(obs_map,
                                           refinement=refinement,
                                           pad=pad, order=order))
        algebra.save(phys_cachename, ret_data)

        # save the new checksum
        chkfile = open(chksum_cachename, "w")
        chkfile.write(curr_chksum)
        chkfile.close()

    return ret_data
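
A call sketch; the observed-region cube name is hypothetical. Repeated calls reuse the *_physical.npy cache as long as the stored md5 checksum still matches the input file:

# hypothetical observed cube; a second call hits the physical-coordinate cache
physmap = get_cached_physical("sim_beam_15hr.npy", refinement=2, pad=5, order=1)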