Example #1
    def execute(self, nprocesses=1):

        params = self.params
        kiyopy.utils.mkparents(params["output_root"] + params["output_filename"])
        parse_ini.write_params(params, params["output_root"] + "params.ini", prefix=prefix)
        output_fname = params["output_root"] + params["output_filename"]
        out_db = shelve.open(output_fname)
        file_middles = params["file_middles"]
        n_files = len(file_middles)

        n_new = nprocesses - 1  # How many new processes to spawn at once.
        if n_new > 0:
            # Loop over files and spawn processes to deal with them, but make
            # sure that only n_new processes are going at once.
            process_list = range(n_new)
            pipe_list = range(n_new)
            for ii in xrange(n_files + n_new):
                if ii >= n_new:
                    out_db[file_middles[ii - n_new]] = pipe_list[ii % n_new].recv()
                    process_list[ii % n_new].join()
                    if process_list[ii % n_new].exitcode != 0:
                        raise RuntimeError("A thread failed with exit code: " + str(process_list[ii % n_new].exitcode))
                if ii < n_files:
                    Here, Far = mp.Pipe()
                    pipe_list[ii % n_new] = Here
                    process_list[ii % n_new] = mp.Process(target=self.process_file, args=(file_middles[ii], Far))
                    process_list[ii % n_new].start()
        else:
            for middle in file_middles:
                out_db[middle] = self.process_file(middle)
        out_db.close()
        if self.feedback > 1:
            print("Wrote noise parameters to file: " + kiyopy.utils.abbreviate_file_path(output_fname))
Example #2
    def execute(self):
        """Process all data."""

        # You have access to the input parameters through the dictionary
        # self.params.
        params = self.params
        # If you have output files, make parent directories if need be.
        utils.mkparents(params['output_root'])
        # Write the input parameters to file so you can go back and look at
        # them.
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=self.prefix)

        # Loop over the files to process.
        for file_middle in params['file_middles']:
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])

            # Read in the data.  The reader is an object that can read
            # DataBlock objects out of a fits file.
            Reader = fitsGBT.Reader(input_fname, feedback=self.feedback)

            # Some examples of how you would read DataBlock Objects:
            first_scan_and_IF_DataBlock = Reader.read(scans=0, IFs=0)
            second_scan_and_first_IF_DataBlock = Reader.read(scans=1, IFs=0)
            list_of_a_few_data_blocks = Reader.read(scans=(1, 2, 3), IFs=0)
            list_of_all_data_blocks = Reader.read(scans=(), IFs=())
Example #3
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        # recordkeeping
        self.pairs = {}
        self.pairs_parallel_track = {}
        self.pairlist = []
        self.datapath_db = dp.DataPath()

        self.params = params_dict
        if parameter_file:
            self.params = parse_ini.parse(parameter_file, params_init,
                                          prefix=prefix)

        self.freq_list = sp.array(self.params['freq_list'], dtype=int)
        self.tack_on_input = self.params['tack_on_input']
        self.output_root = self.datapath_db.fetch(self.params['output_root'],
                                            tack_on=self.params['tack_on_output'])

        #self.output_root = self.params['output_root']
        print "foreground cleaning writing to output root", self.output_root

        if not os.path.isdir(self.output_root):
            os.mkdir(self.output_root)

        if self.params['SVD_root']:
            self.SVD_root = self.datapath_db.fetch(self.params['SVD_root'],
                                                   intend_write=True)
            print "WARNING: using %s to clean (intended?)" % self.SVD_root
        else:
            self.SVD_root = self.output_root

        # Write parameter file.
        parse_ini.write_params(self.params, self.output_root + 'params.ini',
                               prefix=prefix)
Example #4
    def __init__(self, parameter_file_or_dict=None):
        # recordkeeping
        self.pairs = {}
        self.pairs_nosim = {}
        self.pairlist = []
        self.noisefiledict = {}
        self.datapath_db = dp.DataPath()

        self.params = parse_ini.parse(parameter_file_or_dict,
                                      params_init,
                                      prefix=prefix)

        self.freq_list = sp.array(self.params['freq_list'], dtype=int)
        self.lags = sp.array(self.params['lags'])
        self.output_root = self.datapath_db.fetch(self.params['output_root'],
                                                  intend_write=True)

        if self.params['SVD_root']:
            self.SVD_root = self.datapath_db.fetch(self.params['SVD_root'],
                                                   intend_write=True)
            print "WARNING: using %s to clean (intended?)" % self.SVD_root
        else:
            self.SVD_root = self.output_root

        # Write parameter file.
        kiyopy.utils.mkparents(self.output_root)
        parse_ini.write_params(self.params,
                               self.output_root + 'params.ini',
                               prefix=prefix)
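Note: the two constructors above (examples #3 and #4) follow the same pattern: merge the caller's input over a params_init default dictionary with parse_ini, then persist the merged parameters next to the outputs. A minimal sketch of that round trip, assuming the behavior implied above (a dict input bypasses file parsing) and the `from kiyopy import parse_ini` import path:

import kiyopy.utils
from kiyopy import parse_ini  # assumed import path

params_init = {'freq_list': (), 'lags': (), 'output_root': './out/'}
prefix = 'fs_'

def load_and_log_params(parameter_file_or_dict):
    # Fill in defaults from params_init; option names in an ini file are
    # expected to carry the prefix (e.g. fs_freq_list).
    params = parse_ini.parse(parameter_file_or_dict, params_init,
                             prefix=prefix)
    # Record the effective parameters alongside the outputs for provenance.
    kiyopy.utils.mkparents(params['output_root'])
    parse_ini.write_params(params, params['output_root'] + 'params.ini',
                           prefix=prefix)
    return params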
Example #5
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        # recordkeeping
        self.pairs = {}
        self.pairs_parallel_track = {}
        self.pairlist = []
        self.datapath_db = dp.DataPath()

        self.params = params_dict
        if parameter_file:
            self.params = parse_ini.parse(parameter_file,
                                          params_init,
                                          prefix=prefix)

        self.freq_list = sp.array(self.params['freq_list'], dtype=int)
        self.tack_on_input = self.params['tack_on_input']
        self.output_root = self.datapath_db.fetch(
            self.params['output_root'], tack_on=self.params['tack_on_output'])

        #self.output_root = self.params['output_root']
        print "foreground cleaning writing to output root", self.output_root

        if not os.path.isdir(self.output_root):
            os.mkdir(self.output_root)

        if self.params['svd_filename'] is not None:
            self.svd_filename = self.params['svd_filename']
            print "WARNING: using %s to clean (intended?)" % self.svd_filename
        else:
            self.svd_filename = self.output_root + "/" + "SVD.hd5"

        # Write parameter file.
        parse_ini.write_params(self.params,
                               self.output_root + 'params.ini',
                               prefix=prefix)
Example #6
    def execute(self) :
        """Process all data."""
        
        # You have access to the input parameters through the dictionary
        # self.params.
        params = self.params
        # If you have output files, make parent directories if need be.
        utils.mkparents(params['output_root'])
        # Write the input parameters to file so you can go back and look at
        # them.
        parse_ini.write_params(params, params['output_root'] + 'params.ini',
                               prefix=self.prefix)

        # Loop over the files to process.
        for file_middle in params['file_middles'] :
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])
            
            # Read in the data.  The reader is an object that can read
            # DataBlock objects out of a fits file.
            Reader = fitsGBT.Reader(input_fname, feedback=self.feedback)
            
            # Some examples of how you would read DataBlock Objects:
            first_scan_and_IF_DataBlock = Reader.read(scans=0,IFs=0)
            second_scan_and_first_IF_DataBlock = Reader.read(scans=1,IFs=0)
            list_of_a_few_data_blocks = Reader.read(scans=(1,2,3),IFs=0)
            list_of_all_data_blocks = Reader.read(scans=(),IFs=())
Example #7
def wrap_batch_single_crosspwr(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the single crosspwr calculator
    """
    params_init = {"left_mapkey": "some preparation of a map, cleaned",
                   "right_simkey": "a simulation to cross it with",
                   "right_weightkey": "weight to use for that sim",
                   "multiplier": "multiply the 1D and 2D spectra",
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix="csc_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['left_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root, output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini',
                           prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_single_crosspwr(params["left_mapkey"],
                                 params["right_simkey"],
                                 params["right_weightkey"],
                                 multiplier=params["multiplier"],
                                 inifile=params["spec_ini"],
                                 datapath_db=datapath_db,
                                 outdir=output_root,
                                 output_tag=output_tag)
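Note: the prefix argument implies that option names in the ini file carry the csc_ prefix. A hypothetical input for this wrapper might look like the following, assuming kiyopy-style ini files that are parsed as Python-syntax assignments; every key and value here is illustrative:

# hypothetical single_crosspwr.ini, read with prefix="csc_"
csc_left_mapkey = "GBT_15hr_map_cleaned"
csc_right_simkey = "sim_15hr_signal"
csc_right_weightkey = "GBT_15hr_noise_inv"
csc_multiplier = 1.
csc_spec_ini = "input/spec_estimation.ini"
csc_output_tag = "xspec_check"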
Example #8
    def process_map(self, mapnum, rank):
        params = self.params
        params["hr"] = (params["hrlist"][mapnum][0], params["hrlist"][mapnum][1])
        params["last"] = (params["ltlist"][mapnum][0], params["ltlist"][mapnum][1])

        kiyopy.utils.mkparents(params["output_root"])
        inifile = params["output_root"] + "rank" + str(rank) + "params.ini"
        parse_ini.write_params(params, inifile, prefix="pk_")
        PK = mkpower.PowerSpectrumMaker(inifile, feedback=self.feedback).execute()
        self.q.put_nowait(PK)
Example #9
	def process_map(self, jknum, rank):
		params = self.params
		mid = params['mid']
		params['mid'] = ('jk'+str(jknum)+mid[0], 'jk'+str(jknum)+mid[1])

		kiyopy.utils.mkparents(params['output_root'])
		inifile = params['output_root']+ 'rank' + str(rank) +'params.ini'
		parse_ini.write_params(params, inifile ,prefix='pk_')
		PK = mkpower.PowerSpectrumMaker(
			inifile, feedback=self.feedback).execute()
		self.q.put_nowait(PK)
Example #10
	def execute(self, nprocesses=1):
		params = self.params

		# Make parent directory and write parameter file.
		kiyopy.utils.mkparents(params['output_root'])
		parse_ini.write_params(params, params['output_root']+'params.ini',prefix='pk_')
		hr = params['hr']
		mid = params['mid']
		last = params['last']
		all_out_fname_list = []
		all_in_fname_list = []
		pol_str = params['polarizations'][0]
		n_processes = params['processes']
		
		#### Process ####
		n_new = n_processes -1
		n_map = len(hr)

		if n_new <=0:
			for hr_str, ii in zip(params['hr'],range(len(params['hr']))):
				end = pol_str
				if len(last)!=0:
					end = end + last[ii]
				#imap_fname = in_root + hr_str + 'dirty_map_' + pol_str + '.npy'
				#imap_fname = in_root + hr_str + mid + pol_str + '.npy'
				imap_fname = hr_str + mid[0] + end + '.npy'
				nmap_fname = hr_str + mid[1] + end + '.npy'

				self.process_map(imap_fname, nmap_fname, ii)
		elif n_new >32:
			raise ValueError("Processes limit is 32")
		else: 
			process_list = range(n_new)
			for ii in xrange(n_map + n_new):
				if ii >= n_new:
					process_list[ii%n_new].join()
					if process_list[ii%n_new].exitcode != 0:
						raise RuntimeError("A thred faild with exit code"
							+ str(process_list[ii%n_new].exitcode))
				if ii < n_map:
					end = pol_str
					if len(last)!=0:
						end = end + last[ii]
					imap_fname = hr[ii] + mid[0] + end + '.npy'
					nmap_fname = hr[ii] + mid[1] + end + '.npy'
					#mock_fname = hr[ii] + 'mock_map_' + end + '.npy'
					process_list[ii%n_new] = mp.Process(
						target=self.process_map, 
						args=(imap_fname, nmap_fname, ii))
						#args=(imap_fname, nmap_fname, mock_fname))
					process_list[ii%n_new].start()
		return 0
Example #11
    def execute(self, processes):
        r"""prepare direction"""
        #self.output_root = self.datapath_db.fetch(self.params['output_root'],
        #                                          tack_on=self.params['tack_on'])
        self.output_root = self.params['output_root']
        if not os.path.isdir(self.output_root):
            os.makedirs(self.output_root)


        if self.params['SVD_root']:
            if os.path.exists(self.params['SVD_root']):
                self.SVD_root = self.params['SVD_root']
            else:
                self.SVD_root = self.datapath_db.fetch(self.params['SVD_root'],
                                                   intend_write=True)
            print "WARNING: using %s to clean (intended?)" % self.SVD_root
        else:
            self.SVD_root = self.output_root

        # Write parameter file.
        parse_ini.write_params(self.params, self.output_root + 'params.ini',
                               prefix=prefix)

        r"""main call to execute the various steps in foreground removal"""
        self.load_pairs()

        self.preprocess_pairs()

        if self.params['weighted_SVD']:
            self.call_pairs("apply_map_weights")

        self.calculate_correlation()
        #self.calculate_svd()

        mode_list_stop = self.params['modes']
        mode_list_start = copy.deepcopy(self.params['modes'])
        mode_list_start[1:] = mode_list_start[:-1]

        #self.uncleaned_pairs = copy.deepcopy(self.pairs)
        for (n_modes_start, n_modes_stop) in zip(mode_list_start,
                                             mode_list_stop):
            self.subtract_foregrounds(n_modes_start, n_modes_stop)

            if self.params['weighted_SVD']:
                self.call_pairs("apply_map_weights")

            self.save_data(n_modes_stop)

            if self.params['weighted_SVD']:
                self.call_pairs("apply_map_weights")
Example #12
def wrap_batch_gbtxwigglez_data_run(inifile,
                                    generate=False,
                                    outdir="./plots/"):
    r"""Wrapper to the GBT x WiggleZ calculation"""
    params_init = {
        "gbt_mapkey": "cleaned GBT map",
        "wigglez_deltakey": "WiggleZ overdensity map",
        "wigglez_mockkey": "WiggleZ overdensities from mocks",
        "wigglez_selectionkey": "WiggleZ selection function",
        "mode_transfer_1d_ini": "ini file -> 1d trans. function",
        "mode_transfer_2d_ini": "ini file -> 2d trans. function",
        "beam_transfer_ini": "ini file -> 2d beam trans. function",
        "spec_ini": "ini file for the spectral estimation",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "cwx_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['gbt_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    mode_transfer_1d = None
    if params["mode_transfer_1d_ini"]:
        mode_transfer_1d = cct.wrap_batch_crosspwr_transfer(
            params["mode_transfer_1d_ini"], generate=generate, outdir=outdir)

    batch_gbtxwigglez_data_run(params["gbt_mapkey"],
                               params["wigglez_deltakey"],
                               params["wigglez_mockkey"],
                               params["wigglez_selectionkey"],
                               inifile=params["spec_ini"],
                               datapath_db=datapath_db,
                               outdir=output_root,
                               output_tag=output_tag,
                               beam_transfer=None,
                               mode_transfer_1d=mode_transfer_1d,
                               mode_transfer_2d=None,
                               theory_curve=None)
Example #13
def wrap_batch_gbtxwigglez_data_run(inifile, generate=False,
                                    outdir="./plots/"):
    r"""Wrapper to the GBT x WiggleZ calculation"""
    params_init = {"gbt_mapkey": "cleaned GBT map",
                   "wigglez_deltakey": "WiggleZ overdensity map",
                   "wigglez_mockkey": "WiggleZ overdensities from mocks",
                   "wigglez_selectionkey": "WiggleZ selection function",
                   "mode_transfer_1d_ini": "ini file -> 1d trans. function",
                   "mode_transfer_2d_ini": "ini file -> 2d trans. function",
                   "beam_transfer_ini": "ini file -> 2d beam trans. function",
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix = "cwx_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['gbt_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini',
                           prefix=prefix)

    datapath_db = data_paths.DataPath()

    mode_transfer_1d = None
    if params["mode_transfer_1d_ini"]:
        mode_transfer_1d = cct.wrap_batch_crosspwr_transfer(
                                            params["mode_transfer_1d_ini"],
                                            generate=generate,
                                            outdir=outdir)

    batch_gbtxwigglez_data_run(params["gbt_mapkey"],
                               params["wigglez_deltakey"],
                               params["wigglez_mockkey"],
                               params["wigglez_selectionkey"],
                               inifile=params["spec_ini"],
                               datapath_db=datapath_db,
                               outdir=output_root,
                               output_tag=output_tag,
                               beam_transfer=None,
                               mode_transfer_1d=mode_transfer_1d,
                               mode_transfer_2d=None,
                               theory_curve=None)
Example #14
def write_map_cleanerini(mapname, cutlist, nfreq, factorizable=True, meansub=True,
                         regenerate=False, convolve=False,
                         subtract_inputmap_from_sim = True,
                         subtract_sim_from_inputmap = False,
                         sim_multiplier = 1., username="******",
                         modes = range(0, 105, 5), simfile=None, prefix="fs_",
                         inidir="./input/ers/map_cleaning_autogen/"):
    file_tools.mkparents(inidir)
    params = {}

    simtag = ""
    if simfile:
        simtag = "_plussim"
        if subtract_inputmap_from_sim:
            simtag = "_plussim_minusmap"
        if subtract_sim_from_inputmap:
            simtag = "_plussim_minussim"

    alt = ""
    if sim_multiplier != 1.:
        multstring = "%5.3g" % sim_multiplier
        alt = "_simx" + multstring.replace(".", "p").strip()

    key = '%s_cleaned%s%s' % (mapname, simtag, alt)
    params["output_root"] = '%s_path_%s' % (key, username)

    params["SVD_root"] = None
    params["modes"] = modes
    params["map1"] = mapname
    params["map2"] = mapname
    params["noise_inv1"] = mapname
    params["noise_inv2"] = mapname
    params["no_weights"] = False
    params["sub_weighted_mean"] = meansub
    params["factorizable_noise"] = factorizable
    params["convolve"] = convolve
    params["regenerate_noise_inv"] = regenerate
    params["freq_list"] = tuple([ind for ind in range(nfreq) \
                                 if ind not in cutlist])

    if simfile:
        params["simfile"] = simfile
        params["sim_multiplier"] = sim_multiplier
        params["subtract_inputmap_from_sim"] = subtract_inputmap_from_sim
        params["subtract_sim_from_inputmap"] = subtract_sim_from_inputmap

    filename = "%s/%s.ini" % (inidir, params["output_root"])
    print filename
    parse_ini.write_params(params, filename, prefix=prefix)
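Note: a hypothetical invocation of this generator; the map key, cut list, and channel count are placeholders rather than values from a real run:

# Writes <inidir>/GBT_15hr_map_cleaned_path_Eric.ini for this call.
write_map_cleanerini("GBT_15hr_map", cutlist=[0, 1, 2, 255], nfreq=256,
                     simfile=None, username="Eric")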
Example #15
 def execute(self, nprocesses):
     params = self.params
     # Make parent directory and write parameter file.
     kiyopy.utils.mkparents(params['output_root'])
     parse_ini.write_params(params,
                            params['output_root'] + 'params.ini',
                            prefix=prefix)
     in_root = params['input_root']
     # Figure out what the band names are.
     bands = params['bands']
     if not bands:
         # Assume the first polarization when discovering band names,
         # since pol_str is not defined until the loop below.
         pol_str = params['polarizations'][0]
         map_files = glob.glob(in_root + pol_str + "_*.npy")
         bands = []
         root_len = len(in_root)
         for file_name in map_files:
             bands.append(file_name[root_len:-4])
     # Loop over polarizations.
     for pol_str in params['polarizations']:
         # Read in all the maps to be glued.
         maps = []
         for band in bands:
             band_map_fname = (in_root + pol_str + "_" + repr(band) +
                               '.npy')
             if self.feedback > 1:
                 print "Read using map: " + band_map_fname
             if params['mat_diag']:
                 if self.feedback > 1:
                     print "Treating as a matrix, getting diagonal."
                 band_map = al.open_memmap(band_map_fname, mode='r')
                 band_map = al.make_mat(band_map)
                 band_map = band_map.mat_diag()
             else:
                 band_map = al.load(band_map_fname)
                 band_map = al.make_vect(band_map)
             if band_map.axes != ('freq', 'ra', 'dec'):
                 msg = ("Expeced maps to have axes ('freq',"
                        "'ra', 'dec'), but it has axes: " +
                        str(band_map.axes))
                 raise ce.DataError(msg)
             maps.append(band_map)
         # Now glue them together.
         out_map = glue(maps)
         out_fname = (params['output_root'] + pol_str + "_" + "all" +
                      '.npy')
         if self.feedback > 1:
             print "Writing glued map to: " + out_fname
         al.save(out_fname, out_map)
Example #16
    def __init__(self, parameter_file_or_dict=None):
        self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                      prefix=prefix)

        self.freq_list = sp.array(self.params['freq_list'], dtype=int)
        self.lags = self.params['lags']
        self.nfreq_bin = self.params['nfreq_bin']

        #self.output_root = self.datapath_db.fetch(self.params['output_root'],
        #                                          intend_write=True)
        self.output_root = self.params['output_root']
        self.ini_root = self.params['ini_root']

        # Write parameter file.
        kiyopy.utils.mkparents(self.ini_root)
        parse_ini.write_params(self.params, self.ini_root + 'params.ini',
                               prefix=prefix)
Example #17
 def execute(self, nprocesses):
     params = self.params
     # Make parent directory and write parameter file.
     kiyopy.utils.mkparents(params['output_root'])
     parse_ini.write_params(params, params['output_root'] + 'params.ini',
                            prefix=prefix)
     in_root = params['input_root']        
     # Figure out what the band names are.
     bands = params['bands']
     if not bands:
         # Assume the first polarization when discovering band names,
         # since pol_str is not defined until the loop below.
         pol_str = params['polarizations'][0]
         map_files = glob.glob(in_root + pol_str + "_*.npy")
         bands = []
         root_len = len(in_root)
         for file_name in map_files:
             bands.append(file_name[root_len:-4])
     # Loop over polarizations.
     for pol_str in params['polarizations']:
         # Read in all the maps to be glued.
         maps = []
         for band in bands:
             band_map_fname = (in_root + pol_str + "_" +
                           repr(band) + '.npy')
             if self.feedback > 1:
                 print "Read using map: " + band_map_fname
             if params['mat_diag']:
                 if self.feedback > 1:
                     print "Treating as a matrix, getting diagonal."
                 band_map = al.open_memmap(band_map_fname, mode='r')
                 band_map = al.make_mat(band_map)
                 band_map = band_map.mat_diag()
             else:
                 band_map = al.load(band_map_fname)
                 band_map = al.make_vect(band_map)
             if band_map.axes != ('freq', 'ra', 'dec') :
                 msg = ("Expeced maps to have axes ('freq',"
                        "'ra', 'dec'), but it has axes: "
                        + str(band_map.axes))
                 raise ce.DataError(msg)
             maps.append(band_map)
         # Now glue them together.
         out_map = glue(maps)
         out_fname = (params['output_root']
                      + pol_str + "_" + "all" + '.npy')
         if self.feedback > 1:
             print "Writing glued map to: " + out_fname
         al.save(out_fname, out_map)
Example #18
	def execute(self, nprocesses=1):
		params = self.params

		# Make parent directory and write parameter file.
		kiyopy.utils.mkparents(params['output_root'])
		parse_ini.write_params(params, params['output_root']+'params.ini',prefix='pk_')
		in_root = params['input_root']
		out_root = params['output_root']
		mid = params['mid']
		# 'last' is used below but was never set; take it from params as in
		# the related power-spectrum examples.
		last = params['last']
		all_out_fname_list = []
		all_in_fname_list = []
		
		#### Process ####
		pol_str = params['polarizations'][0]
		#hr_str = params['hr'][0]
		for hr_str, ii in zip(params['hr'],range(len(params['hr']))):
			end = pol_str
			if len(last)!=0:
				end = end + last[ii]
			imap_fname = in_root + hr_str + mid[0] + end + '.npy'
			imap = algebra.load(imap_fname)
			imap = algebra.make_vect(imap)
			if imap.axes != ('freq', 'ra', 'dec') :
				raise ce.DataError('AXES ERROR!')

			nmap_fname = in_root + hr_str + mid[1] + end + '.npy'
			nmap = algebra.load(nmap_fname)
			nmap = algebra.make_vect(nmap)

			# inverse noise weight
			print 'Inverse Noise Weight... Map:' + hr_str[:-1]
			self.weight(imap, nmap, 
				out_root+hr_str+'wt_cleaned_clean_map_'+end+'.png')

			dmap_fname = out_root + 'wt_' + hr_str + mid[0] + end + '.npy'
			algebra.save(dmap_fname, imap)
			all_out_fname_list.append(
				kiyopy.utils.abbreviate_file_path(dmap_fname))

			nmap_fname = out_root + 'wt_' + hr_str + mid[1] + end + '.npy'
			algebra.save(nmap_fname, nmap)
			all_out_fname_list.append(
				kiyopy.utils.abbreviate_file_path(nmap_fname))

		return 0
Example #19
def wrap_batch_gbtpwrspec_data_run(inifile, generate=False,
                                    outdir="./plots/"):
    r"""Wrapper to the GBT x GBT calculation"""
    params_init = {"gbt_mapkey": "cleaned GBT map",
                   "mode_transfer_1d_ini": "ini file -> 1d trans. function",
                   "mode_transfer_2d_ini": "ini file -> 2d trans. function",
                   "beam_transfer_ini": "ini file -> 2d beam trans. function",
                   "square_1dmodetrans": False,
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix="cp_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['gbt_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini',
                           prefix=prefix)

    datapath_db = data_paths.DataPath()

    mode_transfer_1d=None
    if params["mode_transfer_1d_ini"]:
        mode_transfer_1d = cct.wrap_batch_crosspwr_transfer(
                                            params["mode_transfer_1d_ini"],
                                            generate=generate,
                                            outdir=outdir)

    return batch_gbtpwrspec_data_run(params["gbt_mapkey"],
                         inifile=params["spec_ini"],
                         datapath_db=datapath_db,
                         outdir=output_root,
                         output_tag=output_tag,
                         beam_transfer=None,
                         square_1dmodetrans = params["square_1dmodetrans"],
                         mode_transfer_1d=mode_transfer_1d,
                         mode_transfer_2d=None)
Example #20
def wrap_batch_gbtpwrspec_data_run(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the GBT x GBT calculation"""
    params_init = {
        "gbt_mapkey": "cleaned GBT map",
        "mode_transfer_1d_ini": "ini file -> 1d trans. function",
        "mode_transfer_2d_ini": "ini file -> 2d trans. function",
        "beam_transfer_ini": "ini file -> 2d beam trans. function",
        "square_1dmodetrans": False,
        "spec_ini": "ini file for the spectral estimation",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "cp_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['gbt_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    mode_transfer_1d = None
    if params["mode_transfer_1d_ini"]:
        mode_transfer_1d = cct.wrap_batch_crosspwr_transfer(
            params["mode_transfer_1d_ini"], generate=generate, outdir=outdir)

    return batch_gbtpwrspec_data_run(
        params["gbt_mapkey"],
        inifile=params["spec_ini"],
        datapath_db=datapath_db,
        outdir=output_root,
        output_tag=output_tag,
        beam_transfer=None,
        square_1dmodetrans=params["square_1dmodetrans"],
        mode_transfer_1d=mode_transfer_1d,
        mode_transfer_2d=None)
Example #21
    def __init__(self, parameter_file_or_dict=None):
        self.params = parse_ini.parse(parameter_file_or_dict,
                                      params_init,
                                      prefix=prefix)

        self.freq_list = sp.array(self.params['freq_list'], dtype=int)
        self.lags = self.params['lags']
        self.nfreq_bin = self.params['nfreq_bin']

        #self.output_root = self.datapath_db.fetch(self.params['output_root'],
        #                                          intend_write=True)
        self.output_root = self.params['output_root']
        self.ini_root = self.params['ini_root']

        # Write parameter file.
        kiyopy.utils.mkparents(self.ini_root)
        parse_ini.write_params(self.params,
                               self.ini_root + 'params.ini',
                               prefix=prefix)
Example #22
 def execute(self, nprocesses=1) :
     
     params = self.params
     model = params["model"]
     kiyopy.utils.mkparents(params['output_root'])
     parse_ini.write_params(params, params['output_root'] + 'params.ini',
                            prefix=prefix)
     # Loop over files to process.
     for file_middle in params['file_middles'] :
         input_fname = (params['input_root'] + file_middle +
                        params['input_end'])
         Reader = core.fitsGBT.Reader(input_fname, feedback=self.feedback)
         output_fname = params["output_root"] + file_middle + ".npy"
         if model == "scan_var" :
             n_scans = len(Reader.scan_set)
             n_IFs = len(Reader.IF_set)
             first_block = True
             for jj in range(n_IFs) :
                 # These all become arrays on the first iteration.
                 var = 0.0
                 mean = 0.0
                 counts = 0
                 for ii in range(n_scans) :
                     Data = Reader.read(ii, jj)
                     if first_block :
                         out_shape = (n_IFs,) + Data.dims[1:]
                         out_arr = sp.empty(out_shape, dtype=float)
                         first_block = False
                     var += ma.sum(Data.data**2, 0).filled(0)
                     mean += ma.sum(Data.data, 0).filled(0)
                     counts += ma.count(Data.data, 0)
                 # If we didn't get at least 5 good hits, throw away the
                 # scan.
                 counts[counts < 5] = -1
                 var = var/counts - (mean/counts)**2
                 var[counts < 5] = 1.0e10
                 out_arr[jj, ...] = var
             sp.save(output_fname, out_arr)
             if self.feedback > 1 :
                 print ("Wrote noise parameters to file: " 
                        + utils.abbreviate_file_path(output_fname))
         else :
             raise ValueError("Invalid noise model: " + model)
Example #23
    def execute(self, n_processes=1):
        """Process all data.
        
        If n_processes > 1 then this function spawns a bunch of subprocesses
        in parallel, each of which deals with a single data file.  This both
        speeds things up and avoids any memory leaks (like the bad one in
        pyfits).
        """

        params = self.params
        # Make parent directories if need be.
        utils.mkparents(params['output_root'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=self.prefix)
        n_new = n_processes - 1
        n_files = len(params['file_middles'])
        # Loop over the files to process.
        if n_new <= 0:
            # Single process mode.
            for file_ind in range(n_files):
                self.process_file(file_ind)
        elif n_new > 32:
            raise ValueError("Asked for a rediculouse number of processes: " +
                             str(n_new) + ".  Limit is 32.")
        else:
            # Spawn a bunch of new processes each with a single file to
            # analyse.
            # Can't use an mp.Pool here because we don't want to reuse processes
            # due to pyfits memory leak.
            process_list = range(n_new)
            for ii in xrange(n_files + n_new):
                if ii >= n_new:
                    process_list[ii % n_new].join()
                    if process_list[ii % n_new].exitcode != 0:
                        raise RuntimeError("A thread failed with exit code: " +
                                           str(process_list[ii %
                                                            n_new].exitcode))
                if ii < n_files:
                    process_list[ii % n_new] = mp.Process(
                        target=self.process_file, args=(ii, ))
                    process_list[ii % n_new].start()
Example #24
    def execute(self, nprocesses=1):

        params = self.params
        kiyopy.utils.mkparents(params['output_root'] +
                               params['output_filename'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=prefix)
        output_fname = params['output_root'] + params["output_filename"]
        out_db = shelve.open(output_fname)
        file_middles = params['file_middles']
        n_files = len(file_middles)

        n_new = nprocesses - 1  # How many new processes to spawn at once.
        if n_new > 0:
            # Loop over files and spawn processes to deal with them, but make
            # sure that only n_new processes are going at once.
            process_list = range(n_new)
            pipe_list = range(n_new)
            for ii in xrange(n_files + n_new):
                if ii >= n_new:
                    out_db[file_middles[ii - n_new]] = pipe_list[ii %
                                                                 n_new].recv()
                    process_list[ii % n_new].join()
                    if process_list[ii % n_new].exitcode != 0:
                        raise RuntimeError("A thread failed with exit code: " +
                                           str(process_list[ii %
                                                            n_new].exitcode))
                if ii < n_files:
                    Here, Far = mp.Pipe()
                    pipe_list[ii % n_new] = Here
                    process_list[ii % n_new] = mp.Process(
                        target=self.process_file, args=(file_middles[ii], Far))
                    process_list[ii % n_new].start()
        else:
            for middle in file_middles:
                out_db[middle] = self.process_file(middle)
        out_db.close()
        if self.feedback > 1:
            print("Wrote noise parameters to file: " +
                  kiyopy.utils.abbreviate_file_path(output_fname))
Example #25
    def execute(self, n_processes=1) :
        """Process all data.
        
        If n_processes > 1 then this function spawns a bunch of subprocesses
        in parallel, each of which deals with a single data file.  This both
        speeds things up and avoids any memory leaks (like the bad one in
        pyfits).
        """

        params = self.params
        # Make parent directories if need be.
        utils.mkparents(params['output_root'])
        parse_ini.write_params(params, params['output_root'] + 'params.ini',
                               prefix=self.prefix)
        n_new = n_processes - 1
        n_files = len(params['file_middles'])
        # Loop over the files to process.
        if n_new <= 0 :
            # Single process mode.
            for file_ind in range(n_files) :
                self.process_file(file_ind)
        elif n_new > 32 :
            raise ValueError("Asked for a rediculouse number of processes: " +
                             str(n_new) + ".  Limit is 32.")
        else :
            # Spawn a bunch of new processes each with a single file to
            # analyse.
            # Can't use an mp.Pool here because we don't want to reuse processes
            # due to pyfits memory leak.
            process_list = range(n_new)
            for ii in xrange(n_files + n_new) :
                if ii >= n_new :  # >= so each slot is joined before reuse
                    process_list[ii%n_new].join()
                    if process_list[ii%n_new].exitcode != 0 : 
                        raise RuntimeError("A thread failed with exit code: "
                                        + str(process_list[ii%n_new].exitcode))
                if ii < n_files :
                    process_list[ii%n_new] = mp.Process(
                        target=self.process_file, args=(ii,))
                    process_list[ii%n_new].start()
Example #26
def write_map_cleanerini_old(mapname, cutlist, nfreq, factorizable=True, meansub=True,
                         regenerate=False, noconv=False,
                         subtract_inputmap_from_sim = True,
                         subtract_sim_from_inputmap = False,
                         modes = range(0, 105, 5), simfile=None, prefix="fs_",
                         inidir="./input/ers/map_cleaning_autogen_old/"):
    file_tools.mkparents(inidir)

    params = {}
    params["SVD_root"] = None
    params["modes"] = modes
    params["map1"] = mapname
    params["map2"] = mapname
    params["noise_inv1"] = mapname
    params["noise_inv2"] = mapname
    params["no_weights"] = False
    params["sub_weighted_mean"] = meansub
    params["factorizable_noise"] = factorizable
    params["regenerate_noise_inv"] = regenerate
    params["freq_list"] = tuple([ind for ind in range(nfreq) \
                                 if ind not in cutlist])

    tag = "_sims" if simfile else ""
    if simfile:
        params["simfile"] = simfile
        params["sim_multiplier"] = 1.
        params["subtract_inputmap_from_sim"] = subtract_inputmap_from_sim
        params["subtract_sim_from_inputmap"] = subtract_sim_from_inputmap

    params["convolve"] = False
    # TODO: move this to direct path rather than db
    params["output_root"] = "%s_cleaned%s_noconv_path_Eric" % (mapname, tag)
    filename = "%s/%s_cleaned%s_noconv.ini" % (inidir, mapname, tag)
    parse_ini.write_params(params, filename, prefix=prefix)

    params["convolve"] = True
    params["output_root"] = "%s_cleaned%s_path_Eric" % (mapname, tag)
    filename = "%s/%s_cleaned%s.ini" % (inidir, mapname, tag)
    parse_ini.write_params(params, filename, prefix=prefix)
Example #27
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        # recordkeeping
        self.pairs = {}
        self.pairs_ext = {}
        self.pairs_parallel_track = {}
        self.pairlist = []
        self.pairlist_ext = []
        self.indexlist_ext = []
        self.datapath_db = dp.DataPath()

        self.params = params_dict
        if parameter_file:
            self.params = parse_ini.parse(parameter_file, params_init,
                                          prefix=prefix)

        self.freq_list = sp.array(self.params['freq_list'], dtype=int)
        self.tack_on_input = self.params['tack_on_input']
        self.conv_factor = self.params['conv_factor']
        self.output_root = self.datapath_db.fetch(self.params['output_root'],
                                            tack_on=self.params['tack_on_output'])

        #self.output_root = self.params['output_root']
        print "foreground cleaning writing to output root", self.output_root

        if not os.path.isdir(self.output_root):
            os.mkdir(self.output_root)

        if self.params['svd_filename'] is not None:
            self.svd_filename = self.params['svd_filename']
            print "WARNING: using %s to clean (intended?)" % self.svd_filename
        else:
            self.svd_filename = self.output_root + "/" + "SVD.hd5"

        # save the signal and weight matrices
        self.modeinput_filename = self.output_root + "/" + "mode_ingredients.hd5"

        # Write parameter file.
        parse_ini.write_params(self.params, self.output_root + 'params.ini',
                               prefix=prefix)
Example #28
def wrap_batch_crosspwr_transfer(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the transfer function calculator
    """
    params_init = {
        "cleaned_simkey": "cleaned sims for transfer func",
        "truesignal_simkey": "pure signal",
        "truesignal_weightkey": "weight to use for pure signal",
        "reference_simkey": "reference signal",
        "reference_weightkey": "weight to use for reference signal",
        "spec_ini": "ini file for the spectral estimation",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "cct_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['cleaned_simkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_crosspwr_transfer(params["cleaned_simkey"],
                                   params["truesignal_simkey"],
                                   params["truesignal_weightkey"],
                                   params["reference_simkey"],
                                   params["reference_weightkey"],
                                   inifile=params["spec_ini"],
                                   datapath_db=datapath_db,
                                   outdir=output_root,
                                   output_tag=output_tag)
Example #29
def wrap_batch_crosspwr_transfer(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the transfer function calculator
    """
    params_init = {"cleaned_simkey": "cleaned sims for transfer func",
                   "truesignal_simkey": "pure signal",
                   "truesignal_weightkey": "weight to use for pure signal",
                   "reference_simkey": "reference signal",
                   "reference_weightkey": "weight to use for reference signal",
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix="cct_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['cleaned_simkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini',
                           prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_crosspwr_transfer(params["cleaned_simkey"],
                                   params["truesignal_simkey"],
                                   params["truesignal_weightkey"],
                                   params["reference_simkey"],
                                   params["reference_weightkey"],
                                   inifile=params["spec_ini"],
                                   datapath_db=datapath_db,
                                   outdir=output_root,
                                   output_tag=output_tag)
Example #30
    def __init__(self, parameter_file_or_dict=None):
        # recordkeeping
        self.pairs = {}
        self.pairs_nosim = {}
        self.pairlist = []
        self.noisefiledict = {}
        self.datapath_db = dp.DataPath()

        self.params = parse_ini.parse(parameter_file_or_dict, params_init, prefix=prefix)

        self.freq_list = sp.array(self.params["freq_list"], dtype=int)
        self.lags = sp.array(self.params["lags"])
        self.output_root = self.datapath_db.fetch(self.params["output_root"], intend_write=True)

        if self.params["SVD_root"]:
            self.SVD_root = self.datapath_db.fetch(self.params["SVD_root"], intend_write=True)
            print "WARNING: using %s to clean (intended?)" % self.SVD_root
        else:
            self.SVD_root = self.output_root

        # Write parameter file.
        kiyopy.utils.mkparents(self.output_root)
        parse_ini.write_params(self.params, self.output_root + "params.ini", prefix=prefix)
Example #31
def wrap_batch_single_crosspwr(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the single crosspwr calculator
    """
    params_init = {
        "left_mapkey": "some preparation of a map, cleaned",
        "right_simkey": "a simulation to cross it with",
        "right_weightkey": "weight to use for that sim",
        "multiplier": "multiply the 1D and 2D spectra",
        "spec_ini": "ini file for the spectral estimation",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "csc_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['left_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root, output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_single_crosspwr(params["left_mapkey"],
                                 params["right_simkey"],
                                 params["right_weightkey"],
                                 multiplier=params["multiplier"],
                                 inifile=params["spec_ini"],
                                 datapath_db=datapath_db,
                                 outdir=output_root,
                                 output_tag=output_tag)
Example #32
    def execute(self, nprocesses=1) :
        
        params = self.params
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params, params['output_root'] + 'params.ini',
                               prefix=prefix)
        guppi_result = params['Guppi_test']
        output_root = params['output_root']
        output_end = params['output_end']
        file_name = params['file_middles'][0].split('/')[1]
#        print file_name
        sess = file_name.split('_')[0]
#        print sess

        self.file_num = len(params['file_middles']) # getting a variable for number of calibrator files being used

# Need to remove count for calibrator files that are not the right size.
        session_nums = sp.zeros(self.file_num)
        c = 0
        for file_middle in params['file_middles'] :
            input_fname = (params['input_root'] + file_middle + 
                           params['input_end'])
            Reader = core.fitsGBT.Reader(input_fname)
            n_scans = len(Reader.scan_set)
            Len_set = Reader.read(0,0,force_tuple=True)
#            session_nums[c] = file_middle.split('_')[0]
#            print session_nums[c]
            for Data in Len_set :
                freq_num = Data.dims[3] # Setting the frequency binning to match whatever it's been set to. 
            if guppi_result == True : 
                if n_scans != 2 :
                    self.file_num -=1
            elif guppi_result == False :
                if n_scans != 4 :
                    self.file_num -=1
            c+=1

# Need to know the general frequency binning (going to assume that it's 200 for guppi, 260 for spectrometer, aka 1 MHz binning)
#        if guppi_result == True :
#            freq_num = 200 
        if guppi_result == False :
#            freq_num = 260    
            self.file_num *= 2 #because there are two sets of scans per file for spectrometer, need to double the number of values


        self.function = sp.zeros(4*self.file_num) #setting a variable of the right size for peval to use
        self.theta = sp.zeros(4*self.file_num) #setting a variable for parallactic angle in radians
        self.d = sp.zeros((4*self.file_num,freq_num)) #setting a variable for measured values

# Loop over files to process.      
        k=0
        for file_middle in params['file_middles'] :
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])
# Read in the data, and loop over data blocks.
            Reader = core.fitsGBT.Reader(input_fname)
            n_IFs = len(Reader.IF_set) # Should be 1 given that we've stitched windows for the spectrometer or by def in guppi
            n_scans = len(Reader.scan_set) #Should be 4 for the spectrometer, 2 for guppi
            OnBlocks = Reader.read(range(0,n_scans,2),0,force_tuple=True)
            OffBlocks = Reader.read(range(1,n_scans,2),0,force_tuple=True)
#force_tuple=True makes the output of Reader.read a tuple even if there is only one Block to return.
            Blocks = Reader.read(params['scans'], params['IFs'],
                                 force_tuple=True)

# Setting labels for indices for later
            on_ind = 0
            off_ind = 1
            XX_ind = 0
            YY_ind = 3
            XY_ind = 1
            YX_ind = 2
            
#Calculating Parallactic angles for the cal file
            PA = sp.zeros(n_scans)
            m = 0           
            for Data in Blocks:
                freq_len = Data.dims[3]
                Data.calc_freq()
                freq_val = Data.freq
                freq_val = freq_val/1000000       
                Data.calc_PA()
                PA[m] = ma.mean(Data.PA)
                m+=1
            
#Building the measured data into arrays (guppi version)         
            if guppi_result == True : 
                if n_scans == 2 : 
                    self.theta[k] = ma.mean(PA)
                    self.theta[k+1] = ma.mean(PA)
                    self.theta[k+2] = ma.mean(PA)
                    self.theta[k+3] = ma.mean(PA)

                    S_med_calon_src = sp.zeros((freq_len,4))
                    S_med_caloff_src = sp.zeros((freq_len,4))
                    S_med_calon = sp.zeros((freq_len,4))
                    S_med_caloff = sp.zeros((freq_len,4))

                    for Data in OnBlocks:
                        S_med_caloff_src[:,0] = ma.median(Data.data[:,XX_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,1] = ma.median(Data.data[:,XY_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,2] = ma.median(Data.data[:,YX_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,3] = ma.median(Data.data[:,YY_ind,off_ind,:],axis=0)

                        S_med_calon_src[:,0] = ma.median(Data.data[:,XX_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,1] = ma.median(Data.data[:,XY_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,2] = ma.median(Data.data[:,YX_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,3] = ma.median(Data.data[:,YY_ind,on_ind,:],axis=0)
                    
                    for Data in OffBlocks:
                        S_med_caloff[:,0] = ma.median(Data.data[:,XX_ind,off_ind,:],axis=0)
                        S_med_caloff[:,1] = ma.median(Data.data[:,XY_ind,off_ind,:],axis=0)
                        S_med_caloff[:,2] = ma.median(Data.data[:,YX_ind,off_ind,:],axis=0)
                        S_med_caloff[:,3] = ma.median(Data.data[:,YY_ind,off_ind,:],axis=0)
 
                        S_med_calon[:,0] = ma.median(Data.data[:,XX_ind,on_ind,:],axis=0)
                        S_med_calon[:,1] = ma.median(Data.data[:,XY_ind,on_ind,:],axis=0)
                        S_med_calon[:,2] = ma.median(Data.data[:,YX_ind,on_ind,:],axis=0)
                        S_med_calon[:,3] = ma.median(Data.data[:,YY_ind,on_ind,:],axis=0)
 
                     
                    self.d[k,:] = 0.5*(S_med_calon_src[:,0]+S_med_caloff_src[:,0]-S_med_calon[:,0]-S_med_caloff[:,0])
                    self.d[k+1,:] = 0.5*(S_med_calon_src[:,1]+S_med_caloff_src[:,1]-S_med_calon[:,1]-S_med_caloff[:,1])
                    self.d[k+2,:] = 0.5*(S_med_calon_src[:,2]+S_med_caloff_src[:,2]-S_med_calon[:,2]-S_med_caloff[:,2])
                    self.d[k+3,:] = 0.5*(S_med_calon_src[:,3]+S_med_caloff_src[:,3]-S_med_calon[:,3]-S_med_caloff[:,3])
#                    self.d[k,:] = S_med_calon[:,0] - S_med_calon_src[:,0]+S_med_caloff_src[:,0] # should be Tsys/Tcal
#                    self.d[k+1,:] = S_med_calon[:,3] - S_med_calon_src[:,3]+S_med_caloff_src[:,3] 
#                    self.d[k+2,:] = S_med_caloff[:,0] # should also be Tsys/Tcal
#                    self.d[k+3,:] = S_med_caloff[:,3]
                    k+=4

        for a in range(0,4*self.file_num):
            for b in range(0,freq_num):
#                print self.d[a,b]
                if self.d[a,b] > 1000 :
                   self.d[a,b] = 1000

        print self.d[:,150]

# self is a directory of Tsrc data. I can use this as my corrected data for plotting. 
# It looks like at the end k should equal the number of on, off src sets (so if one set of onoff scans eg 6-9, k = 8)
       

# source data for use in plotting
        XXsrc_3C286 = sp.zeros(freq_len)
        YYsrc_3C286 = sp.zeros(freq_len)
        XXsrc_3C48 = sp.zeros(freq_len)
        YYsrc_3C48 = sp.zeros(freq_len)
        Usrc_3C286 = sp.zeros(freq_len)
       
        for f in range(0,freq_num):
            Isrc_3C286 = 19.74748409*pow((750.0/freq_val[f]),0.49899785)*(2.28315426-0.000484307905*freq_val[f]) # My fit solution for 3C286
#            Isrc_3C48 = 25.15445092*pow((750.0/freq_val[f]),0.75578842)*(2.28315426-0.000484307905*freq_val[f]) # My fit solution for  3C48 
#        Isrc_3C67 = 4.56303633*pow((750.0/freq_val[f]),0.59237327)*(2.28315426-0.000484307905*freq_val[f]) # My fit solution for 3C67
            Isrc_3C48 = 31.32846821*pow(750.0/freq_val[f],0.52113534)*(2.28315426-0.000484307905*freq_val[f])#My fit solution for 3C147
            PAsrc_3C286 = 33.0*sp.pi/180.0 # for 3C286, doesn't matter for unpolarized. 
            Psrc_3C286 = 0.07 #for 3C286 
            Psrc = 0 #for #3C48,3C67  
#        Qsrc = Isrc*Psrc*sp.cos(2*PAsrc) 
#        Usrc = Isrc*Psrc*sp.sin(2*PAsrc) 
#        Vsrc = 0
            XXsrc_3C286[f] = Isrc_3C286*(1-Psrc_3C286*sp.cos(2*PAsrc_3C286))
            YYsrc_3C286[f] = Isrc_3C286*(1+Psrc_3C286*sp.cos(2*PAsrc_3C286))
            Usrc_3C286[f] = Isrc_3C286*Psrc_3C286*sp.sin(2*PAsrc_3C286)
            XXsrc_3C48[f] = Isrc_3C48
            YYsrc_3C48[f] = Isrc_3C48
#            Usrc_3C48 = 0

        XX_compare = sp.zeros((freq_len,k/4))
        YY_compare = sp.zeros((freq_len,k/4))
        XX_PA_3C286 = sp.zeros((freq_len,k/4))
        YY_PA_3C286 = sp.zeros((freq_len,k/4))
        for c in range(0,k/4):
#            XX_PA_3C286[:,c] = 0.5*(1+sp.cos(2*self.theta[4*c]))*XXsrc_3C286[:]-sp.sin(2*self.theta[4*c])*Usrc_3C286[:]+0.5*(1-sp.cos(2*self.theta[4*c]))*YYsrc_3C286[:]
#            YY_PA_3C286[:,c] = 0.5*(1-sp.cos(2*self.theta[4*c]))*XXsrc_3C286[:]+sp.sin(2*self.theta[4*c])*Usrc_3C286[:]+0.5*(1+sp.cos(2*self.theta[4*c]))*YYsrc_3C286[:]
            XX_compare[:,c] = self.d[c*4,:]
            YY_compare[:,c] = self.d[c*4+3,:] 
#            XX_compare[:,c] = self.d[c*4,:] # Tsys/Tcal 1, XX
#            YY_compare[:,c] = self.d[c*4+1,:]# Tsys/Tcal 1, YY
            XX_PA_3C286[:,c] = self.d[c*4+2,:]# Tsys/Tcal 2, XX
            YY_PA_3C286[:,c] = self.d[c*4+3,:]# Tsys/Tcal 2, YY

        pl.plot(freq_val,XXsrc_3C286,label='XX_3C286',color='b')
#        pl.plot(freq_val,YYsrc_3C286,label='YY_3C286',color='b')
#        pl.plot(freq_val,XXsrc_3C48,label='XX_3C147',color='b')
#        pl.plot(freq_val,XXsrc_3C48,label='YY_3C48',color='b')
      
        for d in range(0,k/4):
            if d == 0:
                col = 'g'
            elif d == 1:
                col = 'r'
            elif d == 2:
                col = 'c'
            elif d == 3:
                col = 'm'
            elif d == 4:
                col = 'y'
            else:
                col = 'k'

#            pl.plot(freq_val,XX_compare[:,d], 'g-.', label='XX_'+str(d),color=col)
#            pl.plot(freq_val,YY_compare[:,d], label='YY_'+str(d),color=col)
#            pl.plot(freq_val,XX_PA_3C286[:,d], label='XX_2_'+str(d),color=col)
            pl.plot(freq_val,XX_compare[:,d],label='XX_'+str(d),color = col)
#            pl.plot(freq_val,YY_compare[:,d],label='YY_'+str(d),color = col)
#            pl.plot(freq_val,XX_PA_3C286[:,d],label='XXsrc_'+str(d), color = col)
#            pl.plot(freq_val,YY_PA_3C286[:,d],label='YYsrc_'+str(d), color = col)
        leg = pl.legend(fancybox=True)
        leg.get_frame().set_alpha(0.25)
        pl.ylim(30,45)
        pl.xlabel("Frequency (MHz)")
        pl.ylabel("Temperature (K)")
        title0 = sess+ '_Tsrc_Test.png'
#        title0 = sess+'_Tsys_Test.png'
        pl.savefig(title0)
        pl.clf()
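
# A minimal, self-contained sketch (with assumed (time, pol, cal, freq)
# array shapes, not the pipeline's actual fitsGBT reader) of the
# median-difference estimate built above:
# Tsrc ~ 0.5*(src cal-on + src cal-off - off-src cal-on - off-src cal-off)
# per polarization and frequency channel.
import numpy as np

def tsrc_estimate(on_data, off_data, pol_ind, on_ind=0, off_ind=1):
    """on_data/off_data: (time, pol, cal, freq) arrays from on/off scans."""
    med = lambda d, cal_ind: np.ma.median(d[:, pol_ind, cal_ind, :], axis=0)
    return 0.5 * (med(on_data, on_ind) + med(on_data, off_ind)
                  - med(off_data, on_ind) - med(off_data, off_ind))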
Example #33
0
 def execute(self, nprocesses=1) :
     """Worker funciton."""
     params = self.params
     # Make parent directory and write parameter file.
     kiyopy.utils.mkparents(params['output_root'])
     parse_ini.write_params(params, params['output_root'] + 'params.ini',
                            prefix=prefix)
     save_noise_diag = params['save_noise_diag']
     in_root = params['input_root']
     all_out_fname_list = []
     all_in_fname_list = []
     # Figure out what the band names are.
     bands = params['bands']
     if not bands:
         # Infer the band names from the files on disk, using the first
         # polarization to build the search pattern.
         pol_str = params['polarizations'][0]
         map_files = glob.glob(in_root + 'dirty_map_' + pol_str + "_*.npy")
         bands = []
         root_len = len(in_root + 'dirty_map_' + pol_str + "_")
         for file_name in map_files:
             # Band names are numeric (repr(band) rebuilds the file names
             # below), so convert them back to ints.
             bands.append(int(file_name[root_len:-4]))
     # Loop over files to process.
     for pol_str in params['polarizations']:
         for band in bands:
             if band == -1:
                 band_str = ''
             else:
                 band_str =  "_" + repr(band)
             dmap_fname = (in_root + 'dirty_map_' + pol_str + 
                           band_str + '.npy')
             all_in_fname_list.append(
                 kiyopy.utils.abbreviate_file_path(dmap_fname))
             # Load the dirty map and the noise matrix.
             dirty_map = algebra.load(dmap_fname)
             dirty_map = algebra.make_vect(dirty_map)
             if dirty_map.axes != ('freq', 'ra', 'dec') :
                 msg = ("Expeced dirty map to have axes ('freq',"
                        "'ra', 'dec'), but it has axes: "
                        + str(dirty_map.axes))
                 raise ce.DataError(msg)
             shape = dirty_map.shape
             # Initialize the clean map.
             clean_map = algebra.info_array(sp.zeros(dirty_map.shape))
             clean_map.info = dict(dirty_map.info)
             clean_map = algebra.make_vect(clean_map)
             # If needed, initialize a map for the noise diagonal.
             if save_noise_diag :
                 noise_diag = algebra.zeros_like(clean_map)
             if params["from_eig"]:
                 # Solving from eigen decomposition of the noise instead of
                 # the noise itself.
                 # Load in the decomposition.
                 evects_fname = (in_root + 'noise_evects_' + pol_str
                                 + band_str + '.npy')
                 if self.feedback > 1:
                     print "Using dirty map: " + dmap_fname
                     print "Using eigenvectors: " + evects_fname
                 evects = algebra.open_memmap(evects_fname, 'r')
                 evects = algebra.make_mat(evects)
                 evals_inv_fname = (in_root + 'noise_evalsinv_' + pol_str
                                    + band_str + '.npy')
                 evals_inv = algebra.load(evals_inv_fname)
                 evals_inv = algebra.make_mat(evals_inv)
                 # Solve for the map.
                 if params["save_noise_diag"]:
                     clean_map, noise_diag = solve_from_eig(evals_inv,
                                 evects, dirty_map, True, self.feedback)
                 else:
                     clean_map = solve_from_eig(evals_inv,
                                 evects, dirty_map, False, self.feedback)
                 # Delete the eigen vectors to recover memory.
                 del evects
             else:
                 # Solving from the noise.
                 noise_fname = (in_root + 'noise_inv_' + pol_str +
                                band_str + '.npy')
                 if self.feedback > 1:
                     print "Using dirty map: " + dmap_fname
                     print "Using noise inverse: " + noise_fname
                 all_in_fname_list.append(
                     kiyopy.utils.abbreviate_file_path(noise_fname))
                 noise_inv = algebra.open_memmap(noise_fname, 'r')
                 noise_inv = algebra.make_mat(noise_inv)
                 # Two cases for the noise.  If its the same shape as the map
                 # then the noise is diagonal.  Otherwise, it should be
                 # block diagonal in frequency.
                 if noise_inv.ndim == 3 :
                     if noise_inv.axes != ('freq', 'ra', 'dec') :
                         msg = ("Expeced noise matrix to have axes "
                                 "('freq', 'ra', 'dec'), but it has: "
                                 + str(noise_inv.axes))
                         raise ce.DataError(msg)
                     # Noise inverse can fit in memory, so copy it.
                     noise_inv_memory = sp.array(noise_inv, copy=True)
                     # Find the non-singular (covered) pixels.
                     max_information = noise_inv_memory.max()
                     good_data = noise_inv_memory > 1.0e-10*max_information
                     # Make the clean map.
                     clean_map[good_data] = (dirty_map[good_data] 
                                             / noise_inv_memory[good_data])
                     if save_noise_diag :
                         noise_diag[good_data] = \
                                 1/noise_inv_memory[good_data]
                 elif noise_inv.ndim == 5 :
                     if noise_inv.axes != ('freq', 'ra', 'dec', 'ra',
                                           'dec'):
                         msg = ("Expeced noise matrix to have axes "
                                "('freq', 'ra', 'dec', 'ra', 'dec'), "
                                "but it has: " + str(noise_inv.axes))
                         raise ce.DataError(msg)
                     # Arrange the dirty map as a vector.
                     dirty_map_vect = sp.array(dirty_map) # A copy.
                     dirty_map_vect.shape = (shape[0], shape[1]*shape[2])
                     frequencies = dirty_map.get_axis('freq')/1.0e6
                     # Allocate memory only once.
                     noise_inv_freq = sp.empty((shape[1], shape[2], 
                                     shape[1], shape[2]), dtype=float)
                     if self.feedback > 1 :
                         print "Inverting noise matrix."
                     # Block diagonal in frequency so loop over frequencies.
                     for ii in xrange(dirty_map.shape[0]) :
                         if self.feedback > 1:
                             print "Frequency: ", "%5.1f"%(frequencies[ii]),
                         if self.feedback > 2:
                             print ", start mmap read:",
                             sys.stdout.flush()
                         noise_inv_freq[...] = noise_inv[ii, ...]
                         if self.feedback > 2:
                             print "done, start eig:",
                             sys.stdout.flush()
                         noise_inv_freq.shape = (shape[1]*shape[2],
                                                 shape[1]*shape[2])
                         # Solve the map making equation by diagonalization.
                         noise_inv_diag, Rot = sp.linalg.eigh(
                             noise_inv_freq, overwrite_a=True)
                         if self.feedback > 2:
                             print "done",
                         map_rotated = sp.dot(Rot.T, dirty_map_vect[ii])
                         # Zero out infinite noise modes.
                         bad_modes = (noise_inv_diag
                                      < 1.0e-5 * noise_inv_diag.max())
                         if self.feedback > 1:
                             print ", discarded: ",
                             print "%4.1f" % (100.0 * sp.sum(bad_modes) 
                                              / bad_modes.size),
                             print "% of modes",
                         if self.feedback > 2:
                             print ", start rotations:",
                             sys.stdout.flush()
                         map_rotated[bad_modes] = 0.
                         noise_inv_diag[bad_modes] = 1.0
                         # Solve for the clean map and rotate back.
                         map_rotated /= noise_inv_diag
                         map = sp.dot(Rot, map_rotated)
                         if self.feedback > 2:
                             print "done",
                             sys.stdout.flush()
                         # Fill the clean array.
                         map.shape = (shape[1], shape[2])
                         clean_map[ii, ...] = map
                         if save_noise_diag :
                             # Using C = R Lambda R^T 
                             # where Lambda = diag(1/noise_inv_diag).
                             temp_noise_diag = 1/noise_inv_diag
                             temp_noise_diag[bad_modes] = 0
                             # Multiply R by the diagonal eigenvalue matrix.
                             # Broadcasting does equivalent of mult by diag
                             # matrix.
                             temp_mat = Rot*temp_noise_diag
                             # Multiply by R^T, but only calculate the
                             # diagonal elements.
                             for jj in range(shape[1]*shape[2]) :
                                 temp_noise_diag[jj] = sp.dot(
                                     temp_mat[jj,:], Rot[jj,:])
                             temp_noise_diag.shape = (shape[1], shape[2])
                             noise_diag[ii, ...] = temp_noise_diag
                         # Return workspace memory to original shape.
                         noise_inv_freq.shape = (shape[1], shape[2],
                                                 shape[1], shape[2])
                         if self.feedback > 1:
                             print ""
                             sys.stdout.flush()
                 elif noise_inv.ndim == 6 :
                     if save_noise_diag:
                         # OLD WAY.
                         #clean_map, noise_diag, chol = solve(noise_inv,
                         #        dirty_map, True, feedback=self.feedback)
                         # NEW WAY.
                         clean_map, noise_diag, noise_inv_diag, chol = \
                                   solve(noise_fname, noise_inv, dirty_map,
                                   True, feedback=self.feedback)
                     else:
                         # OLD WAY.
                         #clean_map, chol = solve(noise_inv, dirty_map, 
                         #            False, feedback=self.feedback)
                         # NEW WAY.
                         clean_map, noise_inv_diag, chol = \
                                   solve(noise_fname, noise_inv, dirty_map,
                                   False, feedback=self.feedback)
                     if params['save_cholesky']:
                         chol_fname = (params['output_root'] + 'chol_'
                                     + pol_str + band_str + '.npy')
                         sp.save(chol_fname, chol)
                     if params['save_noise_inv_diag']:
                         noise_inv_diag_fname = (params['output_root'] +
                                    'noise_inv_diag_' + pol_str + band_str 
                                    + '.npy')
                         algebra.save(noise_inv_diag_fname, noise_inv_diag)
                     # Delete the cholesky to recover memory.
                     del chol
                 else :
                     raise ce.DataError("Noise matrix has bad shape.")
                 # In all cases delete the noise object to recover memory.
                 del noise_inv
             # Write the clean map to file.
             out_fname = (params['output_root'] + 'clean_map_'
                          + pol_str + band_str + '.npy')
             if self.feedback > 1:
                 print "Writing clean map to: " + out_fname
             algebra.save(out_fname, clean_map)
             all_out_fname_list.append(
                 kiyopy.utils.abbreviate_file_path(out_fname))
             if save_noise_diag :
                 noise_diag_fname = (params['output_root'] + 'noise_diag_'
                                     + pol_str + band_str + '.npy')
                 algebra.save(noise_diag_fname, noise_diag)
                 all_out_fname_list.append(
                     kiyopy.utils.abbreviate_file_path(noise_diag_fname))
             # Check the clean map for failure.
             if not sp.alltrue(sp.isfinite(clean_map)):
                 n_bad = sp.sum(sp.logical_not(sp.isfinite(clean_map)))
                 msg = ("Non finite entries found in clean map. Solve"
                        " failed. %d out of %d entries bad" 
                        % (n_bad, clean_map.size)) 
                 raise RuntimeError(msg)
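
# A condensed sketch (plain numpy/scipy, shapes assumed) of the
# per-frequency solve in the 5-d noise branch above: eigendecompose the
# noise inverse, rotate the dirty map, zero the near-singular modes, divide
# by the eigenvalues, and rotate back.  The noise diagonal uses
# diag(R Lambda R^T) = sum_m R[:, m]**2 * lambda_m without forming the
# full matrix.
import numpy as np
from scipy import linalg

def solve_one_freq(noise_inv_freq, dirty_vec, threshold=1.0e-5):
    """noise_inv_freq: (n_pix, n_pix) matrix; dirty_vec: (n_pix,) vector."""
    evals, rot = linalg.eigh(noise_inv_freq)
    rotated = np.dot(rot.T, dirty_vec)
    bad = evals < threshold * evals.max()  # infinite-noise modes
    rotated[bad] = 0.
    evals[bad] = 1.  # placeholder to avoid dividing by zero
    clean_vec = np.dot(rot, rotated / evals)
    noise_diag = np.sum(rot**2 * np.where(bad, 0., 1. / evals), axis=1)
    return clean_vec, noise_diag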
    def execute(self):
        '''Clean the maps of foregrounds, save the results, and get the
        autocorrelation.'''

        params = self.params
        freq_list = sp.array(params['freq_list'], dtype=int)
        lags = sp.array(params['lags'])

        # Write parameter file.
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params, params['output_root'] + 'params.ini',
                               prefix=prefix)

        # Get the map data from file as well as the noise inverse.
        if len(params['file_middles']) == 1:
            fmid_name = params['file_middles'][0]
            params['file_middles'] = (fmid_name, fmid_name)

        if len(params['file_middles']) >= 2:
            # Deal with multiple files.
            num_maps = len(params['file_middles'])
            maps = []
            noise_invs = []

            # Load all maps and noises once.
            for map_index in range(0, num_maps):
                map_file = (params['input_root'] +
                            params['file_middles'][map_index] +
                            params['input_end_map'])

                print "Loading map %d of %d." % (map_index + 1, num_maps)

                map_in = algebra.make_vect(algebra.load(map_file))

                maps.append(map_in)
                if not params["no_weights"]:
                    noise_file = (params['input_root'] +
                                  params['file_middles'][map_index] +
                                  params['input_end_noise'])

                    print "Loading noise %d of %d." % (map_index + 1, num_maps)

                    noise_inv = algebra.make_mat(
                                    algebra.open_memmap(noise_file, mode='r'))

                    noise_inv = noise_inv.mat_diag()
                else:
                    noise_inv = algebra.ones_like(map_in)

                noise_invs.append(noise_inv)

            pairs = []
            # Make pairs with deepcopies to not make mutability mistakes.
            for map1_index in range(0, num_maps):
                for map2_index in range(0, num_maps):
                    if (map2_index > map1_index):
                        map1 = copy.deepcopy(maps[map1_index])
                        map2 = copy.deepcopy(maps[map2_index])
                        noise_inv1 = copy.deepcopy(noise_invs[map1_index])
                        noise_inv2 = copy.deepcopy(noise_invs[map2_index])

                        pair = map_pair.MapPair(map1, map2,
                                                noise_inv1, noise_inv2,
                                                freq_list)

                        pair.lags = lags
                        pair.params = params

                        # Keep track of the names of maps in pairs so
                        # it knows what to save later.
                        pair.set_names(params['file_middles'][map1_index],
                                       params['file_middles'][map2_index])
                        pairs.append(pair)

            num_map_pairs = len(pairs)
            print "%d map pairs created from %d maps." % (len(pairs), num_maps)

        # Hold a reference in self.
        self.pairs = pairs

        # Get maps/ noise inv ready for running.
        if params["convolve"]:
            for pair in pairs:
                pair.degrade_resolution()

        if params['factorizable_noise']:
            for pair in pairs:
                pair.make_noise_factorizable()

        if params['sub_weighted_mean']:
            for pair in pairs:
                pair.subtract_weighted_mean()

        self.pairs = pairs
        # Since correlating takes so long, if you already have the svds
        # you can skip this first correlation [since that's all it's really
        # for and it is the same no matter how many modes you want].
        # Note: map_pairs will not have anything saved in 'fore_corr' if you
        # skip this correlation.
        if not params['skip_fore_corr']:
            # Correlate the maps with multiprocessing. Note that the
            # correlations are saved to file separately then loaded in
            # together because that's (one way) how multiprocessing works.
            runlist = [(pairs[pair_index],
                        params['output_root'],
                        pair_index, False) for
                        pair_index in range(0, num_map_pairs)]

            pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
            pool.map(multiproc, runlist)

            # Load the correlations and save them to each pair. The pairs that
            # got passed to multiproc are not the same ones as ones in
            # self.pairs, so this must be done to have actual values.
            print "Loading map pairs back into program."
            file_name = params['output_root']
            file_name += "map_pair_for_freq_slices_fore_corr_"

            fore_pairs = []
            for count in range(0, num_map_pairs):
                print "Loading correlation for pair %d" % (count)
                pickle_handle = open(file_name + str(count) + ".pkl", "r")
                correlate_results = cPickle.load(pickle_handle)
                pairs[count].fore_corr = correlate_results[0]
                pairs[count].fore_counts = correlate_results[1]
                fore_pairs.append(pairs[count])
                pickle_handle.close()

            self.fore_pairs = copy.deepcopy(fore_pairs)
            # With this, you do not need fore_pairs anymore.
            self.pairs = copy.deepcopy(fore_pairs)

            pairs = self.pairs

            # Get foregrounds.

            # svd_info_list keeps track of all of the modes of all maps in
            # all pairs. This means if you want to subtract a different number
            # of modes for the same maps/noises/frequencies, you have the modes
            # already saved and do not need to run the first correlation again.
            svd_info_list = []
            for pair in pairs:
                vals, modes1, modes2 = cf.get_freq_svd_modes(pair.fore_corr,
                                                          len(freq_list))
                pair.vals = vals

                # Save ALL of the modes for reference.
                pair.all_modes1 = modes1
                pair.all_modes2 = modes2
                svd_info = (vals, modes1, modes2)
                svd_info_list.append(svd_info)

                # Save only the modes you want to subtract.
                n_modes = params['modes']
                pair.modes1 = modes1[:n_modes]
                pair.modes2 = modes2[:n_modes]

            self.svd_info_list = svd_info_list
            self.pairs = pairs

            if params['save_svd_info']:
                io_wrap.save_pickle(self.svd_info_list, params['svd_file'])
        else:
            # The first correlation and svd has been skipped.
            # This means you already have the modes so you can just load
            # them from file.
            self.svd_info_list = io_wrap.load_pickle(params['svd_file'])
            # Set the svd info to the pairs.
            for i in range(0, len(pairs)):
                svd_info = self.svd_info_list[i]
                pairs[i].vals = svd_info[0]
                pairs[i].all_modes1 = svd_info[1]
                pairs[i].all_modes2 = svd_info[2]
                n_modes = params['modes']
                pairs[i].modes1 = svd_info[1][:n_modes]
                pairs[i].modes2 = svd_info[2][:n_modes]

            self.pairs = pairs

        # Subtract foregrounds.
        for pair_index in range(0, len(pairs)):
            pairs[pair_index].subtract_frequency_modes(pairs[pair_index].modes1,
                pairs[pair_index].modes2)

        # Save cleaned clean maps, cleaned noises, and modes.
        self.save_data(save_maps=params['save_maps'],
                       save_noises=params['save_noises'],
                       save_modes=params['save_modes'])

        # Finish if this was just first pass.
        if params['first_pass_only']:
            self.pairs = pairs
            return

        # Correlate the cleaned maps.
        # Here we could calculate the power spectrum instead eventually.
        runlist = [(pairs[pair_index],
                    params['output_root'],
                    pair_index, True) for
                    pair_index in range(0, num_map_pairs)]

        pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
        pool.map(multiproc, runlist)

        print "Loading map pairs back into program."
        file_name = params['output_root']
        file_name += "map_pair_for_freq_slices_corr_"

        temp_pair_list = []
        for count in range(0, num_map_pairs):
            print "Loading correlation for pair %d" % (count)
            pickle_handle = open(file_name + str(count) + ".pkl", "r")
            correlate_results = cPickle.load(pickle_handle)
            pairs[count].corr = correlate_results[0]
            pairs[count].counts = correlate_results[1]
            temp_pair_list.append(pairs[count])
            pickle_handle.close()

        self.pairs = copy.deepcopy(temp_pair_list)

        # Get the average correlation and its standard deviation.
        corr_list = []
        for pair in self.pairs:
            corr_list.append(pair.corr)

        self.corr_final, self.corr_std = cf.get_corr_and_std_3d(corr_list)

        if params['pickle_slices']:
            pickle_slices(self)

        return
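
# A stripped-down sketch of the multiprocessing pattern used above: each
# worker writes its result to a per-pair pickle on disk and the parent
# loads the files back, because mutations made inside a worker process are
# not visible in the parent's copies of the pairs.  heavy_correlation is a
# hypothetical stand-in for the real per-pair correlation.
import multiprocessing
import pickle

def heavy_correlation(pair):
    return sum(pair)  # stand-in for the expensive correlation

def worker(args):
    pair, out_root, index = args
    with open("%spair_%d.pkl" % (out_root, index), "wb") as fh:
        pickle.dump(heavy_correlation(pair), fh)

if __name__ == '__main__':
    runlist = [((i, i + 1), "./", i) for i in range(4)]
    pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
    pool.map(worker, runlist)
    results = []
    for i in range(4):
        with open("./pair_%d.pkl" % i, "rb") as fh:
            results.append(pickle.load(fh))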
    def execute(self, nprocesses=1) :
        
        params = self.params
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params, params['output_root'] + 'params.ini',
                               prefix=prefix)
        guppi_result = params['Guppi_test']
        output_root = params['output_root']
        output_end = params['output_end']
        RM_dir = params['RM_dir']
        file_name = params['file_middles'][0].split('/')[1]
#        print file_name
        sess = file_name.split('_')[0]
#        print sess

        self.file_num = len(params['file_middles']) # getting a variable for number of calibrator files being used

# Need to remove count for calibrator files that are not the right size.
        session_nums = sp.zeros(self.file_num)
        c = 0
        for file_middle in params['file_middles'] :
            input_fname = (params['input_root'] + file_middle + 
                           params['input_end'])
            Reader = core.fitsGBT.Reader(input_fname)
            n_scans = len(Reader.scan_set)
            Len_set = Reader.read(0,0,force_tuple=True)
#            session_nums[c] = file_middle.split('_')[0]
#            print session_nums[c]
            for Data in Len_set :
                freq_num = Data.dims[3] # Setting the frequency binning to match whatever it's been set to. 
            if guppi_result == True : 
                if n_scans != 2 :
                    self.file_num -=1
            elif guppi_result == False :
                if n_scans != 4 :
                    self.file_num -=1
            c+=1

# Need to know the general frequency binning (going to assume that it's 200 for guppi, 260 for spectrometer, aka 1 MHz binning)
#        if guppi_result == True :
#            freq_num = 200 
        if guppi_result == False :
#            freq_num = 260    
            self.file_num *= 2 #because there are two sets of scans per file for spectrometer, need to double the number of values


        self.function = sp.zeros(4*self.file_num) #setting a variable of the right size for peval to use
        self.theta = sp.zeros(4*self.file_num) #setting a variable for parallactic angle in radians
        self.RM = sp.zeros(4*self.file_num) #setting a variable for Rotation Measure in Rad/m^2
        self.d = sp.zeros((4*self.file_num,freq_num)) #setting a variable for measured values
        
# Loop over files to process.      
        k=0
        for file_middle in params['file_middles'] :
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])
# Read in the data, and loop over data blocks.
            Reader = core.fitsGBT.Reader(input_fname)
            n_IFs = len(Reader.IF_set) # Should be 1 given that we've stitched windows for the spectrometer or by default in guppi
            n_scans = len(Reader.scan_set) #Should be 4 for the spectrometer, 2 for guppi
#            print n_scans
            OnBlocks = Reader.read(range(0,n_scans,2),0,force_tuple=True)
            OffBlocks = Reader.read(range(1,n_scans,2),0,force_tuple=True)
     
# force_tuple=True makes the output of Reader.read a tuple even if there is only one Block to return.
            Blocks = Reader.read(params['scans'], params['IFs'],
                                 force_tuple=True)

# Setting labels for indices for later
            on_ind = 0
            off_ind = 1
            XX_ind = 0
            YY_ind = 3
            XY_ind = 1
            YX_ind = 2
            
#Calculating Parallactic angles for the cal file
            PA = sp.zeros(n_scans)
            RM = sp.zeros(n_scans)
            m = 0           
            for Data in Blocks:
#                Comp_Time = 0.0
                freq_len = Data.dims[3]
                time_len = Data.dims[0]
#                print time_len
                Data.calc_freq()
                freq_val = Data.freq
                freq_val = freq_val/1000000       
                Data.calc_PA()
                PA[m] = ma.mean(Data.PA)  
#Include RM stuff in the code:
#                Full_date = Data.field['DATE-OBS'][Data.dims[0]/2]
#                print Full_date
#                Date = Full_date.split('T')[0]
#                Year = Date.split('-')[0]
#                Month = Date.split('-')[1]
#                Day = Date.split('-')[2]
#                Full_time = Full_date.split('T')[1]
#                Hour = Full_time.split(':')[0]
#                print Hour
#                Min = Full_time.split(':')[1]
#                Sec = Full_time.split(':')[2]
#                if int(Min)<=15:
#                    Comp_Time = float(Hour) +0.0
#                elif int(Min)<=45:
#                    Comp_Time = float(Hour) + 0.5
#                else :
#                    Comp_Time = float(Hour) + 1
#                print str(Comp_Time)
#                print '---'
#                RM_file_name = RM_dir + Year+Month+Day+'_RM.txt'
#                RM_data = np.loadtxt(RM_file_name)     
#                RA_RM = sp.zeros(len(RM_data[:,0]))
#                DEC_RM = sp.zeros(len(RM_data[:,0]))
#                for i in range(0, len(RM_data[:,0])):
#                    RM_Hr = int(RM_data[i,0])
#                    print RM_Hr
#                    if RM_data[i,0]%1 == 0 :
#                        RM_Min = '00'
#                        minutes = 0.0
#                    else:
#                        RM_MIN = '30'
#                        minutes = 0.5
#                    Test = float(RM_Hr) + minutes
#                    print Test
#                    if str(Comp_Time) == str(Test): 
#                        UT_RM = Date+'T'+str(RM_Hr)+':'+RM_Min+':00.00'
#                        EL_RM = RM_data[i,2]
#                        AZ_RM = RM_data[i,1]
#                        print EL_RM, AZ_RM
#                        RA_RM[i], DEC_RM[i] = utils.elaz2radecGBT(EL_RM,AZ_RM,UT_RM)               
#                        print RA_RM[i], DEC_RM[i]
#                RA = ma.mean(Data.field['CRVAL2'])
#                print RA
#                print ma.mean(RA)
#                DEC = ma.mean(Data.field['CRVAL3'])
#                print RA, DEC
#                print ma.mean(DEC)
#                print '_____________'
#                print RA_RM, DEC_RM
#                valid = []
#                for i in range(0,len(RA_RM)):
#                    if RA_RM[i] != 0:
#                        if abs(RA-RA_RM[i])<=10.0 :
#                            print RA_RM[i], DEC_RM[i]
#                            if abs(DEC-DEC_RM[i])<10.0:
#                                print RA_RM[i], DEC_RM[i]
##                                RM[m]=RM_data[i,3]
#                                valid.append(i)
#                print valid
#                RA_M=10.0
#                DEC_M=10.0
#                for j in range(0,len(valid)):
#                    if abs(RA-RA_RM[valid[j]])<RA_M:
#                        if abs(DEC-DEC_RM[valid[j]])<DEC_M:
#                            RM[m] = RM_data[valid[j],3]
                m+=1 
# Now have a table of RMs for each scan. 
                
            
#            print time_len
#            print freq_len
#            print m
            print RM
#Building the measured data into arrays (guppi version)         
            if guppi_result == True : 
                if n_scans == 2 : 
                    self.theta[k] = ma.mean(PA)
                    self.theta[k+1] = ma.mean(PA)
                    self.theta[k+2] = ma.mean(PA)
                    self.theta[k+3] = ma.mean(PA)
                    self.RM[k] = ma.mean(RM)
                    self.RM[k+1] = ma.mean(RM)
                    self.RM[k+2] = ma.mean(RM)
                    self.RM[k+3] = ma.mean(RM)
#                    print self.RM

#for if I want to do the difference of medians
                    S_med_calon_src = sp.zeros((freq_len,4))
                    S_med_caloff_src = sp.zeros((freq_len,4))
                    S_med_calon = sp.zeros((freq_len,4))
                    S_med_caloff = sp.zeros((freq_len,4))

#arrays built without taking median
                    for Data in OnBlocks: 
                        S_src = Data.data
#                    print len(S_src)

                    for Data in OffBlocks:
                        S_offsrc = Data.data
#                    print len(S_offsrc)
 
#arrays built taking median
                    for Data in OnBlocks:
                        S_med_caloff_src[:,0] = ma.median(Data.data[:,XX_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,1] = ma.median(Data.data[:,XY_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,2] = ma.median(Data.data[:,YX_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,3] = ma.median(Data.data[:,YY_ind,off_ind,:],axis=0)

                        S_med_calon_src[:,0] = ma.median(Data.data[:,XX_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,1] = ma.median(Data.data[:,XY_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,2] = ma.median(Data.data[:,YX_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,3] = ma.median(Data.data[:,YY_ind,on_ind,:],axis=0)
                    
                    for Data in OffBlocks:
                        S_med_caloff[:,0] = ma.median(Data.data[:,XX_ind,off_ind,:],axis=0)
                        S_med_caloff[:,1] = ma.median(Data.data[:,XY_ind,off_ind,:],axis=0)
                        S_med_caloff[:,2] = ma.median(Data.data[:,YX_ind,off_ind,:],axis=0)
                        S_med_caloff[:,3] = ma.median(Data.data[:,YY_ind,off_ind,:],axis=0)
 
                        S_med_calon[:,0] = ma.median(Data.data[:,XX_ind,on_ind,:],axis=0)
                        S_med_calon[:,1] = ma.median(Data.data[:,XY_ind,on_ind,:],axis=0)
                        S_med_calon[:,2] = ma.median(Data.data[:,YX_ind,on_ind,:],axis=0)
                        S_med_calon[:,3] = ma.median(Data.data[:,YY_ind,on_ind,:],axis=0)
 
#Final input if we already took median
                    self.d[k,:] = 0.5*(S_med_calon_src[:,0]+S_med_caloff_src[:,0]-S_med_calon[:,0]-S_med_caloff[:,0])
                    self.d[k+1,:] = 0.5*(S_med_calon_src[:,1]+S_med_caloff_src[:,1]-S_med_calon[:,1]-S_med_caloff[:,1])
                    self.d[k+2,:] = 0.5*(S_med_calon_src[:,2]+S_med_caloff_src[:,2]-S_med_calon[:,2]-S_med_caloff[:,2])
                    self.d[k+3,:] = 0.5*(S_med_calon_src[:,3]+S_med_caloff_src[:,3]-S_med_calon[:,3]-S_med_caloff[:,3])

#Final input if we did not yet take median                     
#                    print 0.5*(S_src[:,XX_ind,1,:]+S_src[:,XX_ind,0,:]-S_offsrc[:,XX_ind,1,:]-S_offsrc[:,XX_ind,0,:])
#                    self.d[k,:] = ma.mean(0.5*(S_src[:,XX_ind,1,:]+S_src[:,XX_ind,0,:]-S_offsrc[:,XX_ind,1,:]-S_offsrc[:,XX_ind,0,:]),axis=0)
#                    self.d[k+1,:] = ma.mean(0.5*(S_src[:,XY_ind,1,:]+S_src[:,XY_ind,0,:]-S_offsrc[:,XY_ind,1,:]-S_offsrc[:,XY_ind,0,:]),axis=0)
#                    self.d[k+2,:] =ma.mean(0.5*(S_src[:,YX_ind,1,:]+S_src[:,YX_ind,0,:]-S_offsrc[:,YX_ind,1,:]-S_offsrc[:,YX_ind,0,:]),axis=0)
#                    self.d[k+3,:] =ma.mean(0.5*(S_src[:,YY_ind,1,:]+S_src[:,YY_ind,0,:]-S_offsrc[:,YY_ind,1,:]-S_offsrc[:,YY_ind,0,:]),axis=0)             
                    k+=4

        for a in range(0,4*self.file_num):
            for b in range(0,freq_num):
#                print self.d[a,b]
                if self.d[a,b] > 1000 :
                   self.d[a,b] = 1000

        # There are 2 parameters in this version: p[0] is the XX gain and p[1] is the YY gain.
        p0 = [1,1] # guessed preliminary values
        error = sp.ones(4*self.file_num)
        #Note that error can be used to weight the equations if not all set to one.

        p_val_out = sp.zeros((freq_len, 3))
 #       p_err_out = sp.zeros((freq_len, 17))
     
        for f in range(0,freq_len):   
            plsq = leastsq(self.residuals,p0,args=(error,f,freq_val),full_output=0, maxfev=5000)
            pval = plsq[0] # this is the 1-d array of results

            p_val_out[f,0] = freq_val[f]
            p_val_out[f,1] = pval[0]
            p_val_out[f,2] = pval[1]

#        sess_num = int(session_nums[0])
#        print sess_num
#        np.savetxt(output_root+str(sess_num)+'_flux_mueller_matrix_calc'+output_end, p_val_out, delimiter = ' ')
        out_path = output_root+sess+'_diff_gain_calc'+output_end
        np.savetxt(out_path,p_val_out,delimiter = ' ')
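
# A toy version (hypothetical model and names; the real self.residuals
# compares self.d against the calibrator flux model) of how leastsq is
# driven above, one frequency channel at a time, with p = [XX_gain, YY_gain].
import numpy as np
from scipy.optimize import leastsq

def make_residuals(d_xx, d_yy, model_xx, model_yy):
    def residuals(p, error, f):
        # Two residuals per channel, one for each gain parameter.
        return np.array([d_xx[f] - p[0] * model_xx[f],
                         d_yy[f] - p[1] * model_yy[f]]) / error
    return residuals

d_xx = np.array([10.0, 12.0])
d_yy = np.array([11.0, 13.0])
model = np.array([5.0, 6.0])
error = np.ones(2)
residuals = make_residuals(d_xx, d_yy, model, model)
for f in range(2):
    gains = leastsq(residuals, [1.0, 1.0], args=(error, f), maxfev=5000)[0]
    # gains is roughly [d_xx[f]/model[f], d_yy[f]/model[f]]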
Example #36
0
    def full_calculation(self, chan_modes_subtract=None, n_poly_subtract=0):

        # Some set up.
        params = self.params
        deconvolve = params['deconvolve']
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=prefix)
        self.n_time = params["n_time_bins"]
        n_files = len(params["file_middles"])
        # Loop over files to process.
        first_iteration = True
        for file_middle in params['file_middles']:
            # Get the data.
            full_data, mask, this_dt, full_mean = self.get_data(file_middle)
            if first_iteration:
                self.n_time = full_data.shape[0]
                n_chan = full_data.shape[-1]
                dt = this_dt
            elif not sp.allclose(dt, this_dt, rtol=0.001):
                msg = "Files have different time samplings."
                raise ce.DataError(msg)
            # Subtract out any channel modes passed in.
            if not (chan_modes_subtract is None):
                for v in chan_modes_subtract:
                    full_data -= v * sp.sum(v * full_data, -1)[:, None]
            # Subtract out polynomials from each channel if desired.
            if first_iteration:
                # Generate basis polynomials.
                basis_poly = sp.empty((n_poly_subtract, self.n_time))
                time_scaled = ((sp.arange(self.n_time, dtype=float) * 2 -
                                self.n_time + 1.0) / self.n_time)
                for ii in range(n_poly_subtract):
                    #tmp_poly = scipy.special.eval_chebyu(ii, time_scaled)
                    tmp_poly = sp.cos(sp.pi * ii * time_scaled)
                    tmp_poly *= 1.0 / sp.sqrt(sp.sum(tmp_poly**2))
                    basis_poly[ii, :] = tmp_poly
                # Allocate memory to hold the amplitude spectrum.
                poly_spectrum = sp.zeros((n_poly_subtract, n_chan),
                                         dtype=float)
            # Fit for the polynomials in each channel.
            for ii in range(n_chan):
                weighted_poly = basis_poly * mask[:, 0, 0, ii]
                poly_corr = sp.sum(full_data[:, 0, 0, ii] * basis_poly[:, :],
                                   -1)
                poly_covar = sp.sum(
                    weighted_poly[:, None, :] * basis_poly[None, :, :], -1)
                if n_poly_subtract:
                    poly_amps = linalg.solve(poly_covar,
                                             poly_corr,
                                             sym_pos=True,
                                             overwrite_a=True,
                                             overwrite_b=True)
                    poly_spectrum[:, ii] += poly_amps**2
            # Calculate the raw power spectrum.
            power_mat, window_function = calculate_full_power_mat(
                full_data, mask, deconvolve=deconvolve)

            # Get rid of the extra cal and polarization axes.
            power_mat = power_mat[:, 0, 0, :, :]
            window_function = window_function[:, 0, 0, :, :]
            full_mean = full_mean[0, 0, :]
            # TODO: Figure out a better way to deal with this (only drop
            # affected frequencies).
            #if sp.any(sp.allclose(mask[:,0,0,:], 0.0, 0)):
            #    n_files -= 1
            #    continue
            # Format the power spectrum.
            power_mat = prune_power(power_mat, 0)
            power_mat = make_power_physical_units(power_mat, this_dt)
            # TODO In the future the thermal expectation could include
            # polarization factors (be 'I' aware) and cal factors.
            thermal_expectation = (full_mean / sp.sqrt(this_dt) /
                                   sp.sqrt(abs(self.chan_width)) /
                                   sp.sqrt(1.0 / 2.0 / this_dt))
            if params['norm_to_thermal']:
                power_mat /= (thermal_expectation[:, None] *
                              thermal_expectation[None, :])
            # Combine across files.
            if first_iteration:
                total_power_mat = power_mat
                total_thermal_expectation = thermal_expectation
            else:
                total_power_mat += power_mat
                total_thermal_expectation += thermal_expectation
            first_iteration = False
        if not hasattr(self, 'frequency'):
            self.frequency = ps_freq_axis(dt, self.n_time)
        if n_files > 0:
            total_power_mat /= n_files
            total_thermal_expectation /= n_files
            poly_spectrum /= n_files
        if n_poly_subtract:
            return total_power_mat, total_thermal_expectation, poly_spectrum
        else:
            return total_power_mat, total_thermal_expectation
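
# Sketch (numpy/scipy only, single-channel (n_time,) arrays assumed instead
# of the full (time, pol, cal, chan) blocks) of the masked polynomial fit
# above: build a normalized cosine basis over time, then solve the normal
# equations (B W B^T) a = B d for the amplitudes of one channel.
import numpy as np
from scipy import linalg

def fit_poly_amps(chan_data, chan_mask, n_poly):
    """chan_data, chan_mask: (n_time,) arrays for a single channel."""
    n_time = chan_data.shape[0]
    t = (np.arange(n_time, dtype=float) * 2 - n_time + 1.0) / n_time
    basis = np.cos(np.pi * np.arange(n_poly)[:, None] * t[None, :])
    basis /= np.sqrt(np.sum(basis**2, -1))[:, None]  # normalize each mode
    corr = np.sum(chan_data * basis, -1)
    covar = np.sum((basis * chan_mask)[:, None, :] * basis[None, :, :], -1)
    return linalg.solve(covar, corr)  # amplitudes, length n_poly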
Example #37
0
    def execute(self, nprocesses=1):

        params = self.params
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=prefix)
        guppi_result = params['Guppi_test']
        output_root = params['output_root']
        output_end = params['output_end']

        self.file_num = len(
            params['file_middles']
        )  # getting a variable for number of calibrator files being used

        # Need to remove count for calibrator files that are not the right size.
        session_nums = sp.zeros(self.file_num)
        c = 0
        for file_middle in params['file_middles']:
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])
            Reader = core.fitsGBT.Reader(input_fname)
            n_scans = len(Reader.scan_set)
            Len_set = Reader.read(0, 0, force_tuple=True)
            #            session_nums[c] = file_middle.split('_')[0]
            #            print session_nums[c]
            for Data in Len_set:
                freq_num = Data.dims[
                    3]  # Setting the frequency binning to match whatever it's been set to.
            if guppi_result == True:
                if n_scans != 2:
                    self.file_num -= 1
            elif guppi_result == False:
                if n_scans != 4:
                    self.file_num -= 1
            c += 1

# Need to know the general frequency binning (going to assume that it's 200 for guppi, 260 for spectrometer, aka 1 MHz binning)
#        if guppi_result == True :
#            freq_num = 200
        if guppi_result == False:
            #            freq_num = 260
            self.file_num *= 2  #because there are two sets of scans per file for spectrometer, need to double the number of values

        self.function = sp.zeros(
            4 * self.file_num
        )  #setting a variable of the right size for peval to use
        self.theta = sp.zeros(
            4 * self.file_num
        )  #setting a variable for parallactic angle in radians
        self.d = sp.zeros((4 * self.file_num,
                           freq_num))  #setting a variable for measured values

        # Loop over files to process.
        k = 0
        for file_middle in params['file_middles']:
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])
            # Read in the data, and loop over data blocks.
            Reader = core.fitsGBT.Reader(input_fname)
            n_IFs = len(
                Reader.IF_set
            )  # Should be 1 given that we've stitched windows for the spectrometer or by default in guppi
            n_scans = len(Reader.scan_set
                          )  #Should be 4 for the spectrometer, 2 for guppi
            OnBlocks = Reader.read(range(0, n_scans, 2), 0, force_tuple=True)
            OffBlocks = Reader.read(range(1, n_scans, 2), 0, force_tuple=True)
            # force_tuple=True makes the output of Reader.read a tuple even if there is only one Block to return.
            Blocks = Reader.read(params['scans'],
                                 params['IFs'],
                                 force_tuple=True)

            # Setting labels for indices for later
            on_ind = 0
            off_ind = 1
            I_ind = 0
            Q_ind = 1
            U_ind = 2
            V_ind = 3

            #Calculating Parallactic angles for the cal file
            PA = sp.zeros(n_scans)
            m = 0
            for Data in Blocks:
                freq_len = Data.dims[3]
                Data.calc_freq()
                freq_val = Data.freq
                freq_val = freq_val / 1000000
                Data.calc_PA()
                PA[m] = ma.mean(Data.PA)
                m += 1

# Going to skip the non guppi version for now.
#            if guppi_result == False :
#                if n_scans == 4 : # To make sure that we don't use incomplete data
#                    self.theta[k] = 0.5*(PA[0]+PA[1]) # the average between the on and off values
#                    self.theta[k+1] =0.5*(PA[0]+PA[1])
#                    self.theta[k+2] = 0.5*(PA[0]+PA[1])
#                    self.theta[k+3] = 0.5*(PA[2]+PA[3])
#                    self.theta[k+4] = 0.5*(PA[2]+PA[3])
#                    self.theta[k+5] = 0.5*(PA[2]+PA[3])

#                    S_med_on = sp.zeros((2,freq_len,4))
#                    S_med = sp.zeros((2,freq_len,4))

#                    i=0
#                    for Data in OnBlocks :
#                        S_med_on[i,:,0] = ma.median(Data.data[:,I_ind,off_ind,:],axis=0)
#                        S_med_on[i,:,1] = ma.median(Data.data[:,Q_ind,off_ind,:],axis=0)
#                        S_med_on[i,:,2] = ma.median(Data.data[:,U_ind,off_ind,:],axis=0)
#                        S_med_on[i,:,3] = ma.median(Data.data[:,V_ind,off_ind,:],axis=0)
#                        i+=1

#                    j=0
#                    for Data in OffBlocks :
#                        S_med[j,:,0] = ma.median(Data.data[:,I_ind,off_ind,:],axis=0)
#                        S_med[j,:,1] = ma.median(Data.data[:,Q_ind,off_ind,:],axis=0)
#                        S_med[j,:,2] = ma.median(Data.data[:,U_ind,off_ind,:],axis=0)
#                        S_med[j,:,3] = ma.median(Data.data[:,V_ind,off_ind,:],axis=0)
#                        j+=1

#                    I_onoff_1 = S_med_on[0,:,0]-S_med[0,:,0] # for first set of on and off scans
#                    I_onoff_2 = S_med_on[1,:,0]-S_med[1,:,0] # for second set of on and off scans

# Setting the measured stokes values for each file (to be used in the least squares fit)
#                    d[k,:] = (S_med_on[0,:,1]-S_med[0,:,1])/I_onoff_1
#                    d[k+1,:] = (S_med_on[0,:,2]-S_med[0,:,2])/I_onoff_1
#                    d[k+2,:] = (S_med_on[0,:,3]-S_med[0,:,3])/I_onoff_1
#                    d[k+3,:] = (S_med_on[1,:,1]-S_med[1,:,1])/I_onoff_2
#                    d[k+4,:] = (S_med_on[1,:,2]-S_med[1,:,2])/I_onoff_2
#                    d[k+5,:] = (S_med_on[1,:,3]-S_med[1,:,3])/I_onoff_2
#                    k+=6

            if guppi_result == True:  #This is the same as above only there is a single set of on and off scans in this case.
                if n_scans == 2:
                    self.theta[k] = ma.mean(PA)
                    self.theta[k + 1] = ma.mean(PA)
                    self.theta[k + 2] = ma.mean(PA)
                    self.theta[k + 3] = ma.mean(PA)

                    S_med_calon_src = sp.zeros((freq_len, 4))
                    S_med_caloff_src = sp.zeros((freq_len, 4))
                    S_med_calon = sp.zeros((freq_len, 4))
                    S_med_caloff = sp.zeros((freq_len, 4))

                    for Data in OnBlocks:
                        S_med_caloff_src[:,
                                         0] = ma.median(Data.data[:, I_ind,
                                                                  off_ind, :],
                                                        axis=0)
                        S_med_caloff_src[:,
                                         1] = ma.median(Data.data[:, Q_ind,
                                                                  off_ind, :],
                                                        axis=0)
                        S_med_caloff_src[:,
                                         2] = ma.median(Data.data[:, U_ind,
                                                                  off_ind, :],
                                                        axis=0)
                        S_med_caloff_src[:,
                                         3] = ma.median(Data.data[:, V_ind,
                                                                  off_ind, :],
                                                        axis=0)

                        S_med_calon_src[:, 0] = ma.median(Data.data[:, I_ind,
                                                                    on_ind, :],
                                                          axis=0)
                        S_med_calon_src[:, 1] = ma.median(Data.data[:, Q_ind,
                                                                    on_ind, :],
                                                          axis=0)
                        S_med_calon_src[:, 2] = ma.median(Data.data[:, U_ind,
                                                                    on_ind, :],
                                                          axis=0)
                        S_med_calon_src[:, 3] = ma.median(Data.data[:, V_ind,
                                                                    on_ind, :],
                                                          axis=0)

                    for Data in OffBlocks:
                        S_med_caloff[:, 0] = ma.median(Data.data[:, I_ind,
                                                                 off_ind, :],
                                                       axis=0)
                        S_med_caloff[:, 1] = ma.median(Data.data[:, Q_ind,
                                                                 off_ind, :],
                                                       axis=0)
                        S_med_caloff[:, 2] = ma.median(Data.data[:, U_ind,
                                                                 off_ind, :],
                                                       axis=0)
                        S_med_caloff[:, 3] = ma.median(Data.data[:, V_ind,
                                                                 off_ind, :],
                                                       axis=0)

                        S_med_calon[:, 0] = ma.median(Data.data[:, I_ind,
                                                                on_ind, :],
                                                      axis=0)
                        S_med_calon[:, 1] = ma.median(Data.data[:, Q_ind,
                                                                on_ind, :],
                                                      axis=0)
                        S_med_calon[:, 2] = ma.median(Data.data[:, U_ind,
                                                                on_ind, :],
                                                      axis=0)
                        S_med_calon[:, 3] = ma.median(Data.data[:, V_ind,
                                                                on_ind, :],
                                                      axis=0)

                    self.d[k, :] = 0.5 * (
                        S_med_calon_src[:, 0] + S_med_caloff_src[:, 0] -
                        S_med_calon[:, 0] - S_med_caloff[:, 0])
                    self.d[k + 1, :] = 0.5 * (
                        S_med_calon_src[:, 1] + S_med_caloff_src[:, 1] -
                        S_med_calon[:, 1] - S_med_caloff[:, 1])
                    self.d[k + 2, :] = 0.5 * (
                        S_med_calon_src[:, 2] + S_med_caloff_src[:, 2] -
                        S_med_calon[:, 2] - S_med_caloff[:, 2])
                    self.d[k + 3, :] = 0.5 * (
                        S_med_calon_src[:, 3] + S_med_caloff_src[:, 3] -
                        S_med_calon[:, 3] - S_med_caloff[:, 3])
                    k += 4

        for a in range(0, 4 * self.file_num):
            for b in range(0, freq_num):
                #                print self.d[a,b]
                if self.d[a, b] > 1000:
                    self.d[a, b] = 1000

        # The seven fitted parameters are, in order: deltaG[0], alpha[1], psi[2], phi[3], epsilon[4], chi[5], flux[6]; the parameter vector is p (p0 includes an eighth entry whose output is commented out below).
        p0 = [0.3, 90.0, 170.0, 10.0, 0.016, 0.00, 2.0,
              0.0]  # guessed preliminary values
        error = sp.ones(4 * self.file_num)
        #Note that error can be used to weight the equations if not all set to one.

        p_val_out = sp.zeros((freq_len, 9))
        #       p_err_out = sp.zeros((freq_len, 17))

        for f in range(0, freq_len):
            plsq = leastsq(self.residuals,
                           p0,
                           args=(error, f, freq_val),
                           full_output=0,
                           maxfev=5000)
            pval = plsq[0]  # this is the 1-d array of results
            #            perr = plsq[1] # this is a 2d array representing the estimated covariance of the results. - Not working properly.

            #            Mueller = sp.mat([[pval[0],pval[1],pval[2],pval[3]],[pval[4],pval[5],pval[6],pval[7]],[pval[8],pval[9],pval[10],pval[11]],[pval[12],pval[13],pval[14],pval[15]]])
            #            Mueller = Mueller.I
            #            print Mueller
            pval[1] = (pval[1] + 180) % 360 - 180
            pval[2] = (pval[2] + 180) % 360 - 180
            pval[3] = (pval[3] + 180) % 360 - 180
            pval[5] = (pval[5] + 180) % 360 - 180
            #            pval[7]=(pval[7]+180)%360-180

            p_val_out[f, 0] = freq_val[f]
            p_val_out[f, 1] = pval[0]
            p_val_out[f, 2] = pval[1]
            p_val_out[f, 3] = pval[2]
            p_val_out[f, 4] = pval[3]
            p_val_out[f, 5] = pval[4]
            p_val_out[f, 6] = pval[5]
            p_val_out[f, 7] = pval[6]
#            p_val_out[f,8] = pval[7]

#        sess_num = int(session_nums[0])
#        print sess_num
#        np.savetxt(output_root+str(sess_num)+'_flux_mueller_matrix_calc'+output_end, p_val_out, delimiter = ' ')
        np.savetxt('mueller_params_calc.txt', p_val_out, delimiter=' ')
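
# The wrapping applied to the fitted angle parameters above, as a reusable
# helper: map an angle in degrees into the interval [-180, 180).
def wrap_degrees(angle):
    return (angle + 180.0) % 360.0 - 180.0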
Example #38
0
	def execute(self, nprocesses=1):
		params = self.params
		boxshape = params['boxshape']
		boxunit = params['boxunit']
		resultf = params['hr'][0]
		if len(params['last']) != 0:
			resultf = resultf + params['last'][0]
		resultf = resultf + '-' + params['hr'][1]
		if len(params['last']) != 0:
			resultf = resultf + params['last'][1]

		# Make parent directory and write parameter file.
		kiyopy.utils.mkparents(params['output_root'])
		parse_ini.write_params(params, params['output_root']+'params.ini',prefix='nl_' )
		in_root = params['input_root']
		out_root = params['output_root']
		cambin_root = params['camb_input_root']
		all_out_fname_list = []
		all_in_fname_list = []
		
		#### Process ####
		kiyopy.utils.mkparents(params['output_root'])

		PKcamb_fname = cambin_root + 'PKcamb.npy'
		PKcamb = algebra.load(PKcamb_fname)

		N = len(params['boxunitlist'])
		yy = np.ndarray(shape=(N, 10))
		xx = np.ndarray(shape=(N, 10))
		for params['boxshape'], params['boxunit'], i\
			in zip(params['boxshapelist'], params['boxunitlist'], range(N)):
			params['plot'] = False
			parse_ini.write_params(params, 
				params['output_root']+'params.ini',prefix='wd_' )
			WindowF, k = \
				windowf.WindowFunctionMaker(params['output_root']+'params.ini',
				feedback=self.feedback).execute()
			if yy.shape[1] != WindowF.shape[0]:
				yy.resize((N,WindowF.shape[0]))
				xx.resize((N,WindowF.shape[0]))
			yy[i] =  WindowF.copy()
			xx[i] =  k.copy()

		def chisq(A, y, x, e):
			err = (y - windowfunction(x, A))**2/e**2
			return err
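		# Note: leastsq minimizes the sum of squares of the vector returned by
		# chisq, so returning squared residuals effectively minimizes a fourth
		# power; the conventional residual would be (y - windowfunction(x, A))/e.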

		non0 = yy[0].nonzero()
		y = yy[0].take(non0)[0][:-10]
		x = xx[0].take(non0)[0][:-10]
		non0 = yy[-1].nonzero()
		y = np.append(y, yy[-1].take(non0)[0][10:-4])
		x = np.append(x, xx[-1].take(non0)[0][10:-4])
		err = y.copy()*10.
		err[5:] = err[5:]*1.e-8

		print x.min(), x.max()
		ki = np.logspace(log10(0.01), log10(1.5), num=300)

		A1 = 1.
		A2 = 1.
		A3 = 1.8
		A0 = np.array([A1, A2, A3])
		A, status = leastsq(chisq, A0, args=(y, x, err), maxfev=20000)

		window = windowfunction(PKcamb[0], A)
		#boxinf = str(boxshape[0])+'x'\
		#	+str(boxshape[1])+'x'+str(boxshape[2])+'x'+str(boxunit)
		sp.save(out_root+'window_fit_'+resultf, window)

		CC = 1.
	#	CC = romberg(lambda k2: K(ki,k2)*k2*k2, PKcamb[0].min(), PKcamb[0].max())
#	#	CC = romberg(lambda k2: K(ki,k2)*k2*k2, 1.e-10, 1.e10)
		
		print A
		aaa = A[1]*1.e-3
		bbb = A[2]*1.e-3
		if bbb**4<4*aaa**4:
			CC = 1./(pi*bbb*(2.-(bbb/aaa)**2)**(0.5))
			def g(x):
				return atan((bbb**4 + 2.*aaa**2*x**2)/
					(bbb**2*(4.*aaa**4-bbb**4)**0.5))
			def K(k1, k2):
				return CC/(k1*k2)*(g(k1+k2)-g(k1-k2))
		else:
			mu = bbb**2*(bbb**4-4.*aaa**4)**0.5
			CC = aaa/(pi*2**0.5*((bbb**4+mu)**0.5-(bbb**4-mu)**0.5))
			def g(x):
				return (mu+bbb**4+2*aaa**2*x**2)/(mu-bbb**4-2*aaa**2*x**2)
			def K(k1, k2):
				return CC/(k1*k2)*log(g(k1-k2)/g(k1+k2))
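		# Both branches define a symmetric kernel, K(k1, k2) = K(k2, k1),
		# since g depends on its argument only through x**2.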

		#def K(k1,k2):
		#	uplim = k1+k2
		#	downlim = np.fabs(k1-k2)
		#	C = 8*pi**2/(k1*k2)*CC
		#	return C*romberg(lambda Q: windowfunction(Q,A)*Q, downlim, uplim)


	#	print CC

		P = interp1d(PKcamb[0], PKcamb[1], kind='cubic')

		#print PKcamb[0].min(), PKcamb[0].max()

		Pnl = np.zeros(len(ki))
		Pnl_err = np.zeros(len(ki))
		for i in range(len(ki)):
			#Pnl[i] = romberg(lambda k1: k1**2*P(k1)*K(k1,ki[i]),
			Pnl[i], Pnl_err[i] = quad(lambda k1: k1**2*P(k1)*K(k1,ki[i]),
				PKcamb[0].min(), PKcamb[0].max(), limit=200)
		#Pnl = sp.load(out_root+'nonlPK_'+resultf+'.npy')	

		CCC = romberg(lambda k1: k1**2*K(k1, 0.01), ki.min(), ki.max())
		print CCC
		#Pnl = Pnl/CCC

		OmegaHI = params['OmegaHI']
		Omegam = params['Omegam']
		OmegaL = params['OmegaL']
		z = params['z']
		a3 = (1+z)**(-3)
		Tb = 0.3e-3 * (OmegaHI/1.e-3) * ((Omegam + a3*OmegaL)/0.29)**(-0.5)\
			* ((1.+z)/2.5)**0.5

		#Pnl = Pnl*(Tb**2)
		#PKcamb[1] = PKcamb[1]*(Tb**2)

		#print Pnl

		sp.save(out_root+'nonlPK_'+resultf, Pnl)
		sp.save(out_root+'k_nonlPK_'+resultf, ki)

		if self.plot==True:
			#plt.figure(figsize=(6,6))
			##print k
			#plt.subplot('111')

			##kj = sp.linspace(0,PKcamb[0][-1], num=500)
			##KI = np.zeros(500)
			##for j in sp.linspace(ki.min(),ki.max(), num=20):
			##	for i in range(500):
			##		KI[i] = K(j, kj[i])
			##	#plt.plot(kj, KI, label=str(ki))
			##	plt.plot(kj, KI, 'r-', linewidth=1)

			#plt.semilogy()
			##plt.loglog()
			##plt.ylim(ymin=1.e-0)	
			#plt.xlim(xmin=0, xmax=ki.max())
			#plt.title('Coupling Kernels')
			#plt.xlabel('$k$')
			#plt.ylabel('$K(k, k_i)$')
			##plt.legend()


			#plt.savefig(out_root+'Ki.eps', format='eps')

			plt.figure(figsize=(8,4))
			plt.subplot('111')
			#plt.plot(PKcamb[0], window, 'b--', linewidth=1,
			#	label='Fitted Window Function')
			plt.plot(PKcamb[0], PKcamb[1], 'g-', linewidth=1,
				label='Camb Power Spectrum')
			plt.plot(ki, Pnl, 'r-', linewidth=1, 
				label='Power Spectrum')
			plt.loglog()
			plt.xlim(xmin=ki.min(), xmax=ki.max())

			plt.legend()
			plt.savefig(out_root+'nonlPK.eps', format='eps')

			plt.show()
			#print 'Finished @_@ '
		return PKcamb
Example #39
    def execute(self, nprocesses=1):
        """Function that acctually does the work.

        The nprocesses parameter does not do anything yet.  It is just there
        for compatibility with the pipeline manager.
        """
        params = self.params
        kiyopy.utils.mkparents(params["output_root"])
        parse_ini.write_params(params, params["output_root"] + "params.ini", prefix="mm_")
        # Rename some commonly used parameters.
        map_shape = params["map_shape"]
        spacing = params["pixel_spacing"]
        algorithm = params["noise_model"]
        noise_root = params["noise_parameters_input_root"]
        ra_spacing = -spacing / sp.cos(params["field_centre"][1] * sp.pi / 180.0)
        if not algorithm in ("grid", "diag_file", "disjoint_scans"):
            raise ValueError("Invalid noise model: " + algorithm)
        if len(params["IFs"]) != 1:
            raise ce.FileParameterTypeError("Can only process a single IF.")

        # Set up to iterate over the pol states.
        npol = 2  # This will be reset when we read the first data block.
        pol_ind = 0

        all_file_names = []

        while pol_ind < npol:
            # Flag for the first block processed (will allocate memory on the
            # first iteration).
            first_block = True
            # Loop over the files to process.
            try:
                for file_middle in params["file_middles"]:
                    input_fname = params["input_root"] + file_middle + params["input_end"]
                    # Read in the data, and loop over data blocks.
                    Reader = fitsGBT.Reader(input_fname, feedback=self.feedback)
                    Blocks = Reader.read(params["scans"], params["IFs"])

                    # Calculate the time variance at each frequency.  This will
                    # be used as weights in most algorithms.
                    if not algorithm == "grid":
                        if not noise_root == "None":
                            # We have measured variance.
                            noise_pars = sp.load(noise_root + file_middle + ".npy")
                            var = noise_pars[params["IFs"][0], pol_ind, 0, :]
                        else:
                            # We need to measure the variance.
                            var = tools.calc_time_var_file(Blocks, pol_ind, 0)
                            # Convert from masked array to array.
                            var = var.filled(9999.0)
                    else:
                        var = 1.0
                    weight = 1 / var

                    for Data in Blocks:
                        dims = Data.dims
                        # On first pass set up the map parameters.
                        if first_block:
                            shape = map_shape + (dims[-1],)
                            Data.calc_freq()
                            centre_freq = Data.freq[dims[-1] // 2]
                            delta_freq = Data.field["CDELT1"]
                            if pol_ind == 0:
                                # Figure out the length of the polarization
                                # loop.
                                npol = dims[1]
                                # Accumulate the data history.
                                history = hist.History(Data.history)
                            # Get the current polarization integer.
                            this_pol = Data.field["CRVAL4"][pol_ind]
                            # Check that we even want to make a dirty map for
                            # this polarization.
                            if (not utils.polint2str(this_pol) in params["polarizations"]) and params["polarizations"]:
                                # Break to the end of the polarization loop.
                                raise ce.NextIteration()
                            # Allocate memory for the map.
                            map_data = sp.zeros(shape, dtype=float)
                            map_data = algebra.make_vect(map_data, axis_names=("ra", "dec", "freq"))
                            # Allocate memory for the inverse map noise.
                            if algorithm in ("grid", "diag_file"):
                                noise_inv = sp.zeros(shape, dtype=float)
                                noise_inv = algebra.make_mat(
                                    noise_inv, axis_names=("ra", "dec", "freq"), row_axes=(0, 1, 2), col_axes=(0, 1, 2)
                                )
                            elif algorithm in ("disjoint_scans", "ds_grad"):
                                # At each frequency use full N^2 noise matrix,
                                # but assume each frequency has uncorrelated
                                # noise. This is a big matrix so make sure it
                                # is reasonable.
                                size = shape[0] ** 2 * shape[1] ** 2 * shape[2]
                                if size > 4e9:  # 16 GB of float32s.
                                    raise RuntimeError("Map size too big. Asked for a lot of memory.")
                                noise_inv = sp.zeros(shape[0:2] + shape, dtype=sp.float32)
                                noise_inv = algebra.make_mat(
                                    noise_inv,
                                    axis_names=("ra", "dec", "ra", "dec", "freq"),
                                    row_axes=(0, 1, 4),
                                    col_axes=(2, 3, 4),
                                )
                                # Allocate memory for temporary data. Hold the
                                # number of times each pixel in this scan is
                                # hit. Factor of 2 longer in time in case some
                                # scans are longer than first block (guppi).
                                pixel_hits = sp.empty((2 * dims[0], dims[-1]))
                            first_block = False
                        else:
                            if pol_ind == 0:
                                history.merge(Data)
                        # Figure out the pointing pixel index and the frequency
                        # indices.
                        Data.calc_pointing()
                        ra_inds = tools.calc_inds(Data.ra, params["field_centre"][0], shape[0], ra_spacing)
                        dec_inds = tools.calc_inds(
                            Data.dec, params["field_centre"][1], shape[1], params["pixel_spacing"]
                        )
                        data = Data.data[:, pol_ind, 0, :]
                        if algorithm in ("grid", "diag_file"):
                            add_data_2_map(data, ra_inds, dec_inds, map_data, noise_inv, weight)
                        elif algorithm in ("disjoint_scans",):
                            add_data_2_map(data - ma.mean(data, 0), ra_inds, dec_inds, map_data, None, weight)
                            pixel_hits[:] = 0
                            pixel_list = pixel_counts(data, ra_inds, dec_inds, pixel_hits, map_shape=shape[0:2])
                            add_scan_noise(pixel_list, pixel_hits, var, noise_inv)
                        # End Blocks for loop.
                    # End file name for loop.
                # Now write the dirty maps out for this polarization.
                # Use memmaps for this since we want to reorganize data
                # and write at the same time.
                # New maps will have the frequency axis as slowly varying, for
                # future efficiency.
                map_file_name = params["output_root"] + "dirty_map_" + utils.polint2str(this_pol) + ".npy"
                mfile = algebra.open_memmap(map_file_name, mode="w+", shape=(shape[2],) + shape[:2])
                map_mem = algebra.make_vect(mfile, axis_names=("freq", "ra", "dec"))
                # And the noise matrix.
                noise_file_name = params["output_root"] + "noise_inv_" + utils.polint2str(this_pol) + ".npy"
                if algorithm in ("disjoint_scans", "ds_grad"):
                    mfile = algebra.open_memmap(noise_file_name, mode="w+", shape=(shape[2],) + shape[:2] * 2)
                    noise_mem = algebra.make_mat(
                        mfile, axis_names=("freq", "ra", "dec", "ra", "dec"), row_axes=(0, 1, 2), col_axes=(0, 3, 4)
                    )
                else:
                    mfile = algebra.open_memmap(noise_file_name, mode="w+", shape=(shape[2],) + shape[:2])
                    noise_mem = algebra.make_mat(
                        mfile, axis_names=("freq", "ra", "dec"), row_axes=(0, 1, 2), col_axes=(0, 1, 2)
                    )
                # Give the data arrays axis information.
                map_mem.set_axis_info("freq", centre_freq, delta_freq)
                map_mem.set_axis_info("ra", params["field_centre"][0], ra_spacing)
                map_mem.set_axis_info("dec", params["field_centre"][1], params["pixel_spacing"])
                noise_mem.set_axis_info("freq", centre_freq, delta_freq)
                noise_mem.set_axis_info("ra", params["field_centre"][0], ra_spacing)
                noise_mem.set_axis_info("dec", params["field_centre"][1], params["pixel_spacing"])
                # Copy the data to the memory maps after rearranging.
                # rollaxis should return a view, so this should
                # be memory efficient.
                map_mem[...] = sp.rollaxis(map_data, -1)
                noise_mem[...] = sp.rollaxis(noise_inv, -1)

                # Free up all that memory and flush memory maps to file.
                del mfile, map_mem, noise_mem, map_data, noise_inv

                # Save the file names for the history.
                all_file_names.append(kiyopy.utils.abbreviate_file_path(map_file_name))
                all_file_names.append(kiyopy.utils.abbreviate_file_path(noise_file_name))
            except ce.NextIteration:
                pass
            pol_ind += 1
            # End polarization for loop.
        history.add("Made dirty map.", all_file_names)
        h_file_name = params["output_root"] + "history.hist"
        history.write(h_file_name)
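
add_data_2_map is called above but not defined in this snippet. The
following is only a sketch of what such a nearest-pixel accumulator could
look like in the diagonal-noise case; the name and signature are taken
from the call sites, and the body is an assumption:

    import numpy as np

    def add_data_2_map(data, ra_inds, dec_inds, map_data, noise_inv, weight):
        # Accumulate weighted time samples into their nearest map pixel.
        # data: (ntime, nfreq); ra_inds, dec_inds: (ntime,) pixel indices.
        for tt in range(data.shape[0]):
            ra, dec = ra_inds[tt], dec_inds[tt]
            if not (0 <= ra < map_data.shape[0] and
                    0 <= dec < map_data.shape[1]):
                continue  # pointing fell off the map
            map_data[ra, dec, :] += weight * data[tt, :]
            if noise_inv is not None:
                noise_inv[ra, dec, :] += weight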
Example #40
	def execute(self, nprocesses=1):
		
		comm = MPI.COMM_WORLD
		rank = comm.Get_rank()
		size = comm.Get_size()

		params = self.params
		resultf = params['hr'][0]
		if len(params['last']) != 0:
			resultf = resultf + params['last'][0]
		resultf = resultf + '-' + params['hr'][1]
		if len(params['last']) != 0:
			resultf = resultf + params['last'][1]

		# Make parent directory and write parameter file.
		parse_ini.write_params(params, params['output_root']+'params.ini',prefix='pk_')
		in_root = params['input_root']
		out_root = params['output_root']
		mid = params['mid']

		FKPweight = params['FKPweight']
		n_processes = params['processes']

		#### Process ####
		n_new = n_processes - 1
		n_map = params['jknumber']

		kbn = params['kbinNum']
		PK = np.zeros(shape=(n_map,kbn))

		self.q = mp.JoinableQueue()

		if n_new <= 0:
			for ii in range(n_map):
				self.process_map(ii, PK[ii])
		elif n_new > 32:
			raise ValueError('Process limit is 32')
		else:
			process_list = range(n_new)
			for ii in xrange(n_map + n_new):
				if ii >= n_new:
					PK[ii-n_new] = self.q.get()
					process_list[ii%n_new].join()
					if process_list[ii%n_new].exitcode != 0:
						raise RuntimeError("A thred faild with exit code"
							+ str(process_list[ii%n_new].exitcode))
				if ii < n_map:
					process_list[ii%n_new] = mp.Process(
						target=self.process_map, 
						args=(ii, ii%n_new))
					process_list[ii%n_new].start()

		if FKPweight:
			sp.save(params['output_root']+\
				'PKjk_fkp_' + resultf, PK)
		else:
			sp.save(params['output_root']+'PKjk_' + resultf, PK)
		#PKmean = sp.load(params['input_root'] + 'PK.npy')
		PKmean = PK.mean(axis=0)
		PK[:] = (PK[:]-PKmean)**2
		PKvar = np.sum(PK, axis=0)
		PKvar = PKvar*(params['jknumber']-1)/params['jknumber']
		PKvar = np.sqrt(PKvar)
		print PKvar
		if FKPweight:
			sp.save(params['output_root']+\
				'PKvar_fkp_' + resultf, PKvar)
		else:
			sp.save(params['output_root']+\
				'PKvar_' + resultf, PKvar)
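
The error bars above are the standard delete-one jackknife estimate,
sigma^2 = (n - 1)/n * sum_i (PK_i - PKmean)^2. A self-contained sketch of
the same computation:

    import numpy as np

    def jackknife_std(samples):
        # samples: (n_jack, n_kbins) array of delete-one power spectra.
        n = samples.shape[0]
        mean = samples.mean(axis=0)
        var = np.sum((samples - mean) ** 2, axis=0) * (n - 1.0) / n
        return np.sqrt(var)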
Example #41
def spectrum_arithmetic(inifile, outdir="./plots/"):
    r"""Perform the operations"""
    params_init = {
        "pwr_a_ini": None,
        "pwr_b_ini": None,
        "pwr_c_ini": None,
        "pwr_d_ini": None,
        "mul_a": "1.",
        "mul_b": "1.",
        "mul_c": "1.",
        "mul_d": "1.",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "sa_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_root = "%s/%s/" % (outdir, params["output_tag"])
    print output_root

    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    if not params["pwr_a_ini"]:
        print "ERROR: at least spectrum A must be given (and C for ratios)"
        return

    pwr_a_all = cp.wrap_batch_gbtpwrspec_data_run(params["pwr_a_ini"],
                                                  generate=False,
                                                  outdir=outdir)

    if params['pwr_b_ini']:
        pwr_b_all = cp.wrap_batch_gbtpwrspec_data_run(params["pwr_b_ini"],
                                                      generate=False,
                                                      outdir=outdir)

    if params['pwr_c_ini']:
        pwr_c_all = cp.wrap_batch_gbtpwrspec_data_run(params["pwr_c_ini"],
                                                      generate=False,
                                                      outdir=outdir)

    if params['pwr_d_ini']:
        pwr_d_all = cp.wrap_batch_gbtpwrspec_data_run(params["pwr_d_ini"],
                                                      generate=False,
                                                      outdir=outdir)

    for treatment in pwr_a_all:
        pwr_a = pwr_a_all[treatment]
        pwr_a = mul_pwrspec(pwr_a, params['mul_a'])
        pwr_numerator = copy.deepcopy(pwr_a)

        if params['pwr_b_ini']:
            pwr_b = pwr_b_all[treatment]
            pwr_b = mul_pwrspec(pwr_b, params['mul_b'])
            pwr_numerator = add_pwrspec(pwr_numerator, pwr_b)

        if params['pwr_c_ini']:
            pwr_c = pwr_c_all[treatment]
            pwr_c = mul_pwrspec(pwr_c, params['mul_c'])
            pwr_denominator = copy.deepcopy(pwr_c)

            if params['pwr_d_ini']:
                pwr_d = pwr_d_all[treatment]
                pwr_d = mul_pwrspec(pwr_d, params['mul_d'])
                pwr_denominator = add_pwrspec(pwr_denominator, pwr_d)

            pwr_final = divide_pwrspec(pwr_numerator, pwr_denominator)
        else:
            pwr_final = pwr_numerator

        filename = "%s/%s_%s.dat" % (output_root, params['output_tag'],
                                     treatment)
        outfile = open(filename, "w")
        for specdata in zip(pwr_final['bin_left'], pwr_final['bin_center'],
                            pwr_final['bin_right'], pwr_final['counts_histo'],
                            pwr_final['mean_1d'], pwr_final['std_1d']):
            outfile.write(("%10.15g " * 6 + "\n") % specdata)

        outfile.close()
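
For reference, a hypothetical parameter file for this function, assuming
parse_ini accepts Python-syntax files keyed by the "sa_" prefix used above
(all paths are made up):

    sa_pwr_a_ini = "./inis/pwr_a.ini"
    sa_pwr_c_ini = "./inis/pwr_c.ini"
    sa_mul_a = "1."
    sa_mul_c = "2."
    sa_output_tag = "a_over_2c"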
Example #42
params_init = {
    'output_root': '',
    'sim_physical_key': '',
    'sim_key': '',
    'sim_beam_key': '',
    'sim_beam_plus_fg_key': '',
    'sim_beam_plus_data_key': '',
    'sim_delta_key': '',
    'sim_beam_meansub_key': '',
    'sim_beam_conv_key': '',
    'sim_beam_meansubconv_key': '',
    'template_key': '',
    'weight_key': '',
    'pwrspec_scenario': '',
    'refinement': 2,
    'omega_HI': ''
}
prefix = 'sg_'

if __name__ == '__main__':
    params = parse_ini.parse(str(sys.argv[1]), params_init, prefix=prefix)
    print params

    datapath_db = data_paths.DataPath()
    output_root = datapath_db.fetch(params['output_root'])

    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    generate_sim(params, parallel=True, datapath_db=datapath_db)
    generate_aux_simset(params, datapath_db=datapath_db)
Example #43
    def execute(self):
        '''Clean the maps of foregrounds, save the results, and get the
        autocorrelation.'''

        params = self.params
        freq_list = sp.array(params['freq_list'], dtype=int)
        lags = sp.array(params['lags'])

        # Write parameter file.
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=prefix)

        # Get the map data from file as well as the noise inverse.
        if len(params['file_middles']) == 1:
            fmid_name = params['file_middles'][0]
            params['file_middles'] = (fmid_name, fmid_name)

        if len(params['file_middles']) >= 2:
            # Deal with multiple files.
            num_maps = len(params['file_middles'])
            maps = []
            noise_invs = []

            # Load all maps and noises once.
            for map_index in range(0, num_maps):
                map_file = (params['input_root'] +
                            params['file_middles'][map_index] +
                            params['input_end_map'])

                print "Loading map %d of %d." % (map_index + 1, num_maps)

                map_in = algebra.make_vect(algebra.load(map_file))

                maps.append(map_in)
                if not params["no_weights"]:
                    noise_file = (params['input_root'] +
                                  params['file_middles'][map_index] +
                                  params['input_end_noise'])

                    print "Loading noise %d of %d." % (map_index + 1, num_maps)

                    noise_inv = algebra.make_mat(
                        algebra.open_memmap(noise_file, mode='r'))

                    noise_inv = noise_inv.mat_diag()
                else:
                    noise_inv = algebra.ones_like(map_in)

                noise_invs.append(noise_inv)

            pairs = []
            # Make pairs with deepcopies to not make mutability mistakes.
            for map1_index in range(0, num_maps):
                for map2_index in range(0, num_maps):
                    if (map2_index > map1_index):
                        map1 = copy.deepcopy(maps[map1_index])
                        map2 = copy.deepcopy(maps[map2_index])
                        noise_inv1 = copy.deepcopy(noise_invs[map1_index])
                        noise_inv2 = copy.deepcopy(noise_invs[map2_index])

                        pair = map_pair.MapPair(map1, map2, noise_inv1,
                                                noise_inv2, freq_list)

                        pair.lags = lags
                        pair.params = params

                        # Keep track of the names of maps in pairs so
                        # it knows what to save later.
                        pair.set_names(params['file_middles'][map1_index],
                                       params['file_middles'][map2_index])
                        pairs.append(pair)

            num_map_pairs = len(pairs)
            print "%d map pairs created from %d maps." % (len(pairs), num_maps)

        # Hold a reference in self.
        self.pairs = pairs

        # Get maps/ noise inv ready for running.
        if params["convolve"]:
            for pair in pairs:
                pair.degrade_resolution()

        if params['factorizable_noise']:
            for pair in pairs:
                pair.make_noise_factorizable()

        if params['sub_weighted_mean']:
            for pair in pairs:
                pair.subtract_weighted_mean()

        self.pairs = pairs
        # Since correlating takes so long, if you already have the svds
        # you can skip this first correlation [since that's all it's really
        # for and it is the same no matter how many modes you want].
        # Note: map_pairs will not have anything saved in 'fore_corr' if you
        # skip this correlation.
        if not params['skip_fore_corr']:
            # Correlate the maps with multiprocessing. Note that the
            # correlations are saved to file separately then loaded in
            # together because that's (one way) how multiprocessing works.
            fore_pairs = []
            processes_list = []
            for pair_index in range(0, num_map_pairs):
                # Calls 1 multiproc (which governs the correlating) for each
                # pair on a new CPU so you can have all pairs working at once.
                multi = multiprocessing.Process(target=multiproc,
                                                args=([
                                                    pairs[pair_index],
                                                    params['output_root'],
                                                    pair_index, False
                                                ]))

                processes_list.append(multi)

                multi.start()

            # Waits for all correlations to finish before continuing.
            while True in [multi.is_alive() for multi in processes_list]:
                print "processing"
                time.sleep(5)

            # just to be safe
            time.sleep(1)

            # A more concise call, but multiprocessing does not behave well
            # with complex objects:
            #runlist = [(pair_index,
            #            params['output_root'],
            #            False) for
            #            pair_index in range(0, num_map_pairs)]
            #pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
            #pool.map(self.multiproc, runlist)

            # Load the correlations and save them to each pair. The pairs that
            # got passed to multiproc are not the same ones as ones in
            # self.pairs, so this must be done to have actual values.
            print "Loading map pairs back into program."
            file_name = params['output_root']
            file_name += "map_pair_for_freq_slices_fore_corr_"

            for count in range(0, num_map_pairs):
                print "Loading correlation for pair %d" % (count)
                pickle_handle = open(file_name + str(count) + ".pkl", "r")
                correlate_results = cPickle.load(pickle_handle)
                pairs[count].fore_corr = correlate_results[0]
                pairs[count].fore_counts = correlate_results[1]
                fore_pairs.append(pairs[count])
                pickle_handle.close()

            self.fore_pairs = copy.deepcopy(fore_pairs)
            # With this, you do not need fore_pairs anymore.
            self.pairs = copy.deepcopy(fore_pairs)

            pairs = self.pairs

            # Get foregrounds.

            # svd_info_list keeps track of all of the modes of all maps in
            # all pairs. This means if you want to subtract a different number
            # of modes for the same maps/noises/frequencies, you have the modes
            # already saved and do not need to run the first correlation again.
            svd_info_list = []
            for pair in pairs:
                vals, modes1, modes2 = cf.get_freq_svd_modes(
                    pair.fore_corr, len(freq_list))
                pair.vals = vals

                # Save ALL of the modes for reference.
                pair.all_modes1 = modes1
                pair.all_modes2 = modes2
                svd_info = (vals, modes1, modes2)
                svd_info_list.append(svd_info)

                # Save only the modes you want to subtract.
                n_modes = params['modes']
                pair.modes1 = modes1[:n_modes]
                pair.modes2 = modes2[:n_modes]

            self.svd_info_list = svd_info_list
            self.pairs = pairs

            if params['save_svd_info']:
                ft.save_pickle(self.svd_info_list, params['svd_file'])
        else:
            # The first correlation and svd has been skipped.
            # This means you already have the modes so you can just load
            # them from file.
            self.svd_info_list = ft.load_pickle(params['svd_file'])
            # Set the svd info to the pairs.
            for i in range(0, len(pairs)):
                svd_info = self.svd_info_list[i]
                pairs[i].vals = svd_info[0]
                pairs[i].all_modes1 = svd_info[1]
                pairs[i].all_modes2 = svd_info[2]
                n_modes = params['modes']
                pairs[i].modes1 = svd_info[1][:n_modes]
                pairs[i].modes2 = svd_info[2][:n_modes]

            self.pairs = pairs

        # Subtract foregrounds.
        for pair_index in range(0, len(pairs)):
            pairs[pair_index].subtract_frequency_modes(
                pairs[pair_index].modes1, pairs[pair_index].modes2)

        # Save cleaned clean maps, cleaned noises, and modes.
        self.save_data(save_maps=params['save_maps'],
                       save_noises=params['save_noises'],
                       save_modes=params['save_modes'])

        # Finish if this was just first pass.
        if params['first_pass_only']:
            self.pairs = pairs
            return

        # Correlate the cleaned maps.
        # Here we could calculate the power spectrum instead eventually.
        temp_pair_list = []
        processes_list = []
        for pair_index in range(0, num_map_pairs):
            multi = multiprocessing.Process(target=multiproc,
                                            args=([
                                                pairs[pair_index],
                                                params['output_root'],
                                                pair_index, True
                                            ]))

            processes_list.append(multi)
            multi.start()

        while True in [multi.is_alive() for multi in processes_list]:
            print "processing"
            time.sleep(5)

        # just to be safe
        time.sleep(1)

        # Would really rather use the implementation below, except
        # multiprocessing does not behave well with complex objects:
        #runlist = [(pairs[pair_index],
        #            params['output_root'],
        #            pair_index, True) for
        #            pair_index in range(0, num_map_pairs)]

        #pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
        #pool.map(multiproc, runlist)

        print "Loading map pairs back into program."
        file_name = params['output_root']
        file_name += "map_pair_for_freq_slices_corr_"

        for count in range(0, num_map_pairs):
            print "Loading correlation for pair %d" % (count)
            pickle_handle = open(file_name + str(count) + ".pkl", "r")
            correlate_results = cPickle.load(pickle_handle)
            pairs[count].corr = correlate_results[0]
            pairs[count].counts = correlate_results[1]
            temp_pair_list.append(pairs[count])
            pickle_handle.close()

        self.pairs = copy.deepcopy(temp_pair_list)

        # Get the average correlation and its standard deviation.
        corr_list = []
        for pair in self.pairs:
            corr_list.append(pair.corr)

        self.corr_final, self.corr_std = cf.get_corr_and_std_3d(corr_list)

        if params['pickle_slices']:
            ft.save_pickle(self, self.params['output_root'] + \
                                 'New_Slices_object.pkl')

        return
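
subtract_frequency_modes is not shown in this snippet. As a rough sketch,
subtracting a frequency mode amounts to projecting a unit vector over
frequency out of every map pixel (the function name comes from the call
above; shapes and body are assumptions):

    import numpy as np

    def subtract_modes(map_cube, modes):
        # map_cube: (nfreq, nra, ndec); modes: iterable of (nfreq,) unit vectors.
        flat = map_cube.reshape(map_cube.shape[0], -1)
        for mode in modes:
            amps = np.dot(mode, flat)       # per-pixel amplitude of the mode
            flat -= np.outer(mode, amps)    # project the mode out
        return flat.reshape(map_cube.shape)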
Example #44
    def execute(self, nprocesses=1) :
        
        params = self.params
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params, params['output_root'] + 'params.ini',
                               prefix=prefix)
        guppi_result = params['Guppi_test']
        output_root = params['output_root']
        output_end = params['output_end']

        self.file_num = len(params['file_middles']) # getting a variable for number of calibrator files being used

# Need to remove count for calibrator files that are not the right size.
        session_nums = sp.zeros(self.file_num)
        c = 0
        for file_middle in params['file_middles'] :
            input_fname = (params['input_root'] + file_middle + 
                           params['input_end'])
            Reader = core.fitsGBT.Reader(input_fname)
            n_scans = len(Reader.scan_set)
            Len_set = Reader.read(0,0,force_tuple=True)
            session_nums[c] = file_middle.split('_')[0]
#            print session_nums[c]
            for Data in Len_set :
                freq_num = Data.dims[3] # Match freq_num to the data's frequency binning.
            if guppi_result == True : 
                if n_scans != 2 :
                    self.file_num -=1
            elif guppi_result == False :
                if n_scans != 4 :
                    self.file_num -=1
            c+=1

# Need to know the general frequency binning (going to assume that it's 200 for guppi, 260 for spectrometer, aka 1 MHz binning)
#        if guppi_result == True :
#            freq_num = 200 
        if guppi_result == False :
#            freq_num = 260    
            self.file_num *= 2 #because there are two sets of scans per file for spectrometer, need to double the number of values


        self.function = sp.zeros(4*self.file_num) #setting a variable of the right size for peval to use
        self.theta = sp.zeros(4*self.file_num) #setting a variable for parallactic angle in radians
        self.d = sp.zeros((4*self.file_num,freq_num)) #setting a variable for measured values
        
# Loop over files to process.      
        k=0
        for file_middle in params['file_middles'] :
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])
# Read in the data, and loop over data blocks.
            Reader = core.fitsGBT.Reader(input_fname)
            n_IFs = len(Reader.IF_set) # Should be 1: windows are stitched for the spectrometer, and guppi has a single IF by definition.
            n_scans = len(Reader.scan_set) #Should be 4 for the spectrometer, 2 for guppi
            OnBlocks = Reader.read(range(0,n_scans,2),0,force_tuple=True)
            OffBlocks = Reader.read(range(1,n_scans,2),0,force_tuple=True)
#force_tuple=True makes the output of Reader.read a tuple even if there is only one Block to return.
            Blocks = Reader.read(params['scans'], params['IFs'],
                                 force_tuple=True)

# Setting labels for indices for later
            on_ind = 0
            off_ind = 1
            I_ind = 0
            Q_ind = 1
            U_ind = 2
            V_ind = 3
            
#Calculating Parallactic angles for the cal file
            PA = sp.zeros(n_scans)
            m = 0           
            for Data in Blocks:
                freq_len = Data.dims[3]
                Data.calc_freq()
                freq_val = Data.freq
                freq_val = freq_val/1000000       
                Data.calc_PA()
                PA[m] = ma.mean(Data.PA)  
                m+=1
            
# Going to skip the non guppi version for now.
#            if guppi_result == False :
#                if n_scans == 4 : # To make sure that we don't use incomplete data
#                    self.theta[k] = 0.5*(PA[0]+PA[1]) # the average between the on and off values
#                    self.theta[k+1] =0.5*(PA[0]+PA[1])
#                    self.theta[k+2] = 0.5*(PA[0]+PA[1])
#                    self.theta[k+3] = 0.5*(PA[2]+PA[3]) 
#                    self.theta[k+4] = 0.5*(PA[2]+PA[3])
#                    self.theta[k+5] = 0.5*(PA[2]+PA[3])

#                    S_med_on = sp.zeros((2,freq_len,4))
#                    S_med = sp.zeros((2,freq_len,4))  
            
#                    i=0  
#                    for Data in OnBlocks :
#                        S_med_on[i,:,0] = ma.median(Data.data[:,I_ind,off_ind,:],axis=0)
#                        S_med_on[i,:,1] = ma.median(Data.data[:,Q_ind,off_ind,:],axis=0)
#                        S_med_on[i,:,2] = ma.median(Data.data[:,U_ind,off_ind,:],axis=0) 
#                        S_med_on[i,:,3] = ma.median(Data.data[:,V_ind,off_ind,:],axis=0)
#                        i+=1
             
#                    j=0
#                    for Data in OffBlocks :
#                        S_med[j,:,0] = ma.median(Data.data[:,I_ind,off_ind,:],axis=0)
#                        S_med[j,:,1] = ma.median(Data.data[:,Q_ind,off_ind,:],axis=0)
#                        S_med[j,:,2] = ma.median(Data.data[:,U_ind,off_ind,:],axis=0) 
#                        S_med[j,:,3] = ma.median(Data.data[:,V_ind,off_ind,:],axis=0)
#                        j+=1

#                    I_onoff_1 = S_med_on[0,:,0]-S_med[0,:,0] # for first set of on and off scans
#                    I_onoff_2 = S_med_on[1,:,0]-S_med[1,:,0] # for second set of on and off scans

# Setting the measured stokes values for each file (to be used in the least squares fit)
#                    d[k,:] = (S_med_on[0,:,1]-S_med[0,:,1])/I_onoff_1
#                    d[k+1,:] = (S_med_on[0,:,2]-S_med[0,:,2])/I_onoff_1
#                    d[k+2,:] = (S_med_on[0,:,3]-S_med[0,:,3])/I_onoff_1
#                    d[k+3,:] = (S_med_on[1,:,1]-S_med[1,:,1])/I_onoff_2 
#                    d[k+4,:] = (S_med_on[1,:,2]-S_med[1,:,2])/I_onoff_2
#                    d[k+5,:] = (S_med_on[1,:,3]-S_med[1,:,3])/I_onoff_2
#                    k+=6            

            if guppi_result == True : #This is the same as above only there is a single set of on and off scans in this case.
                if n_scans == 2 : 
                    self.theta[k] = ma.mean(PA)
                    self.theta[k+1] = ma.mean(PA)
                    self.theta[k+2] = ma.mean(PA)
                    self.theta[k+3] = ma.mean(PA)

                    S_med_calon_src = sp.zeros((freq_len,4))
                    S_med_caloff_src = sp.zeros((freq_len,4))
                    S_med_calon = sp.zeros((freq_len,4))
                    S_med_caloff = sp.zeros((freq_len,4))

                    for Data in OnBlocks:
                        S_med_caloff_src[:,0] = ma.median(Data.data[:,I_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,1] = ma.median(Data.data[:,Q_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,2] = ma.median(Data.data[:,U_ind,off_ind,:],axis=0)
                        S_med_caloff_src[:,3] = ma.median(Data.data[:,V_ind,off_ind,:],axis=0)

                        S_med_calon_src[:,0] = ma.median(Data.data[:,I_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,1] = ma.median(Data.data[:,Q_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,2] = ma.median(Data.data[:,U_ind,on_ind,:],axis=0)
                        S_med_calon_src[:,3] = ma.median(Data.data[:,V_ind,on_ind,:],axis=0)
                    
                    for Data in OffBlocks:
                        S_med_caloff[:,0] = ma.median(Data.data[:,I_ind,off_ind,:],axis=0)
                        S_med_caloff[:,1] = ma.median(Data.data[:,Q_ind,off_ind,:],axis=0)
                        S_med_caloff[:,2] = ma.median(Data.data[:,U_ind,off_ind,:],axis=0)
                        S_med_caloff[:,3] = ma.median(Data.data[:,V_ind,off_ind,:],axis=0)
 
                        S_med_calon[:,0] = ma.median(Data.data[:,I_ind,on_ind,:],axis=0)
                        S_med_calon[:,1] = ma.median(Data.data[:,Q_ind,on_ind,:],axis=0)
                        S_med_calon[:,2] = ma.median(Data.data[:,U_ind,on_ind,:],axis=0)
                        S_med_calon[:,3] = ma.median(Data.data[:,V_ind,on_ind,:],axis=0)
 
                     
                    self.d[k,:] = 0.5*(S_med_calon_src[:,0]+S_med_caloff_src[:,0]-S_med_calon[:,0]-S_med_caloff[:,0])
                    self.d[k+1,:] = 0.5*(S_med_calon_src[:,1]+S_med_caloff_src[:,1]-S_med_calon[:,1]-S_med_caloff[:,1])
                    self.d[k+2,:] = 0.5*(S_med_calon_src[:,2]+S_med_caloff_src[:,2]-S_med_calon[:,2]-S_med_caloff[:,2])
                    self.d[k+3,:] = 0.5*(S_med_calon_src[:,3]+S_med_caloff_src[:,3]-S_med_calon[:,3]-S_med_caloff[:,3])
                    k+=4

        for a in range(0,4*self.file_num):
            for b in range(0,freq_num):
#                print self.d[a,b]
                if self.d[a,b] > 1000 :
                   self.d[a,b] = 1000

        #The sixteen parameters here are the elements of the Mueller matrix
        #itself (rather than the physical deltaG, alpha, psi, phi, epsilon,
        #Qsrc, Usrc, chi parametrization) => the parameter vector is p
        p0 = [1.0, 0.1, 0.1, 0.1, 0.1, 1.0, 0.1, 0.1, 0.1, 0.1, 1.0, 0.1, 0.1, 0.1, 0.1, 1.0] # guessed preliminary values (near-identity matrix)
        error = sp.ones(4*self.file_num)
        #Note that error can be used to weight the equations if its entries are
        #not all set to one.

        p_val_out = sp.zeros((freq_len, 17))
 #       p_err_out = sp.zeros((freq_len, 17))
     
        for f in range(0,freq_len):   
            plsq = leastsq(self.residuals,p0,args=(error,f,freq_val),full_output=0, maxfev=5000)
            pval = plsq[0] # this is the 1-d array of results
#            perr = plsq[1] # this is a 2d array representing the estimated covariance of the results. - Not working properly. 

            Mueller = sp.mat([[pval[0],pval[1],pval[2],pval[3]],[pval[4],pval[5],pval[6],pval[7]],[pval[8],pval[9],pval[10],pval[11]],[pval[12],pval[13],pval[14],pval[15]]])
#            Mueller = Mueller.I  
#            print Mueller

            p_val_out[f,0] = freq_val[f]
            p_val_out[f,1] = Mueller[0,0]
            p_val_out[f,2] = Mueller[0,1]
            p_val_out[f,3] = Mueller[0,2]
            p_val_out[f,4] = Mueller[0,3]
            p_val_out[f,5] = Mueller[1,0]
            p_val_out[f,6] = Mueller[1,1]
            p_val_out[f,7] = Mueller[1,2]
            p_val_out[f,8] = Mueller[1,3]
            p_val_out[f,9] = Mueller[2,0]
            p_val_out[f,10] = Mueller[2,1]
            p_val_out[f,11] = Mueller[2,2]
            p_val_out[f,12] = Mueller[2,3]
            p_val_out[f,13] = Mueller[3,0]
            p_val_out[f,14] = Mueller[3,1]
            p_val_out[f,15] = Mueller[3,2]
            p_val_out[f,16] = Mueller[3,3]

        sess_num = int(session_nums[0])
        print sess_num
        np.savetxt(output_root+str(sess_num)+'_flux_mueller_matrix_calc'+output_end, p_val_out, delimiter = ' ')
Example #45
 def execute(self, nprocesses=1) :
             
     params = self.params
     kiyopy.utils.mkparents(params['output_root'] + 
                            params['output_filename'])
     parse_ini.write_params(params, params['output_root'] + 'params.ini',
                            prefix=prefix)
     file_middles = params['file_middles']
     # Store the correlation and normalization session by session in a
     # dictionary.
     corr_dict = {}
     norm_dict = {}
     # Also store the frequency axis.
     freq_dict = {}
     # Finally, a gain dictionary.
     gain_dict = {}
     # Loop though all the files and accumulate the correlation and the
     # normalization.  Files in the same session are summed together.
     n_new = nprocesses-1  # How many new processes to spawn at once.
     n_files = len(file_middles)
     if n_new > 0:
         # Multiprocessed version.
         process_list = range(n_new)
         pipe_list = range(n_new)
         for ii in xrange(n_files + n_new) :
             if ii >= n_new :
                 # First end a process before starting a new one.
                 key, corr, norm, freq =  pipe_list[ii%n_new].recv()
                 if corr_dict.has_key(key):
                     if corr_dict[key].shape != corr.shape:
                         msg = ("All data needs to have the same band and"
                                "polarization structure.")
                         raise ce.DataError(msg)
                     corr_dict[key] += corr
                     norm_dict[key] += norm
                     if not np.allclose(freq_dict[key], freq):
                         raise ce.DataError("Frequency structure not "
                                            "consistant.")
                 else:
                     corr_dict[key] = corr
                     norm_dict[key] = norm
                     freq_dict[key] = freq
             if ii < n_files :
                 # Start a new process.
                 Here, Far = mp.Pipe()
                 pipe_list[ii%n_new] = Here
                 process_list[ii%n_new] = mp.Process(
                     target=self.process_file, args=(file_middles[ii], Far))
                 process_list[ii%n_new].start()
     else:
         # Single process.
         for middle in file_middles:
             key, corr, norm, freq = self.process_file(middle)
             if corr_dict.has_key(key):
                 if corr_dict[key].shape != corr.shape:
                     msg = ("All data needs to have the same band and"
                            "polarization structure.")
                     raise ce.DataError(msg)
                 corr_dict[key] += corr
                 norm_dict[key] += norm
                 if not np.allclose(freq_dict[key], freq):
                     raise ce.DataError("Frequency structure not consistant.")
             else:
                 corr_dict[key] = corr
                 norm_dict[key] = norm
                 freq_dict[key] = freq
     # Now that we have the correlation summed for all files in each
     # session, normalize it and store it as an output.
     output_fname = params['output_root'] + params["output_filename"]        
     out_db = shelve.open(output_fname)
     # Loop through all the data divisions and processes them one at a
     # time.
     for key in corr_dict.iterkeys():
         corr = corr_dict[key]
         norm = norm_dict[key]
         # Normalize.
         corr[norm==0] = 1
         norm[norm==0] = 1
         gains = corr / norm
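         # Empty bins were forced to corr = norm = 1 above, so they come
         # out with gain exactly 1 instead of 0/0.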
         gain_dict[key] = gains
         #plt.figure()
         #if params['diff_gain_cal_only']:
         #    plt.plot(freq_dict[key][0,:],
         #             (gains[0,0,0,:] + gains[0,0,1,:])/2., '.')
         #    plt.plot(freq_dict[key][0,:],
         #             (gains[0,3,0,:] + gains[0,3,1,:])/2., '.')
         #else:
         #    plt.plot(freq_dict[key][0,:], gains[0,0,0,:], '.')
         #plt.title(key)
         #plt.xlabel('Frequency (Hz)')
         #plt.ylabel('Correlation amplitude')
         out_db[key + '.gains'] = gains
         out_db[key + '.freq'] = freq_dict[key]
     #if not params['diff_gain_cal_only']:
     #    plt.show()
     out_db.close()
     #### Apply the calibration to the data. ####
     for middle in file_middles:
         key = get_key(middle)
         gain = gain_dict[key]
         freq = freq_dict[key]
         self.calibrate_file(middle, gain, freq)
Example #46
    def execute(self, nprocesses=1):

        params = self.params
        kiyopy.utils.mkparents(params['output_root'] +
                               params['output_filename'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=prefix)
        file_middles = params['file_middles']
        # Store the covariance and counts session by session in a dictionary.
        covar_dict = {}
        counts_dict = {}
        # Also store the frequency axis.
        freq_dict = {}
        # Loop though all the files and accumulate the covariance and the
        # counts.  Files in the same session are summed together.
        for middle in file_middles:
            key, covar, counts, freq = self.process_file(middle)
            if covar_dict.has_key(key):
                if covar_dict[key].shape != covar.shape:
                    msg = ("All data needs to have the same band and"
                           "polarization structure.")
                    raise ce.DataError(msg)
                covar_dict[key] += covar
                counts_dict[key] += counts
                if not np.allclose(freq_dict[key], freq):
                    raise ce.DataError("Frequency structure not consistant.")
            else:
                covar_dict[key] = covar
                counts_dict[key] = counts
                freq_dict[key] = freq
        # Now that we have the covariance, factor it into eigen-vectors and
        # store it in a data base.
        output_fname = params['output_root'] + params["output_filename"]
        out_db = shelve.open(output_fname)
        # Loop through all the data divisions and processes them one at a
        # time.
        for key in covar_dict.iterkeys():
            covar = covar_dict[key]
            counts = counts_dict[key]
            # Normalize.
            counts[counts == 0] = 1
            covar /= counts
            # Loop over each matrix, decompose it, and save it.
            eigen_vects = np.empty_like(covar)
            for band_ii in range(covar.shape[0]):
                for pol_jj in range(covar.shape[1]):
                    for cal_kk in range(covar.shape[2]):
                        # Factor
                        h, v = linalg.eigh(covar[band_ii, pol_jj, cal_kk])
                        eigen_vects[band_ii, pol_jj, cal_kk] = v
                        #plt.semilogy(h, '.')
                        #plt.figure()
                        #for ii in range(1,5):
                        #    plt.plot(v[:,-ii])
                        #plt.show()
            out_db[key + '.vects'] = eigen_vects
            out_db[key + '.freq'] = freq_dict[key]
        if params['n_modes_removed']:
            for middle in file_middles:
                key = get_key(middle)
                modes = out_db[key + '.vects']
                modes = modes[:, :, :, :, -params['n_modes_removed']:]
                self.clean_file(middle, modes)
        # Close the data base.
        out_db.close()
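
linalg.eigh returns eigenvalues in ascending order, so the slice
modes[:, :, :, :, -n_modes_removed:] above keeps the strongest covariance
modes. A minimal sketch of removing such modes from a single spectrum
(clean_file itself is not shown; names and shapes are assumptions):

    import numpy as np

    def remove_modes(spectrum, covar, n_modes):
        # spectrum: (nfreq,); covar: (nfreq, nfreq) accumulated covariance.
        h, v = np.linalg.eigh(covar)     # eigenvalues ascend, take the end
        for mode in v[:, -n_modes:].T:
            spectrum = spectrum - np.dot(mode, spectrum) * mode
        return spectrum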
Example #47
    def execute(self, nprocesses=1):

        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        size = comm.Get_size()

        params = self.params
        resultf = params["resultf"]
        # resultf = params['hr'][0]
        # if len(params['last']) != 0:
        # 	resultf = resultf + params['last'][0]
        # resultf = resultf + '-' + params['hr'][1]
        # if len(params['last']) != 0:
        # 	resultf = resultf + params['last'][1]

        # Make parent directory and write parameter file.
        parse_ini.write_params(params, params["output_root"] + "params.ini", prefix="pk_")
        in_root = params["input_root"]
        out_root = params["output_root"]
        mid = params["mid"]

        FKPweight = params["FKPweight"]
        n_processes = params["processes"]

        #### Process ####
        n_new = n_processes - 1
        n_map = len(params["hrlist"])

        kbn = params["kbinNum"]
        kmin = params["kmin"]
        kmax = params["kmax"]
        PK = np.zeros(shape=(n_map, kbn))

        self.q = mp.JoinableQueue()

        if n_new <= 0:
            for ii in range(n_map):
                self.process_map(ii, PK[ii])
        elif n_new > 32:
            raise ValueError("Process limit is 32")
        else:
            process_list = range(n_new)
            for ii in xrange(n_map + n_new):
                if ii >= n_new:
                    PK[ii - n_new] = self.q.get()
                    process_list[ii % n_new].join()
                    if process_list[ii % n_new].exitcode != 0:
                        raise RuntimeError("A thred faild with exit code" + str(process_list[ii % n_new].exitcode))
                if ii < n_map:
                    process_list[ii % n_new] = mp.Process(target=self.process_map, args=(ii, ii % n_new))
                    process_list[ii % n_new].start()

        if FKPweight:
            sp.save(params["output_root"] + "PKeach_fkp_" + resultf, PK)
        else:
            sp.save(params["output_root"] + "PKeach_" + resultf, PK)
            # PKmean = sp.load(params['input_root'] + 'PK.npy')
        PKmean = PK.mean(axis=0)
        PK[:] = (PK[:] - PKmean) ** 2
        PKvar = np.sum(PK, axis=0)
        PKvar = PKvar / n_map
        PKvar = np.sqrt(PKvar)
        print PKmean
        print PKvar

        kunit = 2.0 * pi / (params["boxunit"])
        if (kmin is None) or (kmax is None):
            k = np.logspace(log10(1.0 / params["boxshape"][0]), log10(sqrt(3)), num=kbn + 1)
        else:
            kmin = kmin / kunit
            kmax = kmax / kunit
            k = np.logspace(log10(kmin), log10(kmax), num=kbn + 1)
        k = k * 2.0 * pi / params["boxunit"]
        k = k[:-1]
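        # k now holds the lower edge of each of the kbn logarithmic bins,
        # converted to physical units by the 2 * pi / boxunit factor.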
        sp.save(params["output_root"] + "k_combined_" + resultf, k)
        if FKPweight:
            sp.save(params["output_root"] + "PKvar_combined_fkp_" + resultf, PKvar)
            sp.save(params["output_root"] + "PK_combined_fkp_" + resultf, PKmean)
        else:
            sp.save(params["output_root"] + "PKvar_combined_" + resultf, PKvar)
            sp.save(params["output_root"] + "PK_combined_" + resultf, PKmean)
Example #48
 def execute(self, nprocesses=1):
     """Worker funciton."""
     params = self.params
     # Make parent directory and write parameter file.
     kiyopy.utils.mkparents(params['output_root'])
     parse_ini.write_params(params,
                            params['output_root'] + 'params.ini',
                            prefix=prefix)
     save_noise_diag = params['save_noise_diag']
     in_root = params['input_root']
     all_out_fname_list = []
     all_in_fname_list = []
     # Figure out what the band names are.
     bands = params['bands']
     if not bands:
         map_files = glob.glob(in_root + 'dirty_map_' + pol_str + "_*.npy")
         bands = []
         root_len = len(in_root + 'dirty_map_')
         for file_name in map_files:
             bands.append(file_name[root_len:-4])
     # Loop over files to process.
     for pol_str in params['polarizations']:
         for band in bands:
             if band == -1:
                 band_str = ''
             else:
                 band_str = "_" + repr(band)
             dmap_fname = (in_root + 'dirty_map_' + pol_str + band_str +
                           '.npy')
             all_in_fname_list.append(
                 kiyopy.utils.abbreviate_file_path(dmap_fname))
             # Load the dirty map and the noise matrix.
             dirty_map = algebra.load(dmap_fname)
             dirty_map = algebra.make_vect(dirty_map)
             if dirty_map.axes != ('freq', 'ra', 'dec'):
                 msg = ("Expeced dirty map to have axes ('freq',"
                        "'ra', 'dec'), but it has axes: " +
                        str(dirty_map.axes))
                 raise ce.DataError(msg)
             shape = dirty_map.shape
             # Initialize the clean map.
             clean_map = algebra.info_array(sp.zeros(dirty_map.shape))
             clean_map.info = dict(dirty_map.info)
             clean_map = algebra.make_vect(clean_map)
             # If needed, initialize a map for the noise diagonal.
             if save_noise_diag:
                 noise_diag = algebra.zeros_like(clean_map)
             if params["from_eig"]:
                 # Solving from eigen decomposition of the noise instead of
                 # the noise itself.
                 # Load in the decomposition.
                 evects_fname = (in_root + 'noise_evects_' + pol_str +
                                 band_str + '.npy')
                 if self.feedback > 1:
                     print "Using dirty map: " + dmap_fname
                     print "Using eigenvectors: " + evects_fname
                 evects = algebra.open_memmap(evects_fname, 'r')
                 evects = algebra.make_mat(evects)
                 evals_inv_fname = (in_root + 'noise_evalsinv_' + pol_str +
                                    band_str + '.npy')
                 evals_inv = algebra.load(evals_inv_fname)
                 evals_inv = algebra.make_mat(evals_inv)
                 # Solve for the map.
                 if params["save_noise_diag"]:
                     clean_map, noise_diag = solve_from_eig(
                         evals_inv, evects, dirty_map, True, self.feedback)
                 else:
                     clean_map = solve_from_eig(evals_inv, evects,
                                                dirty_map, False,
                                                self.feedback)
                 # Delete the eigen vectors to recover memory.
                 del evects
             else:
                 # Solving from the noise.
                 noise_fname = (in_root + 'noise_inv_' + pol_str +
                                band_str + '.npy')
                 if self.feedback > 1:
                     print "Using dirty map: " + dmap_fname
                     print "Using noise inverse: " + noise_fname
                 all_in_fname_list.append(
                     kiyopy.utils.abbreviate_file_path(noise_fname))
                 noise_inv = algebra.open_memmap(noise_fname, 'r')
                 noise_inv = algebra.make_mat(noise_inv)
                 # Two cases for the noise.  If it's the same shape as the map
                 # then the noise is diagonal.  Otherwise, it should be
                 # block diagonal in frequency.
                 if noise_inv.ndim == 3:
                     if noise_inv.axes != ('freq', 'ra', 'dec'):
                         msg = ("Expeced noise matrix to have axes "
                                "('freq', 'ra', 'dec'), but it has: " +
                                str(noise_inv.axes))
                         raise ce.DataError(msg)
                     # Noise inverse can fit in memory, so copy it.
                     noise_inv_memory = sp.array(noise_inv, copy=True)
                     # Find the non-singular (covered) pixels.
                     max_information = noise_inv_memory.max()
                     good_data = noise_inv_memory > 1.0e-10 * max_information
                     # Make the clean map.
                     clean_map[good_data] = (dirty_map[good_data] /
                                             noise_inv_memory[good_data])
                     if save_noise_diag:
                         noise_diag[good_data] = \
                                 1/noise_inv_memory[good_data]
                 elif noise_inv.ndim == 5:
                     if noise_inv.axes != ('freq', 'ra', 'dec', 'ra',
                                           'dec'):
                         msg = ("Expeced noise matrix to have axes "
                                "('freq', 'ra', 'dec', 'ra', 'dec'), "
                                "but it has: " + str(noise_inv.axes))
                         raise ce.DataError(msg)
                     # Arrange the dirty map as a vector.
                     dirty_map_vect = sp.array(dirty_map)  # A copy.
                     dirty_map_vect.shape = (shape[0], shape[1] * shape[2])
                     frequencies = dirty_map.get_axis('freq') / 1.0e6
                     # Allocate memory only once.
                     noise_inv_freq = sp.empty(
                         (shape[1], shape[2], shape[1], shape[2]),
                         dtype=float)
                     if self.feedback > 1:
                         print "Inverting noise matrix."
                     # Block diagonal in frequency so loop over frequencies.
                     for ii in xrange(dirty_map.shape[0]):
                         if self.feedback > 1:
                             print "Frequency: ", "%5.1f" % (
                                 frequencies[ii]),
                         if self.feedback > 2:
                             print ", start mmap read:",
                             sys.stdout.flush()
                         noise_inv_freq[...] = noise_inv[ii, ...]
                         if self.feedback > 2:
                             print "done, start eig:",
                             sys.stdout.flush()
                         noise_inv_freq.shape = (shape[1] * shape[2],
                                                 shape[1] * shape[2])
                         # Solve the map making equation by diagonalization.
                         noise_inv_diag, Rot = sp.linalg.eigh(
                             noise_inv_freq, overwrite_a=True)
                         if self.feedback > 2:
                             print "done",
                         map_rotated = sp.dot(Rot.T, dirty_map_vect[ii])
                         # Zero out infinite noise modes.
                         bad_modes = (noise_inv_diag <
                                      1.0e-5 * noise_inv_diag.max())
                         if self.feedback > 1:
                             print ", discarded: ",
                             print "%4.1f" % (100.0 * sp.sum(bad_modes) /
                                              bad_modes.size),
                             print "% of modes",
                         if self.feedback > 2:
                             print ", start rotations:",
                             sys.stdout.flush()
                         map_rotated[bad_modes] = 0.
                         noise_inv_diag[bad_modes] = 1.0
                         # Solve for the clean map and rotate back.
                         map_rotated /= noise_inv_diag
                         map = sp.dot(Rot, map_rotated)
                         if self.feedback > 2:
                             print "done",
                             sys.stdout.flush()
                         # Fill the clean array.
                         map.shape = (shape[1], shape[2])
                         clean_map[ii, ...] = map
                         if save_noise_diag:
                             # Using C = R Lambda R^T
                             # where Lambda = diag(1/noise_inv_diag).
                             temp_noise_diag = 1 / noise_inv_diag
                             temp_noise_diag[bad_modes] = 0
                             # Multiply R by the diagonal eigenvalue matrix.
                             # Broadcasting does equivalent of mult by diag
                             # matrix.
                             temp_mat = Rot * temp_noise_diag
                             # Multiply by R^T, but only calculate the
                             # diagonal elements.
                             for jj in range(shape[1] * shape[2]):
                                 temp_noise_diag[jj] = sp.dot(
                                     temp_mat[jj, :], Rot[jj, :])
                             temp_noise_diag.shape = (shape[1], shape[2])
                             noise_diag[ii, ...] = temp_noise_diag
                         # Return workspace memory to original shape.
                         noise_inv_freq.shape = (shape[1], shape[2],
                                                 shape[1], shape[2])
                         if self.feedback > 1:
                             print ""
                             sys.stdout.flush()
                 elif noise_inv.ndim == 6:
                     if save_noise_diag:
                         # OLD WAY.
                         #clean_map, noise_diag, chol = solve(noise_inv,
                         #        dirty_map, True, feedback=self.feedback)
                         # NEW WAY.
                         clean_map, noise_diag, noise_inv_diag, chol = \
                                   solve(noise_fname, noise_inv, dirty_map,
                                   True, feedback=self.feedback)
                     else:
                         # OLD WAY.
                         #clean_map, chol = solve(noise_inv, dirty_map,
                         #            False, feedback=self.feedback)
                         # NEW WAY.
                         clean_map, noise_inv_diag, chol = \
                                   solve(noise_fname, noise_inv, dirty_map,
                                   False, feedback=self.feedback)
                     if params['save_cholesky']:
                         chol_fname = (params['output_root'] + 'chol_' +
                                       pol_str + band_str + '.npy')
                         sp.save(chol_fname, chol)
                     if params['save_noise_inv_diag']:
                         noise_inv_diag_fname = (params['output_root'] +
                                                 'noise_inv_diag_' +
                                                 pol_str + band_str +
                                                 '.npy')
                         algebra.save(noise_inv_diag_fname, noise_inv_diag)
                     # Delete the cholesky to recover memory.
                     del chol
                 else:
                     raise ce.DataError("Noise matrix has bad shape.")
                 # In all cases delete the noise object to recover memory.
                 del noise_inv
             # Write the clean map to file.
             out_fname = (params['output_root'] + 'clean_map_' + pol_str +
                          band_str + '.npy')
             if self.feedback > 1:
                 print "Writing clean map to: " + out_fname
             algebra.save(out_fname, clean_map)
             all_out_fname_list.append(
                 kiyopy.utils.abbreviate_file_path(out_fname))
             if save_noise_diag:
                 noise_diag_fname = (params['output_root'] + 'noise_diag_' +
                                     pol_str + band_str + '.npy')
                 algebra.save(noise_diag_fname, noise_diag)
                 all_out_fname_list.append(
                     kiyopy.utils.abbreviate_file_path(noise_diag_fname))
             # Check the clean map for failure.
             if not sp.alltrue(sp.isfinite(clean_map)):
                 n_bad = sp.sum(sp.logical_not(sp.isfinite(clean_map)))
                 msg = ("Non finite entries found in clean map. Solve"
                        " failed. %d out of %d entries bad" %
                        (n_bad, clean_map.size))
                 raise RuntimeError(msg)
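# --- Sketch: per-frequency solve by diagonalization ---
# A minimal, self-contained sketch of the block-diagonal solve used in
# the example above, written with plain numpy instead of the pipeline's
# `algebra` wrappers.  Function and variable names here are
# illustrative assumptions, not part of the original code base.
import numpy as np

def solve_one_freq(noise_inv, dirty_map, threshold=1.0e-5):
    """Solve noise_inv * clean = dirty for one frequency slice.

    noise_inv : (npix, npix) symmetric inverse-noise matrix.
    dirty_map : (npix,) flattened dirty-map pixels.
    Eigenmodes below threshold * max eigenvalue are treated as
    unconstrained (infinite noise) and zeroed out.
    """
    evals, rot = np.linalg.eigh(noise_inv)
    map_rotated = rot.T.dot(dirty_map)
    bad_modes = evals < threshold * evals.max()
    map_rotated[bad_modes] = 0.0
    evals[bad_modes] = 1.0              # avoid dividing by ~zero
    map_rotated /= evals
    clean = rot.dot(map_rotated)
    # Diagonal of the covariance C = R diag(1/evals) R^T, with the
    # discarded modes contributing nothing.
    inv_evals = 1.0 / evals
    inv_evals[bad_modes] = 0.0
    noise_diag = np.einsum('ij,j,ij->i', rot, inv_evals, rot)
    return clean, noise_diag

# Tiny usage example on synthetic data.
rng = np.random.RandomState(0)
a = rng.standard_normal((8, 8))
n_inv = a.dot(a.T)                      # symmetric, positive semi-definite
clean, diag = solve_one_freq(n_inv, rng.standard_normal(8))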
Example #49
0
	def execute(self, nprocesses=1):
		params = self.params

		# Make parent directory and write parameter file.
		kiyopy.utils.mkparents(params['output_root'])
		parse_ini.write_params(params, params['output_root']+'params.ini',prefix='pk_')
		in_root = params['input_root']
		out_root = params['output_root']
		mid = params['mid']
		all_out_fname_list = []
		all_in_fname_list = []
		
		#### Process ####
		pol_str = params['polarizations'][0]
		hr_str = params['hr'][0]
		imap_fname = in_root + hr_str + mid + pol_str + '.npy'
		imap = algebra.load(imap_fname)
		imap = algebra.make_vect(imap)
		if imap.axes != ('freq', 'ra', 'dec'):
			raise ce.DataError('AXES ERROR!')

		nmap_fname = in_root + hr_str + 'noise_inv_diag_' + pol_str + '.npy'
		nmap = algebra.load(nmap_fname)
		nmap = algebra.make_vect(nmap)

		# Noise normalization.
		normal = (nmap**2).sum()

		# Use the map from a different day.
		hr_str = params['hr'][1]
		imap_fname2 = in_root + hr_str + mid + pol_str + '.npy'
		imap2 = algebra.load(imap_fname2)
		imap2 = algebra.make_vect(imap2)
		if imap2.axes != ('freq', 'ra', 'dec'):
			raise ce.DataError('AXES ERROR!')

		nmap_fname = in_root + hr_str + 'noise_inv_diag_' + pol_str + '.npy'
		nmap2 = algebra.load(nmap_fname)
		nmap2 = algebra.make_vect(nmap2)

		# Noise normalization.
		normal2 = (nmap2**2).sum()

		normal = sqrt(normal)*sqrt(normal2)  # geometric mean of the two weights

		mapshape = np.array(imap.shape)
		#print imap.shape

		r  = self.discrete(self.fq2r(imap.get_axis('freq')))
		ra = self.discrete(imap.get_axis('ra'))*deg2rad
		de = self.discrete(imap.get_axis('dec'))*deg2rad
		ra0= ra[int(ra.shape[0]/2)]
		ra = ra - ra0
		dr = r.ptp()/r.shape[0]
		dra= ra.ptp()/ra.shape[0]
		dde= de.ptp()/de.shape[0]
		disc_n = params['discrete']
		#imap = imap.swapaxes(1,2)  # change the ra and dec
		#print imap.shape

		mapinf = [dr, dra, dde, disc_n]
		mapinf = np.array(mapinf)
		#print mapinf

		#print r	
		#print ra
		#print de

		box = algebra.info_array(sp.zeros(params['boxshape']))
		box.axes = ('x','y','z')
		box = algebra.make_vect(box)
		boxshape = np.array(box.shape)

		box2 = algebra.info_array(sp.zeros(params['boxshape']))
		box2.axes = ('x','y','z')
		box2 = algebra.make_vect(box2)
		
		xrange0 = params['Xrange'][0]
		yrange0 = params['Yrange'][0]
		zrange0 = params['Zrange'][0]
		boxunit = params['boxunit']
		shapex = params['boxshape'][2]
		shapera = ra.shape[0]
		V = params['boxunit']**3

		boxinf = [xrange0, yrange0, zrange0, boxunit]
		boxinf = np.array(boxinf)

		print "Filling the BOX"
		MakePower.Filling(imap, imap2, box, box2, r, ra, de, boxinf, mapinf)

		print "FFTing "
		fftbox = fftn(box)
		fftbox = fftbox.real**2 + fftbox.imag**2
		fftbox2= fftn(box2)
		fftbox2 = fftbox2.real**2 + fftbox2.imag**2
		# NOTE: this overwrites the first map's power spectrum with the
		# second's, so only map 2's auto-power is used below; the
		# commented alternative suggests a cross-power geometric mean
		# may have been intended.
		fftbox = fftbox2
		#fftbox = fftbox.__pow__(0.5)*fftbox2.__pow__(0.5)

		PK = np.zeros(40)
		k = np.zeros(40)
		PK2 = np.zeros(shape=(10, 10))
		k2 = np.zeros(shape=(2, 10))
		MakePower.Make(fftbox, PK, k, PK2, k2)
		kunit = 2.*pi/(boxshape[0]*boxunit)
		k = k*kunit
		k2 = k2*kunit
		PK = PK*V*params['boxshape'][0]**3/normal
		PK2 = PK2*V*params['boxshape'][0]**3/normal

		sp.save(out_root+'PK', PK)
		sp.save(out_root+'PK2', PK2)

		non0 = PK.nonzero()

		if self.plot==True:
			plt.figure(figsize=(8,8))
			#print k
			#print PK
			plt.subplot(211)
			plt.scatter(k.take(non0), PK.take(non0))
			plt.loglog()
			plt.ylim(ymin=1.e1)	
			plt.xlim(xmin=k.min())
			plt.title('Power Spectrum')
			plt.xlabel('$k$')
			plt.ylabel('$P(k) (Kelvin^{2}(h^{-1}Mpc)^3)$')

			# Convert to the dimensionless spectrum Delta^2 = k^3 P / (2 pi^2);
			# note PK was already normalized above, so the extra
			# V*N^3/1.2e12 factor re-applies a hard-coded normalization.
			PK = PK*V*params['boxshape'][0]**3/1.2e12*k*k*k/2./pi/pi
			#print PK
			plt.subplot(212)
			plt.scatter(k.take(non0), PK.take(non0))
			plt.loglog()
			plt.ylim(ymin=1.e-9)	
			plt.xlim(xmin=k.min())
			plt.xlabel('$k (h Mpc^{-1})$')
			plt.ylabel('$\Delta^2 (Kelvin^{2})$')
			#plt.show()
			plt.savefig(out_root+'power.eps', format='eps')

			PK2 = np.log10(PK2)
			plt.figure(figsize=(6,6))
			extent = (k2[0][0], k2[0][-1], k2[1][0], k2[1][-1])
			plt.imshow(PK2, origin='lower', extent = extent, interpolation='nearest')
			plt.xlabel('$k vertical (h Mpc^{-1})$')
			plt.ylabel('$k parallel (h Mpc^{-1})$')
			cb = plt.colorbar()
			cb.set_label('$lg(P^{2D}_{k_pk_v}) (Kelvin^2(h^{-1}Mpc)^3)$')
			plt.loglog()
			plt.savefig(out_root+'power2.eps', format='eps')

			#plt.show()
			print 'Finished @_@ '
		return PK
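# --- Sketch: FFT power-spectrum estimation ---
# A minimal numpy sketch of the box-FFT power-spectrum step driven by
# the compiled MakePower.Filling/MakePower.Make helpers above, whose
# internals are not shown.  The function name, binning, and the exact
# normalization convention are illustrative assumptions.
import numpy as np

def pk_isotropic(box, boxunit, nbins=40):
    """Spherically averaged P(k) of a real 3-D gridded field.

    box     : (n, n, n) real array, the gridded field.
    boxunit : comoving size of one cell (e.g. h^-1 Mpc).
    """
    n = box.shape[0]
    vcell = boxunit ** 3
    fft = np.fft.fftn(box)
    # |delta_k|^2 with a volume normalization chosen so that white
    # noise of unit variance gives a flat P(k) = vcell.
    power = (fft.real ** 2 + fft.imag ** 2) * vcell / n ** 3
    kunit = 2.0 * np.pi / (n * boxunit)
    kint = np.fft.fftfreq(n, d=1.0 / n)     # integer wavenumbers
    kx, ky, kz = np.meshgrid(kint, kint, kint, indexing='ij')
    kmag = np.sqrt(kx ** 2 + ky ** 2 + kz ** 2) * kunit
    bins = np.linspace(kmag[kmag > 0].min(), kmag.max(), nbins + 1)
    which = np.digitize(kmag.ravel(), bins)
    kcen = 0.5 * (bins[1:] + bins[:-1])
    pk = np.zeros(nbins)
    for i in range(nbins):
        sel = which == i + 1
        if sel.any():
            pk[i] = power.ravel()[sel].mean()
    return kcen, pk

# Usage on white noise: the recovered P(k) should be roughly flat.
rng = np.random.RandomState(1)
k, pk = pk_isotropic(rng.standard_normal((32, 32, 32)), boxunit=1.0)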
Example #50
0
    def execute(self, nprocesses=1):

        params = self.params
        kiyopy.utils.mkparents(params['output_root'])
        parse_ini.write_params(params,
                               params['output_root'] + 'params.ini',
                               prefix=prefix)
        guppi_result = params['Guppi_test']

        # Number of calibrator files being used.
        self.file_num = len(params['file_middles'])

        # Need to remove count for calibrator files that are not the right size.
        session_nums = sp.zeros(self.file_num)
        c = 0
        for file_middle in params['file_middles']:
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])
            Reader = core.fitsGBT.Reader(input_fname)
            n_scans = len(Reader.scan_set)
            Len_set = Reader.read(0, 0, force_tuple=True)
            session_nums[c] = file_middle.split('_')[0]
            for Data in Len_set:
                freq_num = Data.dims[3]
            if guppi_result == True:
                if n_scans != 2:
                    self.file_num -= 1
            elif guppi_result == False:
                if n_scans != 4:
                    self.file_num -= 1
            c += 1

        # Need to know the general frequency binning (going to assume that
        # it's 200 channels for guppi, 260 for the spectrometer, i.e. 1 MHz
        # binning).
        #        if guppi_result == True:
        #            freq_num = 200
        if guppi_result == False:
            #            freq_num = 260
            # There are two sets of scans per file for the spectrometer, so
            # double the number of values.
            self.file_num *= 2

        # Variable of the right size for peval to use.
        self.function = sp.zeros(3 * self.file_num + 1)
        # Parallactic angle in radians.
        self.theta = sp.zeros(3 * self.file_num + 1)
        # Measured values.
        d = sp.zeros((3 * self.file_num + 1, freq_num))

        # Loop over files to process.
        k = 0
        for file_middle in params['file_middles']:
            input_fname = (params['input_root'] + file_middle +
                           params['input_end'])
            # Read in the data, and loop over data blocks.
            Reader = core.fitsGBT.Reader(input_fname)
            # Should be 1, given that we've stitched windows for the
            # spectrometer, or by definition for guppi.
            n_IFs = len(Reader.IF_set)
            # Should be 4 for the spectrometer, 2 for guppi.
            n_scans = len(Reader.scan_set)
            OnBlocks = Reader.read(range(0, n_scans, 2), 0, force_tuple=True)
            OffBlocks = Reader.read(range(1, n_scans, 2), 0, force_tuple=True)
            # force_tuple=True makes the output of Reader.read a tuple even
            # if there is only one Block to return.
            Blocks = Reader.read(params['scans'],
                                 params['IFs'],
                                 force_tuple=True)

            # Setting labels for indices for later
            on_ind = 0
            off_ind = 1
            I_ind = 0
            Q_ind = 1
            U_ind = 2
            V_ind = 3

            # Calculate parallactic angles for the cal file.
            PA = sp.zeros(n_scans)
            m = 0
            for Data in Blocks:
                freq_len = Data.dims[3]
                Data.calc_freq()
                freq_val = Data.freq
                freq_val = freq_val / 1000000
                Data.calc_PA()
                PA[m] = ma.mean(Data.PA)
                m += 1

            if guppi_result == False:
                if n_scans == 4:  # Make sure we don't use incomplete data.
                    # The average between the on and off values.
                    self.theta[k] = 0.5 * (PA[0] + PA[1])
                    self.theta[k + 1] = 0.5 * (PA[0] + PA[1])
                    self.theta[k + 2] = 0.5 * (PA[0] + PA[1])
                    self.theta[k + 3] = 0.5 * (PA[2] + PA[3])
                    self.theta[k + 4] = 0.5 * (PA[2] + PA[3])
                    self.theta[k + 5] = 0.5 * (PA[2] + PA[3])

                    S_med_on = sp.zeros((2, freq_len, 4))
                    S_med = sp.zeros((2, freq_len, 4))

                    i = 0
                    for Data in OnBlocks:
                        S_med_on[i, :, 0] = ma.median(Data.data[:, I_ind,
                                                                off_ind, :],
                                                      axis=0)
                        S_med_on[i, :, 1] = ma.median(Data.data[:, Q_ind,
                                                                off_ind, :],
                                                      axis=0)
                        S_med_on[i, :, 2] = ma.median(Data.data[:, U_ind,
                                                                off_ind, :],
                                                      axis=0)
                        S_med_on[i, :, 3] = ma.median(Data.data[:, V_ind,
                                                                off_ind, :],
                                                      axis=0)
                        i += 1

                    j = 0
                    for Data in OffBlocks:
                        S_med[j, :, 0] = ma.median(Data.data[:, I_ind,
                                                             off_ind, :],
                                                   axis=0)
                        S_med[j, :, 1] = ma.median(Data.data[:, Q_ind,
                                                             off_ind, :],
                                                   axis=0)
                        S_med[j, :, 2] = ma.median(Data.data[:, U_ind,
                                                             off_ind, :],
                                                   axis=0)
                        S_med[j, :, 3] = ma.median(Data.data[:, V_ind,
                                                             off_ind, :],
                                                   axis=0)
                        j += 1

                    # For the first and second sets of on and off scans.
                    I_onoff_1 = S_med_on[0, :, 0] - S_med[0, :, 0]
                    I_onoff_2 = S_med_on[1, :, 0] - S_med[1, :, 0]

                    # Set the measured Stokes values for each file (used in
                    # the least-squares fit).
                    d[k, :] = (S_med_on[0, :, 1] - S_med[0, :, 1]) / I_onoff_1
                    d[k + 1, :] = (S_med_on[0, :, 2] - S_med[0, :, 2]) / I_onoff_1
                    d[k + 2, :] = (S_med_on[0, :, 3] - S_med[0, :, 3]) / I_onoff_1
                    d[k + 3, :] = (S_med_on[1, :, 1] - S_med[1, :, 1]) / I_onoff_2
                    d[k + 4, :] = (S_med_on[1, :, 2] - S_med[1, :, 2]) / I_onoff_2
                    d[k + 5, :] = (S_med_on[1, :, 3] - S_med[1, :, 3]) / I_onoff_2
                    k += 6

            # Same as above, except guppi has a single set of on and off
            # scans per file.
            if guppi_result == True:
                if n_scans == 2:
                    self.theta[k] = ma.mean(PA)
                    self.theta[k + 1] = ma.mean(PA)
                    self.theta[k + 2] = ma.mean(PA)

                    S_med_on = sp.zeros((freq_len, 4))
                    S_med = sp.zeros((freq_len, 4))

                    for Data in OnBlocks:
                        S_med_on[:, 0] = ma.median(Data.data[:, I_ind,
                                                             off_ind, :],
                                                   axis=0)
                        S_med_on[:, 1] = ma.median(Data.data[:, Q_ind,
                                                             off_ind, :],
                                                   axis=0)
                        S_med_on[:, 2] = ma.median(Data.data[:, U_ind,
                                                             off_ind, :],
                                                   axis=0)
                        S_med_on[:, 3] = ma.median(Data.data[:, V_ind,
                                                             off_ind, :],
                                                   axis=0)

                    for Data in OffBlocks:
                        S_med[:, 0] = ma.median(Data.data[:, I_ind,
                                                          off_ind, :],
                                                axis=0)
                        S_med[:, 1] = ma.median(Data.data[:, Q_ind,
                                                          off_ind, :],
                                                axis=0)
                        S_med[:, 2] = ma.median(Data.data[:, U_ind,
                                                          off_ind, :],
                                                axis=0)
                        S_med[:, 3] = ma.median(Data.data[:, V_ind,
                                                          off_ind, :],
                                                axis=0)

                    I_onoff = S_med_on[:, 0] - S_med[:, 0]

                    d[k, :] = (S_med_on[:, 1] - S_med[:, 1]) / I_onoff
                    d[k + 1, :] = (S_med_on[:, 2] - S_med[:, 2]) / I_onoff
                    d[k + 2, :] = (S_med_on[:, 3] - S_med[:, 3]) / I_onoff
                    k += 3

        # The 33 degrees here is specific to 3C286; it takes a different
        # value for different calibrators.
        d[k, :] = sp.tan(2 * 33 * sp.pi / 180)
        # The eight parameters are, in order: deltaG[0], alpha[1], psi[2],
        # phi[3], epsilon[4], Qsrc[5], Usrc[6], chi[7] => the parameter
        # vector is p.
        # Preliminary values based on guesses and Heiles' generation.
        p0 = [0.3, 90.0, 170.0, 10.0, 0.016, 0.005, 0.026, 0]
        error = sp.ones(3 * self.file_num + 1)
        # Note that error can be used to weight the equations if its
        # entries are not all set to one.

        p_val_out = sp.zeros((freq_len, 9))
        #        p_err_out = sp.zeros((freq_len, 9))

        for f in range(0, freq_len):
            plsq = leastsq(self.residuals,
                           p0,
                           args=(d, error, f),
                           full_output=0,
                           maxfev=5000,
                           factor=1)
            pval = plsq[0]  # this is the 1-d array of results
            #            perr = plsq[1] # this is a 2d array representing the estimated covariance of the results.

            # Adjust results if the angles are outside limits: wrap each
            # fitted angle into [-180, 180).
            pval[1] = (pval[1] + 180) % 360 - 180
            pval[2] = (pval[2] + 180) % 360 - 180
            pval[3] = (pval[3] + 180) % 360 - 180
            pval[7] = (pval[7] + 180) % 360 - 180

            p_val_out[f, 0] = freq_val[f]
            p_val_out[f, 1] = pval[0]
            p_val_out[f, 2] = pval[1]
            p_val_out[f, 3] = pval[2]
            p_val_out[f, 4] = pval[3]
            p_val_out[f, 5] = pval[4]
            p_val_out[f, 6] = pval[5]
            p_val_out[f, 7] = pval[6]
            p_val_out[f, 8] = pval[7]

# Get error values; note that this doesn't work if chi is set to zero.
#            p_err_out[f,0] = freq_val[f]
#            p_err_out[f,1] = perr[0,0]
#            p_err_out[f,2] = perr[1,1]
#            p_err_out[f,3] = perr[2,2]
#            p_err_out[f,4] = perr[3,3]
#            p_err_out[f,5] = perr[4,4]
#            p_err_out[f,6] = perr[5,5]
#            p_err_out[f,7] = perr[6,6]
#            p_err_out[f,8] = perr[7,7]

        np.savetxt('mueller_params_calc.txt', p_val_out, delimiter=' ')
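# --- Sketch: the per-frequency leastsq fit pattern ---
# A minimal sketch of the fitting loop above: scipy.optimize.leastsq
# minimizes a residual vector over the parameters, and fitted angles
# are wrapped back into [-180, 180) with the same modular trick.  The
# model here is a toy two-parameter sinusoid, not the 3C286 Mueller
# model (whose residuals function lives elsewhere in the class).
import numpy as np
from scipy.optimize import leastsq

def residuals(p, y, theta_deg):
    amp, phase_deg = p
    model = amp * np.sin(2 * np.deg2rad(theta_deg) + np.deg2rad(phase_deg))
    return y - model

theta = np.linspace(0.0, 180.0, 20)     # parallactic angles in degrees
rng = np.random.RandomState(2)
y = 0.05 * np.sin(2 * np.deg2rad(theta) + np.deg2rad(33.0))
y += 0.001 * rng.standard_normal(theta.size)

p0 = [0.01, 0.0]
pfit, ier = leastsq(residuals, p0, args=(y, theta), maxfev=5000)
# Wrap the fitted phase into [-180, 180), as the pipeline does.
pfit[1] = (pfit[1] + 180) % 360 - 180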
def spectrum_arithmetic(inifile, outdir="./plots/"):
    r"""Perform the operations"""
    params_init = {"pwr_a_ini": None,
                   "pwr_b_ini": None,
                   "pwr_c_ini": None,
                   "pwr_d_ini": None,
                   "mul_a": "1.",
                   "mul_b": "1.",
                   "mul_c": "1.",
                   "mul_d": "1.",
                   "output_tag": "tag identifying the output somehow"}
    prefix="sa_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_root = "%s/%s/" % (outdir, params["output_tag"])
    print output_root

    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini',
                           prefix=prefix)

    if not params["pwr_a_ini"]:
        print "ERROR: at least spectrum A must be given (and C for ratios)"
        return

    pwr_a_all = cp.wrap_batch_gbtpwrspec_data_run(params["pwr_a_ini"],
                                                  generate=False,
                                                  outdir=outdir)

    if params['pwr_b_ini']:
        pwr_b_all = cp.wrap_batch_gbtpwrspec_data_run(params["pwr_b_ini"],
                                                      generate=False,
                                                      outdir=outdir)

    if params['pwr_c_ini']:
        pwr_c_all = cp.wrap_batch_gbtpwrspec_data_run(params["pwr_c_ini"],
                                                      generate=False,
                                                      outdir=outdir)

    if params['pwr_d_ini']:
        pwr_d_all = cp.wrap_batch_gbtpwrspec_data_run(params["pwr_d_ini"],
                                                      generate=False,
                                                      outdir=outdir)

    for treatment in pwr_a_all:
        pwr_a = pwr_a_all[treatment]
        pwr_a = mul_pwrspec(pwr_a, params['mul_a'])
        pwr_numerator = copy.deepcopy(pwr_a)

        if params['pwr_b_ini']:
            pwr_b = pwr_b_all[treatment]
            pwr_b = mul_pwrspec(pwr_b, params['mul_b'])
            pwr_numerator = add_pwrspec(pwr_numerator, pwr_b)

        if params['pwr_c_ini']:
            pwr_c = pwr_c_all[treatment]
            pwr_c = mul_pwrspec(pwr_c, params['mul_c'])
            pwr_denominator = copy.deepcopy(pwr_c)

            if params['pwr_d_ini']:
                pwr_d = pwr_d_all[treatment]
                pwr_d = mul_pwrspec(pwr_d, params['mul_d'])
                pwr_denominator = add_pwrspec(pwr_denominator, pwr_d)

            pwr_final = divide_pwrspec(pwr_numerator, pwr_denominator)
        else:
            pwr_final = pwr_numerator

        filename = "%s/%s_%s.dat" % (output_root, params['output_tag'], treatment)
        outfile = open(filename, "w")
        for specdata in zip(pwr_final['bin_left'], pwr_final['bin_center'],
                            pwr_final['bin_right'], pwr_final['counts_histo'],
                            pwr_final['mean_1d'], pwr_final['std_1d']):
            outfile.write(("%10.15g " * 6 + "\n") % specdata)

        outfile.close()
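# --- Sketch: plausible pwrspec arithmetic helpers ---
# mul_pwrspec, add_pwrspec, and divide_pwrspec are imported from
# elsewhere in the package; a minimal sketch of what they might look
# like, assuming each spectrum is a dict carrying 'mean_1d' and
# 'std_1d' arrays (keys taken from the output loop above) and that
# errors combine in quadrature / by first-order propagation.
import copy
import numpy as np

def mul_pwrspec(spec, multiplier):
    out = copy.deepcopy(spec)
    factor = float(multiplier)          # ini values arrive as strings
    out['mean_1d'] = factor * np.asarray(spec['mean_1d'])
    out['std_1d'] = abs(factor) * np.asarray(spec['std_1d'])
    return out

def add_pwrspec(spec_a, spec_b):
    out = copy.deepcopy(spec_a)
    out['mean_1d'] = (np.asarray(spec_a['mean_1d']) +
                      np.asarray(spec_b['mean_1d']))
    out['std_1d'] = np.sqrt(np.asarray(spec_a['std_1d']) ** 2 +
                            np.asarray(spec_b['std_1d']) ** 2)
    return out

def divide_pwrspec(spec_num, spec_den):
    out = copy.deepcopy(spec_num)
    num = np.asarray(spec_num['mean_1d'])
    den = np.asarray(spec_den['mean_1d'])
    out['mean_1d'] = num / den
    # First-order error propagation for a ratio.
    out['std_1d'] = np.abs(out['mean_1d']) * np.sqrt(
        (np.asarray(spec_num['std_1d']) / num) ** 2 +
        (np.asarray(spec_den['std_1d']) / den) ** 2)
    return out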