def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    np.seterr(invalid='raise')

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, calculatetransfer_init,
                                      prefix=calculatetransfer_prefix)

    print self.params["powerfile_in"], "->", self.params["powerfile_out"]
    self.stats_in = h5py.File(self.params["powerfile_in"], "r")
    self.stats_out = h5py.File(self.params["powerfile_out"], "r")
    self.treatments_in = self.stats_in.keys()
    self.treatments_out = self.stats_out.keys()
    self.multiplier = self.params["input_multiplier"]

    # If this is measuring the mode-cleaning transfer function, it is taken
    # with respect to the 0modes-removed case.  Note that map+sim (with zero
    # modes removed) x sim is quite noisy, so we really do not want to use
    # the zero-mode case of the plus-sim cleaning runs to estimate this.
    # Instead, use signal-only sims for the 0modes reference.
    print "AggregateStatistics: input treatments: ", self.treatments_in
    print "AggregateStatistics: output treatments: ", self.treatments_out

    if self.treatments_in[0] != "0modes":
        print "Transfer functions must be taken with respect to 0modes"
        return
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict

    if parameter_file:
        self.params = parse_ini.parse(parameter_file,
                                      crosspowertheoryparams_init,
                                      prefix=crosspowertheoryprefix)

    print self.params
def call_phys_space_run(cube1_file, cube2_file, inifile=None):
    """Directly call the power spectral estimation on some physical volume"""
    params_init = {"unitless": True,
                   "return_3d": False,
                   "truncate": False,
                   "window": "blackman",
                   "bins": [0.00765314, 2.49977141, 35]}
    prefix = 'xs_'

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    if inifile is None:
        print "WARNING: no ini file for pwrspec estimation"

    # the bins are given as [k_min, k_max, num_bins] and expanded into
    # logarithmically-spaced bin edges
    bparam = params['bins']
    bins = np.logspace(math.log10(bparam[0]),
                       math.log10(bparam[1]),
                       num=bparam[2], endpoint=True)

    retval = pe.calculate_xspec_file(cube1_file, cube2_file, bins,
                                     weight1_file=None, weight2_file=None,
                                     truncate=params['truncate'],
                                     window=params['window'],
                                     return_3d=params['return_3d'],
                                     unitless=params['unitless'])

    return retval
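# A minimal, self-contained sketch (not from the original source) of how the
# three-element "bins" parameter above expands into log-spaced k-bin edges.
# The values mirror the defaults above; the cube file names are hypothetical.
import math
import numpy as np

bparam = [0.00765314, 2.49977141, 35]
bins = np.logspace(math.log10(bparam[0]), math.log10(bparam[1]),
                   num=bparam[2], endpoint=True)
print "%d bin edges from k=%.5f to k=%.5f" % (len(bins), bins[0], bins[-1])
# e.g. retval = call_phys_space_run("cube1.npy", "cube2.npy", inifile=None)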
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict

    if parameter_file:
        self.params = parse_ini.parse(parameter_file,
                                      autonoiseweightparams_init,
                                      prefix=autonoiseweightprefix)
def wrap_batch_single_crosspwr(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the single crosspwr calculator"""
    params_init = {"left_mapkey": "some preparation of a map, cleaned",
                   "right_simkey": "a simulation to cross it with",
                   "right_weightkey": "weight to use for that sim",
                   "multiplier": "multiply the 1D and 2D spectra",
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix = "csc_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params
    output_tag = "%s_%s" % (params['left_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root, output_tag

    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_single_crosspwr(params["left_mapkey"],
                                 params["right_simkey"],
                                 params["right_weightkey"],
                                 multiplier=params["multiplier"],
                                 inifile=params["spec_ini"],
                                 datapath_db=datapath_db,
                                 outdir=output_root,
                                 output_tag=output_tag)
def __init__(self, parameter_file_or_dict=None, feedback=2):
    # Read in the parameters.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix, feedback=feedback)
    self.feedback = feedback
    self.plot = bool(self.params['plot'])
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, autopowerparams_init,
                                      prefix=autopowerprefix)
def __init__(self, parameter_file_or_dict=None):
    # recordkeeping
    self.pairs = {}
    self.pairs_nosim = {}
    self.pairlist = []
    self.noisefiledict = {}
    self.datapath_db = dp.DataPath()

    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix)
    self.freq_list = sp.array(self.params['freq_list'], dtype=int)
    self.lags = sp.array(self.params['lags'])
    self.output_root = self.datapath_db.fetch(self.params['output_root'],
                                              intend_write=True)

    if self.params['SVD_root']:
        self.SVD_root = self.datapath_db.fetch(self.params['SVD_root'],
                                               intend_write=True)
        print "WARNING: using %s to clean (intended?)" % self.SVD_root
    else:
        self.SVD_root = self.output_root

    # Write parameter file.
    kiyopy.utils.mkparents(self.output_root)
    parse_ini.write_params(self.params, self.output_root + 'params.ini',
                           prefix=prefix)
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, physsimparams_init,
                                      prefix=physsimprefix)
def __init__(self, parameter_file_or_dict=None, feedback=2):
    # Read the parameter file and store it in the dictionary self.params.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix, feedback=feedback)
    self.feedback = feedback
def __init__(self, parameter_file_or_dict=None, feedback=2):
    # Read in the parameters.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix, feedback=feedback)
    self.feedback = feedback
def execute(pipe_file_or_dict, feedback=2):
    """Execute all the modules listed in the input file."""
    params, module_params = parse_ini.parse(pipe_file_or_dict, params_init,
                                            prefix='pipe_',
                                            return_undeclared=True,
                                            feedback=feedback)

    for module in params['modules']:
        # Module is either the python object that should be executed, or a
        # tuple whose first element is the module and whose second element
        # is a prefix replacement of the form ('p1_', 'p2_').  Before
        # executing the module, we rename all parameters beginning with
        # 'p1_' to 'p2_'.  (See the sketch after this function.)
        if isinstance(module, tuple):
            mod = module[0]
            pars = dict(module_params)
            old_prefix = module[1][0]
            n = len(old_prefix)
            new_prefix = module[1][1]
            for key, value in module_params.iteritems():
                if key[0:n] == old_prefix:
                    pars[new_prefix + key[n:]] = value
        else:
            mod = module
            pars = module_params

        if feedback > 1:
            print 'Executing analysis module: ' + str(mod)

        mod(pars, feedback=feedback).execute(params['processes'])
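# A minimal sketch (not from the original source) of the prefix-replacement
# mechanism above: parameters beginning with the old prefix are copied under
# the new prefix, so one module class can be run twice with different
# settings.  The parameter names here are hypothetical.
module_params = {'p1_input_file': 'a.npy', 'p1_niter': 3, 'other': True}
old_prefix, new_prefix = 'p1_', 'p2_'
n = len(old_prefix)

pars = dict(module_params)
for key, value in module_params.iteritems():
    if key[0:n] == old_prefix:
        pars[new_prefix + key[n:]] = value

print sorted(pars.keys())
# ['other', 'p1_input_file', 'p1_niter', 'p2_input_file', 'p2_niter']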
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, params_init,
                                      prefix=prefix)

    self.output_file = self.params['output_file']
    self.delta_temp_file = self.params['delta_temp_file']
    self.total_integration = self.params['total_integration']
    self.weight_map = algebra.make_vect(
                        algebra.load(self.params['weight_file']))
    self.max_stdev = self.params['max_stdev']

    # set the random seed
    if self.params['seed'] < 0:
        # The usual time-based seed is not fine-grained enough for parallel
        # jobs, so draw four bytes of OS entropy instead.
        randsource = open("/dev/random", "rb")
        self.seed = struct.unpack("I", randsource.read(4))[0]
        #self.seed = abs(long(outfile_physical.__hash__()))
    else:
        self.seed = self.params['seed']

    random.seed(self.seed)
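# A minimal, self-contained sketch (not from the original source) of the
# /dev/random seeding used above: four bytes of OS entropy unpacked as an
# unsigned int, so concurrent jobs launched in the same second get distinct
# seeds where a time-based seed would collide.
import random
import struct

randsource = open("/dev/random", "rb")
seed = struct.unpack("I", randsource.read(4))[0]
randsource.close()
random.seed(seed)
print "seeded with %d" % seed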
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    # recordkeeping
    self.pairs = {}
    self.pairs_parallel_track = {}
    self.pairlist = []
    self.datapath_db = dp.DataPath()

    self.params = params_dict
    if parameter_file:
        self.params = parse_ini.parse(parameter_file, params_init,
                                      prefix=prefix)

    self.freq_list = sp.array(self.params['freq_list'], dtype=int)
    self.tack_on_input = self.params['tack_on_input']
    self.output_root = self.datapath_db.fetch(
                            self.params['output_root'],
                            tack_on=self.params['tack_on_output'])
    #self.output_root = self.params['output_root']
    print "foreground cleaning writing to output root", self.output_root

    if not os.path.isdir(self.output_root):
        os.mkdir(self.output_root)

    if self.params['svd_filename'] is not None:
        self.svd_filename = self.params['svd_filename']
        print "WARNING: using %s to clean (intended?)" % self.svd_filename
    else:
        self.svd_filename = self.output_root + "/" + "SVD.hd5"

    # Write parameter file.
    parse_ini.write_params(self.params, self.output_root + 'params.ini',
                           prefix=prefix)
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    # recordkeeping
    self.pairs = {}
    self.pairs_parallel_track = {}
    self.pairlist = []
    self.datapath_db = dp.DataPath()

    self.params = params_dict
    if parameter_file:
        self.params = parse_ini.parse(parameter_file, params_init,
                                      prefix=prefix)

    self.freq_list = sp.array(self.params['freq_list'], dtype=int)
    self.tack_on_input = self.params['tack_on_input']
    self.output_root = self.datapath_db.fetch(
                            self.params['output_root'],
                            tack_on=self.params['tack_on_output'])
    #self.output_root = self.params['output_root']
    print "foreground cleaning writing to output root", self.output_root

    if not os.path.isdir(self.output_root):
        os.mkdir(self.output_root)

    if self.params['SVD_root']:
        self.SVD_root = self.datapath_db.fetch(self.params['SVD_root'],
                                               intend_write=True)
        print "WARNING: using %s to clean (intended?)" % self.SVD_root
    else:
        self.SVD_root = self.output_root

    # Write parameter file.
    parse_ini.write_params(self.params, self.output_root + 'params.ini',
                           prefix=prefix)
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    self.datapath_db = dp.DataPath()

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, singlephysicalsim_init,
                                      prefix=singlephysicalsim_prefix)
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    self.datapath_db = dp.DataPath()

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, cleanup_init,
                                      prefix=cleanup_prefix)
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    np.seterr(invalid='raise')

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, aggregatesummary_init,
                                      prefix=aggregatesummary_prefix)
def __init__(self, parameter_file_or_dict=None, feedback=2):
    # Read in the parameters.
    self.params, self.task_params = parse_ini.parse(parameter_file_or_dict,
                                                    self.params_init,
                                                    prefix=self.prefix,
                                                    return_undeclared=True,
                                                    feedback=feedback)
    self.tasks = self.params['modules']

    # set environment var
    os.environ['TL_OUTPUT'] = self.params['output_dir'] + '/'
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    self.datapath_db = dp.DataPath()

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, subtractmap_init,
                                      prefix=subtractmap_prefix)
def __init__(self, parameter_file_or_dict=None, feedback=2):
    self.feedback = feedback

    # Read in the parameters.  Parameters can be passed as a dictionary or
    # a file name.  If the input is None then all parameters revert to the
    # default.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix,
                                  checking=10 * self.feedback + 2)
def __init__(self, parameter_file_or_dict=None, feedback=2):
    # Only have the first (rank 0) node report; silence all others.
    rank = comm.Get_rank()
    if rank != 0:
        feedback = 0

    # Read in the parameters.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix, feedback=feedback)
    self.feedback = feedback
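# A minimal sketch (not from the original source) of the MPI setup assumed
# above: "comm" is presumably a module-level mpi4py communicator.  Only
# rank 0 keeps a nonzero feedback level, so log output is not duplicated
# across processes.
from mpi4py import MPI

comm = MPI.COMM_WORLD
feedback = 2
if comm.Get_rank() != 0:
    feedback = 0

if feedback > 1:
    print "only rank 0 of %d reports" % comm.Get_size()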
def __init__(self, parameter_file_or_dict=None, feedback=2):
    self.feedback = feedback

    # Add any parameters added by any class that inherits from this one.
    # (See the sketch after this function.)
    params_init = dict(base_params)
    params_init.update(self.params_init)

    # Read in the parameters.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=self.prefix, feedback=feedback)
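# A minimal sketch (not from the original source) of the base/subclass
# parameter merge above: the base class holds common defaults and each
# subclass contributes its own params_init, which wins on key collisions.
# All names here are hypothetical.
base_params = {'output_root': './out/', 'feedback': 2}

class SubTask(object):
    params_init = {'niter': 5, 'feedback': 1}

merged = dict(base_params)
merged.update(SubTask.params_init)
print merged
# {'output_root': './out/', 'niter': 5, 'feedback': 1}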
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    self.datapath_db = dp.DataPath()

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, gbtdataautopower_init,
                                      prefix=gbtdataautopower_prefix)

    self.freq_list = np.array(self.params['freq_list'], dtype=int)
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, params_init,
                                      prefix=prefix)

    self.template_key = self.params['template_key']
    self.output_key = self.params['output_key']
    self.total_integration = self.params['total_integration']
    self.scenario = self.params['scenario']
    self.refinement = self.params['refinement']
    self.multiplier = self.params['multiplier']
    self.tack_on = self.params['tack_on']

    # set the random seed
    if self.params['seed'] < 0:
        print "no seed given; generating one (are you sure?)"
        # The usual time-based seed is not fine-grained enough for parallel
        # jobs, so draw four bytes of OS entropy instead.
        randsource = open("/dev/random", "rb")
        self.seed = struct.unpack("I", randsource.read(4))[0]
        #self.seed = abs(long(outfile_physical.__hash__()))
    else:
        self.seed = self.params['seed']

    random.seed(self.seed)

    self.datapath_db = data_paths.DataPath()
    self.input_weight_maps = self.return_maplist(self.template_key,
                                                 "noise_weight")
    self.output_weight_maps = self.return_maplist(self.output_key,
                                                  "noise_weight",
                                                  tack_on=self.tack_on)
    self.output_maps = self.return_maplist(self.output_key, "clean_map",
                                           tack_on=self.tack_on)

    self.output_delta_thermal = []
    self.output_thermal = []
    for mapfile in self.output_maps:
        basename = os.path.splitext(mapfile)[0]
        self.output_delta_thermal.append(basename + "_deltat.npy")
        self.output_thermal.append(basename + "_thermal.npy")

    self.output_signal = "gaussian_signal_simulation.npy"

    print "input weight maps: ", self.input_weight_maps
    print "output weight maps: ", self.output_weight_maps

    self.output_root = os.path.dirname(self.output_weight_maps[0]) + "/"
    print "output directory: ", self.output_root

    if not os.path.isdir(self.output_root):
        os.mkdir(self.output_root)
def __init__(self, parameter_file_or_dict=None):
    # Read in the parameters.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix)

    # main derived quantities:
    self.pairs = None
    self.corr_std = None
    self.fore_pairs = None
    self.svd_info_list = None
    self.corr_final = None
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    np.seterr(invalid='raise')

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, subtractmap_init,
                                      prefix=subtractmap_prefix)

    print self.params["plussim_file"], "-", self.params["mappower_file"]
    print "writing to ", self.params["output_file"]
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    self.datapath_db = dp.DataPath()

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, crosspowersim_init,
                                      prefix=crosspowersim_prefix)

    print self.params
    self.freq_list = np.array(self.params['freq_list'], dtype=int)
def execute(pipe_file_or_dict, feedback=0):
    """Execute all the modules listed in the input file."""
    params, module_params = parse_ini.parse(pipe_file_or_dict, params_init,
                                            prefix='pipe_',
                                            return_undeclared=True,
                                            feedback=feedback)

    for module in params['modules']:
        if feedback > 1:
            print 'Executing analysis module: ' + str(module)

        module(module_params, feedback=feedback).execute(params['processes'])
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    self.datapath_db = dp.DataPath()

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, calc_mixing_init,
                                      prefix=calc_mixing_prefix)

    # expand [k_min, k_max, num_bins] into log-spaced bin edges
    bin_spec = self.params["bins"]
    self.bins = np.logspace(math.log10(bin_spec[0]),
                            math.log10(bin_spec[1]),
                            num=bin_spec[2], endpoint=True)
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict

    if parameter_file:
        self.params = parse_ini.parse(parameter_file,
                                      analyzeautopowerparams_init,
                                      prefix=analyzeautopowerprefix)

    print self.params

    self.data_auto = h5py.File(self.params["data_auto_summary"], "r")
    self.data_xspec = h5py.File(self.params["data_xspec_summary"], "r")
    self.sim_auto = h5py.File(self.params["sim_auto_summary"], "r")
    self.sim_xspec = h5py.File(self.params["sim_xspec_summary"], "r")
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, params_init,
                                      prefix=prefix)

    if not os.path.isdir(self.params['output_root']):
        os.mkdir(self.params['output_root'])

    self.refinement = self.params['refinement']
    self.scenario = self.params['scenario']
    self.template_file = self.params['template_file']
    self.output_root = self.params['output_root']

    # here we use 300 h km/s from WiggleZ for the streaming dispersion
    self.streaming_dispersion = 300. * 0.72

    #self.template_map = algebra.make_vect(
    #    algebra.load(self.template_file))
    self.datapath_db = data_paths.DataPath()
    self.template_map = self.datapath_db.fetch_multi(self.template_file)

    # determine the beam model: widths tabulated at reference frequencies
    self.beam_data = np.array([0.316148488246, 0.306805630985,
                               0.293729620792, 0.281176247549,
                               0.270856788455, 0.26745856078,
                               0.258910010848, 0.249188429031])
    self.freq_data = np.array([695, 725, 755, 785, 815, 845, 875, 905],
                              dtype=float)
    # convert the reference frequencies from MHz to Hz
    self.freq_data *= 1.0e6

    # set the random seed
    if self.params['seed'] < 0:
        # The usual time-based seed is not fine-grained enough for parallel
        # jobs, so draw four bytes of OS entropy instead.
        randsource = open("/dev/random", "rb")
        self.seed = struct.unpack("I", randsource.read(4))[0]
        #self.seed = abs(long(outfile_physical.__hash__()))
    else:
        self.seed = self.params['seed']

    random.seed(self.seed)

    # register any maps that need to be produced
    self.sim_map_phys = None
    self.sim_map = None
    self.sim_map_delta = None
    self.sim_map_optsim = None
    self.sim_map_withbeam = None
    self.sim_map_meansub = None
    self.sim_map_degrade = None
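# A hypothetical sketch (not from the original source) of evaluating a
# tabulated beam model like the one above: linearly interpolate the beam
# width onto an arbitrary frequency.  Whether the original pipeline uses
# np.interp or a dedicated beam class is an assumption here.
import numpy as np

beam_data = np.array([0.316148488246, 0.306805630985, 0.293729620792,
                      0.281176247549, 0.270856788455, 0.26745856078,
                      0.258910010848, 0.249188429031])
freq_data = np.array([695., 725., 755., 785., 815., 845., 875., 905.]) * 1.0e6

# beam width at 800 MHz, between the tabulated reference frequencies
print np.interp(800.0e6, freq_data, beam_data)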
def wrap_batch_gbtxwigglez_data_run(inifile, generate=False,
                                    outdir="./plots/"):
    r"""Wrapper to the GBT x WiggleZ calculation"""
    params_init = {"gbt_mapkey": "cleaned GBT map",
                   "wigglez_deltakey": "WiggleZ overdensity map",
                   "wigglez_mockkey": "WiggleZ overdensities from mocks",
                   "wigglez_selectionkey": "WiggleZ selection function",
                   "mode_transfer_1d_ini": "ini file -> 1d trans. function",
                   "mode_transfer_2d_ini": "ini file -> 2d trans. function",
                   "beam_transfer_ini": "ini file -> 2d beam trans. function",
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix = "cwx_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params
    output_tag = "%s_%s" % (params['gbt_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag

    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    mode_transfer_1d = None
    if params["mode_transfer_1d_ini"]:
        mode_transfer_1d = cct.wrap_batch_crosspwr_transfer(
                                params["mode_transfer_1d_ini"],
                                generate=generate, outdir=outdir)

    batch_gbtxwigglez_data_run(params["gbt_mapkey"],
                               params["wigglez_deltakey"],
                               params["wigglez_mockkey"],
                               params["wigglez_selectionkey"],
                               inifile=params["spec_ini"],
                               datapath_db=datapath_db,
                               outdir=output_root,
                               output_tag=output_tag,
                               beam_transfer=None,
                               mode_transfer_1d=mode_transfer_1d,
                               mode_transfer_2d=None,
                               theory_curve=None)
def __init__(self, parameter_file_or_dict=None, feedback=1):
    '''
    parameter_file_or_dict : this can be your XXX.pipe file or a parameter
                             dictionary.
    feedback : how much information is printed by the code.
    '''
    # Call parse_ini.parse() to initialize the parameters: all parameters
    # with the prefix 'test_' are read out and saved into the dict
    # self.params.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix, feedback=feedback)
    self.feedback = feedback
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    self.params = params_dict
    np.seterr(invalid='raise')

    if parameter_file:
        self.params = parse_ini.parse(parameter_file, calculatedatalike_init,
                                      prefix=calculatedatalike_prefix)

    print self.params["powerfile_in"], "->", self.params["powerdatalike_out"]
    self.stats_in = h5py.File(self.params["powerfile_in"], "r")
    self.treatments_in = self.stats_in["results"].keys()

    # maybe make this hd5?
    self.stats_dataout = shelve.open(self.params["powerdatalike_out"], "n")
def __init__(self, parameter_file=None, params_dict=None, feedback=0,
             make_plot=True):
    self.params = params_dict
    np.seterr(under='raise')
    self.make_plot = make_plot

    if parameter_file:
        self.params = parse_ini.parse(parameter_file,
                                      aggregatestatistics_init,
                                      prefix=aggregatestatistics_prefix)

    print "opening: ", self.params["aggfile_in"]
    self.summary = h5py.File(self.params["aggfile_in"], "r")

    # get the list of treatments
    self.treatments = self.summary["results"].keys()
    print "AggregateStatistics: treatment cases: ", self.treatments
def __init__(self, parameter_file_or_dict=None):
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix)

    self.freq_list = sp.array(self.params['freq_list'], dtype=int)
    self.lags = self.params['lags']
    self.nfreq_bin = self.params['nfreq_bin']
    #self.output_root = self.datapath_db.fetch(self.params['output_root'],
    #                                          intend_write=True)
    self.output_root = self.params['output_root']
    self.ini_root = self.params['ini_root']

    # Write parameter file.
    kiyopy.utils.mkparents(self.ini_root)
    parse_ini.write_params(self.params, self.ini_root + 'params.ini',
                           prefix=prefix)
def __init__(self, parameter_file=None, params_dict=None, feedback=0):
    # recordkeeping
    self.pairs = {}
    self.pairs_parallel_track = {}
    self.pairlist = []
    self.datapath_db = dp.DataPath()

    self.params = params_dict
    if parameter_file:
        self.params = parse_ini.parse(parameter_file, params_init,
                                      prefix=prefix)

    self.freq_list1 = sp.array(self.params['freq_list1'], dtype=int)

    # if no second frequency list is given, use the first for both sides
    if len(self.params['freq_list2']) == 0:
        self.freq_list2 = self.freq_list1
    else:
        self.freq_list2 = sp.array(self.params['freq_list2'], dtype=int)
def __init__(self, parameter_file_or_dict=None, feedback=2):
    # Merge the params of all the super classes, walking the MRO in reverse
    # order so that subclasses override their bases.
    mro = inspect.getmro(self.__class__)
    all_params = {}
    for cls in mro[-1::-1]:
        try:
            cls_params = cls.params_init
        except AttributeError:
            continue

        all_params.update(cls_params)

    # Read in the parameters.
    self.params = parse_ini.parse(parameter_file_or_dict, all_params,
                                  prefix=self.prefix, feedback=feedback)

    # setup the pipeline
    self._pipeline_setup()
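# A minimal sketch (not from the original source) of the MRO-based parameter
# merge above: each class in the hierarchy declares params_init, and walking
# the MRO in reverse lets Derived override Base.  All names are hypothetical.
import inspect

class Base(object):
    params_init = {'output_root': './out/', 'niter': 1}

class Derived(Base):
    params_init = {'niter': 10}

all_params = {}
for cls in inspect.getmro(Derived)[-1::-1]:
    try:
        all_params.update(cls.params_init)
    except AttributeError:
        continue  # e.g. object itself declares no params_init

print all_params
# {'output_root': './out/', 'niter': 10}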
def wrap_batch_gbtpwrspec_data_run(inifile, generate=False,
                                   outdir="./plots/"):
    r"""Wrapper to the GBT x GBT calculation"""
    params_init = {"gbt_mapkey": "cleaned GBT map",
                   "mode_transfer_1d_ini": "ini file -> 1d trans. function",
                   "mode_transfer_2d_ini": "ini file -> 2d trans. function",
                   "beam_transfer_ini": "ini file -> 2d beam trans. function",
                   "square_1dmodetrans": False,
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix = "cp_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params
    output_tag = "%s_%s" % (params['gbt_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag

    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    mode_transfer_1d = None
    if params["mode_transfer_1d_ini"]:
        mode_transfer_1d = cct.wrap_batch_crosspwr_transfer(
                                params["mode_transfer_1d_ini"],
                                generate=generate, outdir=outdir)

    return batch_gbtpwrspec_data_run(
                params["gbt_mapkey"],
                inifile=params["spec_ini"],
                datapath_db=datapath_db,
                outdir=output_root,
                output_tag=output_tag,
                beam_transfer=None,
                square_1dmodetrans=params["square_1dmodetrans"],
                mode_transfer_1d=mode_transfer_1d,
                mode_transfer_2d=None)
def __init__(self, parameter_file_or_dict=None, feedback=2):
    # Read the parameter file and store it in the dictionary self.params.
    self.params = parse_ini.parse(parameter_file_or_dict, params_init,
                                  prefix=prefix, feedback=feedback)
    self.feedback = feedback

    # Read in the map files.
    map_fnames_start = (self.params['map_input_root']
                        + self.params['map_type'])
    self.maps = []
    for band in self.params['map_bands']:
        this_band_maps = []
        for pol in self.params['map_polarizations']:
            map_file_name = (map_fnames_start + pol + '_'
                             + str(band) + '.npy')
            # renamed from `map` to avoid shadowing the builtin
            this_map = algebra.make_vect(algebra.load(map_file_name))
            this_band_maps.append(this_map)

        self.maps.append(this_band_maps)
def repair_shelve_files(batch_param, ini_prefix, params_default, param_prefix):
    """Add missing information to shelves"""
    filelist = make_shelve_names(batch_param)

    for (index, filename, multiplier, cross_power) in filelist:
        print "repairing: " + filename
        directory = "/".join(filename.split("/")[0:-1]) + "/"
        run_index = re.findall(r'\d+', index)[0]
        ini_file = directory + ini_prefix + run_index + ".ini"
        print ini_file

        params = parse_ini.parse(ini_file, params_default,
                                 prefix=param_prefix, feedback=10)

        radio_file1 = params['radio_root1'] + params['radio_data_file1']
        map_radio1 = algebra.make_vect(algebra.load(radio_file1))

        corr_data = shelve.open(filename + ".shelve")
        corr_data["params"] = params
        corr_data["freq_axis"] = map_radio1.get_axis('freq')
        corr_data.close()
def call_xspec_run(map1_key, map2_key, noiseinv1_key, noiseinv2_key,
                   inifile=None):
    r"""a free-standing function which calls the xspec analysis"""
    params_init = {"unitless": True,
                   "return_3d": False,
                   "truncate": False,
                   "window": None,
                   "refinement": 2,
                   "pad": 5,
                   "order": 2,
                   "freq_list": tuple(range(256)),
                   "bins": [0.00765314, 2.49977141, 35]}
    prefix = 'xs_'

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    if inifile is None:
        print "WARNING: no ini file for pwrspec estimation"

    # initialize and calculate the xspec
    simpair = mp.MapPair(map1_key, map2_key,
                         noiseinv1_key, noiseinv2_key,
                         params['freq_list'], avoid_db=True)

    bparam = params['bins']
    bins = np.logspace(math.log10(bparam[0]),
                       math.log10(bparam[1]),
                       num=bparam[2], endpoint=True)

    retval = simpair.pwrspec_summary(window=params['window'],
                                     unitless=params['unitless'],
                                     bins=bins,
                                     truncate=params['truncate'],
                                     refinement=params['refinement'],
                                     pad=params['pad'],
                                     order=params['order'],
                                     return_3d=params['return_3d'])

    return retval
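# A hypothetical usage sketch (not from the original source): the database
# keys below are made up for illustration, and with inifile=None every
# parameter falls back to the params_init defaults declared above.
retval = call_xspec_run("GBT_15hr_map_cleaned",
                        "sim_15hr_signal",
                        "GBT_15hr_noise_inv",
                        "GBT_15hr_noise_inv",
                        inifile=None)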
def wrap_batch_crosspwr_transfer(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the transfer function calculator"""
    params_init = {"cleaned_simkey": "cleaned sims for transfer func",
                   "truesignal_simkey": "pure signal",
                   "truesignal_weightkey": "weight to use for pure signal",
                   "reference_simkey": "reference signal",
                   "reference_weightkey": "weight to use for reference signal",
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix = "cct_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params
    output_tag = "%s_%s" % (params['cleaned_simkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag

    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_crosspwr_transfer(params["cleaned_simkey"],
                                   params["truesignal_simkey"],
                                   params["truesignal_weightkey"],
                                   params["reference_simkey"],
                                   params["reference_weightkey"],
                                   inifile=params["spec_ini"],
                                   datapath_db=datapath_db,
                                   outdir=output_root,
                                   output_tag=output_tag)
def execute(pipe_file_or_dict, feedback=2):
    """Execute all the modules listed in the input file."""
    # drop a flag file indicating that this pipeline is running
    busy_filename = "/tmp/pipeline.%s.%s.busy" % (os.getpid(),
                                                  getpass.getuser())
    print "flagging running pipeline with %s" % busy_filename
    busyfile = open(busy_filename, "w")
    busyfile.write("%10.15f" % time.time())
    busyfile.close()

    params, module_params = parse_ini.parse(pipe_file_or_dict, params_init,
                                            prefix='pipe_',
                                            return_undeclared=True,
                                            feedback=feedback)

    for module in params['modules']:
        # Module is either the python object that should be executed, or a
        # tuple whose first element is the module and whose second element
        # is a prefix replacement of the form ('p1_', 'p2_').  Before
        # executing the module, we rename all parameters beginning with
        # 'p1_' to 'p2_'.
        if isinstance(module, tuple):
            mod = module[0]
            pars = dict(module_params)
            old_prefix = module[1][0]
            n = len(old_prefix)
            new_prefix = module[1][1]
            for key, value in module_params.iteritems():
                if key[0:n] == old_prefix:
                    pars[new_prefix + key[n:]] = value
        else:
            mod = module
            pars = module_params

        if feedback > 1:
            print 'Executing analysis module: ' + str(mod)

        mod(pars, feedback=feedback).execute(params['processes'])

    # now remove the run-indicator flag
    os.remove(busy_filename)