def wrap_batch_physical_sim_run(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the physical sim processing
    """
    params_init = {
        "simkey": "point to the sims to run",
        "spec_ini": "ini file for the spectral estimation",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "csp_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['simkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_physical_sim_run(params["simkey"],
                                  inifile=params["spec_ini"],
                                  datapath_db=datapath_db,
                                  outdir=output_root,
                                  output_tag=output_tag)
def generate_windows(window="blackman"):
    datapath_db = data_paths.DataPath()
    # first generate a window for the full physical volume
    filename = datapath_db.fetch('simideal_15hr_physical', intend_read=True,
                                 pick='1')
    print filename
    pcube = algebra.make_vect(algebra.load(filename))
    pwindow = algebra.make_vect(fftutil.window_nd(pcube.shape, name=window),
                                axis_names=('freq', 'ra', 'dec'))
    pwindow.copy_axis_info(pcube)
    print pwindow.shape
    algebra.save("physical_window.npy", pwindow)

    # now generate one for the observed region and project onto the physical
    # volume.
    filename = datapath_db.fetch('simideal_15hr_beam', intend_read=True,
                                 pick='1')
    print filename
    ocube = algebra.make_vect(algebra.load(filename))
    owindow = algebra.make_vect(fftutil.window_nd(ocube.shape, name=window),
                                axis_names=('freq', 'ra', 'dec'))
    owindow.copy_axis_info(ocube)
    print owindow.shape
    print owindow.axes
    algebra.save("observed_window.npy", owindow)
    pwindow = physical_gridding.physical_grid(owindow, refinement=2)
    print pwindow.shape
    algebra.save("observed_window_physreg.npy", pwindow)
Example #3
def call_data_autopower(basemaps,
                        treatments,
                        inifile=None,
                        generate=False,
                        outdir="./plots/",
                        mode_transfer_1d=None,
                        mode_transfer_2d=None,
                        beam_transfer=None,
                        alttag=None):
    r"""Call a chunk of batch data runs for e.g. different map products
    """
    datapath_db = data_paths.DataPath()

    for base in basemaps:
        for treatment in treatments:
            mapname = base + treatment
            output_tag = mapname
            if alttag:
                output_tag += "_" + alttag

            output_root = "%s/%s/" % (outdir, output_tag)

            if generate:
                output_tag = None

            batch_gbtpwrspec_data_run(mapname,
                                      inifile=inifile,
                                      datapath_db=datapath_db,
                                      output_tag=output_tag,
                                      outdir=output_root,
                                      beam_transfer=beam_transfer,
                                      mode_transfer_1d=mode_transfer_1d,
                                      mode_transfer_2d=mode_transfer_2d)
    def __init__(self, parameter_file_or_dict=None):
        # recordkeeping
        self.pairs = {}
        self.pairs_nosim = {}
        self.pairlist = []
        self.noisefiledict = {}
        self.datapath_db = dp.DataPath()

        self.params = parse_ini.parse(parameter_file_or_dict,
                                      params_init,
                                      prefix=prefix)

        self.freq_list = sp.array(self.params['freq_list'], dtype=int)
        self.lags = sp.array(self.params['lags'])
        self.output_root = self.datapath_db.fetch(self.params['output_root'],
                                                  intend_write=True)

        if self.params['SVD_root']:
            self.SVD_root = self.datapath_db.fetch(self.params['SVD_root'],
                                                   intend_write=True)
            print "WARNING: using %s to clean (intended?)" % self.SVD_root
        else:
            self.SVD_root = self.output_root

        # Write parameter file.
        kiyopy.utils.mkparents(self.output_root)
        parse_ini.write_params(self.params,
                               self.output_root + 'params.ini',
                               prefix=prefix)
def find_wigglez_region(fieldname):
    """Find the RA/Dec edges of a WiggleZ field by exhaustion
    """
    datapath_db = data_paths.DataPath()

    db_key = "WiggleZ_%s_mock_catalog" % fieldname
    infile_mock = datapath_db.fetch(db_key, intend_read=True,
                                    purpose="WiggleZ mock catalog",
                                    silent=True)

    n_rand_cats = len(infile_mock[0])

    ra_minmax = [None, None]
    dec_minmax = [None, None]
    freq_minmax = [None, None]

    ndtype = [('RA', float), ('Dec', float), ('z', float),
              ('r-mag', float), ('ijack', int), ('sec', int)]
    for index in infile_mock[0]:
        filename = infile_mock[1][index]
        #print "loading: " + filename
        catalog = np.genfromtxt(filename, dtype=ndtype,
                                skiprows=1)
        freq_vec = cc.freq_21cm_MHz * 1.e6 / (1 + catalog['z'])
        ra_vec = catalog['RA']
        dec_vec = catalog['Dec']

        ra_minmax = minmax(ra_vec, ra_minmax)
        dec_minmax = minmax(dec_vec, dec_minmax)
        freq_minmax = minmax(freq_vec, freq_minmax)
        #print ra_vec.min(), ra_vec.max(), ra_minmax
        #print dec_vec.min(), dec_vec.max(), dec_minmax
        #print freq_vec.min(), freq_vec.max(), freq_minmax

    return ra_minmax, dec_minmax, freq_minmax
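
The minmax helper is defined elsewhere; a sketch consistent with its call signature here, merging the extrema of a vector into a running [min, max] pair that starts as [None, None]:

def minmax(vec, current):
    # fold the extrema of vec into the running [min, max] pair
    lo = vec.min() if current[0] is None else min(current[0], vec.min())
    hi = vec.max() if current[1] is None else max(current[1], vec.max())
    return [lo, hi]
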
Example #6
def combine_maps(source_key, combined_key,
                 signal='map', weight='noise_inv', divider=";",
                 fullcov=False, batchsim=None):
    r"""
    `source_key` is the file db key for the maps to combine
    `combined_key` is the file db key for the combined maps
    `signal` is the tag in the file db entry for the signal maps
    `weight` is the tag in the file db entry for the N^-1 weights
    `fullcov` uses a memory map for large N^-1 and pulls the diagonal
    `divider` is the token that divides the map section name
            from the data type e.g. "A_with_B;noise_inv"
    """
    datapath_db = data_paths.DataPath()
    input_fdb = datapath_db.fetch(source_key, intend_read=True,
                                   silent=True)
    output_fdb = datapath_db.fetch(combined_key, intend_write=True,
                                   silent=True)
    (input_fdict, output_fdict) = (input_fdb[1], output_fdb[1])

    # determine the number of foreground cleaning options (modes removed)
    input_map_cases = datapath_db.fileset_cases(source_key,
                      "pair;type;treatment", divider=divider)
    if batchsim:
        output_map_cases = datapath_db.fileset_cases(combined_key,
                          "type;treatment;simnum", divider=divider)
    else:
        output_map_cases = datapath_db.fileset_cases(combined_key,
                          "type;treatment", divider=divider)

    if input_map_cases['treatment'] != output_map_cases['treatment']:
        print "the source map does not match the requested combined map output"
        sys.exit()

    print input_map_cases['pair'], input_map_cases['treatment'], \
          output_map_cases['type']

    for treatment in input_map_cases['treatment']:
        inputmap_dict = {}
        inputweight_dict = {}
        for split in input_map_cases['pair']:
            mapkey = "%s;%s;%s" % (split, signal, treatment)
            weightkey = "%s;%s;%s" % (split, weight, treatment)
            inputmap_dict[split] = input_fdict[mapkey]
            inputweight_dict[split] = input_fdict[weightkey]

        output_dict = {}
        for product in output_map_cases['type']:
            if batchsim:
                mapkey = "%s;%s;%s" % (product, treatment, batchsim)
            else:
                mapkey = "%s;%s" % (product, treatment)
            output_dict[product] = output_fdict[mapkey]

        #print "-"*80
        #print inputmap_dict
        #print inputweight_dict
        #print output_dict

        combine_maps_driver(inputmap_dict, inputweight_dict, output_dict,
                            fullcov=fullcov, datapath_db=datapath_db)
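
combine_maps_driver is defined elsewhere; a sketch of the inverse-variance combination it presumably performs, assuming the weights are diagonal N^-1 maps on a common grid:

import numpy as np

def combine_maps_sketch(maps, weights):
    # inverse-variance weighted mean: sum_i w_i m_i / sum_i w_i
    wsum = np.sum(weights, axis=0)
    combined = np.sum([w * m for (w, m) in zip(weights, maps)], axis=0)
    # leave zero-coverage pixels at zero rather than dividing by zero
    nonzero = wsum > 0.
    combined[nonzero] /= wsum[nonzero]
    return combined, wsum
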
Example #7
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        # recordkeeping
        self.pairs = {}
        self.pairs_parallel_track = {}
        self.pairlist = []
        self.datapath_db = dp.DataPath()

        self.params = params_dict
        if parameter_file:
            self.params = parse_ini.parse(parameter_file,
                                          params_init,
                                          prefix=prefix)

        self.freq_list = sp.array(self.params['freq_list'], dtype=int)
        self.tack_on_input = self.params['tack_on_input']
        self.output_root = self.datapath_db.fetch(
            self.params['output_root'], tack_on=self.params['tack_on_output'])

        #self.output_root = self.params['output_root']
        print "foreground cleaning writing to output root", self.output_root

        if not os.path.isdir(self.output_root):
            os.mkdir(self.output_root)

        if self.params['svd_filename'] is not None:
            self.svd_filename = self.params['svd_filename']
            print "WARNING: using %s to clean (intended?)" % self.svd_filename
        else:
            self.svd_filename = self.output_root + "/" + "SVD.hd5"

        # Write parameter file.
        parse_ini.write_params(self.params,
                               self.output_root + 'params.ini',
                               prefix=prefix)
Example #8
def find_avg_fsky(map_key, tack_on=None, refinement=2, pad=5, order=1):
    """Take all the pairs that enter the autopower, open their weight files and
    find the fsky for each data treatment
    In a pipeline, call as:
        fsky = find_avg_fsky(self.params["map_key"],
                            tack_on=self.params["tack_on"],
                            refinement=self.params["refinement"],
                            pad=self.params["pad"],
                            order=self.params["order"])
    """
    fsky = {}
    datapath_db = dp.DataPath()

    map_cases = datapath_db.fileset_cases(map_key, "pair;type;treatment")

    # This code follows, essentially verbatim, the pair permutation used in
    # the real autopower.
    unique_pairs = dp.GBTauto_cross_pairs(map_cases['pair'],
                                          map_cases['pair'],
                                          cross_sym="_with_")

    treatment_list = map_cases['treatment']

    for treatment in treatment_list:
        for item in unique_pairs:
            dbkeydict = {}
            mapset0 = (map_key, item[0], treatment)
            mapset1 = (map_key, item[1], treatment)
            dbkeydict['noiseinv1_key'] = "%s:%s;noise_inv;%s" % mapset0
            dbkeydict['noiseinv2_key'] = "%s:%s;noise_inv;%s" % mapset1
            files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                                     datapath_db=datapath_db,
                                                     tack_on=tack_on)

            print files['noiseinv1_key'], files['noiseinv2_key']
            weight1 = algebra.make_vect(algebra.load(files['noiseinv1_key']))
            weight2 = algebra.make_vect(algebra.load(files['noiseinv2_key']))

            physweight1 = bh.repackage_kiyo(
                pg.physical_grid(weight1,
                                 refinement=refinement,
                                 pad=pad,
                                 order=order))

            physweight2 = bh.repackage_kiyo(
                pg.physical_grid(weight2,
                                 refinement=refinement,
                                 pad=pad,
                                 order=order))

            #fsky = np.sum(physweight1 * physweight2)**2
            #fsky /= np.sum(physweight1**2 * physweight2**2)
            #fsky /= float(physweight1.size)
            # record one estimate per treatment (within a treatment, each
            # unique pair overwrites the previous estimate)
            fsky[treatment] = np.sum(weight1 * weight2)**2
            fsky[treatment] /= np.sum(weight1**2 * weight2**2)
            fsky[treatment] /= float(weight1.size)
            print "volume factor in noise weight: ", fsky[treatment]

    return fsky
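
The estimator above is the effective volume fraction of the weights, N_eff/N = (sum w1*w2)**2 / (N * sum (w1*w2)**2). A toy check with uniform weight over a quarter of the volume:

import numpy as np

w = np.zeros(1000)
w[:250] = 1.
fsky_toy = np.sum(w * w)**2 / np.sum(w**2 * w**2) / float(w.size)
print fsky_toy    # 0.25, matching the 25 percent coverage
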
def generate_aux_simset(params, silent=False, datapath_db=None):

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    weightfile = datapath_db.fetch(params['weight_key'],
                                   intend_read=True,
                                   purpose="weight map",
                                   silent=silent)

    input_rawsimset = datapath_db.fetch(params['sim_key'],
                                        intend_read=True,
                                        silent=silent)

    output_deltasimset = datapath_db.fetch(params['sim_delta_key'],
                                           intend_write=True,
                                           silent=silent)

    input_beamsimset = datapath_db.fetch(params['sim_beam_key'],
                                         intend_read=True,
                                         silent=silent)

    output_meansubsimset = datapath_db.fetch(params['sim_beam_meansub_key'],
                                             intend_write=True,
                                             silent=silent)

    output_convsimset = datapath_db.fetch(params['sim_beam_conv_key'],
                                          intend_write=True,
                                          silent=silent)

    output_meansubconvsimset = datapath_db.fetch(
        params['sim_beam_meansubconv_key'], intend_write=True, silent=silent)

    # TODO: actually implement the foreground simulations
    output_fgsimset = datapath_db.fetch(params['sim_beam_plus_fg_key'],
                                        intend_write=True,
                                        silent=silent)

    for index in input_rawsimset[0]:
        sg.generate_delta_sim(input_rawsimset[1][index],
                              output_deltasimset[1][index])

        sg.generate_proc_sim(input_beamsimset[1][index],
                             weightfile,
                             output_meansubsimset[1][index],
                             meansub=True,
                             degrade=False)

        sg.generate_proc_sim(input_beamsimset[1][index],
                             weightfile,
                             output_convsimset[1][index],
                             meansub=False,
                             degrade=True)

        sg.generate_proc_sim(input_beamsimset[1][index],
                             weightfile,
                             output_meansubconvsimset[1][index],
                             meansub=True,
                             degrade=True)
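
sg.generate_delta_sim is defined elsewhere; a sketch of the conventional temperature-to-overdensity conversion it plausibly performs (assumption: divide each frequency slice by its mean and subtract one, with algebra imported as in the snippets above):

import numpy as np

def generate_delta_sim_sketch(input_file, output_file):
    # delta = T / <T> - 1, with the mean taken per frequency slice
    simmap = algebra.make_vect(algebra.load(input_file))
    mean_per_slice = np.mean(np.mean(simmap, axis=1), axis=1)
    simmap /= mean_per_slice[:, np.newaxis, np.newaxis]
    simmap -= 1.
    algebra.save(output_file, simmap)
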
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        self.params = params_dict
        self.datapath_db = dp.DataPath()

        if parameter_file:
            self.params = parse_ini.parse(parameter_file,
                                          subtractmap_init,
                                          prefix=subtractmap_prefix)
Example #11
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        self.params = params_dict
        if parameter_file:
            self.params = parse_ini.parse(parameter_file, params_init,
                                          prefix=prefix)

        self.template_key = self.params['template_key']
        self.output_key = self.params['output_key']
        self.total_integration = self.params['total_integration']
        self.scenario = self.params['scenario']
        self.refinement = self.params['refinement']
        self.multiplier = self.params['multiplier']
        self.tack_on = self.params['tack_on']

        # set the random seed
        if (self.params['seed'] < 0):
            print "no seed given; generating one (are you sure?)"
            # The usual seed is not fine enough for parallel jobs
            randsource = open("/dev/random", "rb")
            self.seed = struct.unpack("I", randsource.read(4))[0]
            #self.seed = abs(long(outfile_physical.__hash__()))
        else:
            self.seed = self.params['seed']

        random.seed(self.seed)

        self.datapath_db = data_paths.DataPath()

        self.input_weight_maps = self.return_maplist(self.template_key,
                                                     "noise_weight")

        self.output_weight_maps = self.return_maplist(self.output_key,
                                                      "noise_weight",
                                                      tack_on=self.tack_on)

        self.output_maps = self.return_maplist(self.output_key,
                                               "clean_map",
                                               tack_on=self.tack_on)

        self.output_delta_thermal = []
        self.output_thermal = []
        for mapfile in self.output_maps:
            basename = os.path.splitext(mapfile)[0]
            self.output_delta_thermal.append(basename + "_deltat.npy")
            self.output_thermal.append(basename + "_thermal.npy")

        self.output_signal = "gaussian_signal_simulation.npy"

        print "input weight maps: ", self.input_weight_maps
        print "output weight maps: ", self.output_weight_maps
        self.output_root = os.path.dirname(self.output_weight_maps[0])
        self.output_root += "/"
        print "output directory: ", self.output_root
        if not os.path.isdir(self.output_root):
            os.mkdir(self.output_root)
def batch_physical_sim_run(sim_key,
                           inifile=None,
                           datapath_db=None,
                           output_tag=None,
                           outdir="./plots/"):
    """Test the power spectral estimator using simulations"""
    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    mock_cases = datapath_db.fileset_cases(sim_key, "realization")

    funcname = "correlate.batch_quadratic.call_phys_space_run"
    generate = not output_tag
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname,
                                        cache_path,
                                        generate=generate,
                                        verbose=True)

    if output_tag:
        output_root = "%s/%s/" % (outdir, output_tag)
        file_tools.mkparents(output_root)

    pwr_1d = []
    pwr_1d_from_2d = []
    pwr_2d = []
    for index in mock_cases['realization']:
        mapfile = datapath_db.fetch("%s:%s" % (sim_key, index))

        pwrspec_out = caller.execute(mapfile, mapfile, inifile=inifile)

        if output_tag:
            pwr_1d_from_2d.append(
                pe.convert_2d_to_1d(pwrspec_out[0], transfer=None))

            pwr_2d.append(pwrspec_out[0])
            pwr_1d.append(pwrspec_out[1])

    if output_tag:
        pe.summarize_agg_pwrspec(pwr_1d,
                                 pwr_1d_from_2d,
                                 pwr_2d,
                                 output_tag,
                                 outdir=output_root)

        retval = (pwr_1d, pwr_1d_from_2d, pwr_2d)
    else:
        caller.multiprocess_stack()
        retval = None

    return retval
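
The MemoizeBatch usage above is a two-phase pattern: when no output_tag is given, execute() only queues the requested calls and multiprocess_stack() runs them to fill the cache; when an output_tag is given, execute() returns results from the cache. A usage sketch under those assumptions (mapfiles is hypothetical):

# phase 1: queue the jobs and run them in parallel to fill the cache
caller = batch_handler.MemoizeBatch(funcname, cache_path,
                                    generate=True, verbose=True)
for mapfile in mapfiles:
    caller.execute(mapfile, mapfile, inifile=inifile)
caller.multiprocess_stack()

# phase 2: the same calls now return the cached power spectra
caller = batch_handler.MemoizeBatch(funcname, cache_path, verbose=True)
results = [caller.execute(mapfile, mapfile, inifile=inifile)
           for mapfile in mapfiles]
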
def wigglez_xspec(inifile):
    r"""required parameters:
    general:
    `inifile`: .ini file for the power spectrum estimator (bins, etc.) fixed
    `outdir`: place to dump plot data

    for the cross power:
    `selection_function`
    `crosspwr_tag`: unique tag (on top of the input data name) for this run

    for the transfer function:
    `cleaned_simkey`
    `truesignal_simkey`
    `truesignal_weightkey`
    `reference_simkey`
    `reference_weightkey`
    `crosspwr_trans_tag`: unique tag (on top of the input data name) for this run
    `generate`: if True, regenerate the cache for this quantity
    the transfer function is applied as:

    cleaned_simkey(map) * cleaned_simkey(weight) x
    truesignal_weightkey * truesignal_simkey
    divided by:
    reference_simkey * reference_weightkey x truesignal_weightkey * truesignal_simkey

    """
    datapath_db = data_paths.DataPath()

    # single-map run; the tag and the transfer functions are set here
    mapname = "GBT_15hr_map_fdgcal_cleaned_noconv_combined"
    wigglez_map_key = "WiggleZ_15hr_binned_delta"
    wigglez_mock_key = "WiggleZ_15hr_delta_mock"
    wigglez_selection_key = "WiggleZ_15hr_montecarlo"

    # tack "_xWigglez" onto the map name to form the output tag;
    # assumption: no beam or mode transfer functions are applied in this run
    output_tag = mapname + "_xWigglez"
    beam_transfer = None
    mode_transfer_1d = None
    mode_transfer_2d = None

    cwx.batch_gbtxwigglez_data_run(mapname,
                                   wigglez_map_key,
                                   wigglez_mock_key,
                                   wigglez_selection_key,
                                   inifile=inifile,
                                   datapath_db=datapath_db,
                                   outdir="./plots/",
                                   output_tag=output_tag,
                                   beam_transfer=beam_transfer,
                                   mode_transfer_1d=mode_transfer_1d,
                                   mode_transfer_2d=mode_transfer_2d,
                                   theory_curve=None)
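
The transfer function described in the docstring is a per-k-bin ratio used to debias the measured cross-power; a minimal sketch with binned 1-d spectra as numpy arrays (names hypothetical):

def crosspwr_transfer_sketch(pwr_cleaned_x_true, pwr_reference_x_true,
                             pwr_data_measured):
    # T(k) = P_{cleaned x true}(k) / P_{reference x true}(k)
    transfer = pwr_cleaned_x_true / pwr_reference_x_true
    # divide the measurement by T(k) to undo signal lost to cleaning
    return pwr_data_measured / transfer
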
Example #14
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        self.params = params_dict
        if parameter_file:
            self.params = parse_ini.parse(parameter_file,
                                          params_init,
                                          prefix=prefix)

        if not os.path.isdir(self.params['output_root']):
            os.mkdir(self.params['output_root'])

        self.refinement = self.params['refinement']
        self.scenario = self.params['scenario']
        self.template_file = self.params['template_file']
        self.output_root = self.params['output_root']
        # here we use 300 h km/s from WiggleZ for streaming dispersion
        self.streaming_dispersion = 300. * 0.72

        #self.template_map = algebra.make_vect(
        #                        algebra.load(self.template_file))
        self.datapath_db = data_paths.DataPath()
        self.template_map = self.datapath_db.fetch_multi(self.template_file)

        # determine the beam model
        self.beam_data = np.array([
            0.316148488246, 0.306805630985, 0.293729620792, 0.281176247549,
            0.270856788455, 0.26745856078, 0.258910010848, 0.249188429031
        ])

        self.freq_data = np.array([695, 725, 755, 785, 815, 845, 875, 905],
                                  dtype=float)
        self.freq_data *= 1.0e6

        # set the random seed
        if (self.params['seed'] < 0):
            # The usual seed is not fine enough for parallel jobs
            randsource = open("/dev/random", "rb")
            self.seed = struct.unpack("I", randsource.read(4))[0]
            #self.seed = abs(long(outfile_physical.__hash__()))
        else:
            self.seed = self.params['seed']

        random.seed(self.seed)

        # register any maps that need to be produced
        self.sim_map_phys = None
        self.sim_map = None
        self.sim_map_delta = None
        self.sim_map_optsim = None
        self.sim_map_withbeam = None
        self.sim_map_meansub = None
        self.sim_map_degrade = None
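
The beam_data/freq_data arrays above tabulate the beam width at eight frequencies; evaluating the beam at a map's frequency axis then reduces to interpolation, e.g. (a sketch; the production code may instead fit a smooth model):

import numpy as np

freq_data = np.array([695., 725., 755., 785., 815., 845., 875., 905.]) * 1.e6
beam_data = np.array([0.316148488246, 0.306805630985, 0.293729620792,
                      0.281176247549, 0.270856788455, 0.26745856078,
                      0.258910010848, 0.249188429031])

# beam width evaluated on a hypothetical 64-channel frequency axis
map_freqs = np.linspace(700.e6, 900.e6, 64)
beam_at_map_freqs = np.interp(map_freqs, freq_data, beam_data)
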
Example #15
def process_mode_files(clean_path, outpath, n_modes=20, average=False):
    if not os.path.isdir(outpath):
        os.mkdir(outpath)

    datapath_db = dp.DataPath()
    output_root = datapath_db.fetch(clean_path)
    filename = output_root + "/" + "SVD.hd5"
    print filename
    svd_data = h5py.File(filename, "r")

    compilation = {}
    for fieldname in svd_data:
        acc = []
        if average:
            for pair in svd_data[fieldname]:
                acc.append(svd_data[fieldname][pair].value)
        else:
            pair0 = svd_data[fieldname].keys()[0]
            acc.append(svd_data[fieldname][pair0].value)

        compilation[fieldname] = np.mean(np.dstack(acc), axis=-1)

        print fieldname, compilation[fieldname].shape

        # handle the eigenvalues
        if "val" in fieldname:
            filename = outpath + "/" + fieldname + ".dat"
            print "writing ", filename
            fileobj = open(filename, "w")
            #data = compilation[fieldname][0]
            data = np.abs(np.sort(-compilation[fieldname][0]))
            data /= data[0]
            for index in range(data.shape[0]):
                fileobj.write("%10.15g\n" % data[index])

            fileobj.close()

        # handle the modes
        if "modes" in fieldname:
            filename = outpath + "/" + fieldname + ".dat"
            print "writing ", filename
            fileobj = open(filename, "w")
            data = compilation[fieldname][0:n_modes]

            for index in range(data.shape[1]):
                entry = tuple(data[:, index].tolist())
                fileobj.write(("%10.5g " * n_modes + "\n") % entry)

            fileobj.close()

    svd_data.close()
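
The idiom np.abs(np.sort(-x)) used above orders the (non-negative) eigenvalues in descending order before normalizing to the leading one; for example:

import numpy as np

vals = np.array([0.2, 3.0, 1.5, 0.7])
data = np.abs(np.sort(-vals))    # [3.0, 1.5, 0.7, 0.2]
data /= data[0]                  # normalized to the leading eigenvalue
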
Example #16
    def __init__(self, parameter_file=None, params_dict=None, feedback=0):
        self.params = params_dict
        self.datapath_db = dp.DataPath()

        if parameter_file:
            self.params = parse_ini.parse(parameter_file,
                                          calc_mixing_init,
                                          prefix=calc_mixing_prefix)

        bin_spec = self.params["bins"]
        self.bins = np.logspace(math.log10(bin_spec[0]),
                                math.log10(bin_spec[1]),
                                num=bin_spec[2],
                                endpoint=True)
Example #17
def wrap_batch_gbtxwigglez_data_run(inifile,
                                    generate=False,
                                    outdir="./plots/"):
    r"""Wrapper to the GBT x WiggleZ calculation"""
    params_init = {
        "gbt_mapkey": "cleaned GBT map",
        "wigglez_deltakey": "WiggleZ overdensity map",
        "wigglez_mockkey": "WiggleZ overdensities from mocks",
        "wigglez_selectionkey": "WiggleZ selection function",
        "mode_transfer_1d_ini": "ini file -> 1d trans. function",
        "mode_transfer_2d_ini": "ini file -> 2d trans. function",
        "beam_transfer_ini": "ini file -> 2d beam trans. function",
        "spec_ini": "ini file for the spectral estimation",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "cwx_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['gbt_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    mode_transfer_1d = None
    if params["mode_transfer_1d_ini"]:
        mode_transfer_1d = cct.wrap_batch_crosspwr_transfer(
            params["mode_transfer_1d_ini"], generate=generate, outdir=outdir)

    batch_gbtxwigglez_data_run(params["gbt_mapkey"],
                               params["wigglez_deltakey"],
                               params["wigglez_mockkey"],
                               params["wigglez_selectionkey"],
                               inifile=params["spec_ini"],
                               datapath_db=datapath_db,
                               outdir=output_root,
                               output_tag=output_tag,
                               beam_transfer=None,
                               mode_transfer_1d=mode_transfer_1d,
                               mode_transfer_2d=None,
                               theory_curve=None)
def plot_mode_amplitudes(mapkey,
                         outputdir="/cita/d/www/home/eswitzer/movies/"):
    datapath_db = data_paths.DataPath()
    map_cases = datapath_db.fileset_cases(mapkey, "pair;product;treatment")
    print map_cases
    for pair in map_cases['pair']:
        source_key = "db:%s:%s;modes;100modes" % (mapkey, pair)
        pc.make_cube_movie(source_key,
                           "Mode amp",
                           pc.cube_frame_dir,
                           sigmarange=-1,
                           outputdir=outputdir,
                           multiplier=1.,
                           transverse=False,
                           convolve=False,
                           logscale=True)
Example #19
def plot_first_few_amps(dbitem, modelist):
    datapath_db = data_paths.DataPath()
    frame_dir = "/scratch/eswitzer/cube_frames/"

    dbname = "db:%s:A_with_B;modes;100modes" % dbitem

    for modeindex in modelist:
        filename = "%s_%d.eps" % (dbitem, modeindex)
        plot_cube.make_cube_movie(dbname,
                                  "Amplitude",
                                  frame_dir,
                                  saveslice=modeindex,
                                  saveslice_file=filename,
                                  sigmarange=-1,
                                  multiplier=1.,
                                  title="%s" % filename)
Example #20
def gather_batch_data_run():
    datapath_db = data_paths.DataPath()

    outpath = datapath_db.fetch("quadratic_batch_data")
    print "reading from to: " + outpath

    funcname = "correlate.batch_quadratic.call_xspec_run"
    caller = batch_handler.MemoizeBatch(funcname, outpath, verbose=True)

    for mode_num in range(0, 55, 5):
        map1_key = "GBT_15hr_map_cleaned_%dmode" % mode_num
        map2_key = "GBT_15hr_map_cleaned_%dmode" % mode_num
        noise1_key = "GBT_15hr_map_cleaned_%dmode" % mode_num
        noise2_key = "GBT_15hr_map_cleaned_%dmode" % mode_num

        (pairlist, pairdict) = \
            data_paths.cross_maps(map1_key, map2_key,
                                  noise1_key, noise2_key,
                                  map_suffix=";map",
                                  noise_inv_suffix=";noise_inv",
                                  cross_sym="_x_",
                                  pair_former="GBTauto_cross_pairs",
                                  ignore=['param'],
                                  tag1prefix=map1_key + "_",
                                  tag2prefix=map2_key + "_",
                                  verbose=False)

        pwr_1d = []
        pwr_2d = []
        for item in pairdict.keys():
            pairrun = pairdict[item]
            print pairrun['tag1']
            print pairrun['map1']
            print pairrun['noise_inv1']
            print pairdict[item].keys()

            pwr2d_run, pwr1d_run = caller.execute(pairrun['map1'],
                                                  pairrun['map2'],
                                                  pairrun['noise_inv1'],
                                                  pairrun['noise_inv2'])
            pwr_2d.append(pwr2d_run)
            pwr_1d.append(pwr1d_run)

        pe.summarize_1d_agg_pwrspec(pwr_1d,
                                    "pwr_data_%dmodes_1d.dat" % mode_num)
        pe.summarize_2d_agg_pwrspec(pwr_2d,
                                    "pwr_data_%dmodes_2d.dat" % mode_num)
def plot_gbt_comb_modeset(map_key,
                          outputdir="/cita/d/www/home/eswitzer/movies/",
                          convolve=False,
                          divider_token=";"):
    datapath_db = data_paths.DataPath()
    input_fdb = datapath_db.fetch(map_key, intend_read=True, silent=True)

    map_treatments = []
    for filekey in input_fdb[0]:
        keyparse = filekey.split(divider_token)
        if len(keyparse) == 2:
            treatment = keyparse[-1]
            if "modes" in treatment:
                map_treatments.append(treatment)

    map_treatments = unique_list(map_treatments)

    for treatment in map_treatments:
        source_key = "db:%s:map;%s" % (map_key, treatment)
        print source_key, treatment
        pc.make_cube_movie(source_key,
                           "Temperature (mK)",
                           pc.cube_frame_dir,
                           sigmarange=3.,
                           outputdir=outputdir,
                           multiplier=1000.,
                           transverse=False,
                           convolve=convolve)

        source_key = "db:%s:product;%s" % (map_key, treatment)
        pc.make_cube_movie(source_key,
                           "Cleaned map times weights",
                           pc.cube_frame_dir,
                           sigmarange=-1,
                           outputdir=outputdir,
                           multiplier=1000.,
                           transverse=False,
                           convolve=convolve)

        source_key = "db:%s:weight;%s" % (map_key, treatment)
        pc.make_cube_movie(source_key,
                           "inverse variance weight",
                           pc.cube_frame_dir,
                           sigmarange=-1,
                           outputdir=outputdir,
                           multiplier=1.,
                           transverse=False)
def generate_sim(params, parallel=True, silent=True, datapath_db=None):
    """generate simulations
    here, assuming the sec A of the set is the template map
    """

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    template_mapname = datapath_db.fetch(params['template_key'],
                                         intend_read=True,
                                         purpose="template_map",
                                         silent=silent)

    physlist = datapath_db.fetch(params['sim_physical_key'],
                                 intend_write=True,
                                 purpose="output sim (physical)",
                                 silent=silent)

    rawlist = datapath_db.fetch(params['sim_key'],
                                intend_write=True,
                                purpose="output sim",
                                silent=silent)

    beamlist = datapath_db.fetch(params['sim_beam_key'],
                                 intend_write=True,
                                 purpose="output sim+beam",
                                 silent=silent)

    bpdlist = datapath_db.fetch(params['sim_beam_plus_data_key'],
                                intend_write=True,
                                purpose="output sim+beam+data",
                                silent=silent)

    runlist = [(template_mapname, physlist[1][index], rawlist[1][index],
                beamlist[1][index], bpdlist[1][index],
                params['pwrspec_scenario'], params['refinement'])
               for index in rawlist[0]]

    print runlist
    #sys.exit()
    if parallel:
        # leave headroom for other processes, but request at least one worker
        pool = multiprocessing.Pool(
            processes=max(1, multiprocessing.cpu_count() - 4))
        pool.map(wrap_sim, runlist)
    else:
        for runitem in runlist:
            wrap_sim(runitem)
Example #23
def wrap_batch_gbtpwrspec_data_run(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the GBT x GBT calculation"""
    params_init = {
        "gbt_mapkey": "cleaned GBT map",
        "mode_transfer_1d_ini": "ini file -> 1d trans. function",
        "mode_transfer_2d_ini": "ini file -> 2d trans. function",
        "beam_transfer_ini": "ini file -> 2d beam trans. function",
        "square_1dmodetrans": False,
        "spec_ini": "ini file for the spectral estimation",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "cp_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['gbt_mapkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    mode_transfer_1d = None
    if params["mode_transfer_1d_ini"]:
        mode_transfer_1d = cct.wrap_batch_crosspwr_transfer(
            params["mode_transfer_1d_ini"], generate=generate, outdir=outdir)

    return batch_gbtpwrspec_data_run(
        params["gbt_mapkey"],
        inifile=params["spec_ini"],
        datapath_db=datapath_db,
        outdir=output_root,
        output_tag=output_tag,
        beam_transfer=None,
        square_1dmodetrans=params["square_1dmodetrans"],
        mode_transfer_1d=mode_transfer_1d,
        mode_transfer_2d=None)
Example #24
def add_sim_to_data(simkey, datakey, replace=False):
    datapath_db = data_paths.DataPath()

    mapA_file = datapath_db.fetch(datakey + ":A;clean_map", intend_read=True)
    mapB_file = datapath_db.fetch(datakey + ":B;clean_map", intend_read=True)
    mapC_file = datapath_db.fetch(datakey + ":C;clean_map", intend_read=True)
    mapD_file = datapath_db.fetch(datakey + ":D;clean_map", intend_read=True)
    simfile = datapath_db.fetch(simkey + ":1", intend_read=True)

    simmap = algebra.make_vect(algebra.load(simfile))

    mapset = [mapA_file, mapB_file, mapC_file, mapD_file]
    for mapfile in mapset:
        print mapfile, simfile
        origmap = algebra.make_vect(algebra.load(mapfile))
        if replace:
            algebra.save(mapfile, simmap)
        else:
            algebra.save(mapfile, origmap + simmap)
Example #25
def plot_cleaned_maps(source_key, alt_weight=None,
                 signal='map', weight='noise_inv', divider_token=";",
                 outputdir="/cita/d/www/home/eswitzer/movies/",
                 convolve=False,
                 transverse=False):
    r"""
    `source_key` is the file db key for the maps to combine
    `signal` is the tag in the file db entry for the signal maps
    `weight` is the tag in the file db entry for the N^-1 weights
    `divider_token` is the token that divides the map section name
            from the data type e.g. "A_with_B;noise_inv"
    """
    datapath_db = data_paths.DataPath()
    source_fdb = datapath_db.fetch(source_key, intend_read=True,
                                   silent=True)
    source_fdict = source_fdb[1]

    # accumulate all the files to combine
    weightkeys = {}
    signalkeys = {}
    for filekey in source_fdb[0]:
        if divider_token in filekey:
            data_type = filekey.split(divider_token)[1]
            map_section = filekey.split(divider_token)[0]

            if data_type == signal:
                signalkeys[map_section] = source_fdict[filekey]

            if data_type == weight:
                weightkeys[map_section] = source_fdict[filekey]

    for mapkey in signalkeys:
        signalfile = signalkeys[mapkey]
        weightfile = weightkeys[mapkey]
        print "loading pair: %s %s" % (signalfile, weightfile)

        make_cube_movie(signalfile, "Temperature (mK)", cube_frame_dir,
                        sigmarange=2.5, outputdir=outputdir, multiplier=1000.,
                        transverse=transverse, convolve=convolve)

        make_cube_movie(weightfile, "Inverse variance weight", cube_frame_dir,
                        sigmarange=2.5, outputdir=outputdir, multiplier=1.,
                        transverse=transverse)
def batch_crosspwr_transfer_run(cleaned_simkey,
                                rootsim,
                                selection_function,
                                simindex="1",
                                weightmap="15modes",
                                include_beam=True,
                                inifile=None,
                                generate=False,
                                alttag=None):
    r"""This provides some basic uniformity in how the WiggleZ transfer
    functions are derived. TODO: This can probably be phased out.
    """
    datapath_db = data_paths.DataPath()

    truesignal_simkey = "%s_delta:%s" % (rootsim, simindex)
    truesignal_weightkey = selection_function

    if include_beam:
        reference_simkey = "%s_temperature:%s" % (rootsim, simindex)
    else:
        reference_simkey = "%s_beam:%s" % (rootsim, simindex)

    output_tag = cleaned_simkey
    if alttag:
        output_tag += "_" + alttag

    reference_weightkey = "%s:weight;%s" % (cleaned_simkey, weightmap)

    # build the output path before output_tag is possibly nulled below
    outdir = "./plots/" + output_tag

    if generate:
        output_tag = None

    return batch_crosspwr_transfer(cleaned_simkey,
                                   truesignal_simkey,
                                   truesignal_weightkey,
                                   reference_simkey,
                                   reference_weightkey,
                                   inifile=inifile,
                                   datapath_db=datapath_db,
                                   outdir="./plots/",
                                   output_tag=output_tag)
def plot_gbt_simset(fieldname, outputdir="/cita/d/www/home/eswitzer/movies/"):
    datapath_db = data_paths.DataPath()

    #keyname = "%s" % fieldname
    #filename = datapath_db.fetch(keyname, pick='0')
    #pc.make_cube_movie(filename, "Temperature (mK)", pc.cube_frame_dir,
    #                    sigmarange=3., outputdir=outputdir, multiplier=1000.,
    #                    transverse=False, filetag_suffix="_"+fieldname)

    #keyname = "%s_beam" % fieldname
    #filename = datapath_db.fetch(keyname, pick='0')
    #pc.make_cube_movie(filename, "Temperature (mK)", pc.cube_frame_dir,
    #                    sigmarange=3., outputdir=outputdir, multiplier=1000.,
    #                    transverse=False, filetag_suffix="_"+fieldname)

    #keyname = "%s_beam_conv" % fieldname
    #filename = datapath_db.fetch(keyname, pick='0')
    #pc.make_cube_movie(filename, "Temperature (mK)", pc.cube_frame_dir,
    #                    sigmarange=3., outputdir=outputdir, multiplier=1000.,
    #                    transverse=False, filetag_suffix="_"+fieldname)

    keyname = "%s_beam_meansub" % fieldname
    filename = datapath_db.fetch(keyname, pick='0')
    pc.make_cube_movie(filename,
                       "Temperature (mK)",
                       pc.cube_frame_dir,
                       sigmarange=3.,
                       outputdir=outputdir,
                       multiplier=1000.,
                       transverse=False,
                       filetag_suffix="_" + fieldname)

    keyname = "%s_beam_meansubconv" % fieldname
    filename = datapath_db.fetch(keyname, pick='0')
    pc.make_cube_movie(filename,
                       "Temperature (mK)",
                       pc.cube_frame_dir,
                       sigmarange=3.,
                       outputdir=outputdir,
                       multiplier=1000.,
                       transverse=False,
                       filetag_suffix="_" + fieldname)
Example #28
def generate_aux_simset(fieldname, silent=False):

    datapath_db = data_paths.DataPath()
    weightfile = datapath_db.fetch("GBT_15hr_map_combined_cleaned_0mode_weight",
                                   intend_read=True, silent=silent)

    input_rawsimset = datapath_db.fetch("%s" % fieldname, intend_read=True,
                                        silent=silent)

    output_deltasimset = datapath_db.fetch("%s_delta" % fieldname,
                                           intend_write=True, silent=silent)

    input_beamsimset = datapath_db.fetch("%s_beam" % fieldname,
                                         intend_read=True, silent=silent)

    output_meansubsimset = datapath_db.fetch("%s_beam_meansub" % fieldname,
                                             intend_write=True, silent=silent)

    output_convsimset = datapath_db.fetch("%s_beam_conv" % fieldname,
                                          intend_write=True, silent=silent)

    output_meansubconvsimset = datapath_db.fetch(
        "%s_beam_meansubconv" % fieldname, intend_write=True, silent=silent)

    for index in input_rawsimset[0]:
        generate_delta_sim(input_rawsimset[1][index],
                           output_deltasimset[1][index])

        generate_proc_sim(input_beamsimset[1][index], weightfile,
                          output_meansubsimset[1][index],
                          meansub=True, degrade=False)

        generate_proc_sim(input_beamsimset[1][index], weightfile,
                          output_convsimset[1][index],
                          meansub=False, degrade=True)

        generate_proc_sim(input_beamsimset[1][index], weightfile,
                          output_meansubconvsimset[1][index],
                          meansub=True, degrade=True)
def wrap_batch_crosspwr_transfer(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the transfer function calculator
    """
    params_init = {
        "cleaned_simkey": "cleaned sims for transfer func",
        "truesignal_simkey": "pure signal",
        "truesignal_weightkey": "weight to use for pure signal",
        "reference_simkey": "reference signal",
        "reference_weightkey": "weight to use for reference signal",
        "spec_ini": "ini file for the spectral estimation",
        "output_tag": "tag identifying the output somehow"
    }
    prefix = "cct_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%s_%s" % (params['cleaned_simkey'], params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini', prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_crosspwr_transfer(params["cleaned_simkey"],
                                   params["truesignal_simkey"],
                                   params["truesignal_weightkey"],
                                   params["reference_simkey"],
                                   params["reference_weightkey"],
                                   inifile=params["spec_ini"],
                                   datapath_db=datapath_db,
                                   outdir=output_root,
                                   output_tag=output_tag)
def wrap_batch_sim_run(inifile, generate=False, outdir="./plots/"):
    r"""Wrapper to the sim processing
    TODO: add transfer function to test its application
    """
    params_init = {"left_simkey": "sim to put on the left side",
                   "right_simkey": "sim to put on the right side",
                   "left_weightkey": "weight to use on the left side",
                   "right_weightkey": "weight to use on the right side",
                   "spec_ini": "ini file for the spectral estimation",
                   "output_tag": "tag identifying the output somehow"}
    prefix="csp_"

    params = parse_ini.parse(inifile, params_init, prefix=prefix)
    print params

    output_tag = "%sx%s_%s" % (params['left_simkey'], \
                               params['right_simkey'], \
                               params['output_tag'])
    output_root = "%s/%s/" % (outdir, output_tag)

    if generate:
        output_tag = None

    print output_root
    print output_tag
    file_tools.mkparents(output_root)
    parse_ini.write_params(params, output_root + 'params.ini',
                           prefix=prefix)

    datapath_db = data_paths.DataPath()

    return batch_sim_run(params["left_simkey"],
                         params["right_simkey"],
                         params["left_weightkey"],
                         params["right_weightkey"],
                         inifile=params["spec_ini"],
                         datapath_db=datapath_db,
                         outdir=output_root,
                         output_tag=output_tag)