def batch_wigglez_automock_run(mock_key, sel_key,
                               inifile=None, datapath_db=None,
                               output_tag=None):
    r"""TODO: make this work; wrote this but never really needed it yet

    Queue the auto-power of every mock realization in `mock_key`, using the
    selection function `sel_key` as the weight on both sides. With no
    `output_tag` the memoized cache is (re)generated.
    """
    # fall back to the default path database when none is supplied
    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")
    mock_cases = datapath_db.fileset_cases(mock_key, "realization")

    # no output tag means we are regenerating the cache
    generate = not output_tag
    caller = batch_handler.MemoizeBatch(
        "correlate.batch_quadratic.call_xspec_run",
        cache_path, generate=generate, verbose=True)

    for realization in mock_cases['realization']:
        mock_entry = "%s:%s" % (mock_key, realization)
        dbkeydict = {'map1_key': mock_entry,
                     'map2_key': mock_entry,
                     'noiseinv1_key': sel_key,
                     'noiseinv2_key': sel_key}
        files = data_paths.convert_dbkeydict_to_filedict(
            dbkeydict, datapath_db=datapath_db)

        caller.execute(files['map1_key'], files['map2_key'],
                       files['noiseinv1_key'], files['noiseinv2_key'],
                       inifile=inifile)

    caller.multiprocess_stack()
def batch_single_crosspwr(left_mapkey,
                          right_simkey, right_weightkey,
                          multiplier=1.,
                          inifile=None, datapath_db=None,
                          outdir="./plots",
                          output_tag=None):
    r"""w_left m_left x w_right m_right"""

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    map_cases = datapath_db.fileset_cases(left_mapkey, "type;treatment")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    generate = False if output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname, cache_path,
                                        generate=generate, verbose=True)

    crosspwr_collection = {}
    for treatment in map_cases['treatment']:
        dbkeydict = {}
        dbkeydict['map1_key'] = "%s:map;%s" % (left_mapkey, treatment)
        dbkeydict['map2_key'] = right_simkey
        dbkeydict['noiseinv1_key'] = "%s:weight;%s" % (left_mapkey, treatment)
        dbkeydict['noiseinv2_key'] = right_weightkey
        files = data_paths.convert_dbkeydict_to_filedict(dbkeydict,
                                                      datapath_db=datapath_db)

        pwrspec_out_signal = caller.execute(files['map1_key'],
                                            files['map2_key'],
                                            files['noiseinv1_key'],
                                            files['noiseinv2_key'],
                                            inifile=inifile)

        if output_tag:
            pwrspec_out_signal[0]['binavg'] *= multiplier
            pwrspec_out_signal[1]['binavg'] *= multiplier

            pwr_1d_from_2d = pe.convert_2d_to_1d(pwrspec_out_signal[0])

            mtag = "%s_%s" % (output_tag, treatment)
            pe.summarize_pwrspec(pwrspec_out_signal[1],
                                 pwr_1d_from_2d,
                                 pwrspec_out_signal[0],
                                 mtag, outdir=outdir)

            crosspwr_collection[treatment] = (pwrspec_out_signal[1],
                                              pwr_1d_from_2d,
                                              pwrspec_out_signal[0])

    if not output_tag:
        caller.multiprocess_stack()
        return None
    else:
        return crosspwr_collection
    def execute(self, processes):
        """Aggregate a single sim x WiggleZ-sim cross-power call.

        Resolves the 21cm weight and the WiggleZ selection function (both
        optional; `False` is passed through when missing), queues one
        call keyed "sim:0modes", and stacks the result into
        ``self.params["outfile"]``.

        `processes` is unused; parallelism comes from self.params['ncpu'].
        """
        funcname = "quadratic_products.pwrspec_combinations.pwrspec_caller"
        caller = aggregate_outputs.AggregateOutputs(funcname)

        # TODO: simplify this
        # Resolve the 21cm weight file; fall back to False if the database
        # has no such entry, so the downstream caller can handle the absence.
        try:
            dbkeydict = {}
            dbkeydict['noiseinv1_key'] = "%s:weight;0modes" % \
                                         self.params['map_key']

            files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                                     datapath_db=self.datapath_db)
        except KeyError:
            print "WARNING: NO 21cm weight in WiggleZxGBT_modesim"
            files = {'noiseinv1_key': False}

        # The WiggleZ selection function is likewise optional.
        if self.params['wigglez_sel_key']:
            sel_key = self.params['wigglez_sel_key']
            wigglez_selfile = self.datapath_db.fetch(sel_key)
        else:
            print "Warning: in WiggleZxGBT_modesim no selection function"
            wigglez_selfile = False

        execute_key = "sim:0modes"
        caller.execute(self.params['sim_file'],
                       self.params['wigglez_sim_file'],
                       files['noiseinv1_key'],
                       wigglez_selfile,
                       self.params,
                       execute_key=execute_key)

        caller.multiprocess_stack(self.params["outfile"],
                                  debug=False, ncpu=self.params['ncpu'])
# Example #4
# 0
def find_avg_fsky(map_key, tack_on=None, refinement=2, pad=5, order=1):
    """Take all the pairs that enter the autopower, open their weight files and
    find the fsky for each data treatment
    In a pipeline
        fsky = find_avg_fsky(self.params["map_key"],
                            tack_on=self.params["tack_on"],
                            refinement=self.params["refinement"],
                            pad=self.params["pad"],
                            order=self.params["order"])
    """
    fsky = {}
    datapath_db = dp.DataPath()

    map_cases = datapath_db.fileset_cases(map_key, "pair;type;treatment")

    # This code is essentially verbatim for the permutation in the real
    # autopower
    unique_pairs = dp.GBTauto_cross_pairs(map_cases['pair'],
                                          map_cases['pair'],
                                          cross_sym="_with_")

    treatment_list = map_cases['treatment']

    for treatment in treatment_list:
        for item in unique_pairs:
            dbkeydict = {}
            mapset0 = (map_key, item[0], treatment)
            mapset1 = (map_key, item[1], treatment)
            dbkeydict['noiseinv1_key'] = "%s:%s;noise_inv;%s" % mapset0
            dbkeydict['noiseinv2_key'] = "%s:%s;noise_inv;%s" % mapset1
            files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                                     datapath_db=datapath_db,
                                                     tack_on=tack_on)

            print files['noiseinv1_key'], files['noiseinv2_key']
            weight1 = algebra.make_vect(algebra.load(files['noiseinv1_key']))
            weight2 = algebra.make_vect(algebra.load(files['noiseinv2_key']))

            physweight1 = bh.repackage_kiyo(
                pg.physical_grid(weight1,
                                 refinement=refinement,
                                 pad=pad,
                                 order=order))

            physweight2 = bh.repackage_kiyo(
                pg.physical_grid(weight2,
                                 refinement=refinement,
                                 pad=pad,
                                 order=order))

            #fsky = np.sum(physweight1 * physweight2)**2
            #fsky /= np.sum(physweight1**2 * physweight2**2)
            #fsky /= float(physweight1.size)
            fsky = np.sum(weight1 * weight2)**2
            fsky /= np.sum(weight1**2 * weight2**2)
            fsky /= float(weight1.size)
            print "volume factor in noise weight: ", fsky

    return fsky
# Example #5
# 0
    def generate_runlist(self):
        r"""Populate `self.all_pairs` (pair name -> (noiseinv1, noiseinv2)
        weight files) and `self.mixing_fileout` (pair name -> per-pair
        shelve filename). Only the "0modes" weights are looked up.
        """
        map_key = self.params['map_key']
        map_cases = self.datapath_db.fileset_cases(map_key,
                                                   "pair;type;treatment")

        # first compute the mixing matrix for the crossed (AxB) weightings
        unique_pairs = dp.GBTauto_cross_pairs(map_cases['pair'],
                                              map_cases['pair'],
                                              cross_sym="_with_")

        # assume the weights are the same for all cleaning treatments
        # TODO: this may change in the future
        treatment = "0modes"
        keyspec = "%s:%s;noise_inv;%s"

        self.all_pairs = {}
        for pair in unique_pairs:
            dbkeydict = {
                'noiseinv1_key': keyspec % (map_key, pair[0], treatment),
                'noiseinv2_key': keyspec % (map_key, pair[1], treatment),
            }
            files = dp.convert_dbkeydict_to_filedict(
                dbkeydict, datapath_db=self.datapath_db)

            self.all_pairs[pair[0]] = (files['noiseinv1_key'],
                                       files['noiseinv2_key'])

        # For the autopower (in noise assessment), we use the same cleaned maps
        # and the weights are the same for various pairs, e.g.
        # A_with_B is the same as A_with_C, etc. because the mode cleaning does
        # not impact the weighting functions
        for (section, proxy_pair) in [("A", "A_with_B"),
                                      ("B", "B_with_A"),
                                      ("C", "C_with_A"),
                                      ("D", "D_with_A")]:
            weightfile = self.datapath_db.fetch(
                '%s:%s;noise_inv;0modes' % (map_key, proxy_pair),
                silent=True)
            self.all_pairs["%s_with_%s" % (section, section)] = \
                (weightfile, weightfile)

        # one output shelve file per pair
        self.mixing_fileout = {}
        for pair in self.all_pairs:
            self.mixing_fileout[pair] = "%s_%s.shelve" % \
                                   (self.params['perpair_base'], pair)
    def generate_runlist(self):
        r"""Populate `self.all_pairs`, mapping each map-pair name to its
        (noiseinv1, noiseinv2) weight-file tuple, and `self.mixing_fileout`,
        mapping each pair name to its per-pair output shelve filename.
        Only the "0modes" weight entries are looked up.
        """
        map_key = self.params['map_key']
        map_cases = self.datapath_db.fileset_cases(map_key,
                                                   "pair;type;treatment")

        # first compute the mixing matrix for the crossed (AxB) weightings
        unique_pairs = dp.GBTauto_cross_pairs(map_cases['pair'],
                                              map_cases['pair'],
                                              cross_sym="_with_")

        # assume the weights are the same for all cleaning treatments
        # TODO: this may change in the future
        treatment = "0modes"
        self.all_pairs = {}
        for item in unique_pairs:
            dbkeydict = {}
            # database keys look like "mapkey:A_with_B;noise_inv;0modes"
            mapset0 = (map_key, item[0], treatment)
            mapset1 = (map_key, item[1], treatment)
            dbkeydict['noiseinv1_key'] = "%s:%s;noise_inv;%s" % mapset0
            dbkeydict['noiseinv2_key'] = "%s:%s;noise_inv;%s" % mapset1
            files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                                datapath_db=self.datapath_db)

            self.all_pairs[item[0]] = (files['noiseinv1_key'],
                                   files['noiseinv2_key'])

        # For the autopower (in noise assessment), we use the same cleaned maps
        # and the weights are the same for various pairs, e.g.
        # A_with_B is the same as A_with_C, etc. because the mode cleaning does
        # not impact the weighting functions
        A_file = (self.datapath_db.fetch(
                '%s:A_with_B;noise_inv;0modes' % map_key, silent=True))
        self.all_pairs["A_with_A"] = (A_file, A_file)

        B_file = (self.datapath_db.fetch(
                '%s:B_with_A;noise_inv;0modes' % map_key, silent=True))
        self.all_pairs["B_with_B"] = (B_file, B_file)

        C_file = (self.datapath_db.fetch(
                '%s:C_with_A;noise_inv;0modes' % map_key, silent=True))
        self.all_pairs["C_with_C"] = (C_file, C_file)

        D_file = (self.datapath_db.fetch(
                '%s:D_with_A;noise_inv;0modes' % map_key, silent=True))
        self.all_pairs["D_with_D"] = (D_file, D_file)

        # one output shelve file per pair
        self.mixing_fileout = {}
        for pair in self.all_pairs:
            self.mixing_fileout[pair] = "%s_%s.shelve" % \
                                   (self.params['perpair_base'], pair)
    def execute(self, processes):
        r"""Queue the noise autopower spectra (AxA, BxB, CxC, DxD) for each
        requested treatment and stack the results into
        ``self.params["outfile"]``.

        The weight for e.g. A_with_A is taken from the A_with_B entry,
        since the mode cleaning does not alter the weighting functions.

        `processes` is unused; parallelism comes from self.params['ncpu'].
        """
        funcname = "quadratic_products.pwrspec_combinations.pwrspec_caller"
        caller = aggregate_outputs.AggregateOutputs(funcname)

        map_key = self.params['map_key']
        tack_on = self.params['tack_on']
        map_cases = self.datapath_db.fileset_cases(map_key,
                                                   "pair;type;treatment")

        # We use the same cleaned maps and the weights are the same for
        # various pairs, e.g. A_with_B is the same as A_with_C, etc. because
        # the mode cleaning does not impact the weighting functions
        noise_pairs = {"A_with_A": "A_with_B",
                       "B_with_B": "B_with_A",
                       "C_with_C": "C_with_A",
                       "D_with_D": "D_with_A"
                      }

        # unless specified otherwise find power spectra for all treatments
        if self.params['treatments'] is None:
            treatment_list = map_cases['treatment']
        else:
            treatment_list = self.params['treatments']

        for treatment in treatment_list:
            for item in noise_pairs:
                # both sides of the autopower use the proxy pair's map/weight
                dbkeydict = {}
                mapset0 = (map_key, noise_pairs[item], treatment)
                mapset1 = (map_key, noise_pairs[item], treatment)
                dbkeydict['map1_key'] = "%s:%s;map;%s" % mapset0
                dbkeydict['map2_key'] = "%s:%s;map;%s" % mapset1
                dbkeydict['noiseinv1_key'] = "%s:%s;noise_inv;%s" % mapset0
                dbkeydict['noiseinv2_key'] = "%s:%s;noise_inv;%s" % mapset1
                files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                                datapath_db=self.datapath_db,
                                                tack_on=tack_on)

                execute_key = "%s:%s" % (item, treatment)
                caller.execute(files['map1_key'],
                               files['map2_key'],
                               files['noiseinv1_key'],
                               files['noiseinv2_key'],
                               self.params,
                               execute_key=execute_key)

        caller.multiprocess_stack(self.params["outfile"],
                                  debug=False, ncpu=self.params['ncpu'])
    def execute(self, processes):
        r"""Queue the cross-pair power spectra (AxB, AxC, ...) for each
        requested treatment and stack the results into
        ``self.params["outfile"]``.

        `processes` is unused; parallelism comes from self.params['ncpu'].
        """
        funcname = "quadratic_products.pwrspec_combinations.pwrspec_caller"
        caller = aggregate_outputs.AggregateOutputs(funcname)

        map_key = self.params['map_key']
        tack_on = self.params['tack_on']
        map_cases = self.datapath_db.fileset_cases(map_key,
                                                   "pair;type;treatment")

        unique_pairs = dp.GBTauto_cross_pairs(map_cases['pair'],
                                              map_cases['pair'],
                                              cross_sym="_with_")

        # unless specified otherwise find power spectra for all treatments
        treatment_list = self.params['treatments']
        if treatment_list is None:
            treatment_list = map_cases['treatment']

        for treatment in treatment_list:
            for pair in unique_pairs:
                spec_left = (map_key, pair[0], treatment)
                spec_right = (map_key, pair[1], treatment)
                dbkeydict = {
                    'map1_key': "%s:%s;map;%s" % spec_left,
                    'map2_key': "%s:%s;map;%s" % spec_right,
                    'noiseinv1_key': "%s:%s;noise_inv;%s" % spec_left,
                    'noiseinv2_key': "%s:%s;noise_inv;%s" % spec_right,
                }
                files = dp.convert_dbkeydict_to_filedict(
                    dbkeydict, datapath_db=self.datapath_db,
                    tack_on=tack_on)

                caller.execute(files['map1_key'],
                               files['map2_key'],
                               files['noiseinv1_key'],
                               files['noiseinv2_key'],
                               self.params,
                               execute_key="%s:%s" % (pair[0], treatment))

        caller.multiprocess_stack(self.params["outfile"],
                                  debug=False, ncpu=self.params['ncpu'])
def batch_wigglez_automock_run(mock_key,
                               sel_key,
                               inifile=None,
                               datapath_db=None,
                               output_tag=None):
    r"""TODO: make this work; wrote this but never really needed it yet

    Queue the auto-power of each mock realization in `mock_key`, using the
    selection function `sel_key` as the weight on both sides. When
    `output_tag` is falsy the memoized cache is (re)generated.
    """
    # fall back to the default path database when none is supplied
    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    mock_cases = datapath_db.fileset_cases(mock_key, "realization")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    # no output tag means we are regenerating the cache
    generate = False if output_tag else True
    caller = batch_handler.MemoizeBatch(funcname,
                                        cache_path,
                                        generate=generate,
                                        verbose=True)

    for index in mock_cases['realization']:
        # mock x mock auto-power with the selection function as both weights
        dbkeydict = {}
        dbkeydict['map1_key'] = "%s:%s" % (mock_key, index)
        dbkeydict['map2_key'] = "%s:%s" % (mock_key, index)
        dbkeydict['noiseinv1_key'] = sel_key
        dbkeydict['noiseinv2_key'] = sel_key
        files = data_paths.convert_dbkeydict_to_filedict(
            dbkeydict, datapath_db=datapath_db)

        caller.execute(files['map1_key'],
                       files['map2_key'],
                       files['noiseinv1_key'],
                       files['noiseinv2_key'],
                       inifile=inifile)

    caller.multiprocess_stack()
    def execute(self, processes):
        r"""Queue the sim x sim power spectrum under each unique map pair's
        noise-inverse weights (0modes treatment only) and stack the results
        into ``self.params["outfile"]``.

        NOTE(review): map1_key/map2_key are resolved into files below but
        the call passes `sim_file` for both maps instead — only the weight
        files from the lookup are actually used.

        `processes` is unused; parallelism comes from self.params['ncpu'].
        """
        funcname = "quadratic_products.pwrspec_combinations.pwrspec_caller"
        caller = aggregate_outputs.AggregateOutputs(funcname)

        map_key = self.params['map_key']
        sim_file = self.params['sim_file']
        map_cases = self.datapath_db.fileset_cases(map_key,
                                                   "pair;type;treatment")

        unique_pairs = dp.GBTauto_cross_pairs(map_cases['pair'],
                                              map_cases['pair'],
                                              cross_sym="_with_")

        treatment = "0modes"

        for item in unique_pairs:
            dbkeydict = {}
            mapset0 = (map_key, item[0], treatment)
            mapset1 = (map_key, item[1], treatment)
            dbkeydict['map1_key'] = "%s:%s;map;%s" % mapset0
            dbkeydict['map2_key'] = "%s:%s;map;%s" % mapset1
            dbkeydict['noiseinv1_key'] = "%s:%s;noise_inv;%s" % mapset0
            dbkeydict['noiseinv2_key'] = "%s:%s;noise_inv;%s" % mapset1
            files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                            datapath_db=self.datapath_db)

            execute_key = "%s:%s" % (item[0], treatment)
            # the same simulation file stands in for both maps
            caller.execute(sim_file,
                           sim_file,
                           files['noiseinv1_key'],
                           files['noiseinv2_key'],
                           self.params,
                           execute_key=execute_key)

        caller.multiprocess_stack(self.params["outfile"],
                                  debug=False, ncpu=self.params['ncpu'])
    def execute(self, processes):
        r"""Queue the GBT map x WiggleZ cross-powers: first the real data
        (stacked into ``outfile_data``), then every mock realization
        (stacked into ``outfile_mock``), for each cleaning treatment.

        `processes` is unused; parallelism comes from self.params['ncpu'].
        """
        funcname = "quadratic_products.pwrspec_combinations.pwrspec_caller"
        # separate aggregators for the data and the mock spectra
        caller_data = aggregate_outputs.AggregateOutputs(funcname)
        caller_mock = aggregate_outputs.AggregateOutputs(funcname)

        wigglez_key = self.params['wigglez_key']
        sel_key = self.params['wigglez_sel_key']
        mock_key = self.params['wigglez_mock_key']
        mock_files = self.datapath_db.fetch(mock_key)

        map_key = self.params['map_key']
        map_cases = self.datapath_db.fileset_cases(map_key,
                                                   "type;treatment")

        # real data: GBT map (per treatment) x WiggleZ map
        for treatment in map_cases['treatment']:
            dbkeydict = {}
            dbkeydict['map1_key'] = "%s:map;%s" % (map_key, treatment)
            dbkeydict['map2_key'] = wigglez_key

            dbkeydict['noiseinv1_key'] = "%s:weight;%s" % \
                                             (map_key, treatment)

            dbkeydict['noiseinv2_key'] = sel_key
            files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                        datapath_db=self.datapath_db)

            execute_key = "data:%s" % treatment
            #print files, execute_key

            caller_data.execute(files['map1_key'],
                           files['map2_key'],
                           files['noiseinv1_key'],
                           files['noiseinv2_key'],
                           self.params,
                           execute_key=execute_key)

        caller_data.multiprocess_stack(self.params["outfile_data"],
                                       debug=False, ncpu=self.params['ncpu'])

        # mocks: GBT map (per treatment) x each WiggleZ mock realization
        for treatment in map_cases['treatment']:
            for item in mock_files[0]:
                dbkeydict = {}
                dbkeydict['map1_key'] = "%s:map;%s" % (map_key, treatment)
                dbkeydict['map2_key'] = "%s:%s" % (mock_key, item)

                dbkeydict['noiseinv1_key'] = "%s:weight;%s" % \
                                             (map_key, treatment)

                dbkeydict['noiseinv2_key'] = sel_key
                files = dp.convert_dbkeydict_to_filedict(dbkeydict,
                                                datapath_db=self.datapath_db)

                execute_key = "mock%s:%s" % (item, treatment)
                #print files, execute_key

                caller_mock.execute(files['map1_key'],
                               files['map2_key'],
                               files['noiseinv1_key'],
                               files['noiseinv2_key'],
                               self.params,
                               execute_key=execute_key)

        caller_mock.multiprocess_stack(self.params["outfile_mock"],
                                       debug=False, ncpu=self.params['ncpu'])
def batch_crosspwr_transfer(cleaned_simkey,
                            truesignal_simkey, truesignal_weightkey,
                            reference_simkey, reference_weightkey,
                            inifile=None, datapath_db=None,
                            outdir="./plots",
                            output_tag=None):
    r"""take relevant cross-powers
    cleaned_simkey(map) * cleaned_simkey(weight) x
    truesignal_weightkey * truesignal_simkey
    divided by:
    reference_simkey * reference_weightkey x truesignal_weightkey * truesignal_simkey

    When `output_tag` is set, returns a dict keyed by treatment with
    (trans1d, trans1d_from_2d, trans2d) transfer-function arrays and
    writes one .dat summary per treatment under `outdir`; otherwise the
    spectra are queued for cache (re)generation and None is returned.
    """

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    map_cases = datapath_db.fileset_cases(cleaned_simkey, "type;treatment")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    # no output tag means we are regenerating the cache
    generate = False if output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname, cache_path,
                                        generate=generate, verbose=True)

    # the denominator: reference sim x true signal
    dbkeydict = {}
    dbkeydict['map1_key'] = reference_simkey
    dbkeydict['map2_key'] = truesignal_simkey
    dbkeydict['noiseinv1_key'] = reference_weightkey
    dbkeydict['noiseinv2_key'] = truesignal_weightkey
    files = data_paths.convert_dbkeydict_to_filedict(dbkeydict,
                                                     datapath_db=datapath_db)

    reference_pwrspec_out = caller.execute(files['map1_key'],
                                        files['map2_key'],
                                        files['noiseinv1_key'],
                                        files['noiseinv2_key'],
                                        inifile=inifile)

    if output_tag:
        file_tools.mkparents(outdir)
        ref_pwr_1d_from_2d = pe.convert_2d_to_1d(reference_pwrspec_out[0])

        ref_pwr_1d = reference_pwrspec_out[1]['binavg']
        ref_pwr_2d = reference_pwrspec_out[0]['binavg']
        ref_pwr_1d_from_2d = ref_pwr_1d_from_2d['binavg']

        # binning taken from the reference spectrum; assumed shared by all
        bin_left = reference_pwrspec_out[1]['bin_left']
        bin_center = reference_pwrspec_out[1]['bin_center']
        bin_right = reference_pwrspec_out[1]['bin_right']
        counts_histo = reference_pwrspec_out[1]['counts_histo']

    transfer_functions = {}
    for treatment in map_cases['treatment']:
        # the numerator: cleaned sim (per treatment) x true signal
        dbkeydict = {}
        dbkeydict['map1_key'] = "%s:map;%s" % (cleaned_simkey, treatment)
        dbkeydict['map2_key'] = truesignal_simkey
        dbkeydict['noiseinv1_key'] = "%s:weight;%s" % (cleaned_simkey, treatment)
        dbkeydict['noiseinv2_key'] = truesignal_weightkey
        files = data_paths.convert_dbkeydict_to_filedict(dbkeydict,
                                                      datapath_db=datapath_db)

        pwrspec_out_signal = caller.execute(files['map1_key'],
                                            files['map2_key'],
                                            files['noiseinv1_key'],
                                            files['noiseinv2_key'],
                                            inifile=inifile)

        if output_tag:
            pwr_1d_from_2d = pe.convert_2d_to_1d(pwrspec_out_signal[0])

            pwr_1d = pwrspec_out_signal[1]['binavg']
            pwr_2d = pwrspec_out_signal[0]['binavg']
            pwr_1d_from_2d = pwr_1d_from_2d['binavg']

            # transfer function = cleaned / reference, per binning scheme
            trans1d_mode = pwr_1d / ref_pwr_1d
            trans1d_from2d_mode = pwr_1d_from_2d / ref_pwr_1d_from_2d
            trans2d_mode = pwr_2d / ref_pwr_2d

            transfer_functions[treatment] = (trans1d_mode,
                                             trans1d_from2d_mode,
                                             trans2d_mode)

            filename = "%s/%s_%s.dat" % (outdir, output_tag, treatment)

            outfile = open(filename, "w")
            for specdata in zip(bin_left, bin_center,
                                bin_right, counts_histo,
                                pwr_1d, pwr_1d_from_2d,
                                trans1d_mode, trans1d_from2d_mode):
                outfile.write(("%10.15g " * 8 + "\n") % specdata)
            outfile.close()

    if not output_tag:
        caller.multiprocess_stack()
        return None
    else:
        return transfer_functions
def batch_gbtxwigglez_data_run(gbt_map_key, wigglez_map_key,
                               wigglez_mock_key, wigglez_selection_key,
                               inifile=None, datapath_db=None,
                               outdir="./plots",
                               output_tag=None,
                               beam_transfer=None,
                               mode_transfer_1d=None,
                               mode_transfer_2d=None,
                               theory_curve=None):
    r"""assemble the pairs of GBT and WiggleZ and calculate the cross-power"""

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    map_cases = datapath_db.fileset_cases(gbt_map_key, "type;treatment")
    mock_cases = datapath_db.fileset_cases(wigglez_mock_key, "realization")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    generate = False if output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname, cache_path,
                                        generate=generate, verbose=True)

    if output_tag:
        file_tools.mkparents(outdir)

    for treatment in map_cases['treatment']:
        # TODO: make this more elegant
        # TODO: convert treatment into mode num
        transfer_2d = None
        if (mode_transfer_2d is not None) and (beam_transfer is None):
            transfer_2d = mode_transfer_2d[treatment][2]

        if (mode_transfer_2d is None) and (beam_transfer is not None):
            transfer_2d = beam_transfer

        if (mode_transfer_2d is not None) and (beam_transfer is not None):
            transfer_2d = mode_transfer_2d[treatment][2] * beam_transfer

        pwr_1d = []
        pwr_2d = []
        pwr_1d_from_2d = []
        for index in mock_cases['realization']:
            dbkeydict = {}
            dbkeydict['map1_key'] = "%s:map;%s" % (gbt_map_key, treatment)
            dbkeydict['map2_key'] = "%s:%s" % (wigglez_mock_key, index)
            dbkeydict['noiseinv1_key'] = "%s:weight;%s" % \
                                         (gbt_map_key, treatment)

            dbkeydict['noiseinv2_key'] = wigglez_selection_key
            files = data_paths.convert_dbkeydict_to_filedict(dbkeydict,
                                                      datapath_db=datapath_db)

            pwrspec_out = caller.execute(files['map1_key'], files['map2_key'],
                                         files['noiseinv1_key'],
                                         files['noiseinv2_key'],
                                         inifile=inifile)

            if output_tag:
                pwr_1d_from_2d.append(pe.convert_2d_to_1d(pwrspec_out[0],
                                      transfer=transfer_2d))

                pwr_2d.append(pwrspec_out[0])
                pwr_1d.append(pwrspec_out[1])

        if output_tag:
            if mode_transfer_1d is not None:
                transfunc = mode_transfer_1d[treatment][1]
            else:
                transfunc = None

            mtag = output_tag + "_%s_mock" % treatment
            agg_pwrspec = pe.summarize_agg_pwrspec(pwr_1d,
                              pwr_1d_from_2d, pwr_2d, mtag,
                              outdir=outdir,
                              apply_1d_transfer=transfunc)

            mean1dmock = agg_pwrspec["mean_1d"]
            std1dmock = agg_pwrspec["std_1d"]
            covmock = agg_pwrspec["covmat_1d"]

        # now recover the xspec with the real data
        dbkeydict = {}
        dbkeydict['map1_key'] = "%s:map;%s" % (gbt_map_key, treatment)
        dbkeydict['map2_key'] = wigglez_map_key
        dbkeydict['noiseinv1_key'] = "%s:weight;%s" % (gbt_map_key, treatment)
        dbkeydict['noiseinv2_key'] = wigglez_selection_key
        files = data_paths.convert_dbkeydict_to_filedict(dbkeydict,
                                                      datapath_db=datapath_db)

        pwrspec_out_signal = caller.execute(files['map1_key'],
                                            files['map2_key'],
                                            files['noiseinv1_key'],
                                            files['noiseinv2_key'],
                                            inifile=inifile)

        if output_tag:
            pwr_1d_from_2d = pe.convert_2d_to_1d(pwrspec_out_signal[0],
                                                 transfer=transfer_2d)

            pwr_1d = pwrspec_out_signal[1]['binavg']
            pwr_1d_from_2d = pwr_1d_from_2d['binavg']
            if mode_transfer_1d is not None:
                pwr_1d /= mode_transfer_1d[treatment][1]
                pwr_1d_from_2d /= mode_transfer_1d[treatment][1]

            # assume that they all have the same binning
            bin_left = pwrspec_out_signal[1]['bin_left']
            bin_center = pwrspec_out_signal[1]['bin_center']
            bin_right = pwrspec_out_signal[1]['bin_right']
            counts_histo = pwrspec_out_signal[1]['counts_histo']

            filename = "%s/%s_%s.dat" % (outdir,
                                         output_tag,
                                         treatment)

            outfile = open(filename, "w")
            for specdata in zip(bin_left, bin_center,
                                bin_right, counts_histo,
                                pwr_1d, pwr_1d_from_2d,
                                mean1dmock, std1dmock):
                outfile.write(("%10.15g " * 8 + "\n") % specdata)
            outfile.close()

            # TODO: kludge to make a fast fit; remove
            theory_curve = np.genfromtxt("plots/sim_15hr_oldmap_str_temperature_xWigglez/sim_15hr_oldmap_str_temperature_xWigglez_avg_from2d.dat")
            theory_curve = theory_curve[:, 4]

            if theory_curve is not None:
                restrict = np.where(np.logical_and(bin_center > 0.09,
                                                   bin_center < 1.1))
                res_slice = slice(min(restrict[0]), max(restrict[0]))

                #restrict_alt = np.where(restrict)[0][np.newaxis, :]
                #restricted_cov = covmock[restrict_alt][0]

                from core import utils
                amplitude = utils.ampfit(pwr_1d_from_2d[res_slice],
                                         covmock[res_slice, res_slice],
                                         theory_curve[res_slice])
                print "AMP:", mtag, treatment, amplitude

    if not output_tag:
        caller.multiprocess_stack()

    return None
# Example #14
# 0
def batch_single_crosspwr(left_mapkey,
                          right_simkey,
                          right_weightkey,
                          multiplier=1.,
                          inifile=None,
                          datapath_db=None,
                          outdir="./plots",
                          output_tag=None):
    r"""w_left m_left x w_right m_right"""

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    map_cases = datapath_db.fileset_cases(left_mapkey, "type;treatment")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    generate = False if output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname,
                                        cache_path,
                                        generate=generate,
                                        verbose=True)

    crosspwr_collection = {}
    for treatment in map_cases['treatment']:
        dbkeydict = {}
        dbkeydict['map1_key'] = "%s:map;%s" % (left_mapkey, treatment)
        dbkeydict['map2_key'] = right_simkey
        dbkeydict['noiseinv1_key'] = "%s:weight;%s" % (left_mapkey, treatment)
        dbkeydict['noiseinv2_key'] = right_weightkey
        files = data_paths.convert_dbkeydict_to_filedict(
            dbkeydict, datapath_db=datapath_db)

        pwrspec_out_signal = caller.execute(files['map1_key'],
                                            files['map2_key'],
                                            files['noiseinv1_key'],
                                            files['noiseinv2_key'],
                                            inifile=inifile)

        if output_tag:
            pwrspec_out_signal[0]['binavg'] *= multiplier
            pwrspec_out_signal[1]['binavg'] *= multiplier

            pwr_1d_from_2d = pe.convert_2d_to_1d(pwrspec_out_signal[0])

            mtag = "%s_%s" % (output_tag, treatment)
            pe.summarize_pwrspec(pwrspec_out_signal[1],
                                 pwr_1d_from_2d,
                                 pwrspec_out_signal[0],
                                 mtag,
                                 outdir=outdir)

            crosspwr_collection[treatment] = (pwrspec_out_signal[1],
                                              pwr_1d_from_2d,
                                              pwrspec_out_signal[0])

    if not output_tag:
        caller.multiprocess_stack()
        return None
    else:
        return crosspwr_collection
Example #15
0
def batch_gbtpwrspec_data_run(map_key,
                              inifile=None,
                              datapath_db=None,
                              output_tag=None,
                              beam_transfer=None,
                              outdir="./plots/",
                              square_1dmodetrans=False,
                              mode_transfer_1d=None,
                              mode_transfer_2d=None):
    r"""Form the pairs of maps1*weight1 x map0*weight0 for calculating the
    auto-power of the GBT data.

    map_key: database key whose fileset splits by "pair;type;treatment"
    output_tag: if set, read cached spectra and summarize; if None, queue
        the spectra for (re)generation and return None
    beam_transfer: optional 2D beam transfer applied in the 2D->1D binning
    square_1dmodetrans: square the 1D mode transfer (e.g. for auto-power)
    mode_transfer_1d / mode_transfer_2d: per-treatment transfer functions;
        indexed here as [treatment][0] (1D) and [treatment][1] (2D)

    Returns a dict keyed by treatment of the aggregate summary from
    pe.summarize_agg_pwrspec (mean_1d, std_1d, covmat_1d) when output_tag
    is set, else None.
    """

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    map_cases = datapath_db.fileset_cases(map_key, "pair;type;treatment")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    # no output tag means (re)building the result cache rather than reading it
    generate = False if output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname,
                                        cache_path,
                                        generate=generate,
                                        verbose=True)

    if output_tag:
        file_tools.mkparents(outdir)

    pwrspec_collection = {}
    for treatment in map_cases['treatment']:
        # cross-pairs of section maps (e.g. A_with_B) for the auto-power
        unique_pairs = data_paths.GBTauto_cross_pairs(map_cases['pair'],
                                                      map_cases['pair'],
                                                      cross_sym="_with_")

        # TODO: make this more elegant
        # combine mode and beam 2D transfers (either, both, or neither)
        transfer_2d = None
        if (mode_transfer_2d is not None) and (beam_transfer is None):
            transfer_2d = mode_transfer_2d[treatment][1]

        if (mode_transfer_2d is None) and (beam_transfer is not None):
            transfer_2d = beam_transfer

        if (mode_transfer_2d is not None) and (beam_transfer is not None):
            transfer_2d = mode_transfer_2d[treatment][1] * beam_transfer

        # accumulate one spectrum per section pair for this treatment
        pwr_1d = []
        pwr_2d = []
        pwr_1d_from_2d = []
        for item in unique_pairs:
            dbkeydict = {}
            # NOTE: formerly had "weight" instead of noise_inv
            mapset0 = (map_key, item[0], treatment)
            mapset1 = (map_key, item[1], treatment)
            dbkeydict['map1_key'] = "%s:%s;map;%s" % mapset0
            dbkeydict['map2_key'] = "%s:%s;map;%s" % mapset1
            dbkeydict['noiseinv1_key'] = "%s:%s;noise_inv;%s" % mapset0
            dbkeydict['noiseinv2_key'] = "%s:%s;noise_inv;%s" % mapset1
            files = data_paths.convert_dbkeydict_to_filedict(
                dbkeydict, datapath_db=datapath_db)

            pwrspec_out = caller.execute(files['map1_key'],
                                         files['map2_key'],
                                         files['noiseinv1_key'],
                                         files['noiseinv2_key'],
                                         inifile=inifile)

            if output_tag:
                pwr_1d_from_2d.append(
                    pe.convert_2d_to_1d(pwrspec_out[0], transfer=transfer_2d))

                pwr_2d.append(pwrspec_out[0])
                pwr_1d.append(pwrspec_out[1])

        if output_tag:
            mtag = output_tag + "_%s" % treatment
            if mode_transfer_1d is not None:
                transfunc = mode_transfer_1d[treatment][0]
                if square_1dmodetrans:
                    transfunc *= transfunc
            else:
                transfunc = None

            agg_pwrspec = pe.summarize_agg_pwrspec(pwr_1d,
                                                   pwr_1d_from_2d,
                                                   pwr_2d,
                                                   mtag,
                                                   outdir=outdir,
                                                   apply_1d_transfer=transfunc)

            # (mean_1d, std_1d, covmat_1d)
            pwrspec_collection[treatment] = agg_pwrspec

    if output_tag:
        return pwrspec_collection
    else:
        caller.multiprocess_stack()
        return None
def batch_crosspwr_transfer(cleaned_simkey,
                            truesignal_simkey,
                            truesignal_weightkey,
                            reference_simkey,
                            reference_weightkey,
                            inifile=None,
                            datapath_db=None,
                            outdir="./plots",
                            output_tag=None):
    r"""take relevant cross-powers
    cleaned_simkey(map) * cleaned_simkey(weight) x
    truesignal_weightkey * truesignal_simkey
    divided by:
    reference_simkey * reference_weightkey x truesignal_weightkey * truesignal_simkey
    """

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    map_cases = datapath_db.fileset_cases(cleaned_simkey, "type;treatment")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    generate = False if output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname,
                                        cache_path,
                                        generate=generate,
                                        verbose=True)

    dbkeydict = {}
    dbkeydict['map1_key'] = reference_simkey
    dbkeydict['map2_key'] = truesignal_simkey
    dbkeydict['noiseinv1_key'] = reference_weightkey
    dbkeydict['noiseinv2_key'] = truesignal_weightkey
    files = data_paths.convert_dbkeydict_to_filedict(dbkeydict,
                                                     datapath_db=datapath_db)

    reference_pwrspec_out = caller.execute(files['map1_key'],
                                           files['map2_key'],
                                           files['noiseinv1_key'],
                                           files['noiseinv2_key'],
                                           inifile=inifile)

    if output_tag:
        file_tools.mkparents(outdir)
        ref_pwr_1d_from_2d = pe.convert_2d_to_1d(reference_pwrspec_out[0])

        ref_pwr_1d = reference_pwrspec_out[1]['binavg']
        ref_pwr_2d = reference_pwrspec_out[0]['binavg']
        ref_pwr_1d_from_2d = ref_pwr_1d_from_2d['binavg']

        bin_left = reference_pwrspec_out[1]['bin_left']
        bin_center = reference_pwrspec_out[1]['bin_center']
        bin_right = reference_pwrspec_out[1]['bin_right']
        counts_histo = reference_pwrspec_out[1]['counts_histo']

    transfer_functions = {}
    for treatment in map_cases['treatment']:
        dbkeydict = {}
        dbkeydict['map1_key'] = "%s:map;%s" % (cleaned_simkey, treatment)
        dbkeydict['map2_key'] = truesignal_simkey
        dbkeydict['noiseinv1_key'] = "%s:weight;%s" % (cleaned_simkey,
                                                       treatment)
        dbkeydict['noiseinv2_key'] = truesignal_weightkey
        files = data_paths.convert_dbkeydict_to_filedict(
            dbkeydict, datapath_db=datapath_db)

        pwrspec_out_signal = caller.execute(files['map1_key'],
                                            files['map2_key'],
                                            files['noiseinv1_key'],
                                            files['noiseinv2_key'],
                                            inifile=inifile)

        if output_tag:
            pwr_1d_from_2d = pe.convert_2d_to_1d(pwrspec_out_signal[0])

            pwr_1d = pwrspec_out_signal[1]['binavg']
            pwr_2d = pwrspec_out_signal[0]['binavg']
            pwr_1d_from_2d = pwr_1d_from_2d['binavg']

            trans1d_mode = pwr_1d / ref_pwr_1d
            trans1d_from2d_mode = pwr_1d_from_2d / ref_pwr_1d_from_2d
            trans2d_mode = pwr_2d / ref_pwr_2d

            transfer_functions[treatment] = (trans1d_mode, trans1d_from2d_mode,
                                             trans2d_mode)

            filename = "%s/%s_%s.dat" % (outdir, output_tag, treatment)

            outfile = open(filename, "w")
            for specdata in zip(bin_left, bin_center, bin_right, counts_histo,
                                pwr_1d, pwr_1d_from_2d, trans1d_mode,
                                trans1d_from2d_mode):
                outfile.write(("%10.15g " * 8 + "\n") % specdata)
            outfile.close()

    if not output_tag:
        caller.multiprocess_stack()
        return None
    else:
        return transfer_functions
def batch_gbtpwrspec_data_run(map_key, inifile=None, datapath_db=None,
                   output_tag=None, beam_transfer=None,
                   outdir="./plots/",
                   square_1dmodetrans=False,
                   mode_transfer_1d=None,
                   mode_transfer_2d=None):
    r"""Form the pairs of maps1*weight1 x map0*weight0 for calculating the
    auto-power of the GBT data.

    NOTE(review): this appears to be a duplicate of the other
    batch_gbtpwrspec_data_run definition in this file (only formatting
    differs); the later definition wins at import time -- consider removing
    one copy.

    map_key: database key whose fileset splits by "pair;type;treatment"
    output_tag: if set, read cached spectra and summarize; if None, queue
        the spectra for (re)generation and return None
    beam_transfer: optional 2D beam transfer applied in the 2D->1D binning
    square_1dmodetrans: square the 1D mode transfer (e.g. for auto-power)
    mode_transfer_1d / mode_transfer_2d: per-treatment transfer functions;
        indexed here as [treatment][0] (1D) and [treatment][1] (2D)

    Returns a dict keyed by treatment of the aggregate summary
    (mean_1d, std_1d, covmat_1d) when output_tag is set, else None.
    """

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    map_cases = datapath_db.fileset_cases(map_key, "pair;type;treatment")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    # no output tag means (re)building the result cache rather than reading it
    generate = False if output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname, cache_path,
                                        generate=generate, verbose=True)

    if output_tag:
        file_tools.mkparents(outdir)

    pwrspec_collection = {}
    for treatment in map_cases['treatment']:
        # cross-pairs of section maps (e.g. A_with_B) for the auto-power
        unique_pairs = data_paths.GBTauto_cross_pairs(map_cases['pair'],
                                                    map_cases['pair'],
                                                    cross_sym="_with_")

        # TODO: make this more elegant
        # combine mode and beam 2D transfers (either, both, or neither)
        transfer_2d = None
        if (mode_transfer_2d is not None) and (beam_transfer is None):
            transfer_2d = mode_transfer_2d[treatment][1]

        if (mode_transfer_2d is None) and (beam_transfer is not None):
            transfer_2d = beam_transfer

        if (mode_transfer_2d is not None) and (beam_transfer is not None):
            transfer_2d = mode_transfer_2d[treatment][1] * beam_transfer

        # accumulate one spectrum per section pair for this treatment
        pwr_1d = []
        pwr_2d = []
        pwr_1d_from_2d = []
        for item in unique_pairs:
            dbkeydict = {}
            # NOTE: formerly had "weight" instead of noise_inv
            mapset0 = (map_key, item[0], treatment)
            mapset1 = (map_key, item[1], treatment)
            dbkeydict['map1_key'] = "%s:%s;map;%s" % mapset0
            dbkeydict['map2_key'] = "%s:%s;map;%s" % mapset1
            dbkeydict['noiseinv1_key'] = "%s:%s;noise_inv;%s" % mapset0
            dbkeydict['noiseinv2_key'] = "%s:%s;noise_inv;%s" % mapset1
            files = data_paths.convert_dbkeydict_to_filedict(dbkeydict,
                                                      datapath_db=datapath_db)

            pwrspec_out = caller.execute(files['map1_key'],
                                         files['map2_key'],
                                         files['noiseinv1_key'],
                                         files['noiseinv2_key'],
                                         inifile=inifile)

            if output_tag:
                pwr_1d_from_2d.append(pe.convert_2d_to_1d(pwrspec_out[0],
                                      transfer=transfer_2d))

                pwr_2d.append(pwrspec_out[0])
                pwr_1d.append(pwrspec_out[1])

        if output_tag:
            mtag = output_tag + "_%s" % treatment
            if mode_transfer_1d is not None:
                transfunc = mode_transfer_1d[treatment][0]
                if square_1dmodetrans:
                    transfunc *= transfunc
            else:
                transfunc = None

            agg_pwrspec = pe.summarize_agg_pwrspec(pwr_1d,
                                                   pwr_1d_from_2d, pwr_2d, mtag,
                                                   outdir=outdir,
                                                   apply_1d_transfer=transfunc)

            # (mean_1d, std_1d, covmat_1d)
            pwrspec_collection[treatment] = agg_pwrspec

    if output_tag:
        return pwrspec_collection
    else:
        caller.multiprocess_stack()
        return None
def batch_sim_run(simleft_key, simright_key,
                  weightleft_key, weightright_key,
                  inifile=None, datapath_db=None,
                  outdir="./plots/",
                  usecache_output_tag=None, transfer=None):
    r"""
    typical weight matrix:
    db:GBT_15hr_map_cleaned_0mode:A_with_B;noise_inv
    """

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    mock_cases = datapath_db.fileset_cases(simleft_key, "realization")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    generate = False if usecache_output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname, cache_path,
                                        generate=generate, verbose=True)

    if usecache_output_tag:
        output_root = "%s/%s/" % (outdir, usecache_output_tag)
        file_tools.mkparents(output_root)

    pwr_1d = []
    pwr_1d_from_2d = []
    pwr_2d = []
    for index in mock_cases['realization']:
        dbkeydict = {}
        dbkeydict['map1_key'] = "%s:%s" % (simleft_key, index)
        dbkeydict['map2_key'] = "%s:%s" % (simright_key, index)
        dbkeydict['noiseinv1_key'] = weightleft_key
        dbkeydict['noiseinv2_key'] = weightright_key
        files = data_paths.convert_dbkeydict_to_filedict(dbkeydict,
                                                    datapath_db=datapath_db)

        pwrspec_out = caller.execute(files['map1_key'], files['map2_key'],
                                     files['noiseinv1_key'],
                                     files['noiseinv2_key'],
                                     inifile=inifile)

        if usecache_output_tag:
            pwr_1d_from_2d.append(pe.convert_2d_to_1d(pwrspec_out[0],
                                                  transfer=transfer))

            pwr_2d.append(pwrspec_out[0])
            pwr_1d.append(pwrspec_out[1])

    if usecache_output_tag:
        pe.summarize_agg_pwrspec(pwr_1d, pwr_1d_from_2d, pwr_2d,
                                 usecache_output_tag, outdir=output_root)
        retval = (pwr_1d, pwr_1d_from_2d, pwr_2d)
    else:
        caller.multiprocess_stack()
        retval = None

    return retval
Example #19
0
def batch_gbtxwigglez_data_run(gbt_map_key,
                               wigglez_map_key,
                               wigglez_mock_key,
                               wigglez_selection_key,
                               inifile=None,
                               datapath_db=None,
                               outdir="./plots",
                               output_tag=None,
                               beam_transfer=None,
                               mode_transfer_1d=None,
                               mode_transfer_2d=None,
                               theory_curve=None):
    r"""assemble the pairs of GBT and WiggleZ and calculate the cross-power"""

    if datapath_db is None:
        datapath_db = data_paths.DataPath()

    cache_path = datapath_db.fetch("quadratic_batch_data")

    map_cases = datapath_db.fileset_cases(gbt_map_key, "type;treatment")
    mock_cases = datapath_db.fileset_cases(wigglez_mock_key, "realization")

    funcname = "correlate.batch_quadratic.call_xspec_run"
    generate = False if output_tag else True
    if generate:
        print "REGENERATING the power spectrum result cache: "

    caller = batch_handler.MemoizeBatch(funcname,
                                        cache_path,
                                        generate=generate,
                                        verbose=True)

    if output_tag:
        file_tools.mkparents(outdir)

    for treatment in map_cases['treatment']:
        # TODO: make this more elegant
        # TODO: convert treatment into mode num
        transfer_2d = None
        if (mode_transfer_2d is not None) and (beam_transfer is None):
            transfer_2d = mode_transfer_2d[treatment][2]

        if (mode_transfer_2d is None) and (beam_transfer is not None):
            transfer_2d = beam_transfer

        if (mode_transfer_2d is not None) and (beam_transfer is not None):
            transfer_2d = mode_transfer_2d[treatment][2] * beam_transfer

        pwr_1d = []
        pwr_2d = []
        pwr_1d_from_2d = []
        for index in mock_cases['realization']:
            dbkeydict = {}
            dbkeydict['map1_key'] = "%s:map;%s" % (gbt_map_key, treatment)
            dbkeydict['map2_key'] = "%s:%s" % (wigglez_mock_key, index)
            dbkeydict['noiseinv1_key'] = "%s:weight;%s" % \
                                         (gbt_map_key, treatment)

            dbkeydict['noiseinv2_key'] = wigglez_selection_key
            files = data_paths.convert_dbkeydict_to_filedict(
                dbkeydict, datapath_db=datapath_db)

            pwrspec_out = caller.execute(files['map1_key'],
                                         files['map2_key'],
                                         files['noiseinv1_key'],
                                         files['noiseinv2_key'],
                                         inifile=inifile)

            if output_tag:
                pwr_1d_from_2d.append(
                    pe.convert_2d_to_1d(pwrspec_out[0], transfer=transfer_2d))

                pwr_2d.append(pwrspec_out[0])
                pwr_1d.append(pwrspec_out[1])

        if output_tag:
            if mode_transfer_1d is not None:
                transfunc = mode_transfer_1d[treatment][1]
            else:
                transfunc = None

            mtag = output_tag + "_%s_mock" % treatment
            agg_pwrspec = pe.summarize_agg_pwrspec(pwr_1d,
                                                   pwr_1d_from_2d,
                                                   pwr_2d,
                                                   mtag,
                                                   outdir=outdir,
                                                   apply_1d_transfer=transfunc)

            mean1dmock = agg_pwrspec["mean_1d"]
            std1dmock = agg_pwrspec["std_1d"]
            covmock = agg_pwrspec["covmat_1d"]

        # now recover the xspec with the real data
        dbkeydict = {}
        dbkeydict['map1_key'] = "%s:map;%s" % (gbt_map_key, treatment)
        dbkeydict['map2_key'] = wigglez_map_key
        dbkeydict['noiseinv1_key'] = "%s:weight;%s" % (gbt_map_key, treatment)
        dbkeydict['noiseinv2_key'] = wigglez_selection_key
        files = data_paths.convert_dbkeydict_to_filedict(
            dbkeydict, datapath_db=datapath_db)

        pwrspec_out_signal = caller.execute(files['map1_key'],
                                            files['map2_key'],
                                            files['noiseinv1_key'],
                                            files['noiseinv2_key'],
                                            inifile=inifile)

        if output_tag:
            pwr_1d_from_2d = pe.convert_2d_to_1d(pwrspec_out_signal[0],
                                                 transfer=transfer_2d)

            pwr_1d = pwrspec_out_signal[1]['binavg']
            pwr_1d_from_2d = pwr_1d_from_2d['binavg']
            if mode_transfer_1d is not None:
                pwr_1d /= mode_transfer_1d[treatment][1]
                pwr_1d_from_2d /= mode_transfer_1d[treatment][1]

            # assume that they all have the same binning
            bin_left = pwrspec_out_signal[1]['bin_left']
            bin_center = pwrspec_out_signal[1]['bin_center']
            bin_right = pwrspec_out_signal[1]['bin_right']
            counts_histo = pwrspec_out_signal[1]['counts_histo']

            filename = "%s/%s_%s.dat" % (outdir, output_tag, treatment)

            outfile = open(filename, "w")
            for specdata in zip(bin_left, bin_center, bin_right, counts_histo,
                                pwr_1d, pwr_1d_from_2d, mean1dmock, std1dmock):
                outfile.write(("%10.15g " * 8 + "\n") % specdata)
            outfile.close()

            # TODO: kludge to make a fast fit; remove
            theory_curve = np.genfromtxt(
                "plots/sim_15hr_oldmap_str_temperature_xWigglez/sim_15hr_oldmap_str_temperature_xWigglez_avg_from2d.dat"
            )
            theory_curve = theory_curve[:, 4]

            if theory_curve is not None:
                restrict = np.where(
                    np.logical_and(bin_center > 0.09, bin_center < 1.1))
                res_slice = slice(min(restrict[0]), max(restrict[0]))

                #restrict_alt = np.where(restrict)[0][np.newaxis, :]
                #restricted_cov = covmock[restrict_alt][0]

                from core import utils
                amplitude = utils.ampfit(pwr_1d_from_2d[res_slice],
                                         covmock[res_slice, res_slice],
                                         theory_curve[res_slice])
                print "AMP:", mtag, treatment, amplitude

    if not output_tag:
        caller.multiprocess_stack()

    return None