Example No. 1
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not args.rosters:
            raise RuntimeError("You must specify at least one target roster")

        if is_null(args.ttype):
            raise RuntimeError("You must specify a target type")

        if is_null(args.sims):
            sims = []
        else:
            sims = args.sims

        if is_null(args.alias_dict):
            aliases = None
        else:
            aliases = load_yaml(args.alias_dict)

        name_keys = dict(target_type=args.ttype,
                         fullpath=True)
        config_file = NAME_FACTORY.ttypeconfig(**name_keys)

        if is_not_null(args.config):
            config_file = args.config

        roster_dict = {}
        for roster in args.rosters:
            a_roster = load_yaml(roster)
            roster_dict.update(a_roster)

        base_config = load_yaml(config_file)
        self._write_target_dirs(args.ttype, roster_dict, base_config,
                                sims, args.spatial_models, aliases)
Example No. 2
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        workdir = os.path.dirname(args.config)
        _config_file = self._clone_config_and_srcmaps(args.config, args.seed)

        gta = GTAnalysis(_config_file,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        gta.load_roi(args.roi_baseline)

        simfile = os.path.join(workdir,
                               'sim_%s_%s.yaml' % (args.sim, args.sim_profile))

        mcube_file = "%s_%s_%06i" % (args.sim, args.sim_profile, args.seed)
        sim_config = utils.load_yaml(simfile)

        injected_source = sim_config.get('injected_source', None)
        if injected_source is not None:
            src_dict = injected_source['source_model']
            src_dict['ra'] = gta.config['selection']['ra']
            src_dict['dec'] = gta.config['selection']['dec']
            injected_name = injected_source['name']
            gta.add_source(injected_name, src_dict)
            gta.write_model_map(mcube_file)
            mc_spec_dict = dict(
                true_counts=gta.model_counts_spectrum(injected_name),
                energies=gta.energies,
                model=src_dict)
            mcspec_file = os.path.join(
                workdir, "mcspec_%s_%06i.yaml" % (mcube_file, args.seed))
            utils.write_yaml(mc_spec_dict, mcspec_file)
        else:
            injected_name = None

        gta.write_roi('sim_baseline_%06i' % args.seed)

        test_sources = {}
        for profile in args.profiles:
            profile_path = os.path.join(workdir, 'profile_%s.yaml' % profile)
            test_source = load_yaml(profile_path)
            test_sources[profile] = test_source
            first = args.seed
            last = first + args.nsims
            for seed in range(first, last):
                self._run_simulation(gta,
                                     args.roi_baseline,
                                     injected_name,
                                     test_sources,
                                     first,
                                     seed,
                                     non_null_src=args.non_null_src,
                                     do_find_src=args.do_find_src)
Example No. 3
def main():

    usage = "make_spectra.py [options]"
    description = "Plot spectral components from a results file"

    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('-i',
                        '--input',
                        type=str,
                        default=None,
                        help='Input file')
    parser.add_argument('-c',
                        '--compare',
                        type=str,
                        default=None,
                        help='Input file to compare')
    parser.add_argument('-o',
                        '--output',
                        type=str,
                        default=None,
                        help='Output file prefix ')
    parser.add_argument('-p',
                        '--plot',
                        type=str,
                        default='png',
                        help='Plot type')
    parser.add_argument('-d',
                        '--comp_dict',
                        type=str,
                        default=None,
                        help='Component dict')
    args = parser.parse_args(sys.argv[1:])

    if args.input is not None:
        npred_dict = utils.load_yaml(args.input)
    else:
        npred_dict = None

    if args.compare is not None:
        compare_npred_dict = utils.load_yaml(args.compare)
    else:
        compare_npred_dict = None

    comp_dict = utils.load_yaml(args.comp_dict)

    sum_npreds(npred_dict)

    figs = []
    for k, v in comp_dict.items():
        fig_map = plot_npreds(npred_dict, v, compare_npred_dict)
        fig_diff = plot_npred_diff(npred_dict, v, compare_npred_dict)
        fig_map.savefig("%s_%s_npred.%s" % (args.output, k, args.plot))
        fig_diff.savefig("%s_%s_npreddiff.%s" % (args.output, k, args.plot))
        figs.append(fig_map)
        figs.append(fig_diff)
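The argparse options above imply a command line along these lines (a sketch only; the file names are illustrative, not taken from the source):

    python make_spectra.py -i npred.yaml -c npred_ref.yaml -o spectra -p png -d components.yaml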
Example No. 4
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        workdir = os.path.dirname(args.config)
        _config_file = self._clone_config_and_srcmaps(args.config, args.seed)

        gta = GTAnalysis(_config_file, logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        gta.load_roi(args.roi_baseline)

        simfile = os.path.join(workdir, 'sim_%s_%s.yaml' %
                               (args.sim, args.sim_profile))

        mcube_file = "%s_%s_%06i" % (args.sim, args.sim_profile, args.seed)
        sim_config = utils.load_yaml(simfile)

        injected_source = sim_config.get('injected_source', None)
        if injected_source is not None:
            src_dict = injected_source['source_model']
            src_dict['ra'] = gta.config['selection']['ra']
            src_dict['dec'] = gta.config['selection']['dec']
            injected_name = injected_source['name']
            gta.add_source(injected_name, src_dict)
            gta.write_model_map(mcube_file)
            mc_spec_dict = dict(true_counts=gta.model_counts_spectrum(injected_name),
                                energies=gta.energies,
                                model=src_dict)
            mcspec_file = os.path.join(workdir,
                                       "mcspec_%s_%06i.yaml" % (mcube_file, args.seed))
            utils.write_yaml(mc_spec_dict, mcspec_file)
        else:
            injected_name = None

        gta.write_roi('sim_baseline_%06i' % args.seed)

        test_sources = []
        for profile in args.profiles:
            profile_path = os.path.join(workdir, 'profile_%s.yaml' % profile)
            test_source = load_yaml(profile_path)
            test_sources.append(test_source)
            first = args.seed
            last = first + args.nsims
            for seed in range(first, last):
                self._run_simulation(gta, args.roi_baseline,
                                     injected_name, test_sources, first, seed,
                                     non_null_src=args.non_null_src)
Example No. 5
    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided to
        the indiviudal links """
        config_yaml = args['config']
        config_dict = load_yaml(config_yaml)
        ttype = config_dict.get('ttype')
        config_localpath = config_dict.get('config_localpath', None)
        specfile = config_dict.get('specfile')
        targetlist = config_dict.get('targetlist')
        data_plotting = config_dict.get('data_plotting')

        self._set_link('analyze-roi',
                       AnalyzeROI_SG,
                       ttype=ttype,
                       targetlist=targetlist,
                       config=config_localpath)
        self._set_link('analyze-sed',
                       AnalyzeSED_SG,
                       ttype=ttype,
                       targetlist=targetlist,
                       config=config_localpath)

        config_plot_castro = _get_plot_config(data_plotting, 'plot-castro')
        if config_plot_castro is not None:
            self._set_link('plot-castro-sg',
                           PlotCastro_SG,
                           ttype=ttype,
                           targetlist=targetlist,
                           **config_plot_castro)
Example No. 6
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        models = load_yaml(args['models'])

        for modelkey in models:
            manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                    'srcmap_manifest_%s.yaml' % modelkey)
            for comp in components:
                key = comp.make_key('{ebin_name}_{evtype_name}')
                fullkey = "%s_%s" % (modelkey, key)
                outfile = NAME_FACTORY.merged_srcmaps(modelkey=modelkey,
                                                      component=key,
                                                      coordsys=comp.coordsys,
                                                      mktime='none',
                                                      irf_ver=NAME_FACTORY.irf_ver())
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[fullkey] = dict(input=manifest,
                                            compname=key,
                                            logfile=logfile)
        return job_configs
Example No. 7
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)

        for target_name, target_list in targets.items():
            for targ_prof in target_list:
                name_keys = dict(target_type=ttype,
                                 target_name=target_name,
                                 profile=targ_prof,
                                 fullpath=True)
                targ_key = "%s_%s" % (target_name, targ_prof)
                input_path = NAME_FACTORY.sedfile(**name_keys)
                output_path = input_path.replace('.fits', '.png')
                logfile = make_nfs_path(input_path.replace('.fits', '.log'))
                job_config = dict(infile=input_path,
                                  outfile=output_path,
                                  logfile=logfile)
                job_configs[targ_key] = job_config

        return job_configs
Example No. 8
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (sim_targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        targets = load_yaml(sim_targets_yaml)

        base_config = dict(ttype=ttype,
                           roi_baseline=args['roi_baseline'],
                           extracopy=args['extracopy'],
                           sim=sim)

        for target_name in targets.keys():
            targetdir = NAME_FACTORY.sim_targetdir(target_type=ttype,
                                                   target_name=target_name,
                                                   sim_name=sim)
            logfile = os.path.join(targetdir, 'copy_base_dir.log')
            job_config = base_config.copy()
            job_config.update(dict(target=target_name,
                                   logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Example No. 9
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        models = load_yaml(args['models'])

        for modelkey in models:
            manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                    'srcmap_manifest_%s.yaml' % modelkey)
            for comp in components:
                key = comp.make_key('{ebin_name}_{evtype_name}')
                fullkey = "%s_%s" % (modelkey, key)
                outfile = NAME_FACTORY.merged_srcmaps(
                    modelkey=modelkey,
                    component=key,
                    coordsys=comp.coordsys,
                    mktime='none',
                    irf_ver=NAME_FACTORY.irf_ver())
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[fullkey] = dict(input=manifest,
                                            compname=key,
                                            logfile=logfile)
        return job_configs
Example No. 10
def build_profile_dict(basedir, profile_name):
    """Get the name and source dictionary for the test source.
    
    Parameters
    ----------
    
    basedir : str
        Path to the analysis directory
        
    profile_name : str
        Key for the spatial from of the target

    Returns
    -------
    
    profile_name : str
        Name of for this particular profile

    src_name : str
        Name of the source for this particular profile
    
    profile_dict : dict
        Dictionary with the source parameters

    """
    profile_path = os.path.join(basedir, "profile_%s.yaml" % profile_name)
    profile_config = load_yaml(profile_path)
    src_name = profile_config['name']
    profile_dict = profile_config['source_model']
    return profile_name, src_name, profile_dict
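A minimal usage sketch, assuming a profile yaml that contains the two keys the function reads ('name' and 'source_model'); the path and values below are illustrative, not from the source:

# analysis/my_target/profile_point.yaml (illustrative):
#   name: my_target_point
#   source_model: {SpatialModel: PointSource, SpectrumType: PowerLaw}
key, src_name, src_dict = build_profile_dict('analysis/my_target', 'point')
# key == 'point', src_name == 'my_target_point', src_dict is the source_model block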
Example No. 11
    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided to
        the indiviudal links """

        config_yaml = args['config']
        config_dict = load_yaml(config_yaml)

        data = config_dict.get('data')
        comp = config_dict.get('comp')
        dry_run = args.get('dry_run', False)

        self._set_link('prepare', SplitAndMktimeChain,
                       comp=comp, data=data,
                       ft1file=config_dict['ft1file'],
                       ft2file=config_dict['ft2file'],
                       hpx_order_ccube=config_dict.get('hpx_order_ccube', 7),
                       hpx_order_expcube=config_dict.get('hpx_order_expcube', 7),
                       mktime=config_dict.get('mktimefilter', None),
                       do_ltsum=config_dict.get('do_ltsum', False),
                       scratch=config_dict.get('scratch', None),
                       dry_run=dry_run)

        self._set_link('residual-cr', ResidualCR_SG,
                       comp=comp, data=data,
                       mktimefilter=config_dict.get('mktimefilter', None),
                       hpx_order=config_dict.get('hpx_order_fitting', 4),
                       clean=config_dict.get('clean_class', None),
                       dirty=config_dict.get('dirty_class', None),
                       select_factor=config_dict.get('select_factor', None),
                       mask_factor=config_dict.get('mask_factor', None),
                       sigma=config_dict.get('sigma', None),
                       full_output=config_dict.get('full_output', False),
                       dry_run=dry_run)
Example No. 12
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if sim is not None:
            raise ValueError("Found 'sim' argument on AnalyzeExtension_SG config.")
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)
        config_yaml = 'config.yaml'

        base_config = dict(roi_baseline=args['roi_baseline'],
                           make_plots=args['make_plots'])

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             fullpath=True)
            target_dir = NAME_FACTORY.targetdir(**name_keys)
            config_path = os.path.join(target_dir, config_yaml)
            logfile = make_nfs_path(os.path.join(
                target_dir, "%s_%s.log" % (self.linkname, target_name)))
            job_config = base_config.copy()
            job_config.update(dict(config=config_path,
                                   logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Example No. 13
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        # Tweak the batch job args
        try:
            self._interface._lsf_args.update(dict(n=2))
            self._interface._lsf_args.update(dict(R='\"select[rhel60&&!fell] -R span[hosts=1]\"'))
        except AttributeError:
            pass

        models = load_yaml(args['models'])

        base_config = dict(fit_strategy=args['fit_strategy'],
                           input_pars=args['input_pars'],
                           load_baseline=args['load_baseline'],
                           make_plots=args['make_plots'])

        for modelkey in models:
            config_file = os.path.join('analysis', 'model_%s' % modelkey,
                                       args['config'])
            #roi_baseline = os.path.join('analysis', 'model_%s' % modelkey,
            #                           args['roi_baseline'])
            roi_baseline = args['roi_baseline']
            logfile = os.path.join('analysis', 'model_%s' % modelkey,
                                   'fit_%s.log' % modelkey)
            job_config = base_config.copy()
            job_config.update(dict(config=config_file,
                                   roi_baseline=roi_baseline,
                                   logfile=logfile))
            job_configs[modelkey] = job_config

        return job_configs
Example No. 14
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if sim is not None:
            raise ValueError("Found 'sim' argument on AnalyzeROI_SG config.")
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        targets = load_yaml(targets_yaml)
        base_config = dict(roi_baseline=args['roi_baseline'],
                           make_plots=args['make_plots'])

        for target_name in targets.keys():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             fullpath=True)
            target_dir = NAME_FACTORY.targetdir(**name_keys)
            config_path = os.path.join(target_dir, config_yaml)
            logfile = make_nfs_path(
                os.path.join(target_dir,
                             "%s_%s.log" % (self.linkname, target_name)))
            job_config = base_config.copy()
            job_config.update(dict(config=config_path, logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Example No. 15
def main():
    usage = "npred_txt2yaml.py [options]"
    description = "Convert Gardian text files to yaml dictionary"

    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('-i',
                        '--input',
                        type=str,
                        default=None,
                        help='Input file')
    parser.add_argument('-o',
                        '--output',
                        type=str,
                        default=None,
                        help='Output ')
    parser.add_argument('-d',
                        '--conv_dict',
                        type=str,
                        default=None,
                        help='Conversion dictionary')

    args = parser.parse_args(sys.argv[1:])

    conv_dict = load_yaml(args.conv_dict)
    npred_dict = make_npred_dict(args.input, conv_dict)

    write_yaml(npred_dict, args.output)
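An invocation consistent with the options above might look like this (file names are illustrative):

    python npred_txt2yaml.py -i npred_gardian.txt -o npred.yaml -d conv_dict.yaml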
Example No. 16
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        input_config = {}
        job_configs = {}
        output_config = {}

        topdir = args['topdir']
        targets_yaml = os.path.join(topdir, args['targetlist'])

        try:
            targets = load_yaml(targets_yaml)
        except IOError:
            targets = {}

        for target_name, target_list in targets.items():
            for targ_prof in target_list:
                targ_key = "%s_%s" % (target_name, targ_prof)
                input_path = os.path.join(topdir, target_name,
                                          'sed_%s.fits' % targ_prof)
                output_path = os.path.join(topdir, target_name,
                                           'sed_%s.png' % targ_prof)
                logfile = os.path.join(topdir, target_name,
                                       'plot_castro_%s.log' % targ_prof)
                job_config = dict(input=input_path, output=output_path)
                job_configs[targ_key] = job_config

        return input_config, job_configs, output_config
Example No. 17
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        channels = [
            'ee', 'mumu', 'tautau', 'bb', 'tt', 'gg', 'ww', 'zz', 'cc', 'uu',
            'dd', 'ss'
        ]
        norm_type = 'eflux'

        spec_table = DMSpecTable.create_from_fits(args.spec)
        profile = load_yaml(args.profile_yaml)

        j_value = profile.get('j_integ')
        j_sigma = profile.get('j_sigma', None)
        if args.jprior is None or args.jprior == 'None' or j_sigma is None or j_sigma == 0.0:
            j_factor = j_value
            j_prior_key = 'none'
        else:
            j_factor = dict(functype=args.jprior,
                            j_value=j_value,
                            mu=j_value,
                            sigma=j_sigma)
            j_prior_key = args.jprior

        sed = CastroData.create_from_sedfile(args.sed_file, norm_type)
        c_list, t_list, n_list = DMCastroConvertor.convert_sed_to_dm(
            spec_table, sed, channels, norm_type, j_factor)

        fits_utils.write_tables_to_fits(args.outfile,
                                        t_list,
                                        clobber=args.clobber,
                                        namelist=n_list)
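For context, the branch above either passes the J factor through as a plain number or wraps it in a prior dictionary; a sketch of the two cases with illustrative values (the prior name 'lgauss' is only an example, not confirmed by the source):

# profile yaml with j_integ: 1.0e19 and no j_sigma, or --jprior None:
#     j_factor = 1.0e19, j_prior_key = 'none'
# profile yaml with j_integ: 1.0e19, j_sigma: 0.15, and --jprior lgauss:
#     j_factor = dict(functype='lgauss', j_value=1.0e19, mu=1.0e19, sigma=0.15)
#     j_prior_key = 'lgauss'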
Example No. 18
def main():
    usage = "combine_dnm.py [options]"
    description = "add the pos and neg dnm components"

    parser = argparse.ArgumentParser(usage=usage, description=description)
    parser.add_argument('-i',
                        '--input',
                        type=str,
                        default=None,
                        help='Input file prefix')
    parser.add_argument('-o',
                        '--output',
                        type=str,
                        default=None,
                        help='Output ')
    parser.add_argument('-f',
                        '--fact',
                        default=False,
                        action='store_true',
                        help='Use factors')

    args = parser.parse_args(sys.argv[1:])

    out_dict = utils.load_yaml(args.input)

    combine_dnm_maps(out_dict, args.fact)

    utils.write_yaml(out_dict, args.output)
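A command line consistent with the options above (file names are illustrative):

    python combine_dnm.py -i dnm_components.yaml -o dnm_combined.yaml -f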
Example No. 19
    def _map_arguments(self, input_dict):
        """Map from the top-level arguments to the arguments provided to
        the indiviudal links """

        config_yaml = input_dict['config']
        config_dict = load_yaml(config_yaml)

        data = config_dict.get('data')
        comp = config_dict.get('comp')
        sourcekeys = config_dict.get('sourcekeys')

        mktimefilter = config_dict.get('mktimefilter')

        self._set_link('expcube2',
                       Gtexpcube2wcs_SG,
                       comp=comp,
                       data=data,
                       mktimefilter=mktimefilter)

        self._set_link('exphpsun',
                       Gtexphpsun_SG,
                       comp=comp,
                       data=data,
                       mktimefilter=mktimefilter)

        self._set_link('suntemp',
                       Gtsuntemp_SG,
                       comp=comp,
                       data=data,
                       mktimefilter=mktimefilter,
                       sourcekeys=sourcekeys)
Example No. 20
    def _build_profile_dict(basedir, profile_name):
        """Get the name and source dictionary for the test source.

        Parameters
        ----------
        basedir : str
            Path to the analysis directory

        profile_name : str
            Key for the spatial form of the target

        Returns
        -------

        profile_name : str
            Name of source to use for this particular profile

        profile_dict : dict
            Dictionary with the source parameters

        """
        profile_path = os.path.join(basedir, "profile_%s.yaml" % profile_name)
        profile_config = load_yaml(profile_path)
        if profile_name != profile_config['name']:
            sys.stderr.write(
                'Warning, profile name (%s) != name in %s (%s)\n' %
                (profile_name, profile_path, profile_config['name']))

        profile_dict = profile_config['source_model']
        return profile_name, profile_dict
Example No. 21
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        limitfile = args.limitfile
        first = args.seed
        last = first + args.nsims
        flist = [limitfile.replace("_SEED.fits", "_%06i.fits" % seed)
                 for seed in range(first, last)]

        spec_config = load_yaml(args.specconfig)
        specs = spec_config['specs']
        sum_specs = specs.copy()

        outfile = args.outfile
        summaryfile = args.summaryfile

        hdus = sum_specs + ['INDICES']

        out_tables, out_names = vstack_tables(flist, hdus)

        if is_not_null(outfile):
            fits_utils.write_tables_to_fits(outfile,
                                            out_tables,
                                            namelist=out_names)

        if is_not_null(summaryfile):
            summary_tables = []
            for ot in out_tables[0:-1]:
                summary_table = summarize_limits_results(ot)
                summary_tables.append(summary_table)
            summary_tables.append(Table(out_tables[-1][0]))
            fits_utils.write_tables_to_fits(summaryfile,
                                            summary_tables,
                                            namelist=out_names)
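A small standalone sketch of the file-name expansion performed above (names and counts are illustrative):

limitfile, first, nsims = 'dmlimits_SEED.fits', 0, 3
flist = [limitfile.replace('_SEED.fits', '_%06i.fits' % seed)
         for seed in range(first, first + nsims)]
# flist == ['dmlimits_000000.fits', 'dmlimits_000001.fits', 'dmlimits_000002.fits']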
Example No. 22
    def _map_arguments(self, input_dict):
        """Map from the top-level arguments to the arguments provided to
        the indiviudal links """

        config_yaml = input_dict['config']
        config_dict = load_yaml(config_yaml)

        data = config_dict.get('data')
        comp = config_dict.get('comp')
        sourcekeys = config_dict.get('sourcekeys')

        mktimefilter = config_dict.get('mktimefilter')

        self._set_link('expcube2', Gtexpcube2wcs_SG,
                       comp=comp, data=data,
                       mktimefilter=mktimefilter)

        self._set_link('exphpsun', Gtexphpsun_SG,
                       comp=comp, data=data,
                       mktimefilter=mktimefilter)

        self._set_link('suntemp', Gtsuntemp_SG,
                       comp=comp, data=data,
                       mktimefilter=mktimefilter,
                       sourcekeys=sourcekeys)
Example No. 23
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if args.ttype is None:
            raise RuntimeError('Target type must be specified')

        name_keys = dict(target_type=args.ttype,
                         rosterlist='roster_list.yaml',
                         sim_name=args.sim,
                         fullpath=True)

        spec_config = NAME_FACTORY.specconfig(**name_keys)
        if is_not_null(args.specconfig):
            spec_config = args.specconfig

        spec_config = load_yaml(spec_config)
        channels = spec_config['channels']

        if is_not_null(args.sim):
            roster_file = NAME_FACTORY.sim_rosterfile(**name_keys)
            sim_name = args.sim
            is_sim = True
        else:
            roster_file = NAME_FACTORY.rosterfile(**name_keys)
            is_sim = False
            sim_name = None

        if is_not_null(args.rosterlist):
            roster_file = args.rosterlist

        roster_dict = load_yaml(roster_file)

        if is_sim:
            seedlist = list(range(args.seed, args.seed + args.nsims))
        else:
            seedlist = [0]

        astro_prior = args.astro_prior
        if is_null(astro_prior):
            astro_prior = 'none'

        for seed in seedlist:
            StackLikelihood.stack_rosters(roster_dict, args.ttype, channels,
                                          astro_prior, sim_name, seed,
                                          args.clobber)
Example No. 24
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if is_null(args.config):
            raise ValueError("Config yaml file must be specified")
        if is_null(args.rand_config):
            raise ValueError(
                "Random direction config yaml file must be specified")
        config = load_yaml(args.config)
        rand_config = load_yaml(args.rand_config)

        wcsgeom = self._make_wcsgeom_from_config(config)
        dir_dict = self._build_skydir_dict(wcsgeom, rand_config)

        if is_not_null(args.outfile):
            write_yaml(dir_dict, args.outfile)
Example No. 25
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        targets = load_yaml(targets_yaml)
        nsims_job = args['nsims_job']
        first_seed = args['seed']
        nsims = args['nsims']
        last_seed = first_seed + nsims

        base_config = dict(sim_profile=args['sim_profile'],
                           roi_baseline=args['roi_baseline'],
                           non_null_src=args['non_null_src'],
                           do_find_src=args['do_find_src'],
                           sim=sim)

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name=sim,
                             fullpath=True)
            simdir = NAME_FACTORY.sim_targetdir(**name_keys)
            config_path = os.path.join(simdir, config_yaml)

            job_config = base_config.copy()
            job_config.update(dict(config=config_path, profiles=target_list))

            current_seed = first_seed
            while current_seed < last_seed:
                fullkey = "%s_%06i" % (target_name, current_seed)
                logfile = make_nfs_path(
                    os.path.join(
                        simdir, "%s_%s_%06i.log" %
                        (self.linkname, target_name, current_seed)))
                if nsims_job <= 0 or current_seed + nsims_job >= last_seed:
                    nsims_current = last_seed - current_seed
                else:
                    nsims_current = nsims_job
                job_config.update(
                    dict(seed=current_seed,
                         nsims=nsims_current,
                         logfile=logfile))
                job_configs[fullkey] = job_config.copy()
                current_seed += nsims_current

        return job_configs
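To make the seed chunking above concrete, here is a standalone sketch of the same while-loop logic with illustrative numbers:

first_seed, nsims, nsims_job = 0, 10, 4
last_seed = first_seed + nsims
current_seed = first_seed
while current_seed < last_seed:
    if nsims_job <= 0 or current_seed + nsims_job >= last_seed:
        nsims_current = last_seed - current_seed
    else:
        nsims_current = nsims_job
    print(current_seed, nsims_current)  # prints 0 4, then 4 4, then 8 2
    current_seed += nsims_current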
Example No. 26
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (roster_yaml, sim) = NAME_FACTORY.resolve_rosterfile(args)
        if roster_yaml is None:
            return job_configs

        roster_dict = load_yaml(roster_yaml)

        astro_priors = args['astro_priors']
        specs = args['specs']

        for roster_name in roster_dict.keys():
            rost_specs = specs
            for astro_prior in astro_priors:
                name_keys = dict(target_type=ttype,
                                 roster_name=roster_name,
                                 astro_prior=astro_prior,
                                 sim_name=sim,
                                 fullpath=True)
                for spec in rost_specs:
                    targ_key = "%s:%s:%s" % (roster_name, astro_prior, spec)
                    if sim is not None:
                        seedlist = range(args['seed'],
                                         args['seed'] + args['nsims'])
                        sim_path = os.path.join('config', 'sim_%s.yaml' % sim)
                    else:
                        seedlist = [None]
                        sim_path = None

                    for seed in seedlist:
                        if seed is not None:
                            name_keys['seed'] = "%06i" % seed
                            input_path = NAME_FACTORY.sim_stackedlimitsfile(
                                **name_keys)
                            full_targ_key = "%s_%06i" % (targ_key, seed)
                        else:
                            input_path = NAME_FACTORY.stackedlimitsfile(
                                **name_keys)
                            full_targ_key = targ_key

                        output_path = input_path.replace(
                            '.fits', '_%s.png' % spec)
                        logfile = make_nfs_path(
                            output_path.replace('.png', '.log'))
                        job_config = dict(infile=input_path,
                                          outfile=output_path,
                                          astro_prior=astro_prior,
                                          logfile=logfile,
                                          sim=sim_path,
                                          spec=spec)
                        job_configs[full_targ_key] = job_config

        return job_configs
Example No. 27
def make_interpolator(yamlfile):
    scale = utils.load_yaml(yamlfile)
    emids = (np.array(scale['E_min']) + np.array(scale['E_max'])) / 2.
    func = interp1d(emids,
                    scale['Value'],
                    kind='nearest',
                    bounds_error=False,
                    fill_value='extrapolate')
    return func
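A minimal usage sketch, assuming a scaling yaml with the three keys read above ('E_min', 'E_max', 'Value'); the file name and values are illustrative:

# scale.yaml (illustrative):
#   E_min: [100., 1000., 10000.]
#   E_max: [1000., 10000., 100000.]
#   Value: [1.05, 0.98, 1.02]
scale_func = make_interpolator('scale.yaml')
scale_func(550.)         # nearest bin mid-point, returns 1.05
scale_func([50., 5.e5])  # energies outside the grid take the nearest bin's value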
Example No. 28
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (roster_yaml, sim) = NAME_FACTORY.resolve_rosterfile(args)
        if roster_yaml is None:
            return job_configs

        roster_dict = load_yaml(roster_yaml)

        astro_priors = args['astro_priors']
        channels = args['channels']
        global_min = args['global_min']

        for roster_name in list(roster_dict.keys()):
            rost_chans = select_channels(channels, roster_name)
            for astro_prior in astro_priors:
                name_keys = dict(target_type=ttype,
                                 roster_name=roster_name,
                                 astro_prior=astro_prior,
                                 sim_name=sim,
                                 fullpath=True)

                for chan in rost_chans:
                    targ_key = "%s:%s:%s" % (roster_name, astro_prior, chan)

                    if sim is not None:
                        seedlist = list(
                            range(args['seed'], args['seed'] + args['nsims']))
                    else:
                        seedlist = [None]

                    for seed in seedlist:
                        if seed is not None:
                            name_keys['seed'] = "%06i" % seed  # pylint: disable=bad-string-format-type
                            input_path = NAME_FACTORY.sim_resultsfile(
                                **name_keys)
                            full_targ_key = "%s_%06i" % (targ_key, seed)  # pylint: disable=bad-string-format-type
                        else:
                            input_path = NAME_FACTORY.resultsfile(**name_keys)
                            full_targ_key = targ_key

                        output_path = input_path.replace(
                            '.fits', '_%s.png' % chan)
                        logfile = make_nfs_path(
                            output_path.replace('.png', '.log'))
                        job_config = dict(infile=input_path,
                                          outfile=output_path,
                                          astro_prior=astro_prior,
                                          logfile=logfile,
                                          global_min=global_min,
                                          chan=chan)
                        job_configs[full_targ_key] = job_config

        return job_configs
Example No. 29
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml,
         sim) = NAME_FACTORY.resolve_targetfile(args, require_sim_name=True)
        if targets_yaml is None:
            return job_configs

        specconfig = NAME_FACTORY.resolve_specconfig(args)

        astro_priors = args['astro_priors']
        write_full = args.get('write_full', False)

        targets = load_yaml(targets_yaml)
        base_config = dict(nsims=args['nsims'],
                           seed=args['seed'],
                           specconfig=specconfig)

        for target_name, profile_list in list(targets.items()):
            for profile in profile_list:
                for astro_prior in astro_priors:
                    if is_null(astro_prior):
                        astro_prior = 'none'
                    full_key = "%s:%s:%s:%s" % (target_name, profile, sim,
                                                astro_prior)
                    name_keys = dict(target_type=ttype,
                                     target_name=target_name,
                                     sim_name=sim,
                                     profile=profile,
                                     astro_prior=astro_prior,
                                     fullpath=True)
                    limitfile = NAME_FACTORY.sim_dmlimitsfile(**name_keys)
                    first = args['seed']
                    last = first + args['nsims'] - 1
                    outfile = limitfile.replace(
                        '_SEED.fits',
                        '_collected_%06i_%06i.fits' % (first, last))
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    if not write_full:
                        outfile = None
                    summaryfile = limitfile.replace(
                        '_SEED.fits',
                        '_summary_%06i_%06i.fits' % (first, last))
                    job_config = base_config.copy()
                    job_config.update(
                        dict(limitfile=limitfile,
                             astro_prior=astro_prior,
                             outfile=outfile,
                             summaryfile=summaryfile,
                             logfile=logfile))
                    job_configs[full_key] = job_config

        return job_configs
Example No. 30
    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided to
        the indiviudal links """

        config_yaml = args['config']
        config_dict = load_yaml(config_yaml)
        ttype = config_dict.get('ttype')
        config_template = config_dict.get('config_template', None)
        rosters = config_dict.get('rosters')
        rosterlist = config_dict.get('rosterlist')
        spatial_models = config_dict.get('spatial_models')
        specfile = config_dict.get('specfile')
        sims = config_dict.get('sims', {})
        sim_names = []
        sim_names += sims.keys()
        if 'random' in config_dict:
            sim_names += ['random']

        plot_channels = config_dict.get('plot_channels', [])

        dry_run = args.get('dry_run', False)

        self._set_link('prepare-targets',
                       PrepareTargets,
                       ttype=ttype,
                       rosters=rosters,
                       spatial_models=spatial_models,
                       sims=sim_names,
                       config=config_template)

        self._set_link('data',
                       PipelineData,
                       link_prefix='data.',
                       config=config_yaml,
                       dry_run=dry_run)

        final_plot_sims = []

        for sim in sims.keys():
            if sim in ['null']:
                final_plot_sims.append(sim)
            linkname = 'sim_%s' % sim
            self._set_link(linkname,
                           PipelineSim,
                           link_prefix='%s.' % linkname,
                           config=config_yaml,
                           sim=sim,
                           dry_run=dry_run)

        if 'random' in config_dict:
            final_plot_sims.append('random')
            self._set_link('random',
                           PipelineRandom,
                           link_prefix='random.',
                           config=config_yaml,
                           dry_run=dry_run)
Example No. 31
    def run_analysis(self, argv):
        """Run this analysis"""
        channels = [
            'ee', 'mumu', 'tautau', 'bb', 'tt', 'gg', 'ww', 'zz', 'cc', 'uu',
            'dd', 'ss'
        ]
        args = self._parser.parse_args(argv)
        roster_dict = load_yaml(os.path.join(args.topdir, args.rosterlist))
        DMCastroStacker.stack_rosters(roster_dict, args.topdir, channels,
                                      args.jprior, args.clobber)
Example No. 32
    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided to
        the indiviudal links """
        config_yaml = args['config']
        config_dict = load_yaml(config_yaml)

        sim_name = args['sim']
        sim_dict = config_dict['sims'][sim_name]

        ttype = config_dict.get('ttype')
        config_template = config_dict.get('config_template', None)
        config_localpath = config_dict.get('config_localpath', None)
        specfile = config_dict.get('specfile')
        targetlist = config_dict.get('targetlist')
        rosterlist = config_dict.get('rosterlist')

        sim_values = config_dict['sim_defaults']
        sim_values.update(sim_dict)

        sim_profile = sim_values['profile']
        seed = sim_values.get('seed', 0)
        nsims = sim_values.get('nsims', 20)
        nsims_job = sim_values.get('nsims_job', 0)
        non_null_src = sim_values.get('non_null_src', False)
        do_find_src = sim_values.get('do_find_src', False)

        sim_plotting = config_dict.get('sim_plotting')
        plot_channels_default = config_dict.get('plot_channels', [])

        self._set_link('copy-base-roi',
                       CopyBaseROI_SG,
                       ttype=ttype,
                       targetlist=targetlist,
                       rosterlist=rosterlist,
                       sim=sim_name,
                       config=config_template)
        self._set_link('simulate-roi',
                       SimulateROI_SG,
                       ttype=ttype,
                       sim=sim_name,
                       sim_profile=sim_profile,
                       targetlist=targetlist,
                       config=config_localpath,
                       seed=seed,
                       nsims=nsims,
                       non_null_src=non_null_src,
                       do_find_src=do_find_src,
                       nsims_job=nsims_job)
        self._set_link('collect-sed',
                       CollectSED_SG,
                       ttype=ttype,
                       sim=sim_name,
                       config=config_localpath,
                       targetlist=targetlist,
                       seed=seed,
                       nsims=nsims)
Example No. 33
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        targets = load_yaml(targets_yaml)
        nsims_job = args['nsims_job']
        first_seed = args['seed']
        nsims = args['nsims']
        last_seed = first_seed + nsims

        base_config = dict(sim_profile=args['sim_profile'],
                           roi_baseline=args['roi_baseline'],
                           non_null_src=args['non_null_src'],
                           sim=sim)

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name=sim,
                             fullpath=True)
            simdir = NAME_FACTORY.sim_targetdir(**name_keys)
            config_path = os.path.join(simdir, config_yaml)

            job_config = base_config.copy()
            job_config.update(dict(config=config_path,
                                   profiles=target_list))

            current_seed = first_seed
            while current_seed < last_seed:
                fullkey = "%s_%06i" % (target_name, current_seed)
                logfile = make_nfs_path(os.path.join(
                    simdir,
                    "%s_%s_%06i.log" % (self.linkname, target_name, current_seed)))
                if nsims_job <= 0 or current_seed + nsims_job >= last_seed:
                    nsims_current = last_seed - current_seed
                else:
                    nsims_current = nsims_job
                job_config.update(dict(seed=current_seed,
                                       nsims=nsims_current,
                                       logfile=logfile))
                job_configs[fullkey] = job_config.copy()
                current_seed += nsims_current

        return job_configs
Example No. 34
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)
        roster_lib = RosterLibrary()
        roster_dict = {}
        rost = roster_lib.create_roster(args.roster)
        roster_dict[args.roster] = rost

        base_config = load_yaml(args.baseconfig)

        TargetPreparer.write_target_dirs(args.topdir, roster_dict, base_config)
Example No. 35
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if DMSKY_ROSTER_LIB:
            roster_lib = RosterLibrary()
            roster_dict = {}
        else:
            raise RuntimeError(
                "Can't load roster library, probably b/c old version of yaml is not compatible with dmsky"
            )

        if not args.rosters:
            raise RuntimeError("You must specify at least one target roster")

        if is_null(args.ttype):
            raise RuntimeError("You must specify a target type")

        if is_null(args.sims):
            sims = []
        else:
            sims = args.sims

        if is_null(args.alias_dict):
            aliases = None
        else:
            aliases = load_yaml(args.alias_dict)

        name_keys = dict(target_type=args.ttype, fullpath=True)
        config_file = NAME_FACTORY.ttypeconfig(**name_keys)

        if is_not_null(args.config):
            config_file = args.config

        for roster in args.rosters:
            rost = roster_lib.create_roster(roster)
            roster_dict[roster] = rost

        base_config = load_yaml(config_file)
        self._write_target_dirs(args.ttype, roster_dict, base_config, sims,
                                args.spatial_models, aliases)
Example No. 36
    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided to
        the indiviudal links """
        config_yaml = args['config']
        config_dict = load_yaml(config_yaml)

        dry_run = args.get('dry_run', False)

        data = config_dict.get('data')
        comp = config_dict.get('comp')
        library = config_dict.get('library')
        models = config_dict.get('models')
        scratch = config_dict.get('scratch')

        self._set_link('prepare',
                       SplitAndBinChain,
                       comp=comp,
                       data=data,
                       ft1file=config_dict.get('ft1file'),
                       hpx_order_ccube=config_dict.get('hpx_order_ccube'),
                       hpx_order_expcube=config_dict.get('hpx_order_expcube'),
                       scratch=scratch,
                       dry_run=dry_run)

        self._set_link('diffuse-comp',
                       DiffuseCompChain,
                       comp=comp,
                       data=data,
                       library=library,
                       make_xml=config_dict.get('make_diffuse_comp_xml',
                                                False),
                       outdir=config_dict.get('merged_gasmap_dir',
                                              'merged_gasmap'),
                       dry_run=dry_run)

        self._set_link('catalog-comp',
                       CatalogCompChain,
                       comp=comp,
                       data=data,
                       library=library,
                       make_xml=config_dict.get('make_catalog_comp_xml',
                                                False),
                       nsrc=config_dict.get('catalog_nsrc', 500),
                       dry_run=dry_run)

        self._set_link('assemble-model',
                       AssembleModelChain,
                       comp=comp,
                       data=data,
                       library=library,
                       models=models,
                       hpx_order=config_dict.get('hpx_order_fitting'),
                       dry_run=dry_run)
Example No. 37
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (roster_yaml, sim) = NAME_FACTORY.resolve_rosterfile(
            args, require_sim_name=True)
        if roster_yaml is None:
            return job_configs

        specconfig = NAME_FACTORY.resolve_specconfig(args)

        astro_priors = args['astro_priors']
        write_full = args['write_full']
        first = args['seed']
        last = first + args['nsims'] - 1

        base_config = dict(nsims=args['nsims'],
                           seed=args['seed'])

        roster_dict = load_yaml(roster_yaml)
        for roster_name in roster_dict.keys():
            for astro_prior in astro_priors:
                if is_null(astro_prior):
                    astro_prior = 'none'
                full_key = "%s:%s:%s" % (roster_name, sim, astro_prior)
                name_keys = dict(target_type=ttype,
                                 roster_name=roster_name,
                                 sim_name=sim,
                                 astro_prior=astro_prior,
                                 fullpath=True)

                limitfile = NAME_FACTORY.sim_stackedlimitsfile(**name_keys)
                outfile = limitfile.replace(
                    '_SEED.fits', '_collected_%06i_%06i.fits' %
                    (first, last))
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                if not write_full:
                    outfile = None
                summaryfile = limitfile.replace('_SEED.fits', '_summary.fits')

                job_config = base_config.copy()
                job_config.update(dict(limitfile=limitfile,
                                       specconfig=specconfig,
                                       astro_prior=astro_prior,
                                       outfile=outfile,
                                       summaryfile=summaryfile,
                                       logfile=logfile))
                job_configs[full_key] = job_config

        return job_configs
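
All of the per-roster filenames above are derived from a single '_SEED.fits' template. A small sketch of that bookkeeping, using a made-up limitfile path in place of whatever NAME_FACTORY.sim_stackedlimitsfile actually returns:

    # Hypothetical template path; only the '_SEED.fits' suffix matters here.
    first, last = 0, 19
    limitfile = 'dm_limits/roster_sim_none/limits_SEED.fits'

    outfile = limitfile.replace('_SEED.fits',
                                '_collected_%06i_%06i.fits' % (first, last))
    summaryfile = limitfile.replace('_SEED.fits', '_summary.fits')
    logfile = outfile.replace('.fits', '.log')

    print(outfile)      # dm_limits/roster_sim_none/limits_collected_000000_000019.fits
    print(summaryfile)  # dm_limits/roster_sim_none/limits_summary.fits
    print(logfile)      # dm_limits/roster_sim_none/limits_collected_000000_000019.log
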
Exemplo n.º 41
0
    def run_analysis(self, argv):
        """ Build the manifest for all the models
        """
        args = self._parser.parse_args(argv)
        components = Component.build_from_yamlfile(args.comp)
        NAME_FACTORY.update_base_dict(args.data)
        model_dict = make_library(**args.__dict__)
        model_manager = model_dict['ModelManager']
        models = load_yaml(args.models)
        data = args.data
        hpx_order = args.hpx_order
        for modelkey in models:
            model_manager.make_srcmap_manifest(modelkey, components, data)
            model_manager.make_fermipy_config_yaml(modelkey, components, data,
                                                   hpx_order=hpx_order,
                                                   irf_ver=NAME_FACTORY.irf_ver())
Exemplo n.º 42
0
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if sim is not None:
            raise ValueError("Found 'sim' argument on AnalyzeSED_SG config.")
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)
        config_yaml = 'config.yaml'

        if is_not_null(args['skydirs']):
            skydirs = args['skydirs']
        else:
            skydirs = None

        base_config = dict(roi_baseline=args['roi_baseline'],
                           make_plots=args['make_plots'],
                           non_null_src=args['non_null_src'])

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name='random',
                             fullpath=True)
            if skydirs is None:
                target_dir = NAME_FACTORY.targetdir(**name_keys)
                skydir_path = None
            else:
                target_dir = NAME_FACTORY.sim_targetdir(**name_keys)
                skydir_path = os.path.join(target_dir, skydirs)
            config_path = os.path.join(target_dir, config_yaml)
            logfile = make_nfs_path(os.path.join(
                target_dir, "%s_%s.log" % (self.linkname, target_name)))
            job_config = base_config.copy()
            job_config.update(dict(config=config_path,
                                   profiles=target_list,
                                   skydirs=skydir_path,
                                   logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
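
The loop above expects the targets YAML to map each target name to the list of spatial profiles passed on as `profiles`. A minimal sketch with invented target and profile names:

    import yaml

    # Hypothetical targets file content; names are for illustration only.
    targets = {'draco': ['point', 'dmap'],
               'segue_1': ['point']}
    print(yaml.safe_dump(targets, default_flow_style=False))

    for target_name, target_list in targets.items():
        print(target_name, target_list)   # e.g. draco ['point', 'dmap']
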
Exemplo n.º 43
0
    @staticmethod
    def _clone_config_and_srcmaps(config_path, seed):
        """Clone the configuration and per-component source maps for a given seed"""
        workdir = os.path.dirname(config_path)
        new_config_path = config_path.replace('.yaml', '_%06i.yaml' % seed)
        config = load_yaml(config_path)
        comps = config.get('components', [config])
        for i, comp in enumerate(comps):
            comp_name = "%02i" % i
            if 'gtlike' not in comp:
                comp['gtlike'] = {}
            orig_srcmap = os.path.abspath(
                os.path.join(workdir, 'srcmap_%s.fits' % comp_name))
            new_srcmap = os.path.abspath(
                os.path.join(workdir, 'srcmap_%06i_%s.fits' % (seed, comp_name)))
            comp['gtlike']['srcmap'] = new_srcmap
            comp['gtlike']['use_external_srcmap'] = True
            copyfile(orig_srcmap, new_srcmap)

        write_yaml(config, new_config_path)
        return new_config_path
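
A toy illustration of what the clone does to each component entry: every component gains an absolute, seed-tagged 'srcmap' path and the 'use_external_srcmap' flag. The copyfile step is omitted and the workdir, seed, and component contents are made up:

    import os

    workdir = '/tmp/analysis'
    seed = 7
    config = {'components': [{'selection': {'emin': 1000}},
                             {'selection': {'emin': 10000}}]}

    for i, comp in enumerate(config['components']):
        comp_name = "%02i" % i
        comp.setdefault('gtlike', {})
        comp['gtlike']['srcmap'] = os.path.abspath(
            os.path.join(workdir, 'srcmap_%06i_%s.fits' % (seed, comp_name)))
        comp['gtlike']['use_external_srcmap'] = True

    print(config['components'][0]['gtlike']['srcmap'])
    # /tmp/analysis/srcmap_000007_00.fits
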
Exemplo n.º 44
0
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        rand_yaml = NAME_FACTORY.resolve_randconfig(args)

        targets = load_yaml(targets_yaml)

        base_config = dict(rand_config=rand_yaml)

        for target_name in targets.keys():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name=sim,
                             fullpath=True)
            simdir = NAME_FACTORY.sim_targetdir(**name_keys)
            config_path = os.path.join(simdir, config_yaml)
            outfile = os.path.join(simdir, 'skydirs.yaml')
            logfile = make_nfs_path(outfile.replace('yaml', 'log'))
            job_config = base_config.copy()
            job_config.update(dict(config=config_path,
                                   outfile=outfile,
                                   logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Exemplo n.º 45
0
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        if is_null(args.skydirs):
            skydir_dict = None
        else:
            skydir_dict = load_yaml(args.skydirs)

        gta = GTAnalysis(args.config,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        #gta.setup(overwrite=False)
        gta.load_roi(args.roi_baseline)
        gta.print_roi()

        basedir = os.path.dirname(args.config)
        # This should be a no-op, b/c it was done in the baseline analysis

        for profile in args.profiles:
            if skydir_dict is None:
                skydir_keys = [None]
            else:
                skydir_keys = sorted(skydir_dict.keys())

            for skydir_key in skydir_keys:
                pkey, psrc_name, pdict = build_profile_dict(basedir, profile)
                if skydir_key is not None:
                    skydir_val = skydir_dict[skydir_key]
                    pdict['ra'] = skydir_val['ra']
                    pdict['dec'] = skydir_val['dec']
                    pkey += "_%06i" % skydir_key

                outfile = "sed_%s.fits" % pkey

                # Add the source and get the list of correlated sources
                correl_dict, test_src_name = add_source_get_correlated(gta, psrc_name, 
                                                                       pdict, correl_thresh=0.25, 
                                                                       non_null_src=args.non_null_src)

                # Write the list of correlated sources
                correl_yaml = os.path.join(basedir, "correl_%s.yaml" % pkey)
                write_yaml(correl_dict, correl_yaml)

                gta.free_sources(False)
                for src_name in correl_dict.keys():
                    gta.free_source(src_name, pars='norm')

                # build the SED
                if args.non_null_src:
                    gta.update_source(test_src_name, reoptimize=True)
                    gta.write_roi("base_%s"% pkey, make_plots=False)
                gta.sed(test_src_name, prefix=pkey, outfile=outfile, make_plots=args.make_plots)

                # remove the source
                gta.delete_source(test_src_name)
                # put the ROI back to how it was
                gta.load_xml(args.roi_baseline)

        return gta
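
The skydirs file consumed above is expected to map integer keys to sky positions with 'ra' and 'dec' entries (the key is folded into the profile key via "_%06i"). A minimal sketch with placeholder coordinates and a made-up 'point' profile key:

    import yaml

    # Hypothetical skydirs content; coordinates in degrees, values invented.
    skydir_dict = {0: dict(ra=260.05, dec=57.92),
                   1: dict(ra=151.77, dec=16.08)}
    print(yaml.safe_dump(skydir_dict, default_flow_style=False))

    for skydir_key in sorted(skydir_dict.keys()):
        skydir_val = skydir_dict[skydir_key]
        pkey = "point_%06i" % skydir_key      # 'point' stands in for a real profile key
        print(pkey, skydir_val['ra'], skydir_val['dec'])
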
Exemplo n.º 46
0
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        if args.load_baseline:
            gta = GTAnalysis.create(args.roi_baseline,
                                    args.config)
        else:
            gta = GTAnalysis(args.config,
                             logging={'verbosity': 3},
                             fileio={'workdir_regex': r'\.xml$|\.npy$'})
            gta.setup()
            if is_not_null(args.input_pars):
                gta.load_parameters_from_yaml(args.input_pars)
            gta.write_roi(args.roi_baseline,
                          save_model_map=True,
                          save_weight_map=True,
                          make_plots=args.make_plots)

        src_list = get_src_names(gta)
        plotter = plotting.AnalysisPlotter(gta.config['plotting'],
                                           fileio=gta.config['fileio'],
                                           logging=gta.config['logging'])

        if is_null(args.fit_strategy):
            return

        fit_strategy = load_yaml(args.fit_strategy)
        npred_current = None
        npred_prev = None
        
        plots_only = False

        for fit_stage in fit_strategy:
            mask = fit_stage.get('mask', None)
            npred_threshold = fit_stage.get('npred_threshold', 1.0e4)
            frac_threshold = fit_stage.get('frac_threshold', 0.5)
            npred_frac = fit_stage.get('npred_frac', 0.9999)

            if plots_only:
                gta.load_roi("%s.npy" % fit_stage['key'])
                npred_current = set_wts_get_npred_wt(gta, mask)
                skip_list_region = get_unchanged(src_list,
                                                 npred_current,
                                                 npred_prev,
                                                 frac_threshold=frac_threshold)
            else:
                npred_current = set_wts_get_npred_wt(gta, mask)
                skip_list_region = get_unchanged(src_list,
                                                 npred_current,
                                                 npred_prev,
                                                 frac_threshold=frac_threshold)
                gta.optimize(npred_frac=npred_frac,
                             npred_threshold=npred_threshold,
                             skip=skip_list_region)

            snapshot(gta, plotter, fit_stage['key'], make_plots=args.make_plots)
            npred_prev = npred_current
            npred_current = build_srcdict(gta, 'npred_wt')
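
The fit strategy consumed above is an ordered list of stages, each with a required 'key' and optional per-stage settings. A minimal sketch; the stage names are invented and the defaults in the comments mirror the fit_stage.get(...) calls above:

    import yaml

    # Hypothetical fit strategy content.
    fit_strategy = [dict(key='fit_00',
                         npred_threshold=1.0e4,   # default 1.0e4
                         frac_threshold=0.5,      # default 0.5
                         npred_frac=0.9999),      # default 0.9999
                    dict(key='fit_01',
                         mask=None)]              # default None
    print(yaml.safe_dump(fit_strategy, default_flow_style=False))

    for fit_stage in fit_strategy:
        print(fit_stage['key'], fit_stage.get('mask', None),
              fit_stage.get('npred_threshold', 1.0e4))
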