Code Example #1
File: solar.py  Project: jefemagril/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        mktime = args['mktimefilter']

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             mktime=mktime,
                             fullpath=True)
            outfile = NAME_FACTORY.bexpcube_sun(**name_keys)
            ltcube_sun = NAME_FACTORY.ltcube_sun(**name_keys)
            job_configs[key] = dict(infile=ltcube_sun,
                                    outfile=outfile,
                                    irfs=NAME_FACTORY.irfs(**name_keys),
                                    evtype=comp.evtype,
                                    emin=comp.emin,
                                    emax=comp.emax,
                                    enumbins=comp.enumbins,
                                    logfile=make_nfs_path(outfile.replace('.fits', '.log')))

        return job_configs
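The example above keys each job by energy bin and event type. Below is a minimal, self-contained sketch of that keying pattern; the stand-in FakeComponent tuple and its values are illustrative assumptions, not the fermipy API.

from collections import namedtuple

# Stand-in for the fermipy Component objects used above (hypothetical values).
FakeComponent = namedtuple('FakeComponent', ['ebin_name', 'evtype_name', 'zmax'])

components = [FakeComponent('E0', 'PSF3', 100),
              FakeComponent('E1', 'PSF3', 105)]

job_configs = {}
for comp in components:
    # Mirrors comp.make_key('{ebin_name}_{evtype_name}') in the example above.
    key = "%s_%s" % (comp.ebin_name, comp.evtype_name)
    job_configs[key] = dict(zcut="zmax%i" % comp.zmax)

print(job_configs)
# {'E0_PSF3': {'zcut': 'zmax100'}, 'E1_PSF3': {'zcut': 'zmax105'}}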
Code Example #2
File: target_analysis.py  Project: jefemagril/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if sim is not None:
            raise ValueError("Found 'sim' argument on AnalyzeROI_SG config.")
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        targets = load_yaml(targets_yaml)
        base_config = dict(roi_baseline=args['roi_baseline'],
                           make_plots=args['make_plots'])

        for target_name in targets.keys():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             fullpath=True)
            target_dir = NAME_FACTORY.targetdir(**name_keys)
            config_path = os.path.join(target_dir, config_yaml)
            logfile = make_nfs_path(os.path.join(
                target_dir, "%s_%s.log" % (self.linkname, target_name)))
            job_config = base_config.copy()
            job_config.update(dict(config=config_path,
                                   logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
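Note the base_config.copy() before update() in the loop above: without the copy, every target would share and overwrite the same dict. A small self-contained sketch with hypothetical target names:

base_config = dict(roi_baseline='baseline', make_plots=True)

job_configs = {}
for target_name in ['draco', 'segue_1']:
    job_config = base_config.copy()          # fresh dict per target
    job_config.update(dict(config='%s/config.yaml' % target_name,
                           logfile='%s/analyze.log' % target_name))
    job_configs[target_name] = job_config

assert job_configs['draco']['logfile'] != job_configs['segue_1']['logfile']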
Code Example #3
File: gt_assemble_model.py  Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        models = load_yaml(args['models'])

        for modelkey in models:
            manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                    'srcmap_manifest_%s.yaml' % modelkey)
            for comp in components:
                key = comp.make_key('{ebin_name}_{evtype_name}')
                fullkey = "%s_%s" % (modelkey, key)
                outfile = NAME_FACTORY.merged_srcmaps(
                    modelkey=modelkey,
                    component=key,
                    coordsys=comp.coordsys,
                    mktime='none',
                    irf_ver=NAME_FACTORY.irf_ver())
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[fullkey] = dict(input=manifest,
                                            compname=key,
                                            logfile=logfile)
        return job_configs
Code Example #4
File: target_plotting.py  Project: jefemagril/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)

        for target_name, target_list in targets.items():
            for targ_prof in target_list:
                name_keys = dict(target_type=ttype,
                                 target_name=target_name,
                                 profile=targ_prof,
                                 fullpath=True)
                targ_key = "%s_%s" % (target_name, targ_prof)
                input_path = NAME_FACTORY.sedfile(**name_keys)
                output_path = input_path.replace('.fits', '.png')
                logfile = make_nfs_path(input_path.replace('.fits', '.log'))
                job_config = dict(infile=input_path,
                                  outfile=output_path,
                                  logfile=logfile)
                job_configs[targ_key] = job_config

        return job_configs
Code Example #5
File: solar.py  Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        mktime = args['mktimefilter']

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             mktime=mktime,
                             fullpath=True)
            outfile = NAME_FACTORY.bexpcube_sun(**name_keys)
            ltcube_sun = NAME_FACTORY.ltcube_sun(**name_keys)
            job_configs[key] = dict(
                infile=ltcube_sun,
                outfile=outfile,
                irfs=NAME_FACTORY.irfs(**name_keys),
                evtype=comp.evtype,
                emin=comp.emin,
                emax=comp.emax,
                enumbins=comp.enumbins,
                logfile=make_nfs_path(outfile.replace('.fits', '.log')))

        return job_configs
Code Example #6
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if sim is not None:
            raise ValueError("Found 'sim' argument on AnalyzeROI_SG config.")
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        targets = load_yaml(targets_yaml)
        base_config = dict(roi_baseline=args['roi_baseline'],
                           make_plots=args['make_plots'])

        for target_name in targets.keys():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             fullpath=True)
            target_dir = NAME_FACTORY.targetdir(**name_keys)
            config_path = os.path.join(target_dir, config_yaml)
            logfile = make_nfs_path(
                os.path.join(target_dir,
                             "%s_%s.log" % (self.linkname, target_name)))
            job_config = base_config.copy()
            job_config.update(dict(config=config_path, logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Code Example #7
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)

        for target_name, target_list in targets.items():
            for targ_prof in target_list:
                name_keys = dict(target_type=ttype,
                                 target_name=target_name,
                                 profile=targ_prof,
                                 fullpath=True)
                targ_key = "%s_%s" % (target_name, targ_prof)
                input_path = NAME_FACTORY.sedfile(**name_keys)
                output_path = input_path.replace('.fits', '.png')
                logfile = make_nfs_path(input_path.replace('.fits', '.log'))
                job_config = dict(infile=input_path,
                                  outfile=output_path,
                                  logfile=logfile)
                job_configs[targ_key] = job_config

        return job_configs
Code Example #8
File: job_library.py  Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        ret_dict = make_catalog_comp_dict(
            library=args['library'], basedir=NAME_FACTORY.base_dict['basedir'])
        catalog_info_dict = ret_dict['catalog_info_dict']

        for catalog_name in catalog_info_dict:
            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')
                name_keys = dict(zcut=zcut,
                                 sourcekey=catalog_name,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime='none',
                                 fullpath=True)

                outfile = NAME_FACTORY.srcmaps(**name_keys)
                outfile_tokens = os.path.splitext(outfile)
                infile_regexp = "%s_*.fits" % outfile_tokens[0]
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[key] = dict(output=outfile,
                                        args=infile_regexp,
                                        logfile=logfile)

        return job_configs
Code Example #9
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        models = load_yaml(args['models'])

        for modelkey in models:
            manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                    'srcmap_manifest_%s.yaml' % modelkey)
            for comp in components:
                key = comp.make_key('{ebin_name}_{evtype_name}')
                fullkey = "%s_%s" % (modelkey, key)
                outfile = NAME_FACTORY.merged_srcmaps(modelkey=modelkey,
                                                      component=key,
                                                      coordsys=comp.coordsys,
                                                      mktime='none',
                                                      irf_ver=NAME_FACTORY.irf_ver())
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[fullkey] = dict(input=manifest,
                                            compname=key,
                                            logfile=logfile)
        return job_configs
Code Example #10
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        if self._comp_dict is None or self._comp_dict_file != args['library']:
            self._comp_dict_file = args['library']
            self._comp_dict = make_catalog_comp_dict(sources=self._comp_dict_file,
                                                     basedir=NAME_FACTORY.base_dict['basedir'])
        else:
            print ("Using cached catalog dict from %s" % args['library'])

        catalog_info_dict = self._comp_dict['catalog_info_dict']
        comp_info_dict = self._comp_dict['comp_info_dict']

        n_src_per_job = args['nsrc']

        if args['make_xml']:
            SrcmapsCatalog_SG._make_xml_files(catalog_info_dict, comp_info_dict)

        for catalog_name, catalog_info in catalog_info_dict.items():

            n_cat_src = len(catalog_info.catalog.table)
            n_job = int(math.ceil(float(n_cat_src) / n_src_per_job))

            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')
                name_keys = dict(zcut=zcut,
                                 sourcekey=catalog_name,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime='none',
                                 fullpath=True)

                for i_job in range(n_job):
                    full_key = "%s_%02i" % (key, i_job)
                    srcmin = i_job * n_src_per_job
                    srcmax = min(srcmin + n_src_per_job, n_cat_src)
                    outfile = NAME_FACTORY.srcmaps(
                        **name_keys).replace('.fits', "_%02i.fits" % (i_job))
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[full_key] = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                                 expcube=NAME_FACTORY.ltcube(**name_keys),
                                                 irfs=NAME_FACTORY.irfs(**name_keys),
                                                 bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                                 outfile=outfile,
                                                 logfile=logfile,
                                                 srcmdl=catalog_info.srcmdl_name,
                                                 evtype=comp.evtype,
                                                 srcmin=srcmin,
                                                 srcmax=srcmax)

        return job_configs
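The catalog example above splits each catalog's sources into fixed-size batches. The chunking arithmetic in isolation, with hypothetical numbers (10 sources, at most 4 per job):

import math

n_cat_src = 10       # hypothetical catalog size
n_src_per_job = 4    # hypothetical batch size
n_job = int(math.ceil(float(n_cat_src) / n_src_per_job))   # 3 jobs

for i_job in range(n_job):
    srcmin = i_job * n_src_per_job
    srcmax = min(srcmin + n_src_per_job, n_cat_src)
    print("job %02i covers sources [%i, %i)" % (i_job, srcmin, srcmax))
# job 00 covers sources [0, 4)
# job 01 covers sources [4, 8)
# job 02 covers sources [8, 10)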
Code Example #11
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (roster_yaml, sim) = NAME_FACTORY.resolve_rosterfile(args)
        if roster_yaml is None:
            return job_configs

        roster_dict = load_yaml(roster_yaml)

        astro_priors = args['astro_priors']
        specs = args['specs']

        for roster_name in roster_dict.keys():
            rost_specs = specs
            for astro_prior in astro_priors:
                name_keys = dict(target_type=ttype,
                                 roster_name=roster_name,
                                 astro_prior=astro_prior,
                                 sim_name=sim,
                                 fullpath=True)
                for spec in rost_specs:
                    targ_key = "%s:%s:%s" % (roster_name, astro_prior, spec)
                    if sim is not None:
                        seedlist = range(args['seed'],
                                         args['seed'] + args['nsims'])
                        sim_path = os.path.join('config', 'sim_%s.yaml' % sim)
                    else:
                        seedlist = [None]
                        sim_path = None

                    for seed in seedlist:
                        if seed is not None:
                            name_keys['seed'] = "%06i" % seed
                            input_path = NAME_FACTORY.sim_stackedlimitsfile(
                                **name_keys)
                            full_targ_key = "%s_%06i" % (targ_key, seed)
                        else:
                            input_path = NAME_FACTORY.stackedlimitsfile(
                                **name_keys)
                            full_targ_key = targ_key

                        output_path = input_path.replace(
                            '.fits', '_%s.png' % spec)
                        logfile = make_nfs_path(
                            output_path.replace('.png', '.log'))
                        job_config = dict(infile=input_path,
                                          outfile=output_path,
                                          astro_prior=astro_prior,
                                          logfile=logfile,
                                          sim=sim_path,
                                          spec=spec)
                        job_configs[full_targ_key] = job_config

        return job_configs
Code Example #12
File: target_sim.py  Project: mfacorcoran/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        targets = load_yaml(targets_yaml)
        nsims_job = args['nsims_job']
        first_seed = args['seed']
        nsims = args['nsims']
        last_seed = first_seed + nsims

        base_config = dict(sim_profile=args['sim_profile'],
                           roi_baseline=args['roi_baseline'],
                           non_null_src=args['non_null_src'],
                           do_find_src=args['do_find_src'],
                           sim=sim)

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name=sim,
                             fullpath=True)
            simdir = NAME_FACTORY.sim_targetdir(**name_keys)
            config_path = os.path.join(simdir, config_yaml)

            job_config = base_config.copy()
            job_config.update(dict(config=config_path, profiles=target_list))

            current_seed = first_seed
            while current_seed < last_seed:
                fullkey = "%s_%06i" % (target_name, current_seed)
                logfile = make_nfs_path(
                    os.path.join(
                        simdir, "%s_%s_%06i.log" %
                        (self.linkname, target_name, current_seed)))
                if nsims_job <= 0 or current_seed + nsims_job >= last_seed:
                    nsims_current = last_seed - current_seed
                else:
                    nsims_current = nsims_job
                job_config.update(
                    dict(seed=current_seed,
                         nsims=nsims_current,
                         logfile=logfile))
                job_configs[fullkey] = job_config.copy()
                current_seed += nsims_current

        return job_configs
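The while loop above partitions the simulation seeds into per-job batches. The same logic in isolation, with hypothetical numbers (25 simulations starting at seed 0, at most 10 per job):

first_seed, nsims, nsims_job = 0, 25, 10   # hypothetical settings
last_seed = first_seed + nsims

current_seed = first_seed
while current_seed < last_seed:
    if nsims_job <= 0 or current_seed + nsims_job >= last_seed:
        nsims_current = last_seed - current_seed   # final (possibly short) batch
    else:
        nsims_current = nsims_job
    print("seed=%06i nsims=%i" % (current_seed, nsims_current))
    current_seed += nsims_current
# seed=000000 nsims=10
# seed=000010 nsims=10
# seed=000020 nsims=5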
Code Example #13
File: dm_plotting.py  Project: fermiPy/dmpipe
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (roster_yaml, sim) = NAME_FACTORY.resolve_rosterfile(args)
        if roster_yaml is None:
            return job_configs

        roster_dict = load_yaml(roster_yaml)

        astro_priors = args['astro_priors']
        channels = args['channels']
        global_min = args['global_min']

        for roster_name in list(roster_dict.keys()):
            rost_chans = select_channels(channels, roster_name)
            for astro_prior in astro_priors:
                name_keys = dict(target_type=ttype,
                                 roster_name=roster_name,
                                 astro_prior=astro_prior,
                                 sim_name=sim,
                                 fullpath=True)

                for chan in rost_chans:
                    targ_key = "%s:%s:%s" % (roster_name, astro_prior, chan)

                    if sim is not None:
                        seedlist = list(
                            range(args['seed'], args['seed'] + args['nsims']))
                    else:
                        seedlist = [None]

                    for seed in seedlist:
                        if seed is not None:
                            name_keys['seed'] = "%06i" % seed  # pylint: disable=bad-string-format-type
                            input_path = NAME_FACTORY.sim_resultsfile(
                                **name_keys)
                            full_targ_key = "%s_%06i" % (targ_key, seed)  # pylint: disable=bad-string-format-type
                        else:
                            input_path = NAME_FACTORY.resultsfile(**name_keys)
                            full_targ_key = targ_key

                        output_path = input_path.replace(
                            '.fits', '_%s.png' % chan)
                        logfile = make_nfs_path(
                            output_path.replace('.png', '.log'))
                        job_config = dict(infile=input_path,
                                          outfile=output_path,
                                          astro_prior=astro_prior,
                                          logfile=logfile,
                                          global_min=global_min,
                                          chan=chan)
                        job_configs[full_targ_key] = job_config

        return job_configs
Code Example #14
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml,
         sim) = NAME_FACTORY.resolve_targetfile(args, require_sim_name=True)
        if targets_yaml is None:
            return job_configs

        specconfig = NAME_FACTORY.resolve_specconfig(args)

        astro_priors = args['astro_priors']
        write_full = args.get('write_full', False)

        targets = load_yaml(targets_yaml)
        base_config = dict(nsims=args['nsims'],
                           seed=args['seed'],
                           specconfig=specconfig)

        for target_name, profile_list in list(targets.items()):
            for profile in profile_list:
                for astro_prior in astro_priors:
                    if is_null(astro_prior):
                        astro_prior = 'none'
                    full_key = "%s:%s:%s:%s" % (target_name, profile, sim,
                                                astro_prior)
                    name_keys = dict(target_type=ttype,
                                     target_name=target_name,
                                     sim_name=sim,
                                     profile=profile,
                                     astro_prior=astro_prior,
                                     fullpath=True)
                    limitfile = NAME_FACTORY.sim_dmlimitsfile(**name_keys)
                    first = args['seed']
                    last = first + args['nsims'] - 1
                    outfile = limitfile.replace(
                        '_SEED.fits',
                        '_collected_%06i_%06i.fits' % (first, last))
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    if not write_full:
                        outfile = None
                    summaryfile = limitfile.replace(
                        '_SEED.fits',
                        '_summary_%06i_%06i.fits' % (first, last))
                    job_config = base_config.copy()
                    job_config.update(
                        dict(limitfile=limitfile,
                             astro_prior=astro_prior,
                             outfile=outfile,
                             summaryfile=summaryfile,
                             logfile=logfile))
                    job_configs[full_key] = job_config

        return job_configs
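The collection example above derives its output names by replacing the '_SEED.fits' suffix of the per-seed limit file template. A sketch of that naming convention; the template path is a hypothetical example, only the replacement pattern is taken from the code above.

first, nsims = 0, 100          # hypothetical seed range
last = first + nsims - 1

limitfile = 'dmlimits_prior_none_SEED.fits'   # hypothetical template name
outfile = limitfile.replace('_SEED.fits',
                            '_collected_%06i_%06i.fits' % (first, last))
summaryfile = limitfile.replace('_SEED.fits',
                                '_summary_%06i_%06i.fits' % (first, last))

print(outfile)      # dmlimits_prior_none_collected_000000_000099.fits
print(summaryfile)  # dmlimits_prior_none_summary_000000_000099.fits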
Code Example #15
File: target_sim.py  Project: jefemagril/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        targets = load_yaml(targets_yaml)
        nsims_job = args['nsims_job']
        first_seed = args['seed']
        nsims = args['nsims']
        last_seed = first_seed + nsims

        base_config = dict(sim_profile=args['sim_profile'],
                           roi_baseline=args['roi_baseline'],
                           non_null_src=args['non_null_src'],
                           sim=sim)

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name=sim,
                             fullpath=True)
            simdir = NAME_FACTORY.sim_targetdir(**name_keys)
            config_path = os.path.join(simdir, config_yaml)

            job_config = base_config.copy()
            job_config.update(dict(config=config_path,
                                   profiles=target_list))

            current_seed = first_seed
            while current_seed < last_seed:
                fullkey = "%s_%06i" % (target_name, current_seed)
                logfile = make_nfs_path(
                    os.path.join(simdir, "%s_%s_%06i.log" %
                                 (self.linkname, target_name, current_seed)))
                if nsims_job <= 0 or current_seed + nsims_job >= last_seed:
                    nsims_current = last_seed - current_seed
                else:
                    nsims_current = nsims_job
                job_config.update(dict(seed=current_seed,
                                       nsims=nsims_current,
                                       logfile=logfile))
                job_configs[fullkey] = job_config.copy()
                current_seed += nsims_current

        return job_configs
Code Example #16
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        comp_file = args.get('comp', None)
        if comp_file is not None:
            comp_dict = yaml.safe_load(open(comp_file))
            coordsys = comp_dict.pop('coordsys')
            for v in comp_dict.values():
                v['coordsys'] = coordsys
        else:
            return job_configs

        datafile = args['data']
        if datafile is None or datafile == 'None':
            return job_configs
        NAME_FACTORY.update_base_dict(args['data'])

        inputfiles = create_inputlist(args['ft1file'])
        outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'],
                                   'counts_cubes')
        ft2file = os.path.join(NAME_FACTORY.base_dict['basedir'],
                               args['ft2file'])
        data_ver = NAME_FACTORY.base_dict['data_ver']

        for idx, infile in enumerate(inputfiles):
            key = "%06i" % idx
            key_scfile = "%03i" % (idx + 1)
            output_dir = os.path.join(outdir_base, key)
            try:
                os.mkdir(output_dir)
            except OSError:
                pass
            scfile = args['ft2file'].replace('.lst', '_%s.fits' % key_scfile)
            logfile = make_nfs_path(
                os.path.join(output_dir,
                             'scatter_mk_%s_%s.log' % (data_ver, key)))

            job_configs[key] = comp_dict.copy()
            job_configs[key].update(
                dict(ft1file=infile,
                     ft2file=ft2file,
                     data=args['data'],
                     scfile=scfile,
                     comp=args['comp'],
                     hpx_order_max=args['hpx_order_max'],
                     outdir=outdir_base,
                     outkey=key,
                     logfile=logfile,
                     pfiles=output_dir))

        return job_configs
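The try/except OSError around os.mkdir above makes per-key directory creation idempotent in a Python 2 compatible way. On Python 3 the same effect is available directly; a one-line sketch with a hypothetical path:

import os

# Does not raise if the directory already exists (Python 3.2+).
os.makedirs(os.path.join('counts_cubes', '000000'), exist_ok=True)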
Code Example #17
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (roster_yaml, sim) = NAME_FACTORY.resolve_rosterfile(
            args, require_sim_name=True)
        if roster_yaml is None:
            return job_configs

        specconfig = NAME_FACTORY.resolve_specconfig(args)

        astro_priors = args['astro_priors']
        write_full = args['write_full']
        first = args['seed']
        last = first + args['nsims'] - 1

        base_config = dict(nsims=args['nsims'],
                           seed=args['seed'])

        roster_dict = load_yaml(roster_yaml)
        for roster_name in roster_dict.keys():
            for astro_prior in astro_priors:
                if is_null(astro_prior):
                    astro_prior = 'none'
                full_key = "%s:%s:%s" % (roster_name, sim, astro_prior)
                name_keys = dict(target_type=ttype,
                                 roster_name=roster_name,
                                 sim_name=sim,
                                 astro_prior=astro_prior,
                                 fullpath=True)

                limitfile = NAME_FACTORY.sim_stackedlimitsfile(**name_keys)
                outfile = limitfile.replace(
                    '_SEED.fits', '_collected_%06i_%06i.fits' %
                    (first, last))
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                if not write_full:
                    outfile = None
                summaryfile = limitfile.replace('_SEED.fits', '_summary.fits')

                job_config = base_config.copy()
                job_config.update(dict(limitfile=limitfile,
                                       specconfig=specconfig,
                                       astro_prior=astro_prior,
                                       outfile=outfile,
                                       summaryfile=summaryfile,
                                       logfile=logfile))
                job_configs[full_key] = job_config

        return job_configs
Code Example #18
File: gt_coadd_split.py  Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])

        datafile = args['data']
        if datafile is None or datafile == 'None':
            return job_configs
        NAME_FACTORY.update_base_dict(args['data'])
        outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')

        inputfiles = create_inputlist(args['ft1file'])
        num_files = len(inputfiles)

        for comp in components:
            zcut = "zmax%i" % comp.zmax

            mktimelist = copy.copy(comp.mktimefilters)
            if not mktimelist:
                mktimelist.append('none')
            evtclasslist_keys = copy.copy(comp.evtclasses)
            if not evtclasslist_keys:
                evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]
            else:
                evtclasslist_vals = copy.copy(evtclasslist_keys)

            for mktimekey in mktimelist:
                for evtclassval in evtclasslist_vals:
                    fullkey = comp.make_key(
                        '%s_%s_{ebin_name}_%s_{evtype_name}' %
                        (evtclassval, zcut, mktimekey))

                    name_keys = dict(zcut=zcut,
                                     ebin=comp.ebin_name,
                                     psftype=comp.evtype_name,
                                     coordsys=comp.coordsys,
                                     irf_ver=NAME_FACTORY.irf_ver(),
                                     mktime=mktimekey,
                                     evclass=evtclassval,
                                     fullpath=True)

                    ccube_name = os.path.basename(NAME_FACTORY.ccube(**name_keys))
                    outfile = os.path.join(outdir_base, ccube_name)
                    infiles = _make_input_file_list(outfile, num_files)
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[fullkey] = dict(args=infiles,
                                                output=outfile,
                                                logfile=logfile)

        return job_configs
Code Example #19
File: job_library.py  Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        datafile = args['data']
        if datafile is None or datafile == 'None':
            return job_configs
        NAME_FACTORY.update_base_dict(args['data'])

        for comp in components:
            zcut = "zmax%i" % comp.zmax

            mktimelist = copy.copy(comp.mktimefilters)
            if not mktimelist:
                mktimelist.append('none')
            evtclasslist_keys = copy.copy(comp.evtclasses)
            if not evtclasslist_keys:
                evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]
            else:
                evtclasslist_vals = copy.copy(evtclasslist_keys)

            for mktimekey in mktimelist:
                for evtclassval in evtclasslist_vals:
                    fullkey = comp.make_key(
                        '%s_%s_{ebin_name}_%s_{evtype_name}' %
                        (evtclassval, zcut, mktimekey))
                    name_keys = dict(zcut=zcut,
                                     ebin=comp.ebin_name,
                                     psftype=comp.evtype_name,
                                     coordsys=comp.coordsys,
                                     irf_ver=NAME_FACTORY.irf_ver(),
                                     mktime=mktimekey,
                                     evclass=evtclassval,
                                     fullpath=True)

                    outfile = NAME_FACTORY.bexpcube(**name_keys)
                    cmap = NAME_FACTORY.ccube(**name_keys)
                    infile = NAME_FACTORY.ltcube(**name_keys)
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[fullkey] = dict(
                        cmap=cmap,
                        infile=infile,
                        outfile=outfile,
                        irfs=NAME_FACTORY.irfs(**name_keys),
                        hpx_order=min(comp.hpx_order, args['hpx_order_max']),
                        evtype=comp.evtype,
                        logfile=logfile)

        return job_configs
Code Example #20
File: job_library.py  Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        ret_dict = make_diffuse_comp_info_dict(
            components=components,
            library=args['library'],
            basedir=NAME_FACTORY.base_dict['basedir'])
        diffuse_comp_info_dict = ret_dict['comp_info_dict']

        for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
            diffuse_comp_info_value = diffuse_comp_info_dict[
                diffuse_comp_info_key]

            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')

                if diffuse_comp_info_value.components is None:
                    sub_comp_info = diffuse_comp_info_value
                else:
                    sub_comp_info = diffuse_comp_info_value.get_component_info(
                        comp)

                full_key = "%s_%s" % (sub_comp_info.sourcekey, key)

                name_keys = dict(zcut=zcut,
                                 sourcekey=sub_comp_info.sourcekey,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime='none',
                                 fullpath=True)

                infile = NAME_FACTORY.srcmaps(**name_keys)
                outfile = infile.replace('.fits', '.png')

                logfile = make_nfs_path(outfile.replace('.png', '_png.log'))
                job_configs[full_key] = dict(
                    input=infile,
                    output=outfile,
                    extension=sub_comp_info.source_name,
                    zscale=args.get('zscale', 'log'),
                    logfile=logfile)

        return job_configs
Code Example #21
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])
        ret_dict = make_catalog_comp_dict(sources=args['library'], basedir='.')
        comp_info_dict = ret_dict['comp_info_dict']

        for split_ver, split_dict in comp_info_dict.items():
            for source_key, source_dict in split_dict.items():
                full_key = "%s_%s" % (split_ver, source_key)
                merged_name = "%s_%s" % (source_dict.catalog_info.catalog_name,
                                         source_key)
                if source_dict.model_type != 'CompositeSource':
                    continue

                for comp in components:
                    zcut = "zmax%i" % comp.zmax
                    key = "%s_%s" % (
                        full_key, comp.make_key('{ebin_name}_{evtype_name}'))
                    name_keys = dict(zcut=zcut,
                                     sourcekey=full_key,
                                     ebin=comp.ebin_name,
                                     psftype=comp.evtype_name,
                                     coordsys=comp.coordsys,
                                     mktime='none',
                                     irf_ver=NAME_FACTORY.irf_ver())
                    nested_name_keys = dict(
                        zcut=zcut,
                        sourcekey=source_dict.catalog_info.catalog_name,
                        ebin=comp.ebin_name,
                        psftype=comp.evtype_name,
                        coordsys=comp.coordsys,
                        mktime='none',
                        irf_ver=NAME_FACTORY.irf_ver())
                    outfile = NAME_FACTORY.srcmaps(**name_keys)
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[key] = dict(
                        srcmaps=NAME_FACTORY.srcmaps(**nested_name_keys),
                        expcube=NAME_FACTORY.ltcube(**name_keys),
                        irfs=NAME_FACTORY.irfs(**name_keys),
                        bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                        srcmdl=NAME_FACTORY.srcmdl_xml(**name_keys),
                        merged=merged_name,
                        outfile=outfile,
                        outxml=NAME_FACTORY.nested_srcmdl_xml(**name_keys),
                        logfile=logfile)

        return job_configs
Code Example #22
File: dm_plotting.py  Project: fermiPy/dmpipe
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']

        try:
            os.makedirs(os.path.join(ttype, 'results'))
        except OSError:
            pass

        (roster_yaml, sim) = NAME_FACTORY.resolve_rosterfile(args)
        if roster_yaml is None:
            return job_configs

        roster_dict = load_yaml(roster_yaml)

        astro_priors = args['astro_priors']
        channels = args['channels']

        sim_path = os.path.join('config', 'sim_%s.yaml' % sim)

        for roster_name in list(roster_dict.keys()):
            rost_chans = select_channels(channels, roster_name)
            for astro_prior in astro_priors:
                name_keys = dict(target_type=ttype,
                                 roster_name=roster_name,
                                 astro_prior=astro_prior,
                                 sim_name=sim,
                                 seed='summary',
                                 fullpath=True)
                bands_path = NAME_FACTORY.sim_stackedlimitsfile(**name_keys)

                for chan in rost_chans:
                    targ_key = "%s:%s:%s:%s" % (roster_name, astro_prior, sim,
                                                chan)
                    output_path = os.path.join(
                        ttype, 'results', "control_mle_%s_%s_%s_%s.png" %
                        (roster_name, astro_prior, sim, chan))
                    logfile = make_nfs_path(output_path.replace(
                        '.png', '.log'))
                    job_config = dict(bands=bands_path,
                                      outfile=output_path,
                                      sim=sim_path,
                                      logfile=logfile,
                                      chan=chan)
                    job_configs[targ_key] = job_config
        return job_configs
Code Example #23
File: target_analysis.py  Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if sim is not None:
            raise ValueError("Found 'sim' argument on AnalyzeSED_SG config.")
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)
        config_yaml = 'config.yaml'

        if is_not_null(args['skydirs']):
            skydirs = args['skydirs']
        else:
            skydirs = None

        base_config = dict(roi_baseline=args['roi_baseline'],
                           make_plots=args['make_plots'],
                           non_null_src=args['non_null_src'])

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name='random',
                             fullpath=True)
            if skydirs is None:
                target_dir = NAME_FACTORY.targetdir(**name_keys)
                skydir_path = None
            else:
                target_dir = NAME_FACTORY.sim_targetdir(**name_keys)
                skydir_path = os.path.join(target_dir, skydirs)
            config_path = os.path.join(target_dir, config_yaml)
            logfile = make_nfs_path(
                os.path.join(target_dir,
                             "%s_%s.log" % (self.linkname, target_name)))
            job_config = base_config.copy()
            job_config.update(
                dict(config=config_path,
                     profiles=target_list,
                     skydirs=skydir_path,
                     logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Code Example #24
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])
        ret_dict = make_catalog_comp_dict(sources=args['library'], basedir='.')
        comp_info_dict = ret_dict['comp_info_dict']

        for split_ver, split_dict in comp_info_dict.items():
            for source_key, source_dict in split_dict.items():
                full_key = "%s_%s" % (split_ver, source_key)
                merged_name = "%s_%s" % (source_dict.catalog_info.catalog_name, source_key)
                if source_dict.model_type != 'CompositeSource':
                    continue

                for comp in components:
                    zcut = "zmax%i" % comp.zmax
                    key = "%s_%s" % (full_key, comp.make_key('{ebin_name}_{evtype_name}'))
                    name_keys = dict(zcut=zcut,
                                     sourcekey=full_key,
                                     ebin=comp.ebin_name,
                                     psftype=comp.evtype_name,
                                     coordsys=comp.coordsys,
                                     mktime='none',
                                     irf_ver=NAME_FACTORY.irf_ver())
                    nested_name_keys = dict(zcut=zcut,
                                            sourcekey=source_dict.catalog_info.catalog_name,
                                            ebin=comp.ebin_name,
                                            psftype=comp.evtype_name,
                                            coordsys=comp.coordsys,
                                            mktime='none',
                                            irf_ver=NAME_FACTORY.irf_ver())
                    outfile = NAME_FACTORY.srcmaps(**name_keys)
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[key] = dict(srcmaps=NAME_FACTORY.srcmaps(**nested_name_keys),
                                            expcube=NAME_FACTORY.ltcube(**name_keys),
                                            irfs=NAME_FACTORY.irfs(**name_keys),
                                            bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                            srcmdl=NAME_FACTORY.srcmdl_xml(**name_keys),
                                            merged=merged_name,
                                            outfile=outfile,
                                            outxml=NAME_FACTORY.nested_srcmdl_xml(**name_keys),
                                            logfile=logfile)

        return job_configs
Code Example #25
File: dm_plotting.py  Project: fermiPy/dmpipe
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (roster_yaml, sim) = NAME_FACTORY.resolve_rosterfile(args)
        if roster_yaml is None:
            return job_configs
        if sim is not None:
            raise ValueError("Sim argument set of plotting data results")

        roster_dict = load_yaml(roster_yaml)

        astro_priors = args['astro_priors']
        channels = args['channels']

        sims = args['sims']
        for roster_name in list(roster_dict.keys()):
            rost_chans = select_channels(channels, roster_name)
            for astro_prior in astro_priors:
                name_keys = dict(target_type=ttype,
                                 roster_name=roster_name,
                                 astro_prior=astro_prior,
                                 fullpath=True)
                input_path = NAME_FACTORY.stackedlimitsfile(**name_keys)
                for sim in sims:
                    name_keys.update(sim_name=sim, seed='summary')
                    bands_path = NAME_FACTORY.sim_stackedlimitsfile(
                        **name_keys)

                    for chan in rost_chans:
                        targ_key = "%s:%s:%s:%s" % (roster_name, astro_prior,
                                                    sim, chan)
                        output_path = os.path.join(
                            ttype, 'results', "final_%s_%s_%s_%s.png" %
                            (roster_name, astro_prior, sim, chan))
                        logfile = make_nfs_path(
                            output_path.replace('.png', '.log'))
                        job_config = dict(infile=input_path,
                                          outfile=output_path,
                                          bands=bands_path,
                                          logfile=logfile,
                                          chan=chan)
                        job_configs[targ_key] = job_config

        return job_configs
Code Example #26
File: target_collect.py  Project: labsaha/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(
            args, require_sim_name=True)
        if targets_yaml is None:
            return job_configs

        write_full = args['write_full']

        targets = load_yaml(targets_yaml)

        base_config = dict(config=args['config'],
                           nsims=args['nsims'],
                           seed=args['seed'])

        first = args['seed']
        last = first + args['nsims'] - 1

        for target_name, profile_list in targets.items():
            for profile in profile_list:
                full_key = "%s:%s:%s" % (target_name, profile, sim)
                name_keys = dict(target_type=ttype,
                                 target_name=target_name,
                                 sim_name=sim,
                                 profile=profile,
                                 fullpath=True)
                sed_file = NAME_FACTORY.sim_sedfile(**name_keys)
                outfile = sed_file.replace(
                    '_SEED.fits', '_collected_%06i_%06i.fits' % (first, last))
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                if not write_full:
                    outfile = None
                summaryfile = sed_file.replace(
                    '_SEED.fits', '_summary_%06i_%06i.fits' % (first, last))
                job_config = base_config.copy()
                job_config.update(dict(sed_file=sed_file,
                                       outfile=outfile,
                                       summaryfile=summaryfile,
                                       logfile=logfile))
                job_configs[full_key] = job_config

        return job_configs
Code Example #27
File: target_analysis.py  Project: jefemagril/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if sim is not None:
            raise ValueError("Found 'sim' argument on AnalyzeSED_SG config.")
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)
        config_yaml = 'config.yaml'

        if is_not_null(args['skydirs']):
            skydirs = args['skydirs']
        else:
            skydirs = None

        base_config = dict(roi_baseline=args['roi_baseline'],
                           make_plots=args['make_plots'],
                           non_null_src=args['non_null_src'])

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name='random',
                             fullpath=True)
            if skydirs is None:
                target_dir = NAME_FACTORY.targetdir(**name_keys)
                skydir_path = None
            else:
                target_dir = NAME_FACTORY.sim_targetdir(**name_keys)
                skydir_path = os.path.join(target_dir, skydirs)
            config_path = os.path.join(target_dir, config_yaml)
            logfile = make_nfs_path(os.path.join(
                target_dir, "%s_%s.log" % (self.linkname, target_name)))
            job_config = base_config.copy()
            job_config.update(dict(config=config_path,
                                   profiles=target_list,
                                   skydirs=skydir_path,
                                   logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Code Example #28
File: dm_plotting.py  Project: fermiPy/dmpipe
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        astro_priors = args['astro_priors']
        channels = args['channels']

        base_config = dict(bands=None, sim=sim)

        targets = load_yaml(targets_yaml)
        for target_name, target_list in list(targets.items()):
            for targ_prof in target_list:
                prof_chans = select_channels(channels, targ_prof)
                for astro_prior in astro_priors:
                    name_keys = dict(target_type=ttype,
                                     target_name=target_name,
                                     profile=targ_prof,
                                     astro_prior=astro_prior,
                                     fullpath=True)
                    input_path = NAME_FACTORY.dmlimitsfile(**name_keys)
                    for chan in prof_chans:
                        targ_key = "%s:%s:%s:%s" % (target_name, targ_prof,
                                                    astro_prior, chan)

                        output_path = input_path.replace(
                            '.fits', '_%s.png' % chan)
                        logfile = make_nfs_path(
                            output_path.replace('.png', '.log'))
                        job_config = base_config.copy()
                        job_config.update(
                            dict(infile=input_path,
                                 outfile=output_path,
                                 astro_prior=astro_prior,
                                 logfile=logfile,
                                 chan=chan))
                        job_configs[targ_key] = job_config

        return job_configs
Code Example #29
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        targetfile_info = NAME_FACTORY.resolve_targetfile(args)
        targets_yaml = targetfile_info[0]
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)

        astro_priors = args['astro_priors']
        specs = args['specs']
        global_min = args['global_min']

        for target_name, target_list in targets.items():
            for targ_prof in target_list:
                prof_specs = specs
                for astro_prior in astro_priors:
                    name_keys = dict(target_type=ttype,
                                     target_name=target_name,
                                     profile=targ_prof,
                                     astro_prior=astro_prior,
                                     fullpath=True)
                    input_path = NAME_FACTORY.stack_likefile(**name_keys)
                    for spec in prof_specs:
                        targ_key = "%s:%s:%s:%s" % (target_name, targ_prof,
                                                    astro_prior, spec)
                        output_path = input_path.replace(
                            '.fits', '_%s.png' % spec)
                        logfile = make_nfs_path(
                            output_path.replace('.png', '.log'))
                        job_config = dict(infile=input_path,
                                          outfile=output_path,
                                          astro_prior=astro_prior,
                                          logfile=logfile,
                                          global_min=global_min,
                                          spec=spec)
                        job_configs[targ_key] = job_config

        return job_configs
Code Example #30
File: job_library.py  Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        gmm = make_ring_dicts(library=args['library'], basedir='.')
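        # gmm holds the ring definitions; one merge job is built per ring,
        # passing the input gas maps as a single space-separated 'args' string.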

        for galkey in gmm.galkeys():
            ring_dict = gmm.ring_dict(galkey)
            for ring_key, ring_info in ring_dict.items():
                output_file = ring_info.merged_gasmap
                file_string = ""
                for fname in ring_info.files:
                    file_string += " %s" % fname
                logfile = make_nfs_path(output_file.replace('.fits', '.log'))
                job_configs[ring_key] = dict(output=output_file,
                                             args=file_string,
                                             logfile=logfile)

        return job_configs
Code example #31
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        comp_file = args.get('comp', None)
        if comp_file is not None:
            comp_dict = yaml.safe_load(open(comp_file))
            coordsys = comp_dict.pop('coordsys')
            for v in comp_dict.values():
                v['coordsys'] = coordsys
        else:
            return job_configs

        NAME_FACTORY.update_base_dict(args['data'])

        inputfiles = create_inputlist(args['ft1file'])
        outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'],
                                   'counts_cubes')
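        # One job per input FT1 file; each job gets a zero-padded key and its
        # own subdirectory under counts_cubes for its log and parameter files.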

        for idx, infile in enumerate(inputfiles):
            key = "%06i" % idx
            output_dir = os.path.join(outdir_base, key)
            try:
                os.mkdir(output_dir)
            except OSError:
                pass
            logfile = make_nfs_path(
                os.path.join(output_dir, 'scatter_%s.log' % key))
            job_configs[key] = args.copy()
            job_configs[key].update(
                dict(ft1file=infile,
                     comp=args['comp'],
                     hpx_order_max=args['hpx_order_max'],
                     outdir=outdir_base,
                     outkey=key,
                     logfile=logfile,
                     pfiles=output_dir))

        return job_configs
Code example #32
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        targets = load_yaml(targets_yaml)

        base_config = dict(sim_profile=args['sim_profile'],
                           roi_baseline=args['roi_baseline'],
                           sim=sim,
                           nsims=args['nsims'],
                           seed=args['seed'])
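        # One job per target; the target's profile list is passed through to
        # the job as 'profiles', on top of the shared settings above.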

        for target_name, target_list in targets.items():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name=sim,
                             fullpath=True)
            simdir = NAME_FACTORY.sim_targetdir(**name_keys)
            config_path = os.path.join(simdir, config_yaml)
            logfile = make_nfs_path(
                os.path.join(simdir,
                             "%s_%s.log" % (self.linkname, target_name)))
            job_config = base_config.copy()
            job_config.update(
                dict(config=config_path, logfile=logfile,
                     profiles=target_list))
            job_configs[target_name] = job_config

        return job_configs
Code example #33
File: target_plotting.py Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        targets = load_yaml(targets_yaml)
        first = args['seed']
        last = first + args['nsims'] - 1
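        # Simulations cover nsims consecutive seeds, [first, last] inclusive;
        # the summary-file name encodes this seed range.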

        for target_name, target_list in targets.items():
            for targ_prof in target_list:
                name_keys = dict(target_type=ttype,
                                 target_name=target_name,
                                 profile=targ_prof,
                                 fullpath=True)
                targ_key = "%s_%s" % (target_name, targ_prof)
                input_path = NAME_FACTORY.sedfile(**name_keys)
                output_path = input_path.replace('.fits', '.png')
                logfile = make_nfs_path(input_path.replace('.fits', '.log'))
                sed_file = NAME_FACTORY.sim_sedfile(sim_name=args['band_sim'],
                                                    **name_keys)
                summaryfile = sed_file.replace(
                    '_SEED.fits', '_summary_%06i_%06i.fits' % (first, last))

                job_config = dict(infile=input_path,
                                  outfile=output_path,
                                  summaryfile=summaryfile,
                                  band_sim=args['band_sim'],
                                  logfile=logfile)

                job_configs[targ_key] = job_config

        return job_configs
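
The targets YAML loaded by these hooks maps each target name to a list of profile names, which is exactly the structure the nested loops above iterate over. A minimal, purely illustrative sketch (all names below are hypothetical placeholders):

    # Illustrative targets file content after load_yaml(): each key is a
    # target name, each value a list of profile names ('targ_prof' above).
    targets = {
        'target_a': ['profile_1', 'profile_2'],
        'target_b': ['profile_1'],
    }
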
Code example #34
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])
        NAME_FACTORY_CLEAN.update_base_dict(args['data'])
        NAME_FACTORY_DIRTY.update_base_dict(args['data'])

        NAME_FACTORY_CLEAN.base_dict['evclass'] = args['clean']
        NAME_FACTORY_DIRTY.base_dict['evclass'] = args['dirty']
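        # Two name factories are configured above, one per event class; each
        # job pairs the 'clean' and 'dirty' exposure and counts cubes used to
        # build the residual cosmic-ray map for one component.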

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys=comp.coordsys,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             mktime=args['mktimefilter'],
                             fullpath=True)
            outfile = NAME_FACTORY.residual_cr(**name_keys)
            if args['hpx_order']:
                hpx_order = min(comp.hpx_order, args['hpx_order'])
            else:
                hpx_order = comp.hpx_order
            job_configs[key] = dict(bexpcube_dirty=NAME_FACTORY_DIRTY.bexpcube(**name_keys),
                                    ccube_dirty=NAME_FACTORY_DIRTY.ccube(**name_keys),
                                    bexpcube_clean=NAME_FACTORY_CLEAN.bexpcube(**name_keys),
                                    ccube_clean=NAME_FACTORY_CLEAN.ccube(**name_keys),
                                    outfile=outfile,
                                    hpx_order=hpx_order,
                                    full_output=args['full_output'],
                                    logfile=make_nfs_path(outfile.replace('.fits', '.log')))

        return job_configs
Code example #35
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        comp_file = args.get('comp', None)
        if comp_file is not None:
            comp_dict = yaml.safe_load(open(comp_file))
            coordsys = comp_dict.pop('coordsys')
            for v in comp_dict.values():
                v['coordsys'] = coordsys
        else:
            return job_configs

        NAME_FACTORY.update_base_dict(args['data'])

        inputfiles = create_inputlist(args['ft1file'])
        outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')

        for idx, infile in enumerate(inputfiles):
            key = "%06i" % idx
            output_dir = os.path.join(outdir_base, key)
            try:
                os.mkdir(output_dir)
            except OSError:
                pass
            logfile = make_nfs_path(os.path.join(output_dir, 'scatter_%s.log' % key))
            job_configs[key] = comp_dict.copy()
            job_configs[key].update(dict(ft1file=infile,
                                         comp=args['comp'],
                                         hpx_order_max=args['hpx_order_max'],
                                         outdir=outdir_base,
                                         outkey=key,
                                         logfile=logfile,
                                         pfiles=output_dir))

        return job_configs
Code example #36
File: target_sim.py Project: jefemagril/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        rand_yaml = NAME_FACTORY.resolve_randconfig(args)

        targets = load_yaml(targets_yaml)

        base_config = dict(rand_config=rand_yaml)
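        # One job per target: each writes skydirs.yaml into the target's
        # simulation directory, using the random config resolved above.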

        for target_name in targets.keys():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name=sim,
                             fullpath=True)
            simdir = NAME_FACTORY.sim_targetdir(**name_keys)
            config_path = os.path.join(simdir, config_yaml)
            outfile = os.path.join(simdir, 'skydirs.yaml')
            logfile = make_nfs_path(outfile.replace('yaml', 'log'))
            job_config = base_config.copy()
            job_config.update(dict(config=config_path,
                                   outfile=outfile,
                                   logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Code example #37
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        ttype = args['ttype']
        (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
        if targets_yaml is None:
            return job_configs

        config_yaml = 'config.yaml'
        config_override = args.get('config')
        if is_not_null(config_override):
            config_yaml = config_override

        rand_yaml = NAME_FACTORY.resolve_randconfig(args)

        targets = load_yaml(targets_yaml)

        base_config = dict(rand_config=rand_yaml)

        for target_name in targets.keys():
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             sim_name=sim,
                             fullpath=True)
            simdir = NAME_FACTORY.sim_targetdir(**name_keys)
            config_path = os.path.join(simdir, config_yaml)
            outfile = os.path.join(simdir, 'skydirs.yaml')
            logfile = make_nfs_path(outfile.replace('yaml', 'log'))
            job_config = base_config.copy()
            job_config.update(
                dict(config=config_path, outfile=outfile, logfile=logfile))
            job_configs[target_name] = job_config

        return job_configs
Code example #38
    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided to
        the individual links """
        comp_file = args.get('comp', None)
        datafile = args.get('data', None)
        if is_null(comp_file):
            return
        if is_null(datafile):
            return

        NAME_FACTORY.update_base_dict(datafile)

        outdir = args.get('outdir', None)
        outkey = args.get('outkey', None)
        ft1file = args['ft1file']

        if is_null(outdir) or is_null(outkey):
            return
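        # Per-job parameter-file (PFILES) directory, so concurrent
        # ScienceTools runs do not clobber each other's .par files.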
        pfiles = os.path.join(outdir, outkey)

        self.comp_dict = yaml.safe_load(open(comp_file))
        coordsys = self.comp_dict.pop('coordsys')
        full_out_dir = make_nfs_path(os.path.join(outdir, outkey))

        for key_e, comp_e in sorted(self.comp_dict.items()):
            emin = math.pow(10., comp_e['log_emin'])
            emax = math.pow(10., comp_e['log_emax'])
            enumbins = comp_e['enumbins']
            zmax = comp_e['zmax']
            zcut = "zmax%i" % comp_e['zmax']
            evclassstr = NAME_FACTORY.base_dict['evclass']
            kwargs_select = dict(zcut=zcut,
                                 ebin=key_e,
                                 psftype='ALL',
                                 coordsys=coordsys,
                                 mktime='none')
            selectfile_energy = make_full_path(outdir, outkey, NAME_FACTORY.select(**kwargs_select))
            linkname = 'select-energy-%s-%s' % (key_e, zcut)
            self._set_link(linkname, Gtlink_select,
                           infile=ft1file,
                           outfile=selectfile_energy,
                           zmax=zmax,
                           emin=emin,
                           emax=emax,
                           evclass=NAME_FACTORY.evclassmask(evclassstr),
                           pfiles=pfiles,
                           logfile=os.path.join(full_out_dir, "%s.log" % linkname))

            if 'evtclasses' in comp_e:
                evtclasslist_vals = comp_e['evtclasses']
            else:
                evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]

            for evtclassval in evtclasslist_vals:
                for psf_type, psf_dict in sorted(comp_e['psf_types'].items()):
                    linkname_select = 'select-type-%s-%s-%s-%s' % (
                        key_e, zcut, evtclassval, psf_type)
                    linkname_bin = 'bin-%s-%s-%s-%s' % (key_e, zcut, evtclassval, psf_type)
                    hpx_order = psf_dict['hpx_order']
                    kwargs_bin = kwargs_select.copy()
                    kwargs_bin['psftype'] = psf_type
                    selectfile_psf = make_full_path(
                        outdir, outkey, NAME_FACTORY.select(**kwargs_bin))
                    binfile = make_full_path(outdir, outkey, NAME_FACTORY.ccube(**kwargs_bin))
                    self._set_link(linkname_select, Gtlink_select,
                                   infile=selectfile_energy,
                                   outfile=selectfile_psf,
                                   zmax=zmax,
                                   emin=emin,
                                   emax=emax,
                                   evtype=EVT_TYPE_DICT[psf_type],
                                   evclass=NAME_FACTORY.evclassmask(evtclassval),
                                   pfiles=pfiles,
                                   logfile=os.path.join(full_out_dir, "%s.log" % linkname_select))

                    self._set_link(linkname_bin, Gtlink_bin,
                                   coordsys=coordsys,
                                   hpx_order=hpx_order,
                                   evfile=selectfile_psf,
                                   outfile=binfile,
                                   emin=emin,
                                   emax=emax,
                                   enumbins=enumbins,
                                   pfiles=pfiles,
                                   logfile=os.path.join(full_out_dir, "%s.log" % linkname_bin))
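
The component dictionary this method loads with yaml.safe_load is only implied by the keys it reads. A minimal reconstruction is sketched below; the key names come from the code above, every value is a placeholder, and the fuller variant in Code example #42 additionally reads 'mktimefilters', 'logebins', 'tmin', and 'tmax':

    # Illustrative component dictionary for _map_arguments; key names follow
    # the accesses above, values are placeholders only.
    comp_dict = {
        'coordsys': 'GAL',        # popped off; used for file naming and binning
        'E0': {                   # one block per energy-bin component
            'log_emin': 2.0,      # log10(E/MeV) lower edge
            'log_emax': 2.5,      # log10(E/MeV) upper edge
            'enumbins': 4,        # number of energy bins passed to gtbin
            'zmax': 90,           # zenith-angle cut in degrees
            'evtclasses': ['SOURCE'],   # optional; defaults to the base event class
            'psf_types': {
                'PSF3': {'hpx_order': 6},   # HEALPix order per PSF event type
            },
        },
    }
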
Code example #39
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        ret_dict = make_diffuse_comp_info_dict(components=components,
                                               library=args['library'],
                                               basedir='.')
        diffuse_comp_info_dict = ret_dict['comp_info_dict']
        if args['make_xml']:
            SrcmapsDiffuse_SG._make_xml_files(diffuse_comp_info_dict)

        for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
            diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]
            no_psf = diffuse_comp_info_value.no_psf
            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')
                if diffuse_comp_info_value.components is None:
                    sub_comp_info = diffuse_comp_info_value
                else:
                    sub_comp_info = diffuse_comp_info_value.get_component_info(comp)
                name_keys = dict(zcut=zcut,
                                 sourcekey=sub_comp_info.sourcekey,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 mktime='none',
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 fullpath=True)

                kmin = 0
                kmax = comp.enumbins + 1
                outfile_base = NAME_FACTORY.srcmaps(**name_keys)
                kstep = HPX_ORDER_TO_KSTEP[comp.hpx_order]
                base_dict = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                 expcube=NAME_FACTORY.ltcube(**name_keys),
                                 irfs=NAME_FACTORY.irfs(**name_keys),
                                 bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                 srcmdl=sub_comp_info.srcmdl_name,
                                 source=sub_comp_info.source_name,
                                 no_psf=no_psf,
                                 evtype=comp.evtype)
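                # A negative kstep means all energy planes go into one job;
                # otherwise each job covers at most kstep planes, [k, khi).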

                if kstep < 0:
                    kstep = kmax

                for k in range(kmin, kmax, kstep):
                    full_key = "%s_%s_%02i" % (diffuse_comp_info_key, key, k)
                    khi = min(kmax, k + kstep)

                    full_dict = base_dict.copy()
                    outfile = outfile_base.replace('.fits', '_%02i.fits' % k)
                    logfile = make_nfs_path(outfile_base.replace('.fits', '_%02i.log' % k))
                    full_dict.update(dict(outfile=outfile,
                                          kmin=k, kmax=khi,
                                          logfile=logfile))
                    job_configs[full_key] = full_dict

        return job_configs
Code example #40
File: gt_srcmap_partial.py Project: tuoyl/fermipy
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        ret_dict = make_diffuse_comp_info_dict(components=components,
                                               library=args['library'],
                                               basedir='.')
        diffuse_comp_info_dict = ret_dict['comp_info_dict']
        if args['make_xml']:
            SrcmapsDiffuse_SG._make_xml_files(diffuse_comp_info_dict)

        for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
            diffuse_comp_info_value = diffuse_comp_info_dict[
                diffuse_comp_info_key]
            no_psf = diffuse_comp_info_value.no_psf
            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')
                if diffuse_comp_info_value.components is None:
                    sub_comp_info = diffuse_comp_info_value
                else:
                    sub_comp_info = diffuse_comp_info_value.get_component_info(
                        comp)
                name_keys = dict(zcut=zcut,
                                 sourcekey=sub_comp_info.sourcekey,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 mktime='none',
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 fullpath=True)

                kmin = 0
                kmax = comp.enumbins + 1
                outfile_base = NAME_FACTORY.srcmaps(**name_keys)
                kstep = HPX_ORDER_TO_KSTEP[comp.hpx_order]
                base_dict = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                 expcube=NAME_FACTORY.ltcube(**name_keys),
                                 irfs=NAME_FACTORY.irfs(**name_keys),
                                 bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                 srcmdl=sub_comp_info.srcmdl_name,
                                 source=sub_comp_info.source_name,
                                 no_psf=no_psf,
                                 evtype=comp.evtype)
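                # A negative kstep means all energy planes go into one job;
                # otherwise each job covers at most kstep planes, [k, khi).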

                if kstep < 0:
                    kstep = kmax

                for k in range(kmin, kmax, kstep):
                    full_key = "%s_%s_%02i" % (diffuse_comp_info_key, key, k)
                    khi = min(kmax, k + kstep)

                    full_dict = base_dict.copy()
                    outfile = outfile_base.replace('.fits', '_%02i.fits' % k)
                    logfile = make_nfs_path(
                        outfile_base.replace('.fits', '_%02i.log' % k))
                    full_dict.update(
                        dict(outfile=outfile,
                             kmin=k,
                             kmax=khi,
                             logfile=logfile))
                    job_configs[full_key] = full_dict

        return job_configs
Code example #41
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        if self._comp_dict is None or self._comp_dict_file != args['library']:
            self._comp_dict_file = args['library']
            self._comp_dict = make_catalog_comp_dict(
                sources=self._comp_dict_file,
                basedir=NAME_FACTORY.base_dict['basedir'])
        else:
            print("Using cached catalog dict from %s" % args['library'])

        catalog_info_dict = self._comp_dict['catalog_info_dict']
        comp_info_dict = self._comp_dict['comp_info_dict']

        n_src_per_job = args['nsrc']

        if args['make_xml']:
            SrcmapsCatalog_SG._make_xml_files(catalog_info_dict,
                                              comp_info_dict)

        for catalog_name, catalog_info in catalog_info_dict.items():

            n_cat_src = len(catalog_info.catalog.table)
            n_job = int(math.ceil(float(n_cat_src) / n_src_per_job))
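            # The catalog is split into chunks of n_src_per_job sources; each
            # job builds source maps for one chunk of one binning component.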

            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')
                name_keys = dict(zcut=zcut,
                                 sourcekey=catalog_name,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime='none',
                                 fullpath=True)

                for i_job in range(n_job):
                    full_key = "%s_%02i" % (key, i_job)
                    srcmin = i_job * n_src_per_job
                    srcmax = min(srcmin + n_src_per_job, n_cat_src)
                    outfile = NAME_FACTORY.srcmaps(**name_keys).replace(
                        '.fits', "_%02i.fits" % (i_job))
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[full_key] = dict(
                        cmap=NAME_FACTORY.ccube(**name_keys),
                        expcube=NAME_FACTORY.ltcube(**name_keys),
                        irfs=NAME_FACTORY.irfs(**name_keys),
                        bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                        outfile=outfile,
                        logfile=logfile,
                        srcmdl=catalog_info.srcmdl_name,
                        evtype=comp.evtype,
                        srcmin=srcmin,
                        srcmax=srcmax)

        return job_configs
Code example #42
    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided to
        the individual links """
        comp_file = args.get('comp', None)
        datafile = args.get('data', None)
        if is_null(comp_file):
            return
        if is_null(datafile):
            return

        NAME_FACTORY.update_base_dict(args['data'])

        outdir = args.get('outdir')
        outkey = args.get('outkey')
        ft1file = args['ft1file']
        ft2file = args['ft2file']
        if is_null(outdir) or is_null(outkey):
            return
        pfiles = os.path.join(outdir, outkey)

        self.comp_dict = yaml.safe_load(open(comp_file))
        coordsys = self.comp_dict.pop('coordsys')

        full_out_dir = make_nfs_path(os.path.join(outdir, outkey))

        for key_e, comp_e in sorted(self.comp_dict.items()):
            if 'logebins' in comp_e:
                ebins_file = make_nfs_path(
                    os.path.join(full_out_dir, 'energy_bins.fits'))
                write_ebins_file(ebins_file, comp_e['logebins'])
            else:
                ebins_file = None
            enumbins = comp_e['enumbins']
            emin = math.pow(10., comp_e['log_emin'])
            emax = math.pow(10., comp_e['log_emax'])
            zmax = comp_e['zmax']
            zcut = "zmax%i" % comp_e['zmax']
            tmin = comp_e.get('tmin', None)
            tmax = comp_e.get('tmax', None)
            if is_null(tmin):
                tmin = 'INDEF'
            if is_null(tmax):
                tmax = 'INDEF'
            evclassstr = NAME_FACTORY.base_dict['evclass']

            kwargs_select = dict(zcut=zcut,
                                 ebin=key_e,
                                 psftype='ALL',
                                 coordsys=coordsys)
            linkname = 'select-energy-%s-%s' % (key_e, zcut)
            selectfile_energy = make_full_path(
                outdir, outkey, NAME_FACTORY.select(**kwargs_select))
            self._set_link(linkname,
                           Gtlink_select,
                           infile=ft1file,
                           outfile=selectfile_energy,
                           zmax=zmax,
                           tmin=tmin,
                           tmax=tmax,
                           emin=emin,
                           emax=emax,
                           evclass=NAME_FACTORY.evclassmask(evclassstr),
                           pfiles=pfiles,
                           logfile=os.path.join(full_out_dir,
                                                "%s.log" % linkname))

            if 'mktimefilters' in comp_e:
                mktimefilters = comp_e['mktimefilters']
            else:
                mktimefilters = ['none']

            for mktimekey in mktimefilters:
                kwargs_mktime = kwargs_select.copy()
                kwargs_mktime['mktime'] = mktimekey
                filterstring = MKTIME_DICT[mktimekey]
                mktime_file = make_full_path(
                    outdir, outkey, NAME_FACTORY.mktime(**kwargs_mktime))
                ltcube_file = make_full_path(
                    outdir, outkey, NAME_FACTORY.ltcube(**kwargs_mktime))
                linkname_mktime = 'mktime-%s-%s-%s' % (key_e, zcut, mktimekey)
                linkname_ltcube = 'ltcube-%s-%s-%s' % (key_e, zcut, mktimekey)

                self._set_link(linkname_mktime,
                               Gtlink_mktime,
                               evfile=selectfile_energy,
                               outfile=mktime_file,
                               scfile=ft2file,
                               filter=filterstring,
                               pfiles=pfiles,
                               logfile=os.path.join(full_out_dir, "%s.log" %
                                                    linkname_mktime))
                self._set_link(linkname_ltcube,
                               Gtlink_ltcube,
                               evfile=mktime_file,
                               outfile=ltcube_file,
                               scfile=ft2file,
                               zmax=zmax,
                               pfiles=pfiles,
                               logfile=os.path.join(full_out_dir, "%s.log" %
                                                    linkname_ltcube))

                if 'evtclasses' in comp_e:
                    evtclasslist_vals = comp_e['evtclasses']
                else:
                    evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]

                for evtclassval in evtclasslist_vals:
                    for psf_type, psf_dict in sorted(
                            comp_e['psf_types'].items()):
                        linkname_select = 'select-type-%s-%s-%s-%s-%s' % (
                            key_e, zcut, mktimekey, evtclassval, psf_type)
                        linkname_bin = 'bin-%s-%s-%s-%s-%s' % (
                            key_e, zcut, mktimekey, evtclassval, psf_type)
                        kwargs_bin = kwargs_mktime.copy()
                        kwargs_bin['psftype'] = psf_type
                        kwargs_bin['coordsys'] = coordsys
                        kwargs_bin['evclass'] = evtclassval
                        selectfile_psf = make_full_path(
                            outdir, outkey, NAME_FACTORY.select(**kwargs_bin))
                        binfile_psf = make_full_path(
                            outdir, outkey, NAME_FACTORY.ccube(**kwargs_bin))
                        hpx_order_psf = min(args['hpx_order_max'],
                                            psf_dict['hpx_order'])

                        self._set_link(
                            linkname_select,
                            Gtlink_select,
                            infile=selectfile_energy,
                            outfile=selectfile_psf,
                            zmax=zmax,
                            emin=emin,
                            emax=emax,
                            tmin=tmin,
                            tmax=tmax,
                            evtype=EVT_TYPE_DICT[psf_type],
                            evclass=NAME_FACTORY.evclassmask(evtclassval),
                            pfiles=pfiles,
                            logfile=os.path.join(full_out_dir,
                                                 "%s.log" % linkname_select))
                        if ebins_file is None:
                            self._set_link(linkname_bin,
                                           Gtlink_bin,
                                           coordsys=coordsys,
                                           hpx_order=hpx_order_psf,
                                           evfile=selectfile_psf,
                                           outfile=binfile_psf,
                                           emin=emin,
                                           emax=emax,
                                           enumbins=enumbins,
                                           pfiles=pfiles,
                                           logfile=os.path.join(
                                               full_out_dir,
                                               "%s.log" % linkname_bin))
                        else:
                            self._set_link(linkname_bin,
                                           Gtlink_bin,
                                           coordsys=coordsys,
                                           hpx_order=hpx_order_psf,
                                           evfile=selectfile_psf,
                                           outfile=binfile_psf,
                                           ebinalg='FILE',
                                           ebinfile=ebins_file,
                                           pfiles=pfiles,
                                           logfile=os.path.join(
                                               full_out_dir,
                                               "%s.log" % linkname_bin))