def make_library(self, diffuse_yaml, catalog_yaml, binning_yaml):
    """ Build up the library of all the components

    Parameters
    ----------

    diffuse_yaml : str
        Name of the yaml file with the library of diffuse component definitions
    catalog_yaml : str
        Name of the yaml file with the library of catalog split definitions
    binning_yaml : str
        Name of the yaml file with the binning definitions
    """
    ret_dict = {}
    #catalog_dict = yaml.safe_load(open(catalog_yaml))
    components_dict = Component.build_from_yamlfile(binning_yaml)
    diffuse_ret_dict = make_diffuse_comp_info_dict(GalpropMapManager=self._gmm,
                                                   DiffuseModelManager=self._dmm,
                                                   diffuse=diffuse_yaml,
                                                   components=components_dict)
    catalog_ret_dict = make_catalog_comp_dict(sources=catalog_yaml,
                                              CatalogSourceManager=self._csm)
    ret_dict.update(diffuse_ret_dict['comp_info_dict'])
    ret_dict.update(catalog_ret_dict['comp_info_dict'])
    self._library.update(ret_dict)
    return ret_dict
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    for comp in components:
        for sourcekey in args['sourcekeys']:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}') + "_%s" % sourcekey
            name_keys = dict(zcut=zcut,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             irf_ver=args['irf_ver'],
                             sourcekey=sourcekey,
                             fullpath=True)
            outfile = NAME_FACTORY.template_sunmoon(**name_keys)
            job_configs[key] = dict(expsun=NAME_FACTORY.bexpcube_sun(**name_keys),
                                    avgexp=NAME_FACTORY.bexpcube(**name_keys),
                                    sunprof=NAME_FACTORY.angprofile(**name_keys),
                                    cmap='none',
                                    outfile=outfile,
                                    irfs=NAME_FACTORY.irfs(**name_keys),
                                    evtype=comp.evtype,
                                    emin=comp.emin,
                                    emax=comp.emax,
                                    enumbins=comp.enumbins,
                                    logfile=outfile.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    models = load_yaml(args['models'])

    for modelkey in models:
        manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                'srcmap_manifest_%s.yaml' % modelkey)
        for comp in components:
            key = comp.make_key('{ebin_name}_{evtype_name}')
            fullkey = "%s_%s" % (modelkey, key)
            outfile = NAME_FACTORY.merged_srcmaps(modelkey=modelkey,
                                                  component=key,
                                                  coordsys=comp.coordsys,
                                                  mktime='none',
                                                  irf_ver=NAME_FACTORY.irf_ver())
            logfile = make_nfs_path(outfile.replace('.fits', '.log'))
            job_configs[fullkey] = dict(input=manifest,
                                        compname=key,
                                        logfile=logfile)

    return job_configs
def make_diffuse_comp_info_dict(**kwargs):
    """Build and return the information about the diffuse components
    """
    diffuse_yamlfile = kwargs.pop('diffuse', 'config/diffuse_components.yaml')
    components = kwargs.pop('components', None)
    if components is None:
        comp_yamlfile = kwargs.pop('comp', 'config/binning.yaml')
        components = Component.build_from_yamlfile(comp_yamlfile)
    gmm = kwargs.get('GalpropMapManager', GalpropMapManager(**kwargs))
    dmm = kwargs.get('DiffuseModelManager', DiffuseModelManager(**kwargs))
    diffuse_comps = DiffuseModelManager.read_diffuse_component_yaml(diffuse_yamlfile)
    diffuse_comp_info_dict = dmm.make_diffuse_comp_info_dict(diffuse_comps, components)
    for diffuse_value in diffuse_comps.values():
        if diffuse_value['model_type'] != 'galprop_rings':
            continue
        versions = diffuse_value['versions']
        for version in versions:
            galprop_dict = gmm.make_diffuse_comp_info_dict(version)
            diffuse_comp_info_dict.update(galprop_dict)
    return dict(comp_info_dict=diffuse_comp_info_dict,
                GalpropMapManager=gmm,
                DiffuseModelManager=dmm)
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])
    mktime = args['mktimefilter']

    for comp in components:
        zcut = "zmax%i" % comp.zmax
        key = comp.make_key('{ebin_name}_{evtype_name}')
        name_keys = dict(zcut=zcut,
                         ebin=comp.ebin_name,
                         psftype=comp.evtype_name,
                         irf_ver=NAME_FACTORY.irf_ver(),
                         mktime=mktime,
                         fullpath=True)
        outfile = NAME_FACTORY.bexpcube_sun(**name_keys)
        ltcube_sun = NAME_FACTORY.ltcube_sun(**name_keys)
        job_configs[key] = dict(infile=ltcube_sun,
                                outfile=outfile,
                                irfs=NAME_FACTORY.irfs(**name_keys),
                                evtype=comp.evtype,
                                emin=comp.emin,
                                emax=comp.emax,
                                enumbins=comp.enumbins,
                                logfile=make_nfs_path(outfile.replace('.fits', '.log')))

    return job_configs
def make_library(self, diffuse_yaml, catalog_yaml, binning_yaml):
    """ Build up the library of all the components

    Parameters
    ----------

    diffuse_yaml : str
        Name of the yaml file with the library of diffuse component definitions
    catalog_yaml : str
        Name of the yaml file with the library of catalog split definitions
    binning_yaml : str
        Name of the yaml file with the binning definitions
    """
    ret_dict = {}
    #catalog_dict = yaml.safe_load(open(catalog_yaml))
    components_dict = Component.build_from_yamlfile(binning_yaml)
    diffuse_ret_dict = make_diffuse_comp_info_dict(GalpropMapManager=self._gmm,
                                                   DiffuseModelManager=self._dmm,
                                                   library=diffuse_yaml,
                                                   components=components_dict)
    catalog_ret_dict = make_catalog_comp_dict(library=catalog_yaml,
                                              CatalogSourceManager=self._csm)
    ret_dict.update(diffuse_ret_dict['comp_info_dict'])
    ret_dict.update(catalog_ret_dict['comp_info_dict'])
    self._library.update(ret_dict)
    return ret_dict
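# Hedged usage sketch for make_library (not from the source): the owning
# manager class name and the yaml paths below are illustrative assumptions.
#
#   mm = ModelManager()                          # hypothetical owning class
#   library = mm.make_library('config/library.yaml',
#                             'config/catalog_components.yaml',
#                             'config/binning.yaml')
#   print(sorted(library.keys()))                # one entry per source component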
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    for comp in components:
        zcut = "zmax%i" % comp.zmax
        key = comp.make_key('{ebin_name}_{evtype_name}')
        name_keys = dict(zcut=zcut,
                         ebin=comp.ebin_name,
                         psftype=comp.evtype_name,
                         coordsys=args['coordsys'],
                         irf_ver=args['irf_ver'],
                         fullpath=True)
        outfile = NAME_FACTORY.bexpcube(**name_keys)
        job_configs[key] = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                infile=NAME_FACTORY.ltcube(**name_keys),
                                outfile=outfile,
                                irfs=NAME_FACTORY.irfs(**name_keys),
                                hpx_order=min(comp.hpx_order, args['hpx_order_max']),
                                evtype=comp.evtype,
                                logfile=outfile.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_catalog_comp_dict(library=args['library'],
                                      basedir=NAME_FACTORY.base_dict['basedir'])
    catalog_info_dict = ret_dict['catalog_info_dict']

    for catalog_name in catalog_info_dict:
        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             sourcekey=catalog_name,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys=comp.coordsys,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             mktime='none',
                             fullpath=True)
            outfile = NAME_FACTORY.srcmaps(**name_keys)
            outfile_tokens = os.path.splitext(outfile)
            infile_regexp = "%s_*.fits" % outfile_tokens[0]
            logfile = make_nfs_path(outfile.replace('.fits', '.log'))
            job_configs[key] = dict(output=outfile,
                                    args=infile_regexp,
                                    logfile=logfile)

    return job_configs
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = dict(comp=args['comp'],
                        data=args['data'],
                        sources=args['sources'],
                        diffuse=args['diffuse'],
                        hpx_order=args['hpx_order'],
                        args=args['args'],
                        logfile=os.path.join('analysis', 'init.log'))
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    for modelkey in args['args']:
        manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                'srcmap_manifest_%s.yaml' % modelkey)
        for comp in components:
            key = comp.make_key('{ebin_name}_{evtype_name}')
            outfile = NAME_FACTORY.merged_srcmaps(modelkey=modelkey,
                                                  component=key,
                                                  coordsys='GAL',
                                                  irf_ver=args['irf_ver'])
            logfile = outfile.replace('.fits', '.log')
            job_configs[key] = dict(input=manifest,
                                    comp=key,
                                    logfile=logfile)

    output_config = {}
    return input_config, job_configs, output_config
def make_diffuse_comp_info_dict(**kwargs):
    """Build and return the information about the diffuse components
    """
    diffuse_yamlfile = kwargs.pop('diffuse', 'config/diffuse_components.yaml')
    components = kwargs.pop('components', None)
    if components is None:
        comp_yamlfile = kwargs.pop('comp', 'config/binning.yaml')
        components = Component.build_from_yamlfile(comp_yamlfile)
    gmm = kwargs.get('GalpropMapManager', GalpropMapManager(**kwargs))
    dmm = kwargs.get('DiffuseModelManager', DiffuseModelManager(**kwargs))
    if diffuse_yamlfile is None or diffuse_yamlfile == 'None':
        diffuse_comps = {}
    else:
        diffuse_comps = DiffuseModelManager.read_diffuse_component_yaml(diffuse_yamlfile)
    diffuse_comp_info_dict = dmm.make_diffuse_comp_info_dict(diffuse_comps, components)
    for diffuse_value in diffuse_comps.values():
        if diffuse_value['model_type'] != 'galprop_rings':
            continue
        versions = diffuse_value['versions']
        for version in versions:
            galprop_dict = gmm.make_diffuse_comp_info_dict(version)
            diffuse_comp_info_dict.update(galprop_dict)
    return dict(comp_info_dict=diffuse_comp_info_dict,
                GalpropMapManager=gmm,
                DiffuseModelManager=dmm)
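# Hedged usage sketch for make_diffuse_comp_info_dict: called with no managers,
# it constructs its own.  The yaml paths are the function's own defaults; the
# variable names are illustrative.
#
#   ret = make_diffuse_comp_info_dict(diffuse='config/diffuse_components.yaml',
#                                     comp='config/binning.yaml')
#   comp_info = ret['comp_info_dict']       # {sourcekey: component info}
#   gmm = ret['GalpropMapManager']          # managers are returned for reuse
#   dmm = ret['DiffuseModelManager']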
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    if self._comp_dict is None or self._comp_dict_file != args['library']:
        self._comp_dict_file = args['library']
        self._comp_dict = make_catalog_comp_dict(sources=self._comp_dict_file,
                                                 basedir=NAME_FACTORY.base_dict['basedir'])
    else:
        print("Using cached catalog dict from %s" % args['library'])

    catalog_info_dict = self._comp_dict['catalog_info_dict']
    comp_info_dict = self._comp_dict['comp_info_dict']

    n_src_per_job = args['nsrc']

    if args['make_xml']:
        SrcmapsCatalog_SG._make_xml_files(catalog_info_dict, comp_info_dict)

    for catalog_name, catalog_info in catalog_info_dict.items():
        n_cat_src = len(catalog_info.catalog.table)
        n_job = int(math.ceil(float(n_cat_src) / n_src_per_job))

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             sourcekey=catalog_name,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys=comp.coordsys,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             mktime='none',
                             fullpath=True)

            for i_job in range(n_job):
                full_key = "%s_%02i" % (key, i_job)
                srcmin = i_job * n_src_per_job
                srcmax = min(srcmin + n_src_per_job, n_cat_src)
                outfile = NAME_FACTORY.srcmaps(**name_keys).replace('.fits',
                                                                    "_%02i.fits" % (i_job))
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[full_key] = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                             expcube=NAME_FACTORY.ltcube(**name_keys),
                                             irfs=NAME_FACTORY.irfs(**name_keys),
                                             bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                             outfile=outfile,
                                             logfile=logfile,
                                             srcmdl=catalog_info.srcmdl_name,
                                             evtype=comp.evtype,
                                             srcmin=srcmin,
                                             srcmax=srcmax)

    return job_configs
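# Worked example of the source chunking above (values are illustrative):
# with n_cat_src = 250 catalog sources and n_src_per_job = 100,
# n_job = ceil(250 / 100) = 3 jobs covering source index ranges
# [0, 100), [100, 200) and [200, 250), written to *_00.fits .. *_02.fits.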
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])

    datafile = args['data']
    if datafile is None or datafile == 'None':
        return job_configs
    NAME_FACTORY.update_base_dict(args['data'])
    outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')

    inputfiles = create_inputlist(args['ft1file'])
    num_files = len(inputfiles)

    for comp in components:
        zcut = "zmax%i" % comp.zmax

        mktimelist = copy.copy(comp.mktimefilters)
        if len(mktimelist) == 0:
            mktimelist.append('none')
        evtclasslist_keys = copy.copy(comp.evtclasses)
        if len(evtclasslist_keys) == 0:
            evtclasslist_keys.append('default')
            evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]
        else:
            evtclasslist_vals = copy.copy(evtclasslist_keys)

        for mktimekey in mktimelist:
            for evtclasskey, evtclassval in zip(evtclasslist_keys, evtclasslist_vals):
                fullkey = comp.make_key('%s_%s_{ebin_name}_%s_{evtype_name}' %
                                        (evtclassval, zcut, mktimekey))
                name_keys = dict(zcut=zcut,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime=mktimekey,
                                 evclass=evtclassval,
                                 fullpath=True)
                ccube_name = os.path.basename(NAME_FACTORY.ccube(**name_keys))
                outfile = os.path.join(outdir_base, ccube_name)
                infiles = make_input_file_list(outfile, num_files)
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[fullkey] = dict(args=infiles,
                                            output=outfile,
                                            logfile=logfile)

    return job_configs
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])

    datafile = args['data']
    if datafile is None or datafile == 'None':
        return input_config, job_configs, {}
    NAME_FACTORY.update_base_dict(args['data'])

    for comp in components:
        zcut = "zmax%i" % comp.zmax

        mktimelist = copy.copy(comp.mktimefilters)
        if len(mktimelist) == 0:
            mktimelist.append('none')
        evtclasslist_keys = copy.copy(comp.evtclasses)
        if len(evtclasslist_keys) == 0:
            evtclasslist_keys.append('default')
            evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]
        else:
            evtclasslist_vals = copy.copy(evtclasslist_keys)

        for mktimekey in mktimelist:
            for evtclasskey, evtclassval in zip(evtclasslist_keys, evtclasslist_vals):
                fullkey = comp.make_key('%s_%s_{ebin_name}_%s_{evtype_name}' %
                                        (evtclassval, zcut, mktimekey))
                name_keys = dict(zcut=zcut,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=args['coordsys'],
                                 irf_ver=args['irf_ver'],
                                 mktime=mktimekey,
                                 evclass=evtclassval,
                                 fullpath=True)
                outfile = NAME_FACTORY.bexpcube(**name_keys)
                cmap = NAME_FACTORY.ccube(**name_keys)
                infile = NAME_FACTORY.ltcube(**name_keys)
                job_configs[fullkey] = dict(cmap=cmap,
                                            infile=infile,
                                            outfile=outfile,
                                            irfs=NAME_FACTORY.irfs(**name_keys),
                                            hpx_order=min(comp.hpx_order, args['hpx_order_max']),
                                            evtype=comp.evtype,
                                            logfile=outfile.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_catalog_comp_dict(sources=args['sources'], basedir='.')
    comp_info_dict = ret_dict['comp_info_dict']

    for split_ver, split_dict in comp_info_dict.items():
        for source_key, source_dict in split_dict.items():
            print(split_ver, source_key, source_dict.model_type)
            full_key = "%s_%s" % (split_ver, source_key)
            if source_dict.model_type != 'CompositeSource':
                continue

            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = "%s_%s" % (source_key,
                                 comp.make_key('{ebin_name}_{evtype_name}'))
                name_keys = dict(zcut=zcut,
                                 sourcekey=full_key,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys='GAL',
                                 mktime='none',
                                 irf_ver=args['irf_ver'])
                nested_name_keys = dict(zcut=zcut,
                                        sourcekey=source_dict.catalog_info.catalog_name,
                                        ebin=comp.ebin_name,
                                        psftype=comp.evtype_name,
                                        coordsys='GAL',
                                        mktime='none',
                                        irf_ver=args['irf_ver'])
                outfile = NAME_FACTORY.srcmaps(**name_keys)
                job_configs[key] = dict(srcmaps=NAME_FACTORY.srcmaps(**nested_name_keys),
                                        expcube=NAME_FACTORY.ltcube(**name_keys),
                                        irfs=NAME_FACTORY.irfs(**name_keys),
                                        bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                        srcmdl=NAME_FACTORY.srcmdl_xml(**name_keys),
                                        merged=source_key,
                                        outfile=outfile,
                                        outxml=NAME_FACTORY.nested_srcmdl_xml(**name_keys),
                                        logfile=outfile.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_catalog_comp_dict(sources=args['library'],
                                      basedir=NAME_FACTORY.base_dict['basedir'])
    catalog_info_dict = ret_dict['catalog_info_dict']
    comp_info_dict = ret_dict['comp_info_dict']

    n_src_per_job = args['nsrc']

    if args['make_xml']:
        ConfigMaker_SrcmapsCatalog._make_xml_files(catalog_info_dict, comp_info_dict)

    for catalog_name, catalog_info in catalog_info_dict.items():
        n_cat_src = len(catalog_info.catalog.table)
        n_job = int(math.ceil(float(n_cat_src) / n_src_per_job))

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             sourcekey=catalog_name,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys=comp.coordsys,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             mktime='none',
                             fullpath=True)

            for i_job in range(n_job):
                full_key = "%s_%02i" % (key, i_job)
                srcmin = i_job * n_src_per_job
                srcmax = min(srcmin + n_src_per_job, n_cat_src)
                outfile = NAME_FACTORY.srcmaps(**name_keys).replace('.fits',
                                                                    "_%02i.fits" % (i_job))
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[full_key] = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                             expcube=NAME_FACTORY.ltcube(**name_keys),
                                             irfs=NAME_FACTORY.irfs(**name_keys),
                                             bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                             outfile=outfile,
                                             logfile=logfile,
                                             srcmdl=catalog_info.srcmdl_name,
                                             evtype=comp.evtype,
                                             srcmin=srcmin,
                                             srcmax=srcmax)

    return job_configs
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])

    datafile = args['data']
    if datafile is None or datafile == 'None':
        return job_configs
    NAME_FACTORY.update_base_dict(args['data'])
    outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')

    inputfiles = create_inputlist(args['ft1file'])
    num_files = len(inputfiles)

    for comp in components:
        zcut = "zmax%i" % comp.zmax

        mktimelist = copy.copy(comp.mktimefilters)
        if not mktimelist:
            mktimelist.append('none')
        evtclasslist_keys = copy.copy(comp.evtclasses)
        if not evtclasslist_keys:
            evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]
        else:
            evtclasslist_vals = copy.copy(evtclasslist_keys)

        for mktimekey in mktimelist:
            for evtclassval in evtclasslist_vals:
                fullkey = comp.make_key('%s_%s_{ebin_name}_%s_{evtype_name}' %
                                        (evtclassval, zcut, mktimekey))
                name_keys = dict(zcut=zcut,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime=mktimekey,
                                 evclass=evtclassval,
                                 fullpath=True)
                ccube_name = os.path.basename(NAME_FACTORY.ccube(**name_keys))
                outfile = os.path.join(outdir_base, ccube_name)
                infiles = _make_input_file_list(outfile, num_files)
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[fullkey] = dict(args=infiles,
                                            output=outfile,
                                            logfile=logfile)

    return job_configs
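# Minimal sketch of the args dict this hook expects, inferred from the keys
# accessed above; the file names and the 'link' instance are illustrative
# assumptions.
#
#   args = dict(comp='config/binning.yaml',
#               data='config/dataset_source.yaml',
#               ft1file='ft1_files.lst')
#   job_configs = link.build_job_configs(args)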
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_diffuse_comp_info_dict(components=components,
                                           library=args['library'],
                                           basedir=NAME_FACTORY.base_dict['basedir'])
    diffuse_comp_info_dict = ret_dict['comp_info_dict']

    for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
        diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            if diffuse_comp_info_value.components is None:
                sub_comp_info = diffuse_comp_info_value
            else:
                sub_comp_info = diffuse_comp_info_value.get_component_info(comp)
            full_key = "%s_%s" % (sub_comp_info.sourcekey, key)
            name_keys = dict(zcut=zcut,
                             sourcekey=sub_comp_info.sourcekey,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys=comp.coordsys,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             mktime='none',
                             fullpath=True)
            infile = NAME_FACTORY.srcmaps(**name_keys)
            outfile = infile.replace('.fits', '.png')
            logfile = make_nfs_path(outfile.replace('.png', '_png.log'))
            job_configs[full_key] = dict(input=infile,
                                         output=outfile,
                                         extension=sub_comp_info.source_name,
                                         zscale=args.get('zscale', 'log'),
                                         logfile=logfile)

    return job_configs
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_diffuse_comp_info_dict(components=components,
                                           diffuse=args['diffuse'],
                                           basedir=NAME_FACTORY.base_dict['basedir'])
    diffuse_comp_info_dict = ret_dict['comp_info_dict']

    for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
        diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            if diffuse_comp_info_value.components is None:
                sub_comp_info = diffuse_comp_info_value
            else:
                sub_comp_info = diffuse_comp_info_value.get_component_info(comp)
            name_keys = dict(zcut=zcut,
                             sourcekey=sub_comp_info.sourcekey,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             mktime='none',
                             coordsys='GAL',
                             irf_ver=args['irf_ver'],
                             fullpath=True)
            outfile = NAME_FACTORY.srcmaps(**name_keys)
            outfile_tokens = os.path.splitext(outfile)
            infile_regexp = "%s_*.fits*" % outfile_tokens[0]
            full_key = "%s_%s" % (sub_comp_info.sourcekey, key)
            job_configs[full_key] = dict(output=outfile,
                                         args=infile_regexp,
                                         hdu=sub_comp_info.source_name,
                                         logfile=outfile.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def run(self, argv):
    """Assemble the source map file for one binning component
    FIXME
    """
    args = self.parser.parse_args(argv)

    components = Component.build_from_yamlfile(args.comp)
    NAME_FACTORY.update_base_dict(args.data)

    model_dict = make_library(**args.__dict__)
    model_manager = model_dict['ModelManager']
    modelkeys = args.args
    data = args.data
    hpx_order = args.hpx_order
    for modelkey in modelkeys:
        model_manager.make_srcmap_manifest(modelkey, components, data)
        fermipy_config = model_manager.make_fermipy_config_yaml(modelkey,
                                                                components,
                                                                data,
                                                                hpxorder=hpx_order)
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_catalog_comp_dict(sources=args['sources'], basedir='.')
    comp_info_dict = ret_dict['comp_info_dict']

    for split_ver, split_dict in comp_info_dict.items():
        for source_key, source_dict in split_dict.items():
            print(split_ver, source_key, source_dict.model_type)
            full_key = "%s_%s" % (split_ver, source_key)
            if source_dict.model_type != 'CompositeSource':
                continue

            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = "%s_%s" % (source_key,
                                 comp.make_key('{ebin_name}_{evtype_name}'))
                name_keys = dict(zcut=zcut,
                                 sourcekey=full_key,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys='GAL',
                                 irf_ver=args['irf_ver'])
                nested_name_keys = dict(zcut=zcut,
                                        sourcekey=source_dict.catalog_info.catalog_name,
                                        ebin=comp.ebin_name,
                                        psftype=comp.evtype_name,
                                        coordsys='GAL',
                                        irf_ver=args['irf_ver'])
                outfile = NAME_FACTORY.srcmaps(**name_keys)
                job_configs[key] = dict(srcmaps=NAME_FACTORY.srcmaps(**nested_name_keys),
                                        expcube=NAME_FACTORY.ltcube(**name_keys),
                                        irfs=NAME_FACTORY.irfs(**name_keys),
                                        bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                        srcmdl=NAME_FACTORY.srcmdl_xml(**name_keys),
                                        merged=source_key,
                                        outfile=outfile,
                                        outxml=NAME_FACTORY.nested_srcmdl_xml(**name_keys),
                                        logfile=outfile.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_diffuse_comp_info_dict(components=components,
                                           diffuse=args['diffuse'],
                                           basedir=NAME_FACTORY.base_dict['basedir'])
    diffuse_comp_info_dict = ret_dict['comp_info_dict']

    for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
        diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            if diffuse_comp_info_value.components is None:
                sub_comp_info = diffuse_comp_info_value
            else:
                sub_comp_info = diffuse_comp_info_value.get_component_info(comp)
            full_key = "%s_%s" % (sub_comp_info.sourcekey, key)
            name_keys = dict(zcut=zcut,
                             sourcekey=sub_comp_info.sourcekey,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys='GAL',
                             irf_ver=args['irf_ver'],
                             fullpath=True)
            infile = NAME_FACTORY.srcmaps(**name_keys)
            outfile = infile.replace('.fits', '.png')
            job_configs[full_key] = dict(input=infile,
                                         output=outfile,
                                         extension=sub_comp_info.source_name,
                                         zscale=args.get('zscale', 'log'),
                                         logfile=outfile.replace('.png', '_png.log'))

    output_config = {}
    return input_config, job_configs, output_config
def run_analysis(self, argv):
    """Assemble the source map file for one binning component
    FIXME
    """
    args = self.parser.parse_args(argv)

    components = Component.build_from_yamlfile(args.comp)
    NAME_FACTORY.update_base_dict(args.data)

    model_dict = make_library(**args.__dict__)
    model_manager = model_dict['ModelManager']
    modelkeys = args.args
    data = args.data
    hpx_order = args.hpx_order
    for modelkey in modelkeys:
        model_manager.make_srcmap_manifest(modelkey, components, data)
        fermipy_config = model_manager.make_fermipy_config_yaml(modelkey,
                                                                components,
                                                                data,
                                                                hpxorder=hpx_order)
def run_analysis(self, argv):
    """ Build the manifest for all the models
    """
    args = self._parser.parse_args(argv)

    components = Component.build_from_yamlfile(args.comp)
    NAME_FACTORY.update_base_dict(args.data)

    model_dict = make_library(**args.__dict__)
    model_manager = model_dict['ModelManager']
    models = load_yaml(args.models)
    data = args.data
    hpx_order = args.hpx_order
    for modelkey in models:
        model_manager.make_srcmap_manifest(modelkey, components, data)
        model_manager.make_fermipy_config_yaml(modelkey,
                                               components,
                                               data,
                                               hpx_order=hpx_order,
                                               irf_ver=NAME_FACTORY.irf_ver())
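# Hedged invocation sketch for run_analysis: it parses an argv list, so the
# flags below are assumptions inferred from the attributes used
# (comp, data, models, hpx_order), not the documented CLI.
#
#   link.run_analysis(['--comp', 'config/binning.yaml',
#                      '--data', 'config/dataset_source.yaml',
#                      '--models', 'config/models.yaml',
#                      '--hpx_order', '9'])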
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_catalog_comp_dict(sources=args['sources'], basedir='.')
    catalog_info_dict = ret_dict['catalog_info_dict']
    comp_info_dict = ret_dict['comp_info_dict']

    if args['make_xml']:
        ConfigMaker_SrcmapsCatalog._make_xml_files(catalog_info_dict, comp_info_dict)

    for catalog_name, catalog_info in catalog_info_dict.items():
        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             sourcekey=catalog_name,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys='GAL',
                             irf_ver=args['irf_ver'],
                             mktime='none',
                             fullpath=True)
            outfile = NAME_FACTORY.srcmaps(**name_keys)
            logfile = outfile.replace('.fits', '.log')
            job_configs[key] = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                    expcube=NAME_FACTORY.ltcube(**name_keys),
                                    irfs=NAME_FACTORY.irfs(**name_keys),
                                    bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                    outfile=outfile,
                                    logfile=logfile,
                                    srcmdl=catalog_info.srcmdl_name,
                                    evtype=comp.evtype)

    output_config = {}
    return input_config, job_configs, output_config
def test_binning():
    basedir = os.path.join(PACKAGE_ROOT, 'diffuse', 'tests', 'data')
    the_yaml = os.path.join(basedir, 'binning.yaml')
    components = Component.build_from_yamlfile(the_yaml)
    assert len(components) == 10

    # spot check first and last components
    assert components[0].log_emin == 1.5
    assert components[0].log_emax == 2.0
    assert components[0].enumbins == 4
    assert components[0].hpx_order == 5
    assert components[0].zmax == 80

    assert components[-1].log_emin == 3.0
    assert components[-1].log_emax == 6.0
    assert components[-1].enumbins == 12
    assert components[-1].hpx_order == 9
    assert components[-1].zmax == 105
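# One plausible binning.yaml entry implied by the assertions above; the key
# names are assumptions based on the Component attributes being checked:
#
#   E0_PSF3:
#       log_emin  : 1.5
#       log_emax  : 2.0
#       enumbins  : 4
#       hpx_order : 5
#       zmax      : 80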
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_catalog_comp_dict(sources=args['sources'], basedir='.')
    catalog_info_dict = ret_dict['catalog_info_dict']
    comp_info_dict = ret_dict['comp_info_dict']

    if args['make_xml']:
        ConfigMaker_SrcmapsCatalog._make_xml_files(catalog_info_dict, comp_info_dict)

    for catalog_name, catalog_info in catalog_info_dict.items():
        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             sourcekey=catalog_name,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys='GAL',
                             irf_ver=args['irf_ver'],
                             fullpath=True)
            outfile = NAME_FACTORY.srcmaps(**name_keys)
            logfile = outfile.replace('.fits', '.log')
            job_configs[key] = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                    expcube=NAME_FACTORY.ltcube(**name_keys),
                                    irfs=NAME_FACTORY.irfs(**name_keys),
                                    bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                    outfile=outfile,
                                    logfile=logfile,
                                    srcmdl=catalog_info.srcmdl_name,
                                    evtype=comp.evtype)

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])
    NAME_FACTORY_CLEAN.update_base_dict(args['data'])
    NAME_FACTORY_DIRTY.update_base_dict(args['data'])

    NAME_FACTORY_CLEAN.base_dict['evclass'] = args['clean']
    NAME_FACTORY_DIRTY.base_dict['evclass'] = args['dirty']

    for comp in components:
        zcut = "zmax%i" % comp.zmax
        key = comp.make_key('{ebin_name}_{evtype_name}')
        name_keys = dict(zcut=zcut,
                         ebin=comp.ebin_name,
                         psftype=comp.evtype_name,
                         coordsys=comp.coordsys,
                         irf_ver=NAME_FACTORY.irf_ver(),
                         mktime=args['mktimefilter'],
                         fullpath=True)
        outfile = NAME_FACTORY.residual_cr(**name_keys)
        if args['hpx_order']:
            hpx_order = min(comp.hpx_order, args['hpx_order'])
        else:
            hpx_order = comp.hpx_order
        job_configs[key] = dict(bexpcube_dirty=NAME_FACTORY_DIRTY.bexpcube(**name_keys),
                                ccube_dirty=NAME_FACTORY_DIRTY.ccube(**name_keys),
                                bexpcube_clean=NAME_FACTORY_CLEAN.bexpcube(**name_keys),
                                ccube_clean=NAME_FACTORY_CLEAN.ccube(**name_keys),
                                outfile=outfile,
                                hpx_order=hpx_order,
                                full_output=args['full_output'],
                                logfile=make_nfs_path(outfile.replace('.fits', '.log')))

    return job_configs
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY_DIRTY.update_base_dict(args['data_dirty'])
    NAME_FACTORY_CLEAN.update_base_dict(args['data_clean'])

    for comp in components:
        zcut = "zmax%i" % comp.zmax
        key = comp.make_key('{ebin_name}_{evtype_name}')
        name_keys = dict(zcut=zcut,
                         ebin=comp.ebin_name,
                         psftype=comp.evtype_name,
                         coordsys=args['coordsys'],
                         irf_ver=args['irf_ver'],
                         fullpath=True)
        outfile = NAME_FACTORY_DIRTY.residual_cr(**name_keys)
        if args['hpx_order']:
            hpx_order = min(comp.hpx_order, args['hpx_order'])
        else:
            hpx_order = comp.hpx_order
        job_configs[key] = dict(bexpcube_dirty=NAME_FACTORY_DIRTY.bexpcube(**name_keys),
                                ccube_dirty=NAME_FACTORY_DIRTY.ccube(**name_keys).replace('.fits', '.fits.gz'),
                                bexpcube_clean=NAME_FACTORY_CLEAN.bexpcube(**name_keys),
                                ccube_clean=NAME_FACTORY_CLEAN.ccube(**name_keys).replace('.fits', '.fits.gz'),
                                outfile=outfile,
                                hpx_order=hpx_order,
                                logfile=outfile.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = dict(comp=args['comp'],
                        data=args['data'],
                        sources=args['sources'],
                        diffuse=args['diffuse'],
                        hpx_order=args['hpx_order'],
                        args=args['args'],
                        logfile=os.path.join('analysis', 'init.log'))
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    model_list = args['args']
    if model_list is None:
        model_list = []

    for modelkey in model_list:
        manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                'srcmap_manifest_%s.yaml' % modelkey)
        for comp in components:
            key = comp.make_key('{ebin_name}_{evtype_name}')
            outfile = NAME_FACTORY.merged_srcmaps(modelkey=modelkey,
                                                  component=key,
                                                  coordsys='GAL',
                                                  mktime='none',
                                                  irf_ver=args['irf_ver'])
            logfile = outfile.replace('.fits', '.log')
            job_configs[key] = dict(input=manifest,
                                    comp=key,
                                    logfile=logfile)

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_diffuse_comp_info_dict(components=components,
                                           diffuse=args['diffuse'],
                                           basedir='.')
    diffuse_comp_info_dict = ret_dict['comp_info_dict']

    if args['make_xml']:
        ConfigMaker_SrcmapPartial._make_xml_files(diffuse_comp_info_dict)

    for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
        diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            if diffuse_comp_info_value.components is None:
                sub_comp_info = diffuse_comp_info_value
            else:
                sub_comp_info = diffuse_comp_info_value.get_component_info(comp)
            name_keys = dict(zcut=zcut,
                             sourcekey=sub_comp_info.sourcekey,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys='GAL',
                             irf_ver=args['irf_ver'])

            kmin = 0
            kmax = comp.enumbins + 1
            outfile_base = NAME_FACTORY.srcmaps(**name_keys)
            kstep = HPX_ORDER_TO_KSTEP[comp.hpx_order]
            base_dict = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                             expcube=NAME_FACTORY.ltcube(**name_keys),
                             irfs=NAME_FACTORY.irfs(**name_keys),
                             bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                             srcmdl=sub_comp_info.srcmdl_name,
                             source=sub_comp_info.source_name,
                             evtype=comp.evtype)
            if kstep < 0:
                kstep = kmax
            for k in range(kmin, kmax, kstep):
                full_key = "%s_%s_%02i" % (diffuse_comp_info_key, key, k)
                khi = min(kmax, k + kstep)
                full_dict = base_dict.copy()
                full_dict.update(dict(outfile=outfile_base.replace('.fits', '_%02i.fits' % k),
                                      kmin=k,
                                      kmax=khi,
                                      logfile=outfile_base.replace('.fits', '_%02i.log' % k)))
                job_configs[full_key] = full_dict

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_diffuse_comp_info_dict(components=components,
                                           library=args['library'],
                                           basedir='.')
    diffuse_comp_info_dict = ret_dict['comp_info_dict']

    if args['make_xml']:
        SrcmapsDiffuse_SG._make_xml_files(diffuse_comp_info_dict)

    for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
        diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]
        no_psf = diffuse_comp_info_value.no_psf

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            if diffuse_comp_info_value.components is None:
                sub_comp_info = diffuse_comp_info_value
            else:
                sub_comp_info = diffuse_comp_info_value.get_component_info(comp)
            name_keys = dict(zcut=zcut,
                             sourcekey=sub_comp_info.sourcekey,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             mktime='none',
                             coordsys=comp.coordsys,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             fullpath=True)

            kmin = 0
            kmax = comp.enumbins + 1
            outfile_base = NAME_FACTORY.srcmaps(**name_keys)
            kstep = HPX_ORDER_TO_KSTEP[comp.hpx_order]
            base_dict = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                             expcube=NAME_FACTORY.ltcube(**name_keys),
                             irfs=NAME_FACTORY.irfs(**name_keys),
                             bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                             srcmdl=sub_comp_info.srcmdl_name,
                             source=sub_comp_info.source_name,
                             no_psf=no_psf,
                             evtype=comp.evtype)
            if kstep < 0:
                kstep = kmax
            for k in range(kmin, kmax, kstep):
                full_key = "%s_%s_%02i" % (diffuse_comp_info_key, key, k)
                khi = min(kmax, k + kstep)
                full_dict = base_dict.copy()
                outfile = outfile_base.replace('.fits', '_%02i.fits' % k)
                logfile = make_nfs_path(outfile_base.replace('.fits', '_%02i.log' % k))
                full_dict.update(dict(outfile=outfile,
                                      kmin=k,
                                      kmax=khi,
                                      logfile=logfile))
                job_configs[full_key] = full_dict

    return job_configs
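# The kstep logic above decides how many energy planes go into each srcmaps
# job: a negative entry in HPX_ORDER_TO_KSTEP means "all planes in one job",
# while small positive values split high-resolution maps across many jobs.
# A hedged sketch of such a mapping (the real one lives elsewhere in the
# module; these values are illustrative assumptions):
#
#   HPX_ORDER_TO_KSTEP = {5: -1, 6: -1, 7: -1, 8: 2, 9: 1}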