def test_catalog_src_manager():
    """Spot-check the catalog component dict built from catalog_components.yaml."""
    data_dir = os.path.join(PACKAGE_ROOT, 'diffuse', 'tests', 'data')
    src_yaml = os.path.join(data_dir, 'catalog_components.yaml')
    result = make_catalog_comp_dict(sources=src_yaml, basedir=data_dir)

    # Test the dictionary of catalogs
    cat_info = result['catalog_info_dict']['3FGL']
    assert cat_info.catalog_name == '3FGL'
    assert cat_info.catalog_type == '3FGL'
    assert len(cat_info.catalog.table) == 63
    assert len(cat_info.roi_model.sources) == 63

    # Test the split dictionary
    split_info = result['comp_info_dict']['3FGL_v00']
    assert len(split_info.keys()) == 3
    assert len(split_info['extended'].roi_model.sources) == 2
    assert len(split_info['faint'].roi_model.sources) == 60
    assert len(split_info['remain'].roi_model.sources) == 1

    # Test the CatalogSourceManager
    manager = result['CatalogSourceManager']
    assert len(manager.splitkeys()) == 1
    assert len(manager.catalogs()) == 1
def test_catalog_src_manager():
    """Spot-check the catalog component dict built from models/library.yaml."""
    data_dir = os.path.join(PACKAGE_ROOT, 'diffuse', 'tests', 'data')
    library_yaml = os.path.join(data_dir, 'models', 'library.yaml')
    result = make_catalog_comp_dict(library=library_yaml, basedir=data_dir)

    # Test the dictionary of catalogs
    cat_info = result['catalog_info_dict']['3FGL']
    assert cat_info.catalog_name == '3FGL'
    assert cat_info.catalog_type == '3FGL'
    assert len(cat_info.catalog.table) == 63
    assert len(cat_info.roi_model.sources) == 63

    # Test the split dictionary
    split_info = result['comp_info_dict']['3FGL_v00']
    assert len(split_info.keys()) == 3
    assert len(split_info['extended'].roi_model.sources) == 2
    assert len(split_info['faint'].roi_model.sources) == 60
    assert len(split_info['remain'].roi_model.sources) == 1

    # Test the CatalogSourceManager
    manager = result['CatalogSourceManager']
    assert len(manager.splitkeys()) == 1
    assert len(manager.catalogs()) == 1
def build_job_configs(self, args):
    """Hook to build job configurations.

    Builds one job per (catalog, component) pair; each job collects the
    per-chunk srcmap files matching a glob pattern into a single output file.
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    catalog_dict = make_catalog_comp_dict(
        library=args['library'],
        basedir=NAME_FACTORY.base_dict['basedir'])['catalog_info_dict']

    for cat_name in catalog_dict:
        for comp in components:
            comp_key = comp.make_key('{ebin_name}_{evtype_name}')
            keywords = dict(zcut="zmax%i" % comp.zmax,
                            sourcekey=cat_name,
                            ebin=comp.ebin_name,
                            psftype=comp.evtype_name,
                            coordsys=comp.coordsys,
                            irf_ver=NAME_FACTORY.irf_ver(),
                            mktime='none',
                            fullpath=True)
            merged_file = NAME_FACTORY.srcmaps(**keywords)
            # Glob pattern matching the per-chunk inputs for the merged file
            stem = os.path.splitext(merged_file)[0]
            chunk_pattern = "%s_*.fits" % stem
            log_path = make_nfs_path(merged_file.replace('.fits', '.log'))
            job_configs[comp_key] = dict(output=merged_file,
                                         args=chunk_pattern,
                                         logfile=log_path)

    return job_configs
def make_library(self, diffuse_yaml, catalog_yaml, binning_yaml):
    """ Build up the library of all the components

    Parameters
    ----------

    diffuse_yaml : str
        Name of the yaml file with the library of diffuse component definitions
    catalog_yaml : str
        Name of the yaml file with the library of catalog split definitions
    binning_yaml : str
        Name of the yaml file with the binning definitions
    """
    ret_dict = {}
    components_dict = Component.build_from_yamlfile(binning_yaml)

    diffuse_ret_dict = make_diffuse_comp_info_dict(GalpropMapManager=self._gmm,
                                                   DiffuseModelManager=self._dmm,
                                                   library=diffuse_yaml,
                                                   components=components_dict)
    catalog_ret_dict = make_catalog_comp_dict(library=catalog_yaml,
                                              CatalogSourceManager=self._csm)
    # Merge the diffuse and catalog component info into one library
    ret_dict.update(diffuse_ret_dict['comp_info_dict'])
    ret_dict.update(catalog_ret_dict['comp_info_dict'])
    self._library.update(ret_dict)
    return ret_dict
def make_library(self, diffuse_yaml, catalog_yaml, binning_yaml):
    """ Build up the library of all the components

    Parameters
    ----------

    diffuse_yaml : str
        Name of the yaml file with the library of diffuse component definitions
    catalog_yaml : str
        Name of the yaml file with the library of catalog split definitions
    binning_yaml : str
        Name of the yaml file with the binning definitions
    """
    components_dict = Component.build_from_yamlfile(binning_yaml)

    diffuse_info = make_diffuse_comp_info_dict(GalpropMapManager=self._gmm,
                                               DiffuseModelManager=self._dmm,
                                               diffuse=diffuse_yaml,
                                               components=components_dict)
    catalog_info = make_catalog_comp_dict(sources=catalog_yaml,
                                          CatalogSourceManager=self._csm)

    # Combine diffuse and catalog component info into a single library dict
    combined = {}
    combined.update(diffuse_info['comp_info_dict'])
    combined.update(catalog_info['comp_info_dict'])
    self._library.update(combined)
    return combined
def build_job_configs(self, args):
    """Hook to build job configurations.

    Splits each catalog into chunks of at most args['nsrc'] sources and
    builds one srcmap job per (component, chunk).  The parsed catalog
    dictionary is cached on the instance, keyed by the library file path.
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    # Re-parse the library only when it changed since the last call
    if self._comp_dict is None or self._comp_dict_file != args['library']:
        self._comp_dict_file = args['library']
        self._comp_dict = make_catalog_comp_dict(
            sources=self._comp_dict_file,
            basedir=NAME_FACTORY.base_dict['basedir'])
    else:
        print("Using cached catalog dict from %s" % args['library'])

    catalog_info_dict = self._comp_dict['catalog_info_dict']
    comp_info_dict = self._comp_dict['comp_info_dict']
    chunk_size = args['nsrc']

    if args['make_xml']:
        SrcmapsCatalog_SG._make_xml_files(catalog_info_dict, comp_info_dict)

    for cat_name, cat_info in catalog_info_dict.items():
        total_src = len(cat_info.catalog.table)
        num_chunks = int(math.ceil(float(total_src) / chunk_size))
        for comp in components:
            comp_key = comp.make_key('{ebin_name}_{evtype_name}')
            keywords = dict(zcut="zmax%i" % comp.zmax,
                            sourcekey=cat_name,
                            ebin=comp.ebin_name,
                            psftype=comp.evtype_name,
                            coordsys=comp.coordsys,
                            irf_ver=NAME_FACTORY.irf_ver(),
                            mktime='none',
                            fullpath=True)
            for chunk in range(num_chunks):
                chunk_key = "%s_%02i" % (comp_key, chunk)
                first = chunk * chunk_size
                last = min(first + chunk_size, total_src)
                chunk_file = NAME_FACTORY.srcmaps(
                    **keywords).replace('.fits', "_%02i.fits" % (chunk))
                log_path = make_nfs_path(chunk_file.replace('.fits', '.log'))
                job_configs[chunk_key] = dict(cmap=NAME_FACTORY.ccube(**keywords),
                                              expcube=NAME_FACTORY.ltcube(**keywords),
                                              irfs=NAME_FACTORY.irfs(**keywords),
                                              bexpmap=NAME_FACTORY.bexpcube(**keywords),
                                              outfile=chunk_file,
                                              logfile=log_path,
                                              srcmdl=cat_info.srcmdl_name,
                                              evtype=comp.evtype,
                                              srcmin=first,
                                              srcmax=last)

    return job_configs
def build_job_configs(self, args):
    """Hook to build job configurations.

    Builds one merge job per (CompositeSource split, component); sources
    that are not composites are skipped.  Returns the usual
    (input_config, job_configs, output_config) triple.
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    comp_info_dict = make_catalog_comp_dict(sources=args['sources'],
                                            basedir='.')['comp_info_dict']

    for split_ver, split_dict in comp_info_dict.items():
        for source_key, source_dict in split_dict.items():
            print(split_ver, source_key, source_dict.model_type)
            full_key = "%s_%s" % (split_ver, source_key)
            # Only composite sources get a merge job
            if source_dict.model_type != 'CompositeSource':
                continue
            for comp in components:
                zcut = "zmax%i" % comp.zmax
                job_key = "%s_%s" % (
                    source_key, comp.make_key('{ebin_name}_{evtype_name}'))
                keywords = dict(zcut=zcut,
                                sourcekey=full_key,
                                ebin=comp.ebin_name,
                                psftype=comp.evtype_name,
                                coordsys='GAL',
                                mktime='none',
                                irf_ver=args['irf_ver'])
                # Keys for the nested (per-catalog) srcmap inputs
                nested_keywords = dict(
                    zcut=zcut,
                    sourcekey=source_dict.catalog_info.catalog_name,
                    ebin=comp.ebin_name,
                    psftype=comp.evtype_name,
                    coordsys='GAL',
                    mktime='none',
                    irf_ver=args['irf_ver'])
                merged_file = NAME_FACTORY.srcmaps(**keywords)
                job_configs[job_key] = dict(
                    srcmaps=NAME_FACTORY.srcmaps(**nested_keywords),
                    expcube=NAME_FACTORY.ltcube(**keywords),
                    irfs=NAME_FACTORY.irfs(**keywords),
                    bexpmap=NAME_FACTORY.bexpcube(**keywords),
                    srcmdl=NAME_FACTORY.srcmdl_xml(**keywords),
                    merged=source_key,
                    outfile=merged_file,
                    outxml=NAME_FACTORY.nested_srcmdl_xml(**keywords),
                    logfile=merged_file.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations.

    Splits each catalog into chunks of at most args['nsrc'] sources and
    builds one srcmap job per (component, chunk).
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    result = make_catalog_comp_dict(sources=args['library'],
                                    basedir=NAME_FACTORY.base_dict['basedir'])
    catalog_info_dict = result['catalog_info_dict']
    comp_info_dict = result['comp_info_dict']
    chunk_size = args['nsrc']

    if args['make_xml']:
        ConfigMaker_SrcmapsCatalog._make_xml_files(catalog_info_dict,
                                                   comp_info_dict)

    for cat_name, cat_info in catalog_info_dict.items():
        total_src = len(cat_info.catalog.table)
        num_chunks = int(math.ceil(float(total_src) / chunk_size))
        for comp in components:
            comp_key = comp.make_key('{ebin_name}_{evtype_name}')
            keywords = dict(zcut="zmax%i" % comp.zmax,
                            sourcekey=cat_name,
                            ebin=comp.ebin_name,
                            psftype=comp.evtype_name,
                            coordsys=comp.coordsys,
                            irf_ver=NAME_FACTORY.irf_ver(),
                            mktime='none',
                            fullpath=True)
            for chunk in range(num_chunks):
                chunk_key = "%s_%02i" % (comp_key, chunk)
                first = chunk * chunk_size
                last = min(first + chunk_size, total_src)
                chunk_file = NAME_FACTORY.srcmaps(
                    **keywords).replace('.fits', "_%02i.fits" % (chunk))
                log_path = make_nfs_path(chunk_file.replace('.fits', '.log'))
                job_configs[chunk_key] = dict(cmap=NAME_FACTORY.ccube(**keywords),
                                              expcube=NAME_FACTORY.ltcube(**keywords),
                                              irfs=NAME_FACTORY.irfs(**keywords),
                                              bexpmap=NAME_FACTORY.bexpcube(**keywords),
                                              outfile=chunk_file,
                                              logfile=log_path,
                                              srcmdl=cat_info.srcmdl_name,
                                              evtype=comp.evtype,
                                              srcmin=first,
                                              srcmax=last)

    return job_configs
def build_job_configs(self, args):
    """Hook to build job configurations.

    Builds one merge job per (CompositeSource split, component); sources
    that are not composites are skipped.  Returns the usual
    (input_config, job_configs, output_config) triple.
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    comp_info_dict = make_catalog_comp_dict(sources=args['sources'],
                                            basedir='.')['comp_info_dict']

    for split_ver, split_dict in comp_info_dict.items():
        for source_key, source_dict in split_dict.items():
            print(split_ver, source_key, source_dict.model_type)
            full_key = "%s_%s" % (split_ver, source_key)
            # Only composite sources get a merge job
            if source_dict.model_type != 'CompositeSource':
                continue
            for comp in components:
                zcut = "zmax%i" % comp.zmax
                job_key = "%s_%s" % (
                    source_key, comp.make_key('{ebin_name}_{evtype_name}'))
                keywords = dict(zcut=zcut,
                                sourcekey=full_key,
                                ebin=comp.ebin_name,
                                psftype=comp.evtype_name,
                                coordsys='GAL',
                                irf_ver=args['irf_ver'])
                # Keys for the nested (per-catalog) srcmap inputs
                nested_keywords = dict(
                    zcut=zcut,
                    sourcekey=source_dict.catalog_info.catalog_name,
                    ebin=comp.ebin_name,
                    psftype=comp.evtype_name,
                    coordsys='GAL',
                    irf_ver=args['irf_ver'])
                merged_file = NAME_FACTORY.srcmaps(**keywords)
                job_configs[job_key] = dict(
                    srcmaps=NAME_FACTORY.srcmaps(**nested_keywords),
                    expcube=NAME_FACTORY.ltcube(**keywords),
                    irfs=NAME_FACTORY.irfs(**keywords),
                    bexpmap=NAME_FACTORY.bexpcube(**keywords),
                    srcmdl=NAME_FACTORY.srcmdl_xml(**keywords),
                    merged=source_key,
                    outfile=merged_file,
                    outxml=NAME_FACTORY.nested_srcmdl_xml(**keywords),
                    logfile=merged_file.replace('.fits', '.log'))

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations.

    Builds one srcmap job per (catalog, component) pair.  Returns the usual
    (input_config, job_configs, output_config) triple.
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    result = make_catalog_comp_dict(sources=args['sources'], basedir='.')
    catalog_info_dict = result['catalog_info_dict']
    comp_info_dict = result['comp_info_dict']

    if args['make_xml']:
        ConfigMaker_SrcmapsCatalog._make_xml_files(catalog_info_dict,
                                                   comp_info_dict)

    for cat_name, cat_info in catalog_info_dict.items():
        for comp in components:
            comp_key = comp.make_key('{ebin_name}_{evtype_name}')
            keywords = dict(zcut="zmax%i" % comp.zmax,
                            sourcekey=cat_name,
                            ebin=comp.ebin_name,
                            psftype=comp.evtype_name,
                            coordsys='GAL',
                            irf_ver=args['irf_ver'],
                            mktime='none',
                            fullpath=True)
            srcmap_file = NAME_FACTORY.srcmaps(**keywords)
            log_path = srcmap_file.replace('.fits', '.log')
            job_configs[comp_key] = dict(
                cmap=NAME_FACTORY.ccube(**keywords),
                expcube=NAME_FACTORY.ltcube(**keywords),
                irfs=NAME_FACTORY.irfs(**keywords),
                bexpmap=NAME_FACTORY.bexpcube(**keywords),
                outfile=srcmap_file,
                logfile=log_path,
                srcmdl=cat_info.srcmdl_name,
                evtype=comp.evtype)

    output_config = {}
    return input_config, job_configs, output_config
def build_job_configs(self, args):
    """Hook to build job configurations.

    Builds one srcmap job per (catalog, component) pair.  Returns the usual
    (input_config, job_configs, output_config) triple.
    """
    input_config = {}
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    result = make_catalog_comp_dict(sources=args['sources'], basedir='.')
    catalog_info_dict = result['catalog_info_dict']
    comp_info_dict = result['comp_info_dict']

    if args['make_xml']:
        ConfigMaker_SrcmapsCatalog._make_xml_files(catalog_info_dict,
                                                   comp_info_dict)

    for cat_name, cat_info in catalog_info_dict.items():
        for comp in components:
            comp_key = comp.make_key('{ebin_name}_{evtype_name}')
            keywords = dict(zcut="zmax%i" % comp.zmax,
                            sourcekey=cat_name,
                            ebin=comp.ebin_name,
                            psftype=comp.evtype_name,
                            coordsys='GAL',
                            irf_ver=args['irf_ver'],
                            fullpath=True)
            srcmap_file = NAME_FACTORY.srcmaps(**keywords)
            log_path = srcmap_file.replace('.fits', '.log')
            job_configs[comp_key] = dict(
                cmap=NAME_FACTORY.ccube(**keywords),
                expcube=NAME_FACTORY.ltcube(**keywords),
                irfs=NAME_FACTORY.irfs(**keywords),
                bexpmap=NAME_FACTORY.bexpcube(**keywords),
                outfile=srcmap_file,
                logfile=log_path,
                srcmdl=cat_info.srcmdl_name,
                evtype=comp.evtype)

    output_config = {}
    return input_config, job_configs, output_config