Example 1
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        NAME_FACTORY.update_base_dict(args['data'])

        inputfiles = create_inputlist(args['ft1file'])
        outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')

        nfiles = len(inputfiles)
        for idx, infile in enumerate(inputfiles):
            key = "%06i" % idx
            output_dir = os.path.join(outdir_base, key)
            try:
                os.mkdir(output_dir)
            except OSError:
                pass
            logfile = make_nfs_path(os.path.join(output_dir, 'scatter_%s.log' % key))
            job_configs[key] = dict(ft1file=infile,
                                    comp=args['comp'],
                                    hpx_order_max=args['hpx_order_max'],
                                    outdir=outdir_base,
                                    outkey=key,
                                    logfile=logfile,
                                    pfiles=output_dir)

        return job_configs
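
Example 1 scatters one job per input FT1 file: each job gets a zero-padded key, its own output directory (also passed as the job's pfiles area), and a log path. Below is a minimal, self-contained sketch of that pattern with the fermipy-specific pieces (NAME_FACTORY, create_inputlist, make_nfs_path) stripped out; the function name and arguments are illustrative only.

import os

def scatter_job_configs(input_files, outdir_base):
    """Build one job config per input file, keyed by a zero-padded index."""
    job_configs = {}
    for idx, infile in enumerate(input_files):
        key = "%06i" % idx
        output_dir = os.path.join(outdir_base, key)
        # Idempotent directory creation; replaces the try/except OSError
        # guard around os.mkdir used in the example above.
        os.makedirs(output_dir, exist_ok=True)
        job_configs[key] = dict(ft1file=infile,
                                outdir=outdir_base,
                                outkey=key,
                                logfile=os.path.join(output_dir, 'scatter_%s.log' % key),
                                pfiles=output_dir)
    return job_configs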
Example 2
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        models = load_yaml(args['models'])

        for modelkey, modelpath in models.items():
            manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                    'srcmap_manifest_%s.yaml' % modelkey)
            for comp in components:
                key = comp.make_key('{ebin_name}_{evtype_name}')
                fullkey = "%s_%s" % (modelkey, key)
                outfile = NAME_FACTORY.merged_srcmaps(
                    modelkey=modelkey,
                    component=key,
                    coordsys=comp.coordsys,
                    mktime='none',
                    irf_ver=NAME_FACTORY.irf_ver())
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[fullkey] = dict(input=manifest,
                                            compname=key,
                                            logfile=logfile)
        return job_configs
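
Example 2 instead builds one job per (model, component) pair, with a composite '<model>_<component>' key pointing at that model's srcmap manifest. A reduced sketch of the cross-product pattern, using plain strings in place of the YAML-driven Component objects:

import os

def cross_product_configs(model_keys, component_keys):
    """One job per (model, component) pair, keyed '<model>_<component>'."""
    job_configs = {}
    for modelkey in model_keys:
        manifest = os.path.join('analysis', 'model_%s' % modelkey,
                                'srcmap_manifest_%s.yaml' % modelkey)
        for compkey in component_keys:
            fullkey = "%s_%s" % (modelkey, compkey)
            job_configs[fullkey] = dict(input=manifest, compname=compkey)
    return job_configs

# cross_product_configs(['baseline'], ['E2_PSF3', 'E3_PSF3'])
# -> keys 'baseline_E2_PSF3' and 'baseline_E3_PSF3'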
Example 3
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])

        datafile = args['data']
        if datafile is None or datafile == 'None':
            return job_configs
        NAME_FACTORY.update_base_dict(args['data'])
        outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'],
                                   'counts_cubes')

        inputfiles = create_inputlist(args['ft1file'])
        num_files = len(inputfiles)

        for comp in components:
            zcut = "zmax%i" % comp.zmax

            mktimelist = copy.copy(comp.mktimefilters)
            if len(mktimelist) == 0:
                mktimelist.append('none')
            evtclasslist_keys = copy.copy(comp.evtclasses)
            if len(evtclasslist_keys) == 0:
                evtclasslist_keys.append('default')
                evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]
            else:
                evtclasslist_vals = copy.copy(evtclasslist_keys)

            for mktimekey in mktimelist:
                for evtclasskey, evtclassval in zip(evtclasslist_keys,
                                                    evtclasslist_vals):
                    fullkey = comp.make_key(
                        '%s_%s_{ebin_name}_%s_{evtype_name}' %
                        (evtclassval, zcut, mktimekey))

                    name_keys = dict(zcut=zcut,
                                     ebin=comp.ebin_name,
                                     psftype=comp.evtype_name,
                                     coordsys=comp.coordsys,
                                     irf_ver=NAME_FACTORY.irf_ver(),
                                     mktime=mktimekey,
                                     evclass=evtclassval,
                                     fullpath=True)

                    ccube_name = os.path.basename(
                        NAME_FACTORY.ccube(**name_keys))
                    outfile = os.path.join(outdir_base, ccube_name)
                    infiles = make_input_file_list(outfile, num_files)
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[fullkey] = dict(args=infiles,
                                                output=outfile,
                                                logfile=logfile)

        return job_configs
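
Examples 3 and 4 share a small idiom: if a component defines no mktime filters or event classes, a single default entry is substituted, and the event-class keys are then paired with their values via zip (the two lists are identical unless the default branch fired). A standalone sketch; the default event class here is an assumed placeholder, not a value taken from the pipeline configuration:

def expand_selections(mktimefilters, evtclasses, default_evclass='EVCLASS_DEFAULT'):
    """Yield (mktime, evtclass_value) pairs, falling back to defaults."""
    mktimelist = list(mktimefilters) or ['none']
    if evtclasses:
        keys, vals = list(evtclasses), list(evtclasses)
    else:
        keys, vals = ['default'], [default_evclass]
    for mktime in mktimelist:
        for _key, val in zip(keys, vals):
            yield mktime, val

# list(expand_selections([], []))            -> [('none', 'EVCLASS_DEFAULT')]
# list(expand_selections(['nosm'], ['src'])) -> [('nosm', 'src')]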
Example 4
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        datafile = args['data']
        if datafile is None or datafile == 'None':
            return job_configs
        NAME_FACTORY.update_base_dict(args['data'])

        for comp in components:
            zcut = "zmax%i" % comp.zmax

            mktimelist = copy.copy(comp.mktimefilters)
            if len(mktimelist) == 0:
                mktimelist.append('none')
            evtclasslist_keys = copy.copy(comp.evtclasses)
            if len(evtclasslist_keys) == 0:
                evtclasslist_keys.append('default')
                evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]
            else:
                evtclasslist_vals = copy.copy(evtclasslist_keys)

            for mktimekey in mktimelist:
                for evtclasskey, evtclassval in zip(evtclasslist_keys,
                                                    evtclasslist_vals):
                    fullkey = comp.make_key(
                        '%s_%s_{ebin_name}_%s_{evtype_name}' %
                        (evtclassval, zcut, mktimekey))
                    name_keys = dict(zcut=zcut,
                                     ebin=comp.ebin_name,
                                     psftype=comp.evtype_name,
                                     coordsys=comp.coordsys,
                                     irf_ver=NAME_FACTORY.irf_ver(),
                                     mktime=mktimekey,
                                     evclass=evtclassval,
                                     fullpath=True)

                    outfile = NAME_FACTORY.bexpcube(**name_keys)
                    cmap = NAME_FACTORY.ccube(**name_keys)
                    infile = NAME_FACTORY.ltcube(**name_keys)
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[fullkey] = dict(
                        cmap=cmap,
                        infile=infile,
                        outfile=outfile,
                        irfs=NAME_FACTORY.irfs(**name_keys),
                        hpx_order=min(comp.hpx_order, args['hpx_order_max']),
                        evtype=comp.evtype,
                        logfile=logfile)

        return job_configs
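
One small detail in Example 4: the HEALPix order requested by a component is clamped to the command-line ceiling with min(comp.hpx_order, args['hpx_order_max']), so no single job exceeds the configured maximum map resolution. A toy check of that clamp:

def clamp_hpx_order(component_order, order_max):
    """Never exceed the configured maximum HEALPix order."""
    return min(component_order, order_max)

assert clamp_hpx_order(9, 7) == 7  # component asks for finer pixels than allowed
assert clamp_hpx_order(5, 7) == 5  # component already below the ceiling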
Example 5
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        ret_dict = make_catalog_comp_dict(sources=args['library'], 
                                          basedir=NAME_FACTORY.base_dict['basedir'])
        catalog_info_dict = ret_dict['catalog_info_dict']
        comp_info_dict = ret_dict['comp_info_dict']

        n_src_per_job = args['nsrc']

        if args['make_xml']:
            ConfigMaker_SrcmapsCatalog._make_xml_files(catalog_info_dict, comp_info_dict)

        for catalog_name, catalog_info in catalog_info_dict.items():

            n_cat_src = len(catalog_info.catalog.table)
            n_job = int(math.ceil(float(n_cat_src)/n_src_per_job))

            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')
                name_keys = dict(zcut=zcut,
                                 sourcekey=catalog_name,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime='none',
                                 fullpath=True)

                for i_job in range(n_job):
                    full_key = "%s_%02i"%(key, i_job)
                    srcmin = i_job*n_src_per_job
                    srcmax = min(srcmin+n_src_per_job, n_cat_src)
                    outfile = NAME_FACTORY.srcmaps(**name_keys).replace('.fits', "_%02i.fits"%(i_job))
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    job_configs[full_key] = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                                 expcube=NAME_FACTORY.ltcube(**name_keys),
                                                 irfs=NAME_FACTORY.irfs(**name_keys),
                                                 bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                                 outfile=outfile,
                                                 logfile=logfile,
                                                 srcmdl=catalog_info.srcmdl_name,
                                                 evtype=comp.evtype,
                                                 srcmin=srcmin,
                                                 srcmax=srcmax)

        return job_configs
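
Example 5 splits each catalog into fixed-size chunks of nsrc sources and creates one srcmaps job per chunk, suffixing the output file with the chunk index. The chunking arithmetic (ceiling division plus a clipped upper bound) is easier to see in isolation; this sketch is independent of the catalog machinery:

import math

def chunk_ranges(n_items, chunk_size):
    """Return (first, last_exclusive) index pairs covering n_items."""
    n_chunks = int(math.ceil(float(n_items) / chunk_size))
    return [(i * chunk_size, min((i + 1) * chunk_size, n_items))
            for i in range(n_chunks)]

# chunk_ranges(10, 4) -> [(0, 4), (4, 8), (8, 10)]
# Each pair maps onto the srcmin/srcmax arguments of one job above.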
Example 6
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])
        ret_dict = make_catalog_comp_dict(sources=args['library'], basedir='.')
        comp_info_dict = ret_dict['comp_info_dict']

        for split_ver, split_dict in comp_info_dict.items():
            for source_key, source_dict in split_dict.items():
                full_key = "%s_%s" % (split_ver, source_key)
                merged_name = "%s_%s" % (source_dict.catalog_info.catalog_name,
                                         source_key)
                if source_dict.model_type != 'CompositeSource':
                    continue

                for comp in components:
                    zcut = "zmax%i" % comp.zmax
                    key = "%s_%s" % (
                        full_key, comp.make_key('{ebin_name}_{evtype_name}'))
                    name_keys = dict(zcut=zcut,
                                     sourcekey=full_key,
                                     ebin=comp.ebin_name,
                                     psftype=comp.evtype_name,
                                     coordsys=comp.coordsys,
                                     mktime='none',
                                     irf_ver=NAME_FACTORY.irf_ver())
                    nested_name_keys = dict(
                        zcut=zcut,
                        sourcekey=source_dict.catalog_info.catalog_name,
                        ebin=comp.ebin_name,
                        psftype=comp.evtype_name,
                        coordsys=comp.coordsys,
                        mktime='none',
                        irf_ver=NAME_FACTORY.irf_ver())
                    outfile = NAME_FACTORY.srcmaps(**name_keys)
                    logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                    print(key, merged_name, logfile)
                    job_configs[key] = dict(
                        srcmaps=NAME_FACTORY.srcmaps(**nested_name_keys),
                        expcube=NAME_FACTORY.ltcube(**name_keys),
                        irfs=NAME_FACTORY.irfs(**name_keys),
                        bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                        srcmdl=NAME_FACTORY.srcmdl_xml(**name_keys),
                        merged=merged_name,
                        outfile=outfile,
                        outxml=NAME_FACTORY.nested_srcmdl_xml(**name_keys),
                        logfile=logfile)

        return job_configs
Example 7
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        ret_dict = make_diffuse_comp_info_dict(
            components=components,
            library=args['library'],
            basedir=NAME_FACTORY.base_dict['basedir'])
        diffuse_comp_info_dict = ret_dict['comp_info_dict']

        for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
            diffuse_comp_info_value = diffuse_comp_info_dict[
                diffuse_comp_info_key]

            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')

                if diffuse_comp_info_value.components is None:
                    sub_comp_info = diffuse_comp_info_value
                else:
                    sub_comp_info = diffuse_comp_info_value.get_component_info(
                        comp)

                full_key = "%s_%s" % (sub_comp_info.sourcekey, key)

                name_keys = dict(zcut=zcut,
                                 sourcekey=sub_comp_info.sourcekey,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime='none',
                                 fullpath=True)

                infile = NAME_FACTORY.srcmaps(**name_keys)
                outfile = infile.replace('.fits', '.png')

                logfile = make_nfs_path(outfile.replace('.png', '_png.log'))
                job_configs[full_key] = dict(
                    input=infile,
                    output=outfile,
                    extension=sub_comp_info.source_name,
                    zscale=args.get('zscale', 'log'),
                    logfile=logfile)

        return job_configs
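
Example 7 derives both the plot name and the log name from the srcmaps file by string replacement ('.fits' -> '.png', then '.png' -> '_png.log'). Note that str.replace substitutes every occurrence, so this works because '.fits' appears only once, at the end of the path. A pathlib-based sketch of the same naming convention that touches only the final suffix (an alternative, not what the pipeline itself uses):

from pathlib import Path

def plot_names(srcmap_file):
    """Derive the PNG output and its '_png.log' companion from a FITS path."""
    outfile = Path(srcmap_file).with_suffix('.png')
    logfile = outfile.with_name(outfile.stem + '_png.log')
    return str(outfile), str(logfile)

# plot_names('analysis/srcmaps_galdiff_E2_PSF3.fits')
# -> ('analysis/srcmaps_galdiff_E2_PSF3.png',
#     'analysis/srcmaps_galdiff_E2_PSF3_png.log')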
Example 8
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        gmm = make_ring_dicts(library=args['library'], basedir='.')

        for galkey in gmm.galkeys():
            ring_dict = gmm.ring_dict(galkey)
            for ring_key, ring_info in ring_dict.items():
                output_file = ring_info.merged_gasmap
                file_string = ""
                for fname in ring_info.files:
                    file_string += " %s" % fname
                logfile = make_nfs_path(output_file.replace('.fits', '.log'))
                job_configs[ring_key] = dict(output=output_file,
                                             args=file_string,
                                             logfile=logfile)

        return job_configs
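
Example 8 concatenates each ring's gasmap files into a single space-separated string that is passed as the job's args. The accumulation loop is equivalent to a join with a leading space; a tiny sketch, reproducing that leading-space quirk:

def file_list_to_args(filenames):
    """Space-separated file list, matching the loop in Example 8
    (it yields a leading space, which a bare ' '.join would not)."""
    return ''.join(' %s' % fname for fname in filenames)

assert file_list_to_args(['ring_1.fits', 'ring_2.fits']) == ' ring_1.fits ring_2.fits'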
Example 9
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])

        ret_dict = make_catalog_comp_dict(
            library=args['library'], basedir=NAME_FACTORY.base_dict['basedir'])
        catalog_info_dict = ret_dict['catalog_info_dict']
        comp_info_dict = ret_dict['comp_info_dict']

        for catalog_name, catalog_info in catalog_info_dict.items():

            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')
                name_keys = dict(zcut=zcut,
                                 sourcekey=catalog_name,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 mktime='none',
                                 fullpath=True)

                outfile = NAME_FACTORY.srcmaps(**name_keys)
                outfile_tokens = os.path.splitext(outfile)
                infile_regexp = "%s_*.fits" % outfile_tokens[0]
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[key] = dict(output=outfile,
                                        args=infile_regexp,
                                        logfile=logfile)

        return job_configs
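
Example 9 is the gather step that pairs with Example 5: it turns the merged srcmaps name into a wildcard ('<stem>_*.fits') so the downstream merging tool can pick up every per-chunk file. A sketch of the pattern construction, plus how such a pattern would typically be expanded with glob (the expansion is an assumption here; the example itself only stores the pattern in the job config):

import glob
import os

def partial_file_pattern(merged_outfile):
    """Wildcard matching the per-chunk files behind a merged srcmaps file."""
    stem, _ext = os.path.splitext(merged_outfile)
    return "%s_*.fits" % stem

pattern = partial_file_pattern('srcmaps_catalog_E2_PSF3.fits')
# 'srcmaps_catalog_E2_PSF3_*.fits'
partial_files = sorted(glob.glob(pattern))  # e.g. ..._00.fits, ..._01.fits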
Example 10
    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        components = Component.build_from_yamlfile(args['comp'])
        NAME_FACTORY.update_base_dict(args['data'])
        
        ret_dict = make_diffuse_comp_info_dict(components=components,
                                               library=args['library'],
                                               basedir='.')
        diffuse_comp_info_dict = ret_dict['comp_info_dict']
        if args['make_xml']:
            ConfigMaker_SrcmapPartial._make_xml_files(diffuse_comp_info_dict)

        for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
            diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]
            no_psf = diffuse_comp_info_value.no_psf
            for comp in components:
                zcut = "zmax%i" % comp.zmax
                key = comp.make_key('{ebin_name}_{evtype_name}')
                if diffuse_comp_info_value.components is None:
                    sub_comp_info = diffuse_comp_info_value
                else:
                    sub_comp_info = diffuse_comp_info_value.get_component_info(comp)
                name_keys = dict(zcut=zcut,
                                 sourcekey=sub_comp_info.sourcekey,
                                 ebin=comp.ebin_name,
                                 psftype=comp.evtype_name,
                                 mktime='none',
                                 coordsys=comp.coordsys,
                                 irf_ver=NAME_FACTORY.irf_ver(),
                                 fullpath=True)

                kmin = 0
                kmax = comp.enumbins + 1
                outfile_base = NAME_FACTORY.srcmaps(**name_keys)
                kstep = HPX_ORDER_TO_KSTEP[comp.hpx_order]
                base_dict = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                                 expcube=NAME_FACTORY.ltcube(**name_keys),
                                 irfs=NAME_FACTORY.irfs(**name_keys),
                                 bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                                 srcmdl=sub_comp_info.srcmdl_name,
                                 source=sub_comp_info.source_name,
                                 no_psf=no_psf,
                                 evtype=comp.evtype)

                if kstep < 0:
                    kstep = kmax

                for k in range(kmin, kmax, kstep):
                    full_key = "%s_%s_%02i" % (diffuse_comp_info_key, key, k)
                    khi = min(kmax, k + kstep)
                    
                    full_dict = base_dict.copy()
                    outfile = outfile_base.replace('.fits', '_%02i.fits' % k)
                    logfile = make_nfs_path(outfile_base.replace('.fits', '_%02i.log' % k))
                    full_dict.update(dict(outfile=outfile,
                                          kmin=k, kmax=khi,
                                          logfile=logfile))
                    job_configs[full_key] = full_dict

        return job_configs
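
Example 10 tiles the energy planes of each component into slices of kstep planes and creates one partial-srcmaps job per slice; a negative kstep (from the HPX_ORDER_TO_KSTEP lookup) collapses everything into a single job. The slicing logic in isolation, with kstep passed in directly rather than looked up by HEALPix order:

def plane_slices(n_ebins, kstep):
    """(kmin, kmax) pairs covering planes 0..n_ebins inclusive, in steps of kstep.

    kmax is n_ebins + 1, matching kmax = comp.enumbins + 1 in the example
    (presumably because source maps are evaluated at energy-bin edges);
    kstep < 0 means 'one job for everything'.
    """
    kmax = n_ebins + 1
    if kstep < 0:
        kstep = kmax
    return [(k, min(kmax, k + kstep)) for k in range(0, kmax, kstep)]

# plane_slices(8, 4)  -> [(0, 4), (4, 8), (8, 9)]
# plane_slices(8, -1) -> [(0, 9)]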