def print_summary(self, stream=sys.stdout, indent="", recurse_level=2):
    """Print a summary of the activity done by this `Link`.

    Parameters
    ----------
    stream : `file`
        Stream to print to
    indent : str
        Indentation at start of line
    recurse_level : int
        Number of recursion levels to print
    """
    Link.print_summary(self, stream, indent, recurse_level)
    if recurse_level > 0:
        recurse_level -= 1
        indent += "  "
        if self._initialize_link is not None:
            stream.write("\n")
            self._initialize_link.print_summary(stream, indent, recurse_level)
        stream.write("\n")
        self._scatter_link.print_summary(stream, indent, recurse_level)
        if self._gather_link is not None:
            stream.write("\n")
            self._gather_link.print_summary(stream, indent, recurse_level)
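# Hedged usage sketch: with recurse_level=2 the summary above descends one
# level into the initialize / scatter / gather sub-links, indenting each
# level. `sg` stands in for any configured ScatterGather instance (an
# assumption, not a name from the sources).
sg.print_summary(sys.stdout, indent="", recurse_level=2)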
def update_args(self, override_args):
    """Update the arguments used to invoke the application

    See help for `chain.Link` for details

    This calls the base class function, then fills the parameters
    of the GtApp object
    """
    Link.update_args(self, override_args)
    update_gtapp(self.__app, **self.args)
def __init__(self, **kwargs):
    """C'tor

    Keyword arguments
    -----------------
    config_maker : `ConfigMaker`
        Object used to translate arguments.
        Must have functions 'add_arguments' and 'build_job_configs'.
        Defaults to ConfigMaker()
    usage : str
        Usage string for argument parser
    description : str
        Description string for argument parser
    job_archive : `fermipy.job_archive.JobArchive` [optional]
        Archive used to track jobs and associated data products.
        Defaults to None
    initialize : `fermipy.chain.Link` [optional]
        Link run for the initialization stage.
        Defaults to None
    scatter : `fermipy.chain.Link`
        Link run for the scatter stage.
        Defaults to None
    gather : `fermipy.chain.Link` [optional]
        Link run for the gather stage.
        Defaults to None
    no_batch : bool
        If True, do not send jobs to the batch to run.
        Defaults to False
    """
    linkname = kwargs.pop('linkname', 'ScatterGather')
    self._config_maker = kwargs.pop('config_maker', None)
    self._usage = kwargs.pop('usage', "")
    self._description = kwargs.pop('description', "")
    self._job_archive = kwargs.pop('job_archive', None)
    self._initialize_link = kwargs.pop('initialize', None)
    self._scatter_link = kwargs.pop('scatter', None)
    self._gather_link = kwargs.pop('gather', None)
    self._no_batch = kwargs.pop('no_batch', False)
    options = kwargs.pop('options', self.default_options.copy())
    self._config_maker.add_options(options)
    Link.__init__(self, linkname,
                  options=options,
                  parser=self._make_parser(),
                  **kwargs)
    self._base_config = None
    self._input_config = {}
    self._job_configs = {}
    self._output_config = {}
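# Minimal usage sketch for the constructor above (assumptions, not from the
# fermipy sources): `MyConfigMaker` and `my_scatter_link` stand in for a
# concrete ConfigMaker subclass and an already-configured `fermipy.chain.Link`.
sg = ScatterGather(linkname='my-analysis-sg',
                   config_maker=MyConfigMaker(),   # hypothetical ConfigMaker subclass
                   usage="my-analysis-sg [options]",
                   description="Run my analysis in parallel",
                   scatter=my_scatter_link,        # hypothetical scatter Link
                   no_batch=True)                  # run locally, skip the batch farm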
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="dmpipe-prepare-targets [options]",
        description="Prepare directories for target analyses")
    Link.__init__(self, kwargs.pop('linkname', 'prepare-targets'),
                  parser=parser,
                  appname='dmpipe-prepare-targets',
                  options=TargetPreparer.default_options.copy(),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="dmpipe-analyze-roi [options]",
        description="Run analysis of a single ROI")
    Link.__init__(self, kwargs.pop('linkname', 'analyze-roi'),
                  parser=parser,
                  appname='dmpipe-analyze-roi',
                  options=TargetAnalysis.default_options.copy(),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="dmpipe-analyze-sed [options]",
        description="Extract the SED for a single target")
    Link.__init__(self, kwargs.pop('linkname', 'analyze-sed'),
                  parser=parser,
                  appname='dmpipe-analyze-sed',
                  options=SEDAnalysis.default_options.copy(),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="fermipy-init-model [options]",
        description="Initialize model fitting directory")
    Link.__init__(self, kwargs.pop('linkname', 'init-model'),
                  parser=parser,
                  appname='fermipy-init-model',
                  options=GtInitModel.default_options.copy(),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="dmpipe-plot-dm [options]",
        description="Make castro plots")
    Link.__init__(self, kwargs.pop('linkname', 'plot-dm'),
                  parser=parser,
                  appname='dmpipe-plot-dm',
                  options=CastroPlotterDM.default_options.copy(),
                  **kwargs)
def __init__(self, linkname, **kwargs):
    """C'tor

    See help for `chain.Link` for details

    This calls the base class c'tor, then builds a GtApp object
    """
    Link.__init__(self, linkname, **kwargs)
    try:
        self.__app = build_gtapp(self.appname, **self.args)
    except Exception:
        raise ValueError("Failed to build link %s" % self.linkname)
def __init__(self, linkname, **kwargs):
    """C'tor

    See help for `chain.Link` for details

    This calls the base class c'tor, then builds a GtApp object
    """
    Link.__init__(self, linkname, **kwargs)
    try:
        self.__app = build_gtapp(self.appname, **self.args)
    except Exception:
        raise ValueError("Failed to build link %s %s %s" %
                         (self.linkname, self.appname, self.args))
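# Hedged usage sketch: a Gtlink wraps a Fermi Science Tools application
# behind the Link interface. The option tuples follow the
# (default, help, type) convention used throughout these snippets; the
# gtbin options shown here are illustrative, not a complete set.
link = Gtlink('gtbin',
              appname='gtbin',
              options=dict(evfile=(None, "Input event file", str),
                           outfile=(None, "Output file", str),
                           algorithm=('HEALPIX', "Binning algorithm", str)))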
def __init__(self, **kwargs):
    """C'tor to build this object from energy binning and spectral values tables.
    """
    parser = argparse.ArgumentParser(
        usage="dmpipe-stack-likelihood [options]",
        description="Stack the likelihood from targets")
    Link.__init__(self, kwargs.pop('linkname', 'stack-likelihood'),
                  parser=parser,
                  appname='dmpipe-stack-likelihood',
                  options=DMCastroStacker.default_options.copy(),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor to build this object from energy binning and spectral values tables.
    """
    parser = argparse.ArgumentParser(
        usage="dmpipe-spec-table [options]",
        description="Build a table with the spectra")
    Link.__init__(self, kwargs.pop('linkname', 'spec-table'),
                  parser=parser,
                  appname=kwargs.pop('appname', 'dmpipe-spec-table'),
                  options=DMSpecTableBuilder.default_options.copy(),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="dmpipe-convert-castro [options]",
        description="Convert SED to DMCastroData")
    Link.__init__(self, kwargs.pop('linkname', 'convert-castro'),
                  parser=parser,
                  appname=kwargs.pop('appname', 'dmpipe-convert-castro'),
                  options=DMCastroConvertor.default_options.copy(),
                  file_args=dict(),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="fermipy-assemble-model [options]",
        description="Copy source maps from the library to an analysis directory")
    Link.__init__(self, kwargs.pop('linkname', 'assemble-model'),
                  parser=parser,
                  appname='fermipy-assemble-model',
                  options=GtAssembleModel.default_options.copy(),
                  file_args=dict(input=FileFlags.input_mask),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="fermipy-srcmaps-diffuse [options]",
        description="Run gtsrcmaps for one or more energy planes for a single source")
    Link.__init__(self, kwargs.pop('linkname', 'srcmaps-diffuse'),
                  parser=parser,
                  appname='fermipy-srcmaps-diffuse',
                  options=GtSrcmapPartial.default_options.copy(),
                  file_args=dict(expcube=FileFlags.input_mask,
                                 cmap=FileFlags.input_mask,
                                 bexpmap=FileFlags.input_mask,
                                 srcmdl=FileFlags.input_mask,
                                 outfile=FileFlags.output_mask),
                  **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage="fermipy-srcmaps-catalog [options]",
        description="Run gtsrcmaps for all the sources in a catalog")
    Link.__init__(self, kwargs.pop('linkname', 'srcmaps-catalog'),
                  parser=parser,
                  appname='fermipy-srcmaps-catalog',
                  options=GtSrcmapsCatalog.default_options.copy(),
                  file_args=dict(expcube=FileFlags.input_mask,
                                 cmap=FileFlags.input_mask,
                                 bexpmap=FileFlags.input_mask,
                                 srcmdl=FileFlags.input_mask,
                                 outfile=FileFlags.output_mask),
                  **kwargs)
def run_argparser(self, argv):
    """Initialize a link with a set of arguments using argparse
    """
    args = Link.run_argparser(self, argv)
    for link in self._links.values():
        link.run_link(stream=sys.stdout, dry_run=True)
    return args
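# Hedged usage sketch: run_argparser above parses argv-style tokens and then
# dry-runs each sub-link. `chain` stands in for any configured Chain, e.g.
# the one built in test_chain below; the option name is a placeholder.
args = chain.run_argparser(['--basename', 'dummy'])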
def test_comlink():
    kwargs = dict(appname='dummy',
                  options=dict(arg_float=(4.0, 'a float', float),
                               arg_in=('test.in', 'an input file', str),
                               arg_out=('test.out', 'an output file', str)),
                  mapping=dict(arg_in='alias'),
                  file_args=dict(arg_in=FileFlags.input_mask,
                                 arg_out=FileFlags.output_mask))
    link = Link('link', **kwargs)
def __init__(self, **kwargs):
    """C'tor
    """
    parser = argparse.ArgumentParser(
        usage='fermipy-residual-cr [options]',
        description="Compute the residual cosmic-ray contamination.")
    Link.__init__(self, kwargs.pop('linkname', 'residual_cr'),
                  appname='fermipy-residual-cr',
                  options=ResidualCRAnalysis.default_options.copy(),
                  parser=parser,
                  file_args=dict(ccube_dirty=FileFlags.input_mask,
                                 bexpcube_dirty=FileFlags.input_mask,
                                 ccube_clean=FileFlags.input_mask,
                                 bexpcube_clean=FileFlags.input_mask,
                                 outfile=FileFlags.output_mask),
                  **kwargs)
def _make_coadd_links(self, do_ltsum):
    """Make the links to run fermipy-coadd for each energy bin X psf type
    """
    links = []
    for key_e, comp_e in sorted(self.comp_dict.items()):
        if 'mktimefilters' in comp_e:
            mktimelist = comp_e['mktimefilters']
        else:
            mktimelist = ['none']
        if 'evtclasses' in comp_e:
            evtclasslist = comp_e['evtclasses']
        else:
            evtclasslist = ['default']
        for mktimekey in mktimelist:
            if do_ltsum:
                ltsum_listfile = 'ltsumlist_%s_%s' % (key_e, mktimekey)
                ltsum_outfile = 'ltsum_%s_%s' % (key_e, mktimekey)
                link_ltsum = Gtlink('ltsum_%s_%s' % (key_e, mktimekey),
                                    appname='gtltsum',
                                    mapping={'infile1': ltsum_listfile,
                                             'outfile': ltsum_outfile},
                                    options=dict(infile1=(None, "Livetime cube 1 or list of files", str),
                                                 infile2=("none", "Livetime cube 2", str),
                                                 outfile=(None, "Output file", str)),
                                    file_args=dict(infile1=FileFlags.input_mask,
                                                   outfile=FileFlags.output_mask))
                links.append(link_ltsum)
            for evtclass in evtclasslist:
                for psf_type in sorted(comp_e['psf_types'].keys()):
                    key = "%s_%s_%s_%s" % (key_e, mktimekey, evtclass, psf_type)
                    binkey = 'binfile_%s' % key
                    argkey = 'args_%s' % key
                    self.files.file_args[argkey] = FileFlags.gz_mask
                    link = Link('coadd_%s' % key,
                                appname='fermipy-coadd',
                                options=dict(args=([], "List of input files", list),
                                             output=(None, "Output file", str)),
                                mapping={'args': argkey,
                                         'output': binkey},
                                file_args=dict(args=FileFlags.input_mask,
                                               output=FileFlags.output_mask))
                    links.append(link)
    return links
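# Hypothetical component dictionary illustrating the keys that
# _make_coadd_links reads ('mktimefilters' and 'evtclasses' are optional and
# fall back to ['none'] / ['default']); the values here are assumptions, not
# taken from a real configuration.
comp_dict = {
    'E3': dict(mktimefilters=['nosm'],
               evtclasses=['P8R3_SOURCE'],
               psf_types={'PSF0': {}, 'PSF3': {}}),
}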
def create_link_fermipy_coadd(**kwargs):
    """Make a `fermipy.jobs.Link` object to run fermipy-coadd
    """
    link = Link(linkname=kwargs.pop('linkname', 'fermipy-coadd'),
                appname='fermipy-coadd',
                options=dict(args=([], "List of input files", list),
                             output=(None, "Output file", str)),
                file_args=dict(args=FileFlags.input_mask,
                               output=FileFlags.output_mask),
                **kwargs)
    return link
def create_link_fermipy_vstack(**kwargs):
    """Make a `fermipy.jobs.Link` object to run fermipy-vstack
    """
    link = Link(linkname=kwargs.pop('linkname', 'fermipy-vstack'),
                appname='fermipy-vstack',
                options=dict(output=(None, "Output file name", str),
                             hdu=(None, "Name of HDU to stack", str),
                             args=([], "List of input files", list),
                             gzip=(False, "Compress output", bool)),
                file_args=dict(args=FileFlags.input_mask,
                               output=FileFlags.output_mask),
                **kwargs)
    return link
def create_link_fermipy_healview(**kwargs):
    """Make a `fermipy.jobs.Link` object to run fermipy-healview
    """
    link = Link(linkname=kwargs.pop('linkname', 'fermipy-healview'),
                appname='fermipy-healview',
                options=dict(input=(None, "Input file", str),
                             output=(None, "Output file name", str),
                             extension=(None, "FITS HDU with HEALPix map", str),
                             zscale=("log", "Scaling for color scale", str)),
                file_args=dict(input=FileFlags.input_mask,
                               output=FileFlags.output_mask),
                **kwargs)
    return link
def create_link_fermipy_gather_srcmaps(**kwargs):
    """Make a `fermipy.jobs.Link` object to run fermipy-gather-srcmaps
    """
    link = Link(linkname=kwargs.pop('linkname', 'fermipy-gather-srcmaps'),
                appname='fermipy-gather-srcmaps',
                options=dict(output=(None, "Output file name", str),
                             args=([], "List of input files", list),
                             gzip=(False, "Compress output", bool),
                             rm=(False, "Remove input files", bool),
                             clobber=(False, "Overwrite output", bool)),
                file_args=dict(args=FileFlags.input_mask,
                               output=FileFlags.output_mask),
                **kwargs)
    return link
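# Hedged usage sketch for the factory functions above: build a link, fill in
# its arguments, and do a dry run. update_args and run_link are the Link
# methods used elsewhere in these snippets; the file names are placeholders.
coadd_link = create_link_fermipy_coadd(linkname='coadd-demo')
coadd_link.update_args(dict(args=['ccube_000.fits', 'ccube_001.fits'],
                            output='ccube_coadd.fits'))
coadd_link.run_link(stream=sys.stdout, dry_run=True)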
def make_link(**kwargs):
    """Make a `fermipy.jobs.Link` object to run `GtMergeSourceMaps`
    """
    link = Link(kwargs.pop('linkname', 'merge-srcmaps'),
                appname='fermipy-merge-srcmaps',
                options=GtMergeSourceMaps.default_options.copy(),
                file_args=dict(expcube=FileFlags.input_mask,
                               cmap=FileFlags.input_mask,
                               bexpmap=FileFlags.input_mask,
                               srcmdl=FileFlags.input_mask,
                               outfile=FileFlags.output_mask,
                               outxml=FileFlags.output_mask),
                **kwargs)
    return link
def test_chain():
    kwargs = dict(appname='dummy',
                  options=dict(arg_float=(4.0, 'a float', float),
                               arg_in=('test.in', 'an input file', str),
                               arg_out=('test.out', 'an output file', str)),
                  mapping=dict(arg_in='alias'),
                  file_args=dict(arg_in=FileFlags.input_mask,
                                 arg_out=FileFlags.output_mask))
    link = Link('link', **kwargs)

    kwargs = dict(options=dict(irfs=('CALDB', 'IRF version', str),
                               expcube=(None, 'Livetime cube file', str),
                               bexpmap=(None, 'Binned exposure map', str),
                               cmap=(None, 'Binned counts map', str),
                               srcmdl=(None, 'Input source model xml file', str),
                               outfile=(None, 'Output file', str)),
                  file_args=dict(expcube=FileFlags.input_mask,
                                 cmap=FileFlags.input_mask,
                                 bexpmap=FileFlags.input_mask,
                                 srcmdl=FileFlags.input_mask,
                                 outfile=FileFlags.output_mask))
    # This should be a Gtlink, but we only want to test the chain functionality here
    link2 = Link('gtsrcmaps', **kwargs)

    def argmapper(args):
        basename = args['basename']
        ret_dict = dict(expcube="%s_ltcube.fits" % basename,
                        cmap="%s_ccube.fits" % basename,
                        bexpmap="%s_bexpmap.fits" % basename,
                        srcmdl="%s_srcmdl.xml" % basename)
        return ret_dict

    chain = Chain('chain',
                  links=[link, link2],
                  options=dict(basename=('dummy', 'Base file name', str)),
                  argmapper=argmapper)
def test_job_archive():
    link = Link('test', appname='test_app',
                options=dict(optstr='CALDB',
                             infile1=None, infile2=None, infile3=None,
                             outfile1=None, outfile2=None),
                input_file_args=['infile1', 'infile2', 'infile3'],
                output_file_args=['outfile1', 'outfile2'])
    job_archive = JobArchive(file_archive_table='archive_files.fits',
                             job_archive_table='archive_jobs.fits',
                             base_path=os.path.abspath('.'))
    job_archive._file_archive.register_file('input1_1.fits', 0)
    job_archive._file_archive.register_file('input1_2.fits', 0)
    job_archive._file_archive.register_file('input1_3.fits', 0)
    job_archive._file_archive.register_file('input2_1.fits', 0)
    job_archive._file_archive.register_file('input2_2.fits', 0)
    job_archive._file_archive.register_file('input2_3.fits', 0)

    config_1 = dict(infile1='input1_1.fits', infile2='input1_2.fits',
                    infile3='input1_3.fits', outfile1='output1_1.fits',
                    outfile2='output1_2.fits')
    config_2 = dict(infile1='input2_1.fits', infile2='input2_2.fits',
                    infile3='input2_3.fits', outfile1='output2_1.fits',
                    outfile2='output2_2.fits')

    link.update_args(config_1)
    job = job_archive.register_job_from_link(link, 'dummy1', logfile='dummy1.log')
    link.update_args(config_2)
    job2 = job_archive.register_job_from_link(link, 'dummy2', logfile='dummy2.log')
class SplitAndMktime_SG(ScatterGather):
    """Small class to generate configurations for SplitAndMktime
    """
    appname = 'fermipy-split-and-mktime-sg'
    usage = "%s [options]" % (appname)
    description = "Prepare data for diffuse all-sky analysis"
    clientclass = SplitAndMktime

    job_time = 1500

    default_options = dict(comp=diffuse_defaults.diffuse['comp'],
                           data=diffuse_defaults.diffuse['data'],
                           hpx_order_max=diffuse_defaults.diffuse['hpx_order_ccube'],
                           ft1file=diffuse_defaults.diffuse['ft1file'],
                           ft2file=diffuse_defaults.diffuse['ft2file'],
                           do_ltsum=diffuse_defaults.diffuse['do_ltsum'],
                           scratch=diffuse_defaults.diffuse['scratch'],
                           dry_run=diffuse_defaults.diffuse['dry_run'])

    __doc__ += Link.construct_docstring(default_options)

    def build_job_configs(self, args):
        """Hook to build job configurations
        """
        job_configs = {}

        comp_file = args.get('comp', None)
        if comp_file is not None:
            comp_dict = yaml.safe_load(open(comp_file))
            coordsys = comp_dict.pop('coordsys')
            for v in comp_dict.values():
                v['coordsys'] = coordsys
        else:
            return job_configs

        datafile = args['data']
        if datafile is None or datafile == 'None':
            return job_configs
        NAME_FACTORY.update_base_dict(args['data'])

        inputfiles = create_inputlist(args['ft1file'])
        outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')
        ft2file = os.path.join(NAME_FACTORY.base_dict['basedir'], args['ft2file'])
        data_ver = NAME_FACTORY.base_dict['data_ver']

        for idx, infile in enumerate(inputfiles):
            key = "%06i" % idx
            key_scfile = "%03i" % (idx + 1)
            output_dir = os.path.join(outdir_base, key)
            try:
                os.mkdir(output_dir)
            except OSError:
                pass
            scfile = args['ft2file'].replace('.lst', '_%s.fits' % key_scfile)
            logfile = make_nfs_path(os.path.join(
                output_dir, 'scatter_mk_%s_%s.log' % (data_ver, key)))
            job_configs[key] = comp_dict.copy()
            job_configs[key].update(dict(ft1file=infile,
                                         ft2file=ft2file,
                                         data=args['data'],
                                         scfile=scfile,
                                         comp=args['comp'],
                                         hpx_order_max=args['hpx_order_max'],
                                         outdir=outdir_base,
                                         outkey=key,
                                         logfile=logfile,
                                         pfiles=output_dir))

        return job_configs
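# Illustrative shape of the dictionary returned by build_job_configs above
# (values are assumptions): one entry per input FT1 file, keyed by a
# zero-padded index, each carrying the per-job arguments plus a copy of the
# component definitions.
example_job_configs = {
    '000000': dict(ft1file='ft1_000000.fits',
                   ft2file='ft2.fits',
                   scfile='ft2_001.fits',
                   hpx_order_max=9,
                   outdir='counts_cubes',
                   outkey='000000',
                   logfile='counts_cubes/000000/scatter_mk_v1_000000.log',
                   pfiles='counts_cubes/000000'),
}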
class SplitAndMktime(Chain):
    """Small class to split, apply mktime and bin data according to some
    user-provided specification

    This chain consists of multiple `Link` objects:

    select-energy-EBIN-ZCUT : `Gtlink_select`
        Initial splitting by energy bin and zenith angle cut
    mktime-EBIN-ZCUT-FILTER : `Gtlink_mktime`
        Application of gtmktime filter for zenith angle cut
    ltcube-EBIN-ZCUT-FILTER : `Gtlink_ltcube`
        Computation of livetime cube for zenith angle cut
    select-type-EBIN-ZCUT-FILTER-TYPE : `Gtlink_select`
        Refinement of selection from event types
    bin-EBIN-ZCUT-FILTER-TYPE : `Gtlink_bin`
        Final binning of the data for each event type
    """
    appname = 'fermipy-split-and-mktime'
    linkname_default = 'split-and-mktime'
    usage = '%s [options]' % (appname)
    description = 'Run gtselect and gtbin together'

    default_options = dict(comp=diffuse_defaults.diffuse['comp'],
                           data=diffuse_defaults.diffuse['data'],
                           hpx_order_max=diffuse_defaults.diffuse['hpx_order_ccube'],
                           ft1file=diffuse_defaults.diffuse['ft1file'],
                           ft2file=diffuse_defaults.diffuse['ft2file'],
                           evclass=(128, 'Event class bit mask', int),
                           outdir=('counts_cubes', 'Output directory', str),
                           outkey=(None, 'Key for this particular output file', str),
                           pfiles=(None, 'Directory for .par files', str),
                           do_ltsum=(False, 'Sum livetime cube files', bool),
                           scratch=(None, 'Scratch area', str),
                           dry_run=(False, 'Print commands but do not run them', bool))

    __doc__ += Link.construct_docstring(default_options)

    def __init__(self, **kwargs):
        """C'tor
        """
        super(SplitAndMktime, self).__init__(**kwargs)
        self.comp_dict = None

    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided
        to the individual links
        """
        comp_file = args.get('comp', None)
        datafile = args.get('data', None)
        if is_null(comp_file):
            return
        if is_null(datafile):
            return

        NAME_FACTORY.update_base_dict(args['data'])
        outdir = args.get('outdir')
        outkey = args.get('outkey')
        ft1file = args['ft1file']
        ft2file = args['ft2file']
        if is_null(outdir) or is_null(outkey):
            return
        pfiles = os.path.join(outdir, outkey)

        self.comp_dict = yaml.safe_load(open(comp_file))
        coordsys = self.comp_dict.pop('coordsys')

        full_out_dir = make_nfs_path(os.path.join(outdir, outkey))

        for key_e, comp_e in sorted(self.comp_dict.items()):
            if 'logebins' in comp_e:
                ebins_file = make_nfs_path(os.path.join(full_out_dir, 'energy_bins.fits'))
                write_ebins_file(ebins_file, comp_e['logebins'])
            else:
                ebins_file = None
            enumbins = comp_e['enumbins']
            emin = math.pow(10., comp_e['log_emin'])
            emax = math.pow(10., comp_e['log_emax'])
            zmax = comp_e['zmax']
            zcut = "zmax%i" % comp_e['zmax']
            tmin = comp_e.get('tmin', None)
            tmax = comp_e.get('tmax', None)
            if is_null(tmin):
                tmin = 'INDEF'
            if is_null(tmax):
                tmax = 'INDEF'
            evclassstr = NAME_FACTORY.base_dict['evclass']

            kwargs_select = dict(zcut=zcut,
                                 ebin=key_e,
                                 psftype='ALL',
                                 coordsys=coordsys)
            linkname = 'select-energy-%s-%s' % (key_e, zcut)
            selectfile_energy = make_full_path(outdir, outkey,
                                               NAME_FACTORY.select(**kwargs_select))
            self._set_link(linkname, Gtlink_select,
                           infile=ft1file,
                           outfile=selectfile_energy,
                           zmax=zmax,
                           tmin=tmin,
                           tmax=tmax,
                           emin=emin,
                           emax=emax,
                           evclass=NAME_FACTORY.evclassmask(evclassstr),
                           pfiles=pfiles,
                           logfile=os.path.join(full_out_dir, "%s.log" % linkname))

            if 'mktimefilters' in comp_e:
                mktimefilters = comp_e['mktimefilters']
            else:
                mktimefilters = ['none']

            for mktimekey in mktimefilters:
                kwargs_mktime = kwargs_select.copy()
                kwargs_mktime['mktime'] = mktimekey
                filterstring = MKTIME_DICT[mktimekey]
                mktime_file = make_full_path(outdir, outkey,
                                             NAME_FACTORY.mktime(**kwargs_mktime))
                ltcube_file = make_full_path(outdir, outkey,
                                             NAME_FACTORY.ltcube(**kwargs_mktime))
                linkname_mktime = 'mktime-%s-%s-%s' % (key_e, zcut, mktimekey)
                linkname_ltcube = 'ltcube-%s-%s-%s' % (key_e, zcut, mktimekey)

                self._set_link(linkname_mktime, Gtlink_mktime,
                               evfile=selectfile_energy,
                               outfile=mktime_file,
                               scfile=ft2file,
                               filter=filterstring,
                               pfiles=pfiles,
                               logfile=os.path.join(full_out_dir, "%s.log" % linkname_mktime))
                self._set_link(linkname_ltcube, Gtlink_ltcube,
                               evfile=mktime_file,
                               outfile=ltcube_file,
                               scfile=ft2file,
                               zmax=zmax,
                               pfiles=pfiles,
                               logfile=os.path.join(full_out_dir, "%s.log" % linkname_ltcube))

                if 'evtclasses' in comp_e:
                    evtclasslist_vals = comp_e['evtclasses']
                else:
                    evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]

                for evtclassval in evtclasslist_vals:
                    for psf_type, psf_dict in sorted(comp_e['psf_types'].items()):
                        kwargs_bin = kwargs_mktime.copy()
                        kwargs_bin['psftype'] = psf_type
                        kwargs_bin['coordsys'] = coordsys
                        kwargs_bin['evclass'] = evtclassval
                        selectfile_psf = make_full_path(outdir, outkey,
                                                        NAME_FACTORY.select(**kwargs_bin))
                        binfile_psf = make_full_path(outdir, outkey,
                                                     NAME_FACTORY.ccube(**kwargs_bin))
                        hpx_order_psf = min(args['hpx_order_max'],
                                            psf_dict['hpx_order'])
                        linkname_select = 'select-type-%s-%s-%s-%s-%s' % (
                            key_e, zcut, mktimekey, evtclassval, psf_type)
                        linkname_bin = 'bin-%s-%s-%s-%s-%s' % (
                            key_e, zcut, mktimekey, evtclassval, psf_type)

                        self._set_link(linkname_select, Gtlink_select,
                                       infile=selectfile_energy,
                                       outfile=selectfile_psf,
                                       zmax=zmax,
                                       emin=emin,
                                       emax=emax,
                                       tmin=tmin,
                                       tmax=tmax,
                                       evtype=EVT_TYPE_DICT[psf_type],
                                       evclass=NAME_FACTORY.evclassmask(evtclassval),
                                       pfiles=pfiles,
                                       logfile=os.path.join(full_out_dir, "%s.log" % linkname_select))

                        if ebins_file is None:
                            self._set_link(linkname_bin, Gtlink_bin,
                                           coordsys=coordsys,
                                           hpx_order=hpx_order_psf,
                                           evfile=selectfile_psf,
                                           outfile=binfile_psf,
                                           emin=emin,
                                           emax=emax,
                                           enumbins=enumbins,
                                           pfiles=pfiles,
                                           logfile=os.path.join(full_out_dir, "%s.log" % linkname_bin))
                        else:
                            self._set_link(linkname_bin, Gtlink_bin,
                                           coordsys=coordsys,
                                           hpx_order=hpx_order_psf,
                                           evfile=selectfile_psf,
                                           outfile=binfile_psf,
                                           ebinalg='FILE',
                                           ebinfile=ebins_file,
                                           pfiles=pfiles,
                                           logfile=os.path.join(full_out_dir, "%s.log" % linkname_bin))
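# Hypothetical per-component entry of the YAML file consumed by
# _map_arguments above, shown as the parsed Python dict. The keys match the
# lookups in the loop ('logebins', 'tmin', 'tmax', 'mktimefilters' and
# 'evtclasses' are optional); the top-level YAML also carries a 'coordsys'
# key that is popped before the loop. Values are illustrative only.
comp_e_example = dict(log_emin=2.0,        # 100 MeV
                      log_emax=2.5,
                      enumbins=2,
                      zmax=80,
                      mktimefilters=['nosm'],
                      evtclasses=['P8R3_SOURCE'],
                      psf_types={'PSF3': dict(hpx_order=5)})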
class SplitAndMktimeChain(Chain):
    """Chain to run split-and-mktime and then make livetime and exposure cubes

    This chain consists of:

    split-and-mktime : `SplitAndMktime_SG`
        Chain to make the binned counts maps for each input file
    coadd-split : `CoaddSplit_SG`
        Link to co-add the binned counts map files
    ltsum : `Gtltsum_SG`
        Link to co-add the livetime cube files
    expcube2 : `Gtexpcube2_SG`
        Link to make the corresponding binned exposure maps
    """
    appname = 'fermipy-split-and-mktime-chain'
    linkname_default = 'split-and-mktime-chain'
    usage = '%s [options]' % (appname)
    description = 'Run split-and-mktime, coadd-split and exposure'

    default_options = dict(data=diffuse_defaults.diffuse['data'],
                           comp=diffuse_defaults.diffuse['comp'],
                           ft1file=diffuse_defaults.diffuse['ft1file'],
                           ft2file=diffuse_defaults.diffuse['ft2file'],
                           hpx_order_ccube=diffuse_defaults.diffuse['hpx_order_ccube'],
                           hpx_order_expcube=diffuse_defaults.diffuse['hpx_order_expcube'],
                           do_ltsum=diffuse_defaults.diffuse['do_ltsum'],
                           scratch=diffuse_defaults.diffuse['scratch'],
                           dry_run=diffuse_defaults.diffuse['dry_run'])

    __doc__ += Link.construct_docstring(default_options)

    def __init__(self, **kwargs):
        """C'tor
        """
        super(SplitAndMktimeChain, self).__init__(**kwargs)
        self.comp_dict = None

    def _map_arguments(self, args):
        """Map from the top-level arguments to the arguments provided
        to the individual links
        """
        data = args.get('data')
        comp = args.get('comp')
        ft1file = args.get('ft1file')
        ft2file = args.get('ft2file')
        scratch = args.get('scratch', None)
        dry_run = args.get('dry_run', None)

        self._set_link('split-and-mktime', SplitAndMktime_SG,
                       comp=comp, data=data,
                       hpx_order_max=args.get('hpx_order_ccube', 9),
                       ft1file=ft1file,
                       ft2file=ft2file,
                       do_ltsum=args.get('do_ltsum', False),
                       scratch=scratch,
                       dry_run=dry_run)
        self._set_link('coadd-split', CoaddSplit_SG,
                       comp=comp, data=data,
                       ft1file=ft1file)
        self._set_link('ltsum', Gtltsum_SG,
                       comp=comp, data=data,
                       ft1file=args['ft1file'],
                       dry_run=dry_run)
        self._set_link('expcube2', Gtexpcube2_SG,
                       comp=comp, data=data,
                       hpx_order_max=args.get('hpx_order_expcube', 5),
                       dry_run=dry_run)
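# Hedged usage sketch: driving the full chain from argv-style tokens via the
# run_argparser pattern shown earlier. The option names mirror
# default_options above; the file paths are placeholders, and whether
# dry_run is exposed as a flag is an assumption about the parser setup.
chain = SplitAndMktimeChain(linkname='split-and-mktime-chain')
chain.run_argparser(['--comp', 'config/comp.yaml',
                     '--data', 'config/dataset.yaml',
                     '--ft1file', 'ft1files.lst',
                     '--ft2file', 'ft2files.lst',
                     '--dry_run'])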