def test_comlink():
    """Exercise basic `Link` construction with options, mapping and file args."""
    link_config = dict(
        appname='dummy',
        options=dict(arg_float=(4.0, 'a float', float),
                     arg_in=('test.in', 'an input file', str),
                     arg_out=('test.out', 'an output file', str)),
        mapping=dict(arg_in='alias'),
        file_args=dict(arg_in=FileFlags.input_mask,
                       arg_out=FileFlags.output_mask))
    link = Link('link', **link_config)
def _make_coadd_links(self, do_ltsum):
    """Make the links to run fermipy-coadd for each energy bin X psf type.

    Parameters
    ----------
    do_ltsum : bool
        If True, also build a ``gtltsum`` link for each
        energy-bin / mktime-filter combination.

    Returns
    -------
    links : list
        The newly constructed `Link` objects.
    """
    links = []
    for key_e, comp_e in sorted(self.comp_dict.items()):
        # BUG FIX: dict.has_key() was removed in Python 3; use
        # dict.get() with the same fallback defaults instead.
        mktimelist = comp_e.get('mktimefilters', ['none'])
        evtclasslist = comp_e.get('evtclasses', ['default'])
        for mktimekey in mktimelist:
            if do_ltsum:
                # One livetime-cube sum per energy bin / mktime filter
                ltsum_listfile = 'ltsumlist_%s_%s' % (key_e, mktimekey)
                ltsum_outfile = 'ltsum_%s_%s' % (key_e, mktimekey)
                link_ltsum = Gtlink('ltsum_%s_%s' % (key_e, mktimekey),
                                    appname='gtltsum',
                                    mapping={'infile1': ltsum_listfile,
                                             'outfile': ltsum_outfile},
                                    options=dict(infile1=(None, "Livetime cube 1 or list of files", str),
                                                 infile2=("none", "Livetime cube 2", str),
                                                 outfile=(None, "Output file", str)),
                                    file_args=dict(infile1=FileFlags.input_mask,
                                                   outfile=FileFlags.output_mask))
                links.append(link_ltsum)
            for evtclass in evtclasslist:
                for psf_type in sorted(comp_e['psf_types'].keys()):
                    key = "%s_%s_%s_%s" % (key_e, mktimekey, evtclass, psf_type)
                    binkey = 'binfile_%s' % key
                    argkey = 'args_%s' % key
                    # Register the argument file so it is gzipped on cleanup
                    self.files.file_args[argkey] = FileFlags.gz_mask
                    link = Link('coadd_%s' % key,
                                appname='fermipy-coadd',
                                options=dict(args=([], "List of input files", list),
                                             output=(None, "Output file", str)),
                                mapping={'args': argkey,
                                         'output': binkey},
                                file_args=dict(args=FileFlags.input_mask,
                                               output=FileFlags.output_mask))
                    links.append(link)
    return links
def create_link_fermipy_coadd(**kwargs):
    """Make a `fermipy.jobs.Link` object to run fermipy-coadd

    Any remaining keyword arguments are forwarded to the `Link` constructor;
    ``linkname`` may be overridden (defaults to 'fermipy-coadd').
    """
    linkname = kwargs.pop('linkname', 'fermipy-coadd')
    option_dict = dict(args=([], "List of input files", list),
                       output=(None, "Output file", str))
    flag_dict = dict(args=FileFlags.input_mask,
                     output=FileFlags.output_mask)
    return Link(linkname=linkname,
                appname='fermipy-coadd',
                options=option_dict,
                file_args=flag_dict,
                **kwargs)
def create_link_fermipy_vstack(**kwargs):
    """Make a `fermipy.jobs.Link` object to run fermipy-vstack

    Any remaining keyword arguments are forwarded to the `Link` constructor;
    ``linkname`` may be overridden (defaults to 'fermipy-vstack').
    """
    linkname = kwargs.pop('linkname', 'fermipy-vstack')
    option_dict = dict(output=(None, "Output file name", str),
                       hdu=(None, "Name of HDU to stack", str),
                       args=([], "List of input files", list),
                       gzip=(False, "Compress output", bool))
    flag_dict = dict(args=FileFlags.input_mask,
                     output=FileFlags.output_mask)
    return Link(linkname=linkname,
                appname='fermipy-vstack',
                options=option_dict,
                file_args=flag_dict,
                **kwargs)
def create_link_fermipy_healview(**kwargs):
    """Make a `fermipy.jobs.Link` object to run fermipy-healview

    Any remaining keyword arguments are forwarded to the `Link` constructor;
    ``linkname`` may be overridden (defaults to 'fermipy-healview').
    """
    link = Link(linkname=kwargs.pop('linkname', 'fermipy-healview'),
                appname='fermipy-healview',
                options=dict(input=(None, "Input file", str),
                             output=(None, "Output file name", str),
                             extension=(None, "FITS HDU with HEALPix map", str),
                             zscale=("log", "Scaling for color scale", str)),
                # BUG FIX: file_args previously flagged a non-existent 'args'
                # option (copy-paste from the coadd/vstack links); the
                # input-file mask belongs on the declared 'input' option.
                file_args=dict(input=FileFlags.input_mask,
                               output=FileFlags.output_mask),
                **kwargs)
    return link
def create_link_fermipy_gather_srcmaps(**kwargs):
    """Make a `fermipy.jobs.Link` object to run fermipy-gather-srcmaps

    Any remaining keyword arguments are forwarded to the `Link` constructor;
    ``linkname`` may be overridden (defaults to 'fermipy-gather-srcmaps').
    """
    linkname = kwargs.pop('linkname', 'fermipy-gather-srcmaps')
    option_dict = dict(output=(None, "Output file name", str),
                       args=([], "List of input files", list),
                       gzip=(False, "Compress output", bool),
                       rm=(False, "Remove input files", bool),
                       clobber=(False, "Overwrite output", bool))
    flag_dict = dict(args=FileFlags.input_mask,
                     output=FileFlags.output_mask)
    return Link(linkname=linkname,
                appname='fermipy-gather-srcmaps',
                options=option_dict,
                file_args=flag_dict,
                **kwargs)
def make_link(**kwargs):
    """Make a `fermipy.jobs.Link object to run `GtMergeSourceMaps`

    Any remaining keyword arguments are forwarded to the `Link` constructor;
    ``linkname`` may be overridden (defaults to 'merge-srcmaps').
    """
    linkname = kwargs.pop('linkname', 'merge-srcmaps')
    # Input files are the livetime cube, counts map, exposure map and
    # source model; outputs are the merged source maps and the xml model.
    flag_dict = dict(expcube=FileFlags.input_mask,
                     cmap=FileFlags.input_mask,
                     bexpmap=FileFlags.input_mask,
                     srcmdl=FileFlags.input_mask,
                     outfile=FileFlags.output_mask,
                     outxml=FileFlags.output_mask)
    return Link(linkname,
                appname='fermipy-merge-srcmaps',
                options=GtMergeSourceMaps.default_options.copy(),
                file_args=flag_dict,
                **kwargs)
def test_chain():
    """Exercise chaining two `Link` objects with an argument-mapping function."""
    first_config = dict(
        appname='dummy',
        options=dict(arg_float=(4.0, 'a float', float),
                     arg_in=('test.in', 'an input file', str),
                     arg_out=('test.out', 'an output file', str)),
        mapping=dict(arg_in='alias'),
        file_args=dict(arg_in=FileFlags.input_mask,
                       arg_out=FileFlags.output_mask))
    link = Link('link', **first_config)

    second_config = dict(
        options=dict(irfs=('CALDB', 'IRF version', str),
                     expcube=(None, 'Livetime cube file', str),
                     bexpmap=(None, 'Binned exposure map', str),
                     cmap=(None, 'Binned counts map', str),
                     srcmdl=(None, 'Input source model xml file', str),
                     outfile=(None, 'Output file', str)),
        file_args=dict(expcube=FileFlags.input_mask,
                       cmap=FileFlags.input_mask,
                       bexpmap=FileFlags.input_mask,
                       srcmdl=FileFlags.input_mask,
                       outfile=FileFlags.output_mask))
    # This should be a Gtlink, but we only really wanna test the chain
    # functionality here
    link2 = Link('gtsrcmaps', **second_config)

    def argmapper(args):
        # Derive the per-component file names from the shared basename
        basename = args['basename']
        ret_dict = dict(expcube="%s_ltcube.fits" % basename,
                        cmap="%s_ccube.fits" % basename,
                        bexpmap="%s_bexpmap.fits" % basename,
                        srcmdl="%s_srcmdl.xml" % basename)
        return ret_dict

    chain = Chain('chain',
                  links=[link, link2],
                  options=dict(basename=('dummy', 'Base file name', str)),
                  argmapper=argmapper)
def test_job_archive():
    """Exercise registering files and jobs in a `JobArchive`."""
    link = Link('test',
                appname='test_app',
                options=dict(optstr='CALDB',
                             infile1=None, infile2=None, infile3=None,
                             outfile1=None, outfile2=None),
                input_file_args=['infile1', 'infile2', 'infile3'],
                output_file_args=['outfile1', 'outfile2'])
    job_archive = JobArchive(file_archive_table='archive_files.fits',
                             job_archive_table='archive_jobs.fits',
                             base_path=os.path.abspath('.'))
    # Register the six input files for the two jobs below
    for group in (1, 2):
        for idx in (1, 2, 3):
            job_archive._file_archive.register_file(
                'input%i_%i.fits' % (group, idx), 0)

    config_1 = dict(infile1='input1_1.fits',
                    infile2='input1_2.fits',
                    infile3='input1_3.fits',
                    outfile1='output1_1.fits',
                    outfile2='output1_2.fits')
    config_2 = dict(infile1='input2_1.fits',
                    infile2='input2_2.fits',
                    infile3='input2_3.fits',
                    outfile1='output2_1.fits',
                    outfile2='output2_2.fits')

    link.update_args(config_1)
    job = job_archive.register_job_from_link(link, 'dummy1',
                                             logfile='dummy1.log')
    link.update_args(config_2)
    job2 = job_archive.register_job_from_link(link, 'dummy2',
                                              logfile='dummy2.log')