def create_expmap(cmp):
    """Run gtexpcube2 to build an all-sky binned exposure map for one component."""
    app = 'gtexpcube2'
    outfile = cfg['gtlike']['bexpmap'] % ('_' + cmp['name'])
    # Honor an existing product unless the module-level overwrite flag is set.
    if os.path.isfile(outfile) and not overwrite:
        logger.info('skipping {}'.format(outfile))
        return
    kw = {
        'infile': cfg['data']['ltcube'],
        'cmap': 'none',
        'ebinalg': 'LOG',
        'emin': cmp['selection']['emin'],
        'emax': cmp['selection']['emax'],
        'enumbins': cmp['binning']['enumbins'],
        'outfile': outfile,
        # All-sky CAR projection at 1 deg/pixel.
        'proj': 'CAR',
        'nxpix': 360,
        'nypix': 180,
        'binsz': 1,
        'xref': 0.0,
        'yref': 0.0,
        'evtype': cmp['selection']['evtype'],
        'thmax': cfg['selection']['thmax'],  # THB
        'irfs': cfg['gtlike']['irfs'],
        'coordsys': cfg['binning']['coordsys'],
        'chatter': cfg['logging']['chatter'],
    }
    gtanalysis.run_gtapp(app, logger, kw, loglevel=logging.INFO)
    if os.path.isfile(kw['outfile']):
        logger.info('created {}'.format(outfile))
    else:
        # Dump the full keyword set to help debug the failed run.
        logger.error('Failed to create output file')
        logger.error('{} keywords:'.format(app))
        for key, val in kw.items():
            logger.error(' {:10s}: {}'.format(key, val))
def _compute_lc_exp(self, srcmdl='none', target="", specin=-2.1,
                    overwrite=False, **kwargs):
    """Run gtexposure to compute the exposure for a light curve.

    Parameters
    ----------
    srcmdl : str
        Basename of the per-component XML source model; ignored (passed
        through as 'none') when no target is given.
    target : str
        Name of the target source; empty string disables source-model
        weighting.
    specin : float
        Spectral index assumed by gtexposure.
    overwrite : bool
        Unused here; kept for interface compatibility with sibling methods.
    """
    loglevel = kwargs.get('loglevel', self.loglevel)
    for i, c in enumerate(self.components):
        # Only pass a source model when a target source was requested.
        srcmdl_path = (path.join(self.workdir,
                                 '{1:s}_{0:02n}.xml'.format(i, srcmdl))
                       if not target == "" else "none")
        kw = dict(infile=c.files['lcmap'],
                  scfile=c.data_files['scfile'],
                  irfs=c.config['gtlike']['irfs'],
                  srcmdl=srcmdl_path,
                  target=target,
                  specin=specin,
                  emin=c.config['selection']['emin'],
                  emax=c.config['selection']['emax'],
                  enumbins=self.enumbins)
        # BUG FIX: this was a Python 2 ``print`` statement (a syntax error
        # on Python 3); route the debug output through the instance logger.
        self.logger.debug('target=%s srcmdl=%s', kw['target'], kw['srcmdl'])
        run_gtapp('gtexposure', self.logger, kw, loglevel=loglevel)
    return
def _compute_diffrsp(self, srcmdl, overwrite=False, **kwargs):
    """Run gtdiffrsp to compute diffuse responses for each component's FT1 file.

    DOC FIX: the original docstring said "Run gtsrcprob"; this method runs
    gtdiffrsp.

    Parameters
    ----------
    srcmdl : str
        Basename of the per-component XML source model.
    overwrite : bool
        Unused here; kept for interface compatibility with sibling methods.
    """
    loglevel = kwargs.get('loglevel', self.loglevel)
    for i, c in enumerate(self.components):
        kw = dict(evfile=c.files['ft1'],
                  scfile=c.data_files['scfile'],
                  irfs=c.config['gtlike']['irfs'],
                  evtype=c.config['selection']['evtype'],
                  srcmdl=path.join(self.workdir,
                                   '{1:s}_{0:02n}.xml'.format(i, srcmdl)))
        # CONSISTENCY FIX: use the instance logger rather than the root
        # ``logging`` module so output honors the configured log level.
        self.logger.info("Using srcmdl {0:s}".format(kw['srcmdl']))
        run_gtapp('gtdiffrsp', self.logger, kw, loglevel=loglevel)
    return
def _bin_data_lc(self, overwrite=False, dtime=0., **kwargs):
    """Run gtbin to produce a light-curve counts file for each component.

    Parameters
    ----------
    overwrite : bool
        Re-run gtbin even if the output light-curve file already exists.
    dtime : float
        If > 0, override the configured light-curve bin size (seconds).
    """
    if dtime > 0.:
        self.config['lightcurve']['binsz'] = dtime
    # CONSISTENCY FIX: use the instance logger rather than the root
    # ``logging`` module, matching the rest of the class.
    self.logger.info("Binning for LC: {0}".format(
        self.config['lightcurve']['binsz']))
    loglevel = kwargs.get('loglevel', self.loglevel)
    for i, c in enumerate(self.components):
        self.components[i]._files['lcmap'] = path.join(
            self.workdir, 'lcmap{0[file_suffix]:s}.fits'.format(c.config))
        kw = dict(algorithm='lc',
                  evfile=c.files['ft1'],
                  outfile=c.files['lcmap'],
                  scfile=c.data_files['scfile'],
                  emin=c.config['selection']['emin'],
                  emax=c.config['selection']['emax'],
                  tstart=c.config['selection']['tmin'],
                  tstop=c.config['selection']['tmax'],
                  chatter=self.config['logging']['chatter'])
        if isinstance(self.config['lightcurve']['binsz'], str):
            # A string bin size is interpreted as a time-bin definition file.
            kw['tbinalg'] = 'FILE'
            # BUG FIX: a stray trailing comma made this a one-element tuple,
            # which gtbin cannot interpret as a file path.
            kw['tbinfile'] = self.config['lightcurve']['binsz']
        else:
            kw['tbinalg'] = 'LIN'
            kw['dtime'] = self.config['lightcurve']['binsz']
        if not os.path.isfile(c.files['lcmap']) or overwrite:
            run_gtapp('gtbin', self.logger, kw, loglevel=loglevel)
        else:
            self.logger.debug('Skipping gtbin.')
    return
def main():
    """Command-line driver that runs gtltcube over one or more FT1 files."""
    usage = "usage: %(prog)s [options] "
    # BUG FIX: the description was copy-pasted from the gtselect/gtmktime
    # script; this script runs gtltcube.
    description = "Run gtltcube on one or more FT1 files."
    parser = argparse.ArgumentParser(usage=usage, description=description)
    add_lsf_args(parser)
    parser.add_argument('--zmax', default=100., type=float, help='')
    parser.add_argument('--dcostheta', default=0.025, type=float, help='')
    parser.add_argument('--binsz', default=1.0, type=float, help='')
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='',
                        required=True)
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')
    parser.add_argument('files', nargs='+', default=None,
                        help='List of directories in which the analysis will '
                        'be run.')
    args = parser.parse_args()
    args.outdir = os.path.abspath(args.outdir)
    args.scfile = os.path.abspath(args.scfile)
    mkdir(args.outdir)
    input_files = [[os.path.abspath(x)] for x in args.files]
    output_files = [os.path.join(args.outdir, os.path.basename(x))
                    for x in args.files]
    if args.batch:
        opts = copy.deepcopy(args.__dict__)
        opts.pop('files')
        opts.pop('batch')
        # Re-submit this script; rstrip('cd') maps a compiled-module path
        # (.pyc) back to the .py source.
        # IDIOM: the redundant ``{k: v for k, v in opts.items()}`` copy was
        # dropped — ``opts`` is already a fresh dict.
        submit_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                    input_files, output_files, opts)
        sys.exit(0)
    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')
    # Work inside a per-user scratch directory; results are copied back out.
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)
    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('outfile %s', args.outfile)
    try:
        for infiles, outfile in zip(input_files, output_files):
            logger.info('infiles %s', pprint.pformat(infiles))
            logger.info('outfile %s', outfile)
            kw = dict(evfile='list.txt', scfile=args.scfile,
                      outfile='ltcube.fits', binsz=args.binsz,
                      dcostheta=args.dcostheta, zmax=args.zmax)
            create_filelist(infiles, 'list.txt')
            staged_outfile = kw['outfile']
            run_gtapp('gtltcube', logger, kw)
            logger.info('cp %s %s', staged_outfile, outfile)
            shutil.copy(staged_outfile, outfile)
    finally:
        # ROBUSTNESS: always restore the cwd and remove the scratch area,
        # even if a gtltcube run raises.
        os.chdir(cwd)
        logger.info('Deleting %s', tmpdir)
        shutil.rmtree(tmpdir)
    logger.info('Done.')
def main():
    """Command-line driver that runs gtltcube over one or more FT1 files."""
    usage = "usage: %(prog)s [options] "
    # BUG FIX: the description was copy-pasted from the gtselect/gtmktime
    # script; this script runs gtltcube.
    description = "Run gtltcube on one or more FT1 files."
    parser = argparse.ArgumentParser(usage=usage, description=description)
    add_lsf_args(parser)
    parser.add_argument('--zmax', default=100., type=float, help='')
    parser.add_argument('--dcostheta', default=0.025, type=float, help='')
    parser.add_argument('--binsz', default=1.0, type=float, help='')
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='',
                        required=True)
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')
    parser.add_argument('files', nargs='+', default=None,
                        help='List of directories in which the analysis will '
                        'be run.')
    args = parser.parse_args()
    args.outdir = os.path.abspath(args.outdir)
    args.scfile = os.path.abspath(args.scfile)
    mkdir(args.outdir)
    input_files = [[os.path.abspath(x)] for x in args.files]
    output_files = [os.path.join(args.outdir, os.path.basename(x))
                    for x in args.files]
    if args.batch:
        opts = copy.deepcopy(args.__dict__)
        opts.pop('files')
        opts.pop('batch')
        # Re-submit this script; rstrip('cd') maps a compiled-module path
        # (.pyc) back to the .py source.
        # IDIOM: the redundant ``{k: v for k, v in opts.items()}`` copy was
        # dropped — ``opts`` is already a fresh dict.
        submit_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                    input_files, output_files, opts)
        sys.exit(0)
    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')
    # Work inside a per-user scratch directory; results are copied back out.
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)
    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('outfile %s', args.outfile)
    try:
        for infiles, outfile in zip(input_files, output_files):
            logger.info('infiles %s', pprint.pformat(infiles))
            logger.info('outfile %s', outfile)
            kw = dict(evfile='list.txt', scfile=args.scfile,
                      outfile='ltcube.fits', binsz=args.binsz,
                      dcostheta=args.dcostheta, zmax=args.zmax)
            create_filelist(infiles, 'list.txt')
            staged_outfile = kw['outfile']
            run_gtapp('gtltcube', logger, kw)
            logger.info('cp %s %s', staged_outfile, outfile)
            shutil.copy(staged_outfile, outfile)
    finally:
        # ROBUSTNESS: always restore the cwd and remove the scratch area,
        # even if a gtltcube run raises.
        os.chdir(cwd)
        logger.info('Deleting %s', tmpdir)
        shutil.rmtree(tmpdir)
    logger.info('Done.')
def main():
    """Command-line driver: run gtselect then gtmktime on one or more FT1
    files, either locally or by forwarding the work to batch jobs."""
    # Option names that map one-to-one onto gtselect / gtmktime parameters.
    gtselect_keys = ['tmin','tmax','emin','emax','zmax','evtype','evclass',
                     'phasemin','phasemax','convtype','rad','ra','dec']
    gtmktime_keys = ['roicut','filter']
    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files. " "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)
    add_lsf_args(parser)
    # Event-class-like keys are integers; all other cuts are floats.
    for k in gtselect_keys:
        if k in ['evtype','evclass','convtype']:
            parser.add_argument('--%s'%k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s'%k, default=None, type=float, help='')
    for k in gtmktime_keys:
        parser.add_argument('--%s'%k, default=None, type=str, help='')
    parser.add_argument('--rock_angle', default=None, type=float, help='')
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')
    parser.add_argument('files', nargs='+', default=None,
                        help='List of directories in which the analysis will '
                        'be run.')
    args = parser.parse_args()
    if args.merge:
        # Merge mode: all inputs feed a single job producing one output.
        if not args.outfile:
            raise Exception('No output file defined.')
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [os.path.abspath(args.outfile)]
    else:
        # One job per input file, with outputs mirrored into outdir.
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [os.path.join(args.outdir,os.path.basename(x))
                        for x in args.files]
    if args.batch:
        # Batch mode: forward all options (minus the positional file list
        # and the batch flag itself) to the submission layer, then exit.
        opts = vars(args).copy()
        del opts['files']
        del opts['batch']
        submit_jobs('fermipy-select', input_files, opts,
                    output_files, overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)
    logger = Logger.get(os.path.basename(__file__),None,logging.INFO)
    logger.info('Starting.')
    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)
    # Work inside a per-user scratch directory; results are copied back out.
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)
    logger.info('tmpdir %s',tmpdir)
    logger.info('outdir %s',args.outdir)
    logger.info('outfile %s',args.outfile)
    for infiles, outfile in zip(input_files,output_files):
        logger.info('infiles %s',pprint.pformat(infiles))
        logger.info('outfile %s',outfile)
        # --- gtselect: apply the event selection cuts ---
        kw = { k : args.__dict__[k] for k in gtselect_keys }
        if kw['emax'] is None:
            kw['emax'] = 1E6
        create_filelist(infiles,'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect',logger,kw)
        # --- gtmktime: apply the GTI / data-quality filter ---
        kw = { k : args.__dict__[k] for k in gtmktime_keys }
        if kw['roicut'] is None:
            kw['roicut'] = 'no'
        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
        if args.rock_angle is not None:
            # Optionally restrict on spacecraft rocking angle.
            kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s '%dict(rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
        staged_outfile = kw['outfile']
        run_gtapp('gtmktime',logger,kw)
        logger.info('cp %s %s',staged_outfile,outfile)
        shutil.copy(staged_outfile,outfile)
    os.chdir(cwd)
    logger.info('Deleting %s',tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
def main():
    """Command-line driver: run gtselect then gtmktime on FT1 files,
    optionally splitting the input list into chunks and dispatching
    batch jobs for each chunk."""
    # Option names that map one-to-one onto gtselect / gtmktime parameters.
    gtselect_keys = ['tmin', 'tmax', 'emin', 'emax', 'zmax', 'evtype',
                     'evclass', 'phasemin', 'phasemax', 'convtype',
                     'rad', 'ra', 'dec']
    gtmktime_keys = ['roicut', 'filter']
    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files. " \
        "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)
    add_lsf_args(parser)
    # Event-class-like keys are integers; all other cuts are floats.
    for k in gtselect_keys:
        if k in ['evtype', 'evclass', 'convtype']:
            parser.add_argument('--%s' % k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s' % k, default=None, type=float, help='')
    for k in gtmktime_keys:
        parser.add_argument('--%s' % k, default=None, type=str, help='')
    parser.add_argument('--rock_angle', default=None, type=float, help='')
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--output', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument(
        '--merge', default=False, action='store_true',
        help='Merge input FT1 files into N files where N is determined '
        'by files_per_split.')
    parser.add_argument('--files_per_split', default=100, type=int,
                        help='Set the number of files to combine in each '
                        'split of the input file list.')
    parser.add_argument('--file_idx_min', default=None, type=int,
                        help='Set the number of files to assign to '
                        'each batch job.')
    parser.add_argument('--file_idx_max', default=None, type=int,
                        help='Set the number of files to assign to '
                        'each batch job.')
    parser.add_argument('files', nargs='+', default=None,
                        help='List of files.')
    args = parser.parse_args()
    batch = vars(args).pop('batch')
    files = vars(args).pop('files')
    args.outdir = os.path.abspath(args.outdir)
    files = [os.path.abspath(f) for f in files]
    ft1_files = get_files(files, ['.fit', '.fits'])
    # BUG FIX: the pattern was a non-raw string ('^root\:\/\/') whose
    # escapes are invalid and a SyntaxWarning on modern Python; compile
    # the raw-string pattern once and reuse it below.
    xrootd_re = re.compile(r'^root://')
    for i, f in enumerate(ft1_files):
        # Leave remote (xrootd) URLs untouched; absolutize local paths.
        if xrootd_re.search(f) is None:
            ft1_files[i] = os.path.abspath(f)
    input_files = []
    output_files = []
    files_idx_min = []
    files_idx_max = []
    opts = []
    if args.file_idx_min is not None and args.file_idx_max is not None:
        # Worker invocation: process only the assigned slice of the list.
        files_idx_min = [args.file_idx_min]
        files_idx_max = [args.file_idx_max]
        input_files = [files]
        output_files = [args.output]
    elif args.merge:
        # Driver invocation: split the list into chunks of files_per_split
        # and build one job description per chunk.
        if not args.output:
            raise Exception('No output file defined.')
        nfiles = len(ft1_files)
        # (unused 'njob' computation removed)
        for ijob, i in enumerate(range(0, nfiles, args.files_per_split)):
            if args.outdir is not None:
                mkdir(args.outdir)
                outdir = os.path.abspath(args.outdir)
            else:
                outdir = os.path.dirname(os.path.dirname(args.output))
            outfile = os.path.splitext(os.path.basename(args.output))[0]
            outfile += '_%03i.fits' % (ijob)
            outfile = os.path.join(outdir, outfile)
            input_files += [files]
            output_files += [outfile]
            files_idx_min += [i]
            files_idx_max += [i + args.files_per_split]
            opts += [vars(args).copy()]
            opts[-1]['output'] = outfile
            opts[-1]['file_idx_min'] = i
            opts[-1]['file_idx_max'] = i + args.files_per_split
    else:
        # One job per input file, outputs mirrored into outdir.
        input_files = ft1_files
        files_idx_min = [i for i in range(len(ft1_files))]
        files_idx_max = [i + 1 for i in range(len(ft1_files))]
        output_files = [os.path.join(args.outdir, os.path.basename(x))
                        for x in ft1_files]
        opts = [vars(args).copy() for x in ft1_files]
    if batch:
        submit_jobs('fermipy-select', input_files, opts, output_files,
                    overwrite=args.overwrite, dry_run=args.dry_run)
        sys.exit(0)
    logger = Logger.configure(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')
    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)
    # Work inside a per-user scratch directory; results are copied back out.
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)
    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('output %s', args.output)
    for infiles, outfile, idx_min, idx_max in zip(input_files, output_files,
                                                  files_idx_min,
                                                  files_idx_max):
        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)
        infiles = get_files(infiles, ['.fit', '.fits'])
        if idx_min is not None:
            infiles = infiles[idx_min:idx_max]
        # Stage remote (xrootd) files into the scratch directory.
        for i, f in enumerate(infiles):
            if xrootd_re.search(f) is None:
                continue
            os.system('xrdcp %s %s' % (f, f.split('/')[-1]))
            infiles[i] = os.path.join(tmpdir, f.split('/')[-1])
        # --- gtselect: apply the event selection cuts ---
        kw = {k: args.__dict__[k] for k in gtselect_keys}
        if kw['emax'] is None:
            kw['emax'] = 1E6
        create_filelist(infiles, 'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect', logger, kw)
        # --- gtmktime: apply the GTI / data-quality filter ---
        kw = {k: args.__dict__[k] for k in gtmktime_keys}
        if kw['roicut'] is None:
            kw['roicut'] = 'no'
        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
        if args.rock_angle is not None:
            # Optionally restrict on spacecraft rocking angle.
            kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s ' % dict(
                rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
        staged_outfile = kw['outfile']
        run_gtapp('gtmktime', logger, kw)
        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)
    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
def main():
    """Command-line driver: run gtselect then gtmktime on one or more FT1
    files, either locally or by dispatching one batch job per input."""
    # Option names that map one-to-one onto gtselect / gtmktime parameters.
    gtselect_keys = ['tmin','tmax','emin','emax','zmax','evtype','evclass',
                     'phasemin','phasemax','convtype','rad','ra','dec']
    gtmktime_keys = ['roicut','filter']
    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files. " "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)
    add_lsf_args(parser)
    # Event-class-like keys are integers; all other cuts are floats.
    for k in gtselect_keys:
        if k in ['evtype','evclass','convtype']:
            parser.add_argument('--%s'%k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s'%k, default=None, type=float, help='')
    for k in gtmktime_keys:
        parser.add_argument('--%s'%k, default=None, type=str, help='')
    parser.add_argument('--rock_angle', default=None, type=float, help='')
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')
    parser.add_argument('files', nargs='+', default=None,
                        help='List of directories in which the analysis will '
                        'be run.')
    args = parser.parse_args()
    if args.merge:
        # Merge mode: all inputs feed a single job producing one output.
        if not args.outfile:
            raise Exception('No output file defined.')
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [os.path.abspath(args.outfile)]
    else:
        # One job per input file, with outputs mirrored into outdir.
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [os.path.join(args.outdir,os.path.basename(x))
                        for x in args.files]
    if args.batch:
        # Batch mode: re-dispatch this script once per input file with
        # batch=False so the children run the local branch below.
        batch_opts = {'W' : args.time, 'R' : args.resources,
                      'oo' : 'batch.log' }
        args.batch = False
        for infile, outfile in zip(input_files,output_files):
            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.',outfile)
                continue
            # Per-job log file named after the output file.
            batch_opts['oo'] = os.path.splitext(outfile)[0] + '_select.log'
            # rstrip('cd') maps a compiled-module path (.pyc) back to .py.
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          infile, args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)
    logger = Logger.get(os.path.basename(__file__),None,logging.INFO)
    logger.info('Starting.')
    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)
    # Work inside a per-user scratch directory; results are copied back out.
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)
    logger.info('tmpdir %s',tmpdir)
    logger.info('outdir %s',args.outdir)
    logger.info('outfile %s',args.outfile)
    for infiles, outfile in zip(input_files,output_files):
        logger.info('infiles %s',pprint.pformat(infiles))
        logger.info('outfile %s',outfile)
        # --- gtselect: apply the event selection cuts ---
        kw = { k : args.__dict__[k] for k in gtselect_keys }
        if kw['emax'] is None:
            kw['emax'] = 1E6
        create_filelist(infiles,'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect',logger,kw)
        # --- gtmktime: apply the GTI / data-quality filter ---
        kw = { k : args.__dict__[k] for k in gtmktime_keys }
        if kw['roicut'] is None:
            kw['roicut'] = 'no'
        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
        if args.rock_angle is not None:
            # Optionally restrict on spacecraft rocking angle.
            kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s '%dict(rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
        staged_outfile = kw['outfile']
        run_gtapp('gtmktime',logger,kw)
        logger.info('cp %s %s',staged_outfile,outfile)
        shutil.copy(staged_outfile,outfile)
    os.chdir(cwd)
    logger.info('Deleting %s',tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
def main():
    """Command-line driver: run gtselect then gtmktime on FT1 files,
    optionally splitting the input list into chunks and dispatching
    batch jobs for each chunk."""
    # Option names that map one-to-one onto gtselect / gtmktime parameters.
    gtselect_keys = ['tmin', 'tmax', 'emin', 'emax', 'zmax', 'evtype',
                     'evclass', 'phasemin', 'phasemax', 'convtype',
                     'rad', 'ra', 'dec']
    gtmktime_keys = ['roicut', 'filter']
    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files. " \
        "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)
    add_lsf_args(parser)
    # Event-class-like keys are integers; all other cuts are floats.
    for k in gtselect_keys:
        if k in ['evtype', 'evclass', 'convtype']:
            parser.add_argument('--%s' % k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s' % k, default=None, type=float, help='')
    for k in gtmktime_keys:
        parser.add_argument('--%s' % k, default=None, type=str, help='')
    parser.add_argument('--rock_angle', default=None, type=float, help='')
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--output', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument(
        '--merge', default=False, action='store_true',
        help='Merge input FT1 files into N files where N is determined '
        'by files_per_split.')
    parser.add_argument('--files_per_split', default=100, type=int,
                        help='Set the number of files to combine in each '
                        'split of the input file list.')
    parser.add_argument('--file_idx_min', default=None, type=int,
                        help='Set the number of files to assign to '
                        'each batch job.')
    parser.add_argument('--file_idx_max', default=None, type=int,
                        help='Set the number of files to assign to '
                        'each batch job.')
    parser.add_argument('files', nargs='+', default=None,
                        help='List of files.')
    args = parser.parse_args()
    batch = vars(args).pop('batch')
    files = vars(args).pop('files')
    args.outdir = os.path.abspath(args.outdir)
    files = [os.path.abspath(f) for f in files]
    ft1_files = get_files(files, ['.fit', '.fits'])
    # BUG FIX: the pattern was a non-raw string ('^root\:\/\/') whose
    # escapes are invalid and a SyntaxWarning on modern Python; compile
    # the raw-string pattern once and reuse it below.
    xrootd_re = re.compile(r'^root://')
    for i, f in enumerate(ft1_files):
        # Leave remote (xrootd) URLs untouched; absolutize local paths.
        if xrootd_re.search(f) is None:
            ft1_files[i] = os.path.abspath(f)
    input_files = []
    output_files = []
    files_idx_min = []
    files_idx_max = []
    opts = []
    if args.file_idx_min is not None and args.file_idx_max is not None:
        # Worker invocation: process only the assigned slice of the list.
        files_idx_min = [args.file_idx_min]
        files_idx_max = [args.file_idx_max]
        input_files = [files]
        output_files = [args.output]
    elif args.merge:
        # Driver invocation: split the list into chunks of files_per_split
        # and build one job description per chunk.
        if not args.output:
            raise Exception('No output file defined.')
        nfiles = len(ft1_files)
        # (unused 'njob' computation removed)
        for ijob, i in enumerate(range(0, nfiles, args.files_per_split)):
            if args.outdir is not None:
                mkdir(args.outdir)
                outdir = os.path.abspath(args.outdir)
            else:
                outdir = os.path.dirname(os.path.dirname(args.output))
            outfile = os.path.splitext(os.path.basename(args.output))[0]
            outfile += '_%03i.fits' % (ijob)
            outfile = os.path.join(outdir, outfile)
            input_files += [files]
            output_files += [outfile]
            files_idx_min += [i]
            files_idx_max += [i + args.files_per_split]
            opts += [vars(args).copy()]
            opts[-1]['output'] = outfile
            opts[-1]['file_idx_min'] = i
            opts[-1]['file_idx_max'] = i + args.files_per_split
    else:
        # One job per input file, outputs mirrored into outdir.
        input_files = ft1_files
        files_idx_min = [i for i in range(len(ft1_files))]
        files_idx_max = [i + 1 for i in range(len(ft1_files))]
        output_files = [os.path.join(args.outdir, os.path.basename(x))
                        for x in ft1_files]
        opts = [vars(args).copy() for x in ft1_files]
    if batch:
        submit_jobs('fermipy-select', input_files, opts, output_files,
                    overwrite=args.overwrite, dry_run=args.dry_run)
        sys.exit(0)
    logger = Logger.configure(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')
    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)
    # Work inside a per-user scratch directory; results are copied back out.
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)
    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('output %s', args.output)
    for infiles, outfile, idx_min, idx_max in zip(input_files, output_files,
                                                  files_idx_min,
                                                  files_idx_max):
        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)
        infiles = get_files(infiles, ['.fit', '.fits'])
        if idx_min is not None:
            infiles = infiles[idx_min:idx_max]
        # Stage remote (xrootd) files into the scratch directory.
        for i, f in enumerate(infiles):
            if xrootd_re.search(f) is None:
                continue
            os.system('xrdcp %s %s' % (f, f.split('/')[-1]))
            infiles[i] = os.path.join(tmpdir, f.split('/')[-1])
        # --- gtselect: apply the event selection cuts ---
        kw = {k: args.__dict__[k] for k in gtselect_keys}
        if kw['emax'] is None:
            kw['emax'] = 1E6
        create_filelist(infiles, 'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect', logger, kw)
        # --- gtmktime: apply the GTI / data-quality filter ---
        kw = {k: args.__dict__[k] for k in gtmktime_keys}
        if kw['roicut'] is None:
            kw['roicut'] = 'no'
        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
        if args.rock_angle is not None:
            # Optionally restrict on spacecraft rocking angle.
            kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s ' % dict(
                rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
        staged_outfile = kw['outfile']
        run_gtapp('gtmktime', logger, kw)
        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)
    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')