Example 1
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run validation analysis"
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    parser.add_argument('--config',
                        default=None,
                        type=str,
                        required=True,
                        help='Configuration file.')
    parser.add_argument(
        '--dataset',
        default=None,
        type=str,
        help='Key name of data set to analyze.  If None then all data '
        'sets will be analyzed.')
    parser.add_argument('--outdir',
                        default=None,
                        type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile',
                        default=None,
                        type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--mode', default='fill', type=str)
    parser.add_argument('--overwrite', default=False, action='store_true')

    args = parser.parse_args()

    # if args.outdir is not None:
    #    args.outdir = os.path.abspath(args.outdir)
    #    mkdir(args.outdir)

    # if args.mode == 'fill':
    #    input_files = [[os.path.abspath(x)] for x in args.files]
    #    output_files = [make_outpath(x,args.outdir) for x in args.files]
    # elif args.mode == 'collect':
    #    input_files = [[os.path.abspath(x) for x in args.files]]
    #    output_files = [args.outfile]

    # print(input_files)
    # print(output_files)

    # yaml.load without a Loader is an error on modern PyYAML; safe_load
    # is sufficient for a plain configuration file.
    config = yaml.safe_load(open(args.config))

    if args.batch:

        # Iterate the 'datasets' section (not the top-level config, which
        # also holds 'scfile'); one batch job per data set.
        input_files = [[]] * len(config['datasets'])
        output_files = [v['outfile'] for k, v in config['datasets'].items()]

        opts = []
        for k, v in config['datasets'].items():
            o = vars(args).copy()
            del o['batch']
            o['dataset'] = k
            opts += [o]

        submit_jobs('fermipy-validate',
                    input_files,
                    opts,
                    output_files,
                    overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')

    for k, v in config['datasets'].items():

        if args.dataset is not None and k != args.dataset:
            continue

        if v['data_type'] == 'agn':
            val = AGNValidator(config['scfile'], 100.)
        elif v['data_type'] == 'psr':
            val = PSRValidator(config['scfile'], 100.)
        elif v['data_type'] == 'ridge':
            val = GRValidator(config['scfile'], 100.)
        else:
            raise Exception('Unknown data type {}'.format(v['data_type']))

        infiles = glob.glob(v['files'])

        for f in infiles:
            print('processing', f)
            val.process(f)

        val.calc_eff()
        if v['data_type'] in ['agn', 'psr']:
            val.calc_containment()

        print('write', v['outfile'])
        val.write(v['outfile'])

    logger.info('Done.')
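
For reference, a sketch of the configuration structure this script expects, inferred from the keys accessed above (a top-level 'scfile' plus a 'datasets' map whose entries carry 'data_type', 'files', and 'outfile'); all names and paths below are hypothetical. Shown as the Python dict that yaml.safe_load would return:

config = {
    'scfile': '/path/to/ft2.fits',         # FT2 file shared by all data sets
    'datasets': {
        'vela': {
            'data_type': 'psr',            # one of: agn, psr, ridge
            'files': '/data/vela/*.fits',  # glob pattern of input files
            'outfile': 'vela_validate.fits',
        },
        'agn_sample': {
            'data_type': 'agn',
            'files': '/data/agn/*.fits',
            'outfile': 'agn_validate.fits',
        },
    },
}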
Example 2
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run validation analysis"
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    parser.add_argument('--outdir',
                        default=None,
                        type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile',
                        default=None,
                        type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--data_type', default='agn', type=str)
    parser.add_argument('--mode', default='fill', type=str)
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('files',
                        nargs='+',
                        default=None,
                        help='List of input files.')

    args = parser.parse_args()

    if args.outdir is not None:
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)

    if args.mode == 'fill':
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [make_outpath(x, args.outdir) for x in args.files]
    elif args.mode == 'collect':
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [args.outfile]
    else:
        raise ValueError('Unrecognized mode: {}'.format(args.mode))

    print(input_files)
    print(output_files)

    if args.batch:

        batch_opts = {'W': args.time, 'R': args.resources, 'oo': 'batch.log'}
        args.batch = False
        for infile, outfile in zip(input_files, output_files):

            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.', outfile)
                continue

            batch_opts['oo'] = os.path.splitext(outfile)[0] + '.log'
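            # Note: rstrip('cd') trims a trailing 'c' so a cached '.pyc' path
            # resolves back to the '.py' source for the batch command line.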
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          infile,
                          args,
                          batch_opts,
                          dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')

    for infiles, outfile in zip(input_files, output_files):

        if args.data_type == 'agn':
            acc = AGNAccumulator()
        else:
            raise ValueError('Unrecognized data type: {}'.format(args.data_type))

        for f in infiles:
            print('process', f)
            acc.process(f)

        print('write', outfile)
        acc.write(outfile)

    logger.info('Done.')
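
A minimal sketch of the two job layouts built above (file names hypothetical): mode 'fill' fans out one single-file job per input, while mode 'collect' funnels every input into a single merge job.

files = ['a.fits', 'b.fits']

# fill: one job per input file
input_files = [[f] for f in files]   # [['a.fits'], ['b.fits']]

# collect: one job over the full list
input_files = [files]                # [['a.fits', 'b.fits']]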
Example 3
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run tempo2 application on one or more FT1 files."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    parser.add_argument('--par_file',
                        default=None,
                        type=str,
                        required=True,
                        help='Ephemeris file')

    parser.add_argument('--scfile',
                        default=None,
                        type=str,
                        required=True,
                        help='FT2 file')

    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory. If unset, outputs '
                        'are written alongside each input file.')

    parser.add_argument('--phase_colname',
                        default='PULSE_PHASE',
                        type=str,
                        help='Set the name of the phase column.')

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')

    parser.add_argument('files',
                        nargs='+',
                        default=None,
                        help='List of input FT1 files.')

    args = parser.parse_args()

    if args.outdir is None:
        outdirs = [os.path.dirname(os.path.abspath(x)) for x in args.files]
    else:
        outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        outdirs = [outdir for x in args.files]

    input_files = [[os.path.abspath(x)] for x in args.files]
    output_files = [
        os.path.join(y, os.path.basename(x))
        for x, y in zip(args.files, outdirs)
    ]

    if args.batch:

        opts = vars(args).copy()
        del opts['files']
        del opts['batch']
        submit_jobs(
            'fermipy-run-tempo',  # 'python ' + os.path.abspath(__file__.rstrip('cd')),
            input_files,
            opts,
            output_files,
            overwrite=args.overwrite,
            dry_run=args.dry_run)

        # batch_opts = {'W' : args.time, 'R' : args.resources,
        #              'oo' : 'batch.log' }
        # args.batch=False
        # for infile, outfile in zip(input_files,output_files):
        #
        #    if os.path.isfile(outfile) and not args.overwrite:
        #        print('Output file exists, skipping.',outfile)
        #        continue
        #
        #    batch_opts['oo'] = os.path.join(outdir,
        #                                    os.path.splitext(outfile)[0] +
        #                                    '_tempo2.log')
        #    dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
        #                  [infile], args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(__file__, None, logging.INFO)

    par_file = os.path.abspath(args.par_file)
    ft2_file = os.path.abspath(args.scfile)

    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')

    logger.info('tmpdir %s', tmpdir)
    os.chdir(tmpdir)

    for infiles, outfile in zip(input_files, output_files):

        infile = infiles[0]

        staged_infile = os.path.join(tmpdir, os.path.basename(infile))
        logFile = os.path.splitext(infile)[0] + '_tempo2.log'

        print('cp %s %s' % (infile, staged_infile))
        os.system('cp %s %s' % (infile, staged_infile))

        if re.search(r'\.root', infile):
            phase_merit(staged_infile, outfile, logFile, ft2_file, par_file,
                        args.dry_run)
        elif re.search(r'\.fits?', infile):
            phase_ft1(staged_infile, outfile, logFile, ft2_file, par_file,
                      args.dry_run)
        else:
            print('Unrecognized file extension: ', infile)

    os.chdir(cwd)
    shutil.rmtree(tmpdir)
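
A quick standalone check of the extension dispatch above; the file names are hypothetical:

import re

for name in ['events.root', 'photons.fit', 'photons.fits', 'notes.txt']:
    if re.search(r'\.root', name):
        print(name, '-> phase_merit (merit/ROOT file)')
    elif re.search(r'\.fits?', name):
        print(name, '-> phase_ft1 (FT1 file)')
    else:
        print(name, '-> unrecognized')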
Example 4
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run validation analysis"
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    parser.add_argument('--config', default=None, type=str, required=True,
                        help='Configuration file.')
    parser.add_argument('--dataset', default=None, type=str,
                        help='Key name of data set to analyze.  If None then all data '
                        'sets will be analyzed.')
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--mode', default='fill', type=str)
    parser.add_argument('--overwrite', default=False, action='store_true')

    args = parser.parse_args()

    # if args.outdir is not None:
    #    args.outdir = os.path.abspath(args.outdir)
    #    mkdir(args.outdir)

    # if args.mode == 'fill':
    #    input_files = [[os.path.abspath(x)] for x in args.files]
    #    output_files = [make_outpath(x,args.outdir) for x in args.files]
    # elif args.mode == 'collect':
    #    input_files = [[os.path.abspath(x) for x in args.files]]
    #    output_files = [args.outfile]

    # print(input_files)
    # print(output_files)

    # yaml.load without a Loader is an error on modern PyYAML; safe_load
    # is sufficient for a plain configuration file.
    config = yaml.safe_load(open(args.config))

    if args.batch:

        # Iterate the 'datasets' section (not the top-level config, which
        # also holds 'scfile'); one batch job per data set.
        input_files = [[]] * len(config['datasets'])
        output_files = [v['outfile'] for k, v in config['datasets'].items()]

        opts = []
        for k, v in config['datasets'].items():
            o = vars(args).copy()
            del o['batch']
            o['dataset'] = k
            opts += [o]

        submit_jobs('fermipy-validate',
                    input_files, opts, output_files, overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')

    for k, v in config['datasets'].items():

        if args.dataset is not None and k != args.dataset:
            continue

        if v['data_type'] == 'agn':
            val = AGNValidator(config['scfile'], 100.)
        elif v['data_type'] == 'psr':
            val = PSRValidator(config['scfile'], 100.)
        elif v['data_type'] == 'ridge':
            val = GRValidator(config['scfile'], 100.)
        else:
            raise Exception('Unknown data type {}'.format(v['data_type']))

        infiles = glob.glob(v['files'])

        for f in infiles:
            print('processing', f)
            val.process(f)

        val.calc_eff()
        if v['data_type'] in ['agn', 'psr']:
            val.calc_containment()

        print('write', v['outfile'])
        val.write(v['outfile'])

    logger.info('Done.')
Example 5
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    parser.add_argument('--zmax', default=100., type=float, help='')
    parser.add_argument('--dcostheta', default=0.025, type=float, help='')
    parser.add_argument('--binsz', default=1.0, type=float, help='')
    parser.add_argument('--outdir',
                        default=None,
                        type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile',
                        default=None,
                        type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile',
                        default=None,
                        type=str,
                        help='',
                        required=True)

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge',
                        default=False,
                        action='store_true',
                        help='Merge input FT1 files into a single file.')

    parser.add_argument('files',
                        nargs='+',
                        default=None,
                        help='List of input FT1 files.')

    args = parser.parse_args()

    args.outdir = os.path.abspath(args.outdir)
    args.scfile = os.path.abspath(args.scfile)
    mkdir(args.outdir)
    input_files = [[os.path.abspath(x)] for x in args.files]
    output_files = [
        os.path.join(args.outdir, os.path.basename(x)) for x in args.files
    ]

    if args.batch:
        opts = copy.deepcopy(args.__dict__)
        opts.pop('files')
        opts.pop('batch')
        submit_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                    input_files, opts, output_files,
                    overwrite=args.overwrite, dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('outfile %s', args.outfile)

    for infiles, outfile in zip(input_files, output_files):

        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)

        kw = dict(evfile='list.txt',
                  scfile=args.scfile,
                  outfile='ltcube.fits',
                  binsz=args.binsz,
                  dcostheta=args.dcostheta,
                  zmax=args.zmax)

        create_filelist(infiles, 'list.txt')
        staged_outfile = kw['outfile']
        run_gtapp('gtltcube', logger, kw)
        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)

    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
Example 6
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    parser.add_argument('--zmax', default=100., type=float, help='')
    parser.add_argument('--dcostheta', default=0.025, type=float, help='')
    parser.add_argument('--binsz', default=1.0, type=float, help='')
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='',
                        required=True)

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of input FT1 files.')

    args = parser.parse_args()

    args.outdir = os.path.abspath(args.outdir)
    args.scfile = os.path.abspath(args.scfile)
    mkdir(args.outdir)
    input_files = [[os.path.abspath(x)] for x in args.files]
    output_files = [os.path.join(args.outdir, os.path.basename(x))
                    for x in args.files]

    if args.batch:
        opts = copy.deepcopy(args.__dict__)
        opts.pop('files')
        opts.pop('batch')
        submit_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                    input_files, opts, output_files,
                    overwrite=args.overwrite, dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('outfile %s', args.outfile)

    for infiles, outfile in zip(input_files, output_files):

        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)

        kw = dict(evfile='list.txt',
                  scfile=args.scfile,
                  outfile='ltcube.fits',
                  binsz=args.binsz,
                  dcostheta=args.dcostheta,
                  zmax=args.zmax)

        create_filelist(infiles, 'list.txt')
        staged_outfile = kw['outfile']
        run_gtapp('gtltcube', logger, kw)
        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)

    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
Example 7
def main():
    
    usage = "usage: %(prog)s [options] "
    description = "Run tempo2 application on one or more FT1 files."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    
    parser.add_argument('--par_file', default=None, type=str, required=True,
                        help='Ephemeris file')

    parser.add_argument('--scfile', default=None, type=str, required=True,
                        help='FT2 file')

    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory. If unset, outputs '
                             'are written alongside each input file.')
    
    parser.add_argument('--phase_colname', default='PULSE_PHASE',
                        type=str, help='Set the name of the phase column.')
    
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of input FT1 files.')
    
    args = parser.parse_args()

    if args.outdir is None:
        outdirs = [os.path.dirname(os.path.abspath(x)) for x in args.files]
    else:
        outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        outdirs = [outdir for x in args.files]

    input_files = [os.path.abspath(x) for x in args.files]
    output_files = [os.path.join(y,os.path.basename(x))
                    for x, y in zip(args.files,outdirs)]
    
    if args.batch:

        batch_opts = {'W': args.time, 'R': args.resources, 'oo': 'batch.log'}
        args.batch = False
        for infile, outfile in zip(input_files, output_files):

            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.', outfile)
                continue

            # outfile already carries its directory, so derive the log path
            # from it directly (outdir is undefined when --outdir is unset).
            batch_opts['oo'] = os.path.splitext(outfile)[0] + '_tempo2.log'
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          [infile], args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(__file__,None,logging.INFO)
        
    par_file = os.path.abspath(args.par_file)
    ft2_file = os.path.abspath(args.scfile)
    
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')

    logger.info('tmpdir %s',tmpdir)
    os.chdir(tmpdir)

    for infile, outfile in zip(input_files, output_files):

        staged_infile = os.path.join(tmpdir, os.path.basename(infile))
        logFile = os.path.splitext(infile)[0] + '_tempo2.log'

        print('cp %s %s' % (infile, staged_infile))
        os.system('cp %s %s' % (infile, staged_infile))

        if re.search(r'\.root', infile):
            phase_merit(staged_infile, outfile, logFile, ft2_file, par_file,
                        args.dry_run)
        elif re.search(r'\.fits?', infile):
            phase_ft1(staged_infile, outfile, logFile, ft2_file, par_file,
                      args.dry_run)
        else:
            print('Unrecognized file extension: ', infile)

    os.chdir(cwd)
    shutil.rmtree(tmpdir)
Example 8
def main():
    
    usage = "usage: %(prog)s [options] "
    description = "Run validation analysis"
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--data_type', default='agn', type=str)
    parser.add_argument('--mode', default='fill', type=str)
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('files', nargs='+', default=None,
                        help='List of input files.')
    
    args = parser.parse_args()
    
    if args.outdir is not None:
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)

    if args.mode == 'fill':
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [make_outpath(x, args.outdir) for x in args.files]
    elif args.mode == 'collect':
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [args.outfile]
    else:
        raise ValueError('Unrecognized mode: {}'.format(args.mode))

    print(input_files)
    print(output_files)
        
    if args.batch:

        batch_opts = {'W' : args.time, 'R' : args.resources,
                      'oo' : 'batch.log' }
        args.batch=False
        for infile, outfile in zip(input_files,output_files):
            
            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.',outfile)
                continue
            
            batch_opts['oo'] = os.path.splitext(outfile)[0] + '.log'
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          infile, args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__),None,logging.INFO)
    logger.info('Starting.')

    for infiles, outfile in zip(input_files, output_files):

        if args.data_type == 'agn':
            acc = AGNAccumulator()
        else:
            raise ValueError('Unrecognized data type: {}'.format(args.data_type))

        for f in infiles:
            print('process', f)
            acc.process(f)

        print('write', outfile)
        acc.write(outfile)
        
    logger.info('Done.')
Example 9
def main():

    gtselect_keys = ['tmin','tmax','emin','emax','zmax','evtype','evclass',
                     'phasemin','phasemax','convtype','rad','ra','dec']

    gtmktime_keys = ['roicut','filter']
    
    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    
    for k in gtselect_keys:

        if k in ['evtype','evclass','convtype']:
            parser.add_argument('--%s'%k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s'%k, default=None, type=float, help='')

    for k in gtmktime_keys:
        parser.add_argument('--%s'%k, default=None, type=str, help='')
        
    parser.add_argument('--rock_angle', default=None, type=float, help='')
        
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')
        
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of input FT1 files.')
    
    args = parser.parse_args()

    if args.merge:
        if not args.outfile:
            raise Exception('No output file defined.')        
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [os.path.abspath(args.outfile)]
    else:
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [os.path.join(args.outdir,os.path.basename(x)) for x in args.files]

    if args.batch:

        opts = vars(args).copy()
        del opts['files']
        del opts['batch']
        submit_jobs('fermipy-select',
                    input_files, opts, output_files, overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)


    logger = Logger.get(os.path.basename(__file__),None,logging.INFO)

    logger.info('Starting.')
    
    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)
    
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s',tmpdir)
    logger.info('outdir %s',args.outdir)
    logger.info('outfile %s',args.outfile)
    
    for infiles, outfile in zip(input_files,output_files):

        logger.info('infiles %s',pprint.pformat(infiles))
        logger.info('outfile %s',outfile)
        
        kw = { k : args.__dict__[k] for k in gtselect_keys }
        if kw['emax'] is None:
            kw['emax'] = 1E6

        create_filelist(infiles,'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect',logger,kw)

        kw = { k : args.__dict__[k] for k in gtmktime_keys }
        if kw['roicut'] is None:
            kw['roicut'] = 'no'
        
        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
            if args.rock_angle is not None:
                kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s '%dict(rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
            staged_outfile = kw['outfile']
            run_gtapp('gtmktime',logger,kw)
                            
        logger.info('cp %s %s',staged_outfile,outfile)
        shutil.copy(staged_outfile,outfile)
        
    os.chdir(cwd)
    logger.info('Deleting %s',tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
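
As a standalone illustration of the gtmktime cut assembled above, the filter string produced for a hypothetical rock_angle of 52 degrees when no explicit filter is given:

rock_angle = 52.0  # hypothetical value of --rock_angle
filt = 'DATA_QUAL==1 && LAT_CONFIG==1'
if rock_angle is not None:
    filt += ' && ABS(ROCK_ANGLE)<%(rock)s ' % dict(rock=rock_angle)
print(filt)
# -> DATA_QUAL==1 && LAT_CONFIG==1 && ABS(ROCK_ANGLE)<52.0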
Example 10
def main():

    gtselect_keys = [
        'tmin', 'tmax', 'emin', 'emax', 'zmax', 'evtype', 'evclass',
        'phasemin', 'phasemax', 'convtype', 'rad', 'ra', 'dec'
    ]

    gtmktime_keys = ['roicut', 'filter']

    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    for k in gtselect_keys:

        if k in ['evtype', 'evclass', 'convtype']:
            parser.add_argument('--%s' % k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s' % k, default=None, type=float, help='')

    for k in gtmktime_keys:
        parser.add_argument('--%s' % k, default=None, type=str, help='')

    parser.add_argument('--rock_angle', default=None, type=float, help='')

    parser.add_argument('--outdir',
                        default=None,
                        type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--output',
                        default=None,
                        type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument(
        '--merge',
        default=False,
        action='store_true',
        help='Merge input FT1 files into N files where N is determined '
        'by files_per_split.')

    parser.add_argument('--files_per_split',
                        default=100,
                        type=int,
                        help='Set the number of files to combine in each '
                        'split of the input file list.')

    parser.add_argument('--file_idx_min',
                        default=None,
                        type=int,
                        help='Index of the first file from the input list '
                        'to process in this job.')

    parser.add_argument('--file_idx_max',
                        default=None,
                        type=int,
                        help='Index one past the last file from the input '
                        'list to process in this job.')

    parser.add_argument('files',
                        nargs='+',
                        default=None,
                        help='List of files.')

    args = parser.parse_args()

    batch = vars(args).pop('batch')
    files = vars(args).pop('files')
    args.outdir = os.path.abspath(args.outdir)
    files = [os.path.abspath(f) for f in files]

    ft1_files = get_files(files, ['.fit', '.fits'])
    for i, f in enumerate(ft1_files):
        if re.search(r'^root://', f) is None:
            ft1_files[i] = os.path.abspath(f)

    input_files = []
    output_files = []
    files_idx_min = []
    files_idx_max = []
    opts = []

    if args.file_idx_min is not None and args.file_idx_max is not None:

        files_idx_min = [args.file_idx_min]
        files_idx_max = [args.file_idx_max]
        input_files = [files]
        output_files = [args.output]

    elif args.merge:
        if not args.output:
            raise Exception('No output file defined.')

        nfiles = len(ft1_files)
        njob = int(np.ceil(nfiles / float(args.files_per_split)))
        for ijob, i in enumerate(range(0, nfiles, args.files_per_split)):

            if args.outdir is not None:
                mkdir(args.outdir)
                outdir = os.path.abspath(args.outdir)
            else:
                outdir = os.path.dirname(os.path.dirname(args.output))

            outfile = os.path.splitext(os.path.basename(args.output))[0]
            outfile += '_%03i.fits' % (ijob)
            outfile = os.path.join(outdir, outfile)
            input_files += [files]
            output_files += [outfile]
            files_idx_min += [i]
            files_idx_max += [i + args.files_per_split]
            opts += [vars(args).copy()]
            opts[-1]['output'] = outfile
            opts[-1]['file_idx_min'] = i
            opts[-1]['file_idx_max'] = i + args.files_per_split

    else:
        input_files = ft1_files
        files_idx_min = [i for i in range(len(ft1_files))]
        files_idx_max = [i + 1 for i in range(len(ft1_files))]
        output_files = [
            os.path.join(args.outdir, os.path.basename(x)) for x in ft1_files
        ]
        opts = [vars(args).copy() for x in ft1_files]

    if batch:
        submit_jobs('fermipy-select',
                    input_files,
                    opts,
                    output_files,
                    overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.configure(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')

    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)

    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('output %s', args.output)

    for infiles, outfile, idx_min, idx_max in zip(input_files, output_files,
                                                  files_idx_min,
                                                  files_idx_max):

        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)
        infiles = get_files(infiles, ['.fit', '.fits'])
        if idx_min is not None:
            infiles = infiles[idx_min:idx_max]

        for i, f in enumerate(infiles):

            if re.search(r'^root://', f) is None:
                continue
            os.system('xrdcp %s %s' % (f, f.split('/')[-1]))
            infiles[i] = os.path.join(tmpdir, f.split('/')[-1])

        kw = {k: args.__dict__[k] for k in gtselect_keys}
        if kw['emax'] is None:
            kw['emax'] = 1E6

        create_filelist(infiles, 'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect', logger, kw)

        kw = {k: args.__dict__[k] for k in gtmktime_keys}
        if kw['roicut'] is None:
            kw['roicut'] = 'no'

        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
            if args.rock_angle is not None:
                kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s ' % dict(
                    rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
            staged_outfile = kw['outfile']
            run_gtapp('gtmktime', logger, kw)

        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)

    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
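
For reference, a small standalone sketch of the index arithmetic behind files_per_split above (counts hypothetical). Each job receives the half-open slice [idx_min, idx_max), and Python slicing clamps the final chunk automatically:

files = ['ft1_%03i.fits' % i for i in range(250)]  # hypothetical input list
files_per_split = 100

for ijob, idx_min in enumerate(range(0, len(files), files_per_split)):
    idx_max = idx_min + files_per_split
    chunk = files[idx_min:idx_max]  # last chunk keeps the remaining 50 files
    print(ijob, idx_min, idx_max, len(chunk))
# -> 0 0 100 100
#    1 100 200 100
#    2 200 300 50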
Example 11
def main():

    gtselect_keys = ['tmin','tmax','emin','emax','zmax','evtype','evclass',
                     'phasemin','phasemax','convtype','rad','ra','dec']

    gtmktime_keys = ['roicut','filter']
    
    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    
    for k in gtselect_keys:

        if k in ['evtype','evclass','convtype']:
            parser.add_argument('--%s'%k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s'%k, default=None, type=float, help='')

    for k in gtmktime_keys:
        parser.add_argument('--%s'%k, default=None, type=str, help='')
        
    parser.add_argument('--rock_angle', default=None, type=float, help='')
        
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')
        
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of input FT1 files.')
    
    args = parser.parse_args()

    if args.merge:
        if not args.outfile:
            raise Exception('No output file defined.')        
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [os.path.abspath(args.outfile)]
    else:
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [os.path.join(args.outdir,os.path.basename(x)) for x in args.files]

    if args.batch:

        batch_opts = {'W' : args.time, 'R' : args.resources,
                      'oo' : 'batch.log' }
        args.batch=False
        for infile, outfile in zip(input_files,output_files):
            
            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.',outfile)
                continue
            
            batch_opts['oo'] = os.path.splitext(outfile)[0] + '_select.log'
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          infile, args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)


    logger = Logger.get(os.path.basename(__file__),None,logging.INFO)

    logger.info('Starting.')
    
    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)
    
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s',tmpdir)
    logger.info('outdir %s',args.outdir)
    logger.info('outfile %s',args.outfile)
    
    for infiles, outfile in zip(input_files,output_files):

        logger.info('infiles %s',pprint.pformat(infiles))
        logger.info('outfile %s',outfile)
        
        kw = { k : args.__dict__[k] for k in gtselect_keys }
        if kw['emax'] is None:
            kw['emax'] = 1E6

        create_filelist(infiles,'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect',logger,kw)

        kw = { k : args.__dict__[k] for k in gtmktime_keys }
        if kw['roicut'] is None:
            kw['roicut'] = 'no'
        
        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
            if args.rock_angle is not None:
                kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s '%dict(rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
            staged_outfile = kw['outfile']
            run_gtapp('gtmktime',logger,kw)
                            
        logger.info('cp %s %s',staged_outfile,outfile)
        shutil.copy(staged_outfile,outfile)
        
    os.chdir(cwd)
    logger.info('Deleting %s',tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
Example 12
def main():

    gtselect_keys = ['tmin', 'tmax', 'emin', 'emax', 'zmax', 'evtype', 'evclass',
                     'phasemin', 'phasemax', 'convtype', 'rad', 'ra', 'dec']

    gtmktime_keys = ['roicut', 'filter']

    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    for k in gtselect_keys:

        if k in ['evtype', 'evclass', 'convtype']:
            parser.add_argument('--%s' % k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s' % k, default=None, type=float, help='')

    for k in gtmktime_keys:
        parser.add_argument('--%s' % k, default=None, type=str, help='')

    parser.add_argument('--rock_angle', default=None, type=float, help='')

    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--output', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into N files where N is determined '
                        'by files_per_split.')

    parser.add_argument('--files_per_split', default=100,
                        type=int, help='Set the number of files to combine in each '
                        'split of the input file list.')

    parser.add_argument('--file_idx_min', default=None,
                        type=int, help='Index of the first file from the '
                        'input list to process in this job.')

    parser.add_argument('--file_idx_max', default=None,
                        type=int, help='Index one past the last file from '
                        'the input list to process in this job.')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of files.')

    args = parser.parse_args()

    batch = vars(args).pop('batch')
    files = vars(args).pop('files')
    args.outdir = os.path.abspath(args.outdir)
    files = [os.path.abspath(f) for f in files]

    ft1_files = get_files(files, ['.fit', '.fits'])
    for i, f in enumerate(ft1_files):
        if re.search(r'^root://', f) is None:
            ft1_files[i] = os.path.abspath(f)

    input_files = []
    output_files = []
    files_idx_min = []
    files_idx_max = []
    opts = []

    if args.file_idx_min is not None and args.file_idx_max is not None:

        files_idx_min = [args.file_idx_min]
        files_idx_max = [args.file_idx_max]
        input_files = [files]
        output_files = [args.output]

    elif args.merge:
        if not args.output:
            raise Exception('No output file defined.')

        nfiles = len(ft1_files)
        njob = int(np.ceil(nfiles / float(args.files_per_split)))
        for ijob, i in enumerate(range(0, nfiles, args.files_per_split)):

            if args.outdir is not None:
                mkdir(args.outdir)
                outdir = os.path.abspath(args.outdir)
            else:
                outdir = os.path.dirname(os.path.dirname(args.output))

            outfile = os.path.splitext(os.path.basename(args.output))[0]
            outfile += '_%03i.fits' % (ijob)
            outfile = os.path.join(outdir, outfile)
            input_files += [files]
            output_files += [outfile]
            files_idx_min += [i]
            files_idx_max += [i + args.files_per_split]
            opts += [vars(args).copy()]
            opts[-1]['output'] = outfile
            opts[-1]['file_idx_min'] = i
            opts[-1]['file_idx_max'] = i + args.files_per_split

    else:
        input_files = ft1_files
        files_idx_min = [i for i in range(len(ft1_files))]
        files_idx_max = [i + 1 for i in range(len(ft1_files))]
        output_files = [os.path.join(
            args.outdir, os.path.basename(x)) for x in ft1_files]
        opts = [vars(args).copy() for x in ft1_files]

    if batch:
        submit_jobs('fermipy-select',
                    input_files, opts, output_files, overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.configure(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')

    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)

    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('output %s', args.output)

    for infiles, outfile, idx_min, idx_max in zip(input_files, output_files,
                                                  files_idx_min, files_idx_max):

        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)
        infiles = get_files(infiles, ['.fit', '.fits'])
        if idx_min is not None:
            infiles = infiles[idx_min:idx_max]

        for i, f in enumerate(infiles):

            if re.search(r'^root://', f) is None:
                continue
            os.system('xrdcp %s %s' % (f, f.split('/')[-1]))
            infiles[i] = os.path.join(tmpdir, f.split('/')[-1])

        kw = {k: args.__dict__[k] for k in gtselect_keys}
        if kw['emax'] is None:
            kw['emax'] = 1E6

        create_filelist(infiles, 'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect', logger, kw)

        kw = {k: args.__dict__[k] for k in gtmktime_keys}
        if kw['roicut'] is None:
            kw['roicut'] = 'no'

        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
            if args.rock_angle is not None:
                kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s ' % dict(
                    rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
            staged_outfile = kw['outfile']
            run_gtapp('gtmktime', logger, kw)

        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)

    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')