Example #1
def clone_script(basedir, roi_set, script_in, options):
    """ Clone a bash script for every ROI in a DMRoiSet
    """
    bash_script = """
cat $0
python {script} {options}
"""

    print(bash_script.format(script=script_in, options=options))

    scriptdir = os.path.join(basedir, 'scripts')
    utils.mkdir(scriptdir)
    os.system('cp %s %s' % (script_in, scriptdir))

    for name in roi_set.roi_dict.keys():

        dirname = os.path.join(basedir, name)
        utils.mkdir(dirname)

        print(dirname)

        script = os.path.basename(script_in)
        scriptpath = os.path.abspath(os.path.join(dirname, script))
        os.system(
            'ln -sf %s %s' %
            (os.path.abspath(os.path.join(scriptdir, script)), scriptpath))
        runscript = os.path.abspath(
            os.path.join(dirname,
                         os.path.splitext(script)[0] + '.sh'))
        with open(runscript, 'wt') as fout:
            # The first format pass fills {script}/{options}; the second fills
            # any {roi} placeholder carried through in the options string.
            fout.write(
                bash_script.format(script=scriptpath,
                                   options=options).format(roi=name))
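A minimal usage sketch for the function above (the script name and options here are hypothetical; roi_set is assumed to be a DMRoiSet whose roi_dict keys name the ROIs). Any {roi} placeholder in options survives the first format pass and is filled with the ROI name by the second:

# Hypothetical driver: one run directory and wrapper .sh script per ROI.
clone_script(basedir='analysis',
             roi_set=roi_set,                    # assumed DMRoiSet instance
             script_in='run_roi.py',             # hypothetical analysis script
             options='--config=config.yaml --roi={roi}')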
Example #2
def clone_configs(basedir, base_configs, opt_configs, scripts, args=''):
    """
    """
    config = {}
    for c in base_configs:
        config = utils.merge_dict(config,
                                  yaml.safe_load(open(c)),
                                  add_new_keys=True)

    scriptdir = os.path.abspath(os.path.join(basedir, 'scripts'))
    utils.mkdir(scriptdir)
    bash_scripts = []
    for script_in in scripts:
        bash_script = """
cat $0
{scriptexe} --config={config} {args}
"""

        if os.path.isfile(script_in):
            script = os.path.basename(script_in)
            scriptpath = os.path.join(scriptdir, script)
            scriptexe = 'python ' + scriptpath
            os.system('cp %s %s' % (script_in, scriptdir))
        elif cmd_exists(script_in):
            scriptexe = script_in
            script = script_in
        else:
            raise Exception('Could not find script: %s' % script_in)

        bash_scripts.append((script, scriptexe, bash_script))

    for name, vdict in opt_configs.items():

        dirname = os.path.abspath(os.path.join(basedir, name))
        utils.mkdir(dirname)

        cfgfile = os.path.join(dirname, 'config.yaml')
        for script_in, bash_script in zip(scripts, bash_scripts):
            runscript = os.path.splitext(bash_script[0])[0] + '.sh'
            runscript = os.path.join(dirname, runscript)
            with open(runscript, 'wt') as f:
                f.write(bash_script[2].format(source=name,
                                              scriptexe=bash_script[1],
                                              config=cfgfile,
                                              args=args))

        if not config:
            continue

        c = copy.deepcopy(config)
        c = utils.merge_dict(c, vdict, add_new_keys=True)
        yaml.dump(utils.tolist(c),
                  open(cfgfile, 'w'),
                  default_flow_style=False)
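A hypothetical invocation of clone_configs (file names and override dicts are illustrative). The base configs are merged in order; each opt_configs entry then gets its own subdirectory with the per-entry overrides merged on top, plus one wrapper .sh script per entry in scripts:

# Hypothetical usage: two sources, one fit script copied into runs/scripts.
clone_configs(basedir='runs',
              base_configs=['base.yaml'],
              opt_configs={'src0': {'selection': {'target': 'src0'}},
                           'src1': {'selection': {'target': 'src1'}}},
              scripts=['fit.py'],
              args='--overwrite')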
Example #3
File: batch.py  Project: sarabuson/fermipy
def create_job_array(jobname, commands, logfiles=None, sleep='1m'):
    subdir = mkdir("sub")
    outdir = mkdir("log")

    subbase = os.path.join(subdir, os.path.basename(jobname))
    outbase = os.path.join(outdir, os.path.basename(jobname))

    create_scripts(commands, subbase, sleep)
    if logfiles is not None:
        link_logfiles(logfiles, outbase)

    submit = "sh " + subbase + ".${LSB_JOBINDEX}"
    output = outbase + ".%I"

    njobs = len(commands)
    params = dict(name=jobname, cmnd=submit, log=output, njobs=njobs)

    job = """-oo %(log)s -J %(name)s[1-%(njobs)i] %(cmnd)s""" % (params)
    return job
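For reference, a worked example of the submission string this returns (assuming create_scripts writes one numbered script per command, sub/<jobname>.1 through sub/<jobname>.N):

commands = ['python fit.py 1', 'python fit.py 2', 'python fit.py 3']
job = create_job_array('scan', commands)
# job == '-oo log/scan.%I -J scan[1-3] sh sub/scan.${LSB_JOBINDEX}'
# LSF expands %I and ${LSB_JOBINDEX} to the array index, so each job in the
# array runs its own script and writes its own log.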
Example #4
File: batch.py  Project: jefemagril/fermipy
def create_job_array(jobname, commands, logfiles=None, sleep='1m'):
    subdir = mkdir("sub")
    outdir = mkdir("log")

    subbase = os.path.join(subdir, os.path.basename(jobname))
    outbase = os.path.join(outdir, os.path.basename(jobname))

    create_scripts(commands, subbase, sleep)
    if logfiles is not None:
        link_logfiles(logfiles, outbase)

    submit = "sh " + subbase + ".${LSB_JOBINDEX}"
    output = outbase + ".%I"

    njobs = len(commands)
    params = dict(name=jobname,
                  cmnd=submit,
                  log=output,
                  njobs=njobs)

    job = """-oo %(log)s -J %(name)s[1-%(njobs)i] %(cmnd)s""" % (params)
    return job
Example #5
def main():

    usage = "Usage: %(prog)s  [options] input"
    description = "python script"
    parser = argparse.ArgumentParser(usage=usage, description=description)
    parser.add_argument("-d", "--dryrun", action='store_true')
    parser.add_argument("-s", "--sleep", default='1m', help="Pause between")
    parser.add_argument("--ls1",
                        action='store_true',
                        default=False,
                        help='Fetch LS1 files.')
    parser.add_argument("--emin", default=100)
    parser.add_argument("--emax", default=1e6)
    parser.add_argument("--tmin",
                        default=239557414,
                        type=int,
                        help="Min time; default is start of first LAT run")
    parser.add_argument("--tmax",
                        default=None,
                        type=int,
                        help="Default is current time.")
    parser.add_argument("--evtclass", default="Source", help="Event class")
    parser.add_argument("--evtsample",
                        default="P7.6_P130_BASE",
                        choices=[
                            'P7.6_P130_BASE', 'P6_public_v3', 'P7_P202_BASE',
                            'P7_P203_BASE', 'P8_P301_BASE', 'P8_P302_BASE',
                            'P8_P302_ALL'
                        ],
                        help="Event sample")
    parser.add_argument("--chunk",
                        default=int(YEAR // 12),
                        type=int,
                        help="Time chunk for download. Default is ~1 month.")

    args = parser.parse_args()

    basedir = os.environ['PWD']
    codedir = join(basedir, dirname(os.path.relpath(__file__)))
    logdir = join(basedir, "log")
    if not args.dryrun:
        logdir = mkdir(logdir)
    astro = astroserver()

    chunk = args.chunk
    # Might want to think more about how tmin and tmax are set
    first = args.tmin
    if args.tmax is None:
        args.tmax = int(utc2met())

    emin, emax = args.emin, args.emax
    evtclass = args.evtclass
    evtsample = args.evtsample
    sample = '_'.join(evtsample.split('_')[:-1])
    events = evtclass.upper()

    # Break data into chunks
    epsilon = 1e-6
    times = np.arange(args.tmin, args.tmax + epsilon, chunk).astype(int)

    # Get new full ft2 file.
    # Assumption is that it is a longer time period...
    ft2 = join(
        basedir,
        "%s_%s_%s_%s_ft2.fits" % (sample, events, min(times), max(times)))
    jobname = 'ft2'
    if os.path.exists(ft2):
        # exact ft2 already exists; skip
        print("%s exists; skipping.\n" % ft2)
    else:
        # Remove old ft2 file and replace with link
        if not args.dryrun:
            #for f in glob.glob(join(basedir, "*ft2.fits")):
            #    os.remove(f)
            #    os.symlink(ft2, f)
            for f in glob.glob(join(basedir, "*ft2_fix_checksums.sh")):
                os.remove(f)

        logfile = join(logdir, basename(ft2).replace('fits', 'log'))
        command = astro(
            'storeft2',
            output_ft2_30s=ft2,
            _event_sample=evtsample,
            minTimestamp=min(times),
            maxTimestamp=max(times),
            excludeMaxTimestamp='',
            quiet='',
            brief='',
        )

        print(command)
        bsub(jobname,
             command,
             logfile,
             sleep=args.sleep,
             submit=not args.dryrun,
             W=1000,
             R='rhel60')

    # Download ft1, ft2 files
    ft1dir = mkdir(join(basedir, 'ft1'))
    ft1_lst, ft1_cmnds, ft1_logs = [], [], []

    ls1dir = mkdir(join(basedir, 'ls1'))
    ls1_lst, ls1_cmnds, ls1_logs = [], [], []

    ft2dir = mkdir(join(basedir, 'ft2'))
    ft2_lst, ft2_cmnds, ft2_logs = [], [], []

    for tmin, tmax in zip(times[:-1], times[1:]):

        # If ft1 file exists, skip it...
        ft1 = join(ft1dir,
                   "%s_%s_%s_%s_ft1.fits" % (sample, events, tmin, tmax))
        if os.path.exists(ft1):
            print("%s exists; skipping.\n" % ft1)
        else:
            ft1_kw = dict(_output_ft1=ft1,
                          _event_sample=evtsample,
                          minTimestamp=tmin,
                          maxTimestamp=tmax,
                          minEnergy=emin,
                          maxEnergy=emax,
                          _event_class_name=evtclass,
                          excludeMaxTimestamp='',
                          quiet='',
                          brief='')

            ft1_cmnd = astro("store", **ft1_kw)
            ft1_logs.append(join(logdir, basename(ft1).replace('fits', 'log')))
            ft1_cmnds.append(ft1_cmnd)
        ft1_lst.append(ft1)

        # If ls1 file exists, skip it...
        ls1 = join(ls1dir,
                   "%s_%s_%s_%s_ls1.fits" % (sample, events, tmin, tmax))
        if not args.ls1:
            print("LS1 disabled; skipping %s.\n" % ls1)
        elif os.path.exists(ls1):
            print("%s exists; skipping.\n" % ls1)
        else:
            ls1_kw = dict(_output_ls1=ls1,
                          _event_sample=evtsample,
                          _output_ls1_max_bytes_per_file=0,
                          minTimestamp=tmin,
                          maxTimestamp=tmax,
                          minEnergy=emin,
                          maxEnergy=emax,
                          _event_class_name=evtclass,
                          excludeMaxTimestamp='',
                          quiet='',
                          brief='')

            ls1_cmnd = astro("store", **ls1_kw)
            ls1_logs.append(join(logdir, basename(ls1).replace('fits', 'log')))
            ls1_cmnds.append(ls1_cmnd)
        ls1_lst.append(ls1)

        # If ft2 file exists, skip it...
        ft2 = join(ft2dir,
                   "%s_%s_%s_%s_ft2.fits" % (sample, events, tmin, tmax))
        if os.path.exists(ft2):
            print("%s exists; skipping.\n" % ft2)
        else:
            ft2_cmnd = astro(
                'storeft2',
                output_ft2_30s=ft2,
                _event_sample=evtsample,
                minTimestamp=tmin,
                maxTimestamp=tmax,
                excludeMaxTimestamp='',
                quiet='',
                brief='',
            )
            ft2_logs.append(join(logdir, basename(ft2).replace('fits', 'log')))
            ft2_cmnds.append(ft2_cmnd)
        ft2_lst.append(ft2)

    resources = 'bullet,hequ,kiso'

    bsub('ft1',
         ft1_cmnds,
         ft1_logs,
         sleep=args.sleep,
         submit=not args.dryrun,
         W=1000,
         R=resources)
    bsub('ls1',
         ls1_cmnds,
         ls1_logs,
         sleep=args.sleep,
         submit=not args.dryrun,
         W=1000,
         R=resources)
    bsub('ft2',
         ft2_cmnds,
         ft2_logs,
         sleep=args.sleep,
         submit=not args.dryrun,
         W=1000,
         R=resources)

    # Create list of ft1 files
    ft1_lstfile = join(
        basedir,
        "%s_%s_%s_%s_ft1.lst" % (sample, events, min(times), max(times)))
    ls1_lstfile = join(
        basedir,
        "%s_%s_%s_%s_ls1.lst" % (sample, events, min(times), max(times)))
    ft2_lstfile = join(
        basedir,
        "%s_%s_%s_%s_ft2.lst" % (sample, events, min(times), max(times)))
    if not args.dryrun:
        for f in glob.glob(join(basedir, "*.lst")):
            os.remove(f)
        print("Creating ft1 file list: %s" % ft1_lstfile)
        np.savetxt(ft1_lstfile, ft1_lst, fmt='%s')
        print("Creating ls1 file list: %s" % ls1_lstfile)
        np.savetxt(ls1_lstfile, ls1_lst, fmt='%s')
        print("Creating ft2 file list: %s" % ft2_lstfile)
        np.savetxt(ft2_lstfile, ft2_lst, fmt='%s')
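A self-contained illustration of the time-chunking arithmetic above (hypothetical MET values): epsilon keeps the final edge when the span is an exact multiple of chunk, and consecutive edge pairs become the per-chunk download intervals:

import numpy as np

tmin, tmax, chunk = 0, 90, 30
epsilon = 1e-6
times = np.arange(tmin, tmax + epsilon, chunk).astype(int)  # array([ 0, 30, 60, 90])
bins = list(zip(times[:-1], times[1:]))                     # [(0, 30), (30, 60), (60, 90)]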
Example #6
def _process_lc_bin(itime, name, config, basedir, workdir, diff_sources, const_spectrum, roi, lck_params,
                    **kwargs):
    i, time = itime

    roi = copy.deepcopy(roi)

    config = copy.deepcopy(config)
    config['selection']['tmin'] = time[0]
    config['selection']['tmax'] = time[1]

    # create output directories labeled in MET vals
    outdir = basedir + 'lightcurve_%.0f_%.0f' % (time[0], time[1])
    config['fileio']['outdir'] = os.path.join(workdir, outdir)
    config['logging']['prefix'] = 'lightcurve_%.0f_%.0f ' % (time[0], time[1])
    config['fileio']['logfile'] = os.path.join(config['fileio']['outdir'],
                                               'fermipy.log')
    utils.mkdir(config['fileio']['outdir'])

    yaml.dump(utils.tolist(config),
              open(os.path.join(config['fileio']['outdir'],
                                'config.yaml'), 'w'))

    xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

    try:
        from fermipy.gtanalysis import GTAnalysis
        gta = GTAnalysis(config, roi, loglevel=logging.DEBUG)
        gta.logger.info('Fitting time range %i %i' % (time[0], time[1]))
        gta.setup()
    except Exception:
        print('Analysis failed in time range %i %i' %
              (time[0], time[1]))
        print(sys.exc_info()[0])
        raise

    gta._lck_params = lck_params
    # Recompute source map for source of interest and sources within 3 deg
    if gta.config['gtlike']['use_scaled_srcmap']:
        names = [s.name for s in
                 gta.roi.get_sources(distance=3.0, skydir=gta.roi[name].skydir)
                 if not s.diffuse]
        gta.reload_sources(names)

    # Write the current model
    gta.write_xml(xmlfile)

    # Optimize the model
    gta.optimize(skip=diff_sources,
                 shape_ts_threshold=kwargs.get('shape_ts_threshold'))

    fit_results = _fit_lc(gta, name, **kwargs)
    gta.write_xml('fit_model_final.xml')
    srcmodel = copy.deepcopy(gta.get_src_model(name))
    numfree = gta.get_free_param_vector().count(True)

    # Fall-backs so that building the output dict below cannot raise a
    # NameError if none of the constant/fixed fits succeeds.
    const_srcmodel = copy.deepcopy(srcmodel)
    fixed_srcmodel = copy.deepcopy(srcmodel)
    fixed_fit_results = dict(fit_results, fit_success=False)
    max_ts_thresholds = [None, 4, 9]
    for max_ts in max_ts_thresholds:
        if max_ts is not None:
            gta.free_sources(minmax_ts=[None, max_ts], free=False, exclude=[name])

        # rerun fit using params from full time (constant) fit using same
        # param vector as the successful fit to get loglike
        specname, spectrum = const_spectrum
        gta.set_source_spectrum(name, spectrum_type=specname,
                                spectrum_pars=spectrum,
                                update_source=False)
        gta.free_source(name, free=False)
        const_fit_results = gta.fit()
        if not const_fit_results['fit_success']:
            continue
        const_srcmodel = gta.get_src_model(name)

        # rerun using shape fixed to full time fit
        # for the fixed-shape lightcurve
        gta.free_source(name, pars='norm')
        fixed_fit_results = gta.fit()
        if not fixed_fit_results['fit_success']:
            continue
        fixed_srcmodel = gta.get_src_model(name)
        break
    
    # special lc output
    o = {'flux_const': const_srcmodel['flux'],
         'loglike_const': const_fit_results['loglike'],
         'fit_success': fit_results['fit_success'],
         'fit_success_fixed': fixed_fit_results['fit_success'],
         'fit_quality': fit_results['fit_quality'],
         'fit_status': fit_results['fit_status'],
         'num_free_params': numfree,
         'config': config}

    # full flux output
    if fit_results['fit_success'] == 1:
        for k in defaults.source_flux_output.keys():
            if k not in srcmodel:
                continue
            o[k] = srcmodel[k]
            o[k+'_fixed'] = fixed_srcmodel[k]

    gta.logger.info('Finished time range %i %i' % (time[0], time[1]))
    return o
Example #7
    def _make_lc(self, name, **kwargs):

        # make array of time values in MET
        if kwargs['time_bins']:
            times = np.array(kwargs['time_bins'])
        elif kwargs['nbins']:
            times = np.linspace(self.tmin, self.tmax,
                                kwargs['nbins'] + 1)
        else:
            times = np.arange(self.tmin, self.tmax,
                              kwargs['binsz'])

        o = self._create_lc_dict(name, times)
        o['config'] = kwargs

        diff_sources = [s.name for s in self.roi.sources if s.diffuse]
        skydir = self.roi[name].skydir

        if kwargs.get('free_radius', None) is not None:
            kwargs['free_sources'] += [
                s.name for s in self.roi.get_sources(
                    skydir=skydir,
                    distance=kwargs['free_radius'],
                    exclude=diff_sources)]

        for i, time in enumerate(zip(times[:-1], times[1:])):

            self.logger.info('Fitting time range %i %i', time[0], time[1])

            config = copy.deepcopy(self.config)
            config['selection']['tmin'] = time[0]
            config['selection']['tmax'] = time[1]
            config['ltcube']['use_local_ltcube'] = kwargs['use_local_ltcube']
            config['model']['diffuse_dir'] = [self.workdir]

            if config['components'] is None:
                config['components'] = []

            for j, c in enumerate(self.components):
                if len(config['components']) <= j:
                    config['components'] += [{}]

                data_cfg = {'evfile': c.files['ft1'],
                            'scfile': c.data_files['scfile'],
                            'ltcube': None}

                config['components'][j] = \
                    utils.merge_dict(config['components'][j],
                                     {'data': data_cfg},
                                     add_new_keys=True)

            # create output directories labeled in MET vals
            outdir = 'lightcurve_%.0f_%.0f' % (time[0], time[1])
            config['fileio']['outdir'] = os.path.join(self.workdir, outdir)
            utils.mkdir(config['fileio']['outdir'])

            yaml.dump(utils.tolist(config),
                      open(os.path.join(config['fileio']['outdir'],
                                        'config.yaml'), 'w'))

            xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

            # Make a copy of the source maps. TODO: Implement a
            # correction to account for the difference in exposure for
            # each time bin.
            #     for c in self.components:
            #        shutil.copy(c._files['srcmap'],config['fileio']['outdir'])

            try:
                gta = self.clone(config, loglevel=logging.DEBUG)
                gta.setup()
            except Exception:
                self.logger.warning('Analysis failed in time range %i %i',
                                    time[0], time[1])
                continue

            # Write the current model
            gta.write_xml(xmlfile)

            # Optimize the model (skip diffuse?)
            gta.optimize(skip=diff_sources)

            fit_results = self._fit_lc(gta, name, **kwargs)
            gta.write_xml('fit_model_final.xml')
            output = gta.get_src_model(name)

            if fit_results['fit_success'] == 1:
                for k in defaults.source_flux_output.keys():
                    if k not in output:
                        continue
                    if (isinstance(output[k], np.ndarray) and
                            o[k][i].shape != output[k].shape):
                        self.logger.warning(
                            'Incompatible shape for column %s', k)
                        continue
                    o[k][i] = output[k]

            self.logger.info('Finished time range %i %i', time[0], time[1])

        src = self.roi.get_source_by_name(name)
        return o
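An illustration of the three binning modes at the top of _make_lc (hypothetical values). Note that with binsz the trailing partial bin is dropped, because np.arange stops short of tmax:

import numpy as np

tmin, tmax = 0.0, 100.0
edges_explicit = np.array([0.0, 50.0, 100.0])    # time_bins: edges used as given
edges_nbins = np.linspace(tmin, tmax, 4 + 1)     # nbins=4: [0., 25., 50., 75., 100.]
edges_binsz = np.arange(tmin, tmax, 30.0)        # binsz=30: [0., 30., 60., 90.]
# zip(edges_binsz[:-1], edges_binsz[1:]) -> (0, 30), (30, 60), (60, 90);
# the 90-100 interval is not covered.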
Example #8
def _process_lc_bin(itime, name, config, basedir, workdir, diff_sources, const_spectrum, roi, lck_params,
                    **kwargs):
    i, time = itime

    roi = copy.deepcopy(roi)

    config = copy.deepcopy(config)
    config['selection']['tmin'] = time[0]
    config['selection']['tmax'] = time[1]

    # create output directories labeled in MET vals
    outdir = basedir + 'lightcurve_%.0f_%.0f' % (time[0], time[1])
    config['fileio']['outdir'] = os.path.join(workdir, outdir)
    config['logging']['prefix'] = 'lightcurve_%.0f_%.0f ' % (time[0], time[1])
    config['fileio']['logfile'] = os.path.join(config['fileio']['outdir'],
                                               'fermipy.log')
    utils.mkdir(config['fileio']['outdir'])

    yaml.dump(utils.tolist(config),
              open(os.path.join(config['fileio']['outdir'],
                                'config.yaml'), 'w'))

    xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

    try:
        from fermipy.gtanalysis import GTAnalysis
        gta = GTAnalysis(config, roi, loglevel=logging.DEBUG)
        gta.logger.info('Fitting time range %i %i' % (time[0], time[1]))
        gta.setup()
    except Exception:
        print('Analysis failed in time range %i %i' %
              (time[0], time[1]))
        print(sys.exc_info()[0])
        raise

    gta._lck_params = lck_params
    # Recompute source map for source of interest and sources within 3 deg
    if gta.config['gtlike']['use_scaled_srcmap']:
        names = [s.name for s in
                 gta.roi.get_sources(distance=3.0, skydir=gta.roi[name].skydir)
                 if not s.diffuse]
        gta.reload_sources(names)

    # Write the current model
    gta.write_xml(xmlfile)

    # Optimize the model
    gta.optimize(skip=diff_sources,
                 shape_ts_threshold=kwargs.get('shape_ts_threshold'))

    fit_results = _fit_lc(gta, name, **kwargs)
    gta.write_xml('fit_model_final.xml')
    srcmodel = copy.deepcopy(gta.get_src_model(name))
    numfree = gta.get_free_param_vector().count(True)
    
    const_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results = fit_results.copy()
    fixed_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results['fit_success'], fixed_srcmodel['fit_success'] = False, False
    fixed_fit_results['fit_quality'], fixed_srcmodel['fit_quality'] = 0, 0
    max_ts_thresholds = [None, 4, 9, 16, 25]
    for max_ts in max_ts_thresholds:
        if max_ts is not None:
            gta.free_sources(minmax_ts=[None, max_ts], free=False, exclude=[name])

        # rerun fit using params from full time (constant) fit using same
        # param vector as the successful fit to get loglike
        specname, spectrum = const_spectrum
        gta.set_source_spectrum(name, spectrum_type=specname,
                                spectrum_pars=spectrum,
                                update_source=False)
        gta.free_source(name, free=False)
        const_fit_results = gta.fit()
        if not const_fit_results['fit_success']:
            continue
        const_srcmodel = gta.get_src_model(name)
        # rerun using shape fixed to full time fit
        # for the fixed-shape lightcurve
        gta.free_source(name, pars='norm')
        fixed_fit_results = gta.fit()
        if not fixed_fit_results['fit_success']:
            continue
        fixed_srcmodel = gta.get_src_model(name)
        break
    
    # special lc output
    o = {'flux_const': const_srcmodel['flux'],
         'loglike_const': const_fit_results['loglike'],
         'fit_success': fit_results['fit_success'],
         'fit_success_fixed': fixed_fit_results['fit_success'],
         'fit_quality': fit_results['fit_quality'],
         'fit_status': fit_results['fit_status'],
         'num_free_params': numfree,
         'config': config}
    # full flux output
    if fit_results['fit_success'] == 1:
        for k in defaults.source_flux_output.keys():
            if k not in srcmodel:
                continue
            o[k] = srcmodel[k]
            o[k+'_fixed'] = fixed_srcmodel[k]

    gta.logger.info('Finished time range %i %i' % (time[0], time[1]))
    return o
Example #9
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run validation analysis"
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    parser.add_argument('--outdir',
                        default=None,
                        type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile',
                        default=None,
                        type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--data_type', default='agn', type=str)
    parser.add_argument('--mode', default='fill', type=str)
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('files',
                        nargs='+',
                        default=None,
                        help='List of directories in which the analysis will '
                        'be run.')

    args = parser.parse_args()

    if args.outdir is not None:
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)

    if args.mode == 'fill':
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [make_outpath(x, args.outdir) for x in args.files]
    elif args.mode == 'collect':
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [args.outfile]

    print(input_files)
    print(output_files)

    if args.batch:

        batch_opts = {'W': args.time, 'R': args.resources, 'oo': 'batch.log'}
        args.batch = False
        for infile, outfile in zip(input_files, output_files):

            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.', outfile)
                continue

            batch_opts['oo'] = os.path.splitext(outfile)[0] + '.log'
            # __file__.rstrip('cd') maps a cached '.pyc'/'.pyd' path back to '.py'.
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          infile,
                          args,
                          batch_opts,
                          dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')

    for infiles, outfile in zip(input_files, output_files):

        if args.data_type == 'agn':
            acc = AGNAccumulator()
        else:
            raise ValueError('Unsupported data_type: %s' % args.data_type)

        for f in infiles:
            print('process', f)
            acc.process(f)

        print('write', outfile)
        acc.write(outfile)

    logger.info('Done.')
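A sketch of how the two modes wire inputs to outputs (hypothetical paths):

# mode='fill':    one job per input file
#   input_files  = [['/abs/a.fits'], ['/abs/b.fits']]
#   output_files = [make_outpath(x, outdir) for each input]
# mode='collect': one job over all inputs
#   input_files  = [['/abs/a.fits', '/abs/b.fits']]
#   output_files = [args.outfile]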
Example #10
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    parser.add_argument('--zmax', default=100., type=float, help='')
    parser.add_argument('--dcostheta', default=0.025, type=float, help='')
    parser.add_argument('--binsz', default=1.0, type=float, help='')
    parser.add_argument('--outdir',
                        default=None,
                        type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile',
                        default=None,
                        type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile',
                        default=None,
                        type=str,
                        help='',
                        required=True)

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge',
                        default=False,
                        action='store_true',
                        help='Merge input FT1 files into a single file.')

    parser.add_argument('files',
                        nargs='+',
                        default=None,
                        help='List of directories in which the analysis will '
                        'be run.')

    args = parser.parse_args()

    args.outdir = os.path.abspath(args.outdir)
    args.scfile = os.path.abspath(args.scfile)
    mkdir(args.outdir)
    input_files = [[os.path.abspath(x)] for x in args.files]
    output_files = [
        os.path.join(args.outdir, os.path.basename(x)) for x in args.files
    ]

    if args.batch:
        opts = copy.deepcopy(args.__dict__)
        opts.pop('files')
        opts.pop('batch')
        submit_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                    input_files, output_files, {k: v
                                                for k, v in opts.items()})
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('outfile %s', args.outfile)

    for infiles, outfile in zip(input_files, output_files):

        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)

        kw = dict(evfile='list.txt',
                  scfile=args.scfile,
                  outfile='ltcube.fits',
                  binsz=args.binsz,
                  dcostheta=args.dcostheta,
                  zmax=args.zmax)

        create_filelist(infiles, 'list.txt')
        staged_outfile = kw['outfile']
        run_gtapp('gtltcube', logger, kw)
        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)

    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
Example #11
def main():

    gtselect_keys = [
        'tmin', 'tmax', 'emin', 'emax', 'zmax', 'evtype', 'evclass',
        'phasemin', 'phasemax', 'convtype', 'rad', 'ra', 'dec'
    ]

    gtmktime_keys = ['roicut', 'filter']

    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    for k in gtselect_keys:

        if k in ['evtype', 'evclass', 'convtype']:
            parser.add_argument('--%s' % k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s' % k, default=None, type=float, help='')

    for k in gtmktime_keys:
        parser.add_argument('--%s' % k, default=None, type=str, help='')

    parser.add_argument('--rock_angle', default=None, type=float, help='')

    parser.add_argument('--outdir',
                        default=None,
                        type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--output',
                        default=None,
                        type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument(
        '--merge',
        default=False,
        action='store_true',
        help='Merge input FT1 files into N files where N is determined '
        'by files_per_split.')

    parser.add_argument('--files_per_split',
                        default=100,
                        type=int,
                        help='Set the number of files to combine in each '
                        'split of the input file list.')

    parser.add_argument('--file_idx_min',
                        default=None,
                        type=int,
                        help='Set the number of files to assign to '
                        'each batch job.')

    parser.add_argument('--file_idx_max',
                        default=None,
                        type=int,
                        help='Set the number of files to assign to '
                        'each batch job.')

    parser.add_argument('files',
                        nargs='+',
                        default=None,
                        help='List of files.')

    args = parser.parse_args()

    batch = vars(args).pop('batch')
    files = vars(args).pop('files')
    args.outdir = os.path.abspath(args.outdir)
    files = [os.path.abspath(f) for f in files]

    ft1_files = get_files(files, ['.fit', '.fits'])
    for i, f in enumerate(ft1_files):
        if re.search(r'^root://', f) is None:
            ft1_files[i] = os.path.abspath(f)

    input_files = []
    output_files = []
    files_idx_min = []
    files_idx_max = []
    opts = []

    if args.file_idx_min is not None and args.file_idx_max is not None:

        files_idx_min = [args.file_idx_min]
        files_idx_max = [args.file_idx_max]
        input_files = [files]
        output_files = [args.output]

    elif args.merge:
        if not args.output:
            raise Exception('No output file defined.')

        nfiles = len(ft1_files)
        njob = int(np.ceil(nfiles / float(args.files_per_split)))
        for ijob, i in enumerate(range(0, nfiles, args.files_per_split)):

            if args.outdir is not None:
                mkdir(args.outdir)
                outdir = os.path.abspath(args.outdir)
            else:
                outdir = os.path.dirname(os.path.dirname(args.output))

            outfile = os.path.splitext(os.path.basename(args.output))[0]
            outfile += '_%03i.fits' % (ijob)
            outfile = os.path.join(outdir, outfile)
            input_files += [files]
            output_files += [outfile]
            files_idx_min += [i]
            files_idx_max += [i + args.files_per_split]
            opts += [vars(args).copy()]
            opts[-1]['output'] = outfile
            opts[-1]['file_idx_min'] = i
            opts[-1]['file_idx_max'] = i + args.files_per_split

    else:
        input_files = ft1_files
        files_idx_min = [i for i in range(len(ft1_files))]
        files_idx_max = [i + 1 for i in range(len(ft1_files))]
        output_files = [
            os.path.join(args.outdir, os.path.basename(x)) for x in ft1_files
        ]
        opts = [vars(args).copy() for x in ft1_files]

    if batch:
        submit_jobs('fermipy-select',
                    input_files,
                    opts,
                    output_files,
                    overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.configure(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')

    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)

    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('output %s', args.output)

    for infiles, outfile, idx_min, idx_max in zip(input_files, output_files,
                                                  files_idx_min,
                                                  files_idx_max):

        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)
        infiles = get_files(infiles, ['.fit', '.fits'])
        if idx_min is not None:
            infiles = infiles[idx_min:idx_max]

        for i, f in enumerate(infiles):

            if re.search(r'^root://', f) is None:
                continue
            os.system('xrdcp %s %s' % (f, f.split('/')[-1]))
            infiles[i] = os.path.join(tmpdir, f.split('/')[-1])

        kw = {k: args.__dict__[k] for k in gtselect_keys}
        if kw['emax'] is None:
            kw['emax'] = 1E6

        create_filelist(infiles, 'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect', logger, kw)

        kw = {k: args.__dict__[k] for k in gtmktime_keys}
        if kw['roicut'] is None:
            kw['roicut'] = 'no'

        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
            if args.rock_angle is not None:
                kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s ' % dict(
                    rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
            staged_outfile = kw['outfile']
            run_gtapp('gtmktime', logger, kw)

        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)

    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
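A small illustration of the --merge splitting arithmetic above (hypothetical counts). file_idx_min/file_idx_max bound each job's slice of the input list; the final bound may run past the end, which Python slicing clamps:

import numpy as np

nfiles, files_per_split = 250, 100
njob = int(np.ceil(nfiles / float(files_per_split)))   # 3
bounds = [(i, i + files_per_split)
          for i in range(0, nfiles, files_per_split)]  # [(0, 100), (100, 200), (200, 300)]
# infiles[200:300] on a 250-element list simply yields the last 50 files.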
Example #12
def main():
    usage = "%(prog)s [config_file] [options]"
    description = """
Run a quick analysis of an ROI performing the basic data and model
preparation and optimizing source parameters.  If the input config
file does not exist a new one will be created using the options
provided on the command-line.
"""
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument(
        '--config',
        default=None,
        help='Set an existing configuration file that will be used as the '
        'baseline configuration.  Note that parameters set with command-line '
        'options (emin, emax, etc.) will override any settings in this file.')
    parser.add_argument('--outdir',
                        default=None,
                        help='Set the path to the analysis directory.')
    parser.add_argument(
        '--evfile',
        default=None,
        help='Set the path to the FT1 file or list of FT1 files.')
    parser.add_argument(
        '--scfile',
        default=None,
        help='Set the path to the FT2 file or list of FT2 files.')
    parser.add_argument('--ltcube',
                        default=None,
                        help='Set the path to the LT cube file.')
    parser.add_argument('--emin',
                        default=None,
                        help='Minimum energy selection (MeV).')
    parser.add_argument('--emax',
                        default=None,
                        help='Maximum energy selection (MeV).')
    parser.add_argument('--logemin',
                        default=None,
                        help='Minimum energy selection (log10(MeV)).')
    parser.add_argument('--logemax',
                        default=None,
                        help='Maximum energy selection (log10(MeV)).')
    parser.add_argument('--target',
                        default=None,
                        help='Name of a catalog source.')
    parser.add_argument('--ra', default=None, help='RA of ROI center.')
    parser.add_argument('--dec', default=None, help='DEC of ROI center.')
    parser.add_argument(
        'config_file',
        default=None,
        help='Path to a configuration file.  If this file does not exist then '
        'a new configuration file will be created.')

    args = vars(parser.parse_args())

    if 'FERMI_DIFFUSE_DIR' not in os.environ:
        os.environ['FERMI_DIFFUSE_DIR'] = os.path.expandvars(
            '$FERMI_DIR/refdata/fermi/galdiffuse')

    if not args['config_file']:
        args['config_file'] = os.path.join(args['outdir'], 'config.yaml')

    if not os.path.isdir(os.path.dirname(args['config_file'])):
        utils.mkdir(os.path.dirname(args['config_file']))

    # Create a config file
    if not os.path.isfile(args['config_file']):
        configpath = args['config_file']
        config = create_config(args)
        yaml.dump(config, open(configpath, 'w'))
        cfgstr = yaml.dump(config, default_flow_style=False)
        print('Creating new configuration...')
        print(cfgstr)
        config_file = configpath
    else:
        print('Using existing configuration...')
        config_file = args['config_file']

    run_analysis(config_file)
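A hypothetical command line for the entry point above (the script name is assumed; the options map onto the parser arguments defined in main). If the config file does not exist it is generated from these options, otherwise it is used as-is:

# python quickanalysis.py crab/config.yaml --outdir=crab \
#     --target='Crab' --emin=1000 --emax=100000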
Example #13
def main():

    usage = "usage: %(prog)s [options] "
    description = "Run tempo2 application on one or more FT1 files."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    parser.add_argument('--par_file',
                        default=None,
                        type=str,
                        required=True,
                        help='Ephemeris file')

    parser.add_argument('--scfile',
                        default=None,
                        type=str,
                        required=True,
                        help='FT2 file')

    parser.add_argument('--outdir', default=None, type=str, help='')

    parser.add_argument('--phase_colname',
                        default='PULSE_PHASE',
                        type=str,
                        help='Set the name of the phase column.')

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')

    parser.add_argument('files',
                        nargs='+',
                        default=None,
                        help='List of directories in which the analysis will '
                        'be run.')

    args = parser.parse_args()

    if args.outdir is None:
        outdirs = [os.path.dirname(os.path.abspath(x)) for x in args.files]
    else:
        outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        outdirs = [outdir for x in args.files]

    input_files = [[os.path.abspath(x)] for x in args.files]
    output_files = [
        os.path.join(y, os.path.basename(x))
        for x, y in zip(args.files, outdirs)
    ]

    if args.batch:

        opts = vars(args).copy()
        del opts['files']
        del opts['batch']
        submit_jobs(
            'fermipy-run-tempo',  # 'python ' + os.path.abspath(__file__.rstrip('cd')),
            input_files,
            opts,
            output_files,
            overwrite=args.overwrite,
            dry_run=args.dry_run)

        # batch_opts = {'W' : args.time, 'R' : args.resources,
        #              'oo' : 'batch.log' }
        # args.batch=False
        # for infile, outfile in zip(input_files,output_files):
        #
        #    if os.path.isfile(outfile) and not args.overwrite:
        #        print('Output file exists, skipping.',outfile)
        #        continue
        #
        #    batch_opts['oo'] = os.path.join(outdir,
        #                                    os.path.splitext(outfile)[0] +
        #                                    '_tempo2.log')
        #    dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
        #                  [infile], args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(__file__, None, logging.INFO)

    par_file = os.path.abspath(args.par_file)
    ft2_file = os.path.abspath(args.scfile)

    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')

    logger.info('tmpdir %s', tmpdir)
    os.chdir(tmpdir)

    for infiles, outfile in zip(input_files, output_files):

        infile = infiles[0]

        staged_infile = os.path.join(tmpdir, os.path.basename(infile))
        logFile = os.path.splitext(infile)[0] + '_tempo2.log'

        print('cp %s %s' % (infile, staged_infile))
        os.system('cp %s %s' % (infile, staged_infile))

        if re.search(r'\.root?', infile) is not None:
            phase_merit(staged_infile, outfile, logFile, ft2_file, par_file,
                        args.dry_run)
        elif re.search(r'\.fits?', infile) is not None:
            phase_ft1(staged_infile, outfile, logFile, ft2_file, par_file,
                      args.dry_run)
        else:
            print('Unrecognized file extension: ', infile)

    os.chdir(cwd)
    shutil.rmtree(tmpdir)
Example #14
def main():
    
    usage = "usage: %(prog)s [options] "
    description = "Run validation analysis"
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--data_type', default='agn', type=str)
    parser.add_argument('--mode', default='fill', type=str)
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('files', nargs='+', default=None,
                        help='List of directories in which the analysis will '
                             'be run.')
    
    args = parser.parse_args()
    
    if args.outdir is not None:
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)

    if args.mode == 'fill':
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [make_outpath(x,args.outdir) for x in args.files]
    elif args.mode == 'collect':        
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [args.outfile]

    print(input_files)
    print(output_files)
        
    if args.batch:

        batch_opts = {'W' : args.time, 'R' : args.resources,
                      'oo' : 'batch.log' }
        args.batch=False
        for infile, outfile in zip(input_files,output_files):
            
            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.',outfile)
                continue
            
            batch_opts['oo'] = os.path.splitext(outfile)[0] + '.log'
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          infile, args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(os.path.basename(__file__),None,logging.INFO)
    logger.info('Starting.')

    for infiles, outfile in zip(input_files,output_files):

        if args.data_type == 'agn':
            acc = AGNAccumulator()
        else:
            raise ValueError('Unsupported data_type: %s' % args.data_type)

        for f in infiles:
            print('process',f)
            acc.process(f)

        print('write',outfile)
        acc.write(outfile)
        
    logger.info('Done.')
Example #15
def main():

    gtselect_keys = ['tmin','tmax','emin','emax','zmax','evtype','evclass',
                     'phasemin','phasemax','convtype','rad','ra','dec']

    gtmktime_keys = ['roicut','filter']
    
    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    
    for k in gtselect_keys:

        if k in ['evtype','evclass','convtype']:
            parser.add_argument('--%s'%k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s'%k, default=None, type=float, help='')

    for k in gtmktime_keys:
        parser.add_argument('--%s'%k, default=None, type=str, help='')
        
    parser.add_argument('--rock_angle', default=None, type=float, help='')
        
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')
        
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of directories in which the analysis will '
                             'be run.')
    
    args = parser.parse_args()

    if args.merge:
        if not args.outfile:
            raise Exception('No output file defined.')        
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [os.path.abspath(args.outfile)]
    else:
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [os.path.join(args.outdir,os.path.basename(x)) for x in args.files]

    if args.batch:

        opts = vars(args).copy()
        del opts['files']
        del opts['batch']
        submit_jobs('fermipy-select',
                    input_files, opts, output_files, overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)


    logger = Logger.get(os.path.basename(__file__),None,logging.INFO)

    logger.info('Starting.')
    
    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)
    
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s',tmpdir)
    logger.info('outdir %s',args.outdir)
    logger.info('outfile %s',args.outfile)
    
    for infiles, outfile in zip(input_files,output_files):

        logger.info('infiles %s',pprint.pformat(infiles))
        logger.info('outfile %s',outfile)
        
        kw = { k : args.__dict__[k] for k in gtselect_keys }
        if kw['emax'] is None:
            kw['emax'] = 1E6

        create_filelist(infiles,'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect',logger,kw)

        kw = { k : args.__dict__[k] for k in gtmktime_keys }
        if kw['roicut'] is None:
            kw['roicut'] = 'no'
        
        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
            if args.rock_angle is not None:
                kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s '%dict(rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
            staged_outfile = kw['outfile']
            run_gtapp('gtmktime',logger,kw)
                            
        logger.info('cp %s %s',staged_outfile,outfile)
        shutil.copy(staged_outfile,outfile)
        
    os.chdir(cwd)
    logger.info('Deleting %s',tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
Example #16
def main():

    gtselect_keys = ['tmin','tmax','emin','emax','zmax','evtype','evclass',
                     'phasemin','phasemax','convtype','rad','ra','dec']

    gtmktime_keys = ['roicut','filter']
    
    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    
    for k in gtselect_keys:

        if k in ['evtype','evclass','convtype']:
            parser.add_argument('--%s'%k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s'%k, default=None, type=float, help='')

    for k in gtmktime_keys:
        parser.add_argument('--%s'%k, default=None, type=str, help='')
        
    parser.add_argument('--rock_angle', default=None, type=float, help='')
        
    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--outfile', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')
        
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into a single file.')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of input FT1 files.')
    
    args = parser.parse_args()

    if args.merge:
        if not args.outfile:
            raise Exception('No output file defined.')        
        input_files = [[os.path.abspath(x) for x in args.files]]
        output_files = [os.path.abspath(args.outfile)]
    else:
        args.outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        input_files = [[os.path.abspath(x)] for x in args.files]
        output_files = [os.path.join(args.outdir,os.path.basename(x)) for x in args.files]

    if args.batch:

        batch_opts = {'W' : args.time, 'R' : args.resources,
                      'oo' : 'batch.log' }
        args.batch = False
        for infile, outfile in zip(input_files,output_files):
            
            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.',outfile)
                continue
            
            batch_opts['oo'] = os.path.splitext(outfile)[0] + '_select.log'
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          infile, args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)


    logger = Logger.get(os.path.basename(__file__),None,logging.INFO)

    logger.info('Starting.')
    
    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)
    
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s',tmpdir)
    logger.info('outdir %s',args.outdir)
    logger.info('outfile %s',args.outfile)
    
    for infiles, outfile in zip(input_files,output_files):

        logger.info('infiles %s',pprint.pformat(infiles))
        logger.info('outfile %s',outfile)
        
        kw = { k : args.__dict__[k] for k in gtselect_keys }
        if kw['emax'] is None:
            kw['emax'] = 1E6

        create_filelist(infiles,'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect',logger,kw)

        kw = { k : args.__dict__[k] for k in gtmktime_keys }
        if kw['roicut'] is None:
            kw['roicut'] = 'no'
        
        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
            if args.rock_angle is not None:
                kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s '%dict(rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
            staged_outfile = kw['outfile']
            run_gtapp('gtmktime',logger,kw)
                            
        logger.info('cp %s %s',staged_outfile,outfile)
        shutil.copy(staged_outfile,outfile)
        
    os.chdir(cwd)
    logger.info('Deleting %s',tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
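
The dispatch_jobs helper is likewise not shown. A minimal sketch of the
self-resubmission pattern it is assumed to implement, using the LSF bsub
options seen above (-W wall time, -oo output log); forwarding of the parsed
command-line options is elided:

import os

def dispatch_jobs(cmd, infiles, args, batch_opts, dry_run=False):
    # Sketch under stated assumptions: submit one LSF job that re-runs this
    # script on the given inputs (option forwarding elided for brevity).
    submit = 'bsub -W %s -oo %s %s %s' % (
        batch_opts['W'], batch_opts['oo'], cmd, ' '.join(infiles))
    print(submit)
    if not dry_run:
        os.system(submit)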
Example #19
def main():

    gtselect_keys = ['tmin', 'tmax', 'emin', 'emax', 'zmax', 'evtype', 'evclass',
                     'phasemin', 'phasemax', 'convtype', 'rad', 'ra', 'dec']

    gtmktime_keys = ['roicut', 'filter']

    usage = "usage: %(prog)s [options] "
    description = "Run gtselect and gtmktime on one or more FT1 files.  "
    "Note that gtmktime will be skipped if no FT2 file is provided."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)

    for k in gtselect_keys:

        if k in ['evtype', 'evclass', 'convtype']:
            parser.add_argument('--%s' % k, default=None, type=int, help='')
        else:
            parser.add_argument('--%s' % k, default=None, type=float, help='')

    for k in gtmktime_keys:
        parser.add_argument('--%s' % k, default=None, type=str, help='')

    parser.add_argument('--rock_angle', default=None, type=float, help='')

    parser.add_argument('--outdir', default=None, type=str,
                        help='Path to output directory used when merge=False.')
    parser.add_argument('--output', default=None, type=str,
                        help='Path to output file used when merge=True.')
    parser.add_argument('--scfile', default=None, type=str, help='')

    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')
    parser.add_argument('--merge', default=False, action='store_true',
                        help='Merge input FT1 files into N files where N is determined '
                        'by files_per_split.')

    parser.add_argument('--files_per_split', default=100,
                        type=int, help='Set the number of files to combine in each '
                        'split of the input file list.')

    parser.add_argument('--file_idx_min', default=None,
                        type=int, help='Start index of the files from the '
                        'input list to process in this job.')

    parser.add_argument('--file_idx_max', default=None,
                        type=int, help='End index (exclusive) of the files '
                        'from the input list to process in this job.')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of files.')

    args = parser.parse_args()

    batch = vars(args).pop('batch')
    files = vars(args).pop('files')
    args.outdir = os.path.abspath(args.outdir)
    files = [os.path.abspath(f) for f in files]

    ft1_files = get_files(files, ['.fit', '.fits'])
    for i, f in enumerate(ft1_files):
        if re.search(r'^root://', f) is None:
            ft1_files[i] = os.path.abspath(f)

    input_files = []
    output_files = []
    files_idx_min = []
    files_idx_max = []
    opts = []

    if args.file_idx_min is not None and args.file_idx_max is not None:

        files_idx_min = [args.file_idx_min]
        files_idx_max = [args.file_idx_max]
        input_files = [files]
        output_files = [args.output]

    elif args.merge:
        if not args.output:
            raise Exception('No output file defined.')

        nfiles = len(ft1_files)
        njob = int(np.ceil(nfiles / float(args.files_per_split)))
        for ijob, i in enumerate(range(0, nfiles, args.files_per_split)):

            if args.outdir is not None:
                mkdir(args.outdir)
                outdir = os.path.abspath(args.outdir)
            else:
                outdir = os.path.dirname(os.path.dirname(args.output))

            outfile = os.path.splitext(os.path.basename(args.output))[0]
            outfile += '_%03i.fits' % (ijob)
            outfile = os.path.join(outdir, outfile)
            input_files += [files]
            output_files += [outfile]
            files_idx_min += [i]
            files_idx_max += [i + args.files_per_split]
            opts += [vars(args).copy()]
            opts[-1]['output'] = outfile
            opts[-1]['file_idx_min'] = i
            opts[-1]['file_idx_max'] = i + args.files_per_split

    else:
        input_files = ft1_files
        files_idx_min = [i for i in range(len(ft1_files))]
        files_idx_max = [i + 1 for i in range(len(ft1_files))]
        output_files = [os.path.join(
            args.outdir, os.path.basename(x)) for x in ft1_files]
        opts = [vars(args).copy() for x in ft1_files]

    if batch:
        submit_jobs('fermipy-select',
                    input_files, opts, output_files, overwrite=args.overwrite,
                    dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.configure(os.path.basename(__file__), None, logging.INFO)
    logger.info('Starting.')

    if args.scfile is not None:
        args.scfile = os.path.abspath(args.scfile)

    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')
    os.chdir(tmpdir)

    logger.info('tmpdir %s', tmpdir)
    logger.info('outdir %s', args.outdir)
    logger.info('output %s', args.output)

    for infiles, outfile, idx_min, idx_max in zip(input_files, output_files,
                                                  files_idx_min, files_idx_max):

        logger.info('infiles %s', pprint.pformat(infiles))
        logger.info('outfile %s', outfile)
        infiles = get_files(infiles, ['.fit', '.fits'])
        if idx_min is not None:
            infiles = infiles[idx_min:idx_max]

        for i, f in enumerate(infiles):

            if re.search(r'^root://', f) is None:
                continue
            os.system('xrdcp %s %s' % (f, f.split('/')[-1]))
            infiles[i] = os.path.join(tmpdir, f.split('/')[-1])

        kw = {k: args.__dict__[k] for k in gtselect_keys}
        if kw['emax'] is None:
            kw['emax'] = 1E6

        create_filelist(infiles, 'list.txt')
        kw['infile'] = 'list.txt'
        kw['outfile'] = 'out.fits'
        staged_outfile = kw['outfile']
        run_gtapp('gtselect', logger, kw)

        kw = {k: args.__dict__[k] for k in gtmktime_keys}
        if kw['roicut'] is None:
            kw['roicut'] = 'no'

        if kw['filter'] is None:
            kw['filter'] = 'DATA_QUAL==1 && LAT_CONFIG==1'
            if args.rock_angle is not None:
                kw['filter'] += ' && ABS(ROCK_ANGLE)<%(rock)s ' % dict(
                    rock=args.rock_angle)
        kw['evfile'] = 'out.fits'
        kw['outfile'] = 'out_filtered.fits'
        if args.scfile is not None:
            kw['scfile'] = args.scfile
            staged_outfile = kw['outfile']
            run_gtapp('gtmktime', logger, kw)

        logger.info('cp %s %s', staged_outfile, outfile)
        shutil.copy(staged_outfile, outfile)

    os.chdir(cwd)
    logger.info('Deleting %s', tmpdir)
    shutil.rmtree(tmpdir)
    logger.info('Done.')
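
The index bookkeeping above assigns files_per_split consecutive files to each
merge job. The same arithmetic in isolation, with hypothetical file names:

import numpy as np

files = ['ft1_%03i.fits' % i for i in range(250)]   # hypothetical inputs
files_per_split = 100
njob = int(np.ceil(len(files) / float(files_per_split)))   # -> 3 jobs
for ijob, idx_min in enumerate(range(0, len(files), files_per_split)):
    idx_max = idx_min + files_per_split
    # each job processes files[idx_min:idx_max] and writes one merged file
    print('job %i of %i: %i files -> merged_%03i.fits'
          % (ijob, njob, len(files[idx_min:idx_max]), ijob))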
Example #20
def main():

    usage = "usage: %(prog)s [options] "
    description = "Preselect data."
    parser = argparse.ArgumentParser(usage=usage, description=description)
    parser.add_argument("-q", "--queue", default="kipac-ibq")
    parser.add_argument("--evfile", default=None, required=True)
    parser.add_argument("--scfile", default=None, required=True)
    parser.add_argument("--evclass",
                        default="P8R2_SOURCE",
                        choices=EVENTCLASS.keys(),
                        help="Event class selection.")
    parser.add_argument("--zmax",
                        default=100.,
                        type=float,
                        help="Maximum zenith angle for selection")
    parser.add_argument("--emin",
                        default=1.,
                        type=float,
                        help="Minimum energy for selection")
    parser.add_argument("--emax",
                        default=1000000.,
                        type=float,
                        help="Minimum energy for selection")
    parser.add_argument("--rock",
                        default=52.,
                        type=float,
                        help="Maximum rocking angle cut")
    parser.add_argument("--rock_min",
                        default=None,
                        type=float,
                        help="Minimum rocking angle cut")
    parser.add_argument("--chatter",
                        default=2,
                        type=int,
                        help="ST chatter level")
    parser.add_argument("--gtifile_grb",
                        default=None,
                        type=str,
                        help="GRB GTI file.")
    parser.add_argument("--gtifile_sun",
                        default=None,
                        type=str,
                        help="Sun GTI file.")

    args = parser.parse_args()

    basedir = pwd()
    evclsmin, evclass = EVENTCLASS[args.evclass]

    # Setup gtmktime filter
    gti_dir = '/u/gl/mdwood/ki20/mdwood/fermi/data'
    gti_grb = '%s/nogrb.gti' % gti_dir
    gti_sfr = '%s/nosolarflares.gti' % gti_dir

    #sfr_gticut = "gtifilter(\"%s\",(START+STOP)/2)" % (gti_sfr)
    #gticut_sun = "ANGSEP(RA_SUN,DEC_SUN,RA_ZENITH,DEC_ZENITH)>115"

    mktime_filter = 'DATA_QUAL==1 && LAT_CONFIG==1 '
    if args.rock is not None:
        mktime_filter += '&& ABS(ROCK_ANGLE)<%(rock)s ' % dict(rock=args.rock)

    if args.rock_min is not None:
        mktime_filter += '&& ABS(ROCK_ANGLE)>%(rock)s ' % dict(
            rock=args.rock_min)

    if args.gtifile_grb:
        gticut_grb = "gtifilter(\"%s\",START) && " % (args.gtifile_grb)
        gticut_grb += "gtifilter(\"%s\",STOP)" % (args.gtifile_grb)
        mktime_filter += '&& %s' % gticut_grb

    if args.gtifile_sun:
        gticut_sun = "gtifilter(\"%s\",(START+STOP)/2)" % (args.gtifile_sun)
        mktime_filter += '&& (ANGSEP(RA_SUN,DEC_SUN,RA_ZENITH,DEC_ZENITH)>115 || %s)' % gticut_sun

    #if args.transient_cut:
    #    mktime_filter += '&& %s' % gticut_grb
    #    mktime_filter += '&& (%s || %s)' % (sfr_gticut, gticut_sun)

    # First take care of the scfile
    scfile = os.path.basename(args.scfile)
    if not os.path.lexists(scfile):
        os.symlink(args.scfile.strip('@'), scfile)

    # Now take care of the evfile
    if args.evfile.startswith('@'):
        evfiles = np.loadtxt(args.evfile.strip('@'), dtype='str')
    else:
        evfiles = [args.evfile]

    # Now take care of the scfile
    if args.scfile.startswith('@'):
        scfiles = np.loadtxt(args.scfile.strip('@'), dtype='str')
    else:
        scfiles = [args.scfile] * len(evfiles)

    # Now create an output directory
    outdir = mkdir(join(basedir, 'ft1'))
    logdir = mkdir(join(basedir, 'log'))

    lst, cmnds, logs = [], [], []
    jobname = "preprocess"
    tstarts, tstops = [], []
    for evfile, scfile in zip(evfiles, scfiles):
        # PFILES
        scratch = join("/scratch", os.environ["USER"])
        os.environ['PFILES'] = scratch + ';' + \
            os.environ['PFILES'].split(';')[-1]

        # Deal with the times
        header = fits.open(evfile)[0].header
        tstart = int(float(header['TSTART']))
        tstop = int(float(header['TSTOP']))
        tstarts.append(tstart)
        tstops.append(tstop)
        outfile = join(
            outdir, "%s_%i_%i_z%g_r%g_ft1.fits" %
            (args.evclass, tstart, tstop, args.zmax, args.rock))

        logfile = join(logdir, basename(outfile).replace('fits', 'log'))

        params = dict(evfile=evfile,
                      scfile=scfile,
                      select="${workdir}/select_ft1.fits",
                      outfile=outfile,
                      evclass=evclass,
                      zmax=args.zmax,
                      emin=args.emin,
                      emax=args.emax,
                      filter=mktime_filter,
                      chatter=args.chatter)

        setup = """
mkdir /scratch/$USER >/dev/null 2>&1;
workdir=$(mktemp -d -p /scratch/$USER);\n
"""

        select = """gtselect \
 infile=%(evfile)s \
 outfile=%(select)s \
 tmin=0 tmax=0 emin=%(emin)s emax=%(emax)s \
 ra=0 dec=0 rad=180 zmax=%(zmax)s \
 evclass=%(evclass)s \
 chatter=4;
""" % params

        mktime = """gtmktime \
 evfile=%(select)s \
 outfile=%(outfile)s \
 scfile=%(scfile)s \
 filter='%(filter)s' \
 roicut='no' \
 chatter=4;
""" % params

        cleanup = "\n\nstatus=$?;\nrm -rf $workdir;\nexit $status;"

        cmnd = setup + select + mktime + cleanup

        logs.append(logfile)
        cmnds.append(cmnd)
        lst.append(outfile)
    bsub(jobname, cmnds, logs, W='300', submit=True)

    lstfile = "%s_%s_%s_z%g_r%g_ft1.lst" % (args.evclass, min(tstarts),
                                            max(tstops), args.zmax, args.rock)
    print("Writing ft1 file list: %s\n" % lstfile)
    np.savetxt(lstfile, sorted(lst), fmt="%s")

    print("Done.")
Example #21
def main():

    usage = "Usage: %(prog)s  [options] input"
    description = "python script"
    parser = argparse.ArgumentParser(usage=usage, description=description)
    parser.add_argument("-d", "--dryrun", action='store_true')
    parser.add_argument("-s", "--sleep", default='1m',
                        help="Pause between")
    parser.add_argument("--ls1", action='store_true', default=False,
                        help='Fetch LS1 files.')
    parser.add_argument("--emin", default=100)
    parser.add_argument("--emax", default=1e6)
    parser.add_argument("--tmin", default=239557414, type=int,
                        help="Min time; default is start of first LAT run")
    parser.add_argument("--tmax", default=None, type=int,
                        help="Default is current time.")
    parser.add_argument("--evtclass", default="Source",
                        help="Event class")
    parser.add_argument("--evtsample", default="P7.6_P130_BASE",
                        choices=['P7.6_P130_BASE', 'P6_public_v3',
                                 'P7_P202_BASE', 'P7_P203_BASE', 'P8_P301_BASE',
                                 'P8_P302_BASE', 'P8_P302_ALL'],
                        help="Event sample")
    parser.add_argument("--chunk", default=int(YEAR // 12), type=int,
                        help="Time chunk for download. Default is ~1 month.")

    args = parser.parse_args()

    basedir = os.environ['PWD']
    codedir = join(basedir, dirname(os.path.relpath(__file__)))
    logdir = join(basedir, "log")
    if not args.dryrun:
        logdir = mkdir(logdir)
    astro = astroserver()

    chunk = args.chunk
    # Might want to think more about how tmin and tmax are set
    first = args.tmin
    if args.tmax is None:
        args.tmax = int(utc2met())

    emin, emax = args.emin, args.emax
    evtclass = args.evtclass
    evtsample = args.evtsample
    sample = '_'.join(evtsample.split('_')[:-1])
    events = evtclass.upper()

    # Break data into chunks
    epsilon = 1e-6
    times = np.arange(args.tmin, args.tmax + epsilon, chunk).astype(int)

    # Get new full ft2 file.
    # Assumption is that it is a longer time period...
    ft2 = join(basedir, "%s_%s_%s_%s_ft2.fits" %
               (sample, events, min(times), max(times)))
    jobname = 'ft2'
    if os.path.exists(ft2):
        # exact ft2 already exists; skip
        print("%s exists; skipping.\n" % ft2)
    else:
        # Remove old ft2 file and replace with link
        if not args.dryrun:
            # for f in glob.glob(join(basedir, "*ft2.fits")):
            #    os.remove(f)
            #    os.symlink(ft2, f)
            for f in glob.glob(join(basedir, "*ft2_fix_checksums.sh")):
                os.remove(f)

        logfile = join(logdir, basename(ft2).replace('fits', 'log'))
        command = astro('storeft2',
                        output_ft2_30s=ft2,
                        _event_sample=evtsample,
                        minTimestamp=min(times),
                        maxTimestamp=max(times),
                        excludeMaxTimestamp='',
                        quiet='',
                        brief='',
                        )

        print(command)
        bsub(jobname, command, logfile, sleep=args.sleep, submit=not args.dryrun,
             W=1000, R='rhel60')

    # Download ft1, ft2 files
    ft1dir = mkdir(join(basedir, 'ft1'))
    ft1_lst, ft1_cmnds, ft1_logs = [], [], []

    ls1dir = mkdir(join(basedir, 'ls1'))
    ls1_lst, ls1_cmnds, ls1_logs = [], [], []

    ft2dir = mkdir(join(basedir, 'ft2'))
    ft2_lst, ft2_cmnds, ft2_logs = [], [], []

    for tmin, tmax in zip(times[:-1], times[1:]):

        # If ft1 file exists, skip it...
        ft1 = join(ft1dir, "%s_%s_%s_%s_ft1.fits" %
                   (sample, events, tmin, tmax))
        if os.path.exists(ft1):
            print("%s exists; skipping.\n" % ft1)
        else:
            ft1_kw = dict(_output_ft1=ft1,
                          _event_sample=evtsample,
                          minTimestamp=tmin,
                          maxTimestamp=tmax,
                          minEnergy=emin,
                          maxEnergy=emax,
                          _event_class_name=evtclass,
                          excludeMaxTimestamp='',
                          quiet='',
                          brief='')

            ft1_cmnd = astro("store", **ft1_kw)
            ft1_logs.append(join(logdir, basename(ft1).replace('fits', 'log')))
            ft1_cmnds.append(ft1_cmnd)
        ft1_lst.append(ft1)

        # If ls1 file exists, skip it...
        ls1 = join(ls1dir, "%s_%s_%s_%s_ls1.fits" %
                   (sample, events, tmin, tmax))
        if not args.ls1:
            print("%s; skipping.\n" % ls1)
        elif os.path.exists(ls1):
            print("%s exists; skipping.\n" % ls1)
        else:
            ls1_kw = dict(_output_ls1=ls1,
                          _event_sample=evtsample,
                          _output_ls1_max_bytes_per_file=0,
                          minTimestamp=tmin,
                          maxTimestamp=tmax,
                          minEnergy=emin,
                          maxEnergy=emax,
                          _event_class_name=evtclass,
                          excludeMaxTimestamp='',
                          quiet='',
                          brief='')

            ls1_cmnd = astro("store", **ls1_kw)
            ls1_logs.append(join(logdir, basename(ls1).replace('fits', 'log')))
            ls1_cmnds.append(ls1_cmnd)
        ls1_lst.append(ls1)

        # If ft2 file exists, skip it...
        ft2 = join(ft2dir, "%s_%s_%s_%s_ft2.fits" %
                   (sample, events, tmin, tmax))
        if os.path.exists(ft2):
            print("%s exists; skipping.\n" % ft2)
        else:
            ft2_cmnd = astro('storeft2',
                             output_ft2_30s=ft2,
                             _event_sample=evtsample,
                             minTimestamp=tmin,
                             maxTimestamp=tmax,
                             excludeMaxTimestamp='',
                             quiet='',
                             brief='',
                             )
            ft2_logs.append(join(logdir, basename(ft2).replace('fits', 'log')))
            ft2_cmnds.append(ft2_cmnd)
        ft2_lst.append(ft2)

    resources = 'bullet,hequ,kiso'

    bsub('ft1', ft1_cmnds, ft1_logs, sleep=args.sleep, submit=not args.dryrun,
         W=1000, R=resources)
    bsub('ls1', ls1_cmnds, ls1_logs, sleep=args.sleep, submit=not args.dryrun,
         W=1000, R=resources)
    bsub('ft2', ft2_cmnds, ft2_logs, sleep=args.sleep, submit=not args.dryrun,
         W=1000, R=resources)

    # Create list of ft1 files
    ft1_lstfile = join(basedir, "%s_%s_%s_%s_ft1.lst" %
                       (sample, events, min(times), max(times)))
    ls1_lstfile = join(basedir, "%s_%s_%s_%s_ls1.lst" %
                       (sample, events, min(times), max(times)))
    ft2_lstfile = join(basedir, "%s_%s_%s_%s_ft2.lst" %
                       (sample, events, min(times), max(times)))
    if not args.dryrun:
        for f in glob.glob(join(basedir, "*.lst")):
            os.remove(f)
        print("Creating ft1 file list: %s" % ft1_lstfile)
        np.savetxt(ft1_lstfile, ft1_lst, fmt='%s')
        print("Creating ls1 file list: %s" % ls1_lstfile)
        np.savetxt(ls1_lstfile, ls1_lst, fmt='%s')
        print("Creating ft2 file list: %s" % ft2_lstfile)
        np.savetxt(ft2_lstfile, ft2_lst, fmt='%s')
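
The download window above is tiled into fixed-length chunks with np.arange.
The same tiling in isolation, assuming MET seconds and a hypothetical one-year
span (the value of YEAR is an assumption):

import numpy as np

YEAR = 86400 * 365              # assumed: seconds per year
tmin = 239557414                # start of first LAT run (the --tmin default)
tmax = tmin + YEAR              # hypothetical one-year window
chunk = int(YEAR // 12)         # ~1 month, the --chunk default
times = np.arange(tmin, tmax + 1e-6, chunk).astype(int)
for t0, t1 in zip(times[:-1], times[1:]):
    print('astroserver store chunk: %i to %i' % (t0, t1))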
Example #22
def main():
    usage = "%(prog)s [config_file] [options]"
    description = """
Run a quick analysis of an ROI performing the basic data and model
preparation and optimizing source parameters.  If the input config
file does not exist a new one will be created using the options
provided on the command-line.
"""
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--config', default=None,
                        help='Set an existing configuration file that will be used as the '
                        'baseline configuration.  Note that parameters set with command-line '
                        'options (emin, emax, etc.) will override any settings in this file.')
    parser.add_argument('--outdir', default=None,
                        help='Set the path to the analysis directory.')
    parser.add_argument('--evfile', default=None,
                        help='Set the path to the FT1 file or list of FT1 files.')
    parser.add_argument('--scfile', default=None,
                        help='Set the path to the FT2 file or list of FT2 files.')
    parser.add_argument('--ltcube', default=None,
                        help='Set the path to the LT cube file.')
    parser.add_argument('--emin', default=None,
                        help='Minimum energy selection (MeV).')
    parser.add_argument('--emax', default=None,
                        help='Maximum energy selection (MeV).')
    parser.add_argument('--logemin', default=None,
                        help='Minimum energy selection (log10(MeV)).')
    parser.add_argument('--logemax', default=None,
                        help='Maximum energy selection (log10(MeV)).')
    parser.add_argument('--target', default=None,
                        help='Name of a catalog source.')
    parser.add_argument('--ra', default=None,
                        help='RA of ROI center.')
    parser.add_argument('--dec', default=None,
                        help='DEC of ROI center.')
    parser.add_argument('config_file', default=None,
                        help='Path to a configuration file.  If this file does not exist then '
                        'a new configuration file will be created.')

    args = vars(parser.parse_args())

    if 'FERMI_DIFFUSE_DIR' not in os.environ:
        os.environ['FERMI_DIFFUSE_DIR'] = '$GLAST_EXT/diffuseModels'

    if not args['config_file']:
        args['config_file'] = os.path.join(args['outdir'], 'config.yaml')

    if not os.path.isdir(os.path.dirname(args['config_file'])):
        utils.mkdir(os.path.dirname(args['config_file']))

    # Create a config file
    if not os.path.isfile(args['config_file']):
        configpath = args['config_file']
        config = create_config(args)
        yaml.dump(config, open(configpath, 'w'))
        cfgstr = yaml.dump(config, default_flow_style=False)
        print('Creating new configuration...')
        print(cfgstr)
        config_file = configpath
    else:
        print('Using existing configuration...')
        config_file = args['config_file']

    run_analysis(config_file)
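
The run_analysis function is not shown in this excerpt. A minimal sketch of
the standard fermipy sequence it presumably wraps; the exact calls below are
an assumption, not taken from this document:

from fermipy.gtanalysis import GTAnalysis

def run_analysis(config_file):
    # Assumed flow: the quick-look sequence described in the help text above
    # (data and model preparation, then source parameter optimization).
    gta = GTAnalysis(config_file)
    gta.setup()            # data and model preparation
    gta.optimize()         # optimize source parameters
    gta.write_roi('roi')   # persist the fitted ROI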
Example #23
def main():
    
    usage = "usage: %(prog)s [options] "
    description = "Run tempo2 application on one or more FT1 files."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    add_lsf_args(parser)
    
    parser.add_argument('--par_file', default=None, type=str, required=True,
                        help='Ephemeris file')

    parser.add_argument('--scfile', default=None, type=str, required=True,
                        help='FT2 file')

    parser.add_argument('--outdir', default=None, type=str, help='')
    
    parser.add_argument('--phase_colname', default='PULSE_PHASE',
                        type=str, help='Set the name of the phase column.')
    
    parser.add_argument('--dry_run', default=False, action='store_true')
    parser.add_argument('--overwrite', default=False, action='store_true')

    parser.add_argument('files', nargs='+', default=None,
                        help='List of input FT1 or merit files.')
    
    args = parser.parse_args()

    if args.outdir is None:
        outdirs = [os.path.dirname(os.path.abspath(x)) for x in args.files]
    else:
        outdir = os.path.abspath(args.outdir)
        mkdir(args.outdir)
        outdirs = [outdir for x in args.files]

    input_files = [os.path.abspath(x) for x in args.files]
    output_files = [os.path.join(y,os.path.basename(x))
                    for x, y in zip(args.files,outdirs)]
    
    if args.batch:

        batch_opts = {'W' : args.time, 'R' : args.resources,
                      'oo' : 'batch.log' }
        args.batch = False
        for infile, outfile in zip(input_files,output_files):
            
            if os.path.isfile(outfile) and not args.overwrite:
                print('Output file exists, skipping.',outfile)
                continue
            
            batch_opts['oo'] = os.path.splitext(outfile)[0] + '_tempo2.log'
            # __file__ may point at the compiled .pyc/.pyd; strip the trailing
            # character so the job re-invokes the .py source.
            dispatch_jobs('python ' + os.path.abspath(__file__.rstrip('cd')),
                          [infile], args, batch_opts, dry_run=args.dry_run)
        sys.exit(0)

    logger = Logger.get(__file__,None,logging.INFO)
        
    par_file = os.path.abspath(args.par_file)
    ft2_file = os.path.abspath(args.scfile)
    
    cwd = os.getcwd()
    user = os.environ['USER']
    tmpdir = tempfile.mkdtemp(prefix=user + '.', dir='/scratch')

    logger.info('tmpdir %s',tmpdir)
    os.chdir(tmpdir)

    for infile, outfile in zip(input_files,output_files):

        staged_infile = os.path.join(tmpdir, os.path.basename(infile))
        logFile = os.path.splitext(infile)[0] + '_tempo2.log'

        print('cp %s %s' % (infile, staged_infile))
        os.system('cp %s %s' % (infile, staged_infile))

        if re.search(r'\.root?', infile) is not None:
            phase_merit(staged_infile, outfile, logFile, ft2_file, par_file, args.dry_run)
        elif re.search(r'\.fits?', infile) is not None:
            phase_ft1(staged_infile, outfile, logFile, ft2_file, par_file, args.dry_run)
        else:
            print('Unrecognized file extension: ', infile)

    os.chdir(cwd)
    shutil.rmtree(tmpdir)
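
The phase_ft1 and phase_merit helpers are not shown. A sketch of what
phase_ft1 plausibly does with the tempo2 fermi plugin; the exact plugin flags
are an assumption:

import os

def phase_ft1(infile, outfile, logfile, ft2_file, par_file, dry_run=False):
    # Assumed implementation: the tempo2 'fermi' plugin computes a PULSE_PHASE
    # column for the FT1 file in place; the result is then copied to outfile.
    cmd = ('tempo2 -gr fermi -ft1 %s -ft2 %s -f %s -phase > %s 2>&1'
           % (infile, ft2_file, par_file, logfile))
    print(cmd)
    if not dry_run:
        os.system(cmd)
        os.system('cp %s %s' % (infile, outfile))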
Example #24
def main():

    usage = "usage: %(prog)s [options] "
    description = "Preselect data."
    parser = argparse.ArgumentParser(usage=usage, description=description)
    parser.add_argument("-q", "--queue", default="kipac-ibq")
    parser.add_argument("--evfile", default=None, required=True)
    parser.add_argument("--scfile", default=None, required=True)
    parser.add_argument("--evclass", default="P8R2_SOURCE",
                        choices=EVENTCLASS.keys(),
                        help="Event class selection.")
    parser.add_argument("--zmax", default=100., type=float,
                        help="Maximum zenith angle for selection")
    parser.add_argument("--emin", default=1., type=float,
                        help="Minimum energy for selection")
    parser.add_argument("--emax", default=1000000., type=float,
                        help="Minimum energy for selection")
    parser.add_argument("--transient_cut", default=False, action='store_true',
                        help="Apply selection to remove GRBs and SFRs.")
    parser.add_argument("--rock", default=52., type=float,
                        help="Maximum rocking angle cut")
    parser.add_argument("--rock_min", default=None, type=float,
                        help="Minimum rocking angle cut")
    parser.add_argument("--chatter", default=2, type=int,
                        help="ST chatter level")

    args = parser.parse_args()

    basedir = pwd()
    evclsmin, evclass = EVENTCLASS[args.evclass]

    # Setup gtmktime filter
    gti_dir = '/u/gl/mdwood/ki20/mdwood/fermi/data'
    gti_grb = '%s/nogrb.gti' % gti_dir
    gti_sfr = '%s/nosolarflares.gti' % gti_dir

    grb_gticut = "gtifilter(\"%s\",START) && " % (gti_grb)
    grb_gticut += "gtifilter(\"%s\",STOP)" % (gti_grb)
    sfr_gticut = "gtifilter(\"%s\",(START+STOP)/2)" % (gti_sfr)
    sun_gticut = "ANGSEP(RA_SUN,DEC_SUN,RA_ZENITH,DEC_ZENITH)>115"

    mktime_filter = 'DATA_QUAL==1 && LAT_CONFIG==1 '
    if args.rock is not None:
        mktime_filter += '&& ABS(ROCK_ANGLE)<%(rock)s ' % dict(rock=args.rock)

    if args.rock_min is not None:
        mktime_filter += '&& ABS(ROCK_ANGLE)>%(rock)s ' % dict(rock=args.rock_min)

    if args.transient_cut:
        mktime_filter += '&& %s' % grb_gticut
        mktime_filter += '&& (%s || %s)' % (sfr_gticut, sun_gticut)

    # First take care of the scfile
    scfile = os.path.basename(args.scfile)
    if not os.path.lexists(scfile):
        os.symlink(args.scfile.strip('@'), scfile)

    # Now take care of the evfile
    if args.evfile.startswith('@'):
        evfiles = np.loadtxt(args.evfile.strip('@'), dtype='str')
    else:
        evfiles = [args.evfile]

    # Now take care of the scfile
    if args.scfile.startswith('@'):
        scfiles = np.loadtxt(args.scfile.strip('@'), dtype='str')
    else:
        scfiles = [args.scfile] * len(evfiles)

    # Now create an output directory
    outdir = mkdir(join(basedir, 'ft1'))
    logdir = mkdir(join(basedir, 'log'))

    lst, cmnds, logs = [], [], []
    jobname = "preprocess"
    tstarts, tstops = [], []
    for evfile, scfile in zip(evfiles, scfiles):
        # PFILES
        scratch = join("/scratch", os.environ["USER"])
        os.environ['PFILES'] = scratch + ';' + \
            os.environ['PFILES'].split(';')[-1]

        # Deal with the times
        header = fits.open(evfile)[0].header
        tstart = int(float(header['TSTART']))
        tstop = int(float(header['TSTOP']))
        tstarts.append(tstart)
        tstops.append(tstop)

        if args.transient_cut:
            outfile = join(outdir, "%s_%i_%i_z%g_r%g_gti_ft1.fits" % (
                args.evclass, tstart, tstop, args.zmax, args.rock))
        else:
            outfile = join(outdir, "%s_%i_%i_z%g_r%g_ft1.fits" %
                           (args.evclass, tstart, tstop, args.zmax, args.rock))

        logfile = join(logdir, basename(outfile).replace('fits', 'log'))

        params = dict(evfile=evfile,
                      scfile=scfile,
                      select="${workdir}/select_ft1.fits",
                      outfile=outfile,
                      evclass=evclass,
                      zmax=args.zmax,
                      emin=args.emin,
                      emax=args.emax,
                      filter=mktime_filter,
                      chatter=args.chatter)

        setup = """
mkdir /scratch/$USER >/dev/null 2>&1;
workdir=$(mktemp -d -p /scratch/$USER);\n
"""

        select = """gtselect \
 infile=%(evfile)s \
 outfile=%(select)s \
 tmin=0 tmax=0 emin=%(emin)s emax=%(emax)s \
 ra=0 dec=0 rad=180 zmax=%(zmax)s \
 evclass=%(evclass)s \
 chatter=4;
""" % params

        mktime = """gtmktime \
 evfile=%(select)s \
 outfile=%(outfile)s \
 scfile=%(scfile)s \
 filter='%(filter)s' \
 roicut='no' \
 chatter=4;
""" % params

        cleanup = "\n\nstatus=$?;\nrm -rf $workdir;\nexit $status;"

        cmnd = setup + select + mktime + cleanup

        logs.append(logfile)
        cmnds.append(cmnd)
        lst.append(outfile)
    bsub(jobname, cmnds, logs, W='300', submit=True)

    lstfile = "%s_%s_%s_z%g_r%g_ft1.lst" % (
        args.evclass, min(tstarts), max(tstops), args.zmax, args.rock)
    print "Writing ft1 file list: %s\n" % lstfile
    np.savetxt(lstfile, sorted(lst), fmt="%s")

    print "Done."
Example #25
for c in args.configs:

    config = utils.merge_dict(config,yaml.load(open(c)),
                              add_new_keys=True)   
#    config.update(yaml.load(open(c)))

src_list = yaml.load(open(args.source_list))
basedir = args.basedir

bash_script = """
cat $0
python {script} --config={config} --source="{source}"
"""

scriptdir = os.path.join(basedir,'scripts')
utils.mkdir(scriptdir)
os.system('cp %s %s'%(args.script,scriptdir))

for name, v in src_list.items():

#    if isinstance(target,dict):
#        name = 'hp_region_%03i_%04i'%(target['nside'],target['pix'])
#        theta, phi = hp.pix2ang(target['nside'],target['pix'])        
#        config['selection']['glat'] = np.degrees(np.pi/2.-theta)
#        config['selection']['glon'] = np.degrees(phi)
#    else:
#        name = target
#        config['selection']['target'] = name 

    print(name)
    dirname = os.path.join(basedir,name)