Example 1
    def _make_extension_fits(self, ext, filename, **kwargs):

        maps = {
            'EXT_TOT_MAP': ext['ext_tot_map'],
            'EXT_SRC_MAP': ext['ext_src_map'],
            'EXT_BKG_MAP': ext['ext_bkg_map'],
            'PTSRC_TOT_MAP': ext['ptsrc_tot_map'],
            'PTSRC_SRC_MAP': ext['ptsrc_src_map'],
            'PTSRC_BKG_MAP': ext['ptsrc_bkg_map']
        }

        hdu_images = []
        for k, v in sorted(maps.items()):
            if v is None:
                continue
            hdu_images += [v.make_hdu(k)]

        tab = fits_utils.dict_to_table(ext)
        hdu_data = fits.table_to_hdu(tab)
        hdu_data.name = 'EXT_DATA'

        if ext.get('tsmap'):
            hdus = [ext['tsmap'].make_hdu(hdu='PRIMARY')]
        else:
            hdus = [fits.PrimaryHDU()]

        hdus += [hdu_data] + hdu_images
        hdus[0].header['CONFIG'] = json.dumps(utils.tolist(ext['config']))
        hdus[1].header['CONFIG'] = json.dumps(utils.tolist(ext['config']))
        fits_utils.write_hdus(hdus,
                              filename,
                              keywords={'SRCNAME': ext['name']})
Example 2
    def _make_extension_fits(self, ext, filename, **kwargs):

        maps = {'EXT_TOT_MAP': ext['ext_tot_map'],
                'EXT_SRC_MAP': ext['ext_src_map'],
                'EXT_BKG_MAP': ext['ext_bkg_map'],
                'PTSRC_TOT_MAP': ext['ptsrc_tot_map'],
                'PTSRC_SRC_MAP': ext['ptsrc_src_map'],
                'PTSRC_BKG_MAP': ext['ptsrc_bkg_map']}

        hdu_images = []
        for k, v in sorted(maps.items()):
            if v is None:
                continue
            hdu_images += [v.make_hdu(k)]

        tab = fits_utils.dict_to_table(ext)
        hdu_data = fits.table_to_hdu(tab)
        hdu_data.name = 'EXT_DATA'

        if ext.get('tsmap'):
            hdus = [ext['tsmap'].make_hdu(hdu='PRIMARY')]
        else:
            hdus = [fits.PrimaryHDU()]

        hdus += [hdu_data] + hdu_images
        hdus[0].header['CONFIG'] = json.dumps(utils.tolist(ext['config']))
        hdus[1].header['CONFIG'] = json.dumps(utils.tolist(ext['config']))
        fits_utils.write_hdus(hdus, filename,
                              keywords={'SRCNAME': ext['name']})
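Both variants above hand the final write to fermipy's internal fits_utils.write_hdus. For readers without fermipy, a minimal sketch of the same HDU assembly in plain astropy (the table columns, source name, and output filename are illustrative, not fermipy's API):

from astropy.io import fits
from astropy.table import Table

# Stand-in for fits_utils.dict_to_table(ext); the columns are made up.
tab = Table({'ts': [25.0], 'ext': [0.1]})
hdu_data = fits.table_to_hdu(tab)
hdu_data.name = 'EXT_DATA'

hdus = fits.HDUList([fits.PrimaryHDU(), hdu_data])
# write_hdus presumably stamps keywords such as SRCNAME before writing.
hdus[0].header['SRCNAME'] = 'mkn421'
hdus.writeto('ext_output.fits', overwrite=True)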
Example 3
def clone_configs(basedir, base_configs, opt_configs, scripts, args=''):
    """
    """
    config = {}
    for c in base_configs:
        config = utils.merge_dict(config,
                                  yaml.safe_load(open(c)),
                                  add_new_keys=True)

    scriptdir = os.path.abspath(os.path.join(basedir, 'scripts'))
    utils.mkdir(scriptdir)
    bash_scripts = []
    for script_in in scripts:
        bash_script = """
cat $0
{scriptexe} --config={config} {args}
"""

        if os.path.isfile(script_in):
            script = os.path.basename(script_in)
            scriptpath = os.path.join(scriptdir, script)
            scriptexe = 'python ' + scriptpath
            os.system('cp %s %s' % (script_in, scriptdir))
        elif cmd_exists(script_in):
            scriptexe = script_in
            script = script_in
        else:
            raise Exception('Could not find script: %s' % script_in)

        bash_scripts.append((script, scriptexe, bash_script))

    for name, vdict in opt_configs.items():

        dirname = os.path.abspath(os.path.join(basedir, name))
        utils.mkdir(dirname)

        cfgfile = os.path.join(dirname, 'config.yaml')
        for script_in, bash_script in zip(scripts, bash_scripts):
            runscript = os.path.splitext(bash_script[0])[0] + '.sh'
            runscript = os.path.join(dirname, runscript)
            with open(runscript, 'wt') as f:
                f.write(bash_script[2].format(source=name,
                                              scriptexe=bash_script[1],
                                              config=cfgfile,
                                              args=args))

        if not config:
            continue

        c = copy.deepcopy(config)
        c = utils.merge_dict(c, vdict, add_new_keys=True)
        yaml.dump(utils.tolist(c),
                  open(cfgfile, 'w'),
                  default_flow_style=False)
Example 4
def clone_configs(basedir, base_configs, opt_configs, scripts, args=''):
    """
    """
    config = {}
    for c in base_configs:
        config = utils.merge_dict(config, yaml.safe_load(open(c)),
                                  add_new_keys=True)

    scriptdir = os.path.abspath(os.path.join(basedir, 'scripts'))
    utils.mkdir(scriptdir)
    bash_scripts = []
    for script_in in scripts:
        bash_script = """
cat $0
{scriptexe} --config={config} {args}
"""

        if os.path.isfile(script_in):
            script = os.path.basename(script_in)
            scriptpath = os.path.join(scriptdir, script)
            scriptexe = 'python ' + scriptpath
            os.system('cp %s %s' % (script_in, scriptdir))
        elif cmd_exists(script_in):
            scriptexe = script_in
            script = script_in
        else:
            raise Exception('Could not find script: %s' % script_in)

        bash_scripts.append((script, scriptexe, bash_script))

    for name, vdict in opt_configs.items():

        dirname = os.path.abspath(os.path.join(basedir, name))
        utils.mkdir(dirname)

        cfgfile = os.path.join(dirname, 'config.yaml')
        for script_in, bash_script in zip(scripts, bash_scripts):
            runscript = os.path.splitext(bash_script[0])[0] + '.sh'
            runscript = os.path.join(dirname, runscript)
            with open(runscript, 'wt') as f:
                f.write(bash_script[2].format(source=name,
                                              scriptexe=bash_script[1],
                                              config=cfgfile,
                                              args=args))

        if not config:
            continue

        c = copy.deepcopy(config)
        c = utils.merge_dict(c, vdict, add_new_keys=True)
        yaml.dump(utils.tolist(c), open(cfgfile, 'w'),
                  default_flow_style=False)
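Both versions of clone_configs lean on utils.merge_dict with add_new_keys=True to layer each option-specific dict over the shared base configuration. A minimal sketch of that recursive-merge behavior as read from the call sites (not fermipy's exact implementation):

import copy

def merge_dict_sketch(d0, d1):
    """Recursively merge d1 into a copy of d0; values from d1 win."""
    out = copy.deepcopy(d0)
    for k, v in d1.items():
        if isinstance(v, dict) and isinstance(out.get(k), dict):
            out[k] = merge_dict_sketch(out[k], v)
        else:
            out[k] = v
    return out

base = {'selection': {'emin': 100, 'emax': 100000}}
opt = {'selection': {'emin': 1000}, 'model': {'src_radius': 5.0}}
print(merge_dict_sketch(base, opt))
# -> {'selection': {'emin': 1000, 'emax': 100000}, 'model': {'src_radius': 5.0}}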
Example 5
def _process_lc_bin(itime, name, config, basedir, workdir, diff_sources,
                    const_spectrum, roi, lck_params, **kwargs):
    i, time = itime

    roi = copy.deepcopy(roi)

    config = copy.deepcopy(config)
    config['selection']['tmin'] = time[0]
    config['selection']['tmax'] = time[1]

    # create output directories labeled in MET vals
    outdir = basedir + 'lightcurve_%.0f_%.0f' % (time[0], time[1])
    config['fileio']['outdir'] = os.path.join(workdir, outdir)
    config['logging']['prefix'] = 'lightcurve_%.0f_%.0f ' % (time[0], time[1])
    config['fileio']['logfile'] = os.path.join(config['fileio']['outdir'],
                                               'fermipy.log')
    utils.mkdir(config['fileio']['outdir'])

    yaml.dump(utils.tolist(config),
              open(os.path.join(config['fileio']['outdir'],
                                'config.yaml'), 'w'))

    xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

    try:
        from fermipy.gtanalysis import GTAnalysis
        gta = GTAnalysis(config, roi, loglevel=logging.DEBUG)
        gta.logger.info('Fitting time range %i %i' % (time[0], time[1]))
        gta.setup()
    except Exception:
        print('Analysis failed in time range %i %i' %
              (time[0], time[1]))
        print(sys.exc_info()[0])
        raise

    gta._lck_params = lck_params
    # Recompute source map for source of interest and sources within 3 deg
    if gta.config['gtlike']['use_scaled_srcmap']:
        names = [s.name for s in
                 gta.roi.get_sources(distance=3.0, skydir=gta.roi[name].skydir)
                 if not s.diffuse]
        gta.reload_sources(names)

    # Write the current model
    gta.write_xml(xmlfile)

    # Optimize the model
    gta.optimize(skip=diff_sources,
                 shape_ts_threshold=kwargs.get('shape_ts_threshold'))

    fit_results = _fit_lc(gta, name, **kwargs)
    gta.write_xml('fit_model_final.xml')
    srcmodel = copy.deepcopy(gta.get_src_model(name))
    numfree = gta.get_free_param_vector().count(True)

    # Fallback values used if no pass of the threshold loop below succeeds;
    # without them the output dict construction below can raise a NameError.
    const_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results = fit_results.copy()
    fixed_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results['fit_success'] = False

    max_ts_thresholds = [None, 4, 9]
    for max_ts in max_ts_thresholds:
        if max_ts is not None:
            gta.free_sources(minmax_ts=[None, max_ts], free=False, exclude=[name])

        # rerun fit using params from full time (constant) fit using same
        # param vector as the successful fit to get loglike
        specname, spectrum = const_spectrum
        gta.set_source_spectrum(name, spectrum_type=specname,
                                spectrum_pars=spectrum,
                                update_source=False)
        gta.free_source(name, free=False)
        const_fit_results = gta.fit()
        if not const_fit_results['fit_success']:
            continue
        const_srcmodel = gta.get_src_model(name)

        # rerun using shape fixed to full time fit
        # for the fixed-shape lightcurve
        gta.free_source(name, pars='norm')
        fixed_fit_results = gta.fit()
        if not fixed_fit_results['fit_success']:
            continue
        fixed_srcmodel = gta.get_src_model(name)
        break
    
    # special lc output
    o = {'flux_const': const_srcmodel['flux'],
         'loglike_const': const_fit_results['loglike'],
         'fit_success': fit_results['fit_success'],
         'fit_success_fixed': fixed_fit_results['fit_success'],
         'fit_quality': fit_results['fit_quality'],
         'fit_status': fit_results['fit_status'],
         'num_free_params': numfree,
         'config': config}

    # full flux output
    if fit_results['fit_success'] == 1:
        for k in defaults.source_flux_output.keys():
            if k not in srcmodel:
                continue
            o[k] = srcmodel[k]
            o[k+'_fixed'] = fixed_srcmodel[k]

    gta.logger.info('Finished time range %i %i' % (time[0], time[1]))
    return o
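The max_ts_thresholds loop above is a fallback strategy: if the constant-spectrum or fixed-shape fit fails to converge, freeze more low-TS sources and retry. Stripped of the fermipy specifics, the pattern reduces to this sketch (both helpers are hypothetical):

for max_ts in [None, 4, 9]:
    if max_ts is not None:
        # Hypothetical helper: fix the parameters of sources with TS < max_ts.
        freeze_sources_below_ts(max_ts)
    result = run_fit()  # hypothetical helper standing in for gta.fit()
    if result['fit_success']:
        break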
Example 6
    def _make_lc(self, name, **kwargs):

        # make array of time values in MET
        if kwargs['time_bins']:
            times = np.array(kwargs['time_bins'])
        elif kwargs['nbins']:
            times = np.linspace(self.tmin, self.tmax,
                                kwargs['nbins'] + 1)
        else:
            times = np.arange(self.tmin, self.tmax,
                              kwargs['binsz'])

        o = self._create_lc_dict(name, times)
        o['config'] = kwargs

        diff_sources = [s.name for s in self.roi.sources if s.diffuse]
        skydir = self.roi[name].skydir

        if kwargs.get('free_radius', None) is not None:
            kwargs['free_sources'] += [
                s.name for s in
                self.roi.get_sources(skydir=skydir,
                                     distance=kwargs['free_radius'],
                                     exclude=diff_sources)]

        for i, time in enumerate(zip(times[:-1], times[1:])):

            self.logger.info('Fitting time range %i %i', time[0], time[1])

            config = copy.deepcopy(self.config)
            config['selection']['tmin'] = time[0]
            config['selection']['tmax'] = time[1]
            config['ltcube']['use_local_ltcube'] = kwargs['use_local_ltcube']
            config['model']['diffuse_dir'] = [self.workdir]

            if config['components'] is None:
                config['components'] = []

            for j, c in enumerate(self.components):
                if len(config['components']) <= j:
                    config['components'] += [{}]

                data_cfg = {'evfile': c.files['ft1'],
                            'scfile': c.data_files['scfile'],
                            'ltcube': None}

                config['components'][j] = \
                    utils.merge_dict(config['components'][j],
                                     {'data': data_cfg},
                                     add_new_keys=True)

            # create output directories labeled in MET vals
            outdir = 'lightcurve_%.0f_%.0f' % (time[0], time[1])
            config['fileio']['outdir'] = os.path.join(self.workdir, outdir)
            utils.mkdir(config['fileio']['outdir'])

            yaml.dump(utils.tolist(config),
                      open(os.path.join(config['fileio']['outdir'],
                                        'config.yaml'), 'w'))

            xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

            # Make a copy of the source maps. TODO: Implement a
            # correction to account for the difference in exposure for
            # each time bin.
            #     for c in self.components:
            #        shutil.copy(c._files['srcmap'],config['fileio']['outdir'])

            try:
                gta = self.clone(config, loglevel=logging.DEBUG)
                gta.setup()
            except Exception:
                self.logger.warning('Analysis failed in time range %i %i',
                                    time[0], time[1])
                continue

            # Write the current model
            gta.write_xml(xmlfile)

            # Optimize the model, skipping the diffuse sources
            gta.optimize(skip=diff_sources)

            fit_results = self._fit_lc(gta, name, **kwargs)
            gta.write_xml('fit_model_final.xml')
            output = gta.get_src_model(name)

            if fit_results['fit_success'] == 1:
                for k in defaults.source_flux_output.keys():
                    if k not in output:
                        continue
                    if (isinstance(output[k], np.ndarray) and
                            o[k][i].shape != output[k].shape):
                        self.logger.warning(
                            'Incompatible shape for column %s', k)
                        continue
                    o[k][i] = output[k]

            self.logger.info('Finished time range %i %i', time[0], time[1])

        src = self.roi.get_source_by_name(name)
        return o
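The three binning modes at the top of _make_lc all reduce to an array of MET bin edges that the main loop walks pairwise. A small runnable sketch with made-up times:

import numpy as np

tmin, tmax = 239557417.0, 239643817.0      # illustrative MET range
edges = np.linspace(tmin, tmax, 4 + 1)     # the nbins=4 branch
for t0, t1 in zip(edges[:-1], edges[1:]):  # consecutive edge pairs
    print('bin %.0f %.0f' % (t0, t1))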
Example 7
def _process_lc_bin(itime, name, config, basedir, workdir, diff_sources,
                    const_spectrum, roi, lck_params, **kwargs):
    i, time = itime

    roi = copy.deepcopy(roi)

    config = copy.deepcopy(config)
    config['selection']['tmin'] = time[0]
    config['selection']['tmax'] = time[1]

    # create output directories labeled in MET vals
    outdir = basedir + 'lightcurve_%.0f_%.0f' % (time[0], time[1])
    config['fileio']['outdir'] = os.path.join(workdir, outdir)
    config['logging']['prefix'] = 'lightcurve_%.0f_%.0f ' % (time[0], time[1])
    config['fileio']['logfile'] = os.path.join(config['fileio']['outdir'],
                                               'fermipy.log')
    utils.mkdir(config['fileio']['outdir'])

    yaml.dump(utils.tolist(config),
              open(os.path.join(config['fileio']['outdir'],
                                'config.yaml'), 'w'))

    xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

    try:
        from fermipy.gtanalysis import GTAnalysis
        gta = GTAnalysis(config, roi, loglevel=logging.DEBUG)
        gta.logger.info('Fitting time range %i %i' % (time[0], time[1]))
        gta.setup()
    except Exception:
        print('Analysis failed in time range %i %i' %
              (time[0], time[1]))
        print(sys.exc_info()[0])
        raise

    gta._lck_params = lck_params
    # Recompute source map for source of interest and sources within 3 deg
    if gta.config['gtlike']['use_scaled_srcmap']:
        names = [s.name for s in
                 gta.roi.get_sources(distance=3.0, skydir=gta.roi[name].skydir)
                 if not s.diffuse]
        gta.reload_sources(names)

    # Write the current model
    gta.write_xml(xmlfile)

    # Optimize the model
    gta.optimize(skip=diff_sources,
                 shape_ts_threshold=kwargs.get('shape_ts_threshold'))

    fit_results = _fit_lc(gta, name, **kwargs)
    gta.write_xml('fit_model_final.xml')
    srcmodel = copy.deepcopy(gta.get_src_model(name))
    numfree = gta.get_free_param_vector().count(True)
    
    # Fallback values used if no pass of the threshold loop below succeeds
    const_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results = fit_results.copy()
    fixed_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results['fit_success'], fixed_srcmodel['fit_success'] = False, False
    fixed_fit_results['fit_quality'], fixed_srcmodel['fit_quality'] = 0, 0
    max_ts_thresholds = [None, 4, 9, 16, 25]
    for max_ts in max_ts_thresholds:
        if max_ts is not None:
            gta.free_sources(minmax_ts=[None, max_ts], free=False, exclude=[name])

        # rerun fit using params from full time (constant) fit using same
        # param vector as the successful fit to get loglike
        specname, spectrum = const_spectrum
        gta.set_source_spectrum(name, spectrum_type=specname,
                                spectrum_pars=spectrum,
                                update_source=False)
        gta.free_source(name, free=False)
        const_fit_results = gta.fit()
        if not const_fit_results['fit_success']:
            continue
        const_srcmodel = gta.get_src_model(name)
        # rerun using shape fixed to full time fit
        # for the fixed-shape lightcurve
        gta.free_source(name, pars='norm')
        fixed_fit_results = gta.fit()
        if not fixed_fit_results['fit_success']:
            continue
        fixed_srcmodel = gta.get_src_model(name)
        break
    
    # special lc output
    o = {'flux_const': const_srcmodel['flux'],
         'loglike_const': const_fit_results['loglike'],
         'fit_success': fit_results['fit_success'],
         'fit_success_fixed': fixed_fit_results['fit_success'],
         'fit_quality': fit_results['fit_quality'],
         'fit_status': fit_results['fit_status'],
         'num_free_params': numfree,
         'config': config}
    # full flux output
    if fit_results['fit_success'] == 1:
        for k in defaults.source_flux_output.keys():
            if k not in srcmodel:
                continue
            o[k] = srcmodel[k]
            o[k+'_fixed'] = fixed_srcmodel[k]

    gta.logger.info('Finished time range %i %i' % (time[0], time[1]))
    return o
Example 8
import os

from fermipy import gtutils
from fermipy.utils import tolist
import yaml
import numpy as np

gtutils.init_function_pars()


par_names = gtutils.FUNCTION_PAR_NAMES

o = {}

for k, v in par_names.items():

    o.setdefault(k, {})
    o[k]['par_names'] = v
    o[k]['norm_par'] = gtutils.FUNCTION_NORM_PARS[k]
    o[k]['defaults'] = gtutils.FUNCTION_DEFAULT_PARS[k]

    for pname, p in o[k]['defaults'].items():
        o[k]['defaults'][pname]['error'] = np.nan

o['CompositeSource'] = {'defaults': {}, 'norm_par': None, 'par_names': []}

modelfile = os.path.join('$FERMIPY_ROOT',
                         'data', 'models.yaml')


yaml.dump(tolist(o),
          open(os.path.expandvars(modelfile), 'w'))
Example 9
import os
from fermipy import gtutils
from fermipy.utils import tolist
import yaml
import numpy as np

gtutils.init_function_pars()

par_names = gtutils.FUNCTION_PAR_NAMES

o = {}

for k, v in par_names.items():

    o.setdefault(k, {})
    o[k]['par_names'] = v
    o[k]['norm_par'] = gtutils.FUNCTION_NORM_PARS[k]
    o[k]['defaults'] = gtutils.FUNCTION_DEFAULT_PARS[k]

    for pname, p in o[k]['defaults'].items():
        o[k]['defaults'][pname]['error'] = np.nan

o['CompositeSource'] = {'defaults': {}, 'norm_par': None, 'par_names': []}

modelfile = os.path.join('$FERMIPY_DATA_DIR', 'models.yaml')

yaml.dump(tolist(o), open(os.path.expandvars(modelfile), 'w'))
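The dumped models.yaml can be read back with yaml.safe_load; a minimal sketch assuming the same $FERMIPY_DATA_DIR layout:

import os
import yaml

modelfile = os.path.expandvars(
    os.path.join('$FERMIPY_DATA_DIR', 'models.yaml'))
with open(modelfile) as f:
    models = yaml.safe_load(f)
print(sorted(models))  # function names, e.g. 'CompositeSource', 'PowerLaw'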
Example 10
# Fragment: the body of a loop over (name, v) pairs from an options dict;
# the enclosing loop header and the branch commented out below are not shown.
#    else:
#        name = target
#        config['selection']['target'] = name

    print(name)
    dirname = os.path.join(basedir, name)
    utils.mkdir(dirname)

    script = os.path.basename(args.script)
    scriptpath = os.path.abspath(os.path.join(dirname, script))
    cfgfile = os.path.abspath(os.path.join(dirname, 'config.yaml'))

    os.system('ln -sf %s %s' % (os.path.abspath(os.path.join(scriptdir, script)),
                                scriptpath))

    runscript = os.path.abspath(
        os.path.join(dirname, os.path.splitext(script)[0] + '.sh'))

    with open(runscript, 'wt') as f:
        f.write(bash_script.format(source=name, config=cfgfile,
                                   script=scriptpath))

    if not config:
        continue

    c = copy.deepcopy(config)
    c = utils.merge_dict(c, v, add_new_keys=True)
    yaml.dump(utils.tolist(c), open(cfgfile, 'w'), default_flow_style=False)
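The os.system('ln -sf ...') call in this fragment (like the cp in clone_configs) can be done portably with the standard library; a sketch of an equivalent force-symlink helper:

import os

def force_symlink(src, dst):
    """Equivalent of 'ln -sf src dst': replace dst if it already exists."""
    if os.path.islink(dst) or os.path.exists(dst):
        os.remove(dst)
    os.symlink(src, dst)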