Example #1
def main():

    usage = "usage: %(prog)s [config file]"
    description = "Run fermipy analysis chain."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--config', default='sample_config.yaml')
    parser.add_argument('--source', default=None)

    args = parser.parse_args()
    gta = GTAnalysis(args.config)

    if args.source is None:
        src_name = gta.roi.sources[0].name
    else:
        src_name = args.source

    gta.setup()
    gta.optimize()

    loc = gta.localize(src_name, free_radius=1.0, update=True, make_plots=True)

    model = {'Index': 2.0, 'SpatialModel': 'PointSource'}
    srcs = gta.find_sources(model=model,
                            sqrt_ts_threshold=5.0,
                            min_separation=0.5)

    sed = gta.sed(src_name, free_radius=1.0, make_plots=True)
    gta.tsmap(make_plots=True)
    gta.write_roi('fit0')
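    # binsz is given in seconds: 86400 s * 7 = weekly light-curve bins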
    lc = gta.lightcurve(src_name,
                        binsz=86400. * 7.0,
                        free_radius=3.0,
                        use_scaled_srcmap=True,
                        multithread=False)
Example #2
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError("Trying to run fermipy analysis, but don't have ST")

        gta = GTAnalysis(args.config,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        gta.setup(overwrite=False)
        gta.load_roi('fit_baseline')
        gta.print_roi()

        basedir = os.path.dirname(args.config)
        # This should be a no-op, b/c it was done in the baseline analysis

        gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')

        for profile in args.profiles:
            pkey, pdict = SEDAnalysis._build_profile_dict(basedir, profile)
            # test_case needs to be a dict with spectrum and morphology
            gta.add_source(pkey, pdict)
            # refit the ROI
            gta.fit()
            # build the SED
            gta.sed(pkey, outfile="sed_%s.fits" % pkey)
            # remove the source
            gta.delete_source(pkey)
            # put the ROI back to how it was
            gta.load_xml('fit_baseline')

        return gta
Example #3
    def __init__(self, configFile):

        self.gta = GTAnalysis(configFile, logging={'verbosity': 3})
        self.target = None
        self.targetRadius = None
        self.distance = None
        self.catalog = fits.getdata('/users-data/mfalxa/code/gll_psch_v13.fit', 1)
Example #4
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        workdir = os.path.dirname(args.config)
        _config_file = self._clone_config_and_srcmaps(args.config, args.seed)

        gta = GTAnalysis(_config_file,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        gta.load_roi(args.roi_baseline)

        simfile = os.path.join(workdir,
                               'sim_%s_%s.yaml' % (args.sim, args.sim_profile))

        mcube_file = "%s_%s_%06i" % (args.sim, args.sim_profile, args.seed)
        sim_config = utils.load_yaml(simfile)

        injected_source = sim_config.get('injected_source', None)
        if injected_source is not None:
            src_dict = injected_source['source_model']
            src_dict['ra'] = gta.config['selection']['ra']
            src_dict['dec'] = gta.config['selection']['dec']
            injected_name = injected_source['name']
            gta.add_source(injected_name, src_dict)
            gta.write_model_map(mcube_file)
            mc_spec_dict = dict(
                true_counts=gta.model_counts_spectrum(injected_name),
                energies=gta.energies,
                model=src_dict)
            mcspec_file = os.path.join(
                workdir, "mcspec_%s_%06i.yaml" % (mcube_file, args.seed))
            utils.write_yaml(mc_spec_dict, mcspec_file)
        else:
            injected_name = None

        gta.write_roi('sim_baseline_%06i' % args.seed)

        test_sources = {}
        for profile in args.profiles:
            profile_path = os.path.join(workdir, 'profile_%s.yaml' % profile)
            test_source = load_yaml(profile_path)
            test_sources[profile] = test_source
            first = args.seed
            last = first + args.nsims
            for seed in range(first, last):
                self._run_simulation(gta,
                                     args.roi_baseline,
                                     injected_name,
                                     test_sources,
                                     first,
                                     seed,
                                     non_null_src=args.non_null_src,
                                     do_find_src=args.do_find_src)
Example #5
def extract_photons_data(tmin, tmax, outdir=None, find_directory=False):
    """Perform GTAnalysis setup with custom tmin and tmax.
    For details see PKS notebook"""
    config_path, outdir = prepare_temp_config(tmin, tmax, outdir,
                                              find_directory)

    gta = GTAnalysis(config_path, logging={'verbosity': 3})
    gta.setup()

    return outdir
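A minimal usage sketch for the helper above (the MET boundaries are illustrative and span one week):

outdir = extract_photons_data(tmin=515000000, tmax=515604800)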
Example #6
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError("Trying to run fermipy analysis, but don't have ST")
        
        gta = GTAnalysis(args.config, logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})

        gta.setup(overwrite=False)
        gta.free_sources(False)
        gta.print_roi()
        gta.optimize()
        gta.print_roi()

        exclude = ['3FGL J1707.8+5626']

        # Localize all point sources
        for src in sorted(gta.roi.sources, key=lambda t: t['ts'], reverse=True):

            if src['SpatialModel'] != 'PointSource':
                continue
            if src['offset_roi_edge'] > -0.1:
                continue
            if src.name in exclude:
                continue
            if '3FGL' not in src.name:
                continue

            gta.localize(src.name, nstep=5, dtheta_max=0.5, update=True,
                         prefix='base', make_plots=True)

        gta.optimize()
        gta.print_roi()

        gta.write_roi('base_roi', make_plots=True)

        gta.find_sources(sqrt_ts_threshold=5.0)
        gta.optimize()
        gta.print_roi()
        gta.print_params()

        gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
        gta.fit()
        gta.print_roi()
        gta.print_params()

        gta.write_roi('fit_baseline', make_plots=True)
Example #7
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        if is_not_null(args.roi_baseline):
            gta = GTAnalysis.create(args.roi_baseline, args.config)
        else:
            gta = GTAnalysis(args.config,
                             logging={'verbosity': 3},
                             fileio={'workdir_regex': r'\.xml$|\.npy$'})
        gta.print_roi()
        
        test_source = args.target
        gta.sed(test_source, outfile='sed_%s.fits' % 'FL8Y', make_plots=True)
        gta.extension(test_source, make_plots=True)
        return gta
Example #8
def main():
        
    usage = "usage: %(prog)s [config file]"
    description = "Run fermipy analysis chain."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--config', default='sample_config.yaml')
    parser.add_argument('--source', default=None)

    args = parser.parse_args()
    gta = GTAnalysis(args.config)

    if args.source is None:
        src_name = gta.roi.sources[0].name
    else:
        src_name = args.source
    
    gta.setup()
    gta.optimize()

    if (gta.roi[src_name]['ts'] > 1000. and
        gta.roi[src_name]['SpectrumType'] == 'PowerLaw'):
        gta.set_source_spectrum(src_name, spectrum_type='LogParabola',
                                spectrum_pars={'beta': {'value': 0.0, 'scale': 1.0,
                                                        'min': 0.0, 'max': 2.0}})

    gta.free_source(src_name)
    gta.fit()
    gta.free_source(src_name, False)

    loc = gta.localize(src_name, free_radius=1.0, update=True, make_plots=True)

    model = {'Index': 2.0, 'SpatialModel': 'PointSource'}
    srcs = gta.find_sources(model=model, sqrt_ts_threshold=5.0,
                            min_separation=0.5)
    
    sed = gta.sed(src_name, free_radius=1.0, make_plots=True)
    gta.tsmap(make_plots=True)
    gta.tsmap(prefix='excludeSource', exclude=[src_name], make_plots=True)

    gta.write_roi('fit0')
    lc = gta.lightcurve(src_name, binsz=86400. * 28.0, free_radius=3.0,
                        use_scaled_srcmap=True, multithread=False)
Example #9
def run_analysis(config):
    print('Running analysis...')

    gta = GTAnalysis(config)
    gta.setup()
    gta.optimize()

    gta.print_roi()

    # Localize and generate SED for first source in ROI
    srcname = gta.roi.sources[0].name

    gta.free_source(srcname)
    gta.fit()

    gta.localize(srcname)
    gta.sed(srcname)

    gta.write_roi('roi', make_plots=True)
    gta.tsmap(make_plots=True)
    gta.residmap(make_plots=True)
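A chain like the one above expects a fermipy configuration. As a minimal sketch, passed as a plain dict (which GTAnalysis also accepts in place of a YAML path, as Example #13 shows); every path, time range, and catalog value below is illustrative:

config = {
    'data': {'evfile': 'ft1.lst', 'scfile': 'ft2.fits'},
    'binning': {'roiwidth': 10.0, 'binsz': 0.1, 'binsperdec': 8},
    'selection': {'emin': 100, 'emax': 100000, 'zmax': 90,
                  'evclass': 128, 'evtype': 3,
                  'tmin': 239557414, 'tmax': 428903014,
                  'target': 'mkn421'},
    'gtlike': {'edisp': True, 'irfs': 'P8R3_SOURCE_V3'},
    'model': {'src_roiwidth': 15.0,
              'galdiff': 'gll_iem_v07.fits',
              'isodiff': 'iso_P8R3_SOURCE_V3_v1.txt',
              'catalogs': ['4FGL']},
}
run_analysis(config)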
Example #10
def main():
    # Argument definition
    usage = "usage: %(prog)s [options]"
    description = "Run an analysis script"
    
    parser = argparse.ArgumentParser(usage=usage, description=description)
    parser.add_argument('-c', "--config",
                        type=argparse.FileType('r'),
                        default="config.yaml",
                        help="Input file")
    parser.add_argument('-o', "--output",
                        type=str,
                        default='baseline',
                        help="Output file prefix")

    args = parser.parse_args(sys.argv[1:])
   
    gta = GTAnalysis(args.config.name)
    gta.setup()
    
    gta.write_roi(args.output, save_model_map=True, save_weight_map=True, make_plots=True)
    return gta
Example #11
def main():

    usage = "usage: %(prog)s [config file]"
    description = "Run fermipy analysis chain."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--config', default='sample_config.yaml')
    parser.add_argument('--source', default=None)

    args = parser.parse_args()
    gta = GTAnalysis(args.config)

    if args.source is None:
        src_name = gta.roi.sources[0].name
    else:
        src_name = args.source

    gta.setup()
    gta.optimize()

    loc = gta.localize(src_name, free_radius=1.0, update=True, make_plots=True)

    model = {'Index': 2.0, 'SpatialModel': 'PointSource'}
    srcs = gta.find_sources(model=model,
                            sqrt_ts_threshold=5.0,
                            min_separation=0.5)

    sed = gta.sed(src_name, free_radius=1.0, make_plots=True)
    gta.tsmap(make_plots=True)
    gta.write_roi('fit0')
    # make sure bins are shifted to line up with the end of the time window (where the neutrino arrived)
    tmax = 528835414
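    # 120 edges -> 119 contiguous 28-day bins, with the last edge landing exactly on tmax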
    LCbins = tmax - 119 * 28 * 24 * 3600 + numpy.linspace(0, 119,
                                                          120) * 28 * 24 * 3600
    lc = gta.lightcurve(src_name,
                        time_bins=list(LCbins),
                        free_radius=3.0,
                        use_scaled_srcmap=True,
                        multithread=False,
                        shape_ts_threshold=100)
Example #12
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        gta = GTAnalysis(args.config,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})

        gta.setup(overwrite=False)

        baseline_roi_fit(gta,
                         make_plots=args.make_plots,
                         minmax_npred=[1e3, np.inf])

        localize_sources(gta,
                         nstep=5,
                         dtheta_max=0.5,
                         update=True,
                         prefix='base',
                         make_plots=args.make_plots)

        gta.find_sources(sqrt_ts_threshold=5.0,
                         search_skydir=gta.roi.skydir,
                         search_minmax_radius=[1.0, np.nan])
        gta.optimize()
        gta.print_roi()
        gta.print_params()

        gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
        gta.fit(covar=True)
        gta.print_roi()
        gta.print_params()

        gta.write_roi(args.roi_baseline, make_plots=args.make_plots)
Example #13
def _process_lc_bin(itime, name, config, basedir, workdir, diff_sources, const_spectrum, roi, lck_params,
                    **kwargs):
    i, time = itime
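    # itime unpacks to the bin index and the (tmin, tmax) MET pair for this bin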

    roi = copy.deepcopy(roi)

    config = copy.deepcopy(config)
    config['selection']['tmin'] = time[0]
    config['selection']['tmax'] = time[1]

    # create output directories labeled in MET vals
    outdir = basedir + 'lightcurve_%.0f_%.0f' % (time[0], time[1])
    config['fileio']['outdir'] = os.path.join(workdir, outdir)
    config['logging']['prefix'] = 'lightcurve_%.0f_%.0f ' % (time[0], time[1])
    config['fileio']['logfile'] = os.path.join(config['fileio']['outdir'],
                                               'fermipy.log')
    utils.mkdir(config['fileio']['outdir'])

    yaml.dump(utils.tolist(config),
              open(os.path.join(config['fileio']['outdir'],
                                'config.yaml'), 'w'))

    xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

    try:
        from fermipy.gtanalysis import GTAnalysis
        gta = GTAnalysis(config, roi, loglevel=logging.DEBUG)
        gta.logger.info('Fitting time range %i %i' % (time[0], time[1]))
        gta.setup()
    except Exception:
        print('Analysis failed in time range %i %i' %
              (time[0], time[1]))
        print(sys.exc_info()[0])
        raise

    gta._lck_params = lck_params
    # Recompute source map for source of interest and sources within 3 deg
    if gta.config['gtlike']['use_scaled_srcmap']:
        names = [s.name for s in
                 gta.roi.get_sources(distance=3.0, skydir=gta.roi[name].skydir)
                 if not s.diffuse]
        gta.reload_sources(names)

    # Write the current model
    gta.write_xml(xmlfile)

    # Optimize the model
    gta.optimize(skip=diff_sources,
                 shape_ts_threshold=kwargs.get('shape_ts_threshold'))

    fit_results = _fit_lc(gta, name, **kwargs)
    gta.write_xml('fit_model_final.xml')
    srcmodel = copy.deepcopy(gta.get_src_model(name))
    numfree = gta.get_free_param_vector().count(True)
    
    const_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results = fit_results.copy()
    fixed_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results['fit_success'], fixed_srcmodel['fit_success'] = False, False
    fixed_fit_results['fit_quality'], fixed_srcmodel['fit_quality'] = 0, 0
    max_ts_thresholds = [None, 4, 9, 16, 25]
    for max_ts in max_ts_thresholds:
        if max_ts is not None:
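            # fix every source with TS at or below max_ts, keeping the target free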
            gta.free_sources(minmax_ts=[None, max_ts], free=False, exclude=[name])

        # rerun fit using params from full time (constant) fit using same
        # param vector as the successful fit to get loglike
        specname, spectrum = const_spectrum
        gta.set_source_spectrum(name, spectrum_type=specname,
                                spectrum_pars=spectrum,
                                update_source=False)
        gta.free_source(name, free=False)
        const_fit_results = gta.fit()
        if not const_fit_results['fit_success']:
            continue
        const_srcmodel = gta.get_src_model(name)
        # rerun using shape fixed to full time fit
        # for the fixed-shape lightcurve
        gta.free_source(name, pars='norm')
        fixed_fit_results = gta.fit()
        if not fixed_fit_results['fit_success']:
            continue
        fixed_srcmodel = gta.get_src_model(name)
        break
    
    # special lc output
    o = {'flux_const': const_srcmodel['flux'],
         'loglike_const': const_fit_results['loglike'],
         'fit_success': fit_results['fit_success'],
         'fit_success_fixed': fixed_fit_results['fit_success'],
         'fit_quality': fit_results['fit_quality'],
         'fit_status': fit_results['fit_status'],
         'num_free_params': numfree,
         'config': config}
    # full flux output
    if fit_results['fit_success'] == 1:
        for k in defaults.source_flux_output.keys():
            if k not in srcmodel:
                continue
            o[k] = srcmodel[k]
            o[k+'_fixed'] = fixed_srcmodel[k]

    gta.logger.info('Finished time range %i %i' % (time[0], time[1]))
    return o
Example #14
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        if args.load_baseline:
            gta = GTAnalysis.create(args.roi_baseline, args.config)
        else:
            gta = GTAnalysis(args.config,
                             logging={'verbosity': 3},
                             fileio={'workdir_regex': r'\.xml$|\.npy$'})
            gta.setup()
            if is_not_null(args.input_pars):
                gta.load_parameters_from_yaml(args.input_pars)
            gta.write_roi(args.roi_baseline,
                          save_model_map=True,
                          save_weight_map=True,
                          make_plots=args.make_plots)

        src_list = get_src_names(gta)
        plotter = plotting.AnalysisPlotter(gta.config['plotting'],
                                           fileio=gta.config['fileio'],
                                           logging=gta.config['logging'])

        if is_null(args.fit_strategy):
            return

        fit_strategy = load_yaml(args.fit_strategy)
        npred_current = None
        npred_prev = None

        plots_only = False

        for fit_stage in fit_strategy:
            mask = fit_stage.get('mask', None)
            npred_threshold = fit_stage.get('npred_threshold', 1.0e4)
            frac_threshold = fit_stage.get('frac_threshold', 0.5)
            npred_frac = fit_stage.get('npred_frac', 0.9999)

            if plots_only:
                gta.load_roi("%s.npy" % fit_stage['key'])
                npred_current = set_wts_get_npred_wt(gta, mask)
                skip_list_region = get_unchanged(src_list,
                                                 npred_current,
                                                 npred_prev,
                                                 frac_threshold=frac_threshold)
            else:
                npred_current = set_wts_get_npred_wt(gta, mask)
                skip_list_region = get_unchanged(src_list,
                                                 npred_current,
                                                 npred_prev,
                                                 frac_threshold=frac_threshold)
                gta.optimize(npred_frac=npred_frac,
                             npred_threshold=npred_threshold,
                             skip=skip_list_region)

            snapshot(gta,
                     plotter,
                     fit_stage['key'],
                     make_plots=args.make_plots)
            npred_prev = npred_current
            npred_current = build_srcdict(gta, 'npred_wt')
Example #15
            texp_file = path.join(src.srcconf[s][i]['fileio']['outdir'],
                "exposure_{0[emin]:n}-{0[emax]:n}MeV.npz".format(src.srcconf[s][i]['selection']))

            if path.isfile(texp_file):

                npzfile = np.load(texp_file)
                texp, front, back = npzfile['texp'], npzfile['front'], npzfile['back']
                logging.info("loaded exposure from {0:s}".format(texp_file))

            else:
                # get the gta object
                config = myconf2fermipy(src.srcconf[s][i])
                config['fileio']['scratchdir'] = None
                config['fileio']['usescratch'] = False
                #try:
                gta = GTAnalysis(config, logging={'verbosity': 3})
                #except Exception as e:
                #    logging.error("{0}".format(e))
                #    config['selection']['target'] = None
                #    gta = GTAnalysis(config,logging={'verbosity' : 3})
                sep = gta.roi.sources[0]['offset']
                print(config['selection']['target'])
                #    logging.warning("Source closest to ROI center is {0:.3f} degree away".format(sep))
                #    if sep < 0.1:
                #        config['selection']['target'] = gta.roi.sources[0]['name']
                #        gta.config['selection']['target'] = config['selection']['target']
                #        logging.info("Set target to {0:s}".format(config['selection']['target']))

                #logging.info("Calculating exposure for {0:.1f} MeV".format(energy))
                front, back = [], []
                for energy in earray:
Example #16
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        if is_null(args.skydirs):
            skydir_dict = None
        else:
            skydir_dict = load_yaml(args.skydirs)

        gta = GTAnalysis(args.config,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        #gta.setup(overwrite=False)
        gta.load_roi(args.roi_baseline)
        gta.print_roi()

        basedir = os.path.dirname(args.config)
        # This should be a no-op, b/c it was done in the baseline analysis

        for profile in args.profiles:
            if skydir_dict is None:
                skydir_keys = [None]
            else:
                skydir_keys = sorted(skydir_dict.keys())

            for skydir_key in skydir_keys:
                pkey, pdict = AnalyzeSED._build_profile_dict(
                    basedir, profile)
                if skydir_key is not None:
                    skydir_val = skydir_dict[skydir_key]
                    pdict['ra'] = skydir_val['ra']
                    pdict['dec'] = skydir_val['dec']
                    pkey += "_%06i" % skydir_key

                outfile = "sed_%s.fits" % pkey

                # Add the source and get the list of correlated sources
                correl_dict = add_source_get_correlated(gta,
                                                        pkey,
                                                        pdict,
                                                        correl_thresh=0.25)

                # Write the list of correlated sources
                correl_yaml = os.path.join(basedir, "correl_%s.yaml" % pkey)
                write_yaml(correl_dict, correl_yaml)

                gta.free_sources(False)
                for src_name in correl_dict.keys():
                    gta.free_source(src_name, pars='norm')

                # build the SED
                gta.sed(pkey, outfile=outfile, make_plots=args.make_plots)

                # remove the source
                gta.delete_source(pkey)
                # put the ROI back to how it was
                gta.load_xml(args.roi_baseline)

        return gta
Example #17
def _get_fermipy_instance(configuration, likelihood_model):
    """
    Generate a 'model' configuration section for fermipy starting from a likelihood model from astromodels

    :param configuration: a dictionary containing the configuration for fermipy
    :param likelihood_model: the input likelihood model from astromodels
    :type likelihood_model: astromodels.Model
    :return: a tuple (GTAnalysis instance, array of energies in keV at which the point sources are evaluated)
    """

    # Generate a new 'model' section in the configuration which reflects the model
    # provided as input

    # Get center and radius of ROI
    ra_center = float(configuration["selection"]["ra"])
    dec_center = float(configuration["selection"]["dec"])

    roi_width = float(configuration["binning"]["roiwidth"])
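    # roiwidth is the side of the square ROI; width/sqrt(2) is the radius of the
    # circumscribing circle (half the ROI diagonal)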
    roi_radius = old_div(roi_width, np.sqrt(2.0))

    # Get IRFS
    irfs = evclass_irf[int(configuration["selection"]["evclass"])]

    log.info(f"Using IRFs {irfs}")

    if "gtlike" in configuration and "irfs" in configuration["gtlike"]:

        if irfs.upper() != configuration["gtlike"]["irfs"].upper():
            log.critical(
                "Evclass points to IRFS %s, while you specified %s in the "
                "configuration" % (irfs, configuration["gtlike"]["irfs"]))

    else:

        if not "gtlike" in configuration:

            configuration["gtlike"] = {}

        configuration["gtlike"]["irfs"] = irfs

    # The fermipy model is just a dictionary. It corresponds to the 'model' section
    # of the configuration file (http://fermipy.readthedocs.io/en/latest/config.html#model)
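    # For illustration only (paths and values hypothetical), the finished section
    # would look roughly like:
    #   {'galdiff': '/path/to/gll_iem_v07.fits',
    #    'isodiff': '/path/to/iso_P8R3_SOURCE_V3_v1.txt',
    #    'sources': [{'name': 'src', 'ra': 83.6, 'dec': 22.0, 'SpectrumType': 'PowerLaw'}]}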

    fermipy_model = {}

    # Find Galactic and Isotropic templates appropriate for this IRFS
    # (information on the ROI is used to cut the Galactic template, which speeds up the
    # analysis a lot)
    # NOTE: these are going to be absolute paths

    galactic_template = str(
        sanitize_filename(
            findGalacticTemplate(irfs, ra_center, dec_center, roi_radius),
            True  # noqa: F821
        ))
    isotropic_template = str(
        sanitize_filename(findIsotropicTemplate(irfs), True))  # noqa: F821

    # Add them to the fermipy model

    fermipy_model["galdiff"] = galactic_template
    fermipy_model["isodiff"] = isotropic_template

    # Now iterate over all sources contained in the likelihood model
    sources = []

    # point sources
    for point_source in list(likelihood_model.point_sources.values()):  # type: astromodels.PointSource

        this_source = {
            "Index": 2.56233,
            "Scale": 572.78,
            "Prefactor": 2.4090e-12
        }
        this_source["name"] = point_source.name
        this_source["ra"] = point_source.position.ra.value
        this_source["dec"] = point_source.position.dec.value

        # The spectrum used here is inconsequential, as it will be replaced by a
        # FileFunction later on, so we just use PowerLaw for everything.
        this_source["SpectrumType"] = "PowerLaw"

        sources.append(this_source)

    # extended sources
    for extended_source in list(likelihood_model.extended_sources.values()):  # type: astromodels.ExtendedSource

        raise NotImplementedError("Extended sources are not supported yet")

    # Add all sources to the model
    fermipy_model["sources"] = sources

    # Now we can finally instance the GTAnalysis instance
    configuration["model"] = fermipy_model

    gta = GTAnalysis(configuration)  # noqa: F821

    # This will take a long time if it's the first time we run with this model
    gta.setup()

    # Substitute all spectra for point sources with FileSpectrum, so that we will be able to control
    # them from 3ML

    energies_keV = None

    for point_source in list(likelihood_model.point_sources.values()):  # type: astromodels.PointSource

        # Fix this source, so that fermipy will not optimize its parameters by itself
        gta.free_source(point_source.name, False)

        # This will substitute the current spectrum with a FileFunction with the same shape and flux
        gta.set_source_spectrum(point_source.name,
                                "FileFunction",
                                update_source=False)

        # Get the energies at which to evaluate this source
        this_log_energies, _flux = gta.get_source_dnde(point_source.name)
        this_energies_keV = 10**this_log_energies * 1e3  # fermipy energies are in MeV, we need keV

        if energies_keV is None:

            energies_keV = this_energies_keV

        else:

            # This is to make sure that all sources are evaluated at the same energies

            if not np.all(energies_keV == this_energies_keV):
                log.critical(
                    "All sources should be evaluated at the same energies.")

        dnde = point_source(energies_keV)  # ph / (cm2 s keV)
        dnde_per_MeV = dnde * 1000.0  # ph / (cm2 s MeV)
        gta.set_source_dnde(point_source.name, dnde_per_MeV, False)

    # Same for extended source
    for extended_source in list(likelihood_model.extended_sources.values()):  # type: astromodels.ExtendedSource

        raise NotImplementedError("Extended sources are not supported yet")

    return gta, energies_keV
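A hypothetical usage sketch for the function above (assuming astromodels is installed; the source name, coordinates, and the `configuration` dict are illustrative, not part of the original snippet):

from astromodels import Model, PointSource, Powerlaw

# Build a one-source astromodels likelihood model (values illustrative)
spectrum = Powerlaw()
source = PointSource("test_src", ra=83.63, dec=22.01, spectral_shape=spectrum)
likelihood_model = Model(source)

# 'configuration' is assumed to be a valid fermipy config dict with
# 'selection' and 'binning' sections
gta, energies_keV = _get_fermipy_instance(configuration, likelihood_model)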
Example #18
def main():

    usage = "usage: %(prog)s [config file]"
    description = "Run fermipy analysis chain."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--config', default='sample_config.yaml')
    parser.add_argument('--source', default=None)

    args = parser.parse_args()
    gta = GTAnalysis(args.config,
                     logging={'verbosity': 3},
                     fileio={'workdir_regex': r'\.xml$|\.npy$'})

    model0 = {'SpatialModel': 'PointSource', 'Index': 1.5}
    model1 = {'SpatialModel': 'PointSource', 'Index': 2.0}
    model2 = {'SpatialModel': 'PointSource', 'Index': 2.7}

    src_name = gta.config['selection']['target']

    gta.setup(overwrite=True)
    gta.free_sources(False)
    gta.print_roi()
    gta.optimize()
    gta.print_roi()

    exclude = []

    # Localize all point sources
    for s in sorted(gta.roi.sources, key=lambda t: t['ts'], reverse=True):

        if s['SpatialModel'] != 'PointSource':
            continue
        if s['offset_roi_edge'] > -0.1:
            continue
        if s.name in exclude:
            continue
        if '3FGL' not in s.name:
            continue
        if s.name == src_name:
            continue

        gta.localize(s.name,
                     nstep=5,
                     dtheta_max=0.5,
                     update=True,
                     prefix='base',
                     make_plots=True)

    gta.optimize()
    gta.print_roi()

    gta.write_roi('base_roi', make_plots=True)

    exclude = [src_name]
    if 'carina_2' not in exclude:
        exclude += ['carina_2']
    if 'carina_3' not in exclude:
        exclude += ['carina_3']

    gta.tsmap('base', model=model0, make_plots=True, exclude=exclude)
    gta.residmap('base', model=model0, make_plots=True, exclude=exclude)
    gta.tsmap('base', model=model1, make_plots=True, exclude=exclude)
    gta.residmap('base', model=model1, make_plots=True, exclude=exclude)
    gta.tsmap('base', model=model2, make_plots=True, exclude=exclude)
    gta.residmap('base', model=model2, make_plots=True, exclude=exclude)

    gta.find_sources(sqrt_ts_threshold=5.0)
    gta.optimize()
    gta.print_roi()
    gta.print_params()

    gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
    gta.fit()
    gta.print_roi()
    gta.print_params()

    gta.write_roi('fit0_roi', make_plots=True)

    m = gta.tsmap('fit0', model=model0, make_plots=True, exclude=exclude)
    gta.plotter.make_tsmap_plots(m, gta.roi, zoom=2, suffix='tsmap_zoom')
    gta.residmap('fit0', model=model0, make_plots=True, exclude=exclude)
    m = gta.tsmap('fit0', model=model1, make_plots=True, exclude=exclude)
    gta.plotter.make_tsmap_plots(m, gta.roi, zoom=2, suffix='tsmap_zoom')
    gta.residmap('fit0', model=model1, make_plots=True, exclude=exclude)
    m = gta.tsmap('fit0', model=model2, make_plots=True, exclude=exclude)
    gta.plotter.make_tsmap_plots(m, gta.roi, zoom=2, suffix='tsmap_zoom')
    gta.residmap('fit0', model=model2, make_plots=True, exclude=exclude)

    gta.sed(src_name, prefix='fit0', make_plots=True, free_radius=1.0)

    gta.free_source(src_name)
    gta.fit(reoptimize=True)
    gta.print_roi()
    gta.print_params()

    gta.write_roi('fit1_roi', make_plots=True)
Example #19
def FGES_BinnedAnalysis(prefix, ANALYSISDIR, numsources, xmlsources, spectrum,
                        spectrumpoints, spectrumpointsUL, spectrum_mev_or_erg,
                        spectrum_mev_or_tev, configfile):

    ANALYSISDIR = ANALYSISDIR + prefix + '/'
    i = numsources  # number of sources
    sources_names = ''
    for x in range(0, i):
        sources_names += str(xmlsources[x])

    #Run the likelihood analysis up to doing the fit
    gta = GTAnalysis(ANALYSISDIR + configfile, logging={'verbosity': 3})
    gta.setup()

    #Print the pre likelihood fit parameters
    gta.print_roi()
    for x in range(0, i):
        print(gta.roi[xmlsources[x]])

    #Do an initial optimization of parameters
    gta.optimize()

    gta.print_roi()

    #Prepare to get the likelihood
    #Free the normalizations of sources within 7 degrees of the center of the field of view
    gta.free_sources(distance=7.0, pars='norm')
    gta.free_source('galdiff')
    gta.free_source('isodiff')
    for x in range(0, i):
        gta.free_source(xmlsources[x])

    #LIKELIHOOD ANALYSIS
    fit_results = gta.fit()

    #print out and return the results
    print('Fit Quality: ', fit_results['fit_quality'])
    for x in range(0, i):
        print(gta.roi[xmlsources[x]])
    gta.write_roi(sources_names + 'fit')

    #RESIDUAL MAP
    model = {'Index': 2.0, 'SpatialModel': 'PointSource'}
    maps = gta.residmap('residual', model=model, make_plots=True)

    # Generate residual map with source of interest removed from the model
    model_nosource = {'Index': 2.0, 'SpatialModel': 'PointSource'}
    maps_nosource = gta.residmap('residual_wsource',
                                 model=model_nosource,
                                 exclude=xmlsources,
                                 make_plots=True)

    #TS Map
    tsmap = gta.tsmap('tsmap',
                      model={
                          'SpatialModel': 'PointSource',
                          'Index': 2.0
                      },
                      exclude=xmlsources,
                      make_plots=True)
    tsmap_wSNR = gta.tsmap('tsmap_wSNR',
                           model={
                               'SpatialModel': 'PointSource',
                               'Index': 2.0
                           },
                           make_plots=True)

    #PLOT SEDs
    for x in range(0, i):
        c = np.load('10to500gev/' + sources_names + 'fit.npy',
                    allow_pickle=True).flat[0]
        # interactive inspection leftovers (these expressions have no effect in a script):
        # sorted(c['sources'].keys())
        # c['sources'][xmlsources[x]]['flux']
        print(c['sources'][xmlsources[x]]['param_names'][:4])
        print(c['sources'][xmlsources[x]]['param_values'][:4])
        # c['sources'][xmlsources[x]]['ts']

        E = np.array(c['sources'][xmlsources[x]]['model_flux']['energies'])
        dnde = np.array(c['sources'][xmlsources[x]]['model_flux']['dnde'])
        dnde_hi = np.array(
            c['sources'][xmlsources[x]]['model_flux']['dnde_hi'])
        dnde_lo = np.array(
            c['sources'][xmlsources[x]]['model_flux']['dnde_lo'])

        if spectrum_mev_or_erg == "erg":
            suffix = 'erg'
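            # 1 MeV = 1.60218e-6 erg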
            mult = 0.00000160218
        elif spectrum_mev_or_erg == "mev":
            suffix = 'MeV'
            mult = 1

        if spectrum_mev_or_tev == "mev":
            xaxis = 'MeV'
            denominator = 1
        elif spectrum_mev_or_tev == "tev":
            xaxis = 'TeV'
            denominator = 1000000

        if spectrum:
            plt.loglog(E, (E**2) * dnde, 'k--')
            plt.loglog(E, (E**2) * dnde_hi, 'k')
            plt.loglog(E, (E**2) * dnde_lo, 'k')
            plt.xlabel('E [MeV]')
            plt.ylabel(r'E$^2$ dN/dE [MeV cm$^{-2}$ s$^{-1}$]')
            plt.savefig('spectrum_' + xmlsources[x] + '.png')

        #GET SED POINTS
        if spectrumpoints:
            sed = gta.sed(xmlsources[x], make_plots=True)
            #sed = gta.sed(xmlsource,prefix=xmlsource + 'spectrum',loge_bins=)
            src = gta.roi[xmlsources[x]]
            #Plot without upper limits
            plt.loglog(E, (E**2) * dnde, 'k--')
            plt.loglog(E, (E**2) * dnde_hi, 'k')
            plt.loglog(E, (E**2) * dnde_lo, 'k')
            plt.errorbar(np.array(sed['e_ctr']),
                         sed['e2dnde'],
                         yerr=sed['e2dnde_err'],
                         fmt='o')
            plt.xlabel('E [MeV]')
            plt.ylabel(r'E$^{2}$ dN/dE [MeV cm$^{-2}$ s$^{-1}$]')
            #plt.show()
            plt.savefig('spectrumpoints_' + xmlsources[x] + '.png')
            #Plot with upper limits, last 5 points
            plt.loglog(E, (E**2) * dnde, 'k--')
            plt.loglog(E, (E**2) * dnde_hi, 'k')
            plt.loglog(E, (E**2) * dnde_lo, 'k')
            plt.errorbar(sed['e_ctr'][:-5],
                         sed['e2dnde'][:-5],
                         yerr=sed['e2dnde_err'][:-5],
                         fmt='o')
            plt.errorbar(np.array(sed['e_ctr'][-5:]),
                         sed['e2dnde_ul95'][-5:],
                         yerr=0.2 * sed['e2dnde_ul95'][-5:],
                         fmt='o',
                         uplims=True)
            plt.xlabel('E [MeV]')
            plt.ylabel(r'E$^{2}$ dN/dE [MeV cm$^{-2}$ s$^{-1}$]')
            plt.savefig('spectrumpointsUL_' + xmlsources[x] + '.png')
        plt.clf()
Example #20
def main():
    usage = "usage: %(prog)s -c config.yaml"
    description = "Run the lc analysis"
    parser = argparse.ArgumentParser(usage=usage, description=description)
    parser.add_argument('-c', '--conf', required=True)
    parser.add_argument('-i',
                        required=False,
                        default=0,
                        help='Set local or scratch calculation',
                        type=int)
    parser.add_argument('--state',
                        help='analysis state',
                        choices=['avgspec', 'setup'],
                        default='avgspec')
    parser.add_argument('--forcepl',
                        default=0,
                        help='Force the target source to have power-law shape',
                        type=int)
    parser.add_argument('--createsed',
                        default=0,
                        help='Create SED from best fit model',
                        type=int)
    parser.add_argument(
        '--adaptive',
        default=0,
        help='Use adaptive binning for minute scale light curves',
        type=int)
    parser.add_argument('--srcprob', default=0,
                        help='Calculate the source probability for the photons;'
                             ' only works when no sub-orbit time scales are used',
                        type=int)
    parser.add_argument(
        '--mincounts',
        default=2,
        help='Minimum number of counts within LC bin to run analysis',
        type=int)
    parser.add_argument('--simulate', default=None,
                        help='None or full path to a yaml file which contains the'
                             ' src name and spec to be simulated')
    parser.add_argument(
        '--make_plots',
        default=0,
        type=int,
        help='Create sed plot',
    )
    parser.add_argument(
        '--randomize',
        default=1,
        help='If you simulate, use a Poisson realization; if false, use an Asimov data set',
        type=int)
    args = parser.parse_args()

    utils.init_logging('DEBUG')
    config = yaml.safe_load(open(args.conf))
    tmpdir, job_id = lsf.init_lsf()
    if not job_id:
        job_id = args.i
    logging.info('tmpdir: {0:s}, job_id: {1:n}'.format(tmpdir, job_id))
    os.chdir(tmpdir)  # go to tmp directory
    logging.info('Entering directory {0:s}'.format(tmpdir))
    logging.info('PWD is {0:s}'.format(os.environ["PWD"]))

    # copy the ft1,ft2 and ltcube files
    #for k in ['evfile','scfile','ltcube']:
    # don't stage them, done automatically by fermipy if needed
    #        config[k] = utils.copy2scratch(config[k], tmpdir)
    # set the scratch directories
    logging.debug(config['data'])
    config['fileio']['scratchdir'] = tmpdir

    # set the log file
    logdir = copy.deepcopy(config['fileio']['logfile'])
    config['fileio']['logfile'] = path.join(tmpdir, 'fermipy.log')
    # debugging: all files will be saved (default is False)
    #config['fileio']['savefits'] = True

    # if simulating an orbit, save fits files
    if args.simulate is not None:
        config['fileio']['savefits'] = True

    # copy all fits files already present in outdir
    # run the analysis
    lc_config = copy.deepcopy(config['lightcurve'])
    fit_config = copy.deepcopy(config['fit_pars'])

    # remove parameters from config file not accepted by fermipy
    for k in ['configname', 'tmp', 'log', 'fit_pars']:
        config.pop(k, None)
    if 'adaptive' in config['lightcurve'].keys():
        config['lightcurve'].pop('adaptive', None)

    # set the correct time bin
    config['selection']['tmin'], config['selection']['tmax'], nj = set_lc_bin(
        config['selection']['tmin'],
        config['selection']['tmax'],
        config['lightcurve']['binsz'],
        job_id - 1 if job_id > 0 else 0,
        ft1=config['data']['evfile'])
    logging.debug('setting light curve bin ' +
                  '{0:n}, between {1[tmin]:.0f} and {1[tmax]:.0f}'.format(job_id, config['selection']))
    if args.adaptive:
        config['fileio']['outdir'] = utils.mkdir(
            path.join(config['fileio']['outdir'],
                      'adaptive{0:.0f}/'.format(lc_config['adaptive'])))

    if args.state == 'setup':
        config['fileio']['outdir'] = utils.mkdir(
            path.join(config['fileio']['outdir'],
                      'setup{0:05n}/'.format(job_id if job_id > 0 else 1)))
    else:
        config['fileio']['outdir'] = utils.mkdir(
            path.join(config['fileio']['outdir'],
                      '{0:05n}/'.format(job_id if job_id > 0 else 1)))

    logging.info('Starting with fermipy analysis')
    logging.info('using fermipy version {0:s}'.format(fermipy.__version__))
    logging.info('located at {0:s}'.format(fermipy.__file__))

    if config['data']['ltcube'] == '':
        config['data'].pop('ltcube', None)

    compute_sub_gti_lc = False
    if isinstance(config['lightcurve']['binsz'], str):
        if len(config['lightcurve']['binsz'].strip('gti')):
            compute_sub_gti_lc = True
            if config['lightcurve']['binsz'].find('min') > 0:
                config['lightcurve']['binsz'] = float(
                    config['lightcurve']['binsz'].strip('gti').strip(
                        'min')) * 60.
                logging.info("set time bin length to {0:.2f}s".format(
                    config['lightcurve']['binsz']))
        else:
            config['lightcurve']['binsz'] = 3. * 3600.
    try:
        gta = GTAnalysis(config, logging={'verbosity': 3})
    except Exception as e:
        logging.error("{0}".format(e))
        config['selection']['target'] = None
        gta = GTAnalysis(config, logging={'verbosity': 3})
        sep = gta.roi.sources[0]['offset']
        logging.warning(
            "Source closest to ROI center is {0:.3f} degree away".format(sep))
        if sep < 0.1:
            config['selection']['target'] = gta.roi.sources[0]['name']
            gta.config['selection']['target'] = config['selection']['target']
            logging.info("Set target to {0:s}".format(
                config['selection']['target']))

    # stage the full time array analysis results to the tmp dir
    # do not copy png images
    files = [
        fn for fn in glob(fit_config['avgspec'])
        if fn.find('.xml') > 0 or fn.find('.npy') > 0
    ]
    files += [config['data']['evfile']]
    utils.copy2scratch(files, gta.workdir)

    # we're using actual data
    if args.simulate is None:
        # check before the analysis start if there are any events in the master file
        # in the specified time range
        logging.info('Checking for events in initial ft1 file')
        t = Table.read(path.join(gta.workdir,
                                 path.basename(config['data']['evfile'])),
                       hdu='EVENTS')
        logging.info("times in base ft1: {0} {1} {2}".format(
            t["TIME"].max(), t["TIME"].min(),
            t["TIME"].max() - t["TIME"].min()))
        m = (t["TIME"] >= config['selection']['tmin']) & (
            t["TIME"] <= config['selection']['tmax'])
        if np.sum(m) < args.mincounts + 1:
            logging.error(
                "*** Only {0:n} events between tmin and tmax! Exiting".format(
                    np.sum(m)))
            assert np.sum(m) > args.mincounts
        else:
            logging.info("{0:n} events between tmin and tmax".format(
                np.sum(m)))

        # check how many bins are in each potential light curve bin
        if compute_sub_gti_lc:
            # select time of first and last
            # photon instead of GTI time
            m = (t["TIME"] >= config['selection']['tmin']) & \
                 (t["TIME"] <= config['selection']['tmax'])

            tmin = t["TIME"][m].min() - 1.
            tmax = t["TIME"][m].max() + 1.
            logging.info("There will be up to {0:n} time bins".format(np.ceil(
                (tmax - tmin) / \
                config['lightcurve']['binsz'])))

            bins = np.arange(tmin, tmax, config['lightcurve']['binsz'])
            bins = np.concatenate([bins, [config['selection']['tmax']]])
            counts = calc_counts(t, bins)
            # remove the starting times of the bins with zero counts
            # and rebin the data
            logging.info("Counts before rebinning: {0}".format(counts))
            mincounts = 10.
            mc = counts < mincounts
            if np.sum(mc):
                # remove trailing zeros
                if np.any(counts == 0.):
                    mcounts_post, mcounts_pre = rm_trailing_zeros(counts)
                    counts = counts[mcounts_post & mcounts_pre]
                    bins = np.concatenate([
                        bins[:-1][mcounts_post & mcounts_pre],
                        [bins[1:][mcounts_post & mcounts_pre].max()]
                    ])
                bins = rebin(counts, bins)
                logging.info("Bin lengths after rebinning: {0}".format(
                    np.diff(bins)))
                logging.info("Bin times after rebinning: {0}".format(bins))
                counts = calc_counts(t, bins)
                logging.info("Counts after rebinning: {0}".format(counts))
            else:
                logging.info("Regular time binning will be used")
            bins = list(bins)

    logging.info('Running fermipy setup')
    try:
        gta.setup()
    except (RuntimeError, IndexError) as e:
        logging.error(
            'Caught Runtime/Index Error while initializing analysis object')
        logging.error('Printing error:')
        logging.error(e)
        msg = str(e)
        if msg.find("File not found") >= 0 and msg.find('srcmap') >= 0:
            logging.error("*** Srcmap calculation failed ***")
        if msg.find("NDSKEYS") >= 0 and msg.find('srcmap') >= 0:
            logging.error(
                "*** Srcmap calculation failed with NDSKEYS keyword not found in header ***"
            )

        logging.info("Checking if there are events in ft1 file")
        ft1 = path.join(gta.workdir, 'ft1_00.fits')
        f = glob(ft1)
        if not len(f):
            logging.error(
                "*** no ft1 file found at location {0:s}".format(ft1))
            raise
        t = Table.read(f[0], hdu='EVENTS')
        if not len(t):
            logging.error("*** The ft1 file contains no events!! ***")
        else:
            logging.info("The ft1 file contains {0:n} event(s)".format(len(t)))
        return

    # end here if you only want to calculate
    # intermediate fits files
    if args.state == 'setup':
        return gta

    logging.info('Loading the fit for the average spectrum')
    gta.load_roi('avgspec')  # reload the average spectral fit
    logging.info('Running fermipy optimize and fit')

    # we're using actual data
    if args.simulate is None:
        if args.forcepl:
            gta = set_src_spec_pl(
                gta, gta.get_source_name(config['selection']['target']))
        # TODO: add EBL absorption at some stage ...
        # gta = add_ebl_atten(gta, gta.get_source_name(config['selection']['target']), fit_config['z'])

        # make sure you are fitting data
        gta.simulate_roi(restore=True)

        if compute_sub_gti_lc:
            if args.adaptive:
                # do import only here since root must be compiled
                from fermiAnalysis import adaptivebinning as ab
                # compute the exposure
                energy = 1000.
                texp, front, back = ab.comp_exposure_phi(gta, energy=1000.)
                # compute the bins
                result = ab.time_bins(
                    gta,
                    texp,
                    0.5 * (front + back),
                    #critval = 20., # bins with ~20% unc
                    critval=lc_config['adaptive'],
                    Epivot=None,  # compute on the fly
                    #                        tstart = config['selection']['tmin'],
                    #                        tstop = config['selection']['tmax']
                )

                # cut the bins to this GTI
                mask = result['tstop'] > config['selection']['tmin']
                mask = mask & (result['tstart'] < config['selection']['tmax'])

                # try again with catalog values
                if not np.sum(mask):
                    logging.error(
                        "Adaptive bins outside time window, trying catalog values for flux"
                    )
                    result = ab.time_bins(
                        gta,
                        texp,
                        0.5 * (front + back),
                        critval=lc_config['adaptive'],  # bins with ~20% unc
                        Epivot=None,  # compute on the fly
                        forcecatalog=True,
                        #                        tstart = config['selection']['tmin'],
                        #                        tstop = config['selection']['tmax']
                    )

                    # cut the bins to this GTI
                    mask = result['tstop'] > config['selection']['tmin']
                    mask = mask & (result['tstart'] <
                                   config['selection']['tmax'])
                    if not np.sum(mask):
                        logging.error(
                            "Adaptive bins do not cover selected time interval!"
                        )
                        logging.error("Using original bins")

                    else:
                        bins = np.concatenate((result['tstart'][mask],
                                               [result['tstop'][mask][-1]]))
                        bins[0] = np.max(
                            [config['selection']['tmin'], bins[0]])
                        bins[-1] = np.min(
                            [config['selection']['tmax'], bins[-1]])
                        bins = list(bins)

                        # removing trailing zeros
                        counts = calc_counts(t, bins)
                        mcounts_post, mcounts_pre = rm_trailing_zeros(counts)
                        logging.info(
                            "count masks: {0} {1}, bins: {2}, counts: {3}".
                            format(mcounts_post, mcounts_pre, bins, counts))
                        counts = counts[mcounts_post & mcounts_pre]
                        bins = np.concatenate([
                            np.array(bins)[:-1][mcounts_post & mcounts_pre],
                            [
                                np.array(bins)[1:][mcounts_post
                                                   & mcounts_pre].max()
                            ]
                        ])
                        logging.info(
                            "Using bins {0}, total n={1:n} bins".format(
                                bins,
                                len(bins) - 1))
                        logging.info("bins widths : {0}".format(np.diff(bins)))
                        logging.info("counts per bin: {0} ".format(
                            calc_counts(t, bins)))
                        bins = list(bins)


            # TODO: test that this is working also with GTIs that have little or no counts

            lc = gta.lightcurve(
                config['selection']['target'],
                binsz=config['lightcurve']['binsz'],
                free_background=config['lightcurve']['free_background'],
                free_params=config['lightcurve']['free_params'],
                free_radius=config['lightcurve']['free_radius'],
                make_plots=False,
                multithread=True,
                nthread=4,
                #multithread = False,
                #nthread = 1,
                save_bin_data=True,
                shape_ts_threshold=16.,
                use_scaled_srcmap=True,
                use_local_ltcube=True,
                write_fits=True,
                write_npy=True,
                time_bins=bins,
                outdir='{0:.0f}s'.format(config['lightcurve']['binsz']))
        else:
            # run the fitting of the entire time and energy range
            try:
                o = gta.optimize()  # perform an initial fit
                logging.debug(o)
            except RuntimeError as e:
                logging.error("Error in optimize: {0}".format(e))
                logging.info("Trying to continue ...")

            gta = set_free_pars_lc(gta, config, fit_config)

            f = gta.fit()

            if 'fix_sources' in fit_config.keys():
                skip = fit_config['fix_sources'].keys()
            else:
                skip = []

            gta, f = refit(gta,
                           config['selection']['target'],
                           f,
                           fit_config['ts_fixed'],
                           skip=skip)
            gta.print_roi()
            gta.write_roi('lc')

            if args.createsed:
                if args.make_plots:
                    init_matplotlib_backend()
                gta.load_roi('lc')  # reload the average spectral fit
                logging.info('Running sed for {0[target]:s}'.format(
                    config['selection']))
                sed = gta.sed(config['selection']['target'],
                              prefix='lc_sed',
                              free_radius=(None if config['sed']['free_radius'] == 0.
                                           else config['sed']['free_radius']),
                              free_background=config['sed']['free_background'],
                              free_pars=fa.allnorm,
                              make_plots=args.make_plots,
                              cov_scale=config['sed']['cov_scale'],
                              use_local_index=config['sed']['use_local_index'],
                              bin_index=config['sed']['bin_index'])

        # debugging: calculate sed and resid maps for each light curve bin
        #logging.info('Running sed for {0[target]:s}'.format(config['selection']))
        #sed = gta.sed(config['selection']['target'], prefix = 'lc')
        #model = {'Scale': 1000., 'Index' : fit_config['new_src_pl_index'], 'SpatialModel' : 'PointSource'}
        #resid_maps = gta.residmap('lc',model=model, make_plots=True, write_fits = True, write_npy = True)

            if args.srcprob:
                logging.info("Running srcprob with srcmdl {0:s}".format('lc'))
                gta.compute_srcprob(xmlfile='lc', overwrite=True)

    # we are simulating a source
    else:
        # TODO: the setup probably has to be run here as well. Run it on weekly
        # files, i.e., without a time cut, and only apply the cut later?

        with open(args.simulate, 'rb') as f:  # np.load requires a binary-mode file
            simsource = np.load(f, allow_pickle=True).flat[0]

        # set the source to the simulation value
        gta.set_source_spectrum(
            simsource['target'],
            spectrum_type=simsource['spectrum_type'],
            spectrum_pars=simsource['spectrum_pars'][job_id - 1])

        logging.info("changed spectral parameters to {0}".format(
            gta.roi.get_source_by_name(simsource['target']).spectral_pars))

        # simulate the ROI
        gta.simulate_roi(randomize=bool(args.randomize))
        gta = set_free_pars_lc(gta, config, fit_config)

        # fit the simulation
        f = gta.fit()
        gta, f = refit(gta, config['selection']['target'], f,
                       fit_config['ts_fixed'])
        gta.print_roi()
        gta.write_roi('lc_simulate_{0:s}'.format(simsource['suffix']))
    return gta
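The lightcurve call in the example above passes precomputed bin edges via time_bins. A minimal sketch of how such fixed MET edges could be built from the selection window (the make_time_bins helper is illustrative, not part of the original script):

import numpy as np

def make_time_bins(tmin, tmax, binsz):
    """Return MET bin edges covering [tmin, tmax] in steps of binsz seconds (tmax > tmin)."""
    edges = np.arange(tmin, tmax, binsz)
    if edges[-1] < tmax:
        edges = np.append(edges, tmax)  # close the last, possibly shorter, bin
    return list(edges)

# e.g. weekly bins over the full selection window:
# bins = make_time_bins(config['selection']['tmin'],
#                       config['selection']['tmax'],
#                       7 * 86400.)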
import os
import yaml
import numpy as np
from fermipy.gtanalysis import GTAnalysis

# `cwd` and `path_to_conda` are assumed to be set earlier in the original
# script; defined here so the snippet runs standalone:
cwd = os.getcwd()
path_to_conda = os.environ['CONDA_PREFIX']

with open(cwd+'/config_local.yaml', 'r') as infile:
    config = yaml.safe_load(infile)

config['fileio']['workdir'] = cwd+'/fits'
config['fileio']['outdir'] = cwd+'/fits'
config['fileio']['logfile'] = cwd+'/fits/fermipy.log'
config['data']['ltcube'] = cwd+'/fits/ltcube_00.fits'
config['model']['galdiff'] = path_to_conda+'/share/fermitools/refdata/fermi/galdiffuse/gll_iem_v07.fits'
config['model']['isodiff'] = path_to_conda+'/share/fermitools/refdata/fermi/galdiffuse/iso_P8R3_SOURCE_V3_v1.txt'
config['logging']['verbosity'] = 4
source = config['selection']['target']
with open(cwd+'/config_modified.yaml', 'w') as o:
    yaml.dump(config, o)
likelihoods = np.zeros(5)  # log-likelihood of each alternating iteration
gta = GTAnalysis(config='config_modified.yaml')
gta.setup()
model = {'Index' : 2.0, 'SpatialModel' : 'PointSource'}

for i in range(1,6):
    gta.optimize()
    gta.free_sources(free=False)
    gta.free_source(source)
    gta.free_source('galdiff')
    gta.free_source('isodiff')
    gta.free_sources(distance=3, pars='norm')
    gta.free_sources(minmax_ts=[100, None], pars='norm')
    fit_res = gta.fit(optimizer='NEWMINUIT', reoptimize=True)
    likelihoods[i - 1] = fit_res['loglike']  # record for the convergence check below
    resid_maps = gta.residmap(f'../maps/opt_alternating_{i}', model=model, make_plots=True)
    ts_maps = gta.tsmap(f'../maps/opt_alternating_{i}', model=model, make_plots=True)
    gta.write_roi(f'opt_{i}', make_plots=True)
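With the per-iteration log-likelihoods recorded above, a short follow-up sketch (not part of the original script) can verify that the alternating optimization has stabilized:

import numpy as np

# the iteration-to-iteration change should shrink toward zero on convergence
print("log-likelihood per iteration:", likelihoods)
print("change between iterations:", np.diff(likelihoods))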
Example #22
0
def init_gta(configfile, i=1, logging_level="INFO", tsmin=100):
    """
    Initialize the fermipy analysis

    Add filter to config expression excising the brightest GRBs and 
    solar flares
    """
    utils.init_logging(logging_level)
    if isinstance(configfile, dict):
        config = configfile
    else:
        with open(configfile) as f:
            config = yaml.safe_load(f)
    tmpdir, job_id = lsf.init_lsf()
    if not job_id:
        job_id = i
        tmpdir = os.environ["PWD"]
    logging.info('tmpdir: {0:s}, job_id: {1:n}'.format(tmpdir, job_id))
    os.chdir(tmpdir)  # go to tmp directory
    logging.info('Entering directory {0:s}'.format(tmpdir))
    logging.info('PWD is {0:s}'.format(os.environ["PWD"]))

    # don't stage the ft1, ft2, and ltcube files here;
    # fermipy copies them to scratch automatically if needed:
    # for k in ['evfile', 'scfile', 'ltcube']:
    #     config[k] = utils.copy2scratch(config[k], tmpdir)
    # set the scratch directories
    logging.debug(config['data'])
    logging.debug(config['selection'])
    config['fileio']['scratchdir'] = tmpdir

    # set the log file
    logdir = copy.deepcopy(config['fileio']['logfile'])
    config['fileio']['logfile'] = path.join(tmpdir, 'fermipy.log')

    # when running on the cluster, the diffuse background models could also
    # be copied to scratch:
    # if not args.state == 'setup' and job_id:
    #     for k in ['galdiff', 'isodiff']:
    #         config['model'][k] = utils.copy2scratch(path.expandvars(config['model'][k]), tmpdir)

    # keep the custom fit parameters separately; they are removed from the
    # fermipy config below
    fit_config = copy.deepcopy(config['fit_pars'])

    # create a file with GTIs excluding solar flares and GRBs

    if 'gtiexclude' in fit_config:
        # create a fits file with the GTIs to be excluded; note that ftcreate
        # can make fermipy crash with a segmentation fault
        if not path.isfile(fit_config['gtiexclude']):
            fit_config['gtiexclude'] = excise_solar_flares_grbs(tsmin=tsmin,
                                                                outdir=tmpdir)
        config['selection'][
            'filter'] += " && gtifilter('{0:s}', START)".format(
                fit_config['gtiexclude'])
        #config['selection']['filter'] += " && gtifilter('{0:s}', STOP)".format(fit_config['gtiexclude'])

    # remove parameters from config file not accepted by fermipy
    for k in ['configname', 'tmp', 'log', 'fit_pars']:
        config.pop(k, None)
    if 'adaptive' in config['lightcurve']:
        config['lightcurve'].pop('adaptive', None)

    if config['data']['ltcube'] == '':
        config['data'].pop('ltcube', None)

    logging.info('Starting with fermipy analysis')

    # fall back to fixed 3-hour bins if binsz is not numeric
    if isinstance(config['lightcurve']['binsz'], str):
        config['lightcurve']['binsz'] = 3. * 3600.

    try:
        gta = GTAnalysis(config, logging={'verbosity': 3})
    # exception could be caused by unknown source,
    # remove target name and try again
    except Exception as e:
        logging.warning('Caught exception {0}'.format(e))
        logging.warning('Trying to remove the target name and work with coordinates instead')
        if 'target' in config['selection'] and \
            ('ra' in config['selection'] or 'glon' in config['selection']):
            config['selection']['target'] = None
            gta = GTAnalysis(config, logging={'verbosity': 3})
        else:
            raise Exception(
                "No coordinates specified in config file selection")
    return gta, config, fit_config, job_id
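A hypothetical usage sketch for init_gta (the argument values are illustrative):

gta, config, fit_config, job_id = init_gta('config.yaml',
                                           i=1,
                                           logging_level="INFO",
                                           tsmin=100)
gta.setup()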
Example #23
0
cFile = open('config.yaml', 'w')
cFile.write("data:\n  evfile : "+ft1+"\n  scfile : "+ft2+"\n")
cFile.write("\nbinning:\n  roiwidth : "+roiSize+"\n  binsz : "+binsz+"\n  binsperdec : "+binsperdec+"\n")
cFile.write("\nselection:\n  tmin : "+str(tstart)+"\n  tmax : "+str(tstop)+"\n  evclass : "+evclass+"\n  ra : "+str(srcRA)+"\n  dec : "+str(srcDec)+"\n  zmax : "+zmax+"\n  emin : "+emin+"\n  emax : "+emax+"\n  evtype : "+evtype+"\n  filter : "+fil+"\n")
cFile.write("\ngtlike :\n  edisp : "+edisp+"\n  irfs : "+irfs+"\n  edisp_disable : ['isodiff']\n")
cFile.write("\nltcube: \n  use_local_ltcube : True\n")
cFile.write("\nmodel:\n  src_roiwidth : "+str(RAD)+"\n  galdiff  : '$FERMI_DIFFUSE_DIR/gll_iem_v07.fits'\n  isodiff  : '$FERMI_DIFFUSE_DIR/iso_P8R3_SOURCE_V2_v1.txt'\n  catalogs : [4FGL]")
cFile.close()

#####################
# START OF ANALYSIS #
#####################

#initializing analysis object
gta = GTAnalysis('config.yaml',logging={'verbosity': 3})

gta.setup(overwrite = True)

#first optimization run with output
fit_res = gta.optimize()

gta.write_roi('fit_optimize')

#free parameters for full likelihood fit
gta.free_sources(pars='norm')
gta.free_sources(distance = 3.0)
gta.free_source('galdiff')
gta.free_source('isodiff')
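The example above assembles its YAML by string concatenation, which is brittle: one wrong indent or missing newline breaks the parse. A safer, equivalent sketch builds the same structure as a dict and lets yaml.dump handle the formatting; the numeric casts are assumptions about what the variables hold:

import yaml

config = {
    'data': {'evfile': ft1, 'scfile': ft2},
    'binning': {'roiwidth': float(roiSize),
                'binsz': float(binsz),
                'binsperdec': int(binsperdec)},
    'selection': {'tmin': tstart, 'tmax': tstop,
                  'evclass': int(evclass), 'evtype': int(evtype),
                  'ra': srcRA, 'dec': srcDec, 'zmax': float(zmax),
                  'emin': float(emin), 'emax': float(emax),
                  'filter': fil},
    'gtlike': {'edisp': edisp, 'irfs': irfs,
               'edisp_disable': ['isodiff']},
    'ltcube': {'use_local_ltcube': True},
    'model': {'src_roiwidth': RAD,
              'galdiff': '$FERMI_DIFFUSE_DIR/gll_iem_v07.fits',
              'isodiff': '$FERMI_DIFFUSE_DIR/iso_P8R3_SOURCE_V2_v1.txt',
              'catalogs': ['4FGL']},
}

with open('config.yaml', 'w') as cfile:
    yaml.dump(config, cfile, default_flow_style=False)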

    # (the beginning of this example was lost in extraction; presumably the
    # ft1 files of all output directories are first collected into a list file)
    with open(FT1_FILES_LIST, 'w') as f:
        for od in outdirs:
            f.write(os.path.join(od, 'ft1_00.fits') + '\n')

    # modify base config to include merged files
    with open(BASE_CONFIG) as infile, \
            open(CONFIG_FINAL_FILE, 'w') as outfile:
        config = yaml.safe_load(infile)
        config['data']['evfile'] = os.path.join(os.getcwd(), FT1_FILES_LIST)
        config['data']['ltcube'] = os.path.join(os.getcwd(), LTCUBE_FINAL_FILE)
        config['fileio'] = {'outdir': 'out_merged/'}
        outfile.write('# Automatically merged from directories:\n')
        for outdir in outdirs:
            outfile.write('# {}\n'.format(outdir))
        outfile.write('\n')
        yaml.dump(config, outfile, indent=4)

    # some generic processing just for sanity check
    gta = GTAnalysis(CONFIG_FINAL_FILE, logging={'verbosity': 3})
    gta.setup()
    gta.free_source('4FGL J1512.8-0906', free=True, pars=['Index'])
    gta.free_source('4FGL J1512.8-0906', free=True, pars='norm')
    # Free Normalization of all Sources within 3 deg of ROI center
    gta.free_sources(distance=3.0, pars='norm')
    # Free all parameters of isotropic and galactic diffuse components
    gta.free_source('galdiff')
    gta.free_source('isodiff')
    gta.optimize()
    gta.print_roi()
    fit_res = gta.fit()
    print('Fit Quality: ', fit_res['fit_quality'])
    print(gta.roi['4FGL J1512.8-0906'])
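In fermipy, fit_quality is the MINUIT quality code: 3 means a full, accurate covariance matrix, while lower values indicate convergence problems. A hedged follow-up sketch, not part of the original, that retries with fewer free parameters when the fit failed:

if fit_res['fit_quality'] < 3:
    gta.free_sources(free=False)                 # fix everything ...
    gta.free_sources(distance=1.0, pars='norm')  # ... except nearby normalizations
    fit_res = gta.fit()
    print('Fit quality after retry:', fit_res['fit_quality'])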
def main(cmd_line):

    # takes an integer argument specifying the simulation number
    sim = cmd_line[1]

    indir = "/zfs/astrohe/ckarwin/Machine_Learning_GC/Sim_2/Dame_Maps/"
    outdir = indir + "Simulation_Output/sim_%s" % sim

    if os.path.isdir(outdir):
        shutil.rmtree(outdir)
    os.makedirs(outdir)
    os.chdir(outdir)

    # A single simulation should first be run to generate all the data products
    # that can be reused for subsequent simulations; the products copied below
    # are for those subsequent runs.
    shutil.copy2('%s/srcmap_00.fits' % indir, 'srcmap_00.fits')
    shutil.copy2('%s/bexpmap_00.fits' % indir, 'bexpmap_00.fits')
    shutil.copy2('%s/ccube_00.fits' % indir, 'ccube_00.fits')
    shutil.copy2('%s/config.yaml' % indir, 'config.yaml')
    shutil.copy2('%s/ft1_00.fits' % indir, 'ft1_00.fits')
    shutil.copy2('%s/LAT_Final_Excess_Template.fits' % indir,
                 'LAT_Final_Excess_Template.fits')

    #setup analysis:
    gta = GTAnalysis('config.yaml', logging={'verbosity': 3})
    gta.setup()
    #gta.load_roi("after_setup")

    #set components to zero for simulations:
    gta.set_norm("MapSource", 0.0)  #excess template
    gta.set_norm("galdiff04", 0.0)  #CO12_0-5
    gta.set_norm("galdiff05", 0.0)  #CO12_6-9
    gta.set_norm("galdiff06", 0.0)  #CO12_10-12
    gta.set_norm("galdiff07", 0.0)  #CO12_13-16

    #run simulations:
    gta.write_roi('before_sim')
    gta.simulate_roi(randomize=True)

    #delete sources that were simulated:
    gta.delete_source("galdiff00", delete_source_map=False)
    gta.delete_source("galdiff01", delete_source_map=False)
    gta.delete_source("galdiff02", delete_source_map=False)
    gta.delete_source("galdiff03", delete_source_map=False)

    # set normalizations of sources for the fit (random draws left commented out):
    #n4 =  np.random.normal(1.0,0.2)
    #n5 =  np.random.normal(1.0,0.2)
    #n6 =  np.random.normal(1.0,0.2)
    #nms = np.random.normal(1e-4,0.5e-4)
    gta.set_norm("galdiff04", 0.8)
    gta.set_norm("galdiff05", 0.8)
    gta.set_norm("galdiff06", 1.2)
    gta.set_norm("galdiff07", 1.2)

    #perform fit for null hypothesis:
    gta.free_sources(free=True)
    gta.free_source("galdiff07", free=False)
    gta.free_source("MapSource", free=False)
    Fit = gta.fit()
    null = Fit["loglike"]
    gta.write_roi('after_null_fit')
    gta.write_model_map("null_model")

    #set normalizations of sources for performing alternative fit:
    gta.set_norm("galdiff04", 0.8)
    gta.set_norm("galdiff05", 0.8)
    gta.set_norm("galdiff06", 1.2)
    gta.set_norm("galdiff07", 1.2)
    gta.set_norm("MapSource", 1e-4)

    gta.free_sources(free=True)
    #gta.free_source("galdiff07",free=False)
    Fit2 = gta.fit()
    alternative = Fit2["loglike"]
    gta.write_roi('after_alternative_fit')
    gta.write_model_map("alternative_model")

    #calculate source spectrum:
    ltcube = '/zfs/astrohe/ckarwin/Stacking_Analysis/UFOs/NGC_4151_Analysis/MakeLTCube/zmax_105/UFOs_binned_ltcube.fits'
    obs = BinnedObs(srcMaps='srcmap_00.fits',
                    expCube=ltcube,
                    binnedExpMap='bexpmap_00.fits',
                    irfs='P8R3_SOURCE_V2')
    like = BinnedAnalysis(obs,
                          'after_alternative_fit_00.xml',
                          optimizer='MINUIT')
    Elist, Flist = CalcFlux(like, 'MapSource')
    data = {"energ[MeV]": Elist, "flux[MeV/cm^2/s]": Flist}
    df = pd.DataFrame(data=data)
    df.to_csv("excess_flux.dat", sep="\t", index=False)

    # calculate the test statistic: TS = -2 * (lnL_null - lnL_alternative)
    TS = -2 * (null - alternative)

    #write results:
    savefile = "TS_sim_%s.txt" % sim
    f = open(savefile, "w")
    f.write(str(TS))
    f.close()

    # remove the FT1 file to reduce storage:
    os.remove('ft1_00.fits')

    return
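Under Wilks' theorem, with one extra free parameter (here the MapSource normalization), the TS computed above follows a chi-squared distribution with one degree of freedom, so the detection significance is roughly sqrt(TS). A small sketch, not in the original, that would logically follow the TS calculation inside main():

import numpy as np

significance = np.sqrt(max(TS, 0.0))  # ~sigma for a one-parameter test
print("TS = {0:.2f} -> ~{1:.1f} sigma".format(TS, significance))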
Example #26
0
from fermipy.gtanalysis import GTAnalysis
import argparse

usage = "usage: %(prog)s [config file]"
description = "Run fermipy analysis chain."
parser = argparse.ArgumentParser(usage=usage, description=description)

parser.add_argument('--config', default='sample_config.yaml')

args = parser.parse_args()

gta = GTAnalysis(args.config)

gta.setup()

# Iteratively optimize all components in the ROI
gta.optimize()

# Fix sources w/ TS < 10
gta.free_sources(minmax_ts=[None, 10], free=False)

# Free sources within 3 degrees of ROI center
gta.free_sources(distance=3.0)

# Free sources by name
gta.free_source('mkn421')
gta.free_source('galdiff')
gta.free_source('isodiff')

# Free only the normalization of a specific source
gta.free_norm('3FGL J1129.0+3705')
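A typical follow-up sketch (not part of the original example): run the global fit with the chosen free parameters and persist the ROI model; the output name 'fit_final' is illustrative.

fit_res = gta.fit()
print('Fit quality:', fit_res['fit_quality'])
gta.write_roi('fit_final')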