Example #1
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        gta = GTAnalysis(args.config, logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})

        gta.setup(overwrite=False)

        baseline_roi_fit(gta, make_plots=args.make_plots,
                         minmax_npred=[1e3, np.inf])

        localize_sources(gta, nstep=5, dtheta_max=0.5, update=True,
                         prefix='base', make_plots=args.make_plots)

        gta.find_sources(sqrt_ts_threshold=5.0, search_skydir=gta.roi.skydir,
                         search_minmax_radius=[1.0, np.nan])
        gta.optimize()
        gta.print_roi()
        gta.print_params()

        gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
        gta.fit(covar=True)
        gta.print_roi()
        gta.print_params()

        gta.write_roi(args.roi_baseline, make_plots=args.make_plots)
Example #2
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError("Trying to run fermipy analysis, but don't have ST")

        gta = GTAnalysis(args.config,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        gta.setup(overwrite=False)
        gta.load_roi('fit_baseline')
        gta.print_roi()

        basedir = os.path.dirname(args.config)
        # This should be a no-op, b/c it was done in the baseline analysis

        gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')

        for profile in args.profiles:
            pkey, pdict = SEDAnalysis._build_profile_dict(basedir, profile)
            # the test case needs to be a dict with spectrum and morphology
            gta.add_source(pkey, pdict)
            # refit the ROI
            gta.fit()
            # build the SED
            gta.sed(pkey, outfile="sed_%s.fits" % pkey)
            # remove the source
            gta.delete_source(pkey)
            # put the ROI back to how it was
            gta.load_xml('fit_baseline')

        return gta
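
The profile dictionary passed to gta.add_source must carry both spectral and spatial information, as the comment above notes. A minimal illustrative sketch of such a dictionary (all names and values below are placeholders, not taken from the helper itself):

# hypothetical profile dictionary: spectrum plus morphology, values are placeholders
pdict = {
    'SpatialModel': 'PointSource',
    'SpectrumType': 'PowerLaw',
    'Index': 2.0,
    'ra': 266.4,
    'dec': -28.9,
}
gta.add_source('test_profile', pdict)
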
Example #3
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError("Trying to run fermipy analysis, but don't have ST")
        
        gta = GTAnalysis(args.config, logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})

        gta.setup(overwrite=False)
        gta.free_sources(False)
        gta.print_roi()
        gta.optimize()
        gta.print_roi()

        exclude = ['3FGL J1707.8+5626']

        # Localize all point sources
        for src in sorted(gta.roi.sources, key=lambda t: t['ts'], reverse=True):
            #    for s in gta.roi.sources:

            if src['SpatialModel'] != 'PointSource':
                continue
            if src['offset_roi_edge'] > -0.1:
                continue

            if src.name in exclude:
                continue
            if '3FGL' not in src.name:
                continue

            gta.localize(src.name, nstep=5, dtheta_max=0.5, update=True,
                         prefix='base', make_plots=True)

        gta.optimize()
        gta.print_roi()

        gta.write_roi('base_roi', make_plots=True)

        gta.find_sources(sqrt_ts_threshold=5.0)
        gta.optimize()
        gta.print_roi()
        gta.print_params()

        gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
        gta.fit()
        gta.print_roi()
        gta.print_params()

        gta.write_roi('fit_baseline', make_plots=True)
Example #4
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        gta = GTAnalysis(args.config,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})

        gta.setup(overwrite=False)

        baseline_roi_fit(gta,
                         make_plots=args.make_plots,
                         minmax_npred=[1e3, np.inf])

        localize_sources(gta,
                         nstep=5,
                         dtheta_max=0.5,
                         update=True,
                         prefix='base',
                         make_plots=args.make_plots)

        gta.find_sources(sqrt_ts_threshold=5.0,
                         search_skydir=gta.roi.skydir,
                         search_minmax_radius=[1.0, np.nan])
        gta.optimize()
        gta.print_roi()
        gta.print_params()

        gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
        gta.fit(covar=True)
        gta.print_roi()
        gta.print_params()

        gta.write_roi(args.roi_baseline, make_plots=args.make_plots)
Example #5
    gta = GTAnalysis(args.config, logging={"verbosity": 3})

    gta.setup()
    gta.simulate_roi(restore=True)

    ext_fit_data = []
    halo_fit_data = []

    gta.write_roi("base_model", save_model_map=False, make_plots=False)

    for i in range(1):

        gta.load_xml("base_model")

        gta.simulate_roi(randomize=False)

        gta.free_source("testsource")
        gta.update_source("testsource")
        #        gta.fit()
        gta.free_sources(free=False)

        gta.extension("testsource", width=np.logspace(-2.5, -0.5, 9))

        ext_fit_data += [copy.deepcopy(gta.roi["testsource"])]

        gta.write_roi("fit%04i" % i, save_model_map=False, make_plots=False, format="npy")

        fit_halo(gta, "fit%04i" % i, "testsource", halo_width, halo_index)

    np.save(os.path.join(gta._savedir, "ext_fit_data.npy"), ext_fit_data)
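
The saved array holds deep copies of the test-source objects from each iteration; a minimal sketch for reading it back, assuming the same save directory:

import os
import numpy as np

# object arrays written by np.save need allow_pickle=True on load
ext_fit_data = np.load(os.path.join(gta._savedir, "ext_fit_data.npy"),
                       allow_pickle=True)
print(len(ext_fit_data), "extension fit results loaded")
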
Example #6
File: run.py Project: jefemagril/fermipy
description = "Run fermipy analysis chain."
parser = argparse.ArgumentParser(usage=usage, description=description)

parser.add_argument('--config', default='sample_config.yaml')

args = parser.parse_args()

gta = GTAnalysis(args.config)

gta.setup()

# Iteratively optimize all components in the ROI
gta.optimize()

# Fix sources w/ TS < 10
gta.free_sources(minmax_ts=[None, 10], free=False)

# Free sources within 3 degrees of ROI center
gta.free_sources(distance=3.0)

# Free sources by name
gta.free_source('mkn421')
gta.free_source('galdiff')
gta.free_source('isodiff')

# Free only the normalization of a specific source
gta.free_norm('3FGL J1129.0+3705')

gta.fit()

# Compute the SED for a source
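A plausible final step for the comment above, assuming the Mkn 421 source freed earlier is the SED target:

sed = gta.sed('mkn421')
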
Example #7
#####################
# START OF ANALYSIS #
#####################

#initializing analysis object
gta = GTAnalysis('config.yaml',logging={'verbosity': 3})

gta.setup(overwrite=True)

#first optimization run with output
fit_res = gta.optimize()

gta.write_roi('fit_optimize')

#free parameters for full likelihood fit
gta.free_sources(pars='norm')
gta.free_sources(distance=3.0)
gta.free_source('galdiff')
gta.free_source('isodiff')


#do the likelihood fit
fit_results = gta.fit()
if not fit_results['fit_success']:
    gta.load_roi('fit_optimize.npy')
    gta.free_sources(free=False)
    gta.free_sources(pars='norm', distance=3.0)
    gta.free_sources(distance=1.0)
    gta.free_source('galdiff')
    gta.free_source('isodiff')
    fit_res2 = gta.fit()
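
One possible follow-up, placed inside the fallback branch above, is to check whether the retry converged before saving; a minimal sketch (the 'fit_final' output name is illustrative):

    if fit_res2['fit_success']:
        gta.write_roi('fit_final')
    else:
        gta.logger.warning('Fallback likelihood fit did not converge either')
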
Example #8
def main():

    usage = "usage: %(prog)s [config file]"
    description = "Run fermipy analysis chain."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--config', default='sample_config.yaml')
    parser.add_argument('--source', default=None)

    args = parser.parse_args()
    gta = GTAnalysis(args.config,
                     logging={'verbosity': 3},
                     fileio={'workdir_regex': r'\.xml$|\.npy$'})

    model0 = {'SpatialModel': 'PointSource', 'Index': 1.5}
    model1 = {'SpatialModel': 'PointSource', 'Index': 2.0}
    model2 = {'SpatialModel': 'PointSource', 'Index': 2.7}

    src_name = gta.config['selection']['target']

    gta.setup(overwrite=True)
    gta.free_sources(False)
    gta.print_roi()
    gta.optimize()
    gta.print_roi()

    exclude = []

    # Localize all point sources
    for s in sorted(gta.roi.sources, key=lambda t: t['ts'], reverse=True):
        #    for s in gta.roi.sources:

        if s['SpatialModel'] != 'PointSource':
            continue
        if s['offset_roi_edge'] > -0.1:
            continue

        if s.name in exclude:
            continue
        if '3FGL' not in s.name:
            continue
        if s.name == src_name:
            continue

        gta.localize(s.name,
                     nstep=5,
                     dtheta_max=0.5,
                     update=True,
                     prefix='base',
                     make_plots=True)

    gta.optimize()
    gta.print_roi()

    gta.write_roi('base_roi', make_plots=True)

    exclude = [src_name]
    if 'carina_2' not in exclude:
        exclude += ['carina_2']
    if 'carina_3' not in exclude:
        exclude += ['carina_3']

    gta.tsmap('base', model=model0, make_plots=True, exclude=exclude)
    gta.residmap('base', model=model0, make_plots=True, exclude=exclude)
    gta.tsmap('base', model=model1, make_plots=True, exclude=exclude)
    gta.residmap('base', model=model1, make_plots=True, exclude=exclude)
    gta.tsmap('base', model=model2, make_plots=True, exclude=exclude)
    gta.residmap('base', model=model2, make_plots=True, exclude=exclude)

    gta.find_sources(sqrt_ts_threshold=5.0)
    gta.optimize()
    gta.print_roi()
    gta.print_params()

    gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
    gta.fit()
    gta.print_roi()
    gta.print_params()

    gta.write_roi('fit0_roi', make_plots=True)

    m = gta.tsmap('fit0', model=model0, make_plots=True, exclude=exclude)
    gta.plotter.make_tsmap_plots(m, gta.roi, zoom=2, suffix='tsmap_zoom')
    gta.residmap('fit0', model=model0, make_plots=True, exclude=exclude)
    gta.tsmap('fit0', model=model1, make_plots=True, exclude=exclude)
    gta.plotter.make_tsmap_plots(m, gta.roi, zoom=2, suffix='tsmap_zoom')
    gta.residmap('fit0', model=model1, make_plots=True, exclude=exclude)
    gta.tsmap('fit0', model=model2, make_plots=True, exclude=exclude)
    gta.plotter.make_tsmap_plots(m, gta.roi, zoom=2, suffix='tsmap_zoom')
    gta.residmap('fit0', model=model2, make_plots=True, exclude=exclude)

    gta.sed(src_name, prefix='fit0', make_plots=True, free_radius=1.0)

    gta.free_source(src_name)
    gta.fit(reoptimize=True)
    gta.print_roi()
    gta.print_params()

    gta.write_roi('fit1_roi', make_plots=True)
Example #9
description = "Run fermipy analysis chain."
parser = argparse.ArgumentParser(usage=usage, description=description)

parser.add_argument('--config', default='sample_config.yaml')

args = parser.parse_args()

gta = GTAnalysis(args.config)

gta.setup()

# Iteratively optimize all components in the ROI
gta.optimize()

# Fix sources w/ TS < 10
gta.free_sources(minmax_ts=[None, 10], free=False)

# Free sources within 3 degrees of ROI center
gta.free_sources(distance=3.0)

# Free sources by name
gta.free_source('mkn421')
gta.free_source('galdiff')
gta.free_source('isodiff')

# Free only the normalization of a specific source
gta.free_norm('3FGL J1129.0+3705')

gta.fit()

# Compute the SED for a source
Example #10
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        if is_null(args.skydirs):
            skydir_dict = None
        else:
            skydir_dict = load_yaml(args.skydirs)

        gta = GTAnalysis(args.config,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        #gta.setup(overwrite=False)
        gta.load_roi(args.roi_baseline)
        gta.print_roi()

        basedir = os.path.dirname(args.config)
        # This should be a no-op, b/c it was done in the baseline analysis

        for profile in args.profiles:
            if skydir_dict is None:
                skydir_keys = [None]
            else:
                skydir_keys = sorted(skydir_dict.keys())

            for skydir_key in skydir_keys:
                if skydir_key is None:
                    pkey, psrc_name, pdict = build_profile_dict(basedir, profile)
                else:
                    skydir_val = skydir_dict[skydir_key]
                    pkey, psrc_name, pdict = build_profile_dict(basedir, profile)
                    pdict['ra'] = skydir_val['ra']
                    pdict['dec'] = skydir_val['dec']
                    pkey += "_%06i" % skydir_key

                outfile = "sed_%s.fits" % pkey

                # Add the source and get the list of correlated sources
                correl_dict, test_src_name = add_source_get_correlated(gta, psrc_name, 
                                                                       pdict, correl_thresh=0.25, 
                                                                       non_null_src=args.non_null_src)

                # Write the list of correlated sources
                correl_yaml = os.path.join(basedir, "correl_%s.yaml" % pkey)
                write_yaml(correl_dict, correl_yaml)

                gta.free_sources(False)
                for src_name in correl_dict.keys():
                    gta.free_source(src_name, pars='norm')

                # build the SED
                if args.non_null_src:
                    gta.update_source(test_src_name, reoptimize=True)
                    gta.write_roi("base_%s"% pkey, make_plots=False)
                gta.sed(test_src_name, prefix=pkey, outfile=outfile, make_plots=args.make_plots)

                # remove the source
                gta.delete_source(test_src_name)
                # put the ROI back to how it was
                gta.load_xml(args.roi_baseline)

        return gta
Example #11
def main(cmd_line):

    # takes an integer argument specifying the simulation number
    sim = cmd_line[1]

    indir = "/zfs/astrohe/ckarwin/Machine_Learning_GC/Sim_2/Dame_Maps/"
    outdir = indir + "Simulation_Output/sim_%s" % sim

    if os.path.isdir(outdir):
        shutil.rmtree(outdir)
    os.system('mkdir %s' % outdir)
    os.chdir(outdir)

    # A single simulation should be run first; it generates all the data products
    # needed, which can then be reused for subsequent simulations.
    # The data products copied below are for subsequent simulations after the first run.
    shutil.copy2('%s/srcmap_00.fits' % indir, 'srcmap_00.fits')
    shutil.copy2('%s/bexpmap_00.fits' % indir, 'bexpmap_00.fits')
    shutil.copy2('%s/ccube_00.fits' % indir, 'ccube_00.fits')
    shutil.copy2('%s/config.yaml' % indir, 'config.yaml')
    shutil.copy2('%s/ft1_00.fits' % indir, 'ft1_00.fits')
    shutil.copy2('%s/LAT_Final_Excess_Template.fits' % indir,
                 'LAT_Final_Excess_Template.fits')

    #setup analysis:
    gta = GTAnalysis('config.yaml', logging={'verbosity': 3})
    gta.setup()
    #gta.load_roi("after_setup")

    #set components to zero for simulations:
    gta.set_norm("MapSource", 0.0)  #excess template
    gta.set_norm("galdiff04", 0.0)  #CO12_0-5
    gta.set_norm("galdiff05", 0.0)  #CO12_6-9
    gta.set_norm("galdiff06", 0.0)  #CO12_10-12
    gta.set_norm("galdiff07", 0.0)  #CO12_13-16

    #run simulations:
    gta.write_roi('before_sim')
    gta.simulate_roi(randomize=True)

    #delete sources that were simulated:
    gta.delete_source("galdiff00", delete_source_map=False)
    gta.delete_source("galdiff01", delete_source_map=False)
    gta.delete_source("galdiff02", delete_source_map=False)
    gta.delete_source("galdiff03", delete_source_map=False)

    #set random normalizations of sources for performing fit:
    #n4 =  np.random.normal(1.0,0.2)
    #n5 =  np.random.normal(1.0,0.2)
    #n6 =  np.random.normal(1.0,0.2)
    #nms = np.random.normal(1e-4,0.5e-4)
    gta.set_norm("galdiff04", 0.8)
    gta.set_norm("galdiff05", 0.8)
    gta.set_norm("galdiff06", 1.2)
    gta.set_norm("galdiff07", 1.2)

    #perform fit for null hypothesis:
    gta.free_sources(free=True)
    gta.free_source("galdiff07", free=False)
    gta.free_source("MapSource", free=False)
    Fit = gta.fit()
    null = Fit["loglike"]
    gta.write_roi('after_null_fit')
    gta.write_model_map("null_model")

    #set normalizations of sources for performing alternative fit:
    gta.set_norm("galdiff04", 0.8)
    gta.set_norm("galdiff05", 0.8)
    gta.set_norm("galdiff06", 1.2)
    gta.set_norm("galdiff07", 1.2)
    gta.set_norm("MapSource", 1e-4)

    gta.free_sources(free=True)
    #gta.free_source("galdiff07",free=False)
    Fit2 = gta.fit()
    alternative = Fit2["loglike"]
    gta.write_roi('after_alternative_fit')
    gta.write_model_map("alternative_model")

    #calculate source spectrum:
    ltcube = '/zfs/astrohe/ckarwin/Stacking_Analysis/UFOs/NGC_4151_Analysis/MakeLTCube/zmax_105/UFOs_binned_ltcube.fits'
    obs = BinnedObs(srcMaps='srcmap_00.fits',
                    expCube=ltcube,
                    binnedExpMap='bexpmap_00.fits',
                    irfs='P8R3_SOURCE_V2')
    like = BinnedAnalysis(obs,
                          'after_alternative_fit_00.xml',
                          optimizer='MINUIT')
    Elist, Flist = CalcFlux(like, 'MapSource')
    data = {"energ[MeV]": Elist, "flux[MeV/cm^2/s]": Flist}
    df = pd.DataFrame(data=data)
    df.to_csv("excess_flux.dat", sep="\t", index=False)

    #calculate TS:
    TS = -2 * (null - alternative)

    #write results:
    savefile = "TS_sim_%s.txt" % sim
    f = open(savefile, "w")
    f.write(str(TS))
    f.close()

    #rm ft file to reduce storage:
    os.system('rm ft1_00.fits')

    return
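
Each simulation run writes a single TS value to its own text file; a hedged sketch for collecting the results afterwards (the glob pattern assumes the directory layout set up above):

import glob
import numpy as np

# gather the per-simulation TS values; pattern follows the outdir naming above
ts_files = glob.glob("Simulation_Output/sim_*/TS_sim_*.txt")
ts_values = np.array([float(open(f).read()) for f in ts_files])
print("simulations:", ts_values.size, "median TS:", np.median(ts_values))
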
Example #12
config['fileio']['outdir'] = cwd+'/fits'
config['fileio']['logfile'] = cwd+'/fits/fermipy.log'
config['data']['ltcube'] = cwd+'/fits/ltcube_00.fits'
config['model']['galdiff'] = path_to_conda+'/share/fermitools/refdata/fermi/galdiffuse/gll_iem_v07.fits'
config['model']['isodiff'] = path_to_conda+'/share/fermitools/refdata/fermi/galdiffuse/iso_P8R3_SOURCE_V3_v1.txt'
config['logging']['verbosity'] = 4
source = config['selection']['target']
with open(cwd+'/config_modified.yaml', 'w') as o:
    yaml.dump(config, o)
likelihoods = np.zeros(5)
gta = GTAnalysis(config='config_modified.yaml')
gta.setup()
model = {'Index': 2.0, 'SpatialModel': 'PointSource'}

for i in range(1,6):
    gta.optimize()
    gta.free_sources(free=False)
    gta.free_source(source)
    gta.free_source('galdiff')
    gta.free_source('isodiff')
    gta.free_sources(distance=3, pars='norm')
    gta.free_sources(minmax_ts=[100, None], pars='norm')
    gta.fit(optimizer='NEWMINUIT', reoptimize=True)
    maps = gta.residmap(f'../maps/opt_alternating{i}', model=model, make_plots=True)
    maps = gta.tsmap(f'../maps/opt_alternating_{i}', model=model, make_plots=True)
    gta.write_roi(f'opt_{i}', make_plots=True)
    likelihoods[i-1] = - gta.like()


np.savetxt('optimization_process_likes_alternating.dat', likelihoods)
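
Since the loop stores the fit likelihood after each pass, one simple convergence check is to look at the change between successive iterations; a minimal sketch assuming the file written above:

import numpy as np

likes = np.loadtxt('optimization_process_likes_alternating.dat')
print('change between successive passes:', np.diff(likes))
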
Example #13
def FGES_BinnedAnalysis(prefix, ANALYSISDIR, numsources, xmlsources, spectrum,
                        spectrumpoints, spectrumpointsUL, spectrum_mev_or_erg,
                        spectrum_mev_or_tev, configfile):

    ANALYSISDIR = ANALYSISDIR + prefix + '/'
    i = numsources  #number of sources
    sources_names = ''
    for x in range(0, i):
        sources_names += str(xmlsources[x])

    #Run the likelihood analysis up to doing the fit
    gta = GTAnalysis(ANALYSISDIR + configfile, logging={'verbosity': 3})
    gta.setup()

    #Print the pre likelihood fit parameters
    gta.print_roi()
    for x in range(0, i):
        print(gta.roi[xmlsources[x]])

    #Do an initial optimization of parameters
    gta.optimize()

    gta.print_roi()

    #Prepare to get the likelihood
    #Free the normalizations of sources within 7 degrees of the center of the field of view
    gta.free_sources(distance=7.0, pars='norm')
    gta.free_source('galdiff')
    gta.free_source('isodiff')
    for x in range(0, i):
        gta.free_source(xmlsources[x])

    #LIKELIHOOD ANALYSIS
    fit_results = gta.fit()

    #print out and return the results
    print('Fit Quality: ', fit_results['fit_quality'])
    for x in range(0, i):
        print(gta.roi[xmlsources[x]])
    gta.write_roi(sources_names + 'fit')

    #RESIDUAL MAP
    model = {'Index': 2.0, 'SpatialModel': 'PointSource'}
    maps = gta.residmap('residual', model=model, make_plots=True)

    # Generate residual map with source of interest removed from the model
    model_nosource = {'Index': 2.0, 'SpatialModel': 'PointSource'}
    maps_nosource = gta.residmap('residual_wsource',
                                 model=model_nosource,
                                 exclude=xmlsources,
                                 make_plots=True)

    #TS Map
    tsmap = gta.tsmap('tsmap',
                      model={
                          'SpatialModel': 'PointSource',
                          'Index': 2.0
                      },
                      exclude=xmlsources,
                      make_plots=True)
    tsmap_wSNR = gta.tsmap('tsmap_wSNR',
                           model={
                               'SpatialModel': 'PointSource',
                               'Index': 2.0
                           },
                           make_plots=True)

    #PLOT SEDs
    for x in range(0, i):
        c = np.load('10to500gev/' + sources_names + 'fit.npy').flat[0]
        sorted(c['sources'].keys())
        c['sources'][xmlsources[x]]['flux']
        print(c['sources'][xmlsources[x]]['param_names'][:4])
        print(c['sources'][xmlsources[x]]['param_values'][:4])
        c['sources'][xmlsources[x]]['ts']

        E = np.array(c['sources'][xmlsources[x]]['model_flux']['energies'])
        dnde = np.array(c['sources'][xmlsources[x]]['model_flux']['dnde'])
        dnde_hi = np.array(
            c['sources'][xmlsources[x]]['model_flux']['dnde_hi'])
        dnde_lo = np.array(
            c['sources'][xmlsources[x]]['model_flux']['dnde_lo'])

        if spectrum_mev_or_erg == "erg":
            suffix = 'erg'
            mult = 0.00000160218
        elif spectrum_mev_or_erg == "mev":
            suffix = 'MeV'
            mult = 1

        if spectrum_mev_or_tev == "mev":
            xaxis = 'MeV'
            denominator = 1
        elif spectrum_mev_or_tev == "tev":
            xaxis = 'TeV'
            denominator = 1000000

        if spectrum:
            plt.loglog(E, (E**2) * dnde, 'k--')
            plt.loglog(E, (E**2) * dnde_hi, 'k')
            plt.loglog(E, (E**2) * dnde_lo, 'k')
            plt.xlabel('E [MeV]')
            plt.ylabel(r'E$^2$ dN/dE [MeV cm$^{-2}$ s$^{-1}$]')
            plt.savefig('spectrum_' + xmlsources[x] + '.png')

        #GET SED POINTS
        if spectrumpoints:
            sed = gta.sed(xmlsources[x], make_plots=True)
            #sed = gta.sed(xmlsource,prefix=xmlsource + 'spectrum',loge_bins=)
            src = gta.roi[xmlsources[x]]
            #Plot without upper limits
            plt.loglog(E, (E**2) * dnde, 'k--')
            plt.loglog(E, (E**2) * dnde_hi, 'k')
            plt.loglog(E, (E**2) * dnde_lo, 'k')
            plt.errorbar(np.array(sed['e_ctr']),
                         sed['e2dnde'],
                         yerr=sed['e2dnde_err'],
                         fmt='o')
            plt.xlabel('E [MeV]')
            plt.ylabel(r'E$^{2}$ dN/dE [MeV cm$^{-2}$ s$^{-1}$]')
            #plt.show()
            plt.savefig('spectrumpoints_' + xmlsources[x] + '.png')
            #Plot with upper limits, last 5 points
            plt.loglog(E, (E**2) * dnde, 'k--')
            plt.loglog(E, (E**2) * dnde_hi, 'k')
            plt.loglog(E, (E**2) * dnde_lo, 'k')
            plt.errorbar(sed['e_ctr'][:-5],
                         sed['e2dnde'][:-5],
                         yerr=sed['e2dnde_err'][:-5],
                         fmt='o')
            plt.errorbar(np.array(sed['e_ctr'][-5:]),
                         sed['e2dnde_ul95'][-5:],
                         yerr=0.2 * sed['e2dnde_ul95'][-5:],
                         fmt='o',
                         uplims=True)
            plt.xlabel('E [MeV]')
            plt.ylabel(r'E$^{2}$ dN/dE [MeV cm$^{-2}$ s$^{-1}$]')
            plt.savefig('spectrumpointsUL_' + xmlsources[x] + '.png')
        plt.clf()
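
A hedged sketch of how the function above might be called; every argument value here is a placeholder:

# illustrative call; all values below are placeholders
FGES_BinnedAnalysis(prefix='FGES J0000.0+0000',
                    ANALYSISDIR='/path/to/analysis/',
                    numsources=1,
                    xmlsources=['FGES J0000.0+0000'],
                    spectrum=True,
                    spectrumpoints=True,
                    spectrumpointsUL=True,
                    spectrum_mev_or_erg='mev',
                    spectrum_mev_or_tev='mev',
                    configfile='config.yaml')
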
Example #14
File: run.py Project: cdeil/fermipy
description = "Run fermipy analysis chain."
parser = argparse.ArgumentParser(usage=usage, description=description)

parser.add_argument('--config', default='sample_config.yaml')

args = parser.parse_args()

gta = GTAnalysis(args.config)

gta.setup()

# Iteratively optimize all components in the ROI
gta.optimize()

# Fix sources w/ significance < 10
gta.free_sources(cuts=('Detection_Significance', 0, 10), free=False)

# Free sources within 3 degrees of ROI center
gta.free_sources(distance=3.0)

# Free sources by name
gta.free_source('mkn421')
gta.free_source('galdiff')
gta.free_source('isodiff')

# Free only the normalization of a specific source
gta.free_norm('3FGL J1129.0+3705')

gta.fit()

# Compute the SED for a source
Example #15
def _process_lc_bin(itime, name, config, basedir, workdir, diff_sources, const_spectrum, roi, lck_params,
                    **kwargs):
    i, time = itime

    roi = copy.deepcopy(roi)

    config = copy.deepcopy(config)
    config['selection']['tmin'] = time[0]
    config['selection']['tmax'] = time[1]

    # create output directories labeled in MET vals
    outdir = basedir + 'lightcurve_%.0f_%.0f' % (time[0], time[1])
    config['fileio']['outdir'] = os.path.join(workdir, outdir)
    config['logging']['prefix'] = 'lightcurve_%.0f_%.0f ' % (time[0], time[1])
    config['fileio']['logfile'] = os.path.join(config['fileio']['outdir'],
                                               'fermipy.log')
    utils.mkdir(config['fileio']['outdir'])

    yaml.dump(utils.tolist(config),
              open(os.path.join(config['fileio']['outdir'],
                                'config.yaml'), 'w'))

    xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

    try:
        from fermipy.gtanalysis import GTAnalysis
        gta = GTAnalysis(config, roi, loglevel=logging.DEBUG)
        gta.logger.info('Fitting time range %i %i' % (time[0], time[1]))
        gta.setup()
    except:
        print('Analysis failed in time range %i %i' %
              (time[0], time[1]))
        print(sys.exc_info()[0])
        raise
        return {}

    gta._lck_params = lck_params
    # Recompute source map for source of interest and sources within 3 deg
    if gta.config['gtlike']['use_scaled_srcmap']:
        names = [s.name for s in
                 gta.roi.get_sources(distance=3.0, skydir=gta.roi[name].skydir)
                 if not s.diffuse]
        gta.reload_sources(names)

    # Write the current model
    gta.write_xml(xmlfile)

    # Optimize the model
    gta.optimize(skip=diff_sources,
                 shape_ts_threshold=kwargs.get('shape_ts_threshold'))

    fit_results = _fit_lc(gta, name, **kwargs)
    gta.write_xml('fit_model_final.xml')
    srcmodel = copy.deepcopy(gta.get_src_model(name))
    numfree = gta.get_free_param_vector().count(True)
    
    const_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results = fit_results.copy()
    fixed_srcmodel = gta.get_src_model(name).copy()
    fixed_fit_results['fit_success'], fixed_srcmodel['fit_success'] = False, False
    fixed_fit_results['fit_quality'], fixed_srcmodel['fit_quality'] = 0, 0
    max_ts_thresholds = [None, 4, 9, 16, 25]
    for max_ts in max_ts_thresholds:
        if max_ts is not None:
            gta.free_sources(minmax_ts=[None, max_ts], free=False, exclude=[name])

        # rerun fit using params from full time (constant) fit using same
        # param vector as the successful fit to get loglike
        specname, spectrum = const_spectrum
        gta.set_source_spectrum(name, spectrum_type=specname,
                                spectrum_pars=spectrum,
                                update_source=False)
        gta.free_source(name, free=False)
        const_fit_results = gta.fit()
        if not const_fit_results['fit_success']:
            continue
        const_srcmodel = gta.get_src_model(name)
        # rerun using shape fixed to full time fit
        # for the fixed-shape lightcurve
        gta.free_source(name, pars='norm')
        fixed_fit_results = gta.fit()
        if not fixed_fit_results['fit_success']:
            continue
        fixed_srcmodel = gta.get_src_model(name)
        break
    
    # special lc output
    o = {'flux_const': const_srcmodel['flux'],
         'loglike_const': const_fit_results['loglike'],
         'fit_success': fit_results['fit_success'],
         'fit_success_fixed': fixed_fit_results['fit_success'],
         'fit_quality': fit_results['fit_quality'],
         'fit_status': fit_results['fit_status'],
         'num_free_params': numfree,
         'config': config}
    # full flux output
    if fit_results['fit_success'] == 1:
        for k in defaults.source_flux_output.keys():
            if k not in srcmodel:
                continue
            o[k] = srcmodel[k]
            o[k+'_fixed'] = fixed_srcmodel[k]

    gta.logger.info('Finished time range %i %i' % (time[0], time[1]))
    return o
Example #16
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        if not HAVE_ST:
            raise RuntimeError(
                "Trying to run fermipy analysis, but don't have ST")

        if is_null(args.skydirs):
            skydir_dict = None
        else:
            skydir_dict = load_yaml(args.skydirs)

        gta = GTAnalysis(args.config,
                         logging={'verbosity': 3},
                         fileio={'workdir_regex': r'\.xml$|\.npy$'})
        #gta.setup(overwrite=False)
        gta.load_roi(args.roi_baseline)
        gta.print_roi()

        basedir = os.path.dirname(args.config)
        # This should be a no-op, b/c it was done in the baseline analysis

        for profile in args.profiles:
            if skydir_dict is None:
                skydir_keys = [None]
            else:
                skydir_keys = sorted(skydir_dict.keys())

            for skydir_key in skydir_keys:
                if skydir_key is None:
                    pkey, pdict = AnalyzeSED._build_profile_dict(
                        basedir, profile)
                else:
                    skydir_val = skydir_dict[skydir_key]
                    pkey, pdict = AnalyzeSED._build_profile_dict(
                        basedir, profile)
                    pdict['ra'] = skydir_val['ra']
                    pdict['dec'] = skydir_val['dec']
                    pkey += "_%06i" % skydir_key

                outfile = "sed_%s.fits" % pkey

                # Add the source and get the list of correlated sources
                correl_dict = add_source_get_correlated(gta,
                                                        pkey,
                                                        pdict,
                                                        correl_thresh=0.25)

                # Write the list of correlated sources
                correl_yaml = os.path.join(basedir, "correl_%s.yaml" % pkey)
                write_yaml(correl_dict, correl_yaml)

                gta.free_sources(False)
                for src_name in correl_dict.keys():
                    gta.free_source(src_name, pars='norm')

                # build the SED
                gta.sed(pkey, outfile=outfile, make_plots=args.make_plots)

                # remove the source
                gta.delete_source(pkey)
                # put the ROI back to how it was
                gta.load_xml(args.roi_baseline)

        return gta
Example #17
def _process_lc_bin(itime, name, config, basedir, workdir, diff_sources, const_spectrum, roi, lck_params,
                    **kwargs):
    i, time = itime

    roi = copy.deepcopy(roi)

    config = copy.deepcopy(config)
    config['selection']['tmin'] = time[0]
    config['selection']['tmax'] = time[1]

    # create output directories labeled in MET vals
    outdir = basedir + 'lightcurve_%.0f_%.0f' % (time[0], time[1])
    config['fileio']['outdir'] = os.path.join(workdir, outdir)
    config['logging']['prefix'] = 'lightcurve_%.0f_%.0f ' % (time[0], time[1])
    config['fileio']['logfile'] = os.path.join(config['fileio']['outdir'],
                                               'fermipy.log')
    utils.mkdir(config['fileio']['outdir'])

    yaml.dump(utils.tolist(config),
              open(os.path.join(config['fileio']['outdir'],
                                'config.yaml'), 'w'))

    xmlfile = os.path.join(config['fileio']['outdir'], 'base.xml')

    try:
        from fermipy.gtanalysis import GTAnalysis
        gta = GTAnalysis(config, roi, loglevel=logging.DEBUG)
        gta.logger.info('Fitting time range %i %i' % (time[0], time[1]))
        gta.setup()
    except:
        print('Analysis failed in time range %i %i' %
              (time[0], time[1]))
        print(sys.exc_info()[0])
        raise
        return {}

    gta._lck_params = lck_params
    # Recompute source map for source of interest and sources within 3 deg
    if gta.config['gtlike']['use_scaled_srcmap']:
        names = [s.name for s in
                 gta.roi.get_sources(distance=3.0, skydir=gta.roi[name].skydir)
                 if not s.diffuse]
        gta.reload_sources(names)

    # Write the current model
    gta.write_xml(xmlfile)

    # Optimize the model
    gta.optimize(skip=diff_sources,
                 shape_ts_threshold=kwargs.get('shape_ts_threshold'))

    fit_results = _fit_lc(gta, name, **kwargs)
    gta.write_xml('fit_model_final.xml')
    srcmodel = copy.deepcopy(gta.get_src_model(name))
    numfree = gta.get_free_param_vector().count(True)

    max_ts_thresholds = [None, 4, 9]
    for max_ts in max_ts_thresholds:
        if max_ts is not None:
            gta.free_sources(minmax_ts=[None, max_ts], free=False, exclude=[name])

        # rerun fit using params from full time (constant) fit using same
        # param vector as the successful fit to get loglike
        specname, spectrum = const_spectrum
        gta.set_source_spectrum(name, spectrum_type=specname,
                                spectrum_pars=spectrum,
                                update_source=False)
        gta.free_source(name, free=False)
        const_fit_results = gta.fit()
        if not const_fit_results['fit_success']:
            continue
        const_srcmodel = gta.get_src_model(name)

        # rerun using shape fixed to full time fit
        # for the fixed-shape lightcurve
        gta.free_source(name, pars='norm')
        fixed_fit_results = gta.fit()
        if not fixed_fit_results['fit_success']:
            continue
        fixed_srcmodel = gta.get_src_model(name)
        break
    
    # special lc output
    o = {'flux_const': const_srcmodel['flux'],
         'loglike_const': const_fit_results['loglike'],
         'fit_success': fit_results['fit_success'],
         'fit_success_fixed': fixed_fit_results['fit_success'],
         'fit_quality': fit_results['fit_quality'],
         'fit_status': fit_results['fit_status'],
         'num_free_params': numfree,
         'config': config}

    # full flux output
    if fit_results['fit_success'] == 1:
        for k in defaults.source_flux_output.keys():
            if k not in srcmodel:
                continue
            o[k] = srcmodel[k]
            o[k+'_fixed'] = fixed_srcmodel[k]

    gta.logger.info('Finished time range %i %i' % (time[0], time[1]))
    return o
Example #18
class ExtensionFit:
    def __init__(self, configFile):

        self.gta = GTAnalysis(configFile, logging={'verbosity': 3})
        self.target = None
        self.targetRadius = None
        self.distance = None
        self.catalog = fits.getdata('/users-data/mfalxa/code/gll_psch_v13.fit',
                                    1)

    def setSourceName(self, sourceObject, newName):
        self.gta.delete_source(sourceObject['name'])
        self.gta.add_source(newName, sourceObject)

    ''' INITIALIZE '''

    def initialize(self, sizeROI, rInner, addToROI, TSMin, debug):

        self.gta.setup()
        if self.gta.config['selection']['emin'] >= 10000:
            self.gta.set_parameter('galdiff', 'Scale', 30000)

        if debug == True:
            self.gta.make_plots('startAll')
            self.gta.residmap(prefix='startAll', make_plots=True)

        # Get model source names
        sourceList = self.gta.get_sources(exclude=['isodiff', 'galdiff'])

        # Delete sources unassociated with TS < 50
        for i in range(len(sourceList)):
            if sourceList[i]['catalog']['TS_value'] < TSMin and self.catalog[
                    'CLASS'][self.catalog['Source_Name'] == sourceList[i]
                             ['name']][0] == '':
                self.gta.delete_source(sourceList[i]['name'])

        closests = self.gta.get_sources(distance=rInner,
                                        exclude=['isodiff', 'galdiff'])

        # Delete all unidentified sources
        for i in range(len(closests)):
            if self.catalog['CLASS'][self.catalog['Source_Name'] == closests[i]
                                     ['name']][0].isupper() == False:
                self.gta.delete_source(closests[i]['name'])
            if self.catalog['CLASS'][self.catalog['Source_Name'] == closests[i]
                                     ['name']][0] == 'SFR':
                self.target = closests[i]
                self.setSourceName(self.target, 'TESTSOURCE')

        # If debug, save ROI and make plots
        if debug == True:
            self.gta.write_roi('startModel')
            self.gta.residmap(prefix='start', make_plots=True)
            self.gta.make_plots('start')

        # Optimize spectral parameters for sources with npred > 1
        self.gta.optimize(npred_threshold=1, skip=['isodiff'])

        # Get model source names
        sourceList = self.gta.get_sources(distance=sizeROI + addToROI,
                                          square=True,
                                          exclude=['isodiff', 'galdiff'])

        # Iterate source localizing on source list
        for i in range(len(sourceList)):
            if sourceList[i].extended == False:
                self.gta.localize(sourceList[i]['name'],
                                  write_fits=False,
                                  write_npy=False,
                                  update=True)

        # Free sources within ROI size + extra distance from center
        self.gta.free_sources(distance=sizeROI + addToROI, square=True)

        # Re-optimize ROI
        self.gta.optimize(skip=['isodiff'])

        # Save and make plots if debug
        if debug == True:
            self.gta.write_roi('modelInitialized')
            self.gta.residmap(prefix='initialized', make_plots=True)
            self.gta.make_plots('initialized')

        # Lock sources
        self.gta.free_sources(free=False)

    ''' OUTER REGION '''

    def outerRegionAnalysis(self, sizeROI, rInner, sqrtTsThreshold,
                            minSeparation, debug):

        self.gta.free_sources(distance=sizeROI,
                              pars='norm',
                              square=True,
                              free=True)
        self.gta.free_sources(distance=rInner, free=False)
        self.gta.free_source('galdiff', free=True)
        self.gta.free_source('isodiff', free=False)

        # Seek new sources until none are found
        sourceModel = {
            'SpectrumType': 'PowerLaw',
            'Index': 2.0,
            'Scale': 30000,
            'Prefactor': 1.e-15,
            'SpatialModel': 'PointSource'
        }
        newSources = self.gta.find_sources(sqrt_ts_threshold=sqrtTsThreshold,
                                           min_separation=minSeparation,
                                           model=sourceModel,
                                           **{
                                               'search_skydir':
                                               self.gta.roi.skydir,
                                               'search_minmax_radius':
                                               [rInner, sizeROI]
                                           })

        if len(newSources) > 0:
            for i in range(len(newSources)):
                if newSources['sources'][i]['ts'] > 100.:
                    self.gta.set_source_spectrum(
                        newSources['sources'][i]['name'],
                        spectrum_type='LogParabola')
                    self.gta.free_source(newSources['sources'][i]['name'])
                    self.gta.fit()
                    self.gta.free_source(newSources['sources'][i]['name'],
                                         free=False)

        # Optimize all ROI
        self.gta.optimize(skip=['isodiff'])

        # Save sources found
        if debug == True:
            self.gta.residmap(prefix='outer', make_plots=True)
            self.gta.write_roi('outerAnalysisROI')
            self.gta.make_plots('outer')

    ''' INNER REGION '''

    def innerRegionAnalysis(self, sizeROI, rInner, maxIter, sqrtTsThreshold,
                            minSeparation, dmMin, TSm1Min, TSextMin, debug):

        self.gta.free_sources(distance=sizeROI, square=True, free=False)
        self.gta.free_sources(distance=rInner, free=True, exclude=['isodiff'])

        # Keep closest source if identified with star forming region in catalog or look for new source closest to center within Rinner
        if self.target != None:
            print('Closest source identified with star forming region : ',
                  self.target['name'])
            self.gta.set_source_morphology('TESTSOURCE',
                                           **{'spatial_model': 'PointSource'})
        else:
            closeSources = self.gta.find_sources(sqrt_ts_threshold=2.,
                                                 min_separation=minSeparation,
                                                 max_iter=1,
                                                 **{
                                                     'search_skydir':
                                                     self.gta.roi.skydir,
                                                     'search_minmax_radius':
                                                     [0., rInner]
                                                 })
            dCenter = np.array([])
            for i in range(len(closeSources['sources'])):
                dCenter = np.append(
                    dCenter,
                    self.gta.roi.skydir.separation(
                        closeSources['sources'][i].skydir).value)
            self.target = closeSources['sources'][np.argmin(dCenter)]
            print('Target name : ', self.target['name'])
            self.setSourceName(self.target, 'TESTSOURCE')
            for i in [
                    x for x in range(len(closeSources['sources']))
                    if x != (np.argmin(dCenter))
            ]:
                self.gta.delete_source(closeSources['sources'][i]['name'])
            self.gta.optimize(skip=['isodiff'])

        # Initialize n sources array
        nSources = []

        # Save ROI without extension fit
        self.gta.write_roi('nSourcesFit')

        if debug == True:
            self.gta.make_plots('innerInit')
            self.gta.residmap(prefix='innerInit', make_plots=True)

        # Test for extension
        extensionTest = self.gta.extension('TESTSOURCE',
                                           make_plots=True,
                                           write_npy=debug,
                                           write_fits=debug,
                                           spatial_model='RadialDisk',
                                           update=True,
                                           free_background=True,
                                           fit_position=True)
        extLike = extensionTest['loglike_ext']
        TSext = extensionTest['ts_ext']
        print('TSext : ', TSext)
        extAIC = 2 * (len(self.gta.get_free_param_vector()) -
                      self.gta._roi_data['loglike'])
        self.gta.write_roi('extFit')

        if debug == True:
            self.gta.residmap(prefix='ext0', make_plots=True)
            self.gta.make_plots('ext0')

        self.gta.load_roi('nSourcesFit', reload_sources=True)

        for i in range(1, maxIter + 1):

            # Test for n point sources
            nSourcesTest = self.gta.find_sources(
                sources_per_iter=1,
                sqrt_ts_threshold=sqrtTsThreshold,
                min_separation=minSeparation,
                max_iter=1,
                **{
                    'search_skydir': self.gta.roi.skydir,
                    'search_minmax_radius': [0., rInner]
                })

            if len(nSourcesTest['sources']) > 0:

                if nSourcesTest['sources'][0]['ts'] > 100.:
                    self.gta.set_source_spectrum(
                        nSourcesTest['sources'][0]['name'],
                        spectrum_type='LogParabola')
                    self.gta.free_source(nSourcesTest['sources'][0]['name'])
                    self.gta.fit()
                    self.gta.free_source(nSourcesTest['sources'][0]['name'],
                                         free=False)

                if debug == True:
                    self.gta.make_plots('nSources' + str(i))

                nSources.append(nSourcesTest['sources'])
                self.gta.localize(nSourcesTest['sources'][0]['name'],
                                  write_npy=debug,
                                  write_fits=debug,
                                  update=True)
                nAIC = 2 * (len(self.gta.get_free_param_vector()) -
                            self.gta._roi_data['loglike'])
                self.gta.free_source(nSourcesTest['sources'][0]['name'],
                                     free=True)
                self.gta.residmap(prefix='nSources' + str(i), make_plots=True)
                self.gta.write_roi('n1SourcesFit')

                # Estimate Akaike Information Criterion difference between both models
                dm = extAIC - nAIC
                print('AIC difference between both models = ', dm)

                # Estimate TS_m+1
                extensionTestPlus = self.gta.extension(
                    'TESTSOURCE',
                    make_plots=True,
                    write_npy=debug,
                    write_fits=debug,
                    spatial_model='RadialDisk',
                    update=True,
                    free_background=True,
                    fit_position=True)
                TSm1 = 2 * (extensionTestPlus['loglike_ext'] - extLike)
                print('TSm+1 = ', TSm1)

                if debug == True:
                    self.gta.residmap(prefix='ext' + str(i), make_plots=True)
                    self.gta.make_plots('ext' + str(i))

                if dm < dmMin and TSm1 < TSm1Min:
                    self.gta.load_roi('extFit', reload_sources=True)
                    break
                else:

                    # Set extension test to current state and save current extension fit ROI and load previous nSources fit ROI
                    extensionTest = extensionTestPlus
                    extLike = extensionTestPlus['loglike_ext']
                    TSext = extensionTestPlus['ts_ext']
                    print('TSext : ', TSext)
                    extAIC = 2 * (len(self.gta.get_free_param_vector()) -
                                  self.gta._roi_data['loglike'])
                    self.gta.write_roi('extFit')
                    self.gta.load_roi('n1SourcesFit', reload_sources=True)
                    self.gta.write_roi('nSourcesFit')

            else:
                if TSext > TSextMin:
                    self.gta.load_roi('extFit', reload_sources=True)
                    break
                else:
                    self.gta.load_roi('nSourcesFit', reload_sources=True)
                    break

        self.gta.fit()

        # Get source radius depending on spatial model
        endSources = self.gta.get_sources()
        for i in range(len(endSources)):
            if endSources[i]['name'] == 'TESTSOURCE':
                self.target = endSources[i]
                self.distance = self.gta.roi.skydir.separation(
                    endSources[i].skydir).value
                if endSources[i].extended == True:
                    self.targetRadius = endSources[i]['SpatialWidth']
                else:
                    self.targetRadius = endSources[i]['pos_r95']

    ''' CHECK OVERLAP '''

    def overlapDisk(self, rInner, radiusCatalog):

        print('Target radius : ', self.targetRadius)

        # Check radius sizes
        if radiusCatalog < self.targetRadius:
            r = float(radiusCatalog)
            R = float(self.targetRadius)
        else:
            r = float(self.targetRadius)
            R = float(radiusCatalog)

        # Estimating overlapping area
        d = self.distance
        print('Distance from center : ', d)

        if d < (r + R):
            if R < (r + d):
                area = r**2 * np.arccos(
                    (d**2 + r**2 - R**2) / (2 * d * r)) + R**2 * np.arccos(
                        (d**2 + R**2 - r**2) / (2 * d * R)) - 0.5 * np.sqrt(
                            (-d + r + R) * (d + r - R) * (d - r + R) *
                            (d + r + R))
                overlap = round((area / (np.pi * r**2)) * 100, 2)
            else:
                area = np.pi * r**2
                overlap = 100.0
        else:
            area = 0.
            overlap = 0.

        print('Overlapping surface : ', area)
        print('Overlap : ', overlap)

        if overlap > 68. and self.distance < rInner:
            associated = True
        else:
            associated = False

        return associated

    ''' CHECK UPPER LIMIT '''

    def upperLimit(self, name, radius):
        sourceModel = {
            'SpectrumType': 'PowerLaw',
            'Index': 2.0,
            'Scale': 30000,
            'Prefactor': 1.e-15,
            'SpatialModel': 'RadialDisk',
            'SpatialWidth': radius,
            'glon': self.gta.config['selection']['glon'],
            'glat': self.gta.config['selection']['glat']
        }
        self.gta.add_source(name, sourceModel, free=True)
        self.gta.fit()
        self.gta.residmap(prefix='upperLimit', make_plots=True)
        print('Upper limit : ', self.gta.get_sources()[0]['flux_ul95'])
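
A hedged driver sketch for the class above; the configuration file name and all numeric arguments are placeholders:

# illustrative usage; file name and parameter values are placeholders
ext = ExtensionFit('config.yaml')
ext.initialize(sizeROI=10., rInner=1., addToROI=2., TSMin=50., debug=False)
ext.outerRegionAnalysis(sizeROI=10., rInner=1., sqrtTsThreshold=5.,
                        minSeparation=0.5, debug=False)
ext.innerRegionAnalysis(sizeROI=10., rInner=1., maxIter=3, sqrtTsThreshold=5.,
                        minSeparation=0.5, dmMin=0., TSm1Min=16., TSextMin=16.,
                        debug=False)
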
Example #19
            f.write(os.path.join(od, 'ft1_00.fits') + '\n')

    # modify base config to include merged files
    with open(BASE_CONFIG) as infile, \
            open(CONFIG_FINAL_FILE, 'w') as outfile:
        config = yaml.safe_load(infile)
        config['data']['evfile'] = os.path.join(os.getcwd(), FT1_FILES_LIST)
        config['data']['ltcube'] = os.path.join(os.getcwd(), LTCUBE_FINAL_FILE)
        config['fileio'] = {'outdir': 'out_merged/'}
        outfile.write('# Automatically merged from directories:\n')
        for outdir in outdirs:
            outfile.write('# {}\n'.format(outdir))
        outfile.write('\n')
        yaml.dump(config, outfile, indent=4)

    # some generic processing just for sanity check
    gta = GTAnalysis(CONFIG_FINAL_FILE, logging={'verbosity': 3})
    gta.setup()
    gta.free_source('4FGL J1512.8-0906', free=True, pars=['Index'])
    gta.free_source('4FGL J1512.8-0906', free=True, pars='norm')
    # Free Normalization of all Sources within 3 deg of ROI center
    gta.free_sources(distance=3.0, pars='norm')
    # Free all parameters of isotropic and galactic diffuse components
    gta.free_source('galdiff')
    gta.free_source('isodiff')
    gta.optimize()
    gta.print_roi()
    fit_res = gta.fit()
    print('Fit Quality: ', fit_res['fit_quality'])
    print(gta.roi['4FGL J1512.8-0906'])
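
A possible next step after the sanity-check fit is an SED of the target; a minimal sketch reusing the source name from the snippet:

sed = gta.sed('4FGL J1512.8-0906', make_plots=True)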