import astropy.units as u
import matplotlib.pyplot as plt
import numpy as np


def theta2(theta2_on, theta2_off, scaling, cut, threshold, source, ontime=None, ax=None, window=(0, 1)):

    ax = ax or plt.gca()

    ax.hist(theta2_on, bins=100, range=window, histtype='step', color='r', label='ON')
    ax.hist(theta2_off, bins=100, range=window, histtype='stepfilled', color='tab:blue', alpha=0.5, label='OFF', weights=np.full_like(theta2_off, scaling))

    n_off = np.count_nonzero(theta2_off < cut)
    n_on = np.count_nonzero(theta2_on < cut)
    li_ma = li_ma_significance(n_on, n_off, scaling)
    n_exc_mean = n_on - scaling * n_off
    n_exc_std = np.sqrt(n_on + scaling**2 * n_off)

    txt = rf'''Source: {source}, $t_\mathrm{{obs}} = {ontime.to_value(u.hour):.2f} \mathrm{{h}}$
    $\theta_\mathrm{{max}}^2 = {cut} \mathrm{{deg}}^2,\, t_\gamma = {threshold}$
    $N_\mathrm{{on}} = {n_on},\, N_\mathrm{{off}} = {n_off},\, \alpha = {scaling:.2f}$
    $N_\mathrm{{exc}} = {n_exc_mean:.0f} \pm {n_exc_std:.0f},\, S_\mathrm{{Li&Ma}} = {li_ma:.2f}$
    '''
    ax.text(0.5, 0.95, txt, transform=ax.transAxes, va='top', ha='center')
    ax.axvline(cut, color='k', alpha=0.6, lw=1, ls='--')

    ax.set_xlabel(r'$\theta^2 \,\, / \,\, \mathrm{deg}^2$')
    ax.set_xlim(window)
    ax.legend()
    return ax
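
All of the examples on this page call li_ma_significance. For reference, a minimal sketch of the Li & Ma (1983, ApJ 272, Eq. 17) significance it presumably implements is given below; the exact signature and the default alpha = 0.2 are assumptions, and the real helper (e.g. from the fact package) may differ in details.

import numpy as np


def li_ma_significance(n_on, n_off, alpha=0.2):
    # Li & Ma Eq. 17: significance of N_on events in the ON region given
    # N_off events in the OFF region and exposure ratio alpha = t_on / t_off.
    n_on = np.asarray(n_on, dtype=float)
    n_off = np.asarray(n_off, dtype=float)

    with np.errstate(divide='ignore', invalid='ignore'):
        t_on = n_on * np.log((1 + alpha) / alpha * n_on / (n_on + n_off))
        t_off = n_off * np.log((1 + alpha) * n_off / (n_on + n_off))
        significance = np.sqrt(2 * (t_on + t_off))

    # report zero significance when there is no positive excess
    return np.where(n_on - alpha * n_off > 0, np.nan_to_num(significance), 0.0)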
Example No. 2
def theta2(
    theta2_on,
    theta2_off,
    scaling,
    cut,
    threshold="",
    source="",
    ontime=None,
    ax=None,
    window=(0, 1),
    bins=100,
    on_weights=None,
    off_weights=None,
):

    if on_weights is None:
        # default: boolean masks that select every event
        on_weights = np.ones(len(theta2_on), dtype=bool)
    if off_weights is None:
        off_weights = np.ones(len(theta2_off), dtype=bool)
    ax = ax or plt.gca()
    bins_ = np.linspace(window[0], window[1], bins + 1)  # bins + 1 edges give `bins` bins
    ax.hist(theta2_on,
            bins=bins_,
            range=window,
            histtype="step",
            color="r",
            label="ON")
    ax.hist(
        theta2_off,
        bins=bins_,
        range=window,
        histtype="stepfilled",
        color="tab:blue",
        alpha=0.5,
        label="OFF",
        weights=np.full_like(theta2_off, scaling),
    )

    n_off = np.count_nonzero(theta2_off[off_weights] < cut)
    n_on = np.count_nonzero(theta2_on[on_weights] < cut)
    li_ma = li_ma_significance(n_on, n_off, scaling)
    n_exc_mean = n_on - scaling * n_off
    n_exc_std = np.sqrt(n_on + scaling**2 * n_off)

    txt = rf"""
    $N_\mathrm{{on}} = {n_on},\, N_\mathrm{{off}} = {n_off},\, \alpha = {scaling:.2f}$
    $N_\mathrm{{exc}} = {n_exc_mean:.0f} \pm {n_exc_std:.0f}$
    $S_\mathrm{{Li&Ma}} = {li_ma:.2f}$ in $T = {ontime.to_value(u.hour):.2f} \mathrm{{h}}$
    """
    ax.text(0.5, 0.95, txt, transform=ax.transAxes, va="top", ha="center")
    if isinstance(cut, float):
        ax.axvline(cut, color="k", alpha=0.6, lw=1, ls="--")

    ax.set_xlabel(r"$\theta^2 \,\, / \,\, \mathrm{deg}^2$")
    ax.set_xlim(window)
    ax.legend()
    ax.figure.tight_layout()
    return ax
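
For orientation, a hypothetical call of the helper above on synthetic data (all numbers invented):

import astropy.units as u
import matplotlib.pyplot as plt
import numpy as np

rng = np.random.default_rng(42)
# fake ON sample: flat background plus an excess concentrated near theta^2 = 0
theta2_on = np.concatenate([rng.uniform(0, 1, 900), rng.exponential(0.02, 300)])
# fake OFF sample: background only, five OFF regions worth of events
theta2_off = rng.uniform(0, 1, 4500)

ax = theta2(theta2_on, theta2_off, scaling=0.2, cut=0.04,
            source='Crab', ontime=2.0 * u.hour)
plt.show()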
Example No. 3
def calc_and_append():
    low_index = bin_edges[-1]
    high_index = i
    n_on = len(on_data.iloc[low_index:high_index])
    n_off = len(off_data.loc[
        (off_data.energy >= on_data.iloc[low_index].energy)
        & (off_data.energy <= on_data.iloc[high_index].energy)])
    sigma_li_ma = li_ma_significance(n_on, n_off)
    # the excess is ON minus scaled OFF, not a product
    nexcess = n_on - self.alpha * n_off
    e_high = on_data.iloc[high_index].energy
    e_low = on_data.iloc[low_index].energy
    size = np.abs((e_high - e_low) / e_low)
    if (((sigma_li_ma >= sigma_per_bin) & (size > min_bin_percentage) &
         (nexcess > min_counts_per_bin)) | (i == length - 1)):
        bin_edges.append(high_index)
        energy = int(on_data.iloc[high_index - 1].energy) + 1
        if energy != bin_edges_energy[-1]:
            bin_edges_energy.append(energy)
            sigma_list.append(sigma_li_ma)
Example No. 4
                                use_mc=use_mc,
                                theta_square_cut=str(thetasq))

    print(on_time_per_zd)
    print("On-Time:", np.sum(on_time_per_zd), "s")
    print("On-Time:", np.sum(on_time_per_zd) / (60 * 60), "h")
    flux2d = np.ma.divide(exc_histo, a_eff) / on_time_per_zd[:, np.newaxis]
    flux2d_err = np.ma.divide(exc_histo_err,
                              a_eff) / on_time_per_zd[:, np.newaxis]
    sig = li_ma_significance(on_histo, off_histo)
    flux_e = np.ma.average(flux2d, axis=0, weights=sig)
    flux_e_err = np.sqrt(np.ma.average((flux2d_err**2), axis=0, weights=sig))

    bin_centers = np.power(10,
                           (np.log10(ebins[1:]) + np.log10(ebins[:-1])) / 2)
    bin_width = ebins[1:] - ebins[:-1]

    hess_x = np.array((1.70488, 2.1131, 2.51518, 3.02825, 3.65982, 4.43106,
                       5.37151, 6.50896, 7.87743, 9.52215, 11.4901, 13.8626,
                       16.8379, 20.4584, 24.8479, 30.2065, 36.7507, 44.8404))
    hess_y = np.array(
        (4.15759e-11, 3.30552e-11, 1.7706e-11, 1.28266e-11, 7.57679e-12,
         5.65619e-12, 2.85186e-12, 1.9475e-12, 1.10729e-12, 4.91077e-13,
         3.00283e-13, 8.96491e-14, 4.27756e-14, 1.24023e-14, 3.49837e-15,
         3.51992e-15, 2.24845e-15, 1.34066e-15))
Example No. 5
    def calc_on_off_histo(self,
                          ganymed_file=None,
                          cut=None,
                          use_multiprocessing=True):
        select_leaves = [
            'DataType.fVal', 'MPointingPos.fZd', 'FileId.fVal', 'MTime.fMjd',
            'MTime.fTime.fMilliSec', 'MTime.fNanoSec', 'MHillas.fSize',
            'ThetaSquared.fVal', 'MNewImagePar.fLeakage2', 'MHillas.fWidth',
            'MHillasSrc.fDist', 'MHillasExt.fM3Long', 'MHillasExt.fSlopeLong',
            'MHillas.fLength', 'MHillasExt.fSlopeSpreadWeighted',
            'MHillasExt.fTimeSpreadWeighted', 'MHillasSrc.fCosDeltaAlpha'
        ]
        if ganymed_file:
            self.ganymed_file_data = ganymed_file

        if self.ganymed_file_data is None:
            leafs = select_leaves.copy()
            leafs.remove('DataType.fVal')
            leafs.remove('FileId.fVal')
            leafs.remove('ThetaSquared.fVal')
            leafs.remove('MPointingPos.fZd')

            histos = histos_from_list_of_mars_files(
                self.run_list_star,
                leafs,
                self.zenith_binning,
                self.energy_binning,
                self.theta_square,
                efunc=self.energy_function,
                cut_function=cut,
                use_multiprocessing=use_multiprocessing)
        else:
            data_cut = read_mars(self.ganymed_file_data,
                                 leaf_names=select_leaves)
            histos = calc_onoffhisto(data_cut,
                                     self.zenith_binning,
                                     self.energy_binning,
                                     self.theta_square,
                                     energy_function=self.energy_function,
                                     cut=cut)

        # Save theta-square histograms
        self.theta_square_binning = histos[1][0][1]
        self.on_theta_square_histo = histos[1][0][0]
        self.off_theta_square_histo = histos[1][1][0]

        # Zenith, Energy histograms
        self.on_histo_zenith = histos[0][0]
        self.off_histo_zenith = histos[0][1]

        # keep the 2D (zenith x energy) excess under its own name so the
        # 1D energy sums below do not silently overwrite it
        self.excess_histo_zenith = self.on_histo_zenith - self.alpha * self.off_histo_zenith
        self.excess_histo_zenith_err = np.sqrt(self.on_histo_zenith +
                                               self.alpha**2 * self.off_histo_zenith)

        # Energy histograms

        self.on_histo = np.sum(self.on_histo_zenith, axis=0)
        self.off_histo = np.sum(self.off_histo_zenith, axis=0)

        self.excess_histo = self.on_histo - self.alpha * self.off_histo
        self.excess_histo_err = np.sqrt(self.on_histo +
                                        self.alpha**2 * self.off_histo)

        self.significance_histo = li_ma_significance(self.on_histo,
                                                     self.off_histo,
                                                     self.alpha)

        # Calculate overall statistics

        self.n_on_events = np.sum(self.on_histo_zenith)
        self.n_off_events = np.sum(self.off_histo_zenith)

        self.n_excess_events = self.n_on_events - self.alpha * self.n_off_events
        self.n_excess_events_err = np.sqrt(self.n_on_events +
                                           self.alpha**2 * self.n_off_events)

        self.overall_significance = li_ma_significance(self.n_on_events,
                                                       self.n_off_events,
                                                       self.alpha)
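
The excess and its error are computed with the same expressions in nearly every example on this page; a small helper factoring them out could look like this (a sketch, not part of the original class):

import numpy as np


def excess_and_error(n_on, n_off, alpha):
    # N_exc = N_on - alpha * N_off, with Gaussian error propagation
    # sigma_exc = sqrt(N_on + alpha**2 * N_off)
    n_exc = n_on - alpha * n_off
    n_exc_err = np.sqrt(n_on + alpha**2 * n_off)
    return n_exc, n_exc_err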
Example No. 6
def overall_sigma(x, data=None):
    source_data = data.loc[data["ThetaSquared.fVal"] < x]
    n_on = len(source_data.loc[source_data["DataType.fVal"] == 1.0])
    n_off = len(source_data.loc[source_data["DataType.fVal"] == 0.0])
    # 100 - S, so that a minimizer maximizes the significance
    return 100 - li_ma_significance(n_on, n_off)
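
Returning 100 - S turns the significance into an objective for a scalar minimizer, so minimizing it maximizes the Li & Ma significance as a function of the theta-square cut. A hypothetical use with scipy (the events DataFrame and the bounds are assumptions):

from functools import partial

from scipy.optimize import minimize_scalar

# events is a hypothetical DataFrame holding the ThetaSquared.fVal and
# DataType.fVal leaves used by overall_sigma above
result = minimize_scalar(partial(overall_sigma, data=events),
                         bounds=(0.005, 0.1), method='bounded')
best_theta2_cut = result.x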
Example No. 7
        print("\nRead data from output ganymed file ---------")
        data_cut = read_mars.read_mars(ganymed_result, leaf_names=select_leaves)
        histos = read_data.calc_onoffhisto(data_cut, zdbins, zdlabels, ebins, elabels, thetasq)

    print("--------- Finished reading data.")

    print(histos)

    on_histo = histos[0][0]
    off_histo = histos[0][1]

    exc_histo = on_histo - (1 / 5) * off_histo
    exc_histo_err = np.sqrt(on_histo + (1 / 25) * off_histo)

    overall_significance = li_ma_significance(np.sum(on_histo), np.sum(off_histo))

    # Calculate the effective area:

    ceres_list = ["/home/michi/read_mars/ceres_part" + str(i) + ".h5"
                  for i in range(8)]

    a_eff = calc_a_eff_parallel.calc_a_eff_parallel_hd5(ebins, zdbins,
                                                        correction_factors=corr_factors,
                                                        theta_square_cut=str(thetasq),
                                                        path=base_path + "gamma/hzd_gammasall-analysis.root",
                                                        list_of_hdf_ceres_files=ceres_list)

    print(on_time_per_zd)
Example No. 8
def theta2(df_on, cut, threshold, df_off=None, ax=None, window=(0, 1), alpha=None, coord=None, n_offs=1):

    ax = ax or plt.gca()

    focal_length = df_on.focal_length
    df_on_selected = df_on.query(f'gammaness > {threshold}')

    dist_on = calc_dist(df_on_selected, coord, n_offs)
    theta2_on = np.rad2deg(np.sqrt(dist_on) / focal_length)**2

    if df_off is not None:
        df_off_selected = df_off.query(f'gammaness > {threshold}')

        dist_off = calc_dist(df_off_selected, coord, n_offs, OFF=True)
        theta2_off = np.rad2deg(np.sqrt(dist_off) / focal_length)**2
        
        if n_offs > 1:
            scaling = 1 / n_offs
        elif alpha == 'manual':
            norm_range = window[1] / 2

            def mean_count(theta2):
                # mean background level in the outer half of the window
                counts, edges = np.histogram(theta2[theta2 < window[1]], bins=100)
                return np.mean(counts[edges[:-1] > norm_range])

            mean_on = mean_count(theta2_on)
            mean_off = mean_count(theta2_off)

            scaling = mean_on / mean_off
        else:
            total_time_on = total_t(df_on_selected)
            total_time_off = total_t(df_off_selected)
            
            scaling = total_time_on / total_time_off
        
        ax.hist(theta2_off, bins=100, range=window, histtype='stepfilled', color='tab:blue', alpha=0.5, label='OFF', weights=np.full_like(theta2_off, scaling))

    ax.hist(theta2_on, bins=100, range=window, histtype='step', color='r', label='ON')
    ax.set_xlabel(r'$\theta^2 \,\, / \,\, \mathrm{deg}^2$')
    ax.legend()
    ax.figure.tight_layout()

    if df_off is not None:
        text_pos = 0.9 * theta2_on[theta2_on < 0.01].size 

        n_off = np.count_nonzero(theta2_off < cut)
        n_on = np.count_nonzero(theta2_on < cut)
        li_ma = li_ma_significance(n_on, n_off, scaling)
        n_exc_mean = n_on - scaling * n_off
        n_exc_std = np.sqrt(n_on + scaling**2 * n_off)

        ax.axvline(x=cut, color='k', alpha=0.6, lw=1.5, ls=':')
        ax.annotate(
            rf'$\theta_\mathrm{{max}}^2 = {cut} \mathrm{{deg}}^2$' + '\n' + rf'$(\, t_\gamma = {threshold} \,)$', 
            (cut + window[1] / 100, 0.8 * text_pos)
        )
        
        if alpha == 'manual' or n_offs > 1:
            text = (rf'$N_\mathrm{{on}} = {n_on},\, N_\mathrm{{off}} = {n_off},\, \alpha = {scaling:.2f}$' + '\n' 
                + rf'$N_\mathrm{{exc}} = {n_exc_mean:.0f} \pm {n_exc_std:.0f},\, S_\mathrm{{Li&Ma}} = {li_ma:.2f}$'
            )
        else:
            total_time_on_hour = total_time_on / 3600
            total_time_off_hour = total_time_off / 3600
            text = (rf'$N_\mathrm{{on}} = {n_on},\, N_\mathrm{{off}} = {n_off}$' + '\n' 
                + rf'$t_\mathrm{{on}} = {total_time_on_hour:.2f} \mathrm{{h}},\, t_\mathrm{{off}} = {total_time_off_hour:.2f} \mathrm{{h}},\, \alpha = {scaling:.2f}$' + '\n' 
                + rf'$N_\mathrm{{exc}} = {n_exc_mean:.0f} \pm {n_exc_std:.0f},\, S_\mathrm{{Li&Ma}} = {li_ma:.2f}$'
            )
        
        ax.text(0.3, text_pos, text)

    return ax
Example No. 9
def main(output, data, source, cuts_file, theta2_cut, threshold, n_offs,
         n_jobs):
    outdir = output.split('/')[0]

    src = SkyCoord.from_name(source)

    if n_jobs == -1:
        n_jobs = cpu_count()

    with Pool(n_jobs) as pool:
        results = np.array(pool.starmap(calculation.read_run_calculate_thetas,
                                        [(run, columns, threshold, src, n_offs)
                                         for run in data]),
                           dtype=object)

    df_selected = pd.concat(results[:, 0], ignore_index=True)
    ontime = np.sum(results[:, 1])
    theta = np.concatenate(results[:, 2])
    df_selected5 = pd.concat(results[:, 3], ignore_index=True)
    theta_off = np.concatenate(results[:, 4])

    # use pyirf cuts
    gh_cuts = table.QTable.read(cuts_file, hdu='GH_CUTS')
    theta_cuts_opt = table.QTable.read(cuts_file, hdu='THETA_CUTS_OPT')

    with Pool(n_jobs) as pool:
        results = np.array(pool.starmap(calculation.read_run_calculate_thetas,
                                        [(run, columns, gh_cuts, src, n_offs)
                                         for run in data]),
                           dtype=object)

    df_pyirf = pd.concat(results[:, 0], ignore_index=True)
    theta_pyirf = np.concatenate(results[:, 2])
    df_pyirf5 = pd.concat(results[:, 3], ignore_index=True)
    theta_off_pyirf = np.concatenate(results[:, 4])

    n_on = np.count_nonzero(
        evaluate_binned_cut(
            theta_pyirf,
            df_pyirf.gamma_energy_prediction.to_numpy() * u.TeV,
            theta_cuts_opt, operator.le))
    n_off = np.count_nonzero(
        evaluate_binned_cut(
            theta_off_pyirf,
            df_pyirf5.gamma_energy_prediction.to_numpy() * u.TeV,
            theta_cuts_opt, operator.le))
    li_ma = li_ma_significance(n_on, n_off, 1 / n_offs)
    n_exc_mean = n_on - (1 / n_offs) * n_off
    n_exc_std = np.sqrt(n_on + (1 / n_offs)**2 * n_off)

    ##############################################################################################################
    # plots
    ##############################################################################################################
    figures = []

    figures.append(plt.figure())
    ax = figures[-1].add_subplot(1, 1, 1)
    plotting.theta2(theta.deg**2,
                    theta_off.deg**2,
                    1 / n_offs,
                    theta2_cut,
                    threshold,
                    source,
                    ontime=ontime,
                    ax=ax)
    ax.set_title('Theta calculated in ICRS using astropy')

    figures.append(plt.figure())
    ax = figures[-1].add_subplot(1, 1, 1)
    plotting.theta2(theta_pyirf.deg**2,
                    theta_off_pyirf.deg**2,
                    1 / n_offs,
                    theta2_cut,
                    r'\mathrm{energy-dependent}',
                    source,
                    ontime=ontime,
                    ax=ax)
    ax.set_title(r'Energy-dependent $t_\gamma$ optimised using pyirf')

    # plot using pyirf theta cuts
    figures.append(plt.figure())
    ax = figures[-1].add_subplot(1, 1, 1)

    ax.hist(theta_pyirf.deg**2,
            bins=100,
            range=[0, 1],
            histtype='step',
            color='r',
            label='ON')
    ax.hist(theta_off_pyirf.deg**2,
            bins=100,
            range=[0, 1],
            histtype='stepfilled',
            color='tab:blue',
            alpha=0.5,
            label='OFF',
            weights=np.full_like(theta_off_pyirf.deg**2, 1 / n_offs))

    txt = rf'''Source: {source}, $t_\mathrm{{obs}} = {ontime.to_value(u.hour):.2f} \mathrm{{h}}$
    $N_\mathrm{{on}} = {n_on},\, N_\mathrm{{off}} = {n_off},\, \alpha = {1/n_offs:.2f}$
    $N_\mathrm{{exc}} = {n_exc_mean:.0f} \pm {n_exc_std:.0f},\, S_\mathrm{{Li&Ma}} = {li_ma:.2f}$
    '''
    ax.text(0.5, 0.95, txt, transform=ax.transAxes, va='top', ha='center')

    ax.set_xlabel(r'$\theta^2 \,\, / \,\, \mathrm{deg}^2$')
    ax.set_xlim(0, 1)
    ax.legend()
    ax.set_title(
        r'Energy-dependent $t_\gamma$ and $\theta_\mathrm{max}^2$ optimised using pyirf'
    )

    ##############################################################################################################
    # sensitivity
    ##############################################################################################################
    sensitivity_bins = add_overflow_bins(
        create_bins_per_decade(10**-1.8 * u.TeV,
                               10**2.41 * u.TeV,
                               bins_per_decade=5))
    # gh_cuts and theta_cuts_opt were read from cuts_file above

    gammas = plotting.to_astropy_table(
        df_pyirf,  # df_pyirf has pyirf gh cuts already applied
        column_map=COLUMN_MAP,
        unit_map=UNIT_MAP,
        theta=theta_pyirf,
        t_obs=ontime)
    background = plotting.to_astropy_table(df_pyirf5,
                                           column_map=COLUMN_MAP,
                                           unit_map=UNIT_MAP,
                                           theta=theta_off_pyirf,
                                           t_obs=ontime)
    gammas["selected_theta"] = evaluate_binned_cut(gammas["theta"],
                                                   gammas["reco_energy"],
                                                   theta_cuts_opt, operator.le)
    background["selected_theta"] = evaluate_binned_cut(
        background["theta"], background["reco_energy"], theta_cuts_opt,
        operator.le)

    # calculate sensitivity
    signal_hist = create_histogram_table(gammas[gammas["selected_theta"]],
                                         bins=sensitivity_bins)
    background_hist = estimate_background(
        background[background["selected_theta"]],
        reco_energy_bins=sensitivity_bins,
        theta_cuts=theta_cuts_opt,
        alpha=1 / n_offs,
        background_radius=MAX_BG_RADIUS,
    )
    sensitivity = calculate_sensitivity(signal_hist,
                                        background_hist,
                                        alpha=1 / n_offs)

    # scale relative sensitivity by Crab flux to get the flux sensitivity
    spectrum = CRAB_HEGRA
    sensitivity["flux_sensitivity"] = (
        sensitivity["relative_sensitivity"] *
        spectrum(sensitivity['reco_energy_center']))

    ##############################################################################################################
    # sensitivity using unoptimised cuts
    ##############################################################################################################
    gammas_unop = plotting.to_astropy_table(
        df_selected,  # df_selected has gammaness > threshold already applied
        column_map=COLUMN_MAP,
        unit_map=UNIT_MAP,
        theta=theta,
        t_obs=ontime)
    background_unop = plotting.to_astropy_table(df_selected5,
                                                column_map=COLUMN_MAP,
                                                unit_map=UNIT_MAP,
                                                theta=theta_off,
                                                t_obs=ontime)

    gammas_unop["selected_theta"] = gammas_unop["theta"].to_value(
        u.deg) <= np.sqrt(0.03)
    background_unop["selected_theta"] = background_unop["theta"].to_value(
        u.deg) <= np.sqrt(0.03)

    # copy the table so the pyirf-optimised cuts are not modified in place
    theta_cut_unop = theta_cuts_opt.copy()
    theta_cut_unop['cut'] = np.sqrt(0.03) * u.deg

    # calculate sensitivity
    signal_hist_unop = create_histogram_table(
        gammas_unop[gammas_unop["selected_theta"]], bins=sensitivity_bins)
    background_hist_unop = estimate_background(
        background_unop[background_unop["selected_theta"]],
        reco_energy_bins=sensitivity_bins,
        theta_cuts=theta_cut_unop,
        alpha=1 / n_offs,
        background_radius=MAX_BG_RADIUS,
    )
    sensitivity_unop = calculate_sensitivity(signal_hist_unop,
                                             background_hist_unop,
                                             alpha=1 / n_offs)

    # scale relative sensitivity by Crab flux to get the flux sensitivity
    sensitivity_unop["flux_sensitivity"] = (
        sensitivity_unop["relative_sensitivity"] *
        spectrum(sensitivity_unop['reco_energy_center']))

    # write fits file and create plot
    hdus = [
        fits.PrimaryHDU(),
        fits.BinTableHDU(sensitivity, name="SENSITIVITY"),
        fits.BinTableHDU(sensitivity_unop, name="SENSITIVITY_UNOP")
    ]
    fits.HDUList(hdus).writeto(f'{outdir}/sensitivity_{source}.fits.gz',
                               overwrite=True)

    figures.append(plt.figure())
    ax = figures[-1].add_subplot(1, 1, 1)
    for s, label in zip([sensitivity, sensitivity_unop], [
            'pyirf optimised cuts',
            rf'$\theta^2 < {theta2_cut}$ and gh_score$> {threshold}$'
    ]):
        plotting.plot_sensitivity(s, label=label, ax=ax)

    # plot Magic sensitivity for reference
    magic = table.QTable.read('notebooks/magic_sensitivity_2014.ecsv')
    plotting.plot_sensitivity(magic, label='MAGIC 2014', ax=ax, magic=True)

    ax.set_title(
        f'Minimal Flux Satisfying Requirements for 50 hours \n(based on {ontime.to_value(u.hour):.2f}h of {source} observations)'
    )

    # save plots
    with PdfPages(output) as pdf:
        for fig in figures:
            fig.tight_layout()
            pdf.savefig(fig)
Example No. 10
def calc_spectrum(
        star_files=("/media/michi/523E69793E69574F/daten/421_flare_ed.txt",),
        ganymed_result=None,
        base_path="/media/michi/523E69793E69574F/daten/",
        theta_square_cut=0.04,
        zdbins=np.linspace(0, 60, 15),
        zdlabels=None,
        ebins=np.logspace(np.log10(200.0), np.log10(50000.0), 12),
        elabels=None,
        correction_factors=False):
    """Calculate a flux spectrum from a list of STAR files.

    theta_square_cut: theta-square cut defining the ON region.
    zdbins: zenith-distance bins; MC are available from 0 to 60 deg.
    ebins: energy bins; MC are available from 0.2 to 50 TeV.
    correction_factors: if False, the effective area is calculated with
        estimated energy and not MC energy.
    ganymed_result: on ISDC, pass None to read the automatically processed
        Ganymed output from the star files.
    """
    # Create the labels for binning in energy and zenith distance
    # (evaluated here rather than as defaults, so they follow the given bins).
    if zdlabels is None:
        zdlabels = range(len(zdbins) - 1)
    if elabels is None:
        elabels = range(len(ebins) - 1)

    # Iterate over a list of input STAR files:
    star_list = []
    for entry in star_files:
        star_list += list(open(entry, "r"))

    # Calculation of on time from the ganymed input list of star files:
    on_time_per_zd = calc_on_time(star_list, zdbins, zdlabels)

    # Read the required leaves of the ganymed-analysis output file and calculate energy estimation:

    select_leaves = [
        'DataType.fVal', 'MPointingPos.fZd', 'FileId.fVal', 'MTime.fMjd',
        'MTime.fTime.fMilliSec', 'MTime.fNanoSec', 'MHillas.fSize',
        'ThetaSquared.fVal', 'MNewImagePar.fLeakage2'
    ]

    if not ganymed_result:
        print("\nRead data from Star files. ---------")
        histos = read_data.histos_from_list_of_mars_files(
            star_list, select_leaves, zdbins, zdlabels, ebins, elabels,
            theta_square_cut)

    else:
        print("\nRead data from output ganymed file ---------")
        data_cut = read_mars(ganymed_result, leaf_names=select_leaves)
        histos = read_data.calc_onoffhisto(data_cut, zdbins, zdlabels, ebins,
                                           elabels, theta_square_cut)

    print("--------- Finished reading data.")

    print(histos)

    on_histo = histos[0][0]
    off_histo = histos[0][1]

    exc_histo = on_histo - (1 / 5) * off_histo
    exc_histo_err = np.sqrt(on_histo + (1 / 25) * off_histo)

    overall_significance = li_ma_significance(np.sum(on_histo),
                                              np.sum(off_histo))

    # Calculate the effective area:

    a_eff = calc_a_eff_parallel(ebins,
                                zdbins,
                                correction_factors=correction_factors,
                                theta_square_cut=theta_square_cut,
                                path=base_path)

    print(on_time_per_zd)
    print("On-Time:", np.sum(on_time_per_zd), "s")
    print("On-Time:", np.sum(on_time_per_zd) / (60 * 60), "h")

    # Calculate an effective effective area, that scales the effective area per on time
    a_eff = (a_eff * on_time_per_zd[:, np.newaxis]) / np.sum(on_time_per_zd)
    flux = np.divide(np.sum(exc_histo, axis=0), np.sum(a_eff, axis=0))
    flux = np.divide(flux, (np.sum(on_time_per_zd)))
    flux_err = np.ma.divide(
        np.sqrt(
            np.sum(on_histo, axis=0) + (1 / 25) * np.sum(off_histo, axis=0)),
        np.sum(a_eff, axis=0)) / np.sum(on_time_per_zd)
    sig = li_ma_significance(on_histo, off_histo)

    bin_centers = np.power(10,
                           (np.log10(ebins[1:]) + np.log10(ebins[:-1])) / 2)
    bin_width = ebins[1:] - ebins[:-1]

    flux_de = np.divide(flux, np.divide(bin_width, 1000))
    flux_de_err = np.divide(flux_err, np.divide(bin_width, 1000))
    flux_de_err_log10 = symmetric_log10_errors(flux_de, flux_de_err)

    return \
        bin_centers, bin_centers-ebins[:-1], ebins[1:]-bin_centers, \
        flux_de, flux_de_err_log10[0], flux_de_err_log10[1], \
        sig, overall_significance