def main():

    #seaborn.set_palette("Set2")
    seaborn.set_palette("colorblind")
    # Create a dataframe with one row per parameter set
    dfs_paramsets = [prepare_df_density_errors(df, R32) for df in dfs]

    name = "mape_liq_density"
    fig, ax = plt.subplots()
    axins = inset_axes(ax,
                       width="100%",
                       height="100%",
                       bbox_to_anchor=(0.35, 0.45, 0.25, 0.40),
                       bbox_transform=ax.transAxes,
                       loc=3)
    ax.set_box_aspect(1.2)
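    # Cumulative curve: x is the sorted liquid-density MAPE, y is the number of
    # parameter sets with an error at or below that value (200 sets per curve).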
    labels = ["LD-1", "LD-2", "LD-3", "LD-4"]
    for df_params, label in zip(dfs_paramsets, labels):
        ax.plot(
            df_params.sort_values(name)[name],
            np.arange(1, 201, 1),
            '-s',
            markersize=6,
            linewidth=3,
            alpha=0.8,
            label=label,
        )

    ax.set_ylim(0, 205)
    ax.set_xlim(0, 100)
    ax.set_yticks([0, 50, 100, 150, 200])
    ax.xaxis.set_major_locator(MultipleLocator(20))
    ax.xaxis.set_minor_locator(AutoMinorLocator(2))
    ax.yaxis.set_major_locator(MultipleLocator(50))
    ax.yaxis.set_minor_locator(AutoMinorLocator(2))
    ax.tick_params("both",
                   direction="in",
                   which="both",
                   length=4,
                   labelsize=16,
                   pad=10)
    ax.tick_params("both", which="major", length=8)
    ax.xaxis.set_ticks_position("both")
    ax.yaxis.set_ticks_position("both")

    ax.set_ylabel(r"$N_\mathrm{cumu.}$ parameter sets",
                  fontsize=20,
                  labelpad=20)
    ax.set_xlabel("Liquid density MAPE", fontsize=20, labelpad=15)
    ax.legend(fontsize=16,
              loc=(-0.06, 1.05),
              ncol=2,
              columnspacing=1,
              handletextpad=0.5)
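    # Repeat the same curves in the inset, zoomed in on the low-MAPE region.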

    for df_params, label in zip(dfs_paramsets, labels):
        axins.plot(
            df_params.sort_values(name)[name],
            np.arange(1, 201, 1),
            '-s',
            markersize=4,
            linewidth=2,
            alpha=0.6,
            label=label,
        )

    axins.set_xlim(0, 2.5)
    axins.set_ylim(0, 125)
    axins.tick_params("both",
                      direction="in",
                      which="both",
                      length=3,
                      labelsize=12)
    axins.tick_params("both", which="major", length=6)
    axins.xaxis.set_major_locator(MultipleLocator(1))
    axins.xaxis.set_minor_locator(AutoMinorLocator(2))
    axins.yaxis.set_major_locator(MultipleLocator(50))
    axins.yaxis.set_minor_locator(AutoMinorLocator(2))
    axins.xaxis.set_ticks_position("both")
    axins.yaxis.set_ticks_position("both")

    fig.tight_layout()
    fig.savefig("pdfs/fig2_r32-density-cumu.pdf")
Example #2
f3 = np.loadtxt("freq_590_VNATrc.001")
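# f1 and f2 are not loaded in the original snippet; judging by the legend below
# (562 and 538 GHz), they presumably come from the matching VNA trace files.
# Hypothetical filenames, assumed rather than taken from the source:
f2 = np.loadtxt("freq_562_VNATrc.001")
f1 = np.loadtxt("freq_538_VNATrc.001")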

#chdir("../process")

fig, ax = plt.subplots()
plt.minorticks_on()
p1, = plt.plot(f3[:, 0], f3[:, 2] - np.mean(f3[:, 2]))
p2, = plt.plot(f2[:, 0], f2[:, 2] - np.mean(f2[:, 2]) + 100.)
p3, = plt.plot(f1[:, 0], f1[:, 2] - np.mean(f1[:, 2]) + 100.)
plt.xlim(0.0, 1.6)
plt.ylim(-30., 30.)
plt.ylabel("Phase shift (deg)")
plt.xlabel("Time (s)")
plt.legend([p1, p2, p3], ["590 GHz", "562 GHz", "538 GHz"], loc=3)
plt.title("Motion of cryostat")
ax.xaxis.set_minor_locator(AutoMinorLocator(2))
ax.yaxis.set_minor_locator(AutoMinorLocator(2))
plt.savefig("apr6fig1.pdf")
plt.clf()

fig, ax = plt.subplots()
plt.minorticks_on()
p1, = plt.plot(f3[:, 0], f3[:, 2] - np.mean(f3[:, 2]))
p2, = plt.plot(f2[:, 0], f2[:, 2] - np.mean(f2[:, 2]))
p3, = plt.plot(f1[:, 0], f1[:, 2] - np.mean(f1[:, 2]) + 100.)
plt.xlim(0.0, 1.6)
plt.ylim(-30., 30.)
plt.ylabel("Phase shift (deg)")
plt.xlabel("Time (s)")
plt.legend([p1, p2, p3], ["590 GHz", "562 GHz", "538 GHz"], loc=3)
plt.title("Motion of cryostat")
    plotName = 'Scatterplot_permodel_ToE_noise_RCP85vsPiControl_'+str(multstd)+'std'
    noise = 'PiControl'
else:
    title = 'Hist+RCP8.5 vs. histNat ('+str(nruns)+' runs)'
    noise = 'histNat'
    if runs_rcp == 'all':
        plotName = 'Scatterplot_permodel_ToE_noise_RCP85vshistNat_'+str(multstd)+'std_newsignal'
    else:
        plotName = 'Scatterplot_permodel_ToE_noise_RCP85vshistNat_'+str(multstd)+'std_samerunsvsPiC'


ax.set_ylim([1860,2105])
ax.set_xlim([0,0.14])
ax.set_xticks(np.arange(0,0.1401,0.02))
ymajorLocator = MultipleLocator(40)
yminorLocator = AutoMinorLocator(2)
ax.yaxis.set_major_locator(ymajorLocator)
ax.yaxis.set_minor_locator(yminorLocator)

fig.delaxes(axes[-1,-1])

#plt.suptitle('ToE[>' +str(multstd)+ 'std]/noise for '+title, fontweight='bold', fontsize=14)

plt.figtext(0.38,0.03,'Noise: std('+noise+')', fontweight='bold',fontsize=15)
plt.figtext(0.007,0.63,'Time of Emergence', fontweight='bold', rotation='vertical',fontsize=15)

# Put a legend to the right of the current axis
lgd = fig.legend(l,domain_names,loc='center right', bbox_to_anchor=(0.995, 0.5), scatterpoints=1,frameon=False, markerscale=2,fontsize=14)

plt.subplots_adjust(left=0.06,right=0.8,hspace=0.13,wspace=0.1,bottom=0.12)
Example #4
def plot_secondary_spectrum():

    # =========================================================
    #   Plot
    # =========================================================

    # tex_preamble = [
    #     r"\usepackage{amsmath}",
    #     r"\usepackage[utf8]{inputenc}",
    #     r"\usepackage[T1]{fontenc}",
    # ]

    # font_size = 10

    # params = {
    #     'backend': 'pdf',
    #     'font.family': 'serif',
    #     'font.size': 12,
    #     'text.usetex': True,
    #     'text.latex.preamble': tex_preamble,
    #     'axes.labelsize': font_size,
    #     'legend.numpoints': 1,
    #     'legend.shadow': False,
    #     'legend.fontsize': font_size,
    #     'xtick.labelsize': font_size,
    #     'ytick.labelsize': font_size,
    #     'axes.unicode_minus': True
    # }

    # plt.rcParams.update(params)

    inch_to_cm = 2.54
    golden_ratio = 1.61803
    width = 29.7  # cm

    # # =========================================================
    # #   All hists together $10^{{{:.0g}}}$
    # # =========================================================

    npzfile = np.load("data_sec_dist_MuMinus_standardrock_Emin_10.0_Emax_10.0.npz")
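    # The archive holds the secondary-energy samples per interaction type
    # (ionization, bremsstrahlung, photonuclear, pair production) together with
    # the propagation settings that produced them.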

    ioniz_secondary_energy = npzfile['ioniz']
    brems_secondary_energy = npzfile['brems']
    photo_secondary_energy = npzfile['photo']
    epair_secondary_energy = npzfile['epair']

    statistics = npzfile['statistics'][0]
    E_min_log = npzfile['E_min'][0]
    E_max_log = npzfile['E_max'][0]
    spectral_index = npzfile['spectral_index'][0]
    distance = npzfile['distance'][0]
    medium_name = npzfile['medium_name'][0]
    particle_name = npzfile['particle_name'][0]
    ecut = npzfile['ecut'][0]
    vcut = npzfile['vcut'][0]


    fig_all = plt.figure(
        figsize=(width / inch_to_cm, width / inch_to_cm / golden_ratio)
    )
    fig_all.suptitle(r"{:g} {} from $10^{{{:.2g}}}$ to $10^{{{:.2g}}}$ MeV from $E^{{-{:.2g}}}$ spectrum propagated {} m in {}".format(
        statistics,
        particle_name,
        E_min_log,
        E_max_log,
        spectral_index,
        distance,
        medium_name,
    ))

    ax_all = fig_all.add_subplot(111)
    ax_all.hist(
        [
            ioniz_secondary_energy,
            photo_secondary_energy,
            brems_secondary_energy,
            epair_secondary_energy,
            np.concatenate((
                ioniz_secondary_energy,
                brems_secondary_energy,
                photo_secondary_energy,
                epair_secondary_energy)
            )
        ],
        histtype='step',
        log=True,
        bins=100,
        label=['Ionization', 'Photonuclear', 'Bremsstrahlung', 'Pair Production', 'Sum']
    )
    # ax_all.set_ylim(ymin=0)
    minor_locator = AutoMinorLocator()
    ax_all.xaxis.set_minor_locator(minor_locator)
    ax_all.legend()
    ax_all.set_xlabel(r'energy loss / log($E$/MeV)')
    ax_all.set_ylabel(r'$N$')

    fig_all.savefig("all_{}_stats_{}_Emin_{}_Emax_{}_index_{}.pdf".format(
        medium_name,
        statistics,
        E_min_log,
        E_max_log,
        spectral_index
    ))
Example #5
def plot_all(enwiki_collections, geb_collections, datasets, methods,
             limit_methods, base_methods, plot_folder, get_ylim):
    titles = ["enwiki", "geb"]

    for d in datasets:
        fig, axs = plt.subplots(nrows=1, ncols=2, sharex='all')
        # fig.suptitle(d)
        for i, ax in enumerate(axs):
            ax.set_title(titles[i])
            ax.set_ylim(get_ylim(d))

        cc = cycle(colors)
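        # One color per method: the solid curve traces the metric over alpha,
        # the dashed horizontal line marks the corresponding limit method.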
        for m, lm in zip(methods, limit_methods):
            enwiki_curve = enwiki_collections['alpha'][d][m]
            enwiki_lim = enwiki_collections['alpha'][d][lm]
            geb_curve = geb_collections['alpha'][d][m]
            geb_lim = geb_collections['alpha'][d][lm]

            c = next(cc)

            axs[0].plot(alphas, enwiki_curve, color=c, label=nice_label(m))
            axs[0].hlines(enwiki_lim, amin, amax, linestyles='--', color=c)
            axs[0].set_xlim(-10, 6)
            #axs[0].grid()

            axs[0].set_xticks(np.arange(-10, 7, 2))
            minor_locator = AutoMinorLocator(2)
            axs[0].xaxis.set_minor_locator(minor_locator)

            axs[0].set_yticks(
                np.arange(min(get_ylim(d)),
                          max(get_ylim(d)) + 1, 2))
            minor_locator = AutoMinorLocator(2)
            axs[0].yaxis.set_minor_locator(minor_locator)

            axs[0].set_axisbelow(True)
            #ax.grid(which='major', linestyle='-', linewidth='0.5', color='gray')
            #ax.grid(which='minor', linestyle='-', linewidth='0.5', color='gray')
            axs[0].grid(which='major',
                        linestyle='-',
                        linewidth='1',
                        color='lightgray')
            axs[0].grid(which='minor',
                        linestyle='--',
                        linewidth='1',
                        color='lightgray')

            axs[1].plot(alphas, geb_curve, color=c, label=nice_label(m))
            axs[1].hlines(geb_lim, amin, amax, linestyles='--', color=c)
            axs[1].set_xlim(-10, 7)
            axs[1].grid()

            axs[1].set_xticks(np.arange(-10, 7, 2))
            minor_locator = AutoMinorLocator(2)
            axs[1].xaxis.set_minor_locator(minor_locator)

            axs[1].set_yticks(
                np.arange(min(get_ylim(d)),
                          max(get_ylim(d)) + 1, 2))
            minor_locator = AutoMinorLocator(2)
            axs[1].yaxis.set_minor_locator(minor_locator)

            axs[1].set_axisbelow(True)
            #ax.grid(which='major', linestyle='-', linewidth='0.5', color='gray')
            #ax.grid(which='minor', linestyle='-', linewidth='0.5', color='gray')
            axs[1].grid(which='major',
                        linestyle='-',
                        linewidth='1',
                        color='lightgray')
            axs[1].grid(which='minor',
                        linestyle='--',
                        linewidth='1',
                        color='lightgray')

        plot_base(base_methods, enwiki_collections['base'][d], axs[0])
        plot_base(base_methods, geb_collections['base'][d], axs[1])
        lgd = axs[0].legend(loc='upper center',
                            bbox_to_anchor=(1, 0.0),
                            ncol=4)
        #plt.tight_layout()
        fig.subplots_adjust(bottom=0.4)  # or whatever
        path = plot_folder + "/" + d + ".png"
        plt.savefig(path)  #, bbox_extra_artist = [lgd])
        print("saved " + path)
        plt.close()
        subprocess.run(["convert", "-trim", path, path])
Example #6
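# Assumed context (not part of the original snippet): a time-indexed pandas
# DataFrame `df` with an 'HV_voltage' column, a cutoff `timeThreshold`, and the
# figure `fig` are created upstream, e.g. fig = plt.figure() (hypothetical).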
ax1 = fig.add_subplot(1, 1, 1)
# Hide the right and top spines
# ax1.spines['right'].set_visible(False)
# ax1.spines['top'].set_visible(False)

# df['mytime'] = df.index
# df['hours'] = df.mytime.dt.strftime('%H:%M')
df = df.reset_index()
# print(df)
df1 = df[df.index > timeThreshold]
ax1.plot(df1.index, df1['HV_voltage'], 'o',color="darkorange", markersize=5,  alpha=0.65, markeredgewidth=1.5, markeredgecolor='darkorange')

# plt.ylim(0.05,0.25)
# ax1.yaxis.set_ticks(np.arange(0.05,0.25+0.05,0.05))
# minor ticks x
minor_locator = AutoMinorLocator(2)
ax1.xaxis.set_minor_locator(minor_locator)
# minor ticks y
minor_locator = AutoMinorLocator(2)
ax1.yaxis.set_minor_locator(minor_locator)
# tick font size
ax1.tick_params('x', colors='black', labelsize=12)	
ax1.tick_params('y', colors='black', labelsize=12)	

ax1.set_ylabel(r'\textbf{HV [-kV]}', fontsize=12)
ax1.set_xlabel(r'\textbf{Timestamp [minutes]}', fontsize=12, labelpad=2)

ax1.grid(b=True, which='major', linestyle='-')#, color='gray')
ax1.grid(b=True, which='minor', linestyle='--')#, color='gray')

Example #7
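# Assumed context (not shown in this fragment): this block sits inside a loop over
# chain lengths, with `k` as the loop index, `n` the current chain length, and the
# heat-capacity curves Tplot/cvplot ("WL") and Tplot_ME/cvplot_ME ("ME") computed upstream.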
   # plt.plot(Tplot,cvplot, label="N={}".format(int(n)),
   #          color=colors[k],marker=markers[k])
    WL_only = True
    # Originally gated on a "me" command-line flag (sys.argv); hard-coded on here.
    ax.plot(Tplot_ME, cvplot_ME / cvplot.max(), color=colors[k], marker=markers[k],
            fillstyle='none', ms=15, ls="", label="ME: $N={}$".format(int(n)))
    WL_only = False
    ax.plot(Tplot, cvplot / cvplot.max(), alpha=1, color=colors[k],
            dashes=ls_dashes[k], label="WL: $N={}$".format(int(n)), lw=3)
    ax.set_xlim(0,3)
    ax.set_ylim(0,1.25)
    ax.tick_params(left=True,right=True,bottom=True,top=True,which='major',length=10)
    ax.tick_params(right=True, direction='in',which='both',pad=10)
    ax.tick_params(left=True,right=True,bottom=True,top=True,which='minor',length=5)
    minor_locator_x = AutoMinorLocator(5)
    minor_locator_y = AutoMinorLocator(2)
    ax.xaxis.set_minor_locator(minor_locator_x)
    ax.yaxis.set_minor_locator(minor_locator_y)
    major_locator_x = MultipleLocator(0.5)
    major_locator_y = MultipleLocator(0.2)
    ax.xaxis.set_major_locator(major_locator_x)
    ax.yaxis.set_major_locator(major_locator_y)
    k+=1
#f.suptitle(r"Wärmekapazitäten unterschiedlich langer Einzelketten,"+
#           " auf Maximum normiert\n"+
#           r"bei gleicher Wechselwirkungsenergie $\epsilon=-0.4$")
    plt.subplots_adjust(wspace=0.075, hspace=0.05, top=0.99,bottom=0.07,
                    left=0.07,right=0.98)
    #plt.yticks([0,0.2,0.4,0.6,0.8,1])  #plt.yticks()[0][::2])
    #plt.xticks([0,1,2,3])  #plt.xticks()[0][::2])
Example #8
def plot_kappa_v_color(kappas, errs, colorkey, planck_kappas=None, act_kappas=None, transforms=None, remove_reddest=False, linfit=None, mode='color'):
    fig, ax1 = plt.subplots(figsize=(8, 7))

    #colors = np.arange(1, len(kappas)+1)*1/len(kappas) - 1/(2*len(kappas))
    colors = range(1, len(kappas)+1)
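    # x positions are simply the bin indices 1..len(kappas); the commented-out
    # line above would map them to bin centers on a 0-1 scale instead.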



    if not remove_reddest:
        lastidx = len(colors)-1
        redcolor = colors[lastidx]
        redkappa = kappas[lastidx]
        rederr = errs[lastidx]

        plt.scatter(redcolor, redkappa, edgecolors='grey', marker='s', facecolors='none')
        plt.errorbar(redcolor, redkappa, yerr=rederr, fmt='none', ecolor='grey')

        colors = colors[:lastidx]
        kappas = kappas[:lastidx]
        errs = errs[:lastidx]

    ax1.scatter(colors, kappas, c='k')
    ax1.errorbar(colors, kappas, yerr=errs, fmt='none', ecolor='k')
    if linfit is not None:
        linmod = linfit[0] * np.array(colors) + linfit[1]
        ax1.plot(colors, linmod, c='k', linestyle='--')
    #if offset:
    #ax1.set_xlabel(r'$\langle \Delta (g-i) \rangle$', fontsize=20)
    if mode == 'color':
        ax1.set_xlabel('$%s$ bin' % colorkey, fontsize=20)
    elif mode == 'bal':
        ax1.set_xlabel('BAL bin', fontsize=20)
    elif mode == 'bhmass':
        ax1.set_xlabel('BH Mass bin', fontsize=20)
    #else:
        #plt.xlabel('$g-i$', fontsize=20)
    ax1.set_ylabel(r'$\langle \kappa_{\mathrm{peak}} \rangle$', fontsize=25)
    if planck_kappas is not None:
        planck_kappas = np.array(planck_kappas)
        ax1.scatter(colors, planck_kappas[:, 0], c='cyan', label='Planck')
        ax1.errorbar(colors, planck_kappas[:, 0], yerr=planck_kappas[:, 1], fmt='none', ecolor='cyan', alpha=0.3)
    if act_kappas is not None:
        act_kappas = np.array(act_kappas)
        ax1.scatter(colors, act_kappas[:, 0], c='pink', label='ACT')
        ax1.errorbar(colors, act_kappas[:, 0], yerr=act_kappas[:, 1], ecolor='pink', fmt='none', alpha=0.3)
        plt.legend()


    if transforms is not None:
        def forward(x):
            return np.interp(x, transforms[0], transforms[1])

        def inverse(x):
            return np.interp(x, transforms[1], transforms[0])

        secax = ax1.secondary_yaxis('right', functions=(forward, inverse))
        secax.yaxis.set_minor_locator(AutoMinorLocator())

        secax.set_ylabel(r'$\langle \mathrm{log}_{10}(M/h^{-1} M_{\odot})\rangle$', fontsize=20)

    plt.savefig('plots/kappa_v_%s.pdf' % mode)
    plt.close('all')
Example #9
def plot_bias_by_bin(refnames, n_sample_bins):

    plt.close('all')
    fig, axs = plt.subplots(len(refnames), 1, sharex=True, figsize=(10, 7*len(refnames)))



    tab = fits.open('catalogs/derived/eBOSS_QSO_binned.fits')[1].data

    shift = 0.007

    nboots = 5
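    # Bootstrap the median color of each bin: resample with replacement nboots
    # times and take the spread of the bootstrap medians as the error estimate.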
    medcolors, mederrs = [], []
    for j in range(n_sample_bins):
        bintab = tab[np.where(tab['bin'] == j + 1)]
        medcolors.append(np.median(bintab['deltagmini']))
        bootmeds = []
        for k in range(nboots):
            boottab = bintab[np.random.choice(len(bintab), len(bintab))]
            bootmeds.append(np.median(boottab['deltagmini']))
        mederrs.append(np.std(bootmeds))

    medcolors = np.array(medcolors)


    for i, refname in enumerate(refnames):

        ngcbiases, sgcbiases, ngcbiaserrs, sgcbiaserrs = [], [], [], []
        for j in range(n_sample_bins):
            tmp1, tmp2 = np.load('bias/eBOSS_QSO/NGC/%s_%s.npy' % (refname, j+1), allow_pickle=True)

            ngcbiases.append(tmp1)
            ngcbiaserrs.append(tmp2)




        axs[i].scatter(medcolors - shift, ngcbiases, c='slategray', alpha=0.2, label='NGC')
        axs[i].errorbar(medcolors - shift, ngcbiases, yerr=ngcbiaserrs, fmt='none', c='slategray', alpha=0.2)

        for j in range(n_sample_bins):
            tmp1, tmp2 = np.load('bias/eBOSS_QSO/SGC/%s_%s.npy' % (refname, j + 1), allow_pickle=True)

            sgcbiases.append(tmp1)
            sgcbiaserrs.append(tmp2)
        axs[i].scatter(medcolors + shift, sgcbiases, c='saddlebrown', alpha=0.2, label='SGC')
        axs[i].errorbar(medcolors + shift, sgcbiases, yerr=sgcbiaserrs, fmt='none', c='saddlebrown', alpha=0.2)

        avg_bias = np.average([ngcbiases, sgcbiases], weights=[1/np.array(ngcbiaserrs), 1/np.array(sgcbiaserrs)], axis=0)
        avg_err = np.sqrt(np.array(ngcbiaserrs)**2 + np.array(sgcbiaserrs)**2)/2


        minb, maxb = np.min([np.min(ngcbiases), np.min(sgcbiases)]), np.max([np.max(ngcbiases), np.max(sgcbiases)])
        b_grid = np.linspace(minb, maxb, 20)
        zs, dndz = redshift_dists.redshift_dist(tab)
        masses = []
        for bb in b_grid:
            masses.append(clusteringModel.avg_bias_to_mass(bb, zs, dndz))

        transforms = [b_grid, np.log10(masses)]

        if transforms is not None:
            def forward(x):
                return np.interp(x, transforms[0], transforms[1])

            def inverse(x):
                return np.interp(x, transforms[1], transforms[0])

            secax = axs[i].secondary_yaxis('right', functions=(forward, inverse))
            secax.yaxis.set_minor_locator(AutoMinorLocator())

            secax.set_ylabel(r'$ \mathrm{log}_{10}(M_h/h^{-1} M_{\odot})$', fontsize=30, labelpad=20)
            secax.tick_params(axis='y', which='major', labelsize=25)



        axs[i].scatter(medcolors, avg_bias, c='k', label='Mean')
        axs[i].errorbar(medcolors, avg_bias, yerr=avg_err, fmt='none', c='k')
        axs[i].set_ylabel('$b_Q$', fontsize=35, rotation=0, labelpad=25)
        axs[i].legend(fontsize=20)
        axs[i].tick_params(axis='both', which='major', labelsize=25)
        #axs[i].set_title(refname, fontsize=20)

    plt.xlabel(r'$\langle \Delta (g-i) \rangle$', fontsize=35)

    plt.savefig('plots/rel_bias.pdf')
    plt.close('all')
Example #10
             yerr=mag1sig,
             linestyle='None',
             color='grey',
             linewidth=1,
             zorder=2)

plt.axhline(y=np.median(mag1), color='k', ls=':')

xlowerlim1, xupperlim1, ylowerlim1, yupperlim1 = 56400, 57800, 17.0, 20.0

ax1.set_xlim([xlowerlim1, xupperlim1])
ax1.set_ylim([ylowerlim1, yupperlim1])
ax1.invert_yaxis()
plt.xlabel('Date (MJD)', fontsize=14)
plt.ylabel('R Magnitude', fontsize=14)
minorLocator = AutoMinorLocator()
minorLocator2 = AutoMinorLocator()
ax1.xaxis.set_minor_locator(minorLocator)
ax1.yaxis.set_minor_locator(minorLocator2)

# Shaded area to denote uncertainty of median (average of mag1sig)
ax1.add_patch(
    patches.Rectangle(
        (xlowerlim1, np.median(mag1) - 5 * np.average(mag1sig)),  # (x, y)
        xupperlim1 - xlowerlim1,  # width 
        10 * np.average(mag1sig),  # height
        0.0,  # angle
        facecolor='lightgrey',
        edgecolor='none',
        zorder=1))
Example #11
# Load the file
results1 = loadmat("../results/results3_1.mat")

print("results1:")
print(list(results1.keys()))

W1 = results1["W"]
error1 = 10 * np.log10(results1["errav"])
N1 = results1["ns"]
nw1 = results1["n"]

plt.figure(1)
plt.plot(np.arange(0, N1, 200), error1[::200, 0])
ax = plt.gca()
ax.set_xlim(left=0, right=N1)
ax.xaxis.set_minor_locator(AutoMinorLocator(n=5))
ax.grid(b=True, which="major", color="silver", linestyle="-")
ax.grid(b=True, which="minor", color="lightgray", linestyle="--")
ax.set_ylabel("Error (dB)")
ax.set_xlabel("Iteration")
ax.set_title("Error evolution for \(x_1\)")
ax.ticklabel_format(style="sci", axis="x", scilimits=(0, 0))
fname = f"ex3_x1_error"
plt.savefig("../results/" + fname + ".png", dpi=600, bbox_inches="tight")
plt.savefig("../report/figures/pdf/" + fname + ".pdf", bbox_inches="tight")
plt.close()

plt.figure(2)
for i in range(W1.shape[0]):
    plt.plot(np.arange(0, N1 + 1, 200), W1[i, ::200], "-", linewidth=1)
ax = plt.gca()
Example #12
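# Assumed context (not part of the original snippet): arrays X, Y, a polynomial
# feature matrix X_poly, a fitted regressor `reg`, a `dates` sequence with a
# get_ticks() helper, and fig, ax = plt.subplots(1, 2) are defined upstream.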
Y_pred = reg.predict(X_poly)

print(reg.coef_)
print('Coefficient of determination: %.3f'
	  % r2_score(Y, Y_pred))
			
ax[0].plot(X, Y,      linewidth=1, color="#008ae6")
ax[1].plot(X, Y,      linewidth=1, color="#008ae6")
ax[0].plot(X, Y_pred, linewidth=1, color="#e60000")
ax[1].fill([0, *X, X.max()], [0, *Y, 0], color="#80ccff")


for i in range(2):
    ax[i].set_ylabel('Стоимость (грн.)')  # "Cost (UAH)"

    ax[i].set(xlim=(0, X.max()), ylim=(0, Y.max() * 1.25))
    ticks, labels = get_ticks(dates)

    ax[i].set_xticks(ticks=ticks, minor=False)
    ax[i].set_xticklabels(labels=labels)
    ax[i].tick_params(axis='x', which='major', labelsize=10)
    ax[i].yaxis.set_minor_locator(AutoMinorLocator())

    ax[i].grid(axis='y',    which='minor', color='#c7c7c7', linewidth=0.4)
    ax[i].grid(axis='both', which='major', color='#a2a2a2', linewidth=0.75)


fig.set_figwidth(13)
fig.set_figheight(6)
plt.show()
Example #13
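# Assumed context (not shown in this snippet): `dates`, `highs`, `rms` and
# `title_date` come from the parsed noise log; the hour locator/formatter could be,
# hypothetically: hours = matplotlib.dates.HourLocator(); h_fmt = matplotlib.dates.DateFormatter('%H:%M')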
#Patches
peak_patch = mpatches.Patch(color='red', label='Maximum Amplitude')
rms_patch = mpatches.Patch(color='navy', label='Mean')
plt.legend(handles=[peak_patch, rms_patch])

#For a scatter plot use this: ax.scatter(dates, highs, color = 'red', linewidth = 0.1, s=4)
ax.plot(dates, highs, color = 'red', linewidth = 0.5)
ax.xaxis.set_major_locator(hours)
ax.xaxis.set_major_formatter(h_fmt)

#ax.scatter(dates, rms, color = 'firebrick', linewidth = 0.1, s=4)
ax.plot(dates, rms, color = 'navy', linewidth = 0.5)

#minorlocator for quarter of an hour 
minor_locator = AutoMinorLocator(8)
ax.xaxis.set_minor_locator(minor_locator)
plt.grid(which='minor', linestyle=':')

#Title,Label
plt.ylabel('noise level in dB', fontsize=12)
plt.title('Noise Level Protocol of ' + title_date, fontsize=15)
plt.grid(True)

#y axis
plt.ylim (    
    ymin = 20,
    ymax = 80    
)

#noise limit
Example #14
def plot_map(pdata):
    '''
    Draw PC time series on the top, and
       draw global map where dateline is on the center
    '''

    ###--- Create a figure
    fig = plt.figure()
    fig.set_size_inches(6, 10)  ## (xsize,ysize)

    ###--- Suptitle
    fig.suptitle(pdata['suptit'],
                 fontsize=16,
                 y=0.97,
                 va='bottom',
                 stretch='semi-condensed')

    ###--- Axes setting
    nk = len(pdata['tgt_nums'])  # Number of data to show
    left, right, top, bottom = 0.07, 0.93, 0.925, 0.1
    npnx, gapx, npny, gapy = 1, 0.05, nk + 1, 0.064
    lx = (right - left - gapx * (npnx - 1)) / npnx
    ly = (top - bottom - gapy * (npny - 1)) / npny
    ix, iy = left, top
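    # Panel layout bookkeeping: ix stays fixed (single column) while iy walks down
    # the page by one panel height plus a gap after each panel is drawn.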

    ###--- Top panel: PC time series
    ax1 = fig.add_axes([ix, iy - ly, lx, ly])
    colors = plt.cm.tab10(np.linspace(0.05, 0.95, 10))
    for i, k in enumerate(pdata['tgt_nums']):
        ax1.plot_date(pdata['mon_list'],
                      pdata['pc'][:, k],
                      fmt='-',
                      c=colors[i],
                      lw=2.5,
                      alpha=0.85,
                      label='PC{}'.format(k))
    iy = iy - ly - gapy
    subtit = '(a) '
    ax1.set_title(subtit, fontsize=12, ha='left', x=0.0)
    ax1.legend(bbox_to_anchor=(0.08, 1.02, .92, .10),
               loc='lower left',
               ncol=nk,
               mode="expand",
               borderaxespad=0.,
               fontsize=10)
    ax1.axhline(y=0., c='k', lw=0.8, ls='--')
    ax1.grid(ls=':')
    ax1.xaxis.set_major_formatter(DateFormatter('%b%Y'))
    ax1.yaxis.set_minor_locator(AutoMinorLocator(2))
    ax1.yaxis.set_ticks_position('both')
    ax1.tick_params(axis='both', labelsize=10)

    ###--- Next, draw global maps
    ###--- Map Projection
    center = 180  # Want to draw a map where dateline is on the center
    proj = ccrs.PlateCarree(central_longitude=center)
    data_crs = ccrs.PlateCarree()

    map_extent = [0., 359.9, -61, 61]  # Range to be shown
    img_range = pdata['img_bound']

    val_max = max(np.nanmin(pdata['ev_map']) * -1, np.nanmax(pdata['ev_map']))
    val_min, val_max = val_max * -0.9, val_max * 0.9
    abc = 'abcdefgh'

    ###--- Color map
    cm = plt.cm.get_cmap('RdBu_r').copy()
    cm.set_bad('0.9')  # For the gridcell of NaN

    props = dict(vmin=val_min,
                 vmax=val_max,
                 origin='lower',
                 extent=img_range,
                 cmap=cm,
                 transform=data_crs)

    for i, (data, k) in enumerate(zip(pdata['ev_map'], pdata['tgt_nums'])):
        ax2 = fig.add_axes([ix, iy - ly, lx, ly], projection=proj)
        ax2.set_extent(map_extent, crs=data_crs)
        map1 = ax2.imshow(data, **props)

        subtit = '({}) EOF{}'.format(abc[i + 1], k)
        vf.map_common(ax2,
                      subtit,
                      data_crs,
                      xloc=60,
                      yloc=20,
                      gl_lab_locator=[True, True, False, True])

        iy = iy - ly - gapy
    vf.draw_colorbar(fig,
                     ax2,
                     map1,
                     type='horizontal',
                     size='panel',
                     gap=0.06,
                     extend='both',
                     width=0.02)

    ##-- Seeing or Saving Pic --##
    outfnm = pdata['out_fnm']
    print(outfnm)
    #fig.savefig(outfnm,dpi=100)   # dpi: pixels per inch
    fig.savefig(outfnm, dpi=150, bbox_inches='tight')  # dpi: pixels per inch
    # Default: facecolor='w', edgecolor='w', transparent=False
    plt.show()
    return
Example #15
def pyorbit_getresults(config_in, sampler, plot_dictionary):
    try:
        use_tex = config_in['parameters']['use_tex']
    except:
        use_tex = True

    if use_tex is False:
        print(' LaTeX disabled')

    if plot_dictionary['use_getdist']:
        from getdist import plots, MCSamples

    # plt.rc('font', **{'family': 'serif', 'serif': ['Computer Modern Roman']})
    plt.rcParams["font.family"] = "Times New Roman"
    plt.rc('text', usetex=use_tex)

    sample_keyword = {
        'multinest': ['multinest', 'MultiNest', 'multi'],
        'polychord': ['polychord', 'PolyChord', 'polychrod', 'poly'],
        'emcee': ['emcee', 'MCMC', 'Emcee']
    }

    if sampler in sample_keyword['emcee']:

        dir_input = './' + config_in['output'] + '/emcee/'
        dir_output = './' + config_in['output'] + '/emcee_plot/'
        os.system('mkdir -p ' + dir_output)

        mc, starting_point, population, prob, state, \
        sampler_chain, sampler_lnprobability, sampler_acceptance_fraction, _ = \
            emcee_load_from_cpickle(dir_input)

        pars_input(config_in, mc, reload_emcee=True)

        import emcee  # needed unconditionally: emcee.__version__ is printed further below
        if hasattr(mc.emcee_parameters, 'version'):
            emcee_version = mc.emcee_parameters['version'][0]
        else:
            emcee_version = emcee.__version__[0]

        mc.model_setup()
        """ Required to create the right objects inside each class - if defined inside """
        theta_dictionary = results_analysis.get_theta_dictionary(mc)

        nburnin = int(mc.emcee_parameters['nburn'])
        nthin = int(mc.emcee_parameters['thin'])
        nsteps = int(sampler_chain.shape[1] * nthin)

        flat_chain = emcee_flatchain(sampler_chain, nburnin, nthin)
        flat_lnprob = emcee_flatlnprob(sampler_lnprobability, nburnin, nthin,
                                       emcee_version)

        flat_BiC = -2 * flat_lnprob + mc.ndim * np.log(mc.ndata)

        lnprob_med = common.compute_value_sigma(flat_lnprob)
        chain_med = common.compute_value_sigma(flat_chain)
        chain_MAP, lnprob_MAP = common.pick_MAP_parameters(
            flat_chain, flat_lnprob)

        n_samplings, n_pams = np.shape(flat_chain)

        print()
        print('emcee version: ', emcee.__version__)
        if mc.emcee_parameters['version'] == '2':
            print('WARNING: upgrading to version 3 is strongly advised')
        print()
        print(' Reference Time Tref: {}'.format(mc.Tref))
        print()
        print(' Dimensions = {}'.format(mc.ndim))
        print(' Nwalkers = {}'.format(mc.emcee_parameters['nwalkers']))
        print()
        print(' Steps: {}'.format(nsteps))

        results_analysis.print_integrated_ACF(sampler_chain, theta_dictionary,
                                              nthin)

    if sampler in sample_keyword['multinest']:
        plot_dictionary['lnprob_chain'] = False
        plot_dictionary['chains'] = False
        plot_dictionary['traces'] = False

        dir_input = './' + config_in['output'] + '/multinest/'
        dir_output = './' + config_in['output'] + '/multinest_plot/'
        os.system('mkdir -p ' + dir_output)

        mc = nested_sampling_load_from_cpickle(dir_input)

        mc.model_setup()
        mc.initialize_logchi2()
        results_analysis.results_resumen(mc, None, skip_theta=True)
        """ Required to create the right objects inside each class - if defined inside """
        theta_dictionary = results_analysis.get_theta_dictionary(mc)

        data_in = np.genfromtxt(dir_input + 'post_equal_weights.dat')
        flat_lnprob = data_in[:, -1]
        flat_chain = data_in[:, :-1]
        # nsample = np.size(flat_lnprob)
        n_samplings, n_pams = np.shape(flat_chain)

        lnprob_med = common.compute_value_sigma(flat_lnprob)
        chain_med = common.compute_value_sigma(flat_chain)
        chain_MAP, lnprob_MAP = common.pick_MAP_parameters(
            flat_chain, flat_lnprob)

        print()
        print(' Reference Time Tref: {}'.format(mc.Tref))
        print()
        print(' Dimensions: {}'.format(mc.ndim))
        print()
        print(' Samples: {}'.format(n_samplings))

    if sampler in sample_keyword['polychord']:
        plot_dictionary['lnprob_chain'] = False
        plot_dictionary['chains'] = False
        plot_dictionary['traces'] = False

        dir_input = './' + config_in['output'] + '/polychord/'
        dir_output = './' + config_in['output'] + '/polychord_plot/'
        os.system('mkdir -p ' + dir_output)

        mc = nested_sampling_load_from_cpickle(dir_input)

        # pars_input(config_in, mc)

        mc.model_setup()
        mc.initialize_logchi2()
        results_analysis.results_resumen(mc, None, skip_theta=True)
        """ Required to create the right objects inside each class - if defined inside """
        theta_dictionary = results_analysis.get_theta_dictionary(mc)

        data_in = np.genfromtxt(dir_input + 'pyorbit_equal_weights.txt')
        flat_lnprob = data_in[:, 1]
        flat_chain = data_in[:, 2:]
        # nsample = np.size(flat_lnprob)

        n_samplings, n_pams = np.shape(flat_chain)

        lnprob_med = common.compute_value_sigma(flat_lnprob)
        chain_med = common.compute_value_sigma(flat_chain)

        chain_MAP, lnprob_MAP = common.pick_MAP_parameters(
            flat_chain, flat_lnprob)

        print()
        print(' Reference Time Tref: {}'.format(mc.Tref))
        print()
        print(' Dimensions: {}'.format(mc.ndim))
        print()
        print(' Samples: {}'.format(n_samplings))

    print()
    print(' LN posterior: {0:12f}   {1:12f} {2:12f} (15-84 p) '.format(
        lnprob_med[0], lnprob_med[2], lnprob_med[1]))

    MAP_log_priors, MAP_log_likelihood = mc.log_priors_likelihood(chain_MAP)
    BIC = -2.0 * MAP_log_likelihood + np.log(mc.ndata) * mc.ndim
    AIC = -2.0 * MAP_log_likelihood + 2.0 * mc.ndim
    AICc = AIC + (2.0 + 2.0 * mc.ndim) * mc.ndim / (mc.ndata - mc.ndim - 1.0)
    # AICc for small sample

    print()
    print(' MAP log_priors     = {}'.format(MAP_log_priors))
    print(' MAP log_likelihood = {}'.format(MAP_log_likelihood))
    print(' MAP BIC  (using likelihood) = {}'.format(BIC))
    print(' MAP AIC  (using likelihood) = {}'.format(AIC))
    print(' MAP AICc (using likelihood) = {}'.format(AICc))

    MAP_log_posterior = MAP_log_likelihood + MAP_log_priors
    BIC = -2.0 * MAP_log_posterior + np.log(mc.ndata) * mc.ndim
    AIC = -2.0 * MAP_log_posterior + 2.0 * mc.ndim
    AICc = AIC + (2.0 + 2.0 * mc.ndim) * mc.ndim / (mc.ndata - mc.ndim - 1.0)

    print()
    print(' MAP BIC  (using posterior)  = {}'.format(BIC))
    print(' MAP AIC  (using posterior)  = {}'.format(AIC))
    print(' MAP AICc (using posterior)  = {}'.format(AICc))

    if mc.ndata < 40 * mc.ndim:
        print()
        print(
            ' AICc suggested over AIC because NDATA ( {0:12f} ) < 40 * NDIM ( {1:12f} )'
            .format(mc.ndata, mc.ndim))
    else:
        print()
        print(
            ' AIC suggested over AICc because NDATA ( {0:12f} ) > 40 * NDIM ( {1:12f} )'
            .format(mc.ndata, mc.ndim))

    print()
    print(
        '****************************************************************************************************'
    )
    print(
        '****************************************************************************************************'
    )
    print()
    print(
        ' Confidence intervals (median value, 34.135th percentile from the median on the left and right side)'
    )

    planet_variables = results_analysis.results_resumen(mc,
                                                        flat_chain,
                                                        chain_med=chain_MAP,
                                                        return_samples=True)

    print()
    print(
        '****************************************************************************************************'
    )
    print()
    print(
        ' Parameters corresponding to the Maximum a Posteriori probability ( {} )'
        .format(lnprob_MAP))
    print()

    results_analysis.results_resumen(mc, chain_MAP)

    print()
    print(
        '****************************************************************************************************'
    )
    print()

    # Computation of all the planetary variables
    planet_variables_med = results_analysis.get_planet_variables(
        mc, chain_med[:, 0])
    star_variables = results_analysis.get_stellar_parameters(
        mc, chain_med[:, 0])

    planet_variables_MAP = results_analysis.get_planet_variables(mc, chain_MAP)
    star_variables_MAP = results_analysis.get_stellar_parameters(mc, chain_MAP)

    if plot_dictionary['lnprob_chain'] or plot_dictionary['chains']:

        print(' Plot FLAT chain ')

        if emcee_version == '2':
            fig = plt.figure(figsize=(12, 12))
            plt.xlabel(r'$\ln \mathcal{L}$')
            plt.plot(sampler_lnprobability.T, '-', alpha=0.5)
            plt.axhline(lnprob_med[0])
            plt.axvline(nburnin / nthin, c='r')
            plt.savefig(dir_output + 'LNprob_chain.png',
                        bbox_inches='tight',
                        dpi=300)
            plt.close(fig)
        else:
            fig = plt.figure(figsize=(12, 12))
            plt.xlabel(r'$\ln \mathcal{L}$')
            plt.plot(sampler_lnprobability, '-', alpha=0.5)
            plt.axhline(lnprob_med[0])
            plt.axvline(nburnin / nthin, c='r')
            plt.savefig(dir_output + 'LNprob_chain.png',
                        bbox_inches='tight',
                        dpi=300)
            plt.close(fig)

        print()
        print(
            '****************************************************************************************************'
        )
        print()

    if plot_dictionary['full_correlation']:

        corner_plot = {
            'samples':
            np.zeros(
                [np.size(flat_chain, axis=0),
                 np.size(flat_chain, axis=1) + 1]),
            'labels': [],
            'truths': []
        }
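        # One column per free parameter, plus a final column holding the
        # log-posterior of each sample.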

        i_corner = 0
        for var, var_dict in theta_dictionary.items():
            corner_plot['samples'][:, i_corner] = flat_chain[:, var_dict]
            corner_plot['labels'].append(re.sub('_', '-', var))
            corner_plot['truths'].append(chain_med[var_dict, 0])
            i_corner += 1

        corner_plot['samples'][:, -1] = flat_lnprob[:]
        corner_plot['labels'].append('ln-prob')
        corner_plot['truths'].append(lnprob_med[0])

        if plot_dictionary['use_getdist']:
            print(' Plotting full_correlation plot with GetDist')
            print()
            print(' Ignore the no burn in error warning from getdist')
            print(' since burn in has been already removed from the chains')

            plt.rc('text', usetex=False)

            samples = MCSamples(samples=corner_plot['samples'],
                                names=corner_plot['labels'],
                                labels=corner_plot['labels'])

            g = plots.getSubplotPlotter()
            g.settings.num_plot_contours = 6
            g.triangle_plot(samples, filled=True)
            g.export(dir_output + "all_internal_variables_corner_getdist.pdf")

            print()

        else:
            # plotting mega-corner plot
            print('Plotting full_correlation plot with Corner')
            plt.rc('text', usetex=False)

            fig = corner.corner(corner_plot['samples'],
                                labels=corner_plot['labels'],
                                truths=corner_plot['truths'])
            fig.savefig(dir_output + "all_internal_variables_corner_dfm.pdf",
                        bbox_inches='tight',
                        dpi=300)
            plt.close(fig)
            plt.rc('text', usetex=use_tex)

        print()
        print(
            '****************************************************************************************************'
        )
        print()

    if plot_dictionary['chains']:

        print(' Plotting the chains... ')

        os.system('mkdir -p ' + dir_output + 'chains')
        for theta_name, ii in theta_dictionary.items():
            file_name = dir_output + 'chains/' + repr(
                ii) + '_' + theta_name + '.png'
            fig = plt.figure(figsize=(12, 12))
            plt.plot(sampler_chain[:, :, ii].T, '-', alpha=0.5)
            plt.axvline(nburnin / nthin, c='r')
            plt.savefig(file_name, bbox_inches='tight', dpi=300)
            plt.close(fig)

        print()
        print(
            '****************************************************************************************************'
        )
        print()

    if plot_dictionary['traces']:

        print(' Plotting the Gelman-Rubin traces... ')
        print()
        """
        Gelman-Rubin traces are stored in the dedicated folder iniside the _plot folder
        Note that the GR statistics is not robust because the wlakers are not independent 
        """
        os.system('mkdir -p ' + dir_output + 'gr_traces')

        step_sampling = np.arange(nburnin / nthin,
                                  nsteps / nthin,
                                  1,
                                  dtype=int)

        for theta_name, th in theta_dictionary.items():
            rhat = np.array([
                GelmanRubin_v2(sampler_chain[:, :steps, th])
                for steps in step_sampling
            ])
            print('     Gelman-Rubin: {0:5d} {1:12f} {2:s} '.format(
                th, rhat[-1], theta_name))
            file_name = dir_output + 'gr_traces/v2_' + repr(
                th) + '_' + theta_name + '.png'
            fig = plt.figure(figsize=(12, 12))
            plt.plot(step_sampling, rhat[:], '-', color='k')
            plt.axhline(1.01, c='C0')
            plt.savefig(file_name, bbox_inches='tight', dpi=300)
            plt.close(fig)

        print()
        print(
            '****************************************************************************************************'
        )
        print()

    if plot_dictionary['common_corner']:

        print(' Plotting the common models corner plots')

        plt.rc('text', usetex=False)
        for common_name, common_model in mc.common_models.items():

            print('     Common model: ', common_name)

            corner_plot = {
                'var_list': [],
                'samples': [],
                'labels': [],
                'truths': []
            }
            variable_values = common_model.convert(flat_chain)
            variable_median = common_model.convert(chain_med[:, 0])

            if len(variable_median) < 1:
                continue
            """
            Check if the eccentricity and argument of pericenter were set as free parameters or fixed by simply
            checking the size of their distribution
            """
            for var in variable_values.keys():
                if np.size(variable_values[var]) == 1:
                    variable_values[var] = variable_values[var] * np.ones(
                        n_samplings)
                else:
                    corner_plot['var_list'].append(var)

            corner_plot['samples'] = []
            corner_plot['labels'] = []
            corner_plot['truths'] = []
            for var_i, var in enumerate(corner_plot['var_list']):
                corner_plot['samples'].extend([variable_values[var]])
                corner_plot['labels'].append(var)
                corner_plot['truths'].append(variable_median[var])
            """ Check if the semi-amplitude K is among the parameters that have been fitted. 
                If so, it computes the correpsing planetary mass with uncertainty """

            fig = corner.corner(np.asarray(corner_plot['samples']).T,
                                labels=corner_plot['labels'],
                                truths=corner_plot['truths'])
            fig.savefig(dir_output + common_name + "_corners.pdf",
                        bbox_inches='tight',
                        dpi=300)
            plt.close(fig)

        print()
        print(
            '****************************************************************************************************'
        )
        print()

    if plot_dictionary['dataset_corner']:

        print(' Dataset + models corner plots ')
        print()

        for dataset_name, dataset in mc.dataset_dict.items():

            for model_name in dataset.models:

                variable_values = dataset.convert(flat_chain)
                variable_median = dataset.convert(chain_med[:, 0])

                for common_ref in mc.models[model_name].common_ref:
                    variable_values.update(
                        mc.common_models[common_ref].convert(flat_chain))
                    variable_median.update(
                        mc.common_models[common_ref].convert(chain_med[:, 0]))

                variable_values.update(mc.models[model_name].convert(
                    flat_chain, dataset_name))
                variable_median.update(mc.models[model_name].convert(
                    chain_med[:, 0], dataset_name))

                corner_plot['samples'] = []
                corner_plot['labels'] = []
                corner_plot['truths'] = []
                for var_i, var in enumerate(variable_values):
                    if np.size(variable_values[var]) <= 1: continue
                    corner_plot['samples'].extend([variable_values[var]])
                    corner_plot['labels'].append(var)
                    corner_plot['truths'].append(variable_median[var])

                fig = corner.corner(np.asarray(corner_plot['samples']).T,
                                    labels=corner_plot['labels'],
                                    truths=corner_plot['truths'])
                fig.savefig(dir_output + dataset_name + '_' + model_name +
                            "_corners.pdf",
                            bbox_inches='tight',
                            dpi=300)
                plt.close(fig)

                print('     Dataset: ', dataset_name, '    model: ',
                      model_name, ' corner plot  done ')

        print()
        print(
            '****************************************************************************************************'
        )
        print()

    if plot_dictionary['write_planet_samples']:

        print(' Saving the planet variable samplings to files (with plots)')

        samples_dir = dir_output + '/planet_samples/'
        os.system('mkdir -p ' + samples_dir)

        for common_ref, variable_values in planet_variables.items():
            for variable_name, variable in variable_values.items():

                rad_filename = samples_dir + common_ref + '_' + variable_name
                fileout = open(rad_filename + '.dat', 'w')
                for val in variable:
                    fileout.write('{0:f} \n'.format(val))
                fileout.close()

                fig = plt.figure(figsize=(10, 10))
                plt.hist(variable, bins=50, color='C0', alpha=0.75, zorder=0)

                perc0, perc1, perc2 = np.percentile(variable,
                                                    [15.865, 50, 84.135],
                                                    axis=0)

                plt.axvline(planet_variables_med[common_ref][variable_name],
                            color='C1',
                            zorder=1,
                            label='Median-corresponding value')
                plt.axvline(planet_variables_MAP[common_ref][variable_name],
                            color='C2',
                            zorder=1,
                            label='MAP-corresponding value')
                plt.axvline(perc1,
                            color='C3',
                            zorder=2,
                            label='Median of the distribution')
                plt.axvline(perc0,
                            color='C4',
                            zorder=2,
                            label='15.865th and 84.135th percentile')
                plt.axvline(perc2, color='C4', zorder=2)
                plt.xlabel(re.sub('_', '-', variable_name + '_' + common_ref))
                plt.legend()
                plt.ticklabel_format(useOffset=False)
                plt.savefig(rad_filename + '.png',
                            bbox_inches='tight',
                            dpi=300)
                plt.close(fig)

        print()
        print(
            '****************************************************************************************************'
        )
        print()

    if plot_dictionary['plot_models'] or plot_dictionary['write_models']:

        print(' Computing the models for plot/data writing ')

        bjd_plot = {'full': {'start': None, 'end': None, 'range': None}}

        kinds = {}

        P_minimum = 2.0  # this temporal range will be divided in 20 subsets
        for key_name, key_val in planet_variables_med.items():
            P_minimum = min(key_val.get('P', 2.0), P_minimum)

        for dataset_name, dataset in mc.dataset_dict.items():
            if dataset.kind in kinds.keys():
                kinds[dataset.kind].extend([dataset_name])
            else:
                kinds[dataset.kind] = [dataset_name]

            bjd_plot[dataset_name] = {
                'start': np.amin(dataset.x),
                'end': np.amax(dataset.x),
                'range': np.amax(dataset.x) - np.amin(dataset.x),
            }

            if bjd_plot[dataset_name]['range'] < 0.1:
                bjd_plot[dataset_name]['range'] = 0.1

            bjd_plot[dataset_name][
                'start'] -= bjd_plot[dataset_name]['range'] * 0.10
            bjd_plot[dataset_name][
                'end'] += bjd_plot[dataset_name]['range'] * 0.10

            if dataset.kind == 'Phot':
                step_size = np.min(bjd_plot[dataset_name]['range'] /
                                   dataset.n / 10.)
            else:
                step_size = P_minimum / 20.

            bjd_plot[dataset_name]['x_plot'] = \
                np.arange(bjd_plot[dataset_name]['start'], bjd_plot[dataset_name]['end'], step_size)

            if bjd_plot['full']['range']:
                bjd_plot['full']['start'] = min(bjd_plot['full']['start'],
                                                np.amin(dataset.x))
                bjd_plot['full']['end'] = max(bjd_plot['full']['end'],
                                              np.amax(dataset.x))
                bjd_plot['full']['range'] = bjd_plot['full']['end'] - bjd_plot[
                    'full']['start']
            else:
                bjd_plot['full']['start'] = np.amin(dataset.x)
                bjd_plot['full']['end'] = np.amax(dataset.x)
                bjd_plot['full']['range'] = bjd_plot['full']['end'] - bjd_plot[
                    'full']['start']

        bjd_plot['full']['start'] -= bjd_plot['full']['range'] * 0.10
        bjd_plot['full']['end'] += bjd_plot['full']['range'] * 0.10
        bjd_plot['full']['x_plot'] = np.arange(bjd_plot['full']['start'],
                                               bjd_plot['full']['end'],
                                               P_minimum / 20.)

        for dataset_name, dataset in mc.dataset_dict.items():
            if dataset.kind == 'RV':
                bjd_plot[dataset_name] = bjd_plot['full']

        bjd_plot['model_out'], bjd_plot[
            'model_x'] = results_analysis.get_model(mc, chain_med[:, 0],
                                                    bjd_plot)
        bjd_plot['MAP_model_out'], bjd_plot[
            'MAP_model_x'] = results_analysis.get_model(
                mc, chain_MAP, bjd_plot)

        if plot_dictionary['plot_models']:
            print(' Writing the plots ')

            for kind_name, kind in kinds.items():
                for dataset_name in kind:

                    try:
                        error_bars = np.sqrt(
                            mc.dataset_dict[dataset_name].e**2 +
                            bjd_plot['model_out'][dataset_name]['jitter']**2)
                    except ValueError:
                        error_bars = mc.dataset_dict[dataset_name].e

                    fig = plt.figure(figsize=(12, 12))

                    # Partially taken from here:
                    # http://www.sc.eso.org/~bdias/pycoffee/codes/20160407/gridspec_demo.html
                    gs = gridspec.GridSpec(2, 1, height_ratios=[3.0, 1.0])
                    # Also make sure the margins and spacing are apropriate
                    gs.update(left=0.3,
                              right=0.95,
                              bottom=0.08,
                              top=0.93,
                              wspace=0.15,
                              hspace=0.05)

                    ax_0 = plt.subplot(gs[0])
                    ax_1 = plt.subplot(gs[1], sharex=ax_0)

                    # Adding minor ticks only to the x axis; each axes needs its
                    # own locator instance, so create one per axes.
                    ax_0.xaxis.set_minor_locator(AutoMinorLocator())
                    ax_1.xaxis.set_minor_locator(AutoMinorLocator())

                    # Disabling the offset on top of the plot
                    ax_0.ticklabel_format(useOffset=False)
                    ax_1.ticklabel_format(useOffset=False)

                    ax_0.scatter(
                        mc.dataset_dict[dataset_name].x,
                        mc.dataset_dict[dataset_name].y -
                        bjd_plot['model_out'][dataset_name]['systematics'] -
                        bjd_plot['model_out'][dataset_name]
                        ['time_independent'],
                        color='C0',
                        zorder=4,
                        s=16)
                    ax_0.errorbar(
                        mc.dataset_dict[dataset_name].x,
                        mc.dataset_dict[dataset_name].y -
                        bjd_plot['model_out'][dataset_name]['systematics'] -
                        bjd_plot['model_out'][dataset_name]
                        ['time_independent'],
                        yerr=error_bars,
                        color='C0',
                        fmt='o',
                        ms=0,
                        zorder=3,
                        alpha=0.5)
                    ax_0.plot(bjd_plot[dataset_name]['x_plot'],
                              bjd_plot['model_x'][dataset_name]['complete'],
                              label='Median-corresponding model',
                              color='C1',
                              zorder=2)
                    ax_0.plot(
                        bjd_plot[dataset_name]['x_plot'],
                        bjd_plot['MAP_model_x'][dataset_name]['complete'],
                        label='MAP-corresponding model',
                        color='C2',
                        zorder=1)

                    ax_0.set_ylabel('Same as input data')
                    ax_0.legend()

                    ax_1.scatter(
                        mc.dataset_dict[dataset_name].x,
                        mc.dataset_dict[dataset_name].y -
                        bjd_plot['model_out'][dataset_name]['complete'],
                        color='C0',
                        zorder=4,
                        s=16)
                    ax_1.errorbar(
                        mc.dataset_dict[dataset_name].x,
                        mc.dataset_dict[dataset_name].y -
                        bjd_plot['model_out'][dataset_name]['complete'],
                        yerr=error_bars,
                        color='C0',
                        fmt='o',
                        ms=0,
                        zorder=3,
                        alpha=0.5)
                    ax_1.axhline(0.0, color='k', alpha=0.5, zorder=0)

                    ax_1.set_xlabel('Time [d] (offset as the input data)')
                    ax_1.set_ylabel('Residuals (wrt median model)')

                    plt.savefig(dir_output + 'model_' + kind_name + '_' +
                                dataset_name + '.png',
                                bbox_inches='tight',
                                dpi=300)
                    plt.close(fig)

        if plot_dictionary['write_models']:

            for prepend_keyword in ['', 'MAP_']:

                print(' Writing the ', prepend_keyword, 'data files ')

                plot_out_keyword = prepend_keyword + 'model_out'
                plot_x_keyword = prepend_keyword + 'model_x'
                file_keyword = prepend_keyword + 'model_files'

                if prepend_keyword == '':
                    planet_vars = planet_variables_med
                    # star_vars = star_variables # leaving here, it could be useful for the future
                    chain_ref = chain_med[:, 0]
                elif prepend_keyword == 'MAP_':
                    planet_vars = planet_variables_MAP
                    # star_vars = star_variables_MAP
                    chain_ref = chain_MAP

                dir_models = dir_output + file_keyword + '/'
                os.makedirs(dir_models, exist_ok=True)

                for dataset_name, dataset in mc.dataset_dict.items():
                    for model_name in dataset.models:

                        if getattr(mc.models[model_name], 'systematic_model',
                                   False):
                            continue

                        fileout = open(
                            dir_models + dataset_name + '_' + model_name +
                            '.dat', 'w')

                        phase = np.zeros(dataset.n)
                        tc_folded = np.zeros(dataset.n)
                        phase_plot = np.zeros(
                            np.size(bjd_plot[dataset_name]['x_plot']))
                        tc_folded_plot = np.zeros(
                            np.size(bjd_plot[dataset_name]['x_plot']))

                        for common_ref in mc.models[model_name].common_ref:
                            if common_ref in planet_vars:
                                if 'P' in planet_vars[common_ref]:
                                    phase = (dataset.x0 /
                                             planet_vars[common_ref]['P']) % 1
                                    phase_plot = (
                                        (bjd_plot[dataset_name]['x_plot'] -
                                         mc.Tref) /
                                        planet_vars[common_ref]['P']) % 1
                                    if 'Tc' in planet_vars[common_ref]:
                                        tc_folded = (dataset.x - planet_vars[common_ref]['Tc']
                                                     + planet_vars[common_ref]['P'] / 2.) \
                                                    % planet_vars[common_ref]['P'] \
                                                    - planet_vars[common_ref]['P'] / 2.
                                        tc_folded_plot = (bjd_plot[dataset_name]['x_plot'] - planet_vars[common_ref][
                                            'Tc']
                                                          + planet_vars[common_ref]['P'] / 2.) \
                                                         % planet_vars[common_ref]['P'] \
                                                         - planet_vars[common_ref]['P'] / 2.
                                    else:
                                        tc_folded = dataset.x0 % planet_vars[
                                            common_ref]['P']
                                        tc_folded_plot = (bjd_plot[dataset_name]['x_plot'] - mc.Tref) % \
                                                         planet_vars[common_ref]['P']

                        fileout.write(
                            'descriptor BJD Tc_folded pha val,+- sys mod full val_compare,+- res,+- jit \n'
                        )

                        try:
                            len(bjd_plot[plot_out_keyword][dataset_name]
                                [model_name])
                        except:
                            bjd_plot[plot_out_keyword][dataset_name][model_name] = \
                                bjd_plot[plot_out_keyword][dataset_name][model_name] * np.ones(dataset.n)

                            bjd_plot[plot_x_keyword][dataset_name][model_name] = \
                                bjd_plot[plot_x_keyword][dataset_name][model_name] * np.ones(dataset.n)

                        for x, tcf, pha, y, e, sys, mod, com, obs_mod, res, jit in zip(
                                dataset.x, tc_folded, phase, dataset.y,
                                dataset.e, bjd_plot[plot_out_keyword]
                            [dataset_name]['systematics'],
                                bjd_plot[plot_out_keyword][dataset_name]
                            [model_name], bjd_plot[plot_out_keyword]
                            [dataset_name]['complete'], dataset.y -
                                bjd_plot[plot_out_keyword][dataset_name]
                            ['complete'] + bjd_plot[plot_out_keyword]
                            [dataset_name][model_name], dataset.y -
                                bjd_plot[plot_out_keyword][dataset_name]
                            ['complete'], bjd_plot[plot_out_keyword]
                            [dataset_name]['jitter']):
                            fileout.write(
                                '{0:f} {1:f} {2:f} {3:f} {4:f} {5:f} {6:f} {7:f} {8:f} {9:f} {10:f} {11:f} {12:f}'
                                '\n'.format(x, tcf, pha, y, e, sys, mod, com,
                                            obs_mod, e, res, e, jit))
                        fileout.close()

                        if getattr(mc.models[model_name], 'systematic_model',
                                   False):
                            continue

                        if getattr(mc.models[model_name], 'jitter_model',
                                   False):
                            continue

                        fileout = open(
                            dir_models + dataset_name + '_' + model_name +
                            '_full.dat', 'w')

                        if model_name + '_std' in bjd_plot[plot_x_keyword][
                                dataset_name]:
                            fileout.write(
                                'descriptor BJD Tc_folded phase mod,+- \n')
                            for x, tcf, pha, mod, std in zip(
                                    bjd_plot[dataset_name]['x_plot'],
                                    tc_folded_plot, phase_plot,
                                    bjd_plot[plot_x_keyword][dataset_name]
                                [model_name], bjd_plot[plot_x_keyword]
                                [dataset_name][model_name + '_std']):
                                fileout.write(
                                    '{0:f} {1:f} {2:f} {3:f} {4:f} \n'.format(
                                        x, tcf, pha, mod, std))
                            fileout.close()
                        else:
                            fileout.write(
                                'descriptor BJD Tc_folded phase mod \n')
                            for x, tcf, pha, mod in zip(
                                    bjd_plot[dataset_name]['x_plot'],
                                    tc_folded_plot, phase_plot,
                                    bjd_plot[plot_x_keyword][dataset_name]
                                [model_name]):
                                fileout.write(
                                    '{0:f} {1:f} {2:f} {3:f}\n'.format(
                                        x, tcf, pha, mod))
                            fileout.close()

                        if getattr(mc.models[model_name], 'model_class',
                                   False) == 'transit':
                            """
                            Exceptional model writing to deal with under-sampled lightcurves, i.e. when folding
                            the light curve from the model file is not good enough. Something similar is performed later
                            with the planetary RVs, but here we must take into account the differences between datasets
                            due to limb darkening, exposure times, etc.
                            """

                            variable_values = {}
                            for common_ref in mc.models[model_name].common_ref:
                                variable_values.update(
                                    mc.common_models[common_ref].convert(
                                        chain_ref))
                            variable_values.update(
                                mc.models[model_name].convert(
                                    chain_ref, dataset_name))

                            fileout = open(
                                dir_models + dataset_name + '_' + model_name +
                                '_transit.dat', 'w')

                            x_range = np.arange(-variable_values['P'] / 2.,
                                                variable_values['P'] / 2.,
                                                0.01)
                            delta_T = variable_values['Tc'] - dataset.Tref

                            y_plot = mc.models[model_name].compute(
                                variable_values, dataset, x_range + delta_T)

                            fileout.write('descriptor Tc_folded  mod \n')
                            for x, mod in zip(x_range, y_plot):
                                fileout.write('{0:f} {1:f} \n'.format(x, mod))
                            fileout.close()

                    fileout = open(dir_models + dataset_name + '_full.dat',
                                   'w')
                    fileout.write('descriptor BJD mod \n')
                    for x, mod in zip(
                            bjd_plot[dataset_name]['x_plot'],
                            bjd_plot[plot_x_keyword][dataset_name]
                        ['complete']):
                        fileout.write('{0:f} {1:f} \n'.format(x, mod))
                    fileout.close()

                for model in planet_vars:

                    try:

                        RV_out = kepler_exo.kepler_RV_T0P(
                            bjd_plot['full']['x_plot'] - mc.Tref,
                            planet_vars[model]['f'], planet_vars[model]['P'],
                            planet_vars[model]['K'], planet_vars[model]['e'],
                            planet_vars[model]['o'])
                        fileout = open(
                            dir_models + 'RV_planet_' + model + '_kep.dat',
                            'w')
                        fileout.write('descriptor x_range  m_kepler \n')
                        for x, y in zip(bjd_plot['full']['x_plot'], RV_out):
                            fileout.write('{0:f} {1:f} \n'.format(x, y))
                        fileout.close()

                        x_range = np.arange(-1.50, 1.50, 0.001)
                        RV_out = kepler_exo.kepler_RV_T0P(
                            x_range * planet_vars[model]['P'],
                            planet_vars[model]['f'], planet_vars[model]['P'],
                            planet_vars[model]['K'], planet_vars[model]['e'],
                            planet_vars[model]['o'])
                        fileout = open(
                            dir_models + 'RV_planet_' + model + '_pha.dat',
                            'w')
                        fileout.write('descriptor x_phase m_phase \n')
                        for x, y in zip(x_range, RV_out):
                            fileout.write('{0:f} {1:f} \n'.format(x, y))
                        fileout.close()
                    except:
                        pass

        print()
        print(
            '****************************************************************************************************'
        )
        print()

    if plot_dictionary['veuz_corner_files']:

        print(' Writing Veusz-compatible files for personalized corner plots')

        # Transit times are too lengthy for the 'tiny' corner plot, so we subtract a median offset from their values
        variable_with_offset = {}

        veusz_dir = dir_output + '/Veuz_plot/'
        if not os.path.exists(veusz_dir):
            os.makedirs(veusz_dir)

        all_variables_list = {}
        for dataset_name, dataset in mc.dataset_dict.items():
            variable_values = dataset.convert(flat_chain)

            for variable_name, variable in variable_values.items():
                all_variables_list[dataset_name + '_' +
                                   variable_name] = variable

            for model_name in dataset.models:
                variable_values = mc.models[model_name].convert(
                    flat_chain, dataset_name)
                for variable_name, variable in variable_values.items():
                    all_variables_list[dataset_name + '_' + model_name + '_' +
                                       variable_name] = variable

        for model_name, model in mc.common_models.items():
            variable_values = model.convert(flat_chain)

            for variable_name, variable in variable_values.items():

                all_variables_list[model.common_ref + '_' +
                                   variable_name] = variable

                # Special treatment for the transit time, since it can be very large yet very precise, making
                # the corner plot axes quite messy
                if variable_name == 'Tc':
                    offset = np.median(variable)
                    variable_with_offset[model.common_ref + '_' +
                                         variable_name] = offset
                    all_variables_list[model.common_ref + '_' +
                                       variable_name] -= offset

        for common_ref, variable_values in planet_variables.items():
            for variable_name, variable in variable_values.items():

                # Skipping the variables that have already been included in all_variables_list
                if common_ref + '_' + variable_name in all_variables_list:
                    continue
                all_variables_list[common_ref + '_' + variable_name] = variable

                if variable_name == 'Tc':
                    offset = np.median(variable)
                    variable_with_offset[common_ref + '_' +
                                         variable_name] = offset
                    all_variables_list[common_ref + '_' +
                                       variable_name] -= offset

        text_file = open(veusz_dir + "veusz_offsets.txt", "w")
        for variable_name, offset_value in variable_with_offset.items():
            text_file.write('{0:s} {1:16.9f}\n'.format(variable_name,
                                                       offset_value))
        text_file.close()

        n_int = len(all_variables_list)
        output_plan = np.zeros([n_samplings, n_int], dtype=np.double)
        output_names = []
        for var_index, variable_name in enumerate(all_variables_list):
            output_plan[:, var_index] = all_variables_list[variable_name]
            output_names.extend([variable_name])

        plot_truths = np.percentile(output_plan[:, :], [15.865, 50, 84.135],
                                    axis=0)
        n_bins = 30 + 1

        h5f = h5py.File(veusz_dir + '_hist1d.hdf5', "w")
        data_grp = h5f.create_group("hist1d")

        data_lim = np.zeros([n_int, 2], dtype=np.double)
        data_edg = np.zeros([n_int, n_bins], dtype=np.double)
        data_skip = np.zeros(n_int, dtype=bool)

        sigma_minus = plot_truths[1, :] - plot_truths[0, :]
        sigma_plus = plot_truths[2, :] - plot_truths[1, :]
        median_vals = plot_truths[1, :]

        for ii in range(0, n_int):

            if sigma_minus[ii] == 0. and sigma_plus[ii] == 0.:
                data_skip[ii] = True
                continue

            sigma5_selection = (output_plan[:, ii] > median_vals[ii] - 5 * sigma_minus[ii]) & \
                               (output_plan[:, ii] < median_vals[ii] + 5 * sigma_plus[ii])

            data_lim[ii, :] = [
                np.amin(output_plan[sigma5_selection, ii]),
                np.amax(output_plan[sigma5_selection, ii])
            ]
            if data_lim[ii, 0] == data_lim[ii, 1]:
                data_lim[ii, :] = [
                    np.amin(output_plan[:, ii]),
                    np.amax(output_plan[:, ii])
                ]
            if data_lim[ii, 0] == data_lim[ii, 1]:
                data_skip[ii] = True
                continue

            data_edg[ii, :] = np.linspace(data_lim[ii, 0], data_lim[ii, 1],
                                          n_bins)

        veusz_workaround_descriptor = 'descriptor'
        veusz_workaround_values = ''

        for ii in range(0, n_int):

            if data_skip[ii]:
                continue

            x_data = output_plan[:, ii]
            x_edges = data_edg[ii, :]

            for jj in range(0, n_int):

                if data_skip[jj]:
                    continue

                y_data = output_plan[:, jj]
                y_edges = data_edg[jj, :]

                if ii != jj:
                    hist2d = np.histogram2d(x_data,
                                            y_data,
                                            bins=[x_edges, y_edges],
                                            density=True)
                    hist1d_y = np.histogram(y_data, bins=y_edges, density=True)

                    Hflat = hist2d[0].flatten()
                    inds = np.argsort(Hflat)[::-1]
                    Hflat = Hflat[inds]
                    sm = np.cumsum(Hflat)
                    sm /= sm[-1]

                    x_edges_1d = (x_edges[1:] + x_edges[:-1]) / 2
                    y_edges_1d = (y_edges[1:] + y_edges[:-1]) / 2
                    h2d_out = np.zeros([n_bins, n_bins])
                    h2d_out[0, 1:] = x_edges_1d
                    h2d_out[1:, 0] = y_edges_1d
                    h2d_out[1:, 1:] = hist2d[0].T * 1. / np.amax(hist2d[0])

                    h2d_list = h2d_out.tolist()
                    h2d_list[0][0] = ''
                    csvfile = veusz_dir + '_hist2d___' + output_names[
                        ii] + '___' + output_names[jj] + '.csv'
                    with open(csvfile, "w") as output:
                        writer = csv.writer(output, lineterminator='\n')
                        writer.writerows(h2d_list)

            hist1d = np.histogram(x_data, bins=x_edges)
            hist1d_norm = hist1d[0] * 1. / n_samplings
            x_edges_1d = (x_edges[1:] + x_edges[:-1]) / 2
            data_grp.create_dataset(output_names[ii] + '_x',
                                    data=x_edges_1d,
                                    compression="gzip")
            data_grp.create_dataset(output_names[ii] + '_y',
                                    data=hist1d_norm,
                                    compression="gzip")

            # data_grp.create_dataset(output_names[ii]+'_val', data=median_vals[ii])
            # data_grp.create_dataset(output_names[ii]+'_val_-', data=sigma_minus[ii])
            # data_grp.create_dataset(output_names[ii]+'_val_+', data=sigma_plus[ii])
            # data_grp.attrs[output_names[ii]+'_val'] = median_vals[ii]

            veusz_workaround_descriptor += ' ' + output_names[ii] + ',+,-'
            veusz_workaround_values += ' ' + repr(
                median_vals[ii]) + ' ' + repr(sigma_plus[ii]) + ' ' + repr(
                    sigma_minus[ii])

        text_file = open(veusz_dir + "veusz_median_sigmas.txt", "w")
        text_file.write('%s \n' % veusz_workaround_descriptor)
        text_file.write('%s \n' % veusz_workaround_values)
        text_file.close()

        print()
        print(
            '****************************************************************************************************'
        )
        print()
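# A minimal, self-contained sketch (not part of the pipeline above) of the
# cumulative-density trick used when exporting the 2D histograms: flatten the
# histogram, sort the bin counts in decreasing order and accumulate them, so
# the result can be thresholded to draw highest-density contour levels in a
# corner plot. All names below are illustrative only.
import numpy as np

def credible_levels(x_data, y_data, bins=30):
    """Return bin edges and, per bin, the cumulative fraction of probability
    enclosed when filling the histogram from the densest bin outwards."""
    hist2d, x_edges, y_edges = np.histogram2d(x_data, y_data, bins=bins,
                                              density=True)
    flat = hist2d.flatten()
    order = np.argsort(flat)[::-1]      # densest bins first
    cumulative = np.cumsum(flat[order])
    cumulative /= cumulative[-1]        # normalize to 1
    # map the cumulative fraction back onto the original bin layout
    enclosed = np.empty_like(flat)
    enclosed[order] = cumulative
    return x_edges, y_edges, enclosed.reshape(hist2d.shape)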
if len(Pi_chamber_vars) % nploty == 0:
  nemptyplots = 0
else:
  nemptyplots = nploty - len(Pi_chamber_vars) % nploty
emptyplots = np.arange(nploty - nemptyplots, nploty)
for empty in emptyplots:
  axarr[nplotx-1, empty].axis('off')

#axes = plt.gca()
#axes.tick_params(direction='in')
for x in x_arr:
  for y in y_arr:
    # ticks inside
    axarr[x,y].tick_params(direction='in', which='both', top=1, right=1)
    # minor ticks
    axarr[x,y].xaxis.set_minor_locator(AutoMinorLocator())
    axarr[x,y].yaxis.set_minor_locator(AutoMinorLocator())
    # label and tick-label font size
    for item in ([axarr[x,y].xaxis.label, axarr[x,y].yaxis.label] + axarr[x,y].get_xticklabels() + axarr[x,y].get_yticklabels()):
      item.set_fontsize(8)
    # subplot numbering
    if y < nploty - nemptyplots or x < (nplotx - 1): #nonempty plots
#      axarr[x,y].text(0.2, 0.875, labeldict[y + x*nploty], fontsize=8, transform=axarr[x,y].transAxes)

      # rescale y range to the visible x range, note: overrides ylim!
      var = Pi_chamber_vars[x*nploty + y]
      if var in rescale_vars:
        autoscale_y(axarr[x,y], margin=0.3)
      
      # hide horizontal tick labels
#      if x*nploty + y < nplotx * nploty - nemptyplots - nploty:
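# The autoscale_y() helper called above is not defined in this snippet; below
# is a minimal sketch of one possible implementation, assuming it rescales the
# y limits to the line data that falls inside the current x limits.
import numpy as np

def autoscale_y(ax, margin=0.1):
    """Rescale the y axis of `ax` to the points visible in the current x range."""
    x_low, x_high = ax.get_xlim()
    y_min, y_max = np.inf, -np.inf
    for line in ax.get_lines():
        x_data = np.asarray(line.get_xdata(), dtype=float)
        y_data = np.asarray(line.get_ydata(), dtype=float)
        visible = (x_data >= x_low) & (x_data <= x_high)
        if visible.any():
            y_min = min(y_min, np.nanmin(y_data[visible]))
            y_max = max(y_max, np.nanmax(y_data[visible]))
    if np.isfinite(y_min) and np.isfinite(y_max):
        pad = margin * (y_max - y_min)
        ax.set_ylim(y_min - pad, y_max + pad)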
Example #17
def visualize_field(borefield):
    """
    Plot the top view and 3D view of borehole positions.

    Parameters
    ----------
    borefield : list
        List of boreholes in the bore field.

    Returns
    -------
    fig : figure
        Figure object (matplotlib).

    """
    import matplotlib.pyplot as plt
    from matplotlib.ticker import AutoMinorLocator
    from mpl_toolkits.mplot3d import Axes3D
    # -------------------------------------------------------------------------
    # Initialize figure
    # -------------------------------------------------------------------------
    LW = 1.5    # Line width
    bbox_props = dict(boxstyle="circle,pad=0.3", fc="white", ec="b", lw=LW)

    plt.rc('figure', figsize=(160.0/25.4, 80.0*4.0/4.0/25.4))
    fig = plt.figure()

    # -------------------------------------------------------------------------
    # Top view
    # -------------------------------------------------------------------------
    i = 0   # Initialize borehole index
    ax0 = fig.add_subplot(121)

    for borehole in borefield:
        i += 1  # Increment borehole index
        (x, y) = borehole.position()    # Extract borehole position
        # Add current borehole to the figure
        ax0.plot(x, y, 'k.')
        ax0.text(x, y, i, ha="center", va="center", size=9, bbox=bbox_props)

    # Configure figure axes
    ax0.set_xlabel('x (m)')
    ax0.set_ylabel('y (m)')
    ax0.set_title('Top view')
    plt.axis('equal')
    ax0.xaxis.set_minor_locator(AutoMinorLocator())
    ax0.yaxis.set_minor_locator(AutoMinorLocator())

    # -------------------------------------------------------------------------
    # 3D view
    # -------------------------------------------------------------------------
    i = 0   # Initialize borehole index
    ax1 = fig.add_subplot(122, projection='3d')

    for borehole in borefield:
        i += 1  # Increment borehole index
        # Position of head of borehole
        (x, y) = borehole.position()
        # Position of bottom of borehole
        x_H = x + borehole.H*np.sin(borehole.tilt)*np.cos(borehole.orientation)
        y_H = y + borehole.H*np.sin(borehole.tilt)*np.sin(borehole.orientation)
        z_H = borehole.D + borehole.H*np.cos(borehole.tilt)
        # Add current borehole to the figure
        ax1.plot(np.atleast_1d(x), np.atleast_1d(y), np.atleast_1d(borehole.D),
                 'ko')
        ax1.plot(np.array([x, x_H]),
                 np.array([y, y_H]),
                 -np.array([borehole.D, z_H]), 'k-')

    # Configure figure axes
    ax1.set_xlabel('x (m)')
    ax1.set_ylabel('y (m)')
    ax1.set_zlabel('z (m)')
    ax1.set_title('3D view')
    plt.axis('equal')
    ax1.xaxis.set_minor_locator(AutoMinorLocator())
    ax1.yaxis.set_minor_locator(AutoMinorLocator())
    ax1.zaxis.set_minor_locator(AutoMinorLocator())

    plt.tight_layout(rect=[0, 0.0, 0.95, 1.0])

    return fig
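# A short usage sketch for the function above, assuming a pygfunction-style
# bore field (boreholes exposing position(), H, D, tilt and orientation, as
# used in visualize_field); the exact constructor and module layout may differ
# between library versions.
import pygfunction as gt

# 3 x 2 rectangular field: 6 m spacing, 150 m long, 4 m buried, 7.5 cm radius
borefield = gt.boreholes.rectangle_field(N_1=3, N_2=2, B_1=6., B_2=6.,
                                         H=150., D=4., r_b=0.075)
fig = visualize_field(borefield)
fig.savefig('borefield.png', dpi=200)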
Example #18
def run(args, graph, labels, train_idx, val_idx, test_idx, evaluator,
        n_running):
    evaluator_wrapper = lambda pred, labels: evaluator.eval({
        "y_pred": pred,
        "y_true": labels
    })["rocauc"]

    train_batch_size = (len(train_idx) + 9) // 10
    # batch_size = len(train_idx)
    train_sampler = MultiLayerNeighborSampler(
        [32 for _ in range(args.n_layers)])
    # sampler = MultiLayerFullNeighborSampler(args.n_layers)
    train_dataloader = DataLoaderWrapper(
        NodeDataLoader(
            graph.cpu(),
            train_idx.cpu(),
            train_sampler,
            batch_sampler=BatchSampler(len(train_idx),
                                       batch_size=train_batch_size),
            num_workers=10,
        ))

    eval_sampler = MultiLayerNeighborSampler(
        [100 for _ in range(args.n_layers)])
    # sampler = MultiLayerFullNeighborSampler(args.n_layers)
    eval_dataloader = DataLoaderWrapper(
        NodeDataLoader(
            graph.cpu(),
            torch.cat([train_idx.cpu(),
                       val_idx.cpu(),
                       test_idx.cpu()]),
            eval_sampler,
            batch_sampler=BatchSampler(graph.number_of_nodes(),
                                       batch_size=65536),
            num_workers=10,
        ))

    criterion = nn.BCEWithLogitsLoss()

    model = gen_model(args).to(device)

    optimizer = optim.AdamW(model.parameters(),
                            lr=args.lr,
                            weight_decay=args.wd)
    lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer,
                                                        mode="max",
                                                        factor=0.75,
                                                        patience=50,
                                                        verbose=True)

    total_time = 0
    val_score, best_val_score, final_test_score = 0, 0, 0

    train_scores, val_scores, test_scores = [], [], []
    losses, train_losses, val_losses, test_losses = [], [], [], []
    final_pred = None

    for epoch in range(1, args.n_epochs + 1):
        tic = time.time()

        loss = train(args, model, train_dataloader, labels, train_idx,
                     criterion, optimizer, evaluator_wrapper)

        toc = time.time()
        total_time += toc - tic

        if epoch == args.n_epochs or epoch % args.eval_every == 0 or epoch % args.log_every == 0:
            train_score, val_score, test_score, train_loss, val_loss, test_loss, pred = evaluate(
                args, model, eval_dataloader, labels, train_idx, val_idx,
                test_idx, criterion, evaluator_wrapper)

            if val_score > best_val_score:
                best_val_score = val_score
                final_test_score = test_score
                final_pred = pred

            if epoch % args.log_every == 0:
                print(
                    f"Run: {n_running}/{args.n_runs}, Epoch: {epoch}/{args.n_epochs}, Average epoch time: {total_time / epoch:.2f}s"
                )
                print(
                    f"Loss: {loss:.4f}\n"
                    f"Train/Val/Test loss: {train_loss:.4f}/{val_loss:.4f}/{test_loss:.4f}\n"
                    f"Train/Val/Test/Best val/Final test score: {train_score:.4f}/{val_score:.4f}/{test_score:.4f}/{best_val_score:.4f}/{final_test_score:.4f}"
                )

            for l, e in zip(
                [
                    train_scores, val_scores, test_scores, losses,
                    train_losses, val_losses, test_losses
                ],
                [
                    train_score, val_score, test_score, loss, train_loss,
                    val_loss, test_loss
                ],
            ):
                l.append(e)

        lr_scheduler.step(val_score)

    print("*" * 50)
    print(
        f"Best val score: {best_val_score}, Final test score: {final_test_score}"
    )
    print("*" * 50)

    if args.plot:
        fig = plt.figure(figsize=(24, 24))
        ax = fig.gca()
        ax.set_xticks(np.arange(0, args.n_epochs, 100))
        ax.set_yticks(np.linspace(0, 1.0, 101))
        ax.tick_params(labeltop=True, labelright=True)
        for y, label in zip([train_scores, val_scores, test_scores],
                            ["train score", "val score", "test score"]):
            plt.plot(range(1, args.n_epochs + 1, args.log_every),
                     y,
                     label=label,
                     linewidth=1)
        ax.xaxis.set_major_locator(MultipleLocator(100))
        ax.xaxis.set_minor_locator(AutoMinorLocator(1))
        ax.yaxis.set_major_locator(MultipleLocator(0.01))
        ax.yaxis.set_minor_locator(AutoMinorLocator(2))
        plt.grid(which="major", color="red", linestyle="dotted")
        plt.grid(which="minor", color="orange", linestyle="dotted")
        plt.legend()
        plt.tight_layout()
        plt.savefig(f"gat_score_{n_running}.png")

        fig = plt.figure(figsize=(24, 24))
        ax = fig.gca()
        ax.set_xticks(np.arange(0, args.n_epochs, 100))
        ax.tick_params(labeltop=True, labelright=True)
        for y, label in zip([losses, train_losses, val_losses, test_losses],
                            ["loss", "train loss", "val loss", "test loss"]):
            plt.plot(range(1, args.n_epochs + 1, args.log_every),
                     y,
                     label=label,
                     linewidth=1)
        ax.xaxis.set_major_locator(MultipleLocator(100))
        ax.xaxis.set_minor_locator(AutoMinorLocator(1))
        ax.yaxis.set_major_locator(MultipleLocator(0.1))
        ax.yaxis.set_minor_locator(AutoMinorLocator(5))
        plt.grid(which="major", color="red", linestyle="dotted")
        plt.grid(which="minor", color="orange", linestyle="dotted")
        plt.legend()
        plt.tight_layout()
        plt.savefig(f"gat_loss_{n_running}.png")

    if args.save_pred:
        os.makedirs("./output", exist_ok=True)
        torch.save(F.softmax(final_pred, dim=1), f"./output/{n_running}.pt")

    return best_val_score, final_test_score
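# A standalone sketch of the tick/grid styling used in the score and loss
# plots above (major and minor locators plus separate major/minor grids);
# the curve here is dummy data, not model output.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator, AutoMinorLocator

epochs = np.arange(1, 501)
score = 1.0 - np.exp(-epochs / 150.0)

fig, ax = plt.subplots(figsize=(8, 6))
ax.plot(epochs, score, linewidth=1, label="score")
ax.xaxis.set_major_locator(MultipleLocator(100))
ax.xaxis.set_minor_locator(AutoMinorLocator(2))
ax.yaxis.set_major_locator(MultipleLocator(0.1))
ax.yaxis.set_minor_locator(AutoMinorLocator(5))
ax.grid(which="major", color="red", linestyle="dotted")
ax.grid(which="minor", color="orange", linestyle="dotted")
ax.legend()
fig.tight_layout()
fig.savefig("grid_example.png")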
Example #19
    def cal(self, vis, vis_mask, li, gi, fbl, rt, **kwargs):
        """Function that does the actual cal."""

        unmasked_only = self.params['unmasked_only']
        phs_only = self.params['phs_only']
        save_gain = self.params['save_gain']
        plot_gain = self.params['plot_gain']
        phs_unit = self.params['phs_unit']
        fig_prefix = self.params['fig_name']
        rotate_xdate = self.params['rotate_xdate']
        feed_no = self.params['feed_no']
        order_bl = self.params['order_bl']
        tag_output_iter = self.params['tag_output_iter']
        iteration = self.iteration
        num_mean = kwargs['num_mean']
        inds = kwargs['inds'] # inds is the point before noise on
        bls_plt = kwargs['bls_plt']
        freq_plt = kwargs['freq_plt']
        if rt.ps_first:
            build_normal_file = kwargs['build_normal_file']
        use_center_data = self.params['use_center_data']
        ns_stable = self.params['ns_stable']
        

        if np.prod(vis.shape) == 0 :
            return

        lfi, lbi = li # local freq and bl index
        fi = gi[0] # freq idx for this cal
        bl = tuple(fbl[1]) # bl for this cal

        nt = vis.shape[0]
        on_time = rt['ns_on'].attrs['on_time']
        # off_time = rt['ns_on'].attrs['off_time']
        period = rt['ns_on'].attrs['period']

        # the calculated phase and amp will be at the ind just 1 before ns ON (i.e., at the ind of the last ns OFF)
        valid_inds = []
        phase = []
        if not phs_only:
            amp = []
        ii_range = []
        for ii, ind in enumerate(inds):
            # drop the first and the last ind, as they may lead to exceptional values
            if (ind == inds[0] or ind == inds[-1]) and not rt.FRB_cal:
                continue

            lower = ind - num_mean
            off_sec = np.ma.array(vis[lower:ind], mask=(~np.isfinite(vis[lower:ind]))&vis_mask[lower:ind])
            if off_sec.count() == 0: # all are invalid values
                continue
            if unmasked_only and off_sec.count() < max(2, num_mean/2): # need more valid samples to keep the estimate stable
                continue

            valid = True
            upper = ind + 1 + on_time
            off_mean = np.ma.mean(off_sec)
            if use_center_data:
                if ind + 2 < upper - 1:
                    this_on = np.ma.masked_invalid(vis[ind+2:upper-1]) # all on signal
                else:
                    continue
            else:
                this_on = np.ma.masked_invalid(vis[ind+1:upper]) # all on signal
            # just to avoid the case of all invalid on values
            if this_on.count() > 0:
                on_mean = np.ma.mean(this_on) # mean for all valid on signals
            else:
                continue
            diff = on_mean - off_mean
            phs = np.angle(diff) # in radians
            if not np.isfinite(phs):
                valid = False
            if not phs_only:
                amp_ = np.abs(diff)
                if not (np.isfinite(amp_) and amp_ > 1.0e-8): # amp_ should > 0
                    valid = False
            if not valid:
                continue
            valid_inds.append(ind)
            if save_gain:
                rt['ns_cal_phase'].local_data[ii, lfi, lbi] = phs
            phase.append( phs ) # in radians
            if not phs_only:
                if save_gain:
                    ii_range += [ii]
                    rt['ns_cal_amp'].local_data[ii, lfi, lbi] = amp_
                amp.append( amp_ )

        # not enough valid data to do the ns_cal
        num_valid = len(valid_inds)
        if (num_valid <= 3 and not rt.FRB_cal) or num_valid < 1:
            print('Only have %d valid points, mask all for fi = %d, bl = (%d, %d)...' % (num_valid, fbl[0], fbl[1][0], fbl[1][1]))
            vis_mask[:] = True # mask the vis as no ns_cal has done
            return

        phase = np.unwrap(phase) # unwrap 2pi discontinuity
        if not phs_only:
            if not rt.ps_first:
                rt['ns_cal_amp'].local_data[ii_range, lfi, lbi] = rt['ns_cal_amp'].local_data[ii_range, lfi, lbi] / np.median(amp)
                amp = np.array(amp) / np.median(amp) # normalize

        # split valid_inds into consecutive chunks
        intervals = [0] + (np.where(np.diff(valid_inds) > 5 * period)[0] + 1).tolist() + [num_valid]
        itp_inds = []
        itp_phase = []
        if not phs_only:
            itp_amp = []
        for i in range(len(intervals) - 1):
            this_chunk = valid_inds[intervals[i]:intervals[i+1]]
            if len(this_chunk) > 3:
                itp_inds.append(this_chunk)
                itp_phase.append(phase[intervals[i]:intervals[i+1]])
                if not phs_only:
                    itp_amp.append(amp[intervals[i]:intervals[i+1]])

        # if no such chunk, mask all the data
        num_itp = len(itp_inds)
        if num_itp == 0:
            rt.interp_mask_count.append(1)
            vis_mask[:] = True
        else:
            rt.interp_mask_count.append(0)

        # get itp pairs
        itp_pairs = []
        for it in itp_inds:
            # itp_pairs.append((max(0, it[0]-off_time), min(nt, it[-1]+period)))
            itp_pairs.append((max(0, it[0]-5), min(nt, it[-1]+5))) # do not extrapolate too much, which may lead to very inaccurate values

        # get mask pairs
        mask_pairs = []
        for i in range(num_itp):
            if i == 0:
                mask_pairs.append((0, itp_pairs[i][0]))
            if i == num_itp - 1:
                mask_pairs.append((itp_pairs[i][-1], nt))
            else:
                mask_pairs.append((itp_pairs[i][-1], itp_pairs[i+1][0]))

        # set mask for inds in mask_pairs
        for mp1, mp2 in mask_pairs:
            vis_mask[mp1:mp2] = True

        # interpolate for inds in itp_inds
        all_phase = np.array([np.nan]*nt)
        if rt.ps_first:
            if rt.normal_index is not None:
                normal_phs = None
                normal_amp = None
                interp_inds = np.array([])
                new_amp = np.array([])
            elif not ns_stable:
                normal_phs = rt.normal_phs[lfi,lbi]
                normal_amp = rt.normal_amp[lfi,lbi]
                interp_inds = np.array([])
                new_amp = np.array([])
            else:
                if build_normal_file:
                    rt.normal_phs[lfi, lbi] = rt['ns_cal_phase'].local_data[0, lfi, lbi]
                    rt.normal_amp[lfi, lbi] = rt['ns_cal_amp'].local_data[0, lfi, lbi]
                normal_phs = rt.normal_phs[lfi,lbi]
                normal_amp = rt.normal_amp[lfi,lbi]
                interp_inds = np.array([])
                new_amp = np.array([])
        for this_inds, this_phase, (i1, i2) in zip(itp_inds, itp_phase, itp_pairs):
            # no need to interpolate for auto-correlation
            if bl[0] == bl[1]:
                all_phase[i1:i2] = 0
                if rt.ps_first:
                    interp_inds = np.concatenate([interp_inds, np.arange(i1,i2)])
                    if rt.normal_index is not None and i1 <= rt.normal_index < i2:
                        normal_phs = 0
                        rt.normal_phs[lfi, lbi] = normal_phs
            else:
                f = InterpolatedUnivariateSpline(this_inds, this_phase)
                this_itp_phs = f(np.arange(i1, i2))
                # # make the interpolated values in the appropriate range
                # this_itp_phs = np.where(this_itp_phs>np.pi, np.pi, this_itp_phs)
                # this_itp_phs = np.where(this_itp_phs<-np.pi, np.pi, this_itp_phs)
                all_phase[i1:i2] = this_itp_phs
                # do phase cal for this range of inds
                if rt.ps_first:
                    interp_inds = np.concatenate([interp_inds, np.arange(i1,i2)])
                    if rt.normal_index is not None and i1 <= rt.normal_index < i2:
                        normal_phs = this_itp_phs[rt.normal_index - i1]
                        rt.normal_phs[lfi, lbi] = normal_phs
                vis[i1:i2] = vis[i1:i2] * np.exp(-1.0J * this_itp_phs)
        if rt.ps_first:
            interp_inds = np.int64(interp_inds)
            if normal_phs is None:
                normal_phs = np.nan
                warnings.warn('The transit point has been masked for fi = %d, bl = (%d, %d)... when calculating phase! Maybe the noise points are too sparse!'%(fbl[0], fbl[1][0], fbl[1][1]),TransitMasked)
            vis[interp_inds] = vis[interp_inds]*np.exp(1.0J * normal_phs)
            rt['ns_cal_phase'].local_data[ii_range, lfi, lbi] = rt['ns_cal_phase'].local_data[ii_range, lfi, lbi] - normal_phs

        if not phs_only:
            all_amp = np.array([np.nan]*nt)
            for this_inds, this_amp, (i1, i2) in zip(itp_inds, itp_amp, itp_pairs):
                f = InterpolatedUnivariateSpline(this_inds, this_amp)
                this_itp_amp = f(np.arange(i1, i2))
                all_amp[i1:i2] = this_itp_amp
                # do amp cal for this range of inds
                if rt.ps_first:
                    new_amp = np.concatenate([new_amp, this_itp_amp])
                    if rt.normal_index is not None and i1 <= rt.normal_index < i2:
                        normal_amp = this_itp_amp[rt.normal_index - i1]
                        rt.normal_amp[lfi, lbi] = normal_amp
                else:
                    vis[i1:i2] = vis[i1:i2] / this_itp_amp

            if rt.ps_first:
                if normal_amp is None:
                    normal_amp = np.nan
                    warnings.warn('The transit point has been masked for frequency %d baseline %d when calculating amplitude! Maybe the noise points are too sparse!'%(lfi,lbi),TransitMasked)
                new_amp = new_amp / normal_amp
                vis[interp_inds] = vis[interp_inds]/new_amp
                rt['ns_cal_amp'].local_data[ii_range, lfi, lbi] = rt['ns_cal_amp'].local_data[ii_range, lfi, lbi] / normal_amp

        if plot_gain and (bl in bls_plt and fi in freq_plt):
            plt.figure()
            if phs_only:
                fig, ax = plt.subplots()
            else:
                fig, ax = plt.subplots(2, sharex=True)
            ax_val = np.array([ (datetime.utcfromtimestamp(sec) + timedelta(hours=8)) for sec in rt['sec1970'][:] ])
            xlabel = '%s' % ax_val[0].date()
            ax_val = mdates.date2num(ax_val)
            if order_bl and (bl[0] > bl[1]):
                # negate phase as for the conj of vis
                all_phase = np.where(np.isfinite(all_phase), -all_phase, np.nan)
                phase = np.where(np.isfinite(phase), -phase, np.nan)
            if phs_unit == 'degree': # default to radians
                all_phase = np.degrees(all_phase)
                phase = np.degrees(phase)
                ylabel = r'$\Delta \phi$ / degree'
            else:
                ylabel = r'$\Delta \phi$ / radian'
            if phs_only:
                ax.plot(ax_val, all_phase)
                ax.plot(ax_val[valid_inds], phase, 'ro')
                ax1 = ax
            else:
                ax[0].plot(ax_val, all_amp)
                ax[0].plot(ax_val[valid_inds], amp, 'ro')
                ax[0].set_ylabel(r'$\Delta |g|$')
                ax[1].plot(ax_val, all_phase)
                ax[1].plot(ax_val[valid_inds], phase, 'ro')
                ax1 = ax[1]
            duration = (ax_val[-1] - ax_val[0])
            dt = duration / nt
            ext = max(0.05*duration, 5*dt)
            # if phs_unit == 'degree': # default to radians
            #     ax1.set_ylim([-180, 180])
            #     ax1.set_yticks([-180, -120, -60, 0, 60, 120, 180])
            # else:
            #     ax1.set_ylim([-np.pi, np.pi])
            ax1.set_xlim([ax_val[0]-ext, ax_val[-1]+ext])
            ax1.xaxis_date()
            date_format = mdates.DateFormatter('%H:%M')
            ax1.xaxis.set_major_formatter(date_format)
            if rotate_xdate:
                # set the x-axis tick labels to diagonal so it fits better
                fig.autofmt_xdate()
            else:
                # reduce the number of tick locators
                locator = MaxNLocator(nbins=6)
                ax1.xaxis.set_major_locator(locator)
                ax1.xaxis.set_minor_locator(AutoMinorLocator(2))
            ax1.set_xlabel(xlabel)
            ax1.set_ylabel(ylabel)

            if feed_no:
                pol = rt['bl_pol'].local_data[li[1]]
                bl = tuple(rt['true_blorder'].local_data[li[1]])
                if order_bl and (bl[0] > bl[1]):
                    bl = (bl[1], bl[0])
                fig_name = '%s_%f_%d_%d_%s.png' % (fig_prefix, fbl[0], bl[0], bl[1], rt.pol_dict[pol])
            else:
                fig_name = '%s_%f_%d_%d.png' % (fig_prefix, fbl[0], fbl[1][0], fbl[1][1])
            if tag_output_iter:
                fig_name = output_path(fig_name, iteration=iteration)
            else:
                fig_name = output_path(fig_name)
            plt.savefig(fig_name)
            plt.close()
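# A small self-contained sketch of the date-axis handling used in the gain
# plot above: matplotlib date numbers, an HH:MM formatter, a capped number of
# major ticks and minor ticks in between. The time series here is dummy data.
from datetime import datetime, timedelta
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.ticker import MaxNLocator, AutoMinorLocator

start = datetime(2020, 1, 1, 0, 0, 0)
times = [start + timedelta(minutes=10 * i) for i in range(100)]
ax_val = mdates.date2num(times)
phase = np.sin(np.linspace(0, 4 * np.pi, 100))

fig, ax = plt.subplots()
ax.plot(ax_val, phase)
ax.xaxis_date()
ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
ax.xaxis.set_major_locator(MaxNLocator(nbins=6))
ax.xaxis.set_minor_locator(AutoMinorLocator(2))
ax.set_xlabel(str(times[0].date()))
ax.set_ylabel(r'$\Delta \phi$ / radian')
fig.savefig('date_axis_example.png')
plt.close(fig)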
Example #20
 xlabel = 'Fraction' if iy == ny - 1 else ''
 ylabel = 'Layers' if ix == 0 else ''
 ax = fig.add_axes(this_position,
                   autoscalex_on=False,
                   autoscaley_on=False,
                   xscale='linear',
                   yscale='linear',
                   xlim=xRange,
                   ylim=yRange,
                   xlabel=xlabel,
                   ylabel=ylabel)
 if iy < (ny - 1):
     ax.set_xticklabels([])
 if ix > 0:
     ax.set_yticklabels([])
 ax.xaxis.set_minor_locator(AutoMinorLocator())
 x_layers = dt['numL']
 layer_acc = numpy.zeros(nLayers_max)
 for i in range(0, nSpeciesPlot):
     if speciesPlot[i]['idx'][j] is None:
         continue
     print(j, i, speciesPlot[i]['name'])
     y_layers = numpy.zeros(dt['nLayers'])
     #y_layers[0] = dt['bin'][dt['div'][0], speciesPlot[i]['idx'][j]]/dt['N_S']
     #y_layers[1:] = numpy.diff(dt['bin'][dt['div'], speciesPlot[i]['idx'][j]])/(dt['N_S'])
     y_layers[0] = dt['bin'][dt['div'][0],
                             speciesPlot[i]['idx'][j]] / dt['N_in'][0]
     y_layers[1:] = numpy.diff(
         dt['bin'][dt['div'],
                   speciesPlot[i]['idx'][j]]) / (dt['N_in'][1:])
     #y_layers = smooth(y_layers)
df4 = df4.assign(nrad=radval).loc[50].sort_values('nrad')
df4PMi = (df4.loc[df4['nrad'] <= (hmrad4[50]*2)])
df4PMo = (df4.loc[df4['nrad'] >= (hmrad4[50]*2)])
        
print (hmrad1[50], hmrad2[50], hmrad3[50], hmrad4[50])

#%% Plotting cumulative distribution of proper motion of stars located inside and outside of 2 x HMR for 4 snapshots
#
#from matplotlib.ticker import MaxNLocator

fnamelist =[d['fname1'],d['fname2'],d['fname3'],d['fname4']]
    
fig, ((ax1,ax2),(ax3,ax4)) = plt.subplots(nrows=2, ncols=2, figsize=(15.,15.))
plt.subplots_adjust(wspace=0.2)
x_minor_locator = AutoMinorLocator(2)
y_minor_locator = AutoMinorLocator(4)

legend =[]

for fname in fnamelist:

    if fname[17] == 'X':
        legendtitle = (r'N=1000, D=%s.%s, $\alpha_{vir}$=%s.%s, init_rad=%s pc, sim=%s' %(fname[7],fname[8],fname[22],fname[23],fname[12],fname[25:27]))
    elif fname[17] == '5':
        legendtitle = (r'N=500, D=%s.%s, $\alpha_{vir}$=%s.%s, init_rad=%s pc, sim=%s' %(fname[7],fname[8],fname[22],fname[23],fname[12],fname[25:27]))
    elif fname[17] == '2':    
        legendtitle = (r'N=2000, D=%s.%s, $\alpha_{vir}$=%s.%s, init_rad=%s pc, sim=%s' %(fname[7],fname[8],fname[22],fname[23],fname[12],fname[25:27]))
    elif fname[5:7] == '1r':
        legendtitle = (r'D=1.6, $\alpha_{vir}$=0.3, init_rad=1 pc, sim=%s' %fname[25:27])
    elif fname[5:7] == '2r':
Example #22
    ax2.scatter(All_read[j].loc[n].sort_values(velocity)[velocity], \
               np.linspace(1/len(All_read[j].loc[n]), 1.0, len(All_read[j].loc[n])), s=0.5, \
               label=(r'D=%s, $alpha_{vir}$=%s, $N_{sim}$=%s'%(\
                      fname[j][11:14], fname[j][15:18], fname[j][25:29])))

## choosing 2,000 stars at random
#    ax2.scatter(np.sort(random.choice(All_read[j].loc[n, velocity], 2000)), \
#               np.linspace(1/2000, 1.0, 2000), s=0.5, \
#               label=(r'%s-velocity D=%s, $alpha_{vir}$=%s'%(velocity[0:2], \
#                      fname[j][11:14], fname[j][15:18])))

for ax in [ax1, ax2]:

    ax.set_ylabel('Cumulative distribution', fontsize=10.)
    ax.xaxis.set_minor_locator(AutoMinorLocator(2))
    ax.yaxis.set_minor_locator(AutoMinorLocator(4))
    ax.tick_params(which='both',
                   direction='in',
                   top=True,
                   right=True,
                   labelbottom=True)
    ax.set_xlim(0.1, 10.)
    ax.set_ylim(0., 1.03)
    #    ax.legend(loc='lower right', frameon=False, fontsize=8.)
    ax.set_xscale('log')
    ax.legend(title='Cluster age %s Myr'\
              %(np.round(All_read[0].loc[n].iloc[0]['time'], decimals=1)),\
              loc='upper left', frameon=False, fontsize=8.)

    ax.set_xlabel('Proper motion [km/s]', fontsize=10.)
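# Minimal sketch of the empirical cumulative-distribution construction used
# above: sort the values and plot them against equally spaced fractions.
# The "velocity" data here is randomly generated for illustration only.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator

velocities = np.random.lognormal(mean=0.0, sigma=0.5, size=1000)  # km/s
fractions = np.linspace(1.0 / len(velocities), 1.0, len(velocities))

fig, ax = plt.subplots()
ax.scatter(np.sort(velocities), fractions, s=0.5)
ax.set_xscale('log')
ax.set_xlabel('Proper motion [km/s]', fontsize=10.)
ax.set_ylabel('Cumulative distribution', fontsize=10.)
ax.yaxis.set_minor_locator(AutoMinorLocator(4))
ax.tick_params(which='both', direction='in', top=True, right=True)
fig.savefig('cumulative_example.png')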
def plot_theory_curve():

    npzfile = np.load("data_sec_dist_MuMinus_standardrock_Emin_10.0_Emax_10.0.npz")

    ioniz_secondary_energy = npzfile['ioniz']
    brems_secondary_energy = npzfile['brems']
    photo_secondary_energy = npzfile['photo']
    epair_secondary_energy = npzfile['epair']

    all_secondary_energy = np.concatenate((
        ioniz_secondary_energy,
        brems_secondary_energy,
        photo_secondary_energy,
        epair_secondary_energy)
    )

    sum_hist = np.sum(all_secondary_energy)
    print(sum_hist)

    list_secondary_energies = [
        ioniz_secondary_energy,
        brems_secondary_energy,
        photo_secondary_energy,
        epair_secondary_energy,
        all_secondary_energy
    ]

    # labels ordered to match list_secondary_energies above
    list_secondary_energies_label = [
        'Ionization',
        'Bremsstrahlung',
        'Photonuclear',
        'Pair Production',
        'Sum'
    ]

    statistics = npzfile['statistics'][0]
    E_min_log = npzfile['E_min'][0]
    E_max_log = npzfile['E_max'][0]
    spectral_index = npzfile['spectral_index'][0]
    distance = npzfile['distance'][0]
    medium_name = npzfile['medium_name'][0]
    particle_name = npzfile['particle_name'][0]
    ecut = npzfile['ecut'][0]
    vcut = npzfile['vcut'][0]

    particle_def = pp.particle.MuMinusDef()
    medium = pp.medium.StandardRock(1.0)
    energy_cuts = pp.EnergyCutSettings(500, -1)
    multiplier = 1.
    lpm = False
    shadow_effect = pp.parametrization.photonuclear.ShadowButkevichMikhailov()
    add_pertubative = True
    interpolation_def = pp.InterpolationDef()
    interpolation_def.path_to_tables = "~/.local/share/PROPOSAL/tables"

    ioniz = pp.parametrization.Ionization(
        particle_def=particle_def,
        medium=medium,
        energy_cuts=energy_cuts,
        multiplier=multiplier)

    epair = pp.parametrization.pairproduction.EpairProductionRhoInterpolant(
        particle_def=particle_def,
        medium=medium,
        energy_cuts=energy_cuts,
        multiplier=multiplier,
        lpm_effect=lpm,
        interpolation_def=interpolation_def)

    brems = pp.parametrization.bremsstrahlung.KelnerKokoulinPetrukhin(
        particle_def=particle_def,
        medium=medium,
        energy_cuts=energy_cuts,
        multiplier=multiplier,
        lpm_effect=lpm)

    photo = pp.parametrization.photonuclear.AbramowiczLevinLevyMaor97Interpolant(
        particle_def=particle_def,
        medium=medium,
        energy_cuts=energy_cuts,
        multiplier=multiplier,
        shadow_effect=shadow_effect,
        interpolation_def=interpolation_def)

    photo2 = pp.parametrization.photonuclear.BezrukovBugaev(
        particle_def=particle_def,
        medium=medium,
        energy_cuts=energy_cuts,
        multiplier=multiplier,
        add_pertubative=add_pertubative)

    losses_params = [ioniz, epair, brems, photo2]

    muon_energy = 10**10  # MeV

    inch_to_cm = 2.54
    golden_ratio = 1.61803
    width = 29.7  # cm

    num_bins = 100
    v_bins = np.linspace(np.log10(500), np.log10(muon_energy), num_bins)
    v_bins_log = np.logspace(np.log10(500./muon_energy), np.log10(1.), num_bins)

    fig = plt.figure(figsize=(width / inch_to_cm, width / inch_to_cm / golden_ratio))
    ax = fig.add_subplot(111)

    for idx, secondary_list in enumerate(list_secondary_energies):
        ax.hist(
            secondary_list,
            weights=np.ones(len(secondary_list))/sum_hist,
            histtype='step',
            log=True,
            bins=v_bins,
            label=list_secondary_energies_label[idx]
        )

    all_cross_sections = np.empty((len(losses_params), num_bins))

    for idx, param in enumerate(losses_params):
        all_cross_sections[idx] = np.array([param.differential_crosssection(muon_energy, v)*v for v in v_bins_log])

    sum_cross_sections = np.sum(all_cross_sections, axis=0)
    print(sum(all_cross_sections[np.isfinite(all_cross_sections)]))
    print(sum(sum_cross_sections[np.isfinite(sum_cross_sections)]))

    for cross_section in np.append(all_cross_sections, [sum_cross_sections], axis=0):
        ax.plot(
            v_bins,
            cross_section/sum(sum_cross_sections[np.isfinite(sum_cross_sections)]),
            drawstyle="steps-pre"
        )

    # ax.set_xscale("log")
    minor_locator = AutoMinorLocator()
    ax.xaxis.set_minor_locator(minor_locator)
    ax.legend()
    ax.set_xlabel(r'energy loss / log($E$/MeV)')
    ax.set_ylabel(r'$N$')

    ax.set_ylim(ymin=1e-8)
    ax.set_yscale("log")
    ax.legend()
    fig.savefig("theory_curve.pdf")
Example #24
def plot_data(data):

    signal_format = 'hist' # 'line' for line above SM stack
                           # 'hist' for bar above SM stack
                           # None for signal as part of SM stack
    Total_SM_label = False # for Total SM black line in plot and legend
    plot_label = r'$H \rightarrow WW \rightarrow e\nu\mu\nu$'
    signal_label = r'Signal ($m_H=125$ GeV)' # r''

    # *******************
    # general definitions (shouldn't need to change)
    lumi_used = str(lumi*fraction)    
    signal = None
    for s in samples.keys():
        if s not in stack_order and s!='data': signal = s

    for x_variable,hist in HWWHistograms.hist_dict.items():

        h_bin_width = hist['bin_width']
        h_num_bins = hist['num_bins']
        h_xrange_min = hist['xrange_min']
        h_log_y = hist['log_y']
        h_y_label_x_position = hist['y_label_x_position']
        h_legend_loc = hist['legend_loc']
        h_log_top_margin = hist['log_top_margin'] # to decrease the separation between data and the top of the figure, remove a 0
        h_linear_top_margin = hist['linear_top_margin'] # to decrease the separation between data and the top of the figure, pick a number closer to 1
    
        bins = [h_xrange_min + x*h_bin_width for x in range(h_num_bins+1) ]
        bin_centres = [h_xrange_min+h_bin_width/2 + x*h_bin_width for x in range(h_num_bins) ]

        data_x,_ = np.histogram(data['data'][x_variable].values, bins=bins)
        data_x_errors = np.sqrt(data_x)

        signal_x = None
        if signal_format=='line':
            signal_x,_ = np.histogram(data[signal][x_variable].values,bins=bins,weights=data[signal].weight.values)
        elif signal_format=='hist':
            signal_x = data[signal][x_variable].values
            signal_weights = data[signal].weight.values
            signal_color = samples[signal]['color']
    
        mc_x = []
        mc_weights = []
        mc_colors = []
        mc_labels = []
        mc_x_tot = np.zeros(len(bin_centres))

        for s in stack_order:
            mc_labels.append(s)
            mc_x.append(data[s][x_variable].values)
            mc_colors.append(samples[s]['color'])
            mc_weights.append(data[s].weight.values)
            mc_x_heights,_ = np.histogram(data[s][x_variable].values,bins=bins,weights=data[s].weight.values)
            mc_x_tot = np.add(mc_x_tot, mc_x_heights)
    
        mc_x_err = np.sqrt(mc_x_tot)
    
    
        # *************
        # Main plot 
        # *************
        plt.clf()
        plt.axes([0.1,0.3,0.85,0.65]) #(left, bottom, width, height)
        main_axes = plt.gca()
        main_axes.errorbar( x=bin_centres, y=data_x, yerr=data_x_errors, fmt='ko', label='Data')
        mc_heights = main_axes.hist(mc_x,bins=bins,weights=mc_weights,stacked=True,color=mc_colors, label=mc_labels)
        if Total_SM_label:
            totalSM_handle, = main_axes.step(bins,np.insert(mc_x_tot,0,mc_x_tot[0]),color='black')
        if signal_format=='line':
            main_axes.step(bins,np.insert(signal_x,0,signal_x[0]),color=samples[signal]['color'], linestyle='--',
                       label=signal)
        elif signal_format=='hist':
            main_axes.hist(signal_x,bins=bins,bottom=mc_x_tot,weights=signal_weights,color=signal_color,label=signal)
        main_axes.bar(bin_centres,2*mc_x_err,bottom=mc_x_tot-mc_x_err,alpha=0.5,color='none',hatch="////",
                  width=h_bin_width, label='Stat. Unc.')
        
        main_axes.set_xlim(left=h_xrange_min,right=bins[-1])
        main_axes.xaxis.set_minor_locator(AutoMinorLocator()) # separation of x axis minor ticks
        main_axes.tick_params(which='both',direction='in',top=True,labeltop=False,labelbottom=False,right=True,labelright=False)
        main_axes.set_ylabel(r'Events / '+str(h_bin_width)+r' GeV',fontname='sans-serif',horizontalalignment='right',y=1.0,fontsize=11)
        if h_log_y:
            main_axes.set_yscale('log')
            smallest_contribution = mc_heights[0][0]
            smallest_contribution.sort()
            bottom = smallest_contribution[-2]
            top = np.amax(data_x)*h_log_top_margin
            main_axes.set_ylim(bottom=bottom,top=top)
            main_axes.yaxis.set_major_formatter(CustomTicker())
            locmin = LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9),numticks=12)
            main_axes.yaxis.set_minor_locator(locmin)
        else: 
            main_axes.set_ylim(bottom=0,top=(np.amax(data_x)+math.sqrt(np.amax(data_x)))*h_linear_top_margin)
            main_axes.yaxis.set_minor_locator(AutoMinorLocator())
        
        plt.text(0.05,0.97,'ATLAS',ha="left",va="top",family='sans-serif',transform=main_axes.transAxes,style='italic',weight='bold',fontsize=13)
        plt.text(0.19,0.97,'Open Data',ha="left",va="top",family='sans-serif',transform=main_axes.transAxes,fontsize=13)
        plt.text(0.05,0.9,'for education only',ha="left",va="top",family='sans-serif',transform=main_axes.transAxes,style='italic',fontsize=8)
        plt.text(0.05,0.86,r'$\sqrt{s}=13\,\mathrm{TeV},\;\int L\,dt=$'+lumi_used+r'$\,\mathrm{fb}^{-1}$',ha="left",va="top",family='sans-serif',transform=main_axes.transAxes)
        plt.text(0.05,0.78,plot_label,ha="left",va="top",family='sans-serif',transform=main_axes.transAxes)
    
        # Create new legend handles but use the colors from the existing ones 
        handles, labels = main_axes.get_legend_handles_labels()
        if signal_format=='line':
            handles[labels.index(signal)] = Line2D([], [], c=samples[signal]['color'], linestyle='dashed')
        if Total_SM_label:
            uncertainty_handle = mpatches.Patch(facecolor='none',hatch='////')
            handles.append((totalSM_handle,uncertainty_handle))
            labels.append('Total SM')
    
        # specify order within legend
        new_handles = [handles[labels.index('Data')]]
        new_labels = ['Data']
        for s in reversed(stack_order):
            new_handles.append(handles[labels.index(s)])
            new_labels.append(s)
        if Total_SM_label:
            new_handles.append(handles[labels.index('Total SM')])
            new_labels.append('Total SM')
        else: 
            new_handles.append(handles[labels.index('Stat. Unc.')])
            new_labels.append('Stat. Unc.')
        if signal is not None:
            new_handles.append(handles[labels.index(signal)])
            new_labels.append(signal_label)
        main_axes.legend(handles=new_handles, labels=new_labels, frameon=False, loc=h_legend_loc)
    
    
        # *************
        # Data/MC ratio 
        # *************
        plt.axes([0.1,0.1,0.85,0.2]) #(left, bottom, width, height)
        ratio_axes = plt.gca()
        ratio_axes.errorbar( x=bin_centres, y=data_x/mc_x_tot, yerr=data_x_errors/mc_x_tot, fmt='ko')
        ratio_axes.bar(bin_centres,2*mc_x_err/mc_x_tot,bottom=1-mc_x_err/mc_x_tot,alpha=0.5,color='none',
            hatch="////",width=h_bin_width)
        ratio_axes.plot(bins,np.ones(len(bins)),color='k')
        ratio_axes.set_xlim(left=h_xrange_min,right=bins[-1])
        ratio_axes.xaxis.set_minor_locator(AutoMinorLocator()) # separation of x axis minor ticks
        ratio_axes.xaxis.set_label_coords(0.9,-0.2) # (x,y) of x axis label # 0.2 down from x axis
        ratio_axes.set_xlabel(labelfile.variable_labels[x_variable],fontname='sans-serif',fontsize=11)
        ratio_axes.tick_params(which='both',direction='in',top=True,labeltop=False,right=True,labelright=False)
        ratio_axes.set_ylim(bottom=0,top=2.5)
        ratio_axes.set_yticks([0,1,2])
        ratio_axes.yaxis.set_minor_locator(AutoMinorLocator())
        if signal is not None:
            ratio_axes.set_ylabel(r'Data/SM',fontname='sans-serif',x=1,fontsize=11)
        else:
            ratio_axes.set_ylabel(r'Data/MC',fontname='sans-serif',fontsize=11)
        
        
        # Generic features for both plots
        main_axes.yaxis.set_label_coords(h_y_label_x_position,1)
        ratio_axes.yaxis.set_label_coords(h_y_label_x_position,0.5)
    
        plt.savefig("HWW_"+x_variable+".pdf")
    
    return signal_x,mc_x_tot
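
Below is a minimal, self-contained sketch (not from the original example; all data are fabricated) of the same layout idea: a main panel and a ratio panel placed with plt.axes, both using AutoMinorLocator for the x-axis minor ticks.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator

bins = np.linspace(80, 250, 35)
centres = 0.5 * (bins[:-1] + bins[1:])
mc = 1000 * np.exp(-centres / 60.0)           # toy MC prediction
data = np.random.poisson(mc)                  # toy "data"

main_axes = plt.axes([0.1, 0.3, 0.85, 0.65])  # (left, bottom, width, height)
main_axes.errorbar(centres, data, yerr=np.sqrt(data), fmt='ko', label='Data')
main_axes.hist(centres, bins=bins, weights=mc, histtype='stepfilled', label='MC')
main_axes.xaxis.set_minor_locator(AutoMinorLocator())
main_axes.tick_params(which='both', direction='in', labelbottom=False)
main_axes.legend(frameon=False)

ratio_axes = plt.axes([0.1, 0.1, 0.85, 0.2])
ratio_axes.errorbar(centres, data / mc, yerr=np.sqrt(data) / mc, fmt='ko')
ratio_axes.plot(bins, np.ones(len(bins)), 'k-')
ratio_axes.xaxis.set_minor_locator(AutoMinorLocator())
ratio_axes.set_ylabel('Data/MC')
plt.savefig("ratio_sketch.pdf")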
Example #25
    def get_plot(self, subplot=False, width=None, height=None, xmin=-6.,
                 xmax=6., yscale=1, colours=None, plot_total=True,
                 legend_on=True, num_columns=2, legend_frame_on=False,
                 legend_cutoff=3, xlabel='Energy (eV)', ylabel='Arb. units',
                 zero_to_efermi=True, dpi=400, fonts=None, plt=None,
                 style=None, no_base_style=False):
        """Get a :obj:`matplotlib.pyplot` object of the density of states.

        Args:
            subplot (:obj:`bool`, optional): Plot the density of states for
                each element on separate subplots. Defaults to ``False``.
            width (:obj:`float`, optional): The width of the plot.
            height (:obj:`float`, optional): The height of the plot.
            xmin (:obj:`float`, optional): The minimum energy on the x-axis.
            xmax (:obj:`float`, optional): The maximum energy on the x-axis.
            yscale (:obj:`float`, optional): Scaling factor for the y-axis.
            colours (:obj:`dict`, optional): Use custom colours for specific
                element and orbital combinations. Specified as a :obj:`dict` of
                :obj:`dict` of the colours. For example::

                    {
                        'Sn': {'s': 'r', 'p': 'b'},
                        'O': {'s': '#000000'}
                    }

                The colour can be a hex code, series of rgb value, or any other
                format supported by matplotlib.
            plot_total (:obj:`bool`, optional): Plot the total density of
                states. Defaults to ``True``.
            legend_on (:obj:`bool`, optional): Plot the graph legend. Defaults
                to ``True``.
            num_columns (:obj:`int`, optional): The number of columns in the
                legend.
            legend_frame_on (:obj:`bool`, optional): Plot a frame around the
                graph legend. Defaults to ``False``.
            legend_cutoff (:obj:`float`, optional): The cut-off (in % of the
                maximum density of states within the plotting range) for an
                elemental orbital to be labelled in the legend. This prevents
                the legend from containing labels for orbitals that have very
                little contribution in the plotting range.
            xlabel (:obj:`str`, optional): Label/units for x-axis (i.e. energy)
            ylabel (:obj:`str`, optional): Label/units for y-axis (i.e. DOS)
            zero_to_efermi (:obj:`bool`, optional): Normalise the plot such
                that the Fermi level is set as 0 eV.
            dpi (:obj:`int`, optional): The dots-per-inch (pixel density) for
                the image.
            fonts (:obj:`list`, optional): Fonts to use in the plot. Can be a
                a single font, specified as a :obj:`str`, or several fonts,
                specified as a :obj:`list` of :obj:`str`.
            plt (:obj:`matplotlib.pyplot`, optional): A
                :obj:`matplotlib.pyplot` object to use for plotting.
            style (:obj:`list`, :obj:`str`, or :obj:`dict`): Any matplotlib
                style specifications, to be composed on top of Sumo base
                style.
            no_base_style (:obj:`bool`, optional): Prevent use of sumo base
                style. This can make alternative styles behave more
                predictably.

        Returns:
            :obj:`matplotlib.pyplot`: The density of states plot.
        """
        plot_data = self.dos_plot_data(yscale=yscale, xmin=xmin, xmax=xmax,
                                       colours=colours, plot_total=plot_total,
                                       legend_cutoff=legend_cutoff,
                                       subplot=subplot,
                                       zero_to_efermi=zero_to_efermi)

        if subplot:
            nplots = len(plot_data['lines'])
            plt = pretty_subplot(nplots, 1, width=width, height=height,
                                 dpi=dpi, plt=plt)
        else:
            plt = pretty_plot(width=width, height=height, dpi=dpi, plt=plt)

        mask = plot_data['mask']
        energies = plot_data['energies'][mask]
        fig = plt.gcf()
        lines = plot_data['lines']
        spins = [Spin.up] if len(lines[0][0]['dens']) == 1 else \
            [Spin.up, Spin.down]

        for i, line_set in enumerate(plot_data['lines']):
            if subplot:
                ax = fig.axes[i]
            else:
                ax = plt.gca()

            for line, spin in itertools.product(line_set, spins):
                if spin == Spin.up:
                    label = line['label']
                    densities = line['dens'][spin][mask]
                elif spin == Spin.down:
                    label = ""
                    densities = -line['dens'][spin][mask]
                ax.fill_between(energies, densities, lw=0,
                                facecolor=line['colour'],
                                alpha=line['alpha'])
                ax.plot(energies, densities, label=label,
                        color=line['colour'])

            ax.set_ylim(plot_data['ymin'], plot_data['ymax'])
            ax.set_xlim(xmin, xmax)

            ax.tick_params(axis='y', labelleft=False)
            ax.yaxis.set_minor_locator(AutoMinorLocator(2))
            ax.xaxis.set_minor_locator(AutoMinorLocator(2))

            loc = 'upper right' if subplot else 'best'
            ncol = 1 if subplot else num_columns
            if legend_on:
                ax.legend(loc=loc, frameon=legend_frame_on, ncol=ncol)

        # now add axis labels and sort out ticks
        if subplot:
            ax.set_xlabel(xlabel)
            fig.subplots_adjust(hspace=0)
            plt.setp([a.get_xticklabels() for a in fig.axes[:-1]],
                     visible=False)
            if 'axes.labelcolor' in matplotlib.rcParams:
                ylabelcolor = matplotlib.rcParams['axes.labelcolor']
            else:
                ylabelcolor = None

            fig.text(0.08, 0.5, ylabel, ha='left', color=ylabelcolor,
                     va='center', rotation='vertical', transform=ax.transAxes)
        else:
            ax.set_xlabel(xlabel)
            ax.set_ylabel(ylabel)

        return plt
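
A self-contained sketch of the core drawing pattern used in get_plot above, with made-up densities: a fill_between plus a line per contribution, and AutoMinorLocator(2) minor ticks on both axes.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator

energies = np.linspace(-6, 6, 400)
s_dos = np.exp(-(energies + 2) ** 2)           # toy "s" contribution
p_dos = 0.8 * np.exp(-(energies - 1) ** 2 / 2) # toy "p" contribution

fig, ax = plt.subplots()
for dens, colour, label in [(s_dos, 'r', 's'), (p_dos, 'b', 'p')]:
    ax.fill_between(energies, dens, lw=0, facecolor=colour, alpha=0.25)
    ax.plot(energies, dens, color=colour, label=label)
ax.set_xlim(-6, 6)
ax.xaxis.set_minor_locator(AutoMinorLocator(2))
ax.yaxis.set_minor_locator(AutoMinorLocator(2))
ax.tick_params(axis='y', labelleft=False)
ax.set_xlabel('Energy (eV)')
ax.set_ylabel('Arb. units')
ax.legend(loc='best', frameon=False)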
Example #26
    def process(self, ts):

        excl_auto = self.params['excl_auto']
        plot_stats = self.params['plot_stats']
        fig_prefix = self.params['fig_name']
        rotate_xdate = self.params['rotate_xdate']
        tag_output_iter = self.params['tag_output_iter']

        ts.redistribute('baseline')

        if ts.local_vis_mask.ndim == 3: # RawTimestream
            if excl_auto:
                bl = ts.local_bl
                vis_mask = ts.local_vis_mask[:, :, bl[:, 0] != bl[:, 1]].copy()
            else:
                vis_mask = ts.local_vis_mask.copy()
            nt, nf, lnb = vis_mask.shape
        elif ts.local_vis_mask.ndim == 4: # Timestream
            # suppose masks are the same for all 4 pols
            if excl_auto:
                bl = ts.local_bl
                vis_mask = ts.local_vis_mask[:, :, 0, bl[:, 0] != bl[:, 1]].copy()
            else:
                vis_mask = ts.local_vis_mask[:, :, 0].copy()
            nt, nf, lnb = vis_mask.shape
        else:
            raise RuntimeError('Incorrect vis_mask shape %s' % ts.local_vis_mask.shape)

        # total number of bl
        nb = mpiutil.allreduce(lnb, comm=ts.comm)

        # un-mask ns-on positions
        if 'ns_on' in ts.iterkeys():
            vis_mask[ts['ns_on'][:]] = False

        # statistics along time axis
        time_mask = np.sum(vis_mask, axis=(1, 2)).reshape(-1, 1)
        # gather local array to rank0
        time_mask = mpiutil.gather_array(time_mask, axis=1, root=0, comm=ts.comm)
        if mpiutil.rank0:
            time_mask = np.sum(time_mask, axis=1)

        # statistics along frequency axis
        freq_mask = np.sum(vis_mask, axis=(0, 2)).reshape(-1, 1)
        # gather local array to rank0
        freq_mask = mpiutil.gather_array(freq_mask, axis=1, root=0, comm=ts.comm)
        if mpiutil.rank0:
            freq_mask = np.sum(freq_mask, axis=1)

        if plot_stats and mpiutil.rank0:
            time_fig_name = '%s_%s.png' % (fig_prefix, 'time')
            if tag_output_iter:
                time_fig_name = output_path(time_fig_name, iteration=self.iteration)
            else:
                time_fig_name = output_path(time_fig_name)

            # plot time_mask
            plt.figure()
            fig, ax = plt.subplots()
            x_vals = np.array([ datetime.fromtimestamp(s) for s in ts['sec1970'][:] ])
            xlabel = '%s' % x_vals[0].date()
            x_vals = mdates.date2num(x_vals)
            ax.plot(x_vals, 100*time_mask/float(nf*nb))
            ax.xaxis_date()
            date_format = mdates.DateFormatter('%H:%M')
            ax.xaxis.set_major_formatter(date_format)
            if rotate_xdate:
                # set the x-axis tick labels to diagonal so it fits better
                fig.autofmt_xdate()
            else:
                # reduce the number of tick locations
                locator = MaxNLocator(nbins=6)
                ax.xaxis.set_major_locator(locator)
                ax.xaxis.set_minor_locator(AutoMinorLocator(2))

            ax.set_xlabel(xlabel)
            ax.set_ylabel(r'RFI (%)')
            plt.savefig(time_fig_name)
            plt.close()

            freq_fig_name = '%s_%s.png' % (fig_prefix, 'freq')
            if tag_output_iter:
                freq_fig_name = output_path(freq_fig_name, iteration=self.iteration)
            else:
                freq_fig_name = output_path(freq_fig_name)

            # plot freq_mask
            plt.figure()
            plt.plot(ts.freq[:], 100*freq_mask/float(nt*nb))
            plt.xlabel(r'$\nu$ / MHz')
            plt.ylabel(r'RFI (%)')
            plt.savefig(freq_fig_name)
            plt.close()

        return super(Stats, self).process(ts)
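
A standalone sketch (synthetic data) of the date-axis tick handling used above: HH:MM labels from DateFormatter, at most six major ticks from MaxNLocator, and one minor tick between majors from AutoMinorLocator(2).

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.ticker import MaxNLocator, AutoMinorLocator
from datetime import datetime, timedelta

start = datetime(2020, 1, 1)
times = mdates.date2num([start + timedelta(seconds=10 * s) for s in range(360)])
rfi_percent = 100 * np.random.rand(360)        # toy masked fraction

fig, ax = plt.subplots()
ax.plot(times, rfi_percent)
ax.xaxis_date()
ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
ax.xaxis.set_major_locator(MaxNLocator(nbins=6))
ax.xaxis.set_minor_locator(AutoMinorLocator(2))
ax.set_xlabel(str(start.date()))
ax.set_ylabel('RFI (%)')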
Example #27
    def corner_plot(self,
                    axes_options=dict(),
                    contourf_options=dict(),
                    figure=None,
                    axes=None,
                    bins_options=None,
                    credible_intervals=True,
                    fill_ci=True,
                    show_samples=False,
                    diagonal='cumul',
                    display_plot_coords=False,
                    labels=dict(),
                    bins=dict(),
                    filename=None,
                    saving_options=dict(),
                    align_xlabels=True,
                    align_ylabels=True,
                    cdf_levels=None,
                    weights=None,
                    rotation=0,
                    display_1sigma=False,
                    rcfile=None,
                    rcparams=dict()):

        N = len(self.request)

        # Definition of the figure and axes
        if (figure is None) or (axes is None):
            self._get_plot_config_scatter_plots(rcfile=rcfile,
                                                rotation=rotation,
                                                rcparams=rcparams)
            width, lb, tr, wh_margin = self.scatter_plot_scaling()
            fig, ax = plt.subplots(N, N, figsize=[width, width])
            fig.subplots_adjust(left=lb,
                                bottom=lb,
                                right=tr,
                                top=tr,
                                wspace=wh_margin,
                                hspace=wh_margin)
        else:
            fig = figure
            ax = axes

        # Compute credible intervals
        xhist, yhist, hist, hist_levels = self.get2dcontours(
            cdf_levels=cdf_levels, weights=weights, **bins)

        # How labels are displayed
        label_names = dict()
        [label_names.update({a: a}) for a in self.request]
        label_names.update(labels)

        labels = self.request

        # Range, major and minor tick locators
        ax_options = dict()
        [ax_options.update({a: 4 * [None]}) for a in self.request]
        ax_options.update(axes_options)

        spectral_map = plt.get_cmap('Spectral')
        alpha = [0.5, 0.9, 0.9, 0.9, 0.9]
        s_list = [1, 5, 5, 5, 5]
        dchi2_list = np.array([16, 9, 4, 1, 0])
        colors = [
            spectral_map(255.0 / 255.0),
            spectral_map(170.0 / 255.0),
            spectral_map(85.0 / 255.0),
            spectral_map(0.0)
        ]

        samples = self.sample.copy(deep=True)
        samples['reject'] = 0

        if cdf_levels == None:
            contourf_opts = {
                'colors': [(0.72, 0.72, 0.9003921568627451),
                           (0.36, 0.36, 0.7011764705882353),
                           (0.0, 0.0, 0.5019607843137255)],
                'antialiased':
                False
            }  # with blue shades
            contourf_opts.update(contourf_options)
        else:
            if len(cdf_levels) <= 3:
                contourf_opts = {
                    'colors': [(0.72, 0.72, 0.9003921568627451),
                               (0.36, 0.36, 0.7011764705882353),
                               (0.0, 0.0, 0.5019607843137255)],
                    'antialiased':
                    False
                }  # with blue shades
                contourf_opts.update(contourf_options)
            else:
                contourf_opts = dict(contourf_options)

        # Create subplots
        for i in range(N):
            for j in range(N):
                if j > i:
                    ax[i][j].set_xlabel("")
                    ax[i][j].set_ylabel("")
                    ax[i][j].axes.get_xaxis().set_visible(False)
                    ax[i][j].axes.get_yaxis().set_visible(False)
                    ax[i][j].set_frame_on(False)
                else:
                    ax[i][j].xaxis.set_minor_locator(AutoMinorLocator(2))
                    ax[i][j].yaxis.set_minor_locator(AutoMinorLocator(2))
                    ax[i][j].set_facecolor("none")
                    if i == N - 1:
                        ax[i][j].set_xlabel(rf"${label_names[labels[j]]}$",
                                            labelpad=0)
                        ax[i][j].tick_params(axis='x', which='major', pad=2)
                        for tick in ax[i][j].get_xticklabels():
                            tick.set_rotation(rotation)
                            if rotation > 10:
                                tick.set_ha(
                                    'right')  # for the 30 deg rotation
                    else:
                        ax[i][j].set_xlabel("")
                        ax[i][j].set_xticklabels([])
                        if j != N - 2:
                            ax[i][j].get_shared_x_axes().join(
                                ax[i][j], ax[i + 1][j])
                    if j == 0:
                        if i == 0:
                            ax[i][j].set_ylabel(rf"${label_names[labels[i]]}$")
                        else:
                            ax[i][j].set_ylabel(rf"${label_names[labels[i]]}$",
                                                labelpad=0)
                    else:
                        ax[i][j].set_ylabel("")
                        if not i == j: ax[i][j].set_yticklabels([])
                    if (j <= i) and display_plot_coords:
                        ax[i][j].annotate(f"({i}, {j})", (0.05, 0.05),
                                          xycoords='axes fraction',
                                          c='k',
                                          size=11,
                                          weight=500,
                                          ha="left",
                                          va="bottom",
                                          bbox=dict(boxstyle='square, pad=0',
                                                    fc='None',
                                                    ec='None'))
                    if not i == j:
                        if credible_intervals:
                            levels = np.concatenate([
                                hist_levels[i][j],
                                [hist[i][j].max() * (1 + 1e-6)]
                            ])
                            if fill_ci:
                                ax[i][j].contourf(xhist[i][j], yhist[i][j],
                                                  hist[i][j].T, levels,
                                                  **contourf_opts)
                            else:
                                ax[i][j].contour(xhist[i][j], yhist[i][j],
                                                 hist[i][j].T, levels,
                                                 **contourf_opts)
                        if show_samples:
                            for id_dchi2 in range(len(dchi2_list) - 1):
                                cond = samples.reject == 1
                                samples.loc[cond, 'reject'] = 0
                                samples.loc[(
                                    (samples.dchi2 < dchi2_list[id_dchi2 + 1])
                                    | (samples.dchi2 >= dchi2_list[id_dchi2])),
                                            'reject'] = 1
                                cond = samples.reject == 0
                                ax[i][j].scatter(
                                    samples[cond][labels[j]].values,
                                    samples[cond][labels[i]].values,
                                    s=s_list[id_dchi2],
                                    facecolors=colors[id_dchi2],
                                    marker='o',
                                    alpha=alpha[id_dchi2],
                                    linewidths=0,
                                    zorder=-100 + id_dchi2)
                        # Limits
                        if not ax_options[labels[j]][0] == None:
                            ax[i][j].set_xlim(ax_options[labels[j]][0][0],
                                              ax_options[labels[j]][0][1])
                        if not ax_options[labels[i]][1] == None:
                            ax[i][j].set_ylim(ax_options[labels[i]][1][0],
                                              ax_options[labels[i]][1][1])

                        # Tick positions and spacing
                        if not ax_options[labels[j]][2] == None:
                            ax[i][j].xaxis.set_major_locator(
                                MultipleLocator(ax_options[labels[j]][2][0]))
                            ax[i][j].xaxis.set_minor_locator(
                                AutoMinorLocator(ax_options[labels[j]][2][1]))
                        if not ax_options[labels[i]][3] == None:
                            ax[i][j].yaxis.set_major_locator(
                                MultipleLocator(ax_options[labels[i]][3][0]))
                            ax[i][j].yaxis.set_minor_locator(
                                AutoMinorLocator(ax_options[labels[i]][3][1]))
                    else:
                        if diagonal == 'chi2':
                            x = np.linspace(samples[labels[j]].min(),
                                            samples[labels[j]].max(), 100)
                            xx = list()
                            yy = list()
                            for k in range(100 - 1):
                                mask = (samples[labels[j]] > x[k]) & (
                                    samples[labels[j]] <= x[k + 1])
                                if mask.sum() > 0:
                                    y = samples.loc[mask, 'dchi2']
                                    xx.append(np.mean([x[k], x[k + 1]]))
                                    yy.append(np.min(y))
                            ax[i][j].plot(xx, yy, ls='-', lw=1, c='k')

                            ax[i][i].yaxis.set_label_position("right")
                            ax[i][i].spines['left'].set_visible(False)
                            ax[i][i].spines['top'].set_visible(False)
                            ax[i][i].tick_params(which='both',
                                                 bottom=True,
                                                 top=False,
                                                 left=False,
                                                 right=True,
                                                 labelbottom=True,
                                                 labeltop=False,
                                                 labelleft=False,
                                                 labelright=True,
                                                 pad=2)
                            ax[i][i].set_ylabel(r"$\Delta\chi^2$")
                            ax[i][i].set_ylim(-0.4, 9)
                            ax[i][i].yaxis.set_major_locator(
                                MultipleLocator(2))
                            ax[i][i].yaxis.set_minor_locator(
                                AutoMinorLocator(4))

                            # Choose same ticks for a column
                            if i < N - 1:
                                ax[i][i].get_shared_x_axes().join(
                                    ax[i][i], ax[i + 1][i])
                        elif diagonal == 'cumul':
                            cdf = self.cdf[labels[j]]
                            ax[i][i].plot(cdf[0], cdf[1], ls='-', lw=1, c='k')

                            if display_1sigma:
                                x = self.ci[labels[j]][1]
                                y = 0.1
                                xerr = np.array([[
                                    x - self.ci[labels[j]][0],
                                    self.ci[labels[j]][2] - x
                                ]]).T
                                ax[i][i].errorbar(x,
                                                  y,
                                                  xerr=xerr,
                                                  marker='o',
                                                  ms=1,
                                                  lw=0.5,
                                                  capsize=0,
                                                  color='k')

                            ax[i][i].yaxis.set_label_position("right")
                            ax[i][i].spines['left'].set_visible(False)
                            ax[i][i].spines['top'].set_visible(False)
                            ax[i][i].tick_params(which='both',
                                                 bottom=True,
                                                 top=False,
                                                 left=False,
                                                 right=True,
                                                 labelbottom=True,
                                                 labeltop=False,
                                                 labelleft=False,
                                                 labelright=True,
                                                 pad=2)
                            ax[i][i].set_ylabel(r"CDF")
                            ax[i][i].set_ylim(0, 1.05)
                            ax[i][i].yaxis.set_major_locator(
                                MultipleLocator(0.2))
                            ax[i][i].yaxis.set_minor_locator(
                                AutoMinorLocator(4))

                            # Choose same ticks for a column
                            if i < N - 1:
                                ax[i][i].get_shared_x_axes().join(
                                    ax[i][i], ax[i + 1][i])

                            # Limits
                            if not ax_options[labels[j]][0] == None:
                                ax[i][j].set_xlim(ax_options[labels[j]][0][0],
                                                  ax_options[labels[j]][0][1])

                            # Tick positions and spacing
                            if not ax_options[labels[j]][2] == None:
                                ax[i][j].xaxis.set_major_locator(
                                    MultipleLocator(
                                        ax_options[labels[j]][2][0]))
                                ax[i][j].xaxis.set_minor_locator(
                                    AutoMinorLocator(
                                        ax_options[labels[j]][2][1]))

        if align_xlabels: fig.align_xlabels(ax[N - 1, :])
        if align_ylabels: fig.align_ylabels(ax[:, 0])

        if not filename == None:
            opts = dict({
                'transparent': False,
                'bbox_inches': 'tight',
                'dpi': 300,
                'pad_inches': 0.01,
            })
            opts.update(saving_options)
            fig.savefig(filename, **opts)
        else:
            return fig, ax
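
A reduced sketch of the corner-plot grid logic above, using random samples: hide the upper triangle, give every visible panel AutoMinorLocator(2) minor ticks, and only label the outer row and column.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator

N = 3
names = ['a', 'b', 'c']
data = np.random.randn(500, N)

fig, ax = plt.subplots(N, N, figsize=(6, 6))
for i in range(N):
    for j in range(N):
        if j > i:
            # hide the unused upper-triangle panels
            ax[i][j].set_frame_on(False)
            ax[i][j].axes.get_xaxis().set_visible(False)
            ax[i][j].axes.get_yaxis().set_visible(False)
            continue
        ax[i][j].xaxis.set_minor_locator(AutoMinorLocator(2))
        ax[i][j].yaxis.set_minor_locator(AutoMinorLocator(2))
        if i == j:
            ax[i][j].hist(data[:, j], bins=30, histtype='step', color='k')
        else:
            ax[i][j].scatter(data[:, j], data[:, i], s=2)
        ax[i][j].set_xlabel(names[j] if i == N - 1 else "")
        ax[i][j].set_ylabel(names[i] if j == 0 else "")
fig.align_xlabels(ax[N - 1, :])
fig.align_ylabels(ax[:, 0])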
Example #28
def make_1dplot_figure(bf):
    fff = plt.figure(bf.title, figsize=bf.figsize)

    rcParams['font.family'] = bf.fontfamily
    rcParams['axes.linewidth'] = 0.5  # de-emphasize
    #plt.xkcd() # ha!

    # we can add in one summary boxplot per subj
    if bf.boxplot_on:
        ncol = 2
        a, subpl = plt.subplots(bf.nsub,
                                ncol,
                                gridspec_kw={
                                    'width_ratios': [12, 1],
                                    'wspace': 0.05
                                },
                                sharey='row',
                                squeeze=True,
                                figsize=bf.figsize)
    else:
        ncol = 1
        a, subpl = plt.subplots(bf.nsub, ncol, figsize=bf.figsize)

    for i in range(bf.nsub):
        ss = bf.all_subs[i]
        ii = i + 1

        # just relabel, need to account for different cases of what
        # subpl is and what its shape is (if it even has a shape!)
        if bf.boxplot_on:
            if bf.nsub > 1:
                pp = subpl[i, 0]
            else:
                pp = subpl[0]
        else:
            if bf.nsub > 1:
                pp = subpl[i]
            else:
                pp = subpl

        # ----------------- Main plot: time series ---------------------

        if bf.ncensor:
            xoffset = 0.5 * bf.censor_width
            for cc in range(bf.ncensor):
                pp.add_patch(
                    matpat.Rectangle((bf.censor_arr[cc] - xoffset, ss.ylim[0]),
                                     width=bf.censor_width,
                                     height=(ss.ylim[1] - ss.ylim[0]),
                                     facecolor=bf.censor_RGB,
                                     lw=0,
                                     edgecolor=None,
                                     alpha=None))

        if ss.censor_hline:
            pp.axhline(y=ss.censor_hline,
                       c=laio.DEF_censor_hline_RGB,
                       ls=':',
                       lw=1)

        if bf.see_xax:
            pp.axhline(y=0, c='0.6', ls='-', lw=0.5)
            #plt.axhline(y=0, c='0.5', ls=':', lw=0.75)

        # the actual plot
        sp = pp.plot(ss.x, ss.y, color=ss.color, lw=2)

        pp.set_xlim(ss.xlim)
        pp.set_ylim(ss.ylim)

        pp.set_xlabel(ss.xlabel, fontsize=bf.fontsize)
        pp.set_ylabel(ss.ylabel, fontsize=bf.fontsize)

        # get ylabels aligned horizontally
        pp.get_yaxis().set_label_coords(-0.1, 0.5)
        print("++ Plotting: {}".format(ss.ylabel))

        pp.xaxis.set_minor_locator(AutoMinorLocator(5))
        pp.yaxis.set_minor_locator(AutoMinorLocator(2))
        pp.tick_params(axis='both',
                       which='minor',
                       direction='in',
                       color='0.5',
                       bottom=True,
                       left=True,
                       right=True)  #, top=True )
        pp.tick_params(axis='both',
                       which='major',
                       direction='in',
                       color='0.5',
                       bottom=True,
                       left=True,
                       right=True)  #, top=True )
        pp.spines['bottom'].set_color('0.5')
        pp.spines['top'].set_color('0.5')
        pp.spines['left'].set_color('0.5')
        pp.spines['right'].set_color('0.5')

        # only show tick labels at very bottom
        if i < bf.nsub - 1:
            nlabs = len(pp.get_xticklabels())
            pp.set_xticklabels([''] * nlabs)

        if bf.title and not (i):
            # cheating with title because tight layout doesn't know about
            # suptitle
            pp.set_title(bf.title, fontsize=bf.fontsize)

        # ----------------- Optional plot: boxplot ---------------------

        if bf.boxplot_on:
            if bf.nsub > 1:
                qq = subpl[i, 1]
            else:
                qq = subpl[1]

            if ss.censor_hline:
                qq.axhline(y=ss.censor_hline,
                           c=laio.DEF_censor_hline_RGB,
                           ls=':',
                           lw=1)

            if bf.see_xax:
                qq.axhline(y=0, c='0.6', ls='-', lw=0.5)
                #plt.axhline(y=0, c='0.5', ls=':', lw=0.75)

            # actual boxplot
            sq = qq.boxplot(ss.y,
                            widths=0.1,
                            sym='.',
                            notch=0,
                            patch_artist=True)
            # fun parameter-setting for boxplot
            SETLW = 1.
            MARKSIZE1 = 8
            MARKSIZE2 = 5
            flilines = sq['fliers']
            for line in flilines:
                line.set_color(ss.color)
                line.set_markersize(MARKSIZE1)
            medlines = sq['medians']
            for line in medlines:
                line.set_color('0.7')
                line.set_linewidth(SETLW * 1.25)
            boxlines = sq['boxes']
            for line in boxlines:
                line.set_color(ss.color)
            plt.setp(sq['fliers'],
                     marker='.',
                     mew=0.3,
                     mec='k',
                     mfc=ss.color,
                     color=ss.color,
                     ms=MARKSIZE2)
            plt.setp(sq['whiskers'], color=ss.color, linestyle='-', lw=SETLW)
            plt.setp(sq['caps'], color=ss.color, linestyle='-', lw=SETLW)

            # no xticks/labels
            if 1:
                qq.set_xticks([])

            # stuff for y-axis ticks (on) and labels (off)
            qq.yaxis.set_minor_locator(AutoMinorLocator(2))
            qq.tick_params(axis='y',
                           which='minor',
                           direction='in',
                           color='0.5',
                           bottom=True,
                           left=True,
                           right=True)  #, top=True )
            qq.tick_params(axis='y',
                           which='major',
                           direction='in',
                           color='0.5',
                           bottom=True,
                           left=True,
                           right=True,
                           labelleft=False)  #, top=True )
            pp.spines['bottom'].set_color('0.5')
            pp.spines['top'].set_color('0.5')
            qq.spines['left'].set_color('0.5')
            qq.spines['right'].set_color('0.5')

    # finishing touches

    if bf.layout == 'nospace':
        fff.subplots_adjust(wspace=0.1, hspace=0.1)
    #elif bf.layout == 'tight':
    #    plt.tight_layout()

    plt.savefig(bf.fname,
                dpi=bf.dpi,
                facecolor=bf.bkgd_color,
                bbox_inches='tight')
    print("++ Done! Figure created:\n\t {}".format(bf.fname))

    return 0
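
A compact sketch (synthetic series) of the row layout in make_1dplot_figure: a wide time-series panel next to a narrow boxplot that shares its y-axis, with AutoMinorLocator minor ticks on both.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator

t = np.arange(200)
y = np.sin(t / 15.0) + 0.2 * np.random.randn(200)

fig, (pp, qq) = plt.subplots(1, 2, sharey='row', squeeze=True,
                             gridspec_kw={'width_ratios': [12, 1], 'wspace': 0.05})
pp.plot(t, y, lw=2)
pp.xaxis.set_minor_locator(AutoMinorLocator(5))
pp.yaxis.set_minor_locator(AutoMinorLocator(2))
pp.tick_params(which='both', direction='in', color='0.5')

qq.boxplot(y, widths=0.1, sym='.', notch=0, patch_artist=True)
qq.set_xticks([])
qq.yaxis.set_minor_locator(AutoMinorLocator(2))
qq.tick_params(axis='y', which='both', direction='in', labelleft=False)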
Example #29
#Settings
#MarkEvery --
MarkEvery = [1, 5, 10, 17]
#TickSettings
DT = [600, 1800, 3600, 43200, 86400, 302400, "ByMonth"]
if DeltaT <= 7200:
    DT = 600
elif DeltaT <= 21600:
    DT = 1800
elif DeltaT <= 86400:
    DT = 3600
elif DeltaT <= 172800:
    DT = 7200

# Now Generate plots
for i in CH:
    plt.figure(figsize=(15, 8))
    a = plt.plot(Time, Data[(i - 1)], 'bo', markersize=.01, markevery=5)
    # a Locator instance must not be shared between axes, so create a fresh one
    # per figure; plt.gca() returns the current axes (plt.axes() with no
    # arguments is deprecated for this purpose)
    plt.gca().yaxis.set_minor_locator(AutoMinorLocator(4))
    plt.grid(True)
    plt.xlabel('Time')
    plt.ylabel("Current (A)")
    #plt.xticks(Locs,Ticks,rotation=30,size='small')
    plt.ylim([9, 18])
    #plt.xlim([WeekBefore,(T-43200)])
    string = "Graph of Current vs Time for CH " + str(i)
    plt.title(string)
    name = "/home/pi/CH" + str(i) + ".png"

    plt.savefig(name)
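
The DT interval chosen above is never applied in the snippet as shown; below is a self-contained sketch of one plausible use (an assumption about the intent, with fabricated data): DT as the major-tick spacing in seconds, subdivided by AutoMinorLocator.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator, AutoMinorLocator

Time = np.arange(0, 7200, 10)                  # seconds
current = 13 + 0.2 * np.random.randn(len(Time))
DT = 600                                       # spacing chosen for DeltaT <= 7200

fig, ax = plt.subplots(figsize=(15, 8))
ax.plot(Time, current, 'bo', markersize=0.5, markevery=5)
ax.xaxis.set_major_locator(MultipleLocator(DT))
ax.xaxis.set_minor_locator(AutoMinorLocator(4))
ax.yaxis.set_minor_locator(AutoMinorLocator(4))
ax.set_xlabel('Time')
ax.set_ylabel('Current (A)')
ax.grid(True)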
Example #30
    mag_2[mag_2['name'] == name].iloc[0, 2],
    mag_2[mag_2['name'] == name].iloc[0, 1], 0.1, 1e51, 2, 800)
plt.plot(t,
         1.24 * lm,
         'orange',
         label=r'Magnetar $M_{\rm ej}=2 M_{\odot}, B_{14}=19, P=116~ms$',
         lw=2)
# plt.plot(t,valenti_bol(t,0.1,0))
ax.xaxis.set_ticks_position('both')
ax.yaxis.set_ticks_position('both')
ax.set_yscale("log")
ax.tick_params(direction='in', which='both')
# plt.gca().yaxis.set_minor_locator(AutoMinorLocator(5))
print(np.max(lm))
ax.axhline(y=5.9e41, color='r', ls='--', lw=2)
plt.gca().xaxis.set_minor_locator(AutoMinorLocator(5))
plt.legend(frameon=False, fontsize=10)
plt.gca().set_xlim([-5, 150])
plt.gca().set_ylim([5e40, 3e42])
plt.xlabel(r'Time (days)', fontsize=20)
plt.ylabel(r'$ L_{\rm bol} \ (\rm \ erg \ s^{-1})$', fontsize=20)
# plt.hist(df_lfrac['lfac'].tolist(),bins=15, color='goldenrod', ec='none')
# ax.set_xlabel(r'$f$ ',fontsize=15)
# ax.set_ylabel('Count',fontsize=15)
#
# ax=f.add_subplot(gs[3,:])
# plt.hist(np.log10(lpeak_add_list),bins=15, color='gray', ec='none')
# ax.set_xlabel(r'Log $f L_{\rm p} \ (\rm erg \ s^{-1})$ ',fontsize=15)
# ax.set_ylabel('Count',fontsize=15)
# ax.set_ylim(0,6)
plt.tight_layout()
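
A minimal, self-contained sketch (fabricated light curve) of the tick handling in the fragment above: log-scaled y-axis, AutoMinorLocator(5) on the linear time axis, and inward ticks on all four sides.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator

t = np.linspace(0, 150, 300)
lum = 2e42 * np.exp(-t / 40.0) + 6e40          # toy bolometric luminosity

fig, ax = plt.subplots()
ax.plot(t, lum, color='orange', lw=2)
ax.axhline(y=5.9e41, color='r', ls='--', lw=2)
ax.set_yscale('log')
ax.xaxis.set_ticks_position('both')
ax.yaxis.set_ticks_position('both')
ax.tick_params(direction='in', which='both')
ax.xaxis.set_minor_locator(AutoMinorLocator(5))
ax.set_xlim([-5, 150])
ax.set_ylim([5e40, 3e42])
ax.set_xlabel('Time (days)')
ax.set_ylabel(r'$L_{\rm bol}\ (\rm erg\ s^{-1})$')
plt.tight_layout()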