Example #1
def fit(ii, tag, bins, model):

    print('tag:', tag)
    V = (3200)**3  # parent simulation box volume, (3200 cMpc)^3
    bincen = (bins[1:] + bins[:-1]) / 2.
    binwidth = bins[1:] - bins[:-1]

    out, hist_all, err = get_lum_all(tag, bins=bins)
    # zero out bins containing five or fewer galaxies
    ok = np.where(hist_all <= 5)[0]
    out[ok] = 0.
    hist_all[ok] = 0.
    err[ok] = 0.

    phi = out / (vol * binwidth)
    err = err / (vol * binwidth)

    N = models.phi_to_N(phi, V, bins)

    samples = fitdf(ii, tag, N, hist_all, V, bins, model)

    return samples

    for ii, tag in enumerate(tags):

        # redshift encoded in the snapshot tag (e.g. '010_z005p000' -> 5.0)
        z = float(tag[5:].replace('p', '.'))
        print(z)
        df = pd.read_csv('Magnitude_limits.txt')
        low = np.array(df[filters])[ii]

        Halpha = get_line_all(tag, 'HI6563', inp = 'FLARES', LF = False)
        Hbeta = get_line_all(tag, 'HI4861', inp = 'FLARES', LF = False)
        CIII = get_line_all(tag, 'CIII1907', inp = 'FLARES', LF = False) + get_line_all(tag, 'CIII1909', inp = 'FLARES', LF = False)
        OII = get_line_all(tag, 'OII3726', inp = 'FLARES', LF = False) + get_line_all(tag, 'OII3729', inp = 'FLARES', LF = False)
        # NeIII = get_line_all(tag, 'NeIII3869', inp = 'FLARES', LF = False) + get_line_all(tag, 'NeIII3967', inp = 'FLARES', LF = False)
        OIII = get_line_all(tag, 'OIII4959', inp = 'FLARES', LF = False) + get_line_all(tag, 'OIII5007', inp = 'FLARES', LF = False)
        l_fuvs = np.concatenate(get_lum_all(tag, LF=False))

        LF_create(Halpha, weights, l_fuvs, low, axs[0], titles[0], s_m.to_rgba(ii+0.5))
        LF_create(Hbeta, weights, l_fuvs, low, axs[1], titles[1], s_m.to_rgba(ii+0.5))
        LF_create(CIII, weights, l_fuvs, low, axs[2], titles[2], s_m.to_rgba(ii+0.5))
        LF_create(OII, weights, l_fuvs, low, axs[3], titles[3], s_m.to_rgba(ii+0.5))
        # LF_create(NeIII, weights, l_fuvs, low, axs[4], titles[4], s_m.to_rgba(ii+0.5))
        LF_create(OIII, weights, l_fuvs, low, axs[4], titles[4], s_m.to_rgba(ii+0.5))


    for ii in range(5):
        axs[ii].set_xlim(xlims[ii])
        axs[ii].set_ylim(-9.,-1.8)
        for label in (axs[ii].get_xticklabels() + axs[ii].get_yticklabels()):
            label.set_fontsize(13)
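
A minimal sketch of the conversion a helper like models.phi_to_N in fit() above presumably performs: expected counts per magnitude bin are the number density times the bin width times the volume. The function name and behaviour here are an assumption, not the FLARES implementation.

import numpy as np

def phi_to_N_sketch(phi, volume, bins):
    # phi assumed to be a per-magnitude number density [Mpc^-3 mag^-1]
    binwidth = bins[1:] - bins[:-1]
    return phi * binwidth * volume
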
Example #3
fig, axs = plt.subplots(nrows=2,
                        ncols=3,
                        figsize=(13, 5),
                        sharex=True,
                        sharey=True,
                        facecolor='w',
                        edgecolor='k')
axs = axs.ravel()

for ii, tag in enumerate(tags):

    df = pd.read_csv('Magnitude_limits.txt')
    low = np.array(df[filters])[ii]
    bins = -np.arange(-low, 25, 0.4)[::-1]

    L_FUV = get_lum_all(tag, LF=False, filter='FUV', Luminosity='DustModelI')
    L_NUV = get_lum_all(tag, LF=False, filter='NUV', Luminosity='DustModelI')
    L_FUV_int = get_lum_all(tag,
                            LF=False,
                            filter='FUV',
                            Luminosity='Intrinsic')
    Mstar_30 = get_data_all(tag, inp='FLARES', DF=False)
    sfr_30 = get_data_all(tag, dataset='SFR_inst_30', inp='FLARES', DF=False)

    # expand the per-region weights so each galaxy carries its region's weight
    ws = np.array([])
    for jj in range(len(weights)):
        ws = np.append(ws, np.ones(np.shape(L_FUV[jj])) * weights[jj])
    L_FUV = np.concatenate(L_FUV)
    L_FUV_int = np.concatenate(L_FUV_int)
    L_NUV = np.concatenate(L_NUV)
    Mstar_30 = np.concatenate(Mstar_30) * 1e10
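
The ws array above assigns every galaxy the weight of its FLARES region, so statistics over the concatenated arrays should use those weights. A minimal sketch of a weighted median such weights could feed (not part of the original script):

import numpy as np

def weighted_median(values, weights):
    # interpolate the value reached at half of the total weight
    order = np.argsort(values)
    v, w = np.asarray(values)[order], np.asarray(weights)[order]
    cw = np.cumsum(w)
    return np.interp(0.5 * cw[-1], cw, v)
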
Example #4
# create a ScalarMappable and initialize a data structure
s_m = matplotlib.cm.ScalarMappable(cmap=c_m, norm=norm)
s_m.set_array([])

dat = pd.DataFrame({})

for ii, z in enumerate(zs):
    df = pd.read_csv('Magnitude_limits.txt')
    low = np.array(df[filters])[ii]

    bins = -np.arange(-low, 26.5, 0.5)[::-1]
    bincen = (bins[1:]+bins[:-1])/2.
    binwidth = bins[1:] - bins[:-1]

    print ("\n tag: ", tags[ii])
    out, hist, err = get_lum_all(tags[ii], bins=bins)

    Msim = out/(binwidth*vol)
    xerr = np.ones(len(out))*binwidth[0]/2.
    yerr = err/(vol*binwidth)
    ok = np.where(hist > 0)[0]

    observed = Msim*(binwidth*parent_volume)
    sigma = observed/np.sqrt(hist)
    # pad to a fixed length so every redshift column in the DataFrame has the same number of rows
    xx, yy, zz, number = np.zeros(15), np.zeros(15), np.zeros(15), np.zeros(15)
    num = len(ok)
    xx[:num], yy[:num], zz[:num], number[:num] = bincen[ok], Msim[ok], yerr[ok], hist[ok]
    dat[f"M{z}"] = xx
    dat[f"phi{z}"] = yy
    dat[f"err{z}"] = zz
    dat[f"num{z}"] = number
    c = 'forestgreen'

    # overplot an observed EW measurement; draw an error bar if an uncertainty
    # is available, otherwise indicate the value with a downward arrow
    ax.scatter(o['L1500'] + np.log10(conversion_fac), np.log10(o['EW'][0]),
               marker=marker, s=15, c=c, zorder=5,
               label=r'$\rm ' + id + r'\ (' + o['ref'] + r')\ z=' + str(o['z']) + '$')

    if o['EW'][1] > 0:
        ax.plot([o['L1500'] + np.log10(conversion_fac)]*2,
                [np.log10(o['EW'][0] - o['EW'][1]), np.log10(o['EW'][0] + o['EW'][1])],
                c=c, lw=1)
    else:
        ax.arrow(o['L1500'] + np.log10(conversion_fac), np.log10(o['EW'][0]),
                 0.0, -0.07, color=c, head_width=0.05, head_length=0.01)
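
An alternative to the manual ax.arrow call above is matplotlib's built-in limit handling in errorbar, where uplims=True draws a downward-pointing arrow at the data point. A minimal sketch with placeholder values (x_obs and ew_obs are hypothetical, not from the script):

import numpy as np
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
x_obs, ew_obs = 29.2, 1.1  # hypothetical point in the plotted units
ax.errorbar(x_obs, np.log10(ew_obs), yerr=0.07, uplims=True,
            fmt='o', ms=4, color='forestgreen', capsize=2)
plt.show()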


dat1 = get_line_all(tag, 'CIII1907', inp = 'FLARES', LF = False, Type = 'Intrinsic')
dat2 = get_line_all(tag, 'CIII1909', inp = 'FLARES', LF = False, Type = 'Intrinsic')
l_fuvs = get_lum_all(tag, LF=False)

ws = np.array([])
for jj in sims:
    ws = np.append(ws, np.ones(np.shape(l_fuvs[jj]))*weights[jj])
l_fuvs = np.concatenate(l_fuvs)*conversion_fac

CIII1907_lum = np.concatenate(dat1[:,0])
CIII1907_EW = np.concatenate(dat1[:,1])

CIII1909_lum = np.concatenate(dat2[:,0])
CIII1909_EW = np.concatenate(dat2[:,1])


x, y, w = np.log10(l_fuvs), CIII1907_EW+CIII1909_EW, ws
thisok = np.where(y>0)
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

if rank == 0:
    # receive buffer for the reduced result; only meaningful on the root rank
    hists = np.zeros((len(facs), len(bincen)))
    hist = np.zeros((len(facs), len(bincen)))
else:
    hists = np.empty((len(facs), len(bincen)))
    # each rank fills only its own rows, so the send buffer must start at zero
    hist = np.zeros((len(facs), len(bincen)))

# split the scaling factors across the MPI ranks; the last rank takes the remainder
part = len(facs) // size

if rank != size - 1:
    thisok = np.arange(rank*part, (rank+1)*part, 1).astype(int)
else:
    thisok = np.arange(rank*part, len(facs), 1).astype(int)

for ii, jj in enumerate(thisok):

    data = get_lum_all(facs[jj], tags[0], bins = bins, LF = True)
    hist[jj] = data/(binwidth*vol)

comm.Reduce(hist, hists, op=MPI.SUM, root=0)

if rank == 0:
    np.savez('UVLF_z{}.npz'.format(z), UVLF = hists, UVMag = bincen, fac = facs)
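
Only rank 0 ends up with the reduced histograms in the .npz file. A minimal sketch of reading it back, using z = 5 purely as an example of the filename pattern above:

import numpy as np

data = np.load('UVLF_z5.npz')  # hypothetical filename for z = 5
uvlf, mags, facs = data['UVLF'], data['UVMag'], data['fac']
print(uvlf.shape, mags.shape, facs.shape)
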
Example #7
    # choose a colormap
    c_m = matplotlib.cm.viridis_r

    # create a ScalarMappable and initialize a data structure
    s_m = matplotlib.cm.ScalarMappable(cmap=c_m, norm=norm)
    s_m.set_array([])

    for ii, z in enumerate(zs):
        df = pd.read_csv('Magnitude_limits.txt')
        low = np.array(df[filters])[ii]
        bins = -np.arange(-low, 26, 0.5)[::-1]
        bincen = (bins[1:] + bins[:-1]) / 2.
        binwidth = bins[1:] - bins[:-1]
        parent_volume = (3200)**3

        out, hist, err = get_lum_all(tags[ii], bins=bins)

        Msim = out / (binwidth * vol)
        xerr = np.ones(len(out)) * binwidth[0] / 2.
        yerr = err / (vol * binwidth)
        # bins containing a single galaxy are flagged and later shown as upper limits
        mask = np.where(hist == 1)[0]
        uplims = np.zeros(len(bincen))
        uplims[mask] = True
        y_lo = np.log10(Msim) - np.log10(Msim - yerr)
        y_up = np.log10(Msim + yerr) - np.log10(Msim)
        y_lo[mask] = 4.

        observed = Msim * (binwidth * parent_volume)
        sigma = observed / np.sqrt(hist)

        yy = fit_function(model, observed, sigma, bins, z)
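
fit_function and model are defined elsewhere; if the luminosity-function model is a Schechter function, its absolute-magnitude form could be written as below. This parametrisation is an assumption, not taken from the code:

import numpy as np

def schechter_M(M, phi_star, M_star, alpha):
    # phi(M) in Mpc^-3 mag^-1 for a Schechter function in magnitudes
    x = 10**(0.4 * (M_star - M))
    return 0.4 * np.log(10) * phi_star * x**(alpha + 1) * np.exp(-x)
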
    ext_curves = ['Default', 'Calzetti1.0', 'SMC1.0', 'N181.0']
    labels = ['Default', 'Calzetti', 'SMC', 'N18']
    colors = ['black', 'brown', 'grey', 'orange']

    ws = np.array([])
    for jj in range(40):  # one weight per FLARES resimulation region
        ws = np.append(ws, np.ones(np.shape(mstar[jj])) * weights[jj])
    mstar = np.concatenate(mstar)

    for ii, curves in enumerate(ext_curves):
        if ii==0:
            dat1 = get_line_all(tag, 'OIII4959', inp = 'FLARES', LF = False)
            dat2 = get_line_all(tag, 'OIII5007', inp = 'FLARES', LF = False)
            dat3 = get_line_all(tag, 'HI4861', inp = 'FLARES', LF = False)
            l_fuvs = np.concatenate(get_lum_all(tag, LF=False))

            OIII4959_lum = np.concatenate(dat1[:,0])
            OIII4959_EW = np.concatenate(dat1[:,1])

            OIII5007_lum = np.concatenate(dat2[:,0])
            OIII5007_EW = np.concatenate(dat2[:,1])

            Hbeta_lum = np.concatenate(dat3[:,0])
            Hbeta_EW = np.concatenate(dat3[:,1])
        else:
            OIII4959_lum = np.concatenate(get_all(f'BPASS_2.2.1/Chabrier300/Lines/DustModelI_{curves}/OIII4959/Luminosity', tag))
            OIII4959_EW = np.concatenate(get_all(f'BPASS_2.2.1/Chabrier300/Lines/DustModelI_{curves}/OIII4959/EW', tag))

            OIII5007_lum = np.concatenate(get_all(f'BPASS_2.2.1/Chabrier300/Lines/DustModelI_{curves}/OIII5007/Luminosity', tag))
            OIII5007_EW = np.concatenate(get_all(f'BPASS_2.2.1/Chabrier300/Lines/DustModelI_{curves}/OIII5007/EW', tag))
# y = get_hist(0, tags_ref[0], bins = bins, inp = 'REF', LF = True)/(rho_crit_ref * refvol)
# axs.plot(bincen, np.log10(y), lw = 2, ls = 'dashed', color='red', label = 'EAGLE Ref')
#
# y = get_hist(0, tags_ref[0], bins = bins, inp = 'AGNdT9', LF = True)/(rho_crit_ref * AGNdT9vol)
# axs.plot(bincen, np.log10(y), lw = 2, ls = 'dotted', color='red', label = 'EAGLE AGNdT9')

for ii, jj in enumerate(tags):

    df = pd.read_csv('Magnitude_limits.txt')
    low = np.array(df[filters])[ii]
    bins = -np.arange(-low, 25, 0.5)[::-1]
    bincen = (bins[1:] + bins[:-1]) / 2.
    binwidth = bins[1:] - bins[:-1]

    if input == plt_options[1]:
        LFUV = get_lum_all(jj, LF=False, filter='FUV', Luminosity='DustModelI')
        LFUV_int = get_lum_all(jj,
                               LF=False,
                               filter='FUV',
                               Luminosity='Intrinsic')
        xlabel = r'M$_{1500}$'  #\mathrm{(Intrinsic)}$'
        ylabel = r'A$_{\mathrm{FUV}}$=-2.5 log$_{10}$(L$_{\mathrm{FUV}}^{\mathrm{Observed}}$/L$_{\mathrm{FUV}}^{\mathrm{Intrinsic}}$)'
        ylim = (0., 3.7)
        savename = 'att_env_obs.pdf'
        axs[ii].text(-21.3, 3.2, r'$z = {}$'.format(zs[ii]), fontsize=13)

    if input == plt_options[2]:
        LFUV = get_lum_all(jj, LF=False, filter='FUV', Luminosity='DustModelI')
        LNUV = get_lum_all(jj, LF=False, filter='NUV', Luminosity='DustModelI')
        xlabel = r'M$_{1500}$'  #\mathrm{(Intrinsic)}$'
        ylabel = r'$\beta$'
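
The beta panel implies a UV continuum slope computed from the FUV and NUV luminosities. A sketch of the standard conversion, assuming per-frequency luminosities at effective wavelengths of 1500 and 2500 Angstrom (the wavelengths are an assumption, not read from the code):

import numpy as np

def uv_beta(L_FUV, L_NUV, lam_FUV=1500., lam_NUV=2500.):
    # L_nu proportional to lambda**(beta + 2)
    return np.log10(L_FUV / L_NUV) / np.log10(lam_FUV / lam_NUV) - 2.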