Example #1
def read(Rdiff, gp):
    if Rdiff != 'median' and Rdiff != 'min1s' and Rdiff != 'max1s':
        print(
            'run grd_metalsplit.py to get the split by metallicity done before reading it in for GravImage'
        )
        exit(1)

    import gr_params
    gpr = gr_params.grParams(gp)

    global Nsample, split, e_split, PM, split_min, split_max
    gpr.fil = gpr.dir + "data/tracers.dat"
    # number of measured tracer stars
    Nsample = bufcount(gpr.fil)
    delim = [0, 22, 3, 3, 6, 4, 3, 5, 6, 6, 7, 5, 6, 5, 6, 5, 6]
    #ID = np.genfromtxt(gpr.fil,skiprows=29,unpack=True,usecols=(0,1),delimiter=delim)
    if gp.case == 5:
        RAh, RAm, RAs, DEd, DEm, DEs, VHel, e_VHel, Teff, e_Teff, logg, e_logg, Fe, e_Fe, N = np.loadtxt(
            gpr.fil, skiprows=25, unpack=True)
        PM = np.ones(len(RAh))
        split = logg
        e_split = e_logg
        sel = (N > 0)
    else:
        RAh, RAm, RAs, DEd, DEm, DEs, Vmag, VI, VHel, e_VHel, SigFe, e_SigFe, Mg, Mg_err, PM = np.genfromtxt(
            gpr.fil,
            skip_header=29,  # np.genfromtxt uses skip_header, not loadtxt's skiprows
            unpack=True,
            usecols=tuple(range(2, 17)),
            delimiter=delim,
            filling_values=-1)
        split = Mg
        e_split = Mg_err
        sel = (Mg > -1)  # exclude missing data on Mg
    RAh = RAh[sel]
    RAm = RAm[sel]
    RAs = RAs[sel]
    DEd = DEd[sel]
    DEm = DEm[sel]
    DEs = DEs[sel]
    #Vmag = Vmag[sel]
    #VI  = VI[sel]
    VHel = VHel[sel]
    e_VHel = e_VHel[sel]
    if gp.case < 5:
        Mg = Mg[sel]
        Mg_err = Mg_err[sel]
    elif gp.case == 5:
        Teff = Teff[sel]
        e_Teff = e_Teff[sel]
        logg = logg[sel]
        e_logg = e_logg[sel]
        Fe = Fe[sel]
        e_Fe = e_Fe[sel]
        N = N[sel]
    split = split[sel]
    e_split = e_split[sel]
    PM = PM[sel]

    split_min = min(split)  # would be -3, 3 if chosen according to Walker & Penarrubia (2011)
    split_max = max(split)

    # it is not quite that simple, though: each data point carries a measurement
    # error and a membership probability PM, so we smear every value with a
    # Gaussian of width e_split, weight it by PM, and sum the contributions
    # (a sketch of the assumed gh.gauss helper follows after this function)
    x = np.array(np.linspace(split_min, split_max, 100))
    splitdf = np.zeros(100)
    for i in range(len(split)):
        splitdf += PM[i] * gh.gauss(x, split[i], e_split[i])
    splitdf /= sum(PM)

    sig = abs(RAh[0]) / RAh[0]  # sign of the RA coordinates
    RAh = RAh / sig
    xs = 15 * (RAh * 3600 + RAm * 60 + RAs) * sig  # [arcsec], 1 s of RA = 15 arcsec
    sig = abs(DEd[0]) / DEd[0]  # sign of the declination
    DEd = DEd / sig
    ys = (DEd * 3600 + DEm * 60 + DEs) * sig  # [arcsec]
    arcsec = 2. * np.pi / (360. * 60. * 60)  # [rad/arcsec]
    kpc = 1000  # [pc]
    DL = {
        1: lambda x: x * (138),  #+/- 8 for Fornax
        2: lambda x: x * (101),  #+/- 5 for Carina
        3: lambda x: x * (79),  #+/- 4 for Sculptor
        4: lambda x: x * (86),  #+/- 4 for Sextans
        5: lambda x: x * (80)  #+/- 10 for Draco
    }[gp.case](kpc)
    xs *= (arcsec * DL)  # [pc]
    ys *= (arcsec * DL)  # [pc]
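    # worked example: for Fornax (case 1), DL = 138 kpc = 1.38e5 pc, so
    # 1 arcsec corresponds to roughly 1.38e5 * 4.85e-6 ~ 0.67 pc on the sky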

    # alternative: get center of photometric measurements by deBoer
    # for Fornax, we have
    if gp.case == 1:
        com_x = 96203.736358393697
        com_y = -83114.080684733024
        xs = xs - com_x
        ys = ys - com_y
    else:
        # determine com_x, com_y from shrinking sphere
        import gi_centering as grc
        com_x, com_y = grc.com_shrinkcircle_2D(xs, ys)
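        # com_shrinkcircle_2D is assumed to iterate a shrinking circle around the
        # current center of mass, recomputing it from the enclosed stars until it
        # converges (assumed behaviour; the actual routine lives in gi_centering)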

    popass = np.loadtxt(gpr.dir + 'data/popass_' + Rdiff)

    sel1 = (popass == 1)
    sel2 = (popass == 2)
    # radii of all stellar tracers from pop 1 and 2
    R1 = np.sqrt((xs[sel1])**2 + (ys[sel1])**2)
    R2 = np.sqrt((xs[sel2])**2 + (ys[sel2])**2)
    R1.sort()
    R2.sort()
    R0 = np.hstack([R1, R2])
    R0.sort()

    for pop in np.arange(2) + 1:
        if pop == 1:
            Rhalf = R1[len(R1) // 2]  # [pc], radius enclosing half of pop 1
            co = 'blue'
        else:
            Rhalf = R2[len(R2) // 2]  # [pc], radius enclosing half of pop 2
            co = 'red'

    Rmin = min(R0)  # [pc]
    Rmax = max(R0)  # [pc]
    Binmin, Binmax, Rbin = gh.determine_radius(R0, Rmin, Rmax, gp)  # [pc]
    gp.xipol = Rbin  # [pc]
    minr = min(Rbin)  # [pc]
    maxr = max(Rbin)  # [pc]
    Vol = gh.volume_circular_ring(Binmin, Binmax, gp)  # [pc^2]
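    # Vol[i] is assumed to be the projected ring area pi*(Binmax[i]**2 - Binmin[i]**2)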
    totmass_tracers = float(len(R0))  # number of tracer stars (x is the metallicity grid here)
    Rsi = gh.add_errors(R0, gpr.Rerr)  # [pc], gpr.Rerr was in
    tpb = np.zeros(gp.nipol)
    Sig_phot = np.zeros(gp.nipol)
    for i in range(gp.nipol):
        ind1 = np.argwhere(np.logical_and(Rsi >= Binmin[i],
                                          Rsi < Binmax[i])).flatten()  # [1]
        tpb[i] = float(len(ind1))  # [1]
        Sig_phot[i] = float(
            len(ind1)) * totmass_tracers / Vol[i]  # [Munit/pc^2]
    #loglog(gp.xipol, Sig_phot, co)
    #axvline(Rhalf, color=co)
    #xlim([min(gp.xipol), max(gp.xipol)])
    #xlabel(r'$R$')
    #ylabel(r'$\Sigma(R)$')
    #pdb.set_trace()
    # deproject to get 3D nu profiles
    gp.xipol = Rbin
    minr = min(Rbin)  # [pc]
    maxr = max(Rbin)  # [pc]
    gp.xepol = np.hstack(
        [minr / 8., minr / 4., minr / 2., Rbin, 2 * maxr, 4 * maxr,
         8 * maxr])  #[pc]
    gp.xfine = introduce_points_in_between(gp.xepol, gp)
    #pdb.set_trace()
    #Sigdatnu, Sigerrnu = gh.complete_nu(Rbin, Sig_phot, Sig_phot/10., gp.xfine)
    #dummyx,nudatnu,nuerrnu,Mrnu = gip.Sig_NORM_rho(gp.xfine,Sigdatnu,Sigerrnu,gp)
    #nudat = gh.linipollog(gp.xfine, nudatnu, gp.xipol)
    #nuerr = gh.linipollog(gp.xfine, nuerrnu, gp.xipol)
    #loglog(gp.xipol, nudat, co)
    #axvline(Rhalf, color=co)
    #xlim([min(gp.xipol), max(gp.xipol)])
    #xlabel(r'$R$')
    #ylabel(r'$\nu(R)$')
    #plum = 100*gh.plummer(gp.xipol, Rhalf, len(R0))
    #loglog(gp.xipol, plum, color=co, linestyle='--')
    #ylim([min(plum), max(plum)])
    #pdb.set_trace()

    return
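
# The helpers bufcount() and gh.gauss() are called above but not shown in this
# snippet. Minimal sketches of the assumed behaviour (illustrative only, not the
# actual GravImage implementations):
import numpy as np

def bufcount_sketch(filename):
    # count the lines of a data file, assumed to approximate the number of tracers
    with open(filename) as f:
        return sum(1 for _ in f)

def gauss_sketch(x, mu, sigma):
    # normalized Gaussian profile, as assumed for the metallicity smearing above
    return np.exp(-0.5 * ((x - mu) / sigma)**2) / (np.sqrt(2. * np.pi) * sigma)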
Example #2
def run(gp, pop):
    import gr_params
    gpr = gr_params.grParams(gp)
    xall,yall = np.loadtxt(gp.files.get_com_file(0), skiprows=1, \
                           usecols=(0,1), unpack=True)
    # 2*[Rscale0]
    R = np.sqrt(xall**2+yall**2) # [Rscale0]
    # set number and size of (linearly spaced) bins
    Rmin = 0. #[Rscale0]
    Rmax = max(R) if gp.maxR < 0 else 1.0*gp.maxR # [Rscale0]
    R = R[(R<Rmax)] # [Rscale0]
    Binmin, Binmax, Rbin = gh.determine_radius(R, Rmin, Rmax, gp) # [Rscale0]
    gp.xipol = Rbin
    minr = min(Rbin)                           # [pc]
    maxr = max(Rbin)                           # [pc]
    gp.xepol = np.hstack([minr/8., minr/4., minr/2., Rbin, 2*maxr, 4*maxr, 8*maxr]) # [pc]
    Vol = gh.volume_circular_ring(Binmin, Binmax, gp) # [Rscale0^2]
    Rscale0 = gf.read_Xscale(gp.files.get_scale_file(0)) # [pc]
    print('#######  working on component ',pop)
    print('input: ', gp.files.get_com_file(pop))
    # start from data centered on COM already:
    if gf.bufcount(gp.files.get_com_file(pop))<2:
        return
    # only read in data if needed: pops = 1: reuse data from pop=0 part
    x,y = np.loadtxt(gp.files.get_com_file(pop), skiprows=1, usecols=(0,1), unpack = True)
        # [Rscalei], [Rscalei]
    # calculate 2D radius on the skyplane
    R = np.sqrt(x**2+y**2) #[Rscalei]
    Rscalei = gf.read_Xscale(gp.files.get_scale_file(pop)) # [pc]
    # set maximum radius (if gp.maxR is set)
    Rmax = max(R) if gp.maxR<0 else 1.0*gp.maxR # [Rscale0]
    print('Rmax [Rscale0] = ', Rmax)
    sel = (R * Rscalei <= Rmax * Rscale0)
    x = x[sel] # [Rscalei]
    y = y[sel] # [Rscalei]
    R = R[sel] # [Rscalei]
    totmass_tracers = float(len(x)) # [Munit], Munit = 1/star
    Rs = R                   # + possible starting offset, [Rscalei]
    tr = open(gp.files.get_ntracer_file(pop),'w')
    print(totmass_tracers, file=tr)
    tr.close()
    f_Sig, f_nu, f_mass, f_sig, f_kap, f_zeta = gf.write_headers_2D(gp, pop)
    Sig_phot   = np.zeros((gp.nipol, gpr.n))
    # particle selections, shared by density, siglos, kappa and zeta calculations
    tpb       = np.zeros((gp.nipol,gpr.n))
    for k in range(gpr.n):
        Rsi   = gh.add_errors(Rs,   gpr.Rerr)   # [Rscalei]
        for i in range(gp.nipol):
            ind1 = np.argwhere(np.logical_and(Rsi * Rscalei >= Binmin[i] * Rscale0, \
                                          Rsi * Rscalei <  Binmax[i] * Rscale0)).flatten() # [1]
            tpb[i][k] = float(len(ind1)) #[1]
            Sig_phot[i][k] = float(len(ind1))*totmass_tracers/Vol[i] # [Munit/rscale^2]
    # do the following for all populations
    Sig0 = np.sum(Sig_phot[0])/float(gpr.n) # [Munit/Rscale^2]
    Sig0pc = Sig0/Rscale0**2              # [Munit/pc^2]
    gf.write_Sig_scale(gp.files.get_scale_file(pop), Sig0pc, totmass_tracers)

    # calculate density and mass profile, store it
    # ----------------------------------------------------------------------
    P_dens  = np.zeros(gp.nipol)
    P_edens = np.zeros(gp.nipol)
    for b in range(gp.nipol):
        Sig = np.sum(Sig_phot[b])/(1.*gpr.n) # [Munit/Rscale^2]
        tpbb   = np.sum(tpb[b])/float(gpr.n)       # [1], mean number of tracers in bin
        Sigerr = Sig/np.sqrt(tpbb)       # [Munit/Rscale^2], Poissonian error
        # compare data and analytic profile <=> get stellar
        # density or mass ratio from Matt Walker
        if(np.isnan(Sigerr)):
            P_dens[b] = P_dens[b-1]  # [1]
            P_edens[b]= P_edens[b-1] # [1]
        else:
            P_dens[b] = Sig/Sig0   # [1]
            P_edens[b]= Sigerr/Sig0 # [1]
        print(Rbin[b], Binmin[b], Binmax[b], P_dens[b], P_edens[b], file=f_Sig)
        # 3*[rscale], [dens0], [dens0]
        indr = (R<Binmax[b])
        Menclosed = float(np.sum(indr))/totmass_tracers # for normalization to 1#[totmass_tracers]
        Merr = Menclosed/np.sqrt(tpbb) # or artificial Menclosed/10 #[totmass_tracers]
        print(Rbin[b], Binmin[b], Binmax[b], Menclosed, Merr, file=f_mass) # [Rscale0], 2* [totmass_tracers]
    f_Sig.close()
    f_mass.close()

    # deproject Sig to get nu
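    # Sig_INT_rho is assumed to perform the inverse Abel integral
    # nu(r) = -1/pi * int_r^inf (dSig/dR) / sqrt(R^2 - r^2) dR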
    numedi = gip.Sig_INT_rho(Rbin*Rscalei, Sig0pc*P_dens, gp)
    #numin  = gip.Sig_INT_rho(Rbin*Rscalei, Sig0pc*(P_dens-P_edens), gp)
    numax  = gip.Sig_INT_rho(Rbin*Rscalei, Sig0pc*(P_dens+P_edens), gp)
    nu0pc  = numedi[0]
    gf.write_nu_scale(gp.files.get_scale_file(pop), nu0pc)
    nuerr  = numax-numedi
    for b in range(gp.nipol):
        print(Rbin[b], Binmin[b], Binmax[b],\
              numedi[b]/nu0pc, nuerr[b]/nu0pc, \
              file = f_nu)
    f_nu.close()
    # write dummy sig scale, not to be used later on
    maxsiglos = -1. #[km/s]
    fpars = open(gp.files.get_scale_file(pop),'a')
    print(maxsiglos, file=fpars)          #[km/s]
    fpars.close()
Example #3
def run(gp):
    pop = 0
    import gr_params
    gpr = gr_params.grParams(gp)
    xall, yall = np.loadtxt(gp.files.get_com_file(0),
                            skiprows=1,
                            usecols=(0, 1),
                            unpack=True)
    # 2*[Rscale0]
    R = np.sqrt(xall**2 + yall**2)  # [Rscale0]
    # set number and size of (linearly spaced) bins
    Rmin = 0.  # [Rscale0]
    Rmax = max(R) if gp.maxR < 0 else 1.0 * gp.maxR  # [Rscale0]
    R = R[(R < Rmax)]  # [Rscale0]
    Binmin, Binmax, Rbin = gh.determine_radius(R, Rmin, Rmax, gp)  # [Rscale0]
    gp.xipol = Rbin
    minr = min(Rbin)  # [pc]
    maxr = max(Rbin)  # [pc]
    gp.xepol = np.hstack(
        [minr / 8., minr / 4., minr / 2., Rbin, 2 * maxr, 4 * maxr,
         8 * maxr])  # [pc]
    Vol = gh.volume_circular_ring(Binmin, Binmax, gp)  # [Rscale0^2]
    Rscale0 = float(gf.read_Xscale(gp.files.get_scale_file(0)))  # [pc]
    print('#######  working on component ', pop)
    print('input: ', gp.files.get_com_file(pop))
    # start from data centered on COM already:
    if gf.bufcount(gp.files.get_com_file(pop)) < 2:
        return
    # only read in data if needed: pops = 1: reuse data from pop=0 part
    x, y = np.loadtxt(gp.files.get_com_file(pop),
                      skiprows=1,
                      usecols=(0, 1),
                      unpack=True)
    # [Rscalei], [Rscalei]
    # calculate 2D radius on the skyplane
    R = np.sqrt(x**2 + y**2)  #[Rscalei]
    Rscalei = gf.read_Xscale(gp.files.get_scale_file(pop))  # [pc]
    # set maximum radius (if gp.maxR is set)
    Rmax = max(R) if gp.maxR < 0 else 1.0 * gp.maxR  # [Rscale0]
    print('Rmax [Rscale0] = ', Rmax)
    sel = (R * Rscalei <= Rmax * Rscale0)
    x = x[sel]  # [Rscalei]
    y = y[sel]  # [Rscalei]
    R = R[sel]  # [Rscalei]
    totmass_tracers = float(len(x))  # [Munit], Munit = 1/star
    Rs = R  # + possible starting offset, [Rscalei]
    tr = open(gp.files.get_ntracer_file(pop), 'w')
    print(totmass_tracers, file=tr)
    tr.close()
    f_Sig, f_nu, f_mass, f_sig, f_kap, f_zeta = gf.write_headers_2D(gp, pop)
    Sig_phot = np.zeros((gp.nipol, gpr.n))
    # particle selections, shared by density, siglos, kappa and zeta calculations
    tpb = np.zeros((gp.nipol, gpr.n))
    for k in range(gpr.n):
        Rsi = gh.add_errors(Rs, gpr.Rerr)  # [Rscalei]
        for i in range(gp.nipol):
            ind1 = np.argwhere(np.logical_and(Rsi * Rscalei >= Binmin[i] * Rscale0, \
                                          Rsi * Rscalei <  Binmax[i] * Rscale0)).flatten() # [1]
            tpb[i][k] = float(len(ind1))  #[1]
            Sig_phot[i][k] = float(
                len(ind1)) * totmass_tracers / Vol[i]  # [Munit/rscale^2]
    # do the following for all populations
    Sig0 = np.sum(Sig_phot[0]) / float(gpr.n)  # [Munit/Rscale^2]
    Sig0pc = Sig0 / Rscale0**2  # [Munit/pc^2]
    gf.write_Sig_scale(gp.files.get_scale_file(pop), Sig0pc, totmass_tracers)

    # calculate density and mass profile, store it
    # ----------------------------------------------------------------------
    P_dens = np.zeros(gp.nipol)
    P_edens = np.zeros(gp.nipol)
    for b in range(gp.nipol):
        Sig = np.sum(Sig_phot[b]) / (1. * gpr.n)  # [Munit/Rscale^2]
        tpbb = np.sum(tpb[b]) / float(
            gpr.n)  # [1], mean number of tracers in bin
        Sigerr = Sig / np.sqrt(tpbb)  # [Munit/Rscale^2], Poissonian error
        # compare data and analytic profile <=> get stellar
        # density or mass ratio from Matt Walker
        if (np.isnan(Sigerr)):
            P_dens[b] = P_dens[b - 1]  # [1]
            P_edens[b] = P_edens[b - 1]  # [1]
        else:
            P_dens[b] = Sig / Sig0  # [1]
            P_edens[b] = Sigerr / Sig0  # [1]
        print(Rbin[b], Binmin[b], Binmax[b], P_dens[b], P_edens[b], file=f_Sig)
        # 3*[rscale], [dens0], [dens0]
        indr = (R < Binmax[b])
        Menclosed = float(
            np.sum(indr)
        ) / totmass_tracers  # for normalization to 1#[totmass_tracers]
        Merr = Menclosed / np.sqrt(
            tpbb)  # or artificial Menclosed/10 #[totmass_tracers]
        print(Rbin[b], Binmin[b], Binmax[b], Menclosed, Merr,
              file=f_mass)  # [Rscale0], 2* [totmass_tracers]
    f_Sig.close()
    f_mass.close()

    # deproject Sig to get nu
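    # Sig_INT_rho is assumed to perform the inverse Abel integral
    # nu(r) = -1/pi * int_r^inf (dSig/dR) / sqrt(R^2 - r^2) dR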
    numedi = gip.Sig_INT_rho(Rbin * Rscalei, Sig0pc * P_dens, gp)
    #numin  = gip.Sig_INT_rho(Rbin*Rscalei, Sig0pc*(P_dens-P_edens), gp)
    numax = gip.Sig_INT_rho(Rbin * Rscalei, Sig0pc * (P_dens + P_edens), gp)
    nu0pc = numedi[0]
    gf.write_nu_scale(gp.files.get_scale_file(pop), nu0pc)
    nuerr = numax - numedi
    for b in range(gp.nipol):
        print(Rbin[b], Binmin[b], Binmax[b],\
              numedi[b]/nu0pc, nuerr[b]/nu0pc, \
              file = f_nu)
    f_nu.close()
    # write dummy sig scale, not to be used later on
    maxsiglos = -1.  #[km/s]
    fpars = open(gp.files.get_scale_file(pop), 'a')
    print(maxsiglos, file=fpars)  #[km/s]
    fpars.close()
Example #4
def read(Rdiff, gp):
    if Rdiff != 'median' and Rdiff != 'min1s' and Rdiff != 'max1s':
        print('run grd_metalsplit.py to get the split by metallicity done before reading it in for GravImage')
        exit(1)

    import gr_params
    gpr = gr_params.grParams(gp)

    global Nsample, split, e_split, PM, split_min, split_max
    gpr.fil = gpr.dir+"data/tracers.dat"
    # number of measured tracer stars
    Nsample = bufcount(gpr.fil)
    delim = [0,22,3,3,6,4,3,5,6,6,7,5,6,5,6,5,6]
    #ID = np.genfromtxt(gpr.fil,skiprows=29,unpack=True,usecols=(0,1),delimiter=delim)
    if gp.case==5:
        RAh,RAm,RAs,DEd,DEm,DEs,VHel,e_VHel,Teff,e_Teff,logg,e_logg,Fe,e_Fe,N=np.loadtxt(gpr.fil, skiprows=25, unpack=True)
        PM = np.ones(len(RAh))
        split = logg
        e_split = e_logg
        sel = (N>0)
    else:
        RAh,RAm,RAs,DEd,DEm,DEs,Vmag,VI,VHel,e_VHel,SigFe,e_SigFe, Mg,Mg_err,PM = np.genfromtxt(gpr.fil, skip_header=29, unpack=True, usecols=tuple(range(2,17)), delimiter=delim, filling_values=-1) # np.genfromtxt uses skip_header, not skiprows
        split = Mg
        e_split = Mg_err
        sel = (Mg>-1)  # exclude missing data on Mg
    RAh = RAh[sel]
    RAm = RAm[sel]
    RAs = RAs[sel]
    DEd = DEd[sel]
    DEm = DEm[sel]
    DEs = DEs[sel]
    #Vmag = Vmag[sel]
    #VI  = VI[sel]
    VHel = VHel[sel]
    e_VHel = e_VHel[sel]
    if gp.case < 5:
        Mg = Mg[sel]
        Mg_err = Mg_err[sel]
    elif gp.case == 5:
        Teff = Teff[sel]
        e_Teff = e_Teff[sel]
        logg = logg[sel]
        e_logg = e_logg[sel]
        Fe = Fe[sel]
        e_Fe = e_Fe[sel]
        N = N[sel]
    split = split[sel]
    e_split = e_split[sel]
    PM = PM[sel]

    split_min = min(split) # would be -3, 3 if chosen according to Walker & Penarrubia (2011)
    split_max = max(split)

    # it is not quite that simple, though: each data point carries a measurement
    # error and a membership probability PM, so we smear every value with a
    # Gaussian of width e_split, weight it by PM, and sum the contributions
    # (gh.gauss is assumed to be a normalized Gaussian; cf. the sketch after Example #1)
    x = np.array(np.linspace(split_min, split_max, 100))
    splitdf = np.zeros(100)
    for i in range(len(split)):
        splitdf += PM[i]*gh.gauss(x, split[i], e_split[i])
    splitdf /= sum(PM)

    sig = abs(RAh[0])/RAh[0]                # sign of the RA coordinates
    RAh = RAh/sig
    xs = 15*(RAh*3600+RAm*60+RAs)*sig       # [arcsec], 1 s of RA = 15 arcsec
    sig = abs(DEd[0])/DEd[0]                # sign of the declination
    DEd = DEd/sig
    ys = (DEd*3600+DEm*60+DEs)*sig          # [arcsec]
    arcsec = 2.*np.pi/(360.*60.*60) # [rad/arcsec]
    kpc = 1000 # [pc]
    DL = {1: lambda x: x * (138),#+/- 8 for Fornax
          2: lambda x: x * (101),#+/- 5 for Carina
          3: lambda x: x * (79), #+/- 4 for Sculptor
          4: lambda x: x * (86), #+/- 4 for Sextans
          5: lambda x: x * (80)  #+/- 10 for Draco
      }[gp.case](kpc)
    xs *= (arcsec*DL) # [pc]
    ys *= (arcsec*DL) # [pc]
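    # worked example: for Fornax (case 1), DL = 138 kpc = 1.38e5 pc, so
    # 1 arcsec corresponds to roughly 1.38e5 * 4.85e-6 ~ 0.67 pc on the sky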

    # alternative: get center of photometric measurements by deBoer
    # for Fornax, we have
    if gp.case == 1:
        com_x = 96203.736358393697
        com_y = -83114.080684733024
        xs = xs-com_x
        ys = ys-com_y
    else:
        # determine com_x, com_y from shrinking sphere
        import gi_centering as grc
        com_x, com_y = grc.com_shrinkcircle_2D(xs, ys)
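        # com_shrinkcircle_2D is assumed to iterate a shrinking circle around the
        # current center of mass, recomputing it from the enclosed stars until it
        # converges (assumed behaviour; the actual routine lives in gi_centering)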

    popass = np.loadtxt(gpr.dir+'data/popass_'+Rdiff)

    sel1 = (popass==1)
    sel2 = (popass==2)
    # radii of all stellar tracers from pop 1 and 2
    R1 = np.sqrt((xs[sel1])**2 + (ys[sel1])**2)
    R2 = np.sqrt((xs[sel2])**2 + (ys[sel2])**2)
    R1.sort()
    R2.sort()
    R0 = np.hstack([R1, R2])
    R0.sort()

    for pop in np.arange(2)+1:
        if pop == 1:
            Rhalf = R1[len(R1)//2]  # [pc], radius enclosing half of pop 1
            co = 'blue'
        else:
            Rhalf = R2[len(R2)//2]  # [pc], radius enclosing half of pop 2
            co = 'red'

    Rmin = min(R0) # [pc]
    Rmax = max(R0) # [pc]
    Binmin, Binmax, Rbin = gh.determine_radius(R0, Rmin, Rmax, gp) # [pc]
    gp.xipol = Rbin # [pc]
    minr = min(Rbin)# [pc]
    maxr = max(Rbin)# [pc]
    Vol = gh.volume_circular_ring(Binmin, Binmax, gp) # [pc^2]
    totmass_tracers = float(len(R0)) # number of tracer stars (x is the metallicity grid here)
    Rsi   = gh.add_errors(R0, gpr.Rerr)   # [pc], gpr.Rerr was in
    tpb = np.zeros(gp.nipol)
    Sig_phot = np.zeros(gp.nipol)
    for i in range(gp.nipol):
        ind1 = np.argwhere(np.logical_and(Rsi >= Binmin[i], Rsi <  Binmax[i])).flatten() # [1]
        tpb[i] = float(len(ind1)) # [1]
        Sig_phot[i] = float(len(ind1))*totmass_tracers/Vol[i] # [Munit/pc^2]
    #loglog(gp.xipol, Sig_phot, co)
    #axvline(Rhalf, color=co)
    #xlim([min(gp.xipol), max(gp.xipol)])
    #xlabel(r'$R$')
    #ylabel(r'$\Sigma(R)$')
    #pdb.set_trace()
    # deproject to get 3D nu profiles
    gp.xipol = Rbin
    minr = min(Rbin)                           # [pc]
    maxr = max(Rbin)                           # [pc]
    gp.xepol =np.hstack([minr/8.,minr/4.,minr/2.,Rbin,2*maxr,4*maxr,8*maxr])#[pc]
    gp.xfine = introduce_points_in_between(gp.xepol, gp)
    #pdb.set_trace()
    #Sigdatnu, Sigerrnu = gh.complete_nu(Rbin, Sig_phot, Sig_phot/10., gp.xfine)
    #dummyx,nudatnu,nuerrnu,Mrnu = gip.Sig_NORM_rho(gp.xfine,Sigdatnu,Sigerrnu,gp)
    #nudat = gh.linipollog(gp.xfine, nudatnu, gp.xipol)
    #nuerr = gh.linipollog(gp.xfine, nuerrnu, gp.xipol)
    #loglog(gp.xipol, nudat, co)
    #axvline(Rhalf, color=co)
    #xlim([min(gp.xipol), max(gp.xipol)])
    #xlabel(r'$R$')
    #ylabel(r'$\nu(R)$')
    #plum = 100*gh.plummer(gp.xipol, Rhalf, len(R0))
    #loglog(gp.xipol, plum, color=co, linestyle='--')
    #ylim([min(plum), max(plum)])
    #pdb.set_trace()

    return
Example #5
def run(gp):
    import gr_params

    gpr = gr_params.grParams(gp)
    print("scalefile: ", gp.files.get_scale_file(0))
    Rscale0 = gf.read_Xscale(gp.files.get_scale_file(0))  # [pc]
    print("input: ", gp.files.get_com_file(0))
    # start from data centered on COM already:
    x, y, v = np.loadtxt(
        gp.files.get_com_file(0), skiprows=1, usecols=(0, 1, 2), unpack=True
    )  # [Rscalei], [Rscalei], [km/s]

    for pop in range(2):
        # calculate 2D radius on the skyplane
        R = np.sqrt(x ** 2 + y ** 2)  # [Rscalei]
        Rscalei = gf.read_Xscale(gp.files.get_scale_file(pop))  # [pc]
        # set number and size of bins
        Rmin = 0.0  # [rscale]
        Rmax = max(R) if gp.maxR < 0 else float(gp.maxR)  # [Rscale0]

        sel = R * Rscalei < Rmax * Rscale0
        x = x[sel]
        y = y[sel]
        v = v[sel]  # [rscale]
        totmass_tracers = 1.0 * len(x)  # [munit], munit = 1/star

        Binmin, Binmax, Rbin = gh.determine_radius(R, Rmin, Rmax, gp)  # [Rscale0]
        gp.xipol = Rbin
        minr = min(Rbin)  # [pc]
        maxr = max(Rbin)  # [pc]
        gp.xepol = np.hstack([minr / 8.0, minr / 4.0, minr / 2.0, Rbin, 2 * maxr, 4 * maxr, 8 * maxr])  # [pc]
        Vol = gh.volume_circular_ring(Binmin, Binmax, gp)  # [Rscale0^2]

        # rs = gpr.Rerr*np.random.randn(len(r))+r
        Rs = R  # [Rscale] # if no initial offset is wished

        tr = open(gp.files.get_ntracer_file(pop), "w")
        print(totmass_tracers, file=tr)
        tr.close()

        f_Sig, f_nu, f_mass, f_sig, f_kap, f_zeta = gf.write_headers_2D(gp, pop)

        # 30 iterations for getting random picked radius values
        Density = np.zeros((gp.nipol, gpr.n))
        tpb = np.zeros((gp.nipol, gpr.n))
        for k in range(gpr.n):
            Rsi = gh.add_errors(Rs, gpr.Rerr)  # [Rscalei]
            for j in range(gp.nipol):
                ind1 = np.argwhere(
                    np.logical_and(Rsi * Rscalei >= Binmin[j] * Rscale0, Rsi * Rscalei < Binmax[j] * Rscale0)
                ).flatten()  # [1]
                Density[j][k] = float(len(ind1)) / Vol[j] * totmass_tracers  # [munit/Rscale0^2]
                tpb[j][k] = float(len(ind1))  # [1]

        Dens0 = np.sum(Density[0]) / float(gpr.n)  # [Munit/Rscale0^2]
        Dens0pc = Dens0 / Rscale0 ** 2  # [Munit/pc^2]
        gf.write_Sig_scale(gp.files.get_scale_file(pop), Dens0pc, totmass_tracers)

        tpbb0 = np.sum(tpb[0]) / float(gpr.n)  # [1]
        Denserr0 = Dens0 / np.sqrt(tpbb0)  # [Munit/rscale^2]

        p_dens = np.zeros(gp.nipol)
        p_edens = np.zeros(gp.nipol)

        for b in range(gp.nipol):
            Dens = np.sum(Density[b]) / float(gpr.n)  # [Munit/rscale^2]
            tpbb = np.sum(tpb[b]) / float(gpr.n)  # [1]
            Denserr = Dens / np.sqrt(tpbb)  # [Munit/rscale^2]
            if np.isnan(Denserr):
                p_dens[b] = p_dens[b - 1]  # [1]
                p_edens[b] = p_edens[b - 1]  # [1]
            else:
                p_dens[b] = Dens / Dens0  # [1]
                p_edens[b] = Denserr / Dens0  # [1] #100/rbin would be artificial guess

        for b in range(gp.nipol):
            print(Rbin[b], Binmin[b], Binmax[b], p_dens[b], p_edens[b], file=f_Sig)
            # [rscale], [dens0], [dens0]
            indr = R < Binmax[b]
            menclosed = float(np.sum(indr)) / totmass_tracers
            # /totmass_tracers for normalization to 1 at last bin #[totmass_tracers]
            tpbb = np.sum(tpb[b]) / float(gpr.n)  # [1], mean number of tracers in this bin
            merr = menclosed / np.sqrt(tpbb)  # artificial menclosed/10 gives a good approximation #[totmass_tracers]
            print(Rbin[b], Binmin[b], Binmax[b], menclosed, merr, file=f_mass)
            # [rscale], [totmass_tracers], [totmass_tracers]
        f_Sig.close()
        f_mass.close()

        # deproject Sig to get nu
        numedi = gip.Sig_INT_rho(Rbin * Rscalei, Dens0pc * p_dens, gp)
        numin = gip.Sig_INT_rho(Rbin * Rscalei, Dens0pc * (p_dens - p_edens), gp)
        numax = gip.Sig_INT_rho(Rbin * Rscalei, Dens0pc * (p_dens + p_edens), gp)

        nu0pc = numedi[0]
        gf.write_nu_scale(gp.files.get_scale_file(pop), nu0pc)

        nuerr = numax - numedi
        for b in range(gp.nipol):
            print(Rbin[b], Binmin[b], Binmax[b], numedi[b] / nu0pc, nuerr[b] / nu0pc, file=f_nu)
        f_nu.close()

        if gpr.showplots:
            gpr.show_plots_dens_2D(Rbin * Rscalei, p_dens, p_edens, Dens0pc)
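
# gh.add_errors() above scatters the radii before each of the gpr.n binning passes.
# A minimal sketch of the assumed behaviour, consistent with the commented-out line
# "rs = gpr.Rerr*np.random.randn(len(r))+r" above (illustrative only):
import numpy as np

def add_errors_sketch(R, Rerr):
    # add Gaussian scatter of width Rerr to each radius
    return R + Rerr * np.random.randn(len(R))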
Example #6
def run(gp):
    import gr_params
    gpr = gr_params.grParams(gp)
    xall,yall = np.loadtxt(gp.files.get_com_file(0), skiprows=1, usecols=(0,1), unpack=True)
    # 2*[Rscale0]
    R = np.sqrt(xall**2+yall**2) # [Rscale0]
    # set number and size of (linearly spaced) bins
    Rmin = 0. #[Rscale0]
    Rmax = max(R) if gp.maxR < 0 else 1.0*gp.maxR # [Rscale0]
    R = R[(R<Rmax)] # [Rscale0]
    Binmin, Binmax, Rbin = gh.determine_radius(R, Rmin, Rmax, gp) # [Rscale0]
    gp.xipol = Rbin
    minr = min(Rbin) # [pc]
    maxr = max(Rbin) # [pc]
    gp.xepol = np.hstack([minr/8., minr/4., minr/2., Rbin, 2*maxr, 4*maxr, 8*maxr]) # [pc]
    Vol = gh.volume_circular_ring(Binmin, Binmax, gp) # [Rscale0^2]
    Rscale0 = gf.read_Xscale(gp.files.get_scale_file(0)) # [pc]
    for pop in range(gp.pops+1):
        print('#######  working on component ',pop)
        print('input: ', gp.files.get_com_file(pop))
        # exclude second condition if self-consistent approach wished
        if gp.investigate == "obs" and gp.case==1 and pop==0:
            # for Fornax, overwrite first Sigma with deBoer data
            import gr_MCMCbin_for
            gr_MCMCbin_for.run(gp)
            continue
        # start from data centered on COM already:
        if gf.bufcount(gp.files.get_com_file(pop))<2:
            continue
        # only read in data if needed: pops = 1: reuse data from pop=0 part
        if (gp.pops == 1 and pop < 1) or gp.pops == 2 or gp.investigate == 'obs':
            x,y,v = np.loadtxt(gp.files.get_com_file(pop), skiprows=1,usecols=(0,1,2),unpack=True)
            # [Rscalei], [Rscalei], [km/s]
            # calculate 2D radius on the skyplane
            R = np.sqrt(x**2+y**2) #[Rscalei]
            Rscalei = gf.read_Xscale(gp.files.get_scale_file(pop)) # [pc]
            # set maximum radius (if gp.maxR is set)
            Rmax = max(R) if gp.maxR<0 else 1.0*gp.maxR # [Rscale0]
            print('Rmax [Rscale0] = ', Rmax)
            #pdb.set_trace()
            #from pylab import clf, hist, axvline, xlim
            #clf()
            #hist(np.log10(R*Rscalei), 40)
            #for i in range(len(Rbin)):
            #    axvline(np.log10(Rbin[i]*Rscale0))
            #xlim([np.log10(min(gp.xepol*Rscale0)), np.log10(max(gp.xepol*Rscale0))])
            sel = (R * Rscalei <= Rmax * Rscale0)
            x = x[sel]
            y = y[sel]
            v = v[sel]
            R = R[sel] # [Rscalei]
            totmass_tracers = float(len(x)) # [Munit], Munit = 1/star
            Rs = R                   # + possible starting offset, [Rscalei]
            vlos = v                 # + possible starting offset, [km/s]
        tr = open(gp.files.get_ntracer_file(pop),'w')
        print(totmass_tracers, file=tr)
        tr.close()
        f_Sig, f_nu, f_mass, f_sig, f_kap, f_zeta = gf.write_headers_2D(gp, pop)
        if (gp.pops == 1 and pop < 1) or gp.pops == 2 or gp.investigate == 'obs':
            Sig_kin   = np.zeros((gp.nipol, gpr.n))
            siglos    = np.zeros((gp.nipol, gpr.n))
            if gp.usekappa:
                kappa     = np.zeros((gp.nipol, gpr.n))
            if gp.usezeta:
                v2        = np.zeros((gp.nipol, gpr.n))
                v4        = np.zeros((gp.nipol, gpr.n))
                Ntot      = np.zeros(gpr.n)
                zetaa     = np.zeros(gpr.n)
                zetab     = np.zeros(gpr.n)
            # particle selections, shared by density, siglos, kappa and zeta calculations
            tpb       = np.zeros((gp.nipol,gpr.n))
            for k in range(gpr.n):
                Rsi   = gh.add_errors(Rs,   gpr.Rerr)   # [Rscalei]
                vlosi = gh.add_errors(vlos, gpr.vrerr)   # [km/s]
                for i in range(gp.nipol):
                    ind1 = np.argwhere(np.logical_and(Rsi * Rscalei >= Binmin[i] * Rscale0, Rsi * Rscalei <  Binmax[i] * Rscale0)).flatten() # [1]
                    tpb[i][k] = float(len(ind1)) # [1]
                    Sig_kin[i][k] = float(len(ind1))*totmass_tracers/Vol[i] # [Munit/rscale**2]
                    if(len(ind1)<=1):
                        siglos[i][k] = siglos[i-1][k]
                        print('### using last value, missing data')
                        if gp.usekappa:
                            kappa[i][k] = kappa[i-1][k]
                            # attention! should be 0, uses last value
                        if gp.usezeta:
                            v2[i][k] = v2[i-1][k]
                            v4[i][k] = v4[i-1][k]
                    else:
                        siglos[i][k] = meanbiweight(vlosi[ind1], ci_perc=68.4, \
                                                    ci_mean=True, ci_std=True)[1]
                        # [km/s], see BiWeight.py
                        if gp.usekappa:
                            kappa[i][k] = kurtosis(vlosi[ind1], axis=0, \
                                                   fisher=False, bias=False) # [1]
                        if gp.usezeta:
                            ave, adev, sdev, var, skew, curt = gh.moments(vlosi[ind1])
                            v2[i][k] = var
                            v4[i][k] = (curt+3)*var**2
                Sigma = Sig_kin[:,k]
                if gp.usezeta:
                    #pdb.set_trace()
                    Ntot[k] = gh.Ntot(Rbin, Sigma, gp)
                    zetaa[k] = gh.starred(Rbin, v4[:,k], Sigma, Ntot[k], gp)
                    v2denom = (gh.starred(Rbin, v2[:,k], Sigma, Ntot[k], gp))**2
                    zetaa[k] /= v2denom
                    zetab[k] = gh.starred(Rbin, v4[:,k]*Rbin**2, Sigma, Ntot[k], gp)
                    zetab[k] /= v2denom
                    zetab[k] /= (gh.starred(Rbin, Rbin, Sigma, Ntot[k], gp))**2
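                    # zetaa and zetab are assumed to be fourth-moment (virial-shape-like)
                    # statistics: Sigma-weighted projected integrals of v^4 (and v^4*R^2),
                    # normalized by the squared Sigma-weighted integral of v^2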
            if gp.investigate == 'obs' and gp.case < 5:
                Sig_phot = obs_Sig_phot(Binmin, Binmax, Rscale0, Sig_kin, gp, gpr)
            else:
                Sig_phot = Sig_kin
        # do the following for all populations
        Sig0 = np.sum(Sig_phot[0])/float(gpr.n) # [Munit/Rscale^2]
        Sig0pc = Sig0/Rscale0**2              # [Munit/pc^2]
        gf.write_Sig_scale(gp.files.get_scale_file(pop), Sig0pc, totmass_tracers)

        # calculate density and mass profile, store it
        # ----------------------------------------------------------------------
        #tpb0   = np.sum(tpb[0])/float(gpr.n)     # [1]
        #Sigerr0 = Sig0/np.sqrt(tpb0)       # [Munit/Rscale^2]
        P_dens  = np.zeros(gp.nipol)
        P_edens = np.zeros(gp.nipol)
        for b in range(gp.nipol):
            Sig = np.sum(Sig_kin[b])/(1.*gpr.n) # [Munit/Rscale^2]
            tpbb   = np.sum(tpb[b])/float(gpr.n)       # [1], mean number of tracers in bin
            Sigerr = Sig/np.sqrt(tpbb)       # [Munit/Rscale^2], Poissonian error
            # compare data and analytic profile <=> get stellar
            # density or mass ratio from Matt Walker
            if(np.isnan(Sigerr)):
                P_dens[b] = P_dens[b-1]  # [1]
                P_edens[b]= P_edens[b-1] # [1]
            else:
                P_dens[b] = Sig/Sig0   # [1]
                P_edens[b]= Sigerr/Sig0 # [1]
            print(Rbin[b], Binmin[b], Binmax[b], P_dens[b], P_edens[b], file=f_Sig)
            # 3*[rscale], [dens0], [dens0]
            indr = (R<Binmax[b])
            Menclosed = float(np.sum(indr))/totmass_tracers # for normalization to 1#[totmass_tracers]
            Merr = Menclosed/np.sqrt(tpbb) # or artificial Menclosed/10 #[totmass_tracers]
            print(Rbin[b], Binmin[b], Binmax[b], Menclosed, Merr, file=f_mass) # [Rscale0], 2* [totmass_tracers]
        f_Sig.close()
        f_mass.close()
        # deproject Sig to get nu
        numedi = gip.Sig_INT_rho(Rbin*Rscalei, Sig0pc*P_dens, gp)
        #numin  = gip.Sig_INT_rho(Rbin*Rscalei, Sig0pc*(P_dens-P_edens), gp)
        numax  = gip.Sig_INT_rho(Rbin*Rscalei, Sig0pc*(P_dens+P_edens), gp)
        nu0pc  = numedi[0]
        gf.write_nu_scale(gp.files.get_scale_file(pop), nu0pc)
        nuerr  = numax-numedi
        for b in range(gp.nipol):
            print(Rbin[b], Binmin[b], Binmax[b], numedi[b]/nu0pc, nuerr[b]/nu0pc, file = f_nu)
        f_nu.close()
        # calculate and output siglos
        # --------------------------------------------
        p_dvlos = np.zeros(gp.nipol)
        p_edvlos = np.zeros(gp.nipol)
        for b in range(gp.nipol):
            sig = np.sum(siglos[b])/gpr.n #[km/s]
            tpbb = np.sum(tpb[b])/float(gpr.n) #[1]
            if tpbb == 0:
                sigerr = p_edvlos[b-1] #[km/s]
                # attention! uses last error
            else:
                # Poisson error with measurement errors
                #sigerr = sig/np.sqrt(tpbb)
                #sigerr = np.sqrt(sigerr**2+2**2) # 2km/s

                # standard deviation
                #sigerr = stddevbiweight(siglos[b])

                # Poisson error, first guess
                sigerr = sig/np.sqrt(tpbb) #[km/s]
            p_dvlos[b] = sig    #[km/s]
            p_edvlos[b]= sigerr #[km/s]
        maxsiglos = max(p_dvlos) #[km/s]
        print('maxsiglos = ', maxsiglos, '[km/s]')
        fpars = open(gp.files.get_scale_file(pop),'a')
        print(maxsiglos, file=fpars)          #[km/s]
        fpars.close()
        for b in range(gp.nipol):
            print(Rbin[b], Binmin[b], Binmax[b], np.abs(p_dvlos[b]/maxsiglos),\
                  np.abs(p_edvlos[b]/maxsiglos), file=f_sig)
            # 3*[rscale], 2*[maxsiglos]
        f_sig.close()
        # calculate and output kurtosis kappa
        # --------------------------------------------
        if gp.usekappa:
            p_kappa = np.zeros(gp.nipol) # needed for plotting later
            p_ekappa = np.zeros(gp.nipol)
            for b in range(gp.nipol):
                kappavel = np.sum(kappa[b])/gpr.n #[1]
                tpbb = np.sum(tpb[b])/float(gpr.n) #[1]
                if tpbb == 0:
                    kappavelerr = p_edvlos[b-1] #[1]
                    # attention! uses last error
                else:
                    kappavelerr = np.abs(kappavel/np.sqrt(tpbb)) #[1]
                p_kappa[b] = kappavel
                p_ekappa[b] = kappavelerr
                print(Rbin[b], Binmin[b], Binmax[b], \
                      kappavel, kappavelerr, file=f_kap)
                # [rscale], 2*[1]
            f_kap.close()
        # output zetas
        # -------------------------------------------------------------
        if gp.usezeta:
            print(np.median(zetaa), np.median(zetab), file=f_zeta)
            f_zeta.close()
        if gpr.showplots:
            gpr.show_plots_dens_2D(Rbin*Rscalei, P_dens, P_edens, Sig0pc)
            gpr.show_plots_sigma(Rbin*Rscalei, p_dvlos, p_edvlos)
            if gp.usekappa:
                gpr.show_plots_kappa(Rbin*Rscalei, p_kappa, p_ekappa)

        # overwrite Sig profile if photometric data is used
        if gp.investigate == 'obs' and gp.case==1 and pop==1 and not gp.selfconsistentnu:
            import os
            os.system('cp '+gp.files.get_scale_file(0)+' '+gp.files.get_scale_file(1))
            # replace last line with actual maxsiglos from tracer particles
            os.system("sed -i '$s/^.*/"+str(maxsiglos)+"/' "+gp.files.get_scale_file(1))
            os.system('cp '+gp.files.Sigfiles[0]+' '+gp.files.Sigfiles[1])
            continue
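
# meanbiweight() (from BiWeight.py) is used above for a robust per-bin line-of-sight
# velocity dispersion; a minimal, non-robust stand-in for orientation only would be
# the sample standard deviation of the member velocities (illustrative sketch):
import numpy as np

def siglos_simple(vlos_bin):
    # naive velocity dispersion estimate [km/s]; the biweight estimator used above
    # is more robust against outliers and small-number noise
    return np.std(vlos_bin, ddof=1)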