Example 1
def stack(data, delta=False):
    '''Compute the weighted mean of deltas (or of flux/continuum) in observed-frame log-wavelength bins.'''
    nstack = int((forest.lmax - forest.lmin) / forest.dll) + 1
    ll = forest.lmin + sp.arange(nstack) * forest.dll
    st = sp.zeros(nstack)
    wst = sp.zeros(nstack)
    for p in sorted(list(data.keys())):
        for d in data[p]:
            if delta:
                de = d.de
                we = d.we
            else:
                de = d.fl / d.co
                var_lss = forest.var_lss(d.ll)
                eta = forest.eta(d.ll)
                fudge = forest.fudge(d.ll)
                var = 1. / d.iv / d.co**2
                we = 1. / variance(var, eta, var_lss, fudge)

            # Nearest bin on the common grid, then weighted accumulation via bincount
            bins = ((d.ll - forest.lmin) / forest.dll + 0.5).astype(int)
            c = sp.bincount(bins, weights=de * we)
            st[:len(c)] += c
            c = sp.bincount(bins, weights=we)
            wst[:len(c)] += c

    w = wst > 0
    st[w] /= wst[w]
    return ll, st, wst
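The `variance` helper called above is not included in this listing. Judging from its call signature here and from the expanded expression in Example 2 (`var_F * eta + var_lss + fudge / var_F`), a minimal sketch of it could look like the following; treat it as a reconstruction from the call sites, not the package's actual implementation.

def variance(var, eta, var_lss, fudge):
    # Assumed pipeline-variance model, reconstructed from how it is called above:
    # total variance = eta * var_pipe + var_lss + fudge / var_pipe
    return eta * var + var_lss + fudge / var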
Example 2
def stack_flux(data, delta):
    '''Make a weighted sum of flux/delta values in wavelength bins.'''

    nstack = int((forest.lmax - forest.lmin) / forest.dll) + 1
    ll = forest.lmin + sp.arange(nstack) * forest.dll
    st = sp.zeros(nstack)
    wst = sp.zeros(nstack)
    data_bad_cont = []

    # Stack flux & weights, or deltas & weights
    for d in data:
        if d.bad_cont is not None:
            data_bad_cont.append(d)
            continue

        bins = ((d.ll - d.lmin) / d.dll + 0.5).astype(int)
        eta = forest.eta(d.ll)
        var_lss = forest.var_lss(d.ll)
        fudge = forest.fudge(d.ll)

        if delta == 0:
            # convert ivar into normalized ivar (going from flux units to F units)
            ivar_F = d.iv * d.co**2

            # correct this variance, adding the var_lss and eta factors
            var_F = 1. / ivar_F
            var_F_tot = var_F * eta + var_lss + fudge / var_F

            # convert back to flux units
            var_flux_tot = var_F_tot * d.co**2
            we = 1. / var_flux_tot
            c = sp.bincount(bins, weights=d.fl * we)
        else:
            iv = d.iv / eta
            we = iv * d.co**2 / (iv * d.co**2 * var_lss + 1)
            c = sp.bincount(bins, weights=(d.fl / d.co - 1) * we)

        st[:len(c)] += c
        c = sp.bincount(bins, weights=we)
        wst[:len(c)] += c

    w = wst > 0
    st[w] /= wst[w]
    for d in data_bad_cont:
        print("rejected {} due to {}\n".format(d.thid, d.bad_cont))

    return ll, st, wst
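Both examples accumulate weighted sums per wavelength bin with `sp.bincount` and then divide by the summed weights. The pattern can be reproduced in isolation with plain NumPy; the grid parameters and pixel values below are illustrative only, not taken from the examples.

import numpy as np

# Illustrative grid: log10(lambda) start and bin width, plus a toy spectrum.
lmin, dll = 3.556, 3e-4
ll = lmin + np.array([0.1, 0.9, 1.2, 2.6, 3.4]) * dll   # pixel log-wavelengths
de = np.array([0.2, -0.1, 0.3, 0.05, -0.2])             # per-pixel deltas
we = np.array([1.0, 2.0, 1.5, 0.5, 1.0])                # per-pixel weights

# Same binning rule as above: nearest grid index via +0.5 and an int cast.
bins = ((ll - lmin) / dll + 0.5).astype(int)

nstack = 5
st = np.zeros(nstack)
wst = np.zeros(nstack)

c = np.bincount(bins, weights=de * we)   # weighted sum per bin
st[:len(c)] += c
c = np.bincount(bins, weights=we)        # sum of weights per bin
wst[:len(c)] += c

mask = wst > 0
st[mask] /= wst[mask]                    # weighted mean where data exist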
Example 3
def mc(data):
    '''Compute the weighted mean continuum in quasar rest-frame wavelength bins.'''
    nmc = int((forest.lmax_rest - forest.lmin_rest) / forest.dll) + 1
    mcont = sp.zeros(nmc)
    wcont = sp.zeros(nmc)
    ll = forest.lmin_rest + (sp.arange(nmc) + .5) * (forest.lmax_rest -
                                                     forest.lmin_rest) / nmc
    for p in sorted(list(data.keys())):
        for d in data[p]:
            bins = ((d.ll - forest.lmin_rest - sp.log10(1 + d.zqso)) /
                    (forest.lmax_rest - forest.lmin_rest) * nmc).astype(int)
            var_lss = forest.var_lss(d.ll)
            eta = forest.eta(d.ll)
            fudge = forest.fudge(d.ll)
            var = 1. / d.iv / d.co**2
            we = 1 / variance(var, eta, var_lss, fudge)
            c = sp.bincount(bins, weights=d.fl / d.co * we)
            mcont[:len(c)] += c
            c = sp.bincount(bins, weights=we)
            wcont[:len(c)] += c

    w = wcont > 0
    mcont[w] /= wcont[w]
    mcont /= mcont.mean()
    return ll, mcont, wcont
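In `mc` the binning is done in the quasar rest frame: the observed log10(lambda) is shifted by log10(1 + z_qso) and mapped onto `nmc` rest-frame bins. A standalone sketch of that conversion, using illustrative rest-frame limits rather than the values actually stored on `forest`:

import numpy as np

# Illustrative rest-frame limits in log10(Angstrom) and bin width.
lmin_rest, lmax_rest = np.log10(1040.), np.log10(1200.)
dll = 3e-4
nmc = int((lmax_rest - lmin_rest) / dll) + 1

zqso = 2.5
ll_obs = np.log10(np.array([3700., 3900., 4100.]))  # observed-frame pixels

# Shift to the rest frame (log10 lambda_rest = log10 lambda_obs - log10(1 + z))
# and map onto the nmc rest-frame bins, as in mc() above.
bins = ((ll_obs - lmin_rest - np.log10(1 + zqso)) /
        (lmax_rest - lmin_rest) * nmc).astype(int)
print(bins)   # indices into the mean-continuum arrays mcont / wcont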
Example 4
        # Fit the continuum for the stack
        g = data[0]
        d = forest(ll1, st1, wst1, g.thid, g.ra, g.dec, g.zqso, g.plate, g.mjd,
                   g.fid, g.order)
        try:
            d.cont_fit()
        except:
            print('Error fitting continuum: ' + str(g.thid))
            break
        # Create deltas and weights in same bin widths as before
        nstack = int((forest.lmax - forest.lmin) / forest.dll) + 1
        de1 = sp.zeros(nstack)
        bins = ((d.ll - d.lmin) / d.dll + 0.5).astype(int)
        c = sp.bincount(bins, weights=d.fl / d.co - 1)
        de1[:len(c)] += c
        eta = forest.eta(d.ll)
        var_lss = forest.var_lss(d.ll)
        iv = d.iv / eta
        we = iv * d.co**2 / (iv * d.co**2 * var_lss + 1)
        c = sp.bincount(bins, weights=we)
        wst1[:len(c)] += c

        # Get rid of leading and trailing zeros
        w = (wst1 != 0.)
        we1 = wst1[w]
        wei1.append(we1)
        w = (wst2 != 0.)
        we2 = wst2[w]
        wei2.append(we2)

    # Get rid of leading and trailing zeros
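The delta weight used here and in Example 2, `we = iv * d.co**2 / (iv * d.co**2 * var_lss + 1)` with `iv = d.iv / eta`, is algebraically the same as `1 / (eta * var_pipe + var_lss)` with `var_pipe = 1 / (d.iv * d.co**2)`, i.e. the variance model of the earlier examples with the fudge term dropped. A quick numerical check with made-up values:

import numpy as np

# Illustrative values only.
ivar, co = 4.0, 1.3
eta, var_lss = 0.9, 0.2

iv = ivar / eta
we = iv * co**2 / (iv * co**2 * var_lss + 1)

var_pipe = 1.0 / (ivar * co**2)
we_check = 1.0 / (eta * var_pipe + var_lss)

assert np.isclose(we, we_check)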