def jackknife_lensums(data, jackreg_col=None, weights=None):
    """
    Jackknife the data.  If regions are sent, use them for jackknifing;
    otherwise jackknife one object at a time.

    parameters
    ----------
    data: array
        An array with fields 'dsum' and 'wsum'.  If the shear style is
        lensfit, 'dsensum' is needed rather than 'wsum'.
    jackreg_col: string, optional
        Column name holding the jackknife region ids.
    weights: array, optional
        Additional weights per lens.

    returns
    -------
    dsig, dsig_cov
        dsig: array
            The delta sigma in radial bins [nrad].
        dsig_cov: array
            The covariance matrix of delta sigma [nrad, nrad].
    """
    import jackknife

    jdsum, jwsum = get_jackknife_sums(data,
                                      weights=weights,
                                      jackreg_col=jackreg_col)

    dsig, dsig_cov = jackknife.wjackknife(vsum=jdsum, wsum=jwsum)

    return dsig, dsig_cov
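
# The sketch below is not part of the original module.  It is a
# reference implementation of what jackknife.wjackknife is assumed to
# compute: a delete-one jackknife mean and covariance of the per-bin
# ratio estimator sum(vsum)/sum(wsum).  The real module may differ in
# detail, and _wjackknife_reference is a hypothetical name.
def _wjackknife_reference(vsum, wsum):
    import numpy

    nlens, nrad = vsum.shape

    vtot = vsum.sum(axis=0)
    wtot = wsum.sum(axis=0)
    mean = vtot / wtot

    # delete-one samples: remove each lens in turn and re-form the ratio
    samples = numpy.zeros((nlens, nrad))
    for i in range(nlens):
        samples[i, :] = (vtot - vsum[i, :]) / (wtot - wsum[i, :])

    # standard jackknife covariance: (n-1)/n times the summed outer
    # products of the deviations from the mean delete-one estimate
    diff = samples - samples.mean(axis=0)
    fac = (nlens - 1.0) / nlens
    cov = fac * numpy.einsum('ki,kj->ij', diff, diff)

    return mean, cov
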
def calc_gmean(data):
    """
    Get the true shear, the jackknifed mean measured shear, and its
    covariance matrix: gtrue, gmeas, gcov.
    """
    import jackknife

    gtrue = data['shear_true'].mean(axis=0)

    wts = get_weights(data)

    # broadcast the per-object weights against both shear components
    wa = wts[:, newaxis]

    jdsum = data['g'] * wa
    if 'g_sens' in data.dtype.names:
        jwsum = data['g_sens'] * wa
    else:
        jwsum = numpy.ones(data['g'].shape) * wa

    gmeas, gcov = jackknife.wjackknife(vsum=jdsum, wsum=jwsum)

    return gtrue, gmeas, gcov
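
# Hypothetical usage sketch for calc_gmean, not part of the original
# module.  The minimal dtype below is invented for illustration and
# assumes get_weights() can operate on it; real catalogs carry more
# fields.
def _example_calc_gmean(n=1000):
    import numpy

    dt = [('shear_true', 'f8', 2), ('g', 'f8', 2), ('g_sens', 'f8', 2)]
    data = numpy.zeros(n, dtype=dt)

    rng = numpy.random.RandomState(0)
    data['shear_true'] = [0.02, 0.00]
    data['g_sens'] = rng.uniform(0.8, 1.0, size=(n, 2))

    # sensitivity-diluted shear plus measurement noise
    data['g'] = (data['g_sens'] * data['shear_true']
                 + 0.01 * rng.normal(size=(n, 2)))

    gtrue, gmeas, gcov = calc_gmean(data)
    return gtrue, gmeas, gcov
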
def average_lensums_weighted(lout, weights_in, jackreg_col=None):
    """
    Average over all the individual lensums with additional weights.

    The covariance matrix is estimated from jackknifing.  If regions are
    sent, use them for jackknifing; otherwise jackknife one object at a
    time.

    parameters
    ----------
    lout: array
        Array containing the outputs from xshear.
    weights_in: array
        Additional weights for each lens.
    jackreg_col: string, optional
        Column name holding the jackknife region ids.
    """
    import jackknife

    nlens = lout.size
    nrad = lout["rsum"][0].size

    if weights_in.size != nlens:
        raise ValueError("weights not same size as lensout, "
                         "%d instead of %d" % (weights_in.size, nlens))

    weights = weights_in.copy()
    weights *= 1.0 / weights.max()

    totweights = weights.sum()

    shear_style = get_shear_style(lout)

    # broadcast the weights against the radial bins
    wa = weights[:, newaxis]

    comb = averaged_struct(nrad, shear_style=shear_style)

    # weight is the weight for the lens.  Call this weightsum to
    # indicate a sum over multiple lenses
    comb["weightsum"] = (lout["weight"] * weights).sum()

    comb["totpairs"] = lout["totpairs"].sum()

    comb["wsum"] = (lout["wsum"] * wa).sum(axis=0)
    comb["npair"] = lout["npair"].sum(axis=0)
    comb["rsum"] = (lout["rsum"] * wa).sum(axis=0)
    comb["dsum"] = (lout["dsum"] * wa).sum(axis=0)
    comb["osum"] = (lout["osum"] * wa).sum(axis=0)

    # averages
    comb["r"] = comb["rsum"] / comb["wsum"]

    if shear_style == "lensfit":
        comb["dsensum"] = (lout["dsensum"] * wa).sum(axis=0)
        comb["osensum"] = (lout["osensum"] * wa).sum(axis=0)

        comb["dsig"] = comb["dsum"] / comb["dsensum"]
        comb["osig"] = comb["osum"] / comb["osensum"]
    else:
        comb["dsig"] = comb["dsum"] / comb["wsum"]
        comb["osig"] = comb["osum"] / comb["wsum"]

    # we calculate boost factors from this, wsum_mean/wsum_mean_random
    comb["wsum_mean"] = comb["wsum"] / totweights

    # jackknife to get the covariance matrix
    m, cov = jackknife_lensums(lout, weights=weights, jackreg_col=jackreg_col)

    comb["dsigcov"][0] = cov
    comb["dsigcor"][0] = jackknife.covar2corr(cov)
    comb["dsigerr"][0] = sqrt(diag(cov))

    # also jackknife the wsums, used for errors on boost factors

    # this will broadcast
    w_wsum_all = lout["wsum"] * wa

    # but here we need the fully expanded version
    weights_big = ones((lout.size, nrad)) * wa

    m, cov = jackknife.wjackknife(vsum=w_wsum_all, wsum=weights_big)
    comb["wsum_mean_err"][0] = sqrt(diag(cov))

    return comb
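
# Small numpy demo, illustrative only, of the weights[:, newaxis]
# broadcasting used above: multiplying an [nlens, nrad] array by an
# [nlens, 1] column applies one weight per lens across all radial bins.
def _demo_weight_broadcast():
    import numpy

    sums = numpy.arange(6.0).reshape(3, 2)   # [nlens=3, nrad=2]
    weights = numpy.array([1.0, 0.5, 2.0])   # one weight per lens

    weighted = sums * weights[:, numpy.newaxis]  # shape [3, 2]

    # equivalent loop form, as in the *_slow functions below
    expected = sums.copy()
    for i in range(sums.shape[0]):
        expected[i, :] *= weights[i]

    assert numpy.allclose(weighted, expected)
    return weighted
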
def average_lensums_weighted_slow(lout, weights):
    """
    Reduce the lens-by-lens lensums by summing over all the individual
    sums and producing averages.
    """
    import jackknife

    nlens = lout.size
    nrad = lout["rsum"][0].size

    if weights.size != nlens:
        raise ValueError("weights not same size as lensout, "
                         "%d instead of %d" % (weights.size, nlens))

    totweights = weights.sum()

    if "dsensum" in lout.dtype.names:
        shear_style = "lensfit"
    else:
        shear_style = "reduced"

    comb = averaged_struct(nrad, shear_style=shear_style)

    # weight is the weight for the lens.  Call this weightsum to
    # indicate a sum over multiple lenses
    comb["weightsum"][0] = (lout["weight"] * weights).sum()

    # should not use this for anything, since the weights
    # make it non-integer
    comb["totpairs"][0] = lout["totpairs"].sum()

    # these will get the extra weight; we must make copies, since we
    # modify them in place below
    jwsum = lout["wsum"].copy()
    jdsum = lout["dsum"].copy()

    for i in range(nrad):
        npair = lout["npair"][:, i].sum()

        w_rsum = (lout["rsum"][:, i] * weights).sum()
        w_wsum = (lout["wsum"][:, i] * weights).sum()
        w_dsum = (lout["dsum"][:, i] * weights).sum()
        w_osum = (lout["osum"][:, i] * weights).sum()

        comb["npair"][0, i] = npair
        comb["rsum"][0, i] = w_rsum
        comb["wsum"][0, i] = w_wsum
        comb["dsum"][0, i] = w_dsum
        comb["osum"][0, i] = w_osum

        # averages
        comb["r"][0, i] = w_rsum / w_wsum

        jdsum[:, i] *= weights
        if shear_style == "lensfit":
            # the sensitivity sums get the extra weight as well
            comb["dsensum"][0, i] = (lout["dsensum"][:, i] * weights).sum()
            comb["osensum"][0, i] = (lout["osensum"][:, i] * weights).sum()

            comb["dsig"][0, i] = w_dsum / comb["dsensum"][0, i]
            comb["osig"][0, i] = w_osum / comb["osensum"][0, i]

            jwsum[:, i] = lout["dsensum"][:, i] * weights
        else:
            comb["dsig"][0, i] = w_dsum / w_wsum
            comb["osig"][0, i] = w_osum / w_wsum

            jwsum[:, i] = lout["wsum"][:, i] * weights

        # this is the average wsum over lenses; we calculate the
        # clustering correction from this, wsum_mean/wsum_mean_random
        comb["wsum_mean"][0, i] = w_wsum / totweights

    # jwsum will be wsum*weights or dsensum*weights
    m, cov = jackknife.wjackknife(vsum=jdsum, wsum=jwsum)

    comb["dsigcov"][0, :, :] = cov
    comb["dsigcor"][0, :, :] = jackknife.covar2corr(cov)
    comb["dsigerr"][0, :] = sqrt(diag(cov))

    # make weights in the shape of wsum
    w = zeros(lout["wsum"].shape)
    w_wsum = zeros(lout["wsum"].shape)
    for i in range(lout.size):
        w_wsum[i, :] = lout["wsum"][i, :] * weights[i]
        w[i, :] = weights[i]

    m, cov = jackknife.wjackknife(vsum=w_wsum, wsum=w)
    comb["wsum_mean_err"][0, :] = sqrt(diag(cov))

    return comb
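
# Tiny demo, illustrative only: the fast weighted path above rescales
# the weights by 1/weights.max() while this slow version does not.
# Ratio quantities such as dsig are unchanged by any such rescaling,
# so the two paths agree on them.
def _demo_weight_scale_invariance():
    import numpy

    rng = numpy.random.RandomState(1)
    dsum = rng.normal(size=10)
    wsum = rng.uniform(1, 2, size=10)
    weights = rng.uniform(0.5, 1.5, size=10)

    dsig1 = (dsum * weights).sum() / (wsum * weights).sum()

    scaled = weights / weights.max()
    dsig2 = (dsum * scaled).sum() / (wsum * scaled).sum()

    assert numpy.allclose(dsig1, dsig2)
    return dsig1, dsig2
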
def average_lensums_slow(lout, weights=None):
    """
    Combine the lens-by-lens lensums by summing over all the individual
    sums and producing averages.

    This uses the averaged_dtype.

    This is used by the binner routines.
    """
    import jackknife

    if weights is not None:
        return average_lensums_weighted(lout, weights)

    nlens = lout.size
    nrad = lout["rsum"][0].size

    if "dsensum" in lout.dtype.names:
        shear_style = "lensfit"
    else:
        shear_style = "reduced"

    comb = averaged_struct(nrad, shear_style=shear_style)

    # weight is the weight for the lens.  Call this weightsum to
    # indicate a sum over multiple lenses
    comb["weightsum"] = lout["weight"].sum(axis=0)
    comb["totpairs"] = lout["totpairs"].sum(axis=0)

    for i in range(nrad):
        npair = lout["npair"][:, i].sum()
        rsum = lout["rsum"][:, i].sum()
        wsum = lout["wsum"][:, i].sum()
        dsum = lout["dsum"][:, i].sum()
        osum = lout["osum"][:, i].sum()

        comb["npair"][0, i] = npair
        comb["rsum"][0, i] = rsum
        comb["wsum"][0, i] = wsum
        comb["dsum"][0, i] = dsum
        comb["osum"][0, i] = osum

        # averages
        comb["r"][0, i] = rsum / wsum

        if shear_style == "lensfit":
            comb["dsensum"][0, i] = lout["dsensum"][:, i].sum()
            comb["osensum"][0, i] = lout["osensum"][:, i].sum()

            comb["dsig"][0, i] = dsum / comb["dsensum"][0, i]
            comb["osig"][0, i] = osum / comb["osensum"][0, i]
        else:
            comb["dsig"][0, i] = dsum / wsum
            comb["osig"][0, i] = osum / wsum

        comb["wsum_mean"][0, i] = wsum / nlens

    if shear_style == "lensfit":
        m, cov = jackknife.wjackknife(vsum=lout["dsum"], wsum=lout["dsensum"])
    else:
        m, cov = jackknife.wjackknife(vsum=lout["dsum"], wsum=lout["wsum"])

    comb["dsigcov"][0, :, :] = cov
    comb["dsigcor"][0, :, :] = jackknife.covar2corr(cov)
    comb["dsigerr"][0, :] = sqrt(diag(cov))

    w = numpy.ones(lout["wsum"].shape)
    m, cov = jackknife.wjackknife(vsum=lout["wsum"], wsum=w)
    comb["wsum_mean_err"][0, :] = sqrt(diag(cov))

    return comb
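
# Illustrative check, not part of the original module: the per-bin loop
# above accumulates the same sums as the vectorized .sum(axis=0) used
# in the fast average_lensums below.
def _demo_loop_vs_axis_sum():
    import numpy

    rng = numpy.random.RandomState(2)
    rsum = rng.uniform(size=(5, 3))   # [nlens, nrad]

    looped = numpy.zeros(3)
    for i in range(3):
        looped[i] = rsum[:, i].sum()

    assert numpy.allclose(looped, rsum.sum(axis=0))
    return looped
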
def average_lensums(lout, weights=None, jackreg_col=None):
    """
    Average over all the individual lensums.

    The covariance matrix is estimated from jackknifing.  If regions are
    sent, use them for jackknifing; otherwise jackknife one object at a
    time.

    parameters
    ----------
    lout: array
        Array containing the outputs from xshear.
    weights: array, optional
        Optional weights.
    jackreg_col: string, optional
        Column name holding the jackknife region ids.
    """
    import jackknife

    if weights is not None:
        print(" using extra weights in averages")
        return average_lensums_weighted(lout, weights,
                                        jackreg_col=jackreg_col)

    nlens = lout.size
    nrad = lout["rsum"][0].size

    shear_style = get_shear_style(lout)

    comb = averaged_struct(nrad, shear_style=shear_style)

    comb["nlenses"][0] = nlens

    # weight is the weight for the lens.  Call this weightsum to
    # indicate a sum over multiple lenses
    comb["weightsum"][0] = lout["weight"].sum()
    comb["totpairs"][0] = lout["totpairs"].sum()

    comb["npair"][0] = lout["npair"].sum(axis=0)
    comb["rsum"][0] = lout["rsum"].sum(axis=0)
    comb["wsum"][0] = lout["wsum"].sum(axis=0)
    comb["dsum"][0] = lout["dsum"].sum(axis=0)
    comb["osum"][0] = lout["osum"].sum(axis=0)

    # averages
    comb["r"][0] = comb["rsum"][0] / comb["wsum"][0]

    if shear_style == "lensfit":
        comb["dsensum"][0] = lout["dsensum"].sum(axis=0)
        comb["osensum"][0] = lout["osensum"].sum(axis=0)

        comb["dsig"][0] = comb["dsum"][0] / comb["dsensum"][0]
        comb["osig"][0] = comb["osum"][0] / comb["osensum"][0]
    else:
        comb["dsig"][0] = comb["dsum"][0] / comb["wsum"][0]
        comb["osig"][0] = comb["osum"][0] / comb["wsum"][0]

    # this is the average wsum over lenses; we calculate boost factors
    # from this, wsum_mean/wsum_mean_random
    comb["wsum_mean"][0] = comb["wsum"][0] / nlens

    # jackknife to get the covariance matrix
    m, cov = jackknife_lensums(lout, jackreg_col=jackreg_col)

    comb["dsigcov"][0] = cov
    comb["dsigcor"][0] = jackknife.covar2corr(cov)
    comb["dsigerr"][0] = sqrt(diag(cov))

    # also jackknife the wsums, used for errors on boost factors
    w = ones(lout["wsum"].shape)
    m, cov = jackknife.wjackknife(vsum=lout["wsum"], wsum=w)
    comb["wsum_mean_err"][0] = sqrt(diag(cov))

    return comb
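
# Hypothetical usage sketch for average_lensums, not part of the
# original module.  It assumes the helper functions averaged_struct and
# get_shear_style accept this minimal reduced-style dtype; real xshear
# outputs carry more fields, and every value below is fabricated for
# illustration.
def _example_average_lensums(nlens=100, nrad=8):
    import numpy

    dt = [('weight', 'f8'), ('totpairs', 'i8'),
          ('npair', 'i8', nrad), ('rsum', 'f8', nrad),
          ('wsum', 'f8', nrad), ('dsum', 'f8', nrad),
          ('osum', 'f8', nrad)]
    lout = numpy.zeros(nlens, dtype=dt)

    rng = numpy.random.RandomState(3)
    lout['weight'] = 1.0
    lout['npair'] = rng.poisson(100, size=(nlens, nrad))
    lout['wsum'] = rng.uniform(0.5, 1.5, size=(nlens, nrad))
    lout['rsum'] = lout['wsum'] * rng.uniform(0.1, 10.0, size=(nlens, nrad))
    lout['dsum'] = lout['wsum'] * rng.normal(10.0, 1.0, size=(nlens, nrad))
    lout['totpairs'] = lout['npair'].sum(axis=1)

    # jackknife one lens at a time (no region column sent)
    comb = average_lensums(lout)
    return comb
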
def average_lensums_weighted(lout, weights):
    """
    Reduce the lens-by-lens lensums by summing over all the individual
    sums and producing averages.
    """
    import jackknife

    nlens = lout.size
    nbin = lout['rsum'][0].size

    if weights.size != nlens:
        raise ValueError("weights not same size as lensout, "
                         "%d instead of %d" % (weights.size, nlens))

    totweights = weights.sum()

    comb = averaged_struct(nbin)

    # weight is the weight for the lens.  Call this weightsum to
    # indicate a sum over multiple lenses
    comb['weightsum'][0] = (lout['weight'] * weights).sum()

    if 'sshsum' in lout.dtype.names:
        comb['sshsum'][0] = (lout['sshsum'] * weights).sum()
        comb['ssh'] = comb['sshsum'] / comb['weightsum']
    else:
        # this makes ssh unity
        comb['sshsum'][0] = comb['weightsum'].sum()
        comb['ssh'] = comb['sshsum'] / comb['weightsum']

    # should not use this for anything, since the weights
    # make it non-integer
    comb['totpairs'][0] = lout['totpairs'].sum()

    # these will get the extra weight; we must make copies, since we
    # modify them in place below
    jwsum = lout['wsum'].copy()
    jdsum = lout['dsum'].copy()

    for i in range(nbin):
        npair = lout['npair'][:, i].sum()

        rsum = lout['rsum'][:, i].sum()
        w_wsum = (lout['wsum'][:, i] * weights).sum()
        w_dsum = (lout['dsum'][:, i] * weights).sum()
        w_osum = (lout['osum'][:, i] * weights).sum()

        comb['npair'][0, i] = npair
        comb['rsum'][0, i] = rsum
        comb['wsum'][0, i] = w_wsum
        comb['dsum'][0, i] = w_dsum
        comb['osum'][0, i] = w_osum

        # averages
        tr = rsum / w_wsum
        if tr > 100:
            # probably the old style of output
            comb['r'][0, i] = rsum / weights.sum()
        else:
            comb['r'][0, i] = tr

        comb['dsig'][0, i] = w_dsum / w_wsum
        comb['osig'][0, i] = w_osum / w_wsum

        # this is the average wsum over lenses; we calculate the
        # clustering correction from this, wsum_mean/wsum_mean_random
        comb['wsum_mean'][0, i] = w_wsum / totweights

        jwsum[:, i] *= weights
        jdsum[:, i] *= weights

    m, cov = jackknife.wjackknife(vsum=jdsum, wsum=jwsum)
    comb['dsigcov'][0, :, :] = cov
    comb['dsigcor'][0, :, :] = jackknife.covar2corr(cov)
    comb['dsigerr'][0, :] = sqrt(diag(cov))

    # make weights in the shape of wsum
    w = zeros(lout['wsum'].shape)
    w_wsum = zeros(lout['wsum'].shape)
    for i in range(lout.size):
        w_wsum[i, :] = lout['wsum'][i, :] * weights[i]
        w[i, :] = weights[i]

    m, cov = jackknife.wjackknife(vsum=w_wsum, wsum=w)
    comb['wsum_mean_err'][0, :] = sqrt(diag(cov))

    return comb
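
# Reference sketch, not part of the original module, of the
# covariance-to-correlation conversion that jackknife.covar2corr is
# assumed to perform above: corr[i, j] = cov[i, j] /
# sqrt(cov[i, i] * cov[j, j]).  The real function may differ in detail;
# _covar2corr_reference is a hypothetical name.
def _covar2corr_reference(cov):
    import numpy

    d = numpy.sqrt(numpy.diag(cov))
    return cov / numpy.outer(d, d)
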
def average_lensums(lout, weights=None):
    """
    Combine the lens-by-lens lensums by summing over all the individual
    sums and producing averages.

    This uses the averaged_dtype.

    This is used by the binner routines.
    """
    import jackknife

    if weights is not None:
        return average_lensums_weighted(lout, weights)

    nlens = lout.size
    nbin = lout['rsum'][0].size

    comb = averaged_struct(nbin)

    # weight is the weight for the lens.  Call this weightsum to
    # indicate a sum over multiple lenses
    comb['weightsum'][0] = lout['weight'].sum()

    if 'sshsum' in lout.dtype.names:
        comb['sshsum'][0] = lout['sshsum'].sum()
        comb['ssh'] = comb['sshsum'] / comb['weightsum']
    else:
        # this makes ssh unity
        comb['sshsum'][0] = comb['weightsum'][0]
        comb['ssh'] = comb['sshsum'] / comb['weightsum']

    comb['totpairs'][0] = lout['totpairs'].sum()

    for i in range(nbin):
        npair = lout['npair'][:, i].sum()
        rsum = lout['rsum'][:, i].sum()

        wsum = lout['wsum'][:, i].sum()
        wsum2 = (lout['wsum'][:, i] ** 2).sum()
        dsum = lout['dsum'][:, i].sum()
        osum = lout['osum'][:, i].sum()

        comb['npair'][0, i] = npair
        comb['rsum'][0, i] = rsum
        comb['wsum'][0, i] = wsum
        comb['dsum'][0, i] = dsum
        comb['osum'][0, i] = osum

        # averages
        tr = rsum / wsum
        if tr > 100:
            # probably the old style of output
            comb['r'][0, i] = rsum / npair
        else:
            comb['r'][0, i] = tr

        comb['dsig'][0, i] = dsum / wsum
        comb['osig'][0, i] = osum / wsum
        comb['dsigerr_simple'][0, i] = numpy.sqrt(1.0 / wsum)

        # this is the average wsum over lenses; we calculate the
        # clustering correction from this, wsum_mean/wsum_mean_random
        wsum_mean = wsum / nlens
        comb['wsum_mean'][0, i] = wsum_mean
        comb['wsum_mean_err_simple'][0, i] = \
            sqrt(wsum2 / nlens - wsum_mean ** 2) / sqrt(nlens)

    m, cov = jackknife.wjackknife(vsum=lout['dsum'], wsum=lout['wsum'])
    comb['dsigcov'][0, :, :] = cov
    comb['dsigcor'][0, :, :] = jackknife.covar2corr(cov)
    comb['dsigerr'][0, :] = sqrt(diag(cov))

    # turns out this agrees with the simple estimate above
    w = ones(lout['wsum'].shape)
    m, cov = jackknife.wjackknife(vsum=lout['wsum'], wsum=w)
    comb['wsum_mean_err'][0, :] = sqrt(diag(cov))

    return comb
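
# Illustrative check, not part of the original module: for equal
# weights, the delete-one jackknife error on a mean agrees exactly with
# the usual standard error of the mean, consistent with the comment
# above that the jackknifed wsum_mean error agrees with the simple one.
def _demo_jackknife_vs_simple_err(n=200):
    import numpy

    rng = numpy.random.RandomState(4)
    x = rng.normal(size=n)

    # simple standard error of the mean
    simple = x.std(ddof=1) / numpy.sqrt(n)

    # delete-one jackknife error on the mean
    xsum = x.sum()
    loo = (xsum - x) / (n - 1.0)   # leave-one-out means
    jack = numpy.sqrt((n - 1.0) / n * ((loo - loo.mean()) ** 2).sum())

    assert numpy.allclose(simple, jack)
    return simple, jack
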