def test_get_pairwise_ksz():
    """Check that the get_pairwise_ksz wrapper reproduces the direct
    pairwise_ksz / pairwise_ksz_uneven_bins calls on a fake catalog."""
    tests_dir = '/'.join((catalogTools.__file__).split('/')[:-2]) + '/tests/'
    ini_path = os.path.join(tests_dir, 'data_toTestAPI/params.ini')
    pars = paramTools.params(ini_path)

    cat = produceFakeCatalog()
    rsep, p_uk = pairwiser.get_pairwise_ksz(cat, pars, multithreading=False)

    # Reference: call the underlying estimator directly with the same inputs.
    tzav = pairwiser.get_tzav(cat.dT.values, cat.z.values, pars.SIGMA_Z)
    if pars.UNEVEN_BINS:
        rsep_ref, p_uk_ref = pairwiser.pairwise_ksz_uneven_bins(
            cat.Dc.values, cat.ra.values, cat.dec.values,
            tzav, cat.dT.values,
            pars.BIN_EDGES,
            multithreading=False)
    else:
        rsep_ref, p_uk_ref = pairwiser.pairwise_ksz(
            cat.Dc.values, cat.ra.values, cat.dec.values,
            tzav, cat.dT.values,
            pars.BIN_SIZE_MPC, pars.N_BINS,
            multithreading=False)

    # Wrapper and direct call must agree to numerical precision.
    assert np.sum((rsep - rsep_ref)**2) < 1e-10
    assert np.sum((p_uk - p_uk_ref)**2) < 1e-10
def test_openConfigFile():
    """Verify that every parameter parsed from the test config file has
    exactly the expected Python type."""
    pars = paramTools.params(testConfigFileFullPath)
    expected_types = [
        ('CAT_FNAME', str),
        ('CAT_QUERY', str),
        ('FITS_FNAME', str),
        ('N_OBJ', int),
        ('NAME', str),
        ('PHOTODISKR', float),
        ('PHOTORINGR', float),
        ('REPROJECT', bool),
        ('REPIXELIZE', bool),
        ('JK_NGROUPS', int),
        ('N_BINS', int),
        ('BIN_SIZE_MPC', float),
        ('SIGMA_Z', float),
    ]
    for attr, expected in expected_types:
        # `type(...) is` (not isinstance) keeps the original exact-type
        # check: e.g. a bool must not satisfy an int expectation.
        assert type(getattr(pars, attr)) is expected
def test_varianceWeighted():
    """Tests variance_weighted_pairwise_ksz against get_pairwise_ksz.

    With unit inverse-variance weights the variance-weighted estimator
    should reduce to the plain pairwise estimator, so the two curves must
    agree to numerical precision.
    """
    testPath = '/'.join((catalogTools.__file__).split('/')[:-2]) + '/tests/'
    testParamFileFullPath = os.path.join(testPath, 'data_toTestAPI/params.ini')
    params = paramTools.params(testParamFileFullPath)

    df = produceFakeCatalog()
    rsep, p_uk = pairwiser.get_pairwise_ksz(df, params, multithreading=False)

    tzav = pairwiser.get_tzav(df.dT.values, df.z.values, params.SIGMA_Z)
    div = np.ones(len(tzav))  # unit weights -> should match unweighted result
    rsep0, p_uk0 = pairwiser.variance_weighted_pairwise_ksz(
        df.Dc.values, df.ra.values,  # noqa
        df.dec.values,
        tzav, df.dT.values,
        div,
        params.BIN_SIZE_MPC,
        params.N_BINS,
        multithreading=False)
    chisq = np.sum((p_uk - p_uk0)**2)
    # BUG FIX: the threshold was 1.0e10, which passes for virtually any
    # output and made the test vacuous. Tightened to 1e-10, matching the
    # tolerance used in test_get_pairwise_ksz.
    assert chisq < 1.0e-10
#!/nfs/user/pag227/miniconda/bin/python import glob import pandas as pd import numpy as np from iskay import wiggly_tee_tools from iskay import paramTools import sys sigma_z = 0.01 N_in_sigma = 20 gaussian_or_square = 'gaussian_conventional' mean_or_median = 'mean' assert len(sys.argv) == 2 # usage: correction paramfile.ini p = paramTools.params(sys.argv[1]) filter_galaxies_by = p.CAT_QUERY # noqa def wiggly_tee_correct(sigma_z, N_in_sigma, gaussian_or_square, mean_or_median, filter_galaxies_by): fnames = glob.glob('ApPhotoResults/*.csv') fnames.sort() df = pd.concat([pd.read_csv(fname) for fname in fnames]) df.query(filter_galaxies_by, inplace=True) # remmeber to edit this later df['ra_rad'] = np.deg2rad(df.ra.values) df['dec_rad'] = np.deg2rad(df.dec.values)
submap = submapTools.getSubmap_originalPixelization( theMap, ra, dec, 4 * photoringR_arcmin / 60.) # noqa stamp = extractStamp(submap, ra, dec, submapSemiWidthR_arcmin, repixelize=params.REPIXELIZE, reprojection=params.REPROJECT) dset[j, :, :] = np.array(stamp) if verbose: bar.update(j + 1) if verbose: bar.finish() f.close() if howMany is not None: print "Processing only %i objects, use howMany=None full cat" % howMany params = paramTools.params('params.ini') mapfnames = {'submaps': params.FITS_FNAME, 'divmaps': params.DIVMAP_FNAME} theMap = mapTools.openMap_remote(fname=mapfnames[dsetName]) df = catalogTools.cat(fname=params.CAT_FNAME, howMany=howMany).df writeSubapsToFile(theMap, df, params.PHOTORINGR, params, dsetName=dsetName, verbose=True)
from iskay import paramTools from iskay import catalogTools import matplotlib.pyplot as plt from iskay import pairwiser import numpy as np import scipy.stats fnames = [ "params_disjoint_bin_lum_gt_04p3_and_06p1_jk.ini", "params_disjoint_bin_lum_gt_06p1_and_07p9_jk.ini", "params_lum_gt_07p9_jk.ini" ] for j in range(len(fnames)): fname = fnames[j] p = paramTools.params(fname) df = catalogTools.preProcessedCat(howMany=None, query=p.CAT_QUERY).df dT = df.dT.values z = df.z.values tzav = pairwiser.get_tzav_fast(dT, z, p.SIGMA_Z) dT_ksz = dT - tzav mean, std = np.mean(dT_ksz), np.std(dT_ksz) plt.figure(figsize=[8, 4.5]) plt.hist(dT_ksz, normed=True, histtype='step', color='black', lw=2,
def produceFakeParams():
    """Load and return the params object from the test ini file shipped
    under tests/data_toTestAPI."""
    package_root = os.path.dirname(os.path.dirname(catalogTools.__file__))
    ini_fullpath = os.path.join(package_root, 'tests',
                                'data_toTestAPI/params.ini')
    return paramTools.params(ini_fullpath)
#!/nfs/user/pag227/miniconda/bin/python from iskay import paramTools import glob p = paramTools.params('./params.ini') TITLE = p.NAME s1 = [] s1.append('# %s\n\n' % p.NAME) s1.append("**fits_fname** = %s \n\n" % p.FITS_FNAME) s1.append("**divmap_fname** = %s \n\n" % p.DIVMAP_FNAME) s1.append("**query** = %s\n\n" % p.CAT_QUERY) s1.append("# Curves\n") s1.append("![](kSZ_velocityCurves.png)\n") s1.append("![](kSZ_velocityCurves_disjoint_bins.png)\n") s1.append("# Covariance Matrices\n") def get_files(token): '''Find files under folder token''' s = [] fnames = glob.glob("./%s/%s_lum*.png" % (token, p.NAME.replace('.', 'p'))) s.append("## Joint bins\n") for fname in fnames: fname = fname.split("/")[-1]
def howManyEntries():
    """Open the catalog referenced by params.ini and return the number of
    entries (rows) it contains."""
    pars = paramTools.params('params.ini')
    catalog = catalogTools.cat(fname=pars.CAT_FNAME).df
    return len(catalog)
#! /nfs/user/pag227/miniconda/bin/python ''' pairwsise kSZ analysis script example for iskay. Usage: iskay_analysis.py param.ini Written by: P. Gallardo. ''' from iskay import paramTools from iskay import catalogTools from iskay import JK import sys param_fname = sys.argv[1] params = paramTools.params(param_fname) df = catalogTools.preProcessedCat(howMany=None, query=params.CAT_QUERY).df jk = JK.JK(df, params, distributed=True)
#!/nfs/user/pag227/miniconda/bin/python '''Usage: arguments: ngroups, N_chnk, r_max N_chunk starts in 1 to make it compatible to sge. N_groups: in how many chunks to split the computation N_chunk: what chunk to compute''' import sys from iskay import pairwise_and_save_data as pw_save from iskay import paramTools import pandas as pd assert len(sys.argv) == 4 N_groups, N_chunk = int(sys.argv[1]), int(sys.argv[2]) - 1 params = paramTools.params(sys.argv[3]) df = pd.read_csv('wtee_corrected_decrements_%s.csv' % params.NAME) pw_save.compute_one_pairwise_chunk_saving_everything_to_lnx1032(df, N_groups, N_chunk, params.BIN_EDGES, MAX_NPAIRS=100000) # noqa