def main():
    """Read PSF star catalogs and make whisker/hex plots of the observed
    shapes and the residuals against the model (psfex or piff)."""
    args = parse_args()
    print('args = ', args)

    # Make the work directory if it does not exist yet.
    work = os.path.expanduser(args.work)
    print('work dir = ', work)
    try:
        if not os.path.isdir(work):
            os.makedirs(work)
    except OSError as e:
        # Directory may have been created by a concurrent process; best-effort.
        print("Ignore OSError from makedirs(work):")
        print(e)
        pass

    # Column prefix selects which PSF model's measurements to read.
    if args.use_psfex:
        prefix = 'psfex'
    else:
        prefix = 'piff'

    # Exposure list comes either from a file (one per line, '#' comments)
    # or from the explicit command-line listing.
    if args.file != '':
        print('Read file ', args.file)
        with open(args.file) as fin:
            exps = [line.strip() for line in fin if line[0] != '#']
        print('File included %d exposures' % len(exps))
    else:
        exps = args.exps
        print('Explicit listing of %d exposures' % len(exps))
    exps = sorted(exps)

    keys = ['ra', 'dec', 'x', 'y', 'mag',
            'obs_e1', 'obs_e2', 'obs_T',
            prefix + '_e1', prefix + '_e2', prefix + '_T']
    data, bands, tilings = read_data(exps, work, keys,
                                     limit_bands=args.bands, prefix=prefix,
                                     use_reserved=args.use_reserved,
                                     frac=args.frac)

    # Observed star shapes and the model predictions.
    e1 = data['obs_e1']
    e2 = data['obs_e2']
    T = data['obs_T']
    p_e1 = data[prefix + '_e1']
    p_e2 = data[prefix + '_e2']
    p_T = data[prefix + '_T']
    # Residuals; dT is fractional relative to the observed size.
    de1 = e1 - p_e1
    de2 = e2 - p_e2
    dT = (T - p_T) / T

    psf_whiskers(data['ccd'], data['x'], data['y'], e1, e2, T, de1, de2, dT)
    psf_hex(data['ccd'], data['x'], data['y'], e1, e2, T, de1, de2, dT)
def main():
    """Print mean/min/max of the PSF-to-object size ratio (psf_T/T) for the
    metacal galaxy catalog and (piff_T/obs_T) for the bright-star sample."""
    import sys
    sys.path.insert(0, '/home/dfa/sobreira/alsina/alpha-beta-gamma/code/src')
    import numpy as np
    from read_psf_cats import read_data, toList, read_h5
    import h5py as h

    args = parse_args()

    #Reading Metacal
    galkeys = ['ra', 'dec', 'T', 'psf_T']
    data_galaxies = read_h5(args.metacal_cat, 'catalog/metacal/unsheared', galkeys )
    print("Total objects in catalog:", len(data_galaxies))
    # Apply the catalog's own selection mask from the 'index' group.
    f = h.File(args.metacal_cat, 'r')
    index = f['index']
    select = np.array(index['select'])
    data_galaxies = data_galaxies[select]
    print("Total objects after masking", len(data_galaxies))
    Tpsfgal = data_galaxies['psf_T']
    Tgal = data_galaxies['T']
    prior_gal = np.mean(Tpsfgal/Tgal)
    priorgal_max = np.max(Tpsfgal/Tgal)
    priorgal_min = np.min(Tpsfgal/Tgal)
    print('prior_gal=', prior_gal)
    print('prior_gal_min=', priorgal_min)
    print('prior_gal_max=', priorgal_max)

    #Reading Mike stars catalog
    keys = ['ra', 'dec', 'obs_T', 'piff_T', 'mag']
    exps = toList(args.exps_file)
    data_stars, bands, tilings = read_data(exps, args.piff_cat, keys,
                                           limit_bands=args.bands,
                                           use_reserved=args.use_reserved)
    print("Objects", len(data_stars))
    # Keep only bright stars (mag < 20).
    data_stars = data_stars[data_stars['mag'] < 20]
    print("Objects with magnitude <20", len(data_stars))
    Tpsf = data_stars['piff_T']
    Tstars = data_stars['obs_T']
    prior = np.mean(Tpsf/Tstars)
    prior_max = np.max(Tpsf/Tstars)
    prior_min = np.min(Tpsf/Tstars)
    print('prior_stars=', prior)
    print('prior_stars_min=', prior_min)
    print('prior_stars_max=', prior_max)
def main():
    """Run rho statistics on the reserved-star catalog, separately in four
    quadrant patches split at the mean RA/Dec of the sample."""
    import sys
    sys.path.insert(0, '/home/dfa/sobreira/alsina/alpha-beta-gamma/code/src')
    #sys.path.insert(0, '/global/cscratch1/sd/alsina/alpha-beta-gamma/code/src')
    import numpy as np
    from read_psf_cats import read_data, toList
    from run_rho import do_rho_stats

    args = parse_args()

    #Make directory where the ouput data will be
    outpath = os.path.expanduser(args.outpath)
    try:
        if not os.path.exists(outpath):
            os.makedirs(outpath)
    except OSError:
        # Re-raise only if the directory still does not exist (race-safe).
        if not os.path.exists(outpath):
            raise

    #STATISTIC USING ONLY RESERVED STARS
    keys = ['ra', 'dec', 'obs_e1', 'obs_e2', 'obs_T',
            'piff_e1', 'piff_e2', 'piff_T', 'mag']
    exps = toList(args.exps_file)
    data_stars, bands, tilings = read_data(exps, args.piff_cat, keys,
                                           limit_bands=args.bands,
                                           use_reserved=args.use_reserved)
    print("Objects", len(data_stars))
    data_stars = data_stars[data_stars['mag'] < 20]
    print("Objects with magnitude <20", len(data_stars))

    # Split the footprint into four quadrants around the mean position.
    meanra = np.mean(data_stars['ra'])
    meandec = np.mean(data_stars['dec'])
    patchstars = []
    patchstars.append((data_stars['ra'] > meanra) & (data_stars['dec'] > meandec))
    patchstars.append((data_stars['ra'] < meanra) & (data_stars['dec'] > meandec))
    patchstars.append((data_stars['ra'] < meanra) & (data_stars['dec'] < meandec))
    patchstars.append((data_stars['ra'] > meanra) & (data_stars['dec'] < meandec))

    # One rho-stats run per patch; patch number goes into the output name.
    for pat in range(4):
        do_rho_stats(data_stars[patchstars[pat]], bands, tilings, outpath,
                     max_sep=300, sep_units='arcmin',
                     name=today + 'mod_epiff_magcut_sn' + '_patch_' + str(pat + 1),
                     bandcombo=args.bandcombo, mod=args.mod, obs=args.obs,
                     shapenoise=args.sn)
def read_somedata2(catalogpath, expolist):
    """Read the 'use' column for all exposures in *expolist* from the piff
    catalogs under *catalogpath* and dump the result to stars2.fits.

    Parameters
    ----------
    catalogpath : path to the star-catalog directory passed to read_data.
    expolist : path/identifier handed to load_explist to get the exposures.
    """
    from read_psf_cats import read_data
    exps = load_explist(expolist)
    exps = sorted(exps)
    keys = ['use']
    data, bands, tilings = read_data(exps, catalogpath, keys,
                                     limit_bands='riz', prefix='piff',
                                     use_reserved=False, frac=1.)
    file_name = "stars2.fits"
    print('Finished reading data')
    write_fit(data, file_name)
def run_rhos(args, outpath ): from read_psf_cats import read_data, toList #STATISTIC USING ONLY RESERVED STARS keys = ['ra', 'dec','obs_e1', 'obs_e2', 'obs_T', 'piff_e1', 'piff_e2', 'piff_T', 'mag'] exps = toList(args.exps_file) data, bands, tilings = read_data(exps, args.piff_cat , keys, limit_bands=args.bands, use_reserved=args.use_reserved) print("Objects", len(data)) data = data[data['mag']<20] print("Objects with magnitude <20", len(data)) min_sep_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0] for min_sep in min_sep_list: stats = measure_rho(data, min_sep=min_sep, mod=args.mod, obs=args.obs) stat_file = os.path.join(outpath, "rho_%f.json"%(min_sep)) write_stats(stat_file,*stats)
def main():
    """Read (or load cached) PSF star data, print per-band/per-tiling counts,
    and run the canonical rho statistics."""
    args = parse_args()
    print('args = ', args)

    # Make the work directory if it does not exist yet.
    work = os.path.expanduser(args.work)
    print('work dir = ', work)
    try:
        if not os.path.isdir(work):
            os.makedirs(work)
    except OSError as e:
        print("Ignore OSError from makedirs(work):")
        print(e)
        pass

    if args.use_psfex:
        prefix = 'psfex'
    else:
        prefix = 'piff'

    # Exposure list from file or explicit command-line listing.
    if args.file != '':
        print('Read file ', args.file)
        with open(args.file) as fin:
            exps = [line.strip() for line in fin if line[0] != '#']
        print('File included %d exposures' % len(exps))
    else:
        exps = args.exps
        print('Explicit listing of %d exposures' % len(exps))
    exps = sorted(exps)

    keys = ['ra', 'dec', 'x', 'y',
            'obs_e1', 'obs_e2', 'obs_T',
            prefix + '_e1', prefix + '_e2', prefix + '_T', 'mag']
    if args.use_reserved:
        all_stars = ''
    else:
        # NOTE(review): the flag columns are read only in the all-stars case
        # so the flags can be inspected downstream — confirm nesting.
        all_stars = '_all'
        keys += ['obs_flag', prefix + '_flag']
    out_file_name = os.path.join(
        work, "psf_%s_%s%s.fits" % (args.tag, args.bands, all_stars))

    # Try the cached FITS file first; fall back to a full read on any error.
    data = None
    if not args.write_data:
        try:
            data, bands, tilings = read_data_file(out_file_name)
        except Exception as e:
            print('Caught: ', e)
    if data is None:
        data, bands, tilings = read_data(exps, work, keys,
                                         limit_bands=args.bands, prefix=prefix,
                                         use_reserved=args.use_reserved,
                                         frac=args.frac)
        if args.write_data:
            write_data_file(data, out_file_name)

    print('all bands = ', bands)
    print('all tilings = ', tilings)
    for band in bands:
        print('n for band %s = ' % band, np.sum(data['band'] == band))
    for til in tilings:
        print('n for tiling %d = ' % til, np.sum(data['tiling'] == til))
    gdata = np.where(data['band'] == 'g')[0]
    rdata = np.where(data['band'] == 'r')[0]
    idata = np.where(data['band'] == 'i')[0]
    zdata = np.where(data['band'] == 'z')[0]
    #odddata = np.where(data['tiling']%2 == 1)[0]
    #evendata = np.where(data['tiling']%2 == 0)[0]
    print('len(gdata) = ', len(gdata))
    print('len(rdata) = ', len(rdata))
    print('len(idata) = ', len(idata))
    print('len(zdata) = ', len(zdata))
    #print('len(odddata) = ',len(odddata))
    #print('len(evendata) = ',len(evendata))
    #bands = ['r', 'i']

    do_canonical_stats(data, bands, tilings, work,
                       max_mag=args.max_mag, prefix=prefix, opt=args.opt,
                       subtract_mean=args.subtract_mean, do_rho0=args.do_rho0)
def main():
    """Measure tau statistics between reserved stars and metacal galaxies in
    four jackknife regions, writing the results to a FITS table.

    Fixes over the previous version:
    - 'angar' in the tomographic array_list was a typo for 'angarr' and
      raised NameError at runtime; corrected.
    - the jackknife mask was wrapped in a list ([mask]), which relies on
      deprecated NumPy list-of-array indexing; a plain boolean array is
      used instead (same selection, supported semantics).
    """
    import sys
    sys.path.insert(0, '/home/dfa/sobreira/alsina/alpha-beta-gamma/code/src')
    #sys.path.insert(0, '/global/cscratch1/sd/alsina/alpha-beta-gamma/code/src')
    import numpy as np
    from read_psf_cats import read_data, toList, read_metacal
    import h5py as h

    args = parse_args()

    #Make directory where the ouput data will be
    outpath = os.path.expanduser(args.outpath)
    try:
        if not os.path.exists(outpath):
            os.makedirs(outpath)
    except OSError:
        if not os.path.exists(outpath):
            raise

    # Output table layout: one row per angular bin, tagged by jackknife
    # region (JKR) and angular bin index.
    names = ['JKR', 'ANGBIN', 'THETA',
             'TAU0P', 'TAU0M', 'VAR_TAU0',
             'TAU2P', 'TAU2M', 'VAR_TAU2',
             'TAU5P', 'TAU5M', 'VAR_TAU5']
    forms = ['i4', 'i4', 'f8',
             'f8', 'f8', 'f8',
             'f8', 'f8', 'f8',
             'f8', 'f8', 'f8']
    dtype = dict(names=names, formats=forms)
    nrows = 20
    outdata = np.recarray((nrows, ), dtype=dtype)

    #Reading Mike stars catalog
    keys = ['ra', 'dec', 'obs_e1', 'obs_e2', 'obs_T',
            'piff_e1', 'piff_e2', 'piff_T', 'mag']
    exps = toList(args.exps_file)
    # Small (1%) subsample used only to seed the k-means jackknife regions.
    data_sam, bands, tilings = read_data(exps, args.piff_cat, keys,
                                         limit_bands=args.bands,
                                         use_reserved=args.use_reserved,
                                         frac=0.01)
    data_stars, bands, tilings = read_data(exps, args.piff_cat, keys,
                                           limit_bands=args.bands,
                                           use_reserved=args.use_reserved)
    print("Objects", len(data_stars))
    data_stars = data_stars[data_stars['mag'] < 20]
    print("Objects with magnitude <20", len(data_stars))

    if (args.tomo):
        print('Starting Tomography!')
        galkeys = ['ra', 'dec', 'e_1', 'e_2', 'R11', 'R22']
        nbins = 4
        for bin_c in range(nbins):
            print('Starting bin!', bin_c)
            data_gal = read_metacal(args.metacal_cat, galkeys, zbin=bin_c,
                                    nz_source_file=args.nz_source)
            njk = 4
            ##TODO generate km first an later finnearest
            jkindexes_gals = jk_kmeans(data_sam['ra'], data_sam['dec'],
                                       data_gal['ra'], data_gal['dec'], njk)
            for jkidx in range(njk):
                print("running jackkniffe region", jkidx)
                # Galaxies NOT in this jackknife region.
                booljk = (jkindexes_gals != jkidx)
                tau0, tau2, tau5 = measure_tau(data_stars, data_gal[booljk],
                                               mod=args.mod)
                jkrarr = np.array([jkidx] * nrows)
                angarr = np.arange(nrows)
                thetaarr = np.exp(tau0.meanlogr)
                tau0parr = tau0.xip
                tau2parr = tau2.xip
                tau5parr = tau5.xip
                tau0marr = tau0.xim
                tau2marr = tau2.xim
                tau5marr = tau5.xim
                vartau0arr = 2 * tau0.varxi
                vartau2arr = 2 * tau2.varxi
                vartau5arr = 2 * tau5.varxi
                array_list = [jkrarr, angarr, thetaarr,
                              tau0parr, tau0marr, vartau0arr,
                              tau2parr, tau2marr, vartau2arr,
                              tau5parr, tau5marr, vartau5arr]
                for array, name in zip(array_list, names):
                    outdata[name] = array
                # NOTE(review): the file name depends only on bin_c, so each
                # jackknife iteration overwrites the previous write.
                write_fit(outdata, names,
                          outpath + 'alltaus_4jk_' + str(bin_c + 1) + '_' +
                          str(bin_c + 1) + '.fits')

    # Non-tomographic run over the full catalog.
    galkeys = ['ra', 'dec', 'e_1', 'e_2', 'R11', 'R22']
    data_galaxies = read_metacal(args.metacal_cat, galkeys)
    print("Total objects in catalog:", len(data_galaxies))
    njk = 4
    jkindexes_gals = jk_kmeans(data_sam['ra'], data_sam['dec'],
                               data_galaxies['ra'], data_galaxies['dec'], njk)
    for jkidx in range(njk):
        print("running jackkniffe region", jkidx)
        booljk = (jkindexes_gals != jkidx)
        tau0, tau2, tau5 = measure_tau(data_stars, data_galaxies[booljk],
                                       mod=args.mod)
        jkrarr = np.array([jkidx] * nrows)
        angarr = np.arange(nrows)
        thetaarr = np.exp(tau0.meanlogr)
        tau0parr = tau0.xip
        tau2parr = tau2.xip
        tau5parr = tau5.xip
        tau0marr = tau0.xim
        tau2marr = tau2.xim
        tau5marr = tau5.xim
        vartau0arr = 2 * tau0.varxi
        vartau2arr = 2 * tau2.varxi
        vartau5arr = 2 * tau5.varxi
        array_list = [jkrarr, angarr, thetaarr,
                      tau0parr, tau0marr, vartau0arr,
                      tau2parr, tau2marr, vartau2arr,
                      tau5parr, tau5marr, vartau5arr]
        for array, name in zip(array_list, names):
            outdata[name] = array
        write_fit(outdata, names, outpath + args.filename)
def main():
    """Make PSF whisker plots from the star catalogs; two additional plot
    sections (ngmix, im3shape) are currently disabled with `if False`."""
    args = parse_args()
    print('args = ', args)

    # Make the work directory if it does not exist yet.
    work = os.path.expanduser(args.work)
    print('work dir = ', work)
    try:
        if not os.path.isdir(work):
            os.makedirs(work)
    except OSError as e:
        print("Ignore OSError from makedirs(work):")
        print(e)
        pass

    if args.use_psfex:
        prefix = 'psfex'
    else:
        prefix = 'piff'

    if True:
        # Exposure list from file or explicit command-line listing.
        if args.file != '':
            print('Read file ', args.file)
            with open(args.file) as fin:
                exps = [line.strip() for line in fin if line[0] != '#']
            print('File included %d exposures' % len(exps))
        else:
            exps = args.exps
            print('Explicit listing of %d exposures' % len(exps))
        exps = sorted(exps)

        keys = ['ra', 'dec', 'x', 'y', 'mag',
                'obs_e1', 'obs_e2', 'obs_T',
                prefix + '_e1', prefix + '_e2', prefix + '_T']
        data, bands, tilings = read_data(exps, work, keys,
                                         limit_bands=args.bands, prefix=prefix,
                                         use_reserved=args.use_reserved,
                                         frac=args.frac)

        e1 = data['obs_e1']
        e2 = data['obs_e2']
        T = data['obs_T']
        p_e1 = data[prefix + '_e1']
        p_e2 = data[prefix + '_e2']
        p_T = data[prefix + '_T']
        # Residuals; dT is fractional relative to the observed size.
        de1 = e1 - p_e1
        de2 = e2 - p_e2
        dT = (T - p_T) / T

        #psf_resid(data['mag'], de1, de2, dT)
        psf_whiskers(data['ccd'], data['x'], data['y'], e1, e2, T, de1, de2, dT)

    if False:
        # Disabled: ngmix galaxy whisker plots.
        # NOTE(review): `ccd31 = ccd==31` references `ccd` before the unpack
        # on the next line — this dead code would fail if re-enabled.
        ngmix_data = get_ngmix_epoch_data()
        gal_whiskers(*ngmix_data, filename='ngmix_whiskers.eps', title='ngmix')
        ngmix_data2 = get_ngmix_epoch_data(use_gold=False)
        evscol(*ngmix_data2, filename='ngmix_evscol.eps', title='ngmix')
        ccd31 = ccd == 31
        ccd, x, y, e1, e2, s, w = ngmix_data
        gal_whiskers(ccd[ccd31], x[ccd31], y[ccd31], e1[ccd31], e2[ccd31],
                     s[ccd31], w[ccd31], filename='ngmix_ccd31.eps',
                     scale=5, auto_size=True, title='ccd31')

    if False:
        # Disabled: im3shape galaxy whisker plots.
        im3shape_data = get_im3shape_epoch_data()
        gal_whiskers(*im3shape_data, filename='im3shape_whiskers.eps',
                     title='im3shape')
        im3shape_data2 = get_im3shape_epoch_data(use_gold=False)
        evscol(*im3shape_data2, filename='im3shape_evscol.eps',
               title='im3shape')
def main():
    """For each band selection (r, i, z, riz), print shape/size summary
    statistics and produce magnitude-binned residual plots and histograms."""
    matplotlib.use('Agg')  # needs to be done before import pyplot
    args = parse_args()

    work = os.path.expanduser(args.work)
    print('work dir = ', work)

    outpath = os.path.expanduser(args.outpath)
    try:
        if not os.path.exists(outpath):
            os.makedirs(outpath)
    except OSError:
        if not os.path.exists(outpath):
            raise

    # Exposure list from file or explicit command-line listing.
    if args.file != '':
        print('Read file ', args.file)
        with open(args.file) as fin:
            exps = [line.strip() for line in fin if line[0] != '#']
    else:
        exps = args.exps
    exps = sorted(exps)

    if args.use_psfex:
        prefix = 'psfex'
    else:
        prefix = 'piff'

    keys = ['ra', 'dec', 'x', 'y', 'mag',
            'obs_e1', 'obs_e2', 'obs_T', 'obs_flag',
            prefix + '_e1', prefix + '_e2', prefix + '_T', prefix + '_flag']
    data, bands, tilings = read_data(exps, work, keys,
                                     limit_bands=args.bands, prefix=prefix,
                                     use_reserved=args.use_reserved,
                                     frac=args.frac)

    for bands in ['r', 'i', 'z', 'riz']:
        this_data = data[np.in1d(data['band'], list(bands))]
        if len(this_data) == 0:
            print('No files with bands ', bands)
            continue
        # Stars flagged as used by the fit (RESERVED bit cleared).
        # Note: & binds tighter than ==, so this is (flag & ~RESERVED) == 0.
        used = this_data[prefix + '_flag'] & ~RESERVED == 0
        #airmass = this_data['airmass']
        #sky = this_data['sky']
        #sigsky = this_data['sigsky']
        #fwhm = this_data['fwhm']
        #med_airmass = np.median(airmass)
        #med_sky = np.median(sky)
        #med_sigsky = np.median(sigsky)
        #med_fwhm = np.median(fwhm)
        #print('airmass: ',min(airmass),med_airmass,max(airmass))
        #print('sky: ',min(sky),med_sky,max(sky))
        #print('sigsky: ',min(sigsky),med_sigsky,max(sigsky))
        #print('fwhm: ',min(fwhm),med_fwhm,max(fwhm))
        ra = this_data['ra']
        dec = this_data['dec']
        x = this_data['x']
        y = this_data['y']
        m = this_data['mag']
        print('full mag range = ', np.min(m), np.max(m))
        print('used mag range = ', np.min(m[used]), np.max(m[used]))
        e1 = this_data['obs_e1']
        print('mean e1 = ', np.mean(e1))
        e2 = this_data['obs_e2']
        print('mean e2 = ', np.mean(e2))
        T = this_data['obs_T']
        print('mean s = ', np.mean(T))
        pe1 = this_data[prefix + '_e1']
        print('mean pe1 = ', np.mean(pe1))
        pe2 = this_data[prefix + '_e2']
        print('mean pe2 = ', np.mean(pe2))
        pT = this_data[prefix + '_T']
        print('mean pT = ', np.mean(pT))
        print('min mag = ', np.min(m))
        print('max mag = ', np.max(m))
        print('mean T (used) = ', np.mean(T[used]))
        print('mean e1 (used) = ', np.mean(e1[used]))
        print('mean e2 (used) = ', np.mean(e2[used]))
        print('mean pT (used) = ', np.mean(pT[used]))
        print('mean pe1 (used) = ', np.mean(pe1[used]))
        print('mean pe2 (used) = ', np.mean(pe2[used]))
        # Model residuals.
        de1 = e1 - pe1
        de2 = e2 - pe2
        dT = T - pT
        print('mean dT (used) = ', np.mean(dT[used]))
        print('mean de1 (used) = ', np.mean(de1[used]))
        print('mean de2 (used) = ', np.mean(de2[used]))
        # Faintest magnitude actually used in the fit (psfex has no cut).
        if args.use_psfex:
            min_mused = 0
        else:
            min_mused = np.min(m[used])
        print('min_mused = ', min_mused)
        bin_by_mag(m, dT, de1, de2, min_mused, bands, outpath)
        bin_by_mag(m[used], dT[used], de1[used], de2[used], min_mused,
                   bands + '_used', outpath)
        make_hist(dT, T, de1, de2, bands, outpath)
        make_hist(dT[used], T[used], de1[used], de2[used],
                  bands + '_used', outpath)
def main():
    """Like the outpath variant above, but iterates over band_combinations()
    and writes plots/histograms without an explicit output directory."""
    matplotlib.use('Agg')  # needs to be done before import pyplot
    args = parse_args()

    work = os.path.expanduser(args.work)
    print('work dir = ', work)

    # Exposure list from file or explicit command-line listing.
    if args.file != '':
        print('Read file ', args.file)
        with open(args.file) as fin:
            exps = [line.strip() for line in fin if line[0] != '#']
    else:
        exps = args.exps
    exps = sorted(exps)

    if args.use_psfex:
        prefix = 'psfex'
    else:
        prefix = 'piff'

    keys = ['ra', 'dec', 'x', 'y', 'mag',
            'obs_e1', 'obs_e2', 'obs_T', 'obs_flag',
            prefix + '_e1', prefix + '_e2', prefix + '_T', prefix + '_flag']
    data, bands, tilings = read_data(exps, work, keys,
                                     limit_bands=args.bands, prefix=prefix,
                                     use_reserved=args.use_reserved,
                                     frac=args.frac)

    use_bands = band_combinations(args.bands)
    for bands in use_bands:
        this_data = data[np.in1d(data['band'], list(bands))]
        if len(this_data) == 0:
            print('No files with bands ', bands)
            continue
        # (flag & ~RESERVED) == 0 — & binds tighter than ==.
        used = this_data[prefix + '_flag'] & ~RESERVED == 0
        #airmass = this_data['airmass']
        #sky = this_data['sky']
        #sigsky = this_data['sigsky']
        #fwhm = this_data['fwhm']
        #med_airmass = np.median(airmass)
        #med_sky = np.median(sky)
        #med_sigsky = np.median(sigsky)
        #med_fwhm = np.median(fwhm)
        #print('airmass: ',min(airmass),med_airmass,max(airmass))
        #print('sky: ',min(sky),med_sky,max(sky))
        #print('sigsky: ',min(sigsky),med_sigsky,max(sigsky))
        #print('fwhm: ',min(fwhm),med_fwhm,max(fwhm))
        ra = this_data['ra']
        dec = this_data['dec']
        x = this_data['x']
        y = this_data['y']
        m = this_data['mag']
        print('full mag range = ', np.min(m), np.max(m))
        print('used mag range = ', np.min(m[used]), np.max(m[used]))
        e1 = this_data['obs_e1']
        print('mean e1 = ', np.mean(e1))
        e2 = this_data['obs_e2']
        print('mean e2 = ', np.mean(e2))
        T = this_data['obs_T']
        print('mean s = ', np.mean(T))
        pe1 = this_data[prefix + '_e1']
        print('mean pe1 = ', np.mean(pe1))
        pe2 = this_data[prefix + '_e2']
        print('mean pe2 = ', np.mean(pe2))
        pT = this_data[prefix + '_T']
        print('mean pT = ', np.mean(pT))
        print('min mag = ', np.min(m))
        print('max mag = ', np.max(m))
        print('mean T (used) = ', np.mean(T[used]))
        print('mean e1 (used) = ', np.mean(e1[used]))
        print('mean e2 (used) = ', np.mean(e2[used]))
        print('mean pT (used) = ', np.mean(pT[used]))
        print('mean pe1 (used) = ', np.mean(pe1[used]))
        print('mean pe2 (used) = ', np.mean(pe2[used]))
        # Model residuals.
        de1 = e1 - pe1
        de2 = e2 - pe2
        dT = T - pT
        print('mean dT (used) = ', np.mean(dT[used]))
        print('mean de1 (used) = ', np.mean(de1[used]))
        print('mean de2 (used) = ', np.mean(de2[used]))
        if args.use_psfex:
            min_mused = 0
        else:
            min_mused = np.min(m[used])
        print('min_mused = ', min_mused)
        bin_by_mag(m, dT, de1, de2, min_mused, bands)
        bin_by_mag(m[used], dT[used], de1[used], de2[used], min_mused,
                   bands + '_used')
        make_hist(dT, T, de1, de2, bands)
        make_hist(dT[used], T[used], de1[used], de2[used], bands + '_used')
def main():
    """Measure tau statistics between reserved stars and metacal galaxies and
    write each statistic to its own FITS file (value table + diagonal
    covariance), either per tomographic bin or for the whole catalog."""
    import sys
    sys.path.insert(0, '/home/dfa/sobreira/alsina/alpha-beta-gamma/code/src')
    #sys.path.insert(0, '/global/cscratch1/sd/alsina/alpha-beta-gamma/code/src')
    import numpy as np
    from read_psf_cats import read_data, toList, read_metacal
    from astropy.io import fits
    import gc

    args = parse_args()

    #Make directory where the ouput data will be
    outpath = os.path.expanduser(args.outpath)
    try:
        if not os.path.exists(outpath):
            os.makedirs(outpath)
    except OSError:
        if not os.path.exists(outpath):
            raise

    ##Format of the fit file output
    names = ['BIN1', 'BIN2', 'ANGBIN', 'VALUE', 'ANG']
    forms = ['i4', 'i4', 'i4', 'f8', 'f8']
    dtype = dict(names=names, formats=forms)
    nrows = 20
    outdata = np.recarray((nrows, ), dtype=dtype)
    # One output FITS file per statistic name below.
    namesout = ['TAU0P', 'TAU2P', 'TAU5P', 'TAU0M', 'TAU2M', 'TAU5M']

    #Reading Mike stars catalog
    keys = ['ra', 'dec', 'obs_e1', 'obs_e2', 'obs_T',
            'piff_e1', 'piff_e2', 'piff_T', 'mag']
    exps = toList(args.exps_file)
    data_stars, bands, tilings = read_data(exps, args.piff_cat, keys,
                                           limit_bands=args.bands,
                                           use_reserved=args.use_reserved)
    print("Objects", len(data_stars))
    data_stars = data_stars[data_stars['mag'] < 20]
    print("Objects with magnitude <20", len(data_stars))
    # Free what we no longer need before loading the galaxy catalog.
    del bands, tilings, exps, keys
    gc.collect()

    if (args.tomo):
        print('Starting Tomography!')
        galkeys = ['ra', 'dec', 'e_1', 'e_2', 'R11', 'R22']
        nbins = 4
        for bin_c in range(nbins):
            print('Starting bin!', bin_c)
            data_gal = read_metacal(args.metacal_cat, galkeys, zbin=bin_c,
                                    nz_source_file=args.nz_source)
            tau0, tau2, tau5 = measure_tau(data_stars, data_gal, mod=args.mod)
            tau0parr = tau0.xip
            tau2parr = tau2.xip
            tau5parr = tau5.xip
            tau0marr = tau0.xim
            tau2marr = tau2.xim
            tau5marr = tau5.xim
            vartau0arr = 2 * tau0.varxi
            vartau2arr = 2 * tau2.varxi
            vartau5arr = 2 * tau5.varxi
            taus = [tau0parr, tau2parr, tau5parr,
                    tau0marr, tau2marr, tau5marr]
            # Same variance is used for both + and - components.
            vares = [vartau0arr, vartau2arr, vartau5arr,
                     vartau0arr, vartau2arr, vartau5arr]
            for i, nam in enumerate(namesout):
                covmat = np.diag(vares[i])
                hdu = fits.PrimaryHDU()
                hdul = fits.HDUList([hdu])
                covmathdu = fits.ImageHDU(covmat, name='COVMAT')
                hdul.insert(1, covmathdu)
                zangarray = np.exp(tau0.meanlogr)
                valuearray = np.array(taus[i])
                bin1array = np.array([bin_c] * nrows)
                bin2array = np.array([bin_c] * nrows)
                angbinarray = np.arange(nrows)
                array_list = [bin1array, bin2array, angbinarray,
                              valuearray, zangarray]
                for array, name in zip(array_list, names):
                    outdata[name] = array
                corrhdu = fits.BinTableHDU(outdata, name=nam)
                hdul.insert(2, corrhdu)
                hdul.writeto(outpath + nam + '_bin_' + str(bin_c) + '.fits',
                             overwrite=True)
    else:
        galkeys = ['ra', 'dec', 'e_1', 'e_2', 'R11', 'R22']
        data_galaxies = read_metacal(args.metacal_cat, galkeys)
        print("Total objects in catalog:", len(data_galaxies))
        tau0, tau2, tau5 = measure_tau(data_stars, data_galaxies, mod=args.mod)
        tau0marr = tau0.xim
        tau2marr = tau2.xim
        tau5marr = tau5.xim
        tau0parr = tau0.xip
        tau2parr = tau2.xip
        tau5parr = tau5.xip
        vartau0arr = 2 * tau0.varxi
        vartau2arr = 2 * tau2.varxi
        vartau5arr = 2 * tau5.varxi
        taus = [tau0parr, tau2parr, tau5parr,
                tau0marr, tau2marr, tau5marr]
        vares = [vartau0arr, vartau2arr, vartau5arr,
                 vartau0arr, vartau2arr, vartau5arr]
        for i, nam in enumerate(namesout):
            covmat = np.diag(vares[i])
            hdu = fits.PrimaryHDU()
            hdul = fits.HDUList([hdu])
            covmathdu = fits.ImageHDU(covmat, name='COVMAT')
            hdul.insert(1, covmathdu)
            angarray = np.exp(tau0.meanlogr)
            valuearray = np.array(taus[i])
            # -999 marks the non-tomographic (all-bin) case.
            bin1array = np.array([-999] * nrows)
            bin2array = np.array([-999] * nrows)
            angbinarray = np.arange(nrows)
            array_list = [bin1array, bin2array, angbinarray,
                          valuearray, angarray]
            for array, name in zip(array_list, names):
                outdata[name] = array
            corrhdu = fits.BinTableHDU(outdata, name=nam)
            hdul.insert(2, corrhdu)
            hdul.writeto(outpath + nam + '.fits', overwrite=True)
def main():
    """Bin PSF residuals by magnitude per band combination; can either
    recompute bins from the catalogs (writing .dat files) or reload
    previously written .dat files (--use_dat), then plot to PDF."""
    matplotlib.use('Agg')  # needs to be done before import pyplot
    args = parse_args()

    work = os.path.expanduser(args.work)
    print('work dir = ', work)

    # Exposure list from file or explicit command-line listing.
    if args.file != '':
        print('Read file ', args.file)
        with open(args.file) as fin:
            exps = [line.strip() for line in fin if line[0] != '#']
    else:
        exps = args.exps
    exps = sorted(exps)

    if args.use_psfex:
        prefix = 'psfex'
    else:
        prefix = 'piff'

    keys = ['ra', 'dec', 'x', 'y', 'mag',
            'obs_e1', 'obs_e2', 'obs_T', 'obs_flag',
            prefix + '_e1', prefix + '_e2', prefix + '_T', prefix + '_flag']
    # Catalogs are only read when recomputing; with --use_dat the binned
    # .dat files are used instead.
    if not args.use_dat:
        data, bands, tilings = read_data(exps, work, keys,
                                         limit_bands=args.bands, prefix=prefix,
                                         use_reserved=args.use_reserved,
                                         frac=args.frac)

    use_bands = band_combinations(args.bands)
    for bands in use_bands:
        # NOTE(review): the whole statistics section is assumed to be guarded
        # by `if not args.use_dat` (m/dT/de1/de2 are undefined otherwise) —
        # confirm against the original indentation.
        if not args.use_dat:
            this_data = data[np.in1d(data['band'], list(bands))]
            if len(this_data) == 0:
                print('No files with bands ', bands)
                continue
            print('bands = ', bands)
            print('unique flags = ', np.unique(this_data[prefix + '_flag']))
            if args.use_reserved:
                RESERVED = 65
                # (flag & ~RESERVED) == 0 — & binds tighter than ==.
                used = this_data[prefix + '_flag'] & ~RESERVED == 0
            else:
                used = this_data[prefix + '_flag'] == 0
            print('used = ', used)
            print('unique used = ', np.unique(used))
            ra = this_data['ra']
            dec = this_data['dec']
            x = this_data['x']
            y = this_data['y']
            m = this_data['mag']
            print('full mag range = ', np.min(m), np.max(m))
            print('used mag range = ', np.min(m[used]), np.max(m[used]))
            e1 = this_data['obs_e1']
            print('mean e1 = ', np.mean(e1))
            e2 = this_data['obs_e2']
            print('mean e2 = ', np.mean(e2))
            T = this_data['obs_T']
            print('mean s = ', np.mean(T))
            pe1 = this_data[prefix + '_e1']
            print('mean pe1 = ', np.mean(pe1))
            pe2 = this_data[prefix + '_e2']
            print('mean pe2 = ', np.mean(pe2))
            pT = this_data[prefix + '_T']
            print('mean pT = ', np.mean(pT))
            print('min mag = ', np.min(m))
            print('max mag = ', np.max(m))
            print('mean T (used) = ', np.mean(T[used]))
            print('mean e1 (used) = ', np.mean(e1[used]))
            print('mean e2 (used) = ', np.mean(e2[used]))
            print('mean pT (used) = ', np.mean(pT[used]))
            print('mean pe1 (used) = ', np.mean(pe1[used]))
            print('mean pe2 (used) = ', np.mean(pe2[used]))
            de1 = e1 - pe1
            de2 = e2 - pe2
            dT = T - pT
            print('mean dT (used) = ', np.mean(dT[used]))
            print('mean de1 (used) = ', np.mean(de1[used]))
            print('mean de2 (used) = ', np.mean(de2[used]))
        if args.use_psfex:
            min_mused = 0
        elif args.use_dat:
            # Save these by hand. TODO: put this in output file?
            d = {'r': 15.0976, 'i': 15.2468, 'z': 14.8781, 'riz': 14.8781}
            if not bands in d:
                continue
            min_mused = d[bands]
        else:
            min_mused = np.min(m[used])
        print('min_mused = ', min_mused)
        if args.use_dat:
            # Reload previously computed bins.
            infile1 = 'dpsf_mag_' + bands + '.dat'
            bin_data1 = read_bins(infile1)
            infile2 = 'dpsf_mag_' + bands + '_used.dat'
            bin_data2 = read_bins(infile2)
        else:
            # Compute bins and persist them for later --use_dat runs.
            bin_data1 = bin_by_mag(m, dT, de1, de2, min_mused)
            outfile1 = 'dpsf_mag_' + bands + '.dat'
            write_bins(bin_data1, outfile1)
            bin_data2 = bin_by_mag(m[used], dT[used], de1[used], de2[used],
                                   min_mused)
            outfile2 = 'dpsf_mag_' + bands + '_used.dat'
            write_bins(bin_data2, outfile2)
        pdffile1 = 'dpsf_mag_' + bands + '.pdf'
        plot_bins(bin_data1, pdffile1)
        pdffile2 = 'dpsf_mag_' + bands + '_used.pdf'
        plot_bins(bin_data2, pdffile2, min_mused)
def main():
    """Read PSF star data, print per-band counts, and run the canonical
    rho statistics (alt_tt disabled, optional 'lucas' mode)."""
    args = parse_args()
    print('args = ', args)

    # Make the work directory if it does not exist yet.
    work = os.path.expanduser(args.work)
    print('work dir = ', work)
    try:
        if not os.path.isdir(work):
            os.makedirs(work)
    except OSError as e:
        print("Ignore OSError from makedirs(work):")
        print(e)
        pass

    if args.use_psfex:
        prefix = 'psfex'
    else:
        prefix = 'piff'

    # Exposure list from file or explicit command-line listing.
    if args.file != '':
        print('Read file ', args.file)
        with open(args.file) as fin:
            exps = [line.strip() for line in fin if line[0] != '#']
        print('File included %d exposures' % len(exps))
    else:
        exps = args.exps
        print('Explicit listing of %d exposures' % len(exps))
    exps = sorted(exps)

    keys = ['ra', 'dec', 'x', 'y',
            'obs_e1', 'obs_e2', 'obs_T',
            prefix + '_e1', prefix + '_e2', prefix + '_T', 'mag']
    data, bands, tilings = read_data(exps, work, keys,
                                     limit_bands=args.bands, prefix=prefix,
                                     use_reserved=args.use_reserved,
                                     frac=args.frac)
    print('all bands = ', bands)
    #print('all tilings = ',tilings)
    #out_file_name = os.path.join(work, "psf_%s.fits"%args.tag)
    #write_data(data, out_file_name)
    for band in bands:
        print('n for band %s = ' % band, np.sum(data['band'] == band))
    #for til in tilings:
    #print('n for tiling %d = '%til, np.sum(data['tiling'] == til))
    gdata = np.where(data['band'] == 'g')[0]
    rdata = np.where(data['band'] == 'r')[0]
    idata = np.where(data['band'] == 'i')[0]
    zdata = np.where(data['band'] == 'z')[0]
    #odddata = np.where(data['tiling']%2 == 1)[0]
    #evendata = np.where(data['tiling']%2 == 0)[0]
    print('len(gdata) = ', len(gdata))
    print('len(rdata) = ', len(rdata))
    print('len(idata) = ', len(idata))
    print('len(zdata) = ', len(zdata))
    #print('len(odddata) = ',len(odddata))
    #print('len(evendata) = ',len(evendata))
    #bands = ['r', 'i']

    do_canonical_stats(data, bands, tilings, work,
                       alt_tt=False, prefix=prefix, lucas=args.lucas)
def main():
    """Measure rho statistics in four k-means jackknife regions of the
    reserved-star catalog and write all rho columns to one FITS table."""
    import sys
    sys.path.insert(0, '/home/dfa/sobreira/alsina/alpha-beta-gamma/code/src')
    #sys.path.insert(0, '/global/cscratch1/sd/alsina/alpha-beta-gamma/code/src')
    import numpy as np
    from read_psf_cats import read_data, toList

    args = parse_args()

    #Make directory where the ouput data will be
    outpath = os.path.expanduser(args.outpath)
    try:
        if not os.path.exists(outpath):
            os.makedirs(outpath)
    except OSError:
        if not os.path.exists(outpath):
            raise

    #STATISTIC USING ONLY RESERVED STARS
    keys = ['ra', 'dec', 'obs_e1', 'obs_e2', 'obs_T',
            'piff_e1', 'piff_e2', 'piff_T', 'mag']
    exps = toList(args.exps_file)
    # Small (1%) subsample used only to seed the k-means jackknife regions.
    data_sam, bands, tilings = read_data(exps, args.piff_cat, keys,
                                         limit_bands=args.bands,
                                         use_reserved=args.use_reserved,
                                         frac=0.01)
    data, bands, tilings = read_data(exps, args.piff_cat, keys,
                                     limit_bands=args.bands,
                                     use_reserved=args.use_reserved)
    print("Objects", len(data))
    data = data[data['mag'] < 20]
    print("Objects with magnitude <20", len(data))

    # Output table: one row per angular bin, tagged by jackknife region.
    names = ['JKR', 'ANGBIN', 'THETA',
             'RHO0P', 'RHO0M', 'VAR_RHO0',
             'RHO1P', 'RHO1M', 'VAR_RHO1',
             'RHO2P', 'RHO2M', 'VAR_RHO2',
             'RHO3P', 'RHO3M', 'VAR_RHO3',
             'RHO4P', 'RHO4M', 'VAR_RHO4',
             'RHO5P', 'RHO5M', 'VAR_RHO5']
    forms = ['i4', 'i4', 'f8',
             'f8', 'f8', 'f8',
             'f8', 'f8', 'f8',
             'f8', 'f8', 'f8',
             'f8', 'f8', 'f8',
             'f8', 'f8', 'f8',
             'f8', 'f8', 'f8']
    dtype = dict(names=names, formats=forms)
    nrows = 20
    outdata = np.recarray((nrows, ), dtype=dtype)

    njk = 4
    jkindexes = jk_kmeans(data_sam['ra'], data_sam['dec'],
                          data['ra'], data['dec'], njk, plot=True)
    #print (jkindexes)
    for jkidx in range(njk):
        print("running jackkniffe region", jkidx)
        # Measure on all stars outside this jackknife region.
        rho0, rho1, rho2, rho3, rho4, rho5 = measure_rho(
            data[jkindexes != jkidx], mod=args.mod, obs=args.obs)
        jkrarr = np.array([jkidx] * nrows)
        angarr = np.arange(nrows)
        thetaarr = np.exp(rho0.meanlogr)
        rho0marr = rho0.xim
        rho1marr = rho1.xim
        rho2marr = rho2.xim
        rho3marr = rho3.xim
        rho4marr = rho4.xim
        rho5marr = rho5.xim
        rho0parr = rho0.xip
        rho1parr = rho1.xip
        rho2parr = rho2.xip
        rho3parr = rho3.xip
        rho4parr = rho4.xip
        rho5parr = rho5.xip
        varrho0arr = 2 * rho0.varxi
        varrho1arr = 2 * rho1.varxi
        varrho2arr = 2 * rho2.varxi
        varrho3arr = 2 * rho3.varxi
        varrho4arr = 2 * rho4.varxi
        varrho5arr = 2 * rho5.varxi
        array_list = [jkrarr, angarr, thetaarr,
                      rho0parr, rho0marr, varrho0arr,
                      rho1parr, rho1marr, varrho1arr,
                      rho2parr, rho2marr, varrho2arr,
                      rho3parr, rho3marr, varrho3arr,
                      rho4parr, rho4marr, varrho4arr,
                      rho5parr, rho5marr, varrho5arr]
        for array, name in zip(array_list, names):
            outdata[name] = array
        write_fit(outdata, names, outpath + args.filename)
def main():
    """Run tau statistics between bright reserved stars and metacal galaxies
    in four quadrant sky patches, with an optional tomographic pass over
    four redshift bins followed by the all-bin run."""
    import sys
    sys.path.insert(0, '/home/dfa/sobreira/alsina/alpha-beta-gamma/code/src')
    #sys.path.insert(0, '/global/cscratch1/sd/alsina/alpha-beta-gamma/code/src')
    import numpy as np
    from read_psf_cats import read_data, toList, read_h5
    from run_rho import do_tau_stats
    import h5py as h

    args = parse_args()

    #Make directory where the ouput data will be
    outpath = os.path.expanduser(args.outpath)
    try:
        if not os.path.exists(outpath):
            os.makedirs(outpath)
    except OSError:
        if not os.path.exists(outpath):
            raise

    #Reading metacal catalog
    #galkeys = ['ra']
    #blabla = read_h5(args.metacal_cat, 'catalog/metacal/sheared_1m', galkeys )

    #Reading Mike stars catalog
    keys = ['ra', 'dec', 'obs_e1', 'obs_e2', 'obs_T',
            'piff_e1', 'piff_e2', 'piff_T', 'mag']
    exps = toList(args.exps_file)
    data_stars, bands, tilings = read_data(exps, args.piff_cat, keys,
                                           limit_bands=args.bands,
                                           use_reserved=args.use_reserved)
    print("Objects", len(data_stars))
    data_stars = data_stars[data_stars['mag'] < 20]
    print("Objects with magnitude <20", len(data_stars))
    # Quadrant split point for the sky patches below.
    meanra = np.mean(data_stars['ra'])
    meandec = np.mean(data_stars['dec'])

    if (args.tomo):
        #Make directory where the ouput data will be
        ipath = os.path.join(args.outpath, 'tomo_taus')
        outpath = os.path.expanduser(ipath)
        try:
            if not os.path.exists(outpath):
                os.makedirs(outpath)
        except OSError:
            if not os.path.exists(outpath):
                raise

        print('Starting Tomography!')
        galkeys = ['ra', 'dec', 'e_1', 'e_2', 'R11', 'R22']
        data_gal = read_h5(args.metacal_cat, 'catalog/metacal/unsheared',
                           galkeys)
        print("Total objects in catalog:", len(data_gal))
        # Metacal shear step: responses are (e_plus - e_minus) / dgamma.
        dgamma = 2 * 0.01
        f = h.File(args.metacal_cat, 'r')
        index = f['index']
        select = np.array(index['select'])
        select_1p = np.array(index['select_1p'])
        select_1m = np.array(index['select_1m'])
        select_2p = np.array(index['select_2p'])
        select_2m = np.array(index['select_2m'])
        n = h.File(args.nz_source, 'r')
        zbin_array = np.array(n['nofz/zbin'])
        nbins = 4
        for bin_c in range(nbins):
            print('Starting bin!', bin_c)
            # Indices of galaxies in this redshift bin for each sheared
            # selection variant.
            ind = np.where(zbin_array == bin_c)[0]
            ind_1p = np.where(np.array(n['nofz/zbin_1p']) == bin_c)
            ind_1m = np.where(np.array(n['nofz/zbin_1m']) == bin_c)
            ind_2p = np.where(np.array(n['nofz/zbin_2p']) == bin_c)
            ind_2m = np.where(np.array(n['nofz/zbin_2m']) == bin_c)
            # Per-bin selection responses.
            R11s = (data_gal['e_1'][select_1p][ind_1p].mean() -
                    data_gal['e_1'][select_1m][ind_1m].mean()) / dgamma
            R22s = (data_gal['e_2'][select_2p][ind_2p].mean() -
                    data_gal['e_2'][select_2m][ind_2m].mean()) / dgamma
            Rs = [R11s, R22s]
            # Four quadrant patches around the mean star position.
            patchstars = []
            patchgal = []
            patchstars.append((data_stars['ra'] > meanra) &
                              (data_stars['dec'] > meandec))
            patchstars.append((data_stars['ra'] < meanra) &
                              (data_stars['dec'] > meandec))
            patchstars.append((data_stars['ra'] < meanra) &
                              (data_stars['dec'] < meandec))
            patchstars.append((data_stars['ra'] > meanra) &
                              (data_stars['dec'] < meandec))
            patchgal.append((data_gal[select][ind]['ra'] > meanra) &
                            (data_gal[select][ind]['dec'] > meandec))
            patchgal.append((data_gal[select][ind]['ra'] < meanra) &
                            (data_gal[select][ind]['dec'] > meandec))
            patchgal.append((data_gal[select][ind]['ra'] < meanra) &
                            (data_gal[select][ind]['dec'] < meandec))
            patchgal.append((data_gal[select][ind]['ra'] > meanra) &
                            (data_gal[select][ind]['dec'] < meandec))
            for pat in range(4):
                patchstarbool = patchstars[pat]
                data_starsaux = data_stars[patchstarbool]
                patchgalbool = patchgal[pat]
                do_tau_stats(data_gal[select][ind][patchgalbool], Rs,
                             data_starsaux, bands, tilings, outpath,
                             max_sep=300, sep_units='arcmin',
                             name=today + 'mod_bin_' + str(bin_c + 1) + '_' +
                             str(bin_c + 1) + '_patch_' + str(pat + 1),
                             bandcombo=args.bandcombo, mod=args.mod,
                             shapenoise=args.sn)

    # Non-tomographic pass over the full (masked) catalog.
    # NOTE(review): this runs even when --tomo is set — confirm intended.
    galkeys = ['ra', 'dec', 'e_1', 'e_2', 'R11', 'R22']
    data_gal = read_h5(args.metacal_cat, 'catalog/metacal/unsheared', galkeys)
    print("Total objects in catalog:", len(data_gal))
    dgamma = 2 * 0.01
    f = h.File(args.metacal_cat, 'r')
    index = f['index']
    select = np.array(index['select'])
    select_1p = np.array(index['select_1p'])
    select_1m = np.array(index['select_1m'])
    select_2p = np.array(index['select_2p'])  #added by Lucas:
    select_2m = np.array(index['select_2m'])  #added by Lucas
    R11s = (data_gal['e_1'][select_1p].mean() -
            data_gal['e_1'][select_1m].mean()) / dgamma
    R22s = (data_gal['e_2'][select_2p].mean() -
            data_gal['e_2'][select_2m].mean()) / dgamma
    #added by Lucas: modified to to select_2p and 2m
    Rs = [R11s, R22s]
    print("Total objects after masking", len(data_gal))
    print("R11s=", R11s)
    print("R22s=", R22s)
    patchstars = []
    patchgal = []
    patchstars.append((data_stars['ra'] > meanra) &
                      (data_stars['dec'] > meandec))
    patchstars.append((data_stars['ra'] < meanra) &
                      (data_stars['dec'] > meandec))
    patchstars.append((data_stars['ra'] < meanra) &
                      (data_stars['dec'] < meandec))
    patchstars.append((data_stars['ra'] > meanra) &
                      (data_stars['dec'] < meandec))
    patchgal.append((data_gal[select]['ra'] > meanra) &
                    (data_gal[select]['dec'] > meandec))
    patchgal.append((data_gal[select]['ra'] < meanra) &
                    (data_gal[select]['dec'] > meandec))
    patchgal.append((data_gal[select]['ra'] < meanra) &
                    (data_gal[select]['dec'] < meandec))
    patchgal.append((data_gal[select]['ra'] > meanra) &
                    (data_gal[select]['dec'] < meandec))
    for pat in range(4):
        patchstarbool = patchstars[pat]
        data_starsaux = data_stars[patchstarbool]
        patchgalbool = patchgal[pat]
        do_tau_stats(data_gal[select][patchgalbool], Rs, data_starsaux,
                     bands, tilings, outpath,
                     max_sep=300, sep_units='arcmin',
                     name=today + 'mod_bin_' + 'patch_' + str(pat + 1),
                     bandcombo=args.bandcombo, mod=args.mod,
                     shapenoise=args.sn)
def main():
    """Read PSF star catalogs for a list of exposures and run the canonical
    rho-statistics, optionally caching the merged catalog to a FITS file.

    Relies on module-level helpers defined elsewhere in this file:
    parse_args, read_data, read_data_file, write_data_file,
    do_canonical_stats, plus `os` and `np`.
    """
    args = parse_args()
    print('args = ', args)

    # Make the work directory if it does not exist yet.
    work = os.path.expanduser(args.work)
    print('work dir = ', work)
    try:
        if not os.path.isdir(work):
            os.makedirs(work)
    except OSError as e:
        # Best effort: e.g. a concurrent process may have created it already.
        print("Ignore OSError from makedirs(work):")
        print(e)

    prefix = 'psfex' if args.use_psfex else 'piff'

    if args.file != '':
        print('Read file ', args.file)
        with open(args.file) as fin:
            # BUGFIX: also skip blank lines; the original test
            # `line[0] != '#'` raised IndexError on an empty line.
            exps = [line.strip() for line in fin
                    if line.strip() and not line.startswith('#')]
        print('File included %d exposures' % len(exps))
    else:
        exps = args.exps
        print('Explicit listing of %d exposures' % len(exps))
    exps = sorted(exps)

    keys = ['ra', 'dec', 'x', 'y', 'obs_e1', 'obs_e2', 'obs_T',
            prefix + '_e1', prefix + '_e2', prefix + '_T', 'mag']

    out_file_name = os.path.join(work, "psf_%s_%s.fits" % (args.tag, args.bands))

    # BUGFIX: initialize data so the `if data is None` check below cannot
    # raise NameError when args.write_data is set (the cache read branch
    # is skipped in that case and `data` was otherwise never bound).
    data = None
    if not args.write_data:
        # Try the cached catalog first; fall back to a fresh read on failure.
        try:
            data, bands, tilings = read_data_file(out_file_name)
        except Exception as e:
            print('Caught: ', e)
            data = None
    if data is None:
        data, bands, tilings = read_data(
            exps, work, keys,
            limit_bands=args.bands, prefix=prefix,
            use_reserved=args.use_reserved, frac=args.frac)
        if args.write_data:
            write_data_file(data, out_file_name)

    print('all bands = ', bands)
    print('all tilings = ', tilings)
    for band in bands:
        print('n for band %s = ' % band, np.sum(data['band'] == band))
    for til in tilings:
        print('n for tiling %d = ' % til, np.sum(data['tiling'] == til))

    # Per-band index arrays (diagnostic counts only).
    gdata = np.where(data['band'] == 'g')[0]
    rdata = np.where(data['band'] == 'r')[0]
    idata = np.where(data['band'] == 'i')[0]
    zdata = np.where(data['band'] == 'z')[0]
    print('len(gdata) = ', len(gdata))
    print('len(rdata) = ', len(rdata))
    print('len(idata) = ', len(idata))
    print('len(zdata) = ', len(zdata))

    do_canonical_stats(data, bands, tilings, work,
                       max_mag=args.max_mag, prefix=prefix, lucas=args.lucas,
                       subtract_mean=args.subtract_mean, do_rho0=args.do_rho0)
def get_fields(bands, expsfile, catpath):
    """Read PSF star catalogs for the exposures listed in ``expsfile`` and
    return magnitudes and PSF-model residuals for the stars used in the fit.

    Parameters
    ----------
    bands : str or sequence
        Band restriction passed to ``read_data`` (``limit_bands``).
    expsfile : str
        Text file with one exposure name per line; lines starting with
        ``#`` are comments.
    catpath : str
        Directory holding the PSF catalogs; ``~`` is expanded.

    Returns
    -------
    tuple
        ``(mag, dT, de1, de2, min_mused)`` restricted to used stars.

    Notes
    -----
    Relies on module-level ``args``, ``RESERVED``, ``read_data``, ``os``
    and ``np``.
    """
    with open(expsfile) as fin:
        exps = [line.strip() for line in fin if line[0] != '#']
    exps = sorted(exps)

    # BUGFIX: prefix must be bound before the key list is built; the
    # original referenced `prefix` inside `keys` before assigning it,
    # which raised NameError on every call.
    prefix = 'piff'
    keys = ['ra', 'dec', 'x', 'y', 'mag',
            'obs_e1', 'obs_e2', 'obs_T', 'obs_flag',
            prefix + '_e1', prefix + '_e2', prefix + '_T', prefix + '_flag']

    work = os.path.expanduser(catpath)
    print('work dir = ', work)
    data, bands, tilings = read_data(exps, work, keys,
                                     limit_bands=bands, prefix=prefix,
                                     use_reserved=args.use_reserved,
                                     frac=args.frac)
    # NOTE(review): list(['riz']) is ['riz'], NOT ['r', 'i', 'z'], so this
    # only matches rows whose 'band' field is literally 'riz'.  Confirm
    # that is the intended selection.
    bands = ['riz']
    this_data = data[np.in1d(data['band'], list(bands))]
    if len(this_data) == 0:
        print('No files with bands ', bands)
        raise Exception('No files with bands ', bands)

    # Stars actually used in the PSF fit: no flags other than RESERVED set.
    used = this_data[prefix + '_flag'] & ~RESERVED == 0

    m = this_data['mag']
    print('full mag range = ', np.min(m), np.max(m))
    print('used mag range = ', np.min(m[used]), np.max(m[used]))
    e1 = this_data['obs_e1']
    print('mean e1 = ', np.mean(e1))
    e2 = this_data['obs_e2']
    print('mean e2 = ', np.mean(e2))
    T = this_data['obs_T']
    print('mean s = ', np.mean(T))
    pe1 = this_data[prefix + '_e1']
    print('mean pe1 = ', np.mean(pe1))
    pe2 = this_data[prefix + '_e2']
    print('mean pe2 = ', np.mean(pe2))
    pT = this_data[prefix + '_T']
    print('mean pT = ', np.mean(pT))
    print('min mag = ', np.min(m))
    print('max mag = ', np.max(m))
    print('mean T (used) = ', np.mean(T[used]))
    print('mean e1 (used) = ', np.mean(e1[used]))
    print('mean e2 (used) = ', np.mean(e2[used]))
    print('mean pT (used) = ', np.mean(pT[used]))
    print('mean pe1 (used) = ', np.mean(pe1[used]))
    print('mean pe2 (used) = ', np.mean(pe2[used]))

    # PSF residuals: observed star moments minus PSF-model moments.
    de1 = e1 - pe1
    de2 = e2 - pe2
    dT = T - pT
    print('mean dT (used) = ', np.mean(dT[used]))
    print('mean de1 (used) = ', np.mean(de1[used]))
    print('mean de2 (used) = ', np.mean(de2[used]))

    if args.use_psfex:
        min_mused = 0
    else:
        min_mused = np.min(m[used])
    print('min_mused = ', min_mused)
    return m[used], dT[used], de1[used], de2[used], min_mused
def main():
    """Measure the rho statistics on reserved stars and write each
    correlation (plus/minus) to its own FITS file with a diagonal
    covariance HDU.

    Relies on module-level parse_args, measure_rho and `os`.
    """
    import sys
    sys.path.insert(0, '/home/dfa/sobreira/alsina/alpha-beta-gamma/code/src')
    import numpy as np
    from read_psf_cats import read_data, toList
    from astropy.io import fits

    args = parse_args()

    # Make directory where the output data will be.
    outpath = os.path.expanduser(args.outpath)
    try:
        if not os.path.exists(outpath):
            os.makedirs(outpath)
    except OSError:
        # Re-raise only if the directory really could not be created
        # (tolerates a concurrent mkdir).
        if not os.path.exists(outpath):
            raise

    # Statistics using only reserved stars.
    keys = ['ra', 'dec', 'obs_e1', 'obs_e2', 'obs_T',
            'piff_e1', 'piff_e2', 'piff_T', 'mag']
    exps = toList(args.exps_file)
    data, bands, tilings = read_data(exps, args.piff_cat, keys,
                                     limit_bands=args.bands,
                                     use_reserved=args.use_reserved)
    print("Objects", len(data))
    data = data[data['mag'] < 20]
    print("Objects with magnitude <20", len(data))

    # Output table layout (TwoPoint-style columns).
    names = ['BIN1', 'BIN2', 'ANGBIN', 'VALUE', 'ANG']
    forms = ['i4', 'i4', 'i4', 'f8', 'f8']
    dtype = dict(names=names, formats=forms)
    nrows = 20
    outdata = np.recarray((nrows,), dtype=dtype)

    namesout = ['RHO0P', 'RHO1P', 'RHO2P', 'RHO3P', 'RHO4P', 'RHO5P',
                'RHO0M', 'RHO1M', 'RHO2M', 'RHO3M', 'RHO4M', 'RHO5M']

    rhostats = measure_rho(data, mod=args.mod, obs=args.obs)
    # xip values feed the six 'P' entries, xim the six 'M' entries,
    # in the same order as namesout.
    rhos = [r.xip for r in rhostats] + [r.xim for r in rhostats]
    # NOTE(review): as in the original, the same 2*varxi variance is used
    # for both the plus and minus correlations — confirm intended.
    vares = [2 * r.varxi for r in rhostats] * 2

    # These arrays are identical for every output file: hoisted out of
    # the loop (the original rebuilt them each iteration).
    angarray = np.exp(rhostats[0].meanlogr)
    bin1array = np.array([-999] * nrows)
    bin2array = np.array([-999] * nrows)
    angbinarray = np.arange(nrows)

    for i, nam in enumerate(namesout):
        covmat = np.diag(vares[i])
        hdu = fits.PrimaryHDU()
        hdul = fits.HDUList([hdu])
        covmathdu = fits.ImageHDU(covmat, name='COVMAT')
        hdul.insert(1, covmathdu)
        valuearray = np.array(rhos[i])
        array_list = [bin1array, bin2array, angbinarray, valuearray, angarray]
        for array, name in zip(array_list, names):
            outdata[name] = array
        corrhdu = fits.BinTableHDU(outdata, name=nam)
        hdul.insert(2, corrhdu)
        # BUGFIX: join path components explicitly; the original
        # `outpath + nam` produced a wrong filename whenever outpath
        # lacked a trailing separator.
        hdul.writeto(os.path.join(outpath, nam + '.fits'), overwrite=True)