## Standard library and scientific Python modules (numpy, scipy, galsim and
## astropy.io.fits are used below but were missing from the original imports)
import os
import sys

import numpy as np
from scipy.spatial import cKDTree

import galsim
from astropy.io import fits
from astropy.coordinates import SkyCoord
from astropy import units as u

## Matplotlib modules (Agg backend so the script can run without a display)
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import matplotlib.cm as cm

#debug = bool(int(sys.argv[2]))
#if debug:
#    import pdb; pdb.set_trace()

## Load the input/true WCS
input_imgname = '/disks/shear14/KiDS_simulations/Cosmos/Theli_image/KIDS_150p1_2p2_r_SDSS.V0.5.9A.swarp.cut.fits'
wcs_input = galsim.AstropyWCS(input_imgname)

## Load the input/truth catalogue
input_catname = '/disks/shear14/KiDS_simulations/Cosmos/KIDS_HST_cat/KiDS_Griffith_iMS1_handpicked_stars.cat'
input_catalogue = fits.open(input_catname)
input_data = input_catalogue[1].data
print "Loaded the input data"

## Obtain the cuts on the input/truth catalogue
MASK_all = input_data.MASK
## Reject any object with one of the bad-mask bits (0xfc3c) set
mask = ~np.array(MASK_all & 0xfc3c, dtype=bool)
handpicked_stars = input_data['handpicked_stars']
rank = input_data['rank']
distance2d = input_data['distance2d']
assert handpicked_stars.dtype == bool
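## check_dep is called in the test below but is not defined anywhere in this
## snippet; in the GalSim test suite it calls a deprecated method and verifies
## that a deprecation warning is actually raised. The helper here is only a
## sketch of that idea (an assumption, not GalSim's own implementation): it
## runs the call with warnings recorded and asserts that at least one warning
## was emitted before returning the result.
import warnings

def check_dep(f, *args, **kwargs):
    with warnings.catch_warnings(record=True) as recorded:
        warnings.simplefilter('always')
        result = f(*args, **kwargs)
    assert len(recorded) > 0, 'expected a deprecation warning from %r' % f
    return result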
def test_withOrigin():
    from test_wcs import Cubic

    # First some EuclideanWCS types:
    scale = 0.2   # not defined in the original snippet; chosen so xfunc/yfunc invert ufunc/vfunc
    wcs_list = [
        galsim.OffsetWCS(0.3, galsim.PositionD(1, 1), galsim.PositionD(10, 23)),
        galsim.OffsetShearWCS(0.23, galsim.Shear(g1=0.1, g2=0.3), galsim.PositionD(12, 43)),
        galsim.AffineTransform(0.01, 0.26, -0.26, 0.02, galsim.PositionD(12, 43)),
        galsim.UVFunction(ufunc=lambda x, y: 0.2 * x, vfunc=lambda x, y: 0.2 * y),
        galsim.UVFunction(ufunc=lambda x, y: 0.2 * x, vfunc=lambda x, y: 0.2 * y,
                          xfunc=lambda u, v: u / scale, yfunc=lambda u, v: v / scale),
        galsim.UVFunction(ufunc='0.2*x + 0.03*y', vfunc='0.01*x + 0.2*y'),
    ]

    color = 0.3
    for wcs in wcs_list:
        # Original version of the shiftOrigin tests in do_nonlocal_wcs using the deprecated name.
        name = str(wcs)   # used in the assert messages; the original snippet never defined this
        new_origin = galsim.PositionI(123, 321)
        wcs3 = check_dep(wcs.withOrigin, new_origin)
        assert wcs != wcs3, name + ' is not != wcs.withOrigin(pos)'
        wcs4 = wcs.local(wcs.origin, color=color)
        assert wcs != wcs4, name + ' is not != wcs.local()'
        assert wcs4 != wcs, name + ' is not != wcs.local() (reverse)'
        world_origin = wcs.toWorld(wcs.origin, color=color)
        if wcs.isUniform():
            if wcs.world_origin == galsim.PositionD(0, 0):
                wcs2 = wcs.local(wcs.origin, color=color).withOrigin(wcs.origin)
                assert wcs == wcs2, name + ' is not equal after wcs.local().withOrigin(origin)'
            wcs2 = wcs.local(wcs.origin, color=color).withOrigin(wcs.origin, wcs.world_origin)
            assert wcs == wcs2, name + ' not equal after wcs.local().withOrigin(origin,world_origin)'
        world_pos1 = wcs.toWorld(galsim.PositionD(0, 0), color=color)
        wcs3 = check_dep(wcs.withOrigin, new_origin)
        world_pos2 = wcs3.toWorld(new_origin, color=color)
        np.testing.assert_almost_equal(
            world_pos2.x, world_pos1.x, 7,
            'withOrigin(new_origin) returned wrong world position')
        np.testing.assert_almost_equal(
            world_pos2.y, world_pos1.y, 7,
            'withOrigin(new_origin) returned wrong world position')
        new_world_origin = galsim.PositionD(5352.7, 9234.3)
        wcs5 = check_dep(wcs.withOrigin, new_origin, new_world_origin, color=color)
        world_pos3 = wcs5.toWorld(new_origin, color=color)
        np.testing.assert_almost_equal(
            world_pos3.x, new_world_origin.x, 7,
            'withOrigin(new_origin, new_world_origin) returned wrong position')
        np.testing.assert_almost_equal(
            world_pos3.y, new_world_origin.y, 7,
            'withOrigin(new_origin, new_world_origin) returned wrong position')

    # Now some CelestialWCS types
    cubic_u = Cubic(2.9e-5, 2000., 'u')
    cubic_v = Cubic(-3.7e-5, 2000., 'v')
    center = galsim.CelestialCoord(23 * galsim.degrees, -13 * galsim.degrees)
    radec = lambda x, y: center.deproject_rad(
        cubic_u(x, y) * 0.2, cubic_v(x, y) * 0.2, projection='lambert')
    wcs_list = [
        galsim.RaDecFunction(radec),
        galsim.AstropyWCS('1904-66_TAN.fits', dir='fits_files'),
        galsim.GSFitsWCS('tpv.fits', dir='fits_files'),
        galsim.FitsWCS('sipsample.fits', dir='fits_files'),
    ]

    for wcs in wcs_list:
        # Original version of the shiftOrigin tests in do_celestial_wcs using the deprecated name.
        name = str(wcs)   # used in the assert messages; the original snippet never defined this
        new_origin = galsim.PositionI(123, 321)
        wcs3 = wcs.shiftOrigin(new_origin)
        assert wcs != wcs3, name + ' is not != wcs.shiftOrigin(pos)'
        wcs4 = wcs.local(wcs.origin)
        assert wcs != wcs4, name + ' is not != wcs.local()'
        assert wcs4 != wcs, name + ' is not != wcs.local() (reverse)'
        world_pos1 = wcs.toWorld(galsim.PositionD(0, 0))
        wcs3 = wcs.shiftOrigin(new_origin)
        world_pos2 = wcs3.toWorld(new_origin)
        np.testing.assert_almost_equal(
            world_pos2.distanceTo(world_pos1) / galsim.arcsec, 0, 7,
            'shiftOrigin(new_origin) returned wrong world position')
def check_consistency(randomKey, psfIDs=[0, 1, 2, 3, 4]):
    """Compare SExtractor and lensfit outputs of the image simulations against
    the input (KiDS/COSMOS) catalogue for every shear/PSF realisation of a run,
    and write QC plots to the archive directory."""
    gRange = ['p400m000', 'm400m000', 'm000p400', 'm000m400',
              'm283m283', 'p283m283', 'm283p283', 'p283p283']

    ## Load the input/true WCS
    input_imgname = '/disks/shear14/KiDS_simulations/Cosmos/Theli_image/KIDS_150p1_2p2_r_SDSS.V0.5.9A.swarp.cut.fits'
    wcs_input = galsim.AstropyWCS(input_imgname)

    ## Load the input/truth catalogue
    input_catname = '/disks/shear14/KiDS_simulations/Cosmos/KIDS_HST_cat/KiDS_Griffith_iMS1_handpicked_stars.cat'
    input_catalogue = fits.open(input_catname)
    input_data = input_catalogue[1].data
    print "Loaded the input data"

    ## Obtain the cuts on the input/truth catalogue
    MASK_all = input_data.MASK
    mask = ~np.array(MASK_all & 0xfc3c, dtype=bool)
    handpicked_stars = input_data['handpicked_stars']
    rank = input_data['rank']
    distance2d = input_data['distance2d']
    assert handpicked_stars.dtype == bool
    cuts = mask & (rank >= 0) & (distance2d < 1) & (~handpicked_stars)

    OBJNO = input_data['OBJNO'][cuts]
    RA = input_data['RA'][cuts]
    DEC = input_data['DEC'][cuts]

    X, Y = [], []
    x_offset, y_offset = 2500, 2500
    ## Convert the sky positions to image positions (takes about a minute).
    ## This is needed because Xpos_THELI and Ypos_THELI aren't filled for the faint galaxies.
    for gg in xrange(cuts.sum()):
        pos = wcs_input.posToImage(
            galsim.CelestialCoord(RA[gg] * galsim.degrees, DEC[gg] * galsim.degrees))
        x, y = pos.x - x_offset, pos.y - y_offset
        X.append(x)
        Y.append(y)
    X = np.array(X)
    Y = np.array(Y)

    ## "Building a kd-tree with {0} galaxies using their input positions...".format(cuts.sum())
    tree = cKDTree(np.vstack([X, Y]).T)

    for psfID in psfIDs:
        for g_id in xrange(len(gRange)):
            runID = gRange[g_id] + '_' + str(psfID) + '_' + randomKey
            print "Comparing ", runID
            #shearID, psfID, randomKey = runID.split('_')
            ARCHDIR = os.path.join('/disks/shear15/KiDS/ImSim/pipeline/archive/', randomKey, runID)
            TMPDIR = os.path.join('/disks/shear15/KiDS/ImSim/temp', randomKey, runID)

            prior_catname = 'prior'
            prior_pathname = os.path.join(ARCHDIR, prior_catname)
            prior_dat = np.loadtxt(prior_pathname)

            sex_arrs, lf_arrs = [], []
            indices = []
            for rot_id in xrange(4):
                sex_catname = 'sexrot0{0}.cat'.format(rot_id)
                lf_catname = '0{0}.output.rot.fits.asc.scheme2b_corr'.format(rot_id)
                if rot_id == 0:
                    sex_catname = 'sex.cat'
                    # lf_catname = 'output.fits.asc.scheme2b_corr'
                sex_pathname = os.path.join(TMPDIR, sex_catname)
                lf_pathname = os.path.join(ARCHDIR, lf_catname)

                ## SExtractor column names come from the parameter file
                sex_params_filename = 'kidssims.param'
                sex_params_pathname = os.path.join(
                    '/disks/shear15/KiDS/ImSim/pipeline/backup/pipeline/config/', sex_params_filename)
                with open(sex_params_pathname, 'r') as f:
                    sex_fieldnames = f.readlines()
                ## Remove the empty lines
                n_emptylines = sex_fieldnames.count('\n')
                for ii in xrange(n_emptylines):
                    sex_fieldnames.remove('\n')
                ## Strip off the newline character from the rest
                for ii in xrange(len(sex_fieldnames)):
                    sex_fieldnames[ii] = sex_fieldnames[ii][:-1]

                ## Lensfit column names come from the catalogue header
                lf_fieldnames = []
                with open(lf_pathname, 'r') as f:
                    for lineno in xrange(31):
                        line = f.readline()
                        words = line.split()
                        ## Skip the first line; it is not a column name
                        if lineno > 0:
                            ## The column names themselves contain spaces, so re-join
                            ## everything after the first two tokens
                            lf_fieldname = ' '.join(words[2:])
                            lf_fieldnames.append(lf_fieldname)

                sex_arr = np.loadtxt(sex_pathname)
                lf_arr = np.loadtxt(lf_pathname)
                assert len(sex_arr) == len(lf_arr)
                assert sex_arr.shape[1] == len(sex_fieldnames)
                assert lf_arr.shape[1] == len(lf_fieldnames)

                ## Match the detected positions against the input positions in the kd-tree
                d2d, idx = tree.query(
                    np.array([sex_arr[:, sex_fieldnames.index('X_IMAGE')],
                              sex_arr[:, sex_fieldnames.index('Y_IMAGE')]]).T)

                sex_arrs.append(sex_arr)
                lf_arrs.append(lf_arr)
                indices.append(idx)

            ## Assuming that the columns mean the same for all rotations, append them
            sex_dat = np.vstack(tuple(sex_arrs))
            lf_dat = np.vstack(tuple(lf_arrs))

            ## Make the QC directory, if it doesn't exist already
            QC_dirname = os.path.join(ARCHDIR, 'QC')
            if 'QC' not in os.listdir(ARCHDIR):
                os.mkdir(QC_dirname)

            ## Make the overall distributions

            ## Magnitude plots
            fig, ax = plt.subplots()
            bins = np.arange(16, 27, 0.05)
            prior_mag_col_id = 2
            _n, _bins, _patches = ax.hist(prior_dat[:, prior_mag_col_id], bins=bins,
                                          histtype='step', color='k', label='Input magnitude')
            _n, _bins, _patches = ax.hist(sex_dat[:, sex_fieldnames.index('MAG_AUTO')], bins=bins,
                                          histtype='step', weights=0.25 * np.ones(len(sex_dat)),
                                          color='r', label='Output magnitude')
            ax.set_yscale('log')
            _lgnd = ax.legend(loc='best')
            fig.suptitle('Magnitude distributions')
            fig_filename = 'magnitudes.png'
            fig_pathname = os.path.join(QC_dirname, fig_filename)
            fig.savefig(fig_pathname)

            # ## SExtractor SNR plots
            # fig, ax = plt.subplots()
            # bins = np.logspace(-2, 4, 60)
            # _n, _bins, _patches = ax.hist(input_data[cuts]['FLUX_AUTO_THELI']/input_data[cuts]['FLUXERR_AUTO_THELI'], bins=bins, histtype='step', color='k', label='SNR in data')
            # _n, _bins, _patches = ax.hist(sex_dat[:,sex_fieldnames.index('FLUX_AUTO')]/sex_dat[:,sex_fieldnames.index('FLUXERR_AUTO')], bins=bins, histtype='step', weights=0.25*np.ones(len(sex_dat)), color='r', label='SNR in sims')
            # ax.set_xscale('log')
            # _lgnd = ax.legend(loc='best')
            # fig.suptitle('SNR from SExtractor')
            # fig_filename = 'snr.png'
            # fig_pathname = os.path.join(QC_dirname,fig_filename)
            # fig.savefig(fig_pathname)

            # ## FWHM plots
            # fig, ax = plt.subplots()
            # bins = np.logspace(-2, 2, 20)
            # _n, _bins, _patches = ax.hist(input_data[cuts]['FWHM_IMAGE_THELI'], bins=bins, histtype='step', color='k', label='FWHM_IMAGE in data')
            # _n, _bins, _patches = ax.hist(sex_dat[:,sex_fieldnames.index('FWHM_IMAGE')], bins=bins, histtype='step', weights=0.25*np.ones(len(sex_dat)), color='r', label='FWHM_IMAGE in sims')
            # ax.set_xscale('log')
            # _lgnd = ax.legend(loc='best')
            # fig.suptitle('FWHM_IMAGE from SExtractor')
            # fig_filename = 'fwhm.png'
            # fig_pathname = os.path.join(QC_dirname,fig_filename)
            # fig.savefig(fig_pathname)

            ## Scalelength
            fig, ax = plt.subplots()
            bins = np.logspace(-2, 2, 20)
            _n, _bins, _patches = ax.hist(input_data[cuts]['bias_corrected_scalelength_pixels'],
                                          bins=bins, histtype='step', color='k',
                                          label='LF scalelength in data')
            _n, _bins, _patches = ax.hist(lf_dat[:, lf_fieldnames.index('bias-corrected scalelength /pixels')],
                                          bins=bins, histtype='step',
                                          weights=0.25 * np.ones(len(lf_dat)),
                                          color='r', label='LF scalelength in sims')
            ax.set_xscale('log')
            _lgnd = ax.legend(loc='best')
            fig.suptitle('Scalelength from LF')
            fig_filename = 'scalelength.png'
            fig_pathname = os.path.join(QC_dirname, fig_filename)
            fig.savefig(fig_pathname)

            ## Model SNR
            fig, ax = plt.subplots()
            bins = np.logspace(-2, 4, 60)
            _n, _bins, _patches = ax.hist(input_data[cuts]['model_SNratio'], bins=bins,
                                          histtype='step', color='k', label='Model SNR in data')
            _n, _bins, _patches = ax.hist(lf_dat[:, lf_fieldnames.index('model SNratio')],
                                          bins=bins, histtype='step',
                                          weights=[0.25] * len(lf_dat),
                                          color='r', label='Model SNR in sims')
            ax.set_xscale('log')
            _lgnd = ax.legend(loc='best')
            fig.suptitle('Model SNR from LF')
            fig_filename = 'snr_model.png'
            fig_pathname = os.path.join(QC_dirname, fig_filename)
            fig.savefig(fig_pathname)

            ## Pixel SNR
            fig, ax = plt.subplots()
            bins = np.logspace(-2, 4, 60)
            _n, _bins, _patches = ax.hist(input_data[cuts]['pixel_SNratio'], bins=bins,
                                          histtype='step', color='k', label='Pixel SNR in data')
            _n, _bins, _patches = ax.hist(lf_dat[:, lf_fieldnames.index('pixel SNratio')],
                                          bins=bins, histtype='step',
                                          weights=[0.25] * len(lf_dat),
                                          color='r', label='Pixel SNR in sims')
            ax.set_xscale('log')
            _lgnd = ax.legend(loc='best')
            fig.suptitle('Pixel SNR from LF')
            fig_filename = 'snr_pixel.png'
            fig_pathname = os.path.join(QC_dirname, fig_filename)
            fig.savefig(fig_pathname)

            plt.close('all')
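## Hedged usage sketch (not part of the original script): a minimal command-line
## driver for check_consistency. Taking the random key from sys.argv[1] is an
## assumption, suggested by the commented-out debug flag that reads sys.argv[2]
## near the top of the file; adjust to however the pipeline actually invokes it.
if __name__ == '__main__':
    check_consistency(sys.argv[1], psfIDs=[0, 1, 2, 3, 4])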