def database_from_query(query): engine = db.get_db_engine() utils.set_warnings() result = db.from_sql(query, engine) return result
def all_fields():
    """
    Query the `charge_fields` table for finished fields and write their
    root names, sorted alphabetically, to ``fields.txt`` (one per line).
    """
    from grizli.aws import db
    engine = db.get_db_engine()
    ch = db.from_sql(
        "select * from charge_fields where log like 'Finish%%' and field_ra > 0 and filters like '%%F1%%' and nfilt > 1 and field_root like 'j%%'",
        engine)

    so = np.argsort(ch['field_root'])
    with open('fields.txt', 'w') as fp:
        for r in ch['field_root'][so]:
            fp.write(f'{r}\n')


# Shell snippet for processing the fields listed in fields.txt in random
# order, skipping any field that already has a log file.
bash = """
roots=`cat fields.txt | shuf`
for root in ${roots}; do
    if [ ! -e "./log/${root}.log" ]; then
        date > ./log/${root}.log
        rm -rf ${root}/output/catalog_assets ${root}/output/js
        python make_fitsmap.py ${root}
        rm ${root}/*fits*
    fi
done
"""

import json


def reproc():
    """
    Rewrite the CHArGE summary JSON, replacing the field-root column with a
    link to the generated map for every field that has a log file, then
    upload both JSON files to S3 with public-read ACLs.
    """
    # Use a context manager so the backup file handle is closed promptly
    # (the original `json.load(open(...))` leaked the handle).
    with open('CHArGE-Mar2020.json.bkup') as fp:
        js = json.load(fp)

    count = 0
    for row in js['data']:
        root = row[0]
        map_href = f"<a href=https://s3.amazonaws.com/grizli-v1/Pipeline/{root}/Map/index.html>{root}</a>"
        # Only fields that have been processed (log file exists) get a link
        if os.path.exists(f'log/{root}.log'):
            row[0] = map_href
            count += 1

    print(f'Count: {count} / {len(js["data"])}')

    with open('CHArGE-Mar2020.json', 'w') as fp:
        json.dump(js, fp)

    os.system(
        'aws s3 cp CHArGE-Mar2020.json s3://grizli-v1/Master/ --acl public-read'
    )
    os.system(
        'aws s3 cp CHArGE-Mar2020.json.bkup s3://grizli-v1/Master/ --acl public-read'
    )
def get_matched_multibeam(matched_id, good_ids, redshift_cat=None, sed_cat=None, engine=None):
    """
    Fetch the grism spectrum for a source matched to the grizli database.

    Queries `redshift_fit` joined with `photometry_apcorr` around the
    position and spectroscopic redshift of the selected catalog source,
    downloads the `beams.fits` / `fit_args.npy` products from S3 and loads
    the spectrum with `multifit.MultiBeam`.

    Parameters
    ----------
    matched_id : int
        Index into `good_ids` (e.g., from `find_matched_obs`).

    good_ids : array-like
        Zero-indexed catalog ids of candidate sources.

    redshift_cat, sed_cat : table-like
        Catalogs providing 'z_spec' and 'ra'/'dec' for the source.
        Defaults changed from mutable ``[]`` to ``None``; calling without
        them was never usable, so this is backward compatible.

    engine : sqlalchemy engine
        Database connection passed to `grizli_db.from_sql`.

    Returns
    -------
    mb : `~grizli.multifit.MultiBeam`
        The loaded grism spectrum object.

    z_grism : float
        Grism redshift (`z_map`) of the matched database row.

    gal_id : int
        Zero-indexed catalog id of the source.
    """
    gal_id = good_ids[matched_id]
    sed_ra = sed_cat[gal_id]['ra']
    sed_dec = sed_cat[gal_id]['dec']
    sed_spec = redshift_cat[gal_id]['z_spec']
    print('gal_id: %.0f, z_spec: %.3f' % (gal_id + 1, sed_spec))

    columns = ['status', 'root', 'id', 'ra', 'dec', 'mag_auto', 'flux_radius',
               'bic_diff', 'q_z', 'z_map', 'd4000', 't_g102', 't_g141']

    SQL = ("SELECT {colstr} FROM redshift_fit NATURAL JOIN photometry_apcorr"
           " WHERE q_z > -0.2").format(colstr=','.join(columns))
    # " AND z_map > 0.2 AND z_map < 0.3 AND mag_auto < 27").format(colstr=','.join(columns))

    # Box match: +/- 0.001 deg in position, +/- 0.005 in redshift
    extra = " AND ra < %.3f AND ra > %.3f" % (sed_ra + 0.001, sed_ra - 0.001)
    extra += " AND dec < %.3f AND dec > %.3f" % (sed_dec + 0.001, sed_dec - 0.001)
    extra += " AND z_map < %.3f AND z_map > %.3f" % (sed_spec + 0.005, sed_spec - 0.005)
    SQL = SQL + extra
    #print(SQL)

    res = grizli_db.from_sql(SQL, engine)
    print('N: ', len(res))

    so = np.argsort(res['mag_auto'])
    # Rendering only displays when run inside a notebook cell
    HTML(grizli_db.render_for_notebook(res[so]['root', 'id', 'ra', 'dec', 'mag_auto', 'q_z', 'd4000', 'z_map'], image_extensions=['stack', 'full']))

    # NOTE(review): this takes the first *unsorted* row, not the brightest
    # according to `so` — confirm intended.
    match_obj = res[0]
    # Renamed from `id` to avoid shadowing the builtin
    root, src_id = match_obj['root'].item(), match_obj['id'].item()
    print(root, src_id)

    # Fetch grism spectrum products from S3 if not already local
    base_url = 'https://s3.amazonaws.com/grizli-v1/Pipeline/{0}/Extractions'.format(root)
    files = ['{0}_{1:05d}.beams.fits'.format(root, src_id),
             '{0}_fit_args.npy'.format(root)]
    for file in files:
        #print(file)
        if not os.path.exists(file):
            os.system('wget {0}/{1}'.format(base_url, file))
            #print('wget {0}/{1}'.format(base_url, file))

    #args = np.load('{0}_fit_args.npy'.format(root), allow_pickle=True)[0]
    ix = (res['root'] == root) & (res['id'] == src_id)
    z_grism = res['z_map'][ix][0]
    print('Grism redshift: {0:.4f}'.format(z_grism))

    # let's load this spectrum in now:
    mb = multifit.MultiBeam('{0}_{1:05d}.beams.fits'.format(root, src_id))

    return mb, z_grism, gal_id
def get_visit_files():
    """
    Download the ``{field}_visits.npy`` file from the grizli-v1 S3 bucket
    for every finished field in the `charge_fields` table, skipping files
    already present locally.
    """
    import boto3
    from grizli.aws import db

    engine = db.get_db_engine()
    fields = db.from_sql(
        "select field_root, a_wfc3 from charge_fields where log LIKE 'Finished%%'",
        engine=engine)

    s3 = boto3.resource('s3')
    bkt = s3.Bucket('grizli-v1')

    for i, field in enumerate(fields['field_root']):
        s3_file = '{0}_visits.npy'.format(field)
        if not os.path.exists(s3_file):
            s3_path = 'Pipeline/{0}/Prep'.format(field)
            try:
                # Requester-pays bucket: must set RequestPayer explicitly
                bkt.download_file(s3_path + '/' + s3_file, s3_file,
                                  ExtraArgs={"RequestPayer": "requester"})
                print(i, s3_file)
            except Exception:
                # Best-effort download: narrowed from a bare `except:` so
                # KeyboardInterrupt/SystemExit are no longer swallowed
                print(i, 'Download failed: {0}'.format(field))
def find_matched_obs(z_low=1.6, z_high=3.0, redshift_cat=None, sed_cat=None, engine=None):
    """
    Find well-constrained spectroscopic sources with a positional and
    redshift match in the grizli `redshift_fit` database.

    Parameters
    ----------
    z_low, z_high : float
        Spectroscopic redshift selection range.

    redshift_cat, sed_cat : table-like
        Catalogs providing 'z_spec', 'z_a', 'nfilt', 'id' and 'ra'/'dec'.
        Defaults changed from mutable ``[]`` to ``None``; calling without
        them was never usable, so this is backward compatible.

    engine : sqlalchemy engine
        Database connection passed to `grizli_db.from_sql`.

    Returns
    -------
    good_ids : array-like
        Zero-indexed catalog ids of all selected sources (matches are
        reported via printed output).
    """
    # Select z_spec in range, photo-z consistent with z_spec, many filters
    cat_mask = (redshift_cat['z_spec'] > z_low) & (redshift_cat['z_spec'] < z_high) &\
        (np.abs(redshift_cat['z_spec'] - redshift_cat['z_a']) < 0.1) & (redshift_cat['nfilt'] > 39)
    print('# galaxies: %.0f' % np.sum(cat_mask))

    # Catalog ids are 1-indexed; convert to 0-indexed row numbers
    good_ids = redshift_cat['id'][cat_mask] - 1

    for i, gal_id in enumerate(good_ids):
        sed_ra = sed_cat[gal_id]['ra']
        sed_dec = sed_cat[gal_id]['dec']
        sed_spec = redshift_cat[gal_id]['z_spec']
        # print('gal_id: %.0f, z_spec: %.3f' %(gal_id+1, sed_spec))

        columns = ['status', 'root', 'id', 'ra', 'dec', 'mag_auto', 'flux_radius',
                   'bic_diff', 'q_z', 'z_map', 'd4000', 't_g102', 't_g141']

        SQL = ("SELECT {colstr} FROM redshift_fit NATURAL JOIN photometry_apcorr"
               " WHERE q_z > -0.2").format(colstr=','.join(columns))
        # " AND z_map > 0.2 AND z_map < 0.3 AND mag_auto < 27").format(colstr=','.join(columns))

        # Box match: +/- 0.001 deg in position, +/- 0.005 in redshift
        extra = " AND ra < %.3f AND ra > %.3f" % (sed_ra + 0.001, sed_ra - 0.001)
        extra += " AND dec < %.3f AND dec > %.3f" % (sed_dec + 0.001, sed_dec - 0.001)
        extra += " AND z_map < %.3f AND z_map > %.3f" % (sed_spec + 0.005, sed_spec - 0.005)
        SQL = SQL + extra
        #print(SQL)

        res = grizli_db.from_sql(SQL, engine)
        #print('N: ', len(res))

        if (len(res) > 0):
            print('match for i = %.0f, with N: %.0f' % (i, len(res)))
            print('gal_id: %.0f, z_spec: %.3f' % (gal_id + 1, sed_spec))
            so = np.argsort(res['mag_auto'])
            # HTML(grizli_db.render_for_notebook(res[so]['root','id','ra','dec','mag_auto','q_z','d4000','z_map'],image_extensions=['stack','full']))
            # try:
            #     HTML(grizli_db.render_for_notebook(res[so]['root','id','ra','dec','mag_auto','q_z','d4000','z_map'],image_extensions=['stack','full']))
            # except:
            #     print('i = %.0f, no matches' %i)

    return good_ids
def full_hawki_query(rd=None, query_result=None, eso=None):
    """
    Query all HAWKI observations....

    Cross-matches CHArGE fields (or a single ``rd = (ra, dec)`` position)
    against a HAWK-I archive query result, draws a footprint figure
    ``{field}_hawki.png`` per matched field, and for fields where HAWK-I
    coverage overlaps the HST footprint writes the per-exposure query
    result to ``{field}_hawki.fits``.

    Parameters
    ----------
    rd : (float, float), optional
        Single (ra, dec) position in decimal degrees.  If None, all
        finished fields from the `charge_fields` table are used.

    query_result : (dict, table), optional
        ``(kwargs, res)`` as returned by `full_query`.  If None,
        `full_query` is run here.

    eso : optional
        ESO archive interface; created with `get_eso` if None.
    """
    import os
    import numpy as np
    import matplotlib.pyplot as plt
    from shapely.geometry import Polygon, Point
    from descartes import PolygonPatch
    from shapely import affinity
    from grizli import utils
    from mastquery import query, overlaps

    if eso is None:
        eso = get_eso()

    if query_result is None:
        _, kwargs, res = full_query(eso=eso)
    else:
        kwargs, res = query_result

    # surveys = 092.A-0472

    # CHArGE fields
    from grizli.aws import db
    import astropy.units as u
    from astropy.coordinates import SkyCoord

    engine = db.get_db_engine()

    if rd is None:
        # All finished fields from the database
        ch = db.from_sql(
            "SELECT field_root, field_ra as ra, field_dec as dec, log FROM charge_fields where log LIKE '%%Finish%%'",
            engine)
    else:
        # Build a single-row table for the requested position
        ra, dec = rd
        ch = utils.GTable()
        ch['ra'] = [ra]
        ch['dec'] = [dec]
        ch['field_root'] = [
            utils.radec_to_targname(
                ra=ra,
                dec=dec,
                round_arcsec=(4, 60),
                precision=2,
                targstr='j{rah}{ram}{ras}{sign}{ded}{dem}',
                header=None,
            )
        ]

    # NOTE(review): `idx`/`has_hawki` from this nearest-neighbor match are
    # not used below (the KD-tree ball query supersedes them)
    idx, dr = ch.match_to_catalog_sky(res)
    has_hawki = dr < 10 * u.arcmin

    import scipy.spatial

    # KD-trees on unit-sphere cartesian coordinates for radius matching
    ch_rd = SkyCoord(ch['ra'], ch['dec'], unit='deg')
    ch_xyz = ch_rd.cartesian.get_xyz().value
    ctree = scipy.spatial.cKDTree(ch_xyz.T)

    hawki_rd = SkyCoord(res['RA'], res['DEC'], unit='deg')
    hawki_xyz = hawki_rd.cartesian.get_xyz().value
    htree = scipy.spatial.cKDTree(hawki_xyz.T)

    # Match radius (chord length on the unit sphere)
    r = 30. / 60 / 360. * 2
    tr = ctree.query_ball_tree(htree, r)
    n_hawki = np.array([len(t) for t in tr])

    # Figures
    idx = np.where(n_hawki > 0)[0]

    xsize = 5
    px, py = 0.45, 0.2

    for i in idx:
        field = ch['field_root'][i]
        print(i, field)
        # Skip fields whose figure has already been made
        if os.path.exists(f'{field}_hawki.png'):
            continue

        field = ch['field_root'][i]
        #tab = utils.read_catalog(f'../FieldsSummary/{field}_footprint.fits')
        if os.path.exists(f'{field}_footprint.fits'):
            # Build HST footprint polygons from the field footprint table
            tab = utils.read_catalog(f'{field}_footprint.fits')
            meta = tab.meta
            xr = (meta['XMIN'], meta['XMAX'])
            yr = (meta['YMIN'], meta['YMAX'])
            ra, dec = meta['BOXRA'], meta['BOXDEC']
            cosd = np.cos(dec / 180 * np.pi)
            dx = (xr[1] - xr[0]) * cosd * 60
            dy = (yr[1] - yr[0]) * 60
            box_width = np.maximum(dx, dy)
            #query_size = np.maximum(min_size, box_width/2)/60.

            # Union of all instrument polygons (p_hst) and WFC3/IR only (p_ir)
            p_hst = None
            p_ir = None
            for j, fph in enumerate(tab['footprint']):
                ps, is_bad, poly = query.instrument_polygon(tab[j])
                if not hasattr(ps, '__len__'):
                    ps = [ps]

                for p in ps:
                    p_j = Polygon(p).buffer(0.001)
                    if p_hst is None:
                        p_hst = p_j
                    else:
                        p_hst = p_hst.union(p_j)

                    if tab['instrument_name'][j] == 'WFC3/IR':
                        if p_ir is None:
                            p_ir = p_j
                        else:
                            p_ir = p_ir.union(p_j)
        else:
            # No footprint file: only the field center is known;
            # `ra`/`dec` here carry over from the rd/database values above
            cosd = np.cos(dec / 180 * np.pi)
            p_hst = None
            p_ir = None

        ##############################
        fig = plt.figure(figsize=[6, 6])
        ax = fig.add_subplot(111)
        ax.scatter(ra, dec, zorder=1000, marker='+', color='k')

        # HAWKI
        # Union of circular HAWK-I pointing footprints (4.1 arcmin radius),
        # stretched in RA by 1/cos(dec)
        h_p = None
        for j in tr[i]:
            p = Point(res['RA'][j], res['DEC'][j]).buffer(4.1 / 60)
            p = affinity.scale(p, xfact=1. / cosd)
            # ax.add_patch(PolygonPatch(p, color='r', alpha=0.1))
            x, y = p.boundary.xy
            ax.plot(x, y, color=utils.MPL_COLORS['r'], alpha=0.05)

            if h_p is None:
                h_p = p
            else:
                h_p = h_p.union(p)

        # If overlap between hawki and HST, query all exposures
        if p_hst is not None:
            hawki_overlap = h_p.intersection(p_hst)
            hawki_un = h_p.union(p_hst)

            # Normalize single polygons to lists for uniform iteration
            if not hasattr(p_hst, '__len__'):
                p_hst = [p_hst]
            if not hasattr(h_p, '__len__'):
                h_p = [h_p]

            for p in p_hst:
                #ax.add_patch(PolygonPatch(p, color='k', alpha=0.2))
                if not hasattr(p.boundary, '__len__'):
                    bs = [p.boundary]
                else:
                    bs = p.boundary

                for b in bs:
                    x, y = b.xy
                    ax.plot(x, y, color=utils.MPL_COLORS['gray'], alpha=0.3)
        else:
            # No HST footprint: any HAWK-I coverage counts as overlap
            hawki_overlap = h_p
            if not hasattr(h_p, '__len__'):
                h_p = [h_p]

        if p_ir is not None:
            if not hasattr(p_ir, '__len__'):
                p_ir = [p_ir]

            for p in p_ir:
                ax.add_patch(
                    PolygonPatch(p, color=utils.MPL_COLORS['gray'],
                                 alpha=0.2))
                x, y = p.boundary.xy
                ax.plot(x, y, color=utils.MPL_COLORS['gray'], alpha=0.3)

        for p in h_p:
            ax.add_patch(
                PolygonPatch(p, color=utils.MPL_COLORS['r'], alpha=0.2))

        # Annotate per-target total exposure time (hours)
        targets = [
            '{0} {1}'.format(res['ProgId'][j], res['Object'][j])
            for j in tr[i]
        ]
        for j, targ in enumerate(np.unique(targets)):
            ixj = np.where(np.array(targets) == targ)[0]
            expt = res['DET NDIT'] * res['DET DIT'] * res['TPL NEXP']
            ax.text(0.02,
                    0.98 - j * 0.03,
                    '{0} {1:.1f}'.format(targ,
                                         expt[tr[i]][ixj].sum() / 3600.),
                    ha='left',
                    va='top',
                    transform=ax.transAxes,
                    fontsize=7)

        # Square on the sky: compensate RA compression with the aspect ratio
        ax.set_aspect(1. / cosd)
        ax.set_title(field)
        ax.grid()

        #xsize = 4
        dx = np.diff(ax.get_xlim())[0] * cosd * 60
        dy = np.diff(ax.get_ylim())[0] * 60

        fig.set_size_inches(xsize * np.clip(dx / dy, 0.2, 5) + px,
                            xsize + py)

        # RA increases to the left
        ax.set_xlim(ax.get_xlim()[::-1])
        overlaps.draw_axis_labels(ax=ax, nlabel=3)
        fig.tight_layout(pad=0.5)
        fig.savefig(f'{field}_hawki.png', dpi=120)
        plt.close('all')

        # Query and save all exposures when HAWK-I overlaps the HST footprint
        if (hawki_overlap.area > 0.0) & (not os.path.exists(f'{field}_hawki.fits')):
            # Copy the query kwargs so the originals aren't mutated, then
            # drop the template filters to get every exposure
            kws = {}
            for k in kwargs:
                kws[k] = kwargs[k].copy()

            kws['column_filters'].pop('tpl_nexp')
            kws['column_filters'].pop('tpl_expno')

            _res = eso.query_instrument('hawki',
                                        pi_coi_name='PI_only',
                                        coord1=ra,
                                        coord2=dec,
                                        box='00 30 00',
                                        **kws)
            if len(_res) > 0:
                print('{0} datasets'.format(len(_res)))
                _res['PI'] = [p.split('/')[0].strip() for p in _res['PI/CoI']]
                _res.write(f'{field}_hawki.fits', overwrite=True)
def run_root(root='j002532m1223', min_zoom=2, get_grism=True):
    """
    Prepare images for fitsmap.convert

    Syncs pipeline products for `root` from S3, renders per-filter PNG
    images (plus Spitzer reprojections and a segmentation image), builds
    photometry/grism CSV catalogs, runs `fitsmap.convert.dir_to_map`, and
    uploads the resulting map back to S3.

    Parameters
    ----------
    root : str
        Field root name (also the working directory; note this function
        calls ``os.chdir(root)`` and does not change back).

    min_zoom : int
        Passed to `fitsmap.convert.dir_to_map`.

    get_grism : bool
        If True, also query the `redshift_fit` database table and write a
        ``grism.cat`` catalog with links to the extraction figures.
    """
    from grizli.pipeline import auto_script
    from grizli import utils
    import eazy.utils
    from fitsmap import convert

    # Pull science images, photometry and any existing map PNGs from S3
    print('sync')
    os.system(
        f'aws s3 sync s3://grizli-v1/Pipeline/{root}/Prep/ {root}/ --exclude "*" --include "*sci.fits.gz" --include "*phot.fits" --include "*seg.fits.gz"'
    )
    os.system(
        f'aws s3 sync s3://grizli-v1/Pipeline/{root}/IRAC/ {root}/ --exclude "*" --include "*sci.fits*" --include "*model.fits"'
    )
    os.system(
        f'aws s3 sync s3://grizli-v1/Pipeline/{root}/Map/ {root}/ --exclude "*" --include "{root}.*png"'
    )

    os.chdir(root)

    # Full-field color image
    if not os.path.exists(f'{root}.rgb.png'):
        _ = auto_script.field_rgb(root=root,
                                  xsize=6,
                                  full_dimensions=True,
                                  HOME_PATH=None,
                                  gzext='*',
                                  suffix='.rgb',
                                  output_format='png')

    # IR
    files = glob.glob(f'{root}-[if][r01]*sci.fits*')
    files.sort()
    # Filter name is the token between '{root}-' and the first '_'
    filts = [file.split(f'{root}-')[1].split('_')[0] for file in files]
    for filt in filts:
        if os.path.exists(f'{root}.{filt}.png'):
            continue

        _ = auto_script.field_rgb(root=root,
                                  xsize=6,
                                  full_dimensions=True,
                                  HOME_PATH=None,
                                  gzext='*',
                                  filters=[filt],
                                  suffix=f'.{filt}',
                                  output_format='png',
                                  invert=True,
                                  scl=2)

    # Optical, 2X pix
    files = glob.glob(f'{root}-[f][2-8]*sci.fits*')
    files.sort()
    filts = [file.split(f'{root}-')[1].split('_')[0] for file in files]
    for filt in filts:
        if os.path.exists(f'{root}.{filt}.png'):
            continue

        _ = auto_script.field_rgb(root=root,
                                  xsize=6,
                                  full_dimensions=2,
                                  HOME_PATH=None,
                                  gzext='*',
                                  filters=[filt],
                                  suffix=f'.{filt}',
                                  output_format='png',
                                  invert=True,
                                  scl=2)

    # Spitzer
    if glob.glob(f'{root}-ch*fits*'):
        import reproject

        # Reproject IRAC channels onto the IR mosaic WCS
        out_img = pyfits.open(f'{root}-ir_drz_sci.fits.gz')
        repr_hdu = out_img[0]
        # repr_hdu = utils.make_maximal_wcs([out_wcs], pixel_scale=0.2,
        #                                   verbose=False, pad=0, poly_buffer=0)
        repr_wcs = pywcs.WCS(repr_hdu.header)

        mosaics = glob.glob(f'{root}-ch[12]*sci.fits*')
        mosaics.sort()
        for mos in mosaics:
            ch = mos.split(f'{root}-')[1].split('_')[0]
            if os.path.exists(f'{root}.{ch}.png'):
                continue

            print(f'Reproject {ch}')
            in_img = pyfits.open(mos)
            in_wcs = pywcs.WCS(in_img[0].header)
            reproj = utils.blot_nearest_exact(in_img[0].data,
                                              in_wcs,
                                              repr_wcs,
                                              scale_by_pixel_area=False)
            # 's' suffix = science image on the common WCS
            pyfits.writeto(f'{root}-{ch}s_drz_sci.fits',
                           data=reproj,
                           header=repr_hdu.header,
                           overwrite=True)
            ext = [ch + 's']

            if os.path.exists(f'{root}-{ch}_model.fits'):
                # resid
                print(f' {ch} model')
                in_img = pyfits.open(f'{root}-{ch}_model.fits')
                reproj = utils.blot_nearest_exact(in_img[1].data,
                                                  in_wcs,
                                                  repr_wcs,
                                                  scale_by_pixel_area=False)
                # 'm' suffix = reprojected model image
                pyfits.writeto(f'{root}-{ch}m_drz_sci.fits',
                               data=reproj,
                               header=repr_hdu.header,
                               overwrite=True)
                ext.append(ch + 'm')

            for filt in ext:
                _ = auto_script.field_rgb(root=root,
                                          xsize=6,
                                          full_dimensions=True,
                                          HOME_PATH=None,
                                          gzext='',
                                          filters=[filt],
                                          suffix=f'.{filt}',
                                          output_format='png',
                                          invert=True,
                                          scl=2)

    # Segmentation image PNG
    if not os.path.exists(f'{root}.seg.png'):
        sfig = make_seg(f'{root}-ir_seg.fits.gz', outfile=f'{root}.seg.png')

    # Collect the PNG layers for the map, grouped in display order
    filelist = []
    for q in ['*f[2-8]', '*f[01]*', '*ir*', '*ch[12]', '*seg', '*rgb']:
        l_i = glob.glob(q + '*png')
        l_i.sort()
        filelist.extend(l_i)

    # Photometric catalog -> CSV with an HTML query link per source
    ph = utils.read_catalog(f'{root}_phot.fits')
    ph['id'] = ph['number']
    ph['ra'].format = '.6f'
    ph['dec'].format = '.6f'
    ph['mag'] = ph['mag_auto']
    ph['mag'].format = '.2f'
    ph['query'] = [
        eazy.utils.query_html(r, d).split(') ')[1]
        for r, d in zip(ph['ra'], ph['dec'])
    ]
    ph['id', 'ra', 'dec', 'query', 'mag'].write('phot.cat',
                                                format='ascii.csv',
                                                overwrite=True)
    filelist += ['phot.cat']

    if get_grism:
        # Grism redshift catalog with links to the extraction figures
        from grizli.aws import db
        engine = db.get_db_engine()
        gr = db.from_sql(
            f"select root, id, ra, dec, z_map from redshift_fit where root='{root}'",
            engine)
        print(f'grism.cat: {len(gr)} sources')

        if len(gr) > 0:
            gr['query'] = [
                eazy.utils.query_html(r, d).split(') ')[1]
                for r, d in zip(gr['ra'], gr['dec'])
            ]
            gr['stack'] = [
                f'<img src="https://s3.amazonaws.com/grizli-v1/Pipeline/{root}/Extractions/{root}_{id:05d}.stack.png" height="100px"/>'
                for id in gr['id']
            ]
            gr['full'] = [
                f'<img src="https://s3.amazonaws.com/grizli-v1/Pipeline/{root}/Extractions/{root}_{id:05d}.full.png" height="100px"/>'
                for id in gr['id']
            ]
            gr['line'] = [
                f'<img src="https://s3.amazonaws.com/grizli-v1/Pipeline/{root}/Extractions/{root}_{id:05d}.line.png" height="80px"/>'
                for id in gr['id']
            ]
            gr['ra'].format = '.6f'
            gr['dec'].format = '.6f'
            gr['z_map'].format = '.4f'
            gr['id', 'ra', 'dec', 'query', 'z_map', 'stack', 'full',
               'line'].write('grism.cat', format='ascii.csv', overwrite=True)
            filelist += ['grism.cat']

    # fitsmap display configuration
    convert.MPL_CMAP = 'gray_r'
    convert.cartographer.MARKER_HTML_WIDTH = '650px'
    convert.cartographer.MARKER_HTML_HEIGHT = '440px'
    convert.POPUP_CSS = [
        "span { text-decoration:underline; font-weight:bold; line-height:12pt; }",
        "tr { line-height: 7pt; }",
        "table { width: 100%; }",
        "img { height: 100px; width: auto; }",
    ]

    convert.dir_to_map("./",
                       filelist=filelist,
                       out_dir="output",
                       cat_wcs_fits_file=f"{root}-ir_drz_sci.fits.gz",
                       catalog_delim=',',
                       min_zoom=min_zoom,
                       task_procs=False,
                       image_engine='MPL')

    plt.close('all')

    # Upload the finished map and PNG layers back to S3
    if os.path.exists('output/index.html'):
        os.system(
            f'aws s3 sync output/ s3://grizli-v1/Pipeline/{root}/Map/ --acl public-read --quiet'
        )
        os.system(
            f'aws s3 sync ./ s3://grizli-v1/Pipeline/{root}/Map/ --exclude "*" --include "{root}.*png" --acl public-read'
        )