def test_query_region_box_async(coordinates, patch_get):
    response = Irsa.query_region_async(
        coordinates, catalog='fp_psc', spatial='Box',
        width=2 * u.arcmin, get_query_payload=True)
    assert response['size'] == 120
    response = Irsa.query_region_async(
        coordinates, catalog='fp_psc', spatial='Box', width=2 * u.arcmin)
    assert response is not None
def test_query_region_cone_async(coordinates, patch_get):
    response = Irsa.query_region_async(
        coordinates, catalog='fp_psc', spatial='Cone',
        radius=2 * u.arcmin, get_query_payload=True)
    assert response['radius'] == 2
    assert response['radunits'] == 'arcmin'
    response = Irsa.query_region_async(
        coordinates, catalog='fp_psc', spatial='Cone', radius=2 * u.arcmin)
    assert response is not None
def GetPositionsAndEpochs(ra, dec, Epochs, radius=6):
    t1 = Irsa.query_region(coords.SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs'),
                           catalog="allsky_4band_p1bs_psd", spatial="Cone",
                           radius=radius * u.arcsec)
    t2 = Irsa.query_region(coords.SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs'),
                           catalog="allsky_3band_p1bs_psd", spatial="Cone",
                           radius=radius * u.arcsec)
    if len(t2) == 0:
        t2 = Irsa.query_region(coords.SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs'),
                               catalog="allsky_2band_p1bs_psd", spatial="Cone",
                               radius=radius * u.arcsec)
    t3 = Irsa.query_region(coords.SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs'),
                           catalog="neowiser_p1bs_psd", spatial="Cone",
                           radius=radius * u.arcsec)
    t00 = vstack([t1, t2], join_type='inner')
    t0 = vstack([t00, t3], join_type='inner')
    t = t0

    # Find the epoch clusters
    Groups = []
    for epoch in Epochs:
        group = np.where(t['mjd'] == epoch)
        if len(group[0]) != 0:
            Groups.append(group[0][0])

    if len(Groups) >= 0.5 * len(Epochs):
        return (t['ra'][Groups].data, t['dec'][Groups].data,
                t['mjd'][Groups].data, t['w1mpro'][Groups].data)
    else:
        return [-9999], [-9999], [-9999], [-9999]
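# Hedged usage sketch (not from the original source): it assumes the module-level imports that
# GetPositionsAndEpochs relies on (Irsa, coords, u, np, vstack) are already in place; the
# coordinates and MJD list below are placeholders, not real measurements.
example_epochs = [55300.5, 55480.2, 57010.7]  # placeholder MJDs from an earlier WISE query
ra_arr, dec_arr, mjd_arr, w1_arr = GetPositionsAndEpochs(150.0, 2.2, example_epochs, radius=6)
if ra_arr[0] != -9999:
    print("Recovered {} of {} epochs".format(len(mjd_arr), len(example_epochs)))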
def get_nir_cat(self, clobber=False, use_twomass=True):
    """
    Get the NIR catalog

    Catalog (necessary for zero-point determination) is saved into
    self.data_dir as self.name+"_"+self.nir_survey+"cat.fits"
    """
    print("Fetching NIR catalog from server...")
    if use_twomass:
        if (not os.path.isfile(self.nir_cal_cat)) or clobber:
            from astroquery.irsa import Irsa
            Irsa.ROW_LIMIT = 2000.
            table = Irsa.query_region(coordinates.Galactic(l=self.glon,
                                                           b=self.glat,
                                                           unit=(u.deg, u.deg)),
                                      catalog="fp_psc", spatial="Box",
                                      width=self.nir_im_size)
            # IPAC table does not take overwrite? But FITS does? So inconsistent and bad
            table.write(self.nir_cal_cat, format='votable', overwrite=clobber)
        else:
            print("NIR catalog already downloaded. Use clobber=True to fetch new versions.")
    else:
        if (not os.path.isfile(self.nir_cat)) or clobber:
            if self.nir_survey == "VISTA":
                from astroquery.vista import Vista as NIR
            if self.nir_survey == "UKIDSS":
                from astroquery.ukidss import Ukidss as NIR
            table = NIR.query_region(coordinates.Galactic(l=self.glon,
                                                          b=self.glat,
                                                          unit=(u.deg, u.deg)),
                                     radius=self.nir_im_size)
            table.write(self.nir_cat, format="fits", overwrite=clobber)
        else:
            print("NIR catalog already downloaded. Use clobber=True to fetch new versions.")
def search_2MASS():
    '''Search the 2MASS catalog based on the celestial coordinates.'''
    w = WCS(fits.getheader(UPLOAD_FOLDER + '/' + session['name']))
    r = session['r']
    o = SkyCoord(w.wcs_pix2world([(0, 0)], 1), unit='deg')
    opr = SkyCoord(w.wcs_pix2world([(r, r)], 1), unit='deg')
    rw = o.separation(opr)[0]
    print('Separation', rw)

    req = request.get_json()
    data = pd.DataFrame(req)
    src = SkyCoord(ra=data['ra'], dec=data['dec'], unit='deg', frame='icrs')
    crval = SkyCoord(ra=np.mean(data['ra']), dec=np.mean(data['dec']),
                     unit='deg', frame='icrs')
    r = 1.1 * crval.separation(src).max()

    Q = Irsa.query_region(crval, catalog='fp_psc', spatial='Cone',
                          radius=r, selcols=['ra', 'dec', 'j_m', 'k_m']).to_pandas()
    print(Q)
    m = SkyCoord(ra=Q['ra'], dec=Q['dec'], unit=('deg', 'deg'), frame='icrs')
    idx, d2, _ = match_coordinates_sky(src, m)
    Q.loc[idx[d2 >= rw]] = None  # drop stars that did not get a close enough match
    data[['j', 'k']] = Q[['j_m', 'k_m']].loc[idx].values
    print(data)

    res = make_response(data.to_json(), 200)
    return res
def download_ptf(coords, name=None, directory=None):
    """Download PTF light curve data.

    Keyword arguments:
    coords -- astropy.coordinates.SkyCoord object
    name -- string for filename (default None, i.e. PTF oid)
    directory -- location to save data (default './')
    """
    # Download the PTF data
    if directory is None:
        directory = datadir
    table = Irsa.query_region(coordinates=coords, catalog='ptf_lightcurves',
                              radius=5 * u.arcsec)
    table = table.filled(-99)

    # Don't only use the nearest!
    nearest = np.where(table['dist'] == np.min(table['dist']))
    if name is None:
        name = str(table["oid"][0])
    nearestcoords = SkyCoord(table["ra"][nearest][0], table["dec"][nearest][0], unit="deg")
    matchedinds = []
    for i in range(len(table)):
        if nearestcoords.separation(SkyCoord(table["ra"][i], table["dec"][i], unit="deg")) < 3 * u.arcsec:
            matchedinds.append(i)
    fname = directory + name + '.xml'
    table[matchedinds].write(fname, format='votable', overwrite=True)
    print(str(len(matchedinds)) + " data points saved to " + fname)

    # add to target menu and display
    targets[name] = fname
    target_select.options.append(name)
    target_select.value = target_select.options[-1]
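# Hedged usage sketch (not part of the original code): download_ptf also expects the module-level
# names datadir, targets, and target_select from the calling application, so only the query-side
# call is shown; the position and output name below are placeholders.
from astropy.coordinates import SkyCoord
import astropy.units as u

ptf_target = SkyCoord(245.0 * u.deg, 25.0 * u.deg, frame='icrs')  # placeholder position
download_ptf(ptf_target, name='example_source', directory='./')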
def cross_match():
    """Nothing but test query data and cross match."""
    twomass = Irsa.query_region(coord.SkyCoord(28.2, -0.049, unit=(u.deg, u.deg),
                                               frame='galactic'),
                                catalog='fp_psc', radius='1d0m0s')

    v = Vizier(columns=["**", "RAJ2000", "DEJ2000"])
    v.ROW_LIMIT = 9000000
    result = v.query_region(coord.SkyCoord(ra=280.7421273, dec=-4.2326516,
                                           unit=(u.deg, u.deg), frame='icrs'),
                            radius=0.05 * u.deg, catalog=["GAIA DR2"])
    gaia_data = result[4]

    # Match the 2MASS sources against the Gaia DR2 positions.
    coo_twomass = coord.SkyCoord(twomass['ra'], twomass['dec'])
    coo_gaia = coord.SkyCoord(gaia_data['RAJ2000'], gaia_data['DEJ2000'])
    idx_gaia, d2d_gaia, d3d_gaia = coo_twomass.match_to_catalog_sky(coo_gaia)

    YSO = gaia_data[:0].copy()
    for k in idx_gaia:
        YSO.add_row(gaia_data[k])
    return YSO
def test_query_region_box_async(self):
    response = Irsa.query_region_async("00h42m44.330s +41d16m07.50s",
                                       catalog='fp_psc', spatial='Box',
                                       width=2 * u.arcmin, cache=False)
    assert response is not None
def test_query_region_box(self):
    result = Irsa.query_region("00h42m44.330s +41d16m07.50s",
                               catalog='fp_psc', spatial='Box',
                               width=2 * u.arcmin, cache=False)
    assert isinstance(result, Table)
def test_query_region_cone_async(self):
    response = Irsa.query_region_async('m31', catalog='fp_psc',
                                       spatial='Cone', radius=2 * u.arcmin,
                                       cache=False)
    assert response is not None
def test_query_region_cone(self):
    result = Irsa.query_region('m31', catalog='fp_psc', spatial='Cone',
                               radius=2 * u.arcmin, cache=False)
    assert isinstance(result, Table)
def query_survey(dataframe, idx, features, survey):
    series = dataframe.loc[idx]
    coord = SkyCoord(ra=series.RA, dec=series.Dec, unit=(u.deg, u.deg))
    try:
        if survey == '2MASS':
            temp = Irsa.query_region(coord, radius=search_radius,
                                     catalog='fp_psc').to_pandas()
        elif survey == 'GAIA':
            temp = Gaia.query_object(coordinate=coord, width=search_radius,
                                     height=search_radius).to_pandas()
        else:
            print('Invalid Survey')
        catalog = SkyCoord(ra=temp.ra, dec=temp.dec, unit=(u.deg, u.deg))
        i, _, _ = match_coordinates_sky(coord, catalog)
        for feature in features:
            dataframe.at[idx, feature] = temp.at[int(i), feature]
    except Exception as e:
        print(e)
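# Hedged usage sketch (illustrative only): query_survey relies on a module-level `search_radius`
# and on the astroquery Irsa/Gaia classes already being imported; the DataFrame, feature names,
# and position below are placeholders showing the calling convention.
import pandas as pd
import astropy.units as u

search_radius = 5 * u.arcsec  # assumed module-level setting used by query_survey
targets = pd.DataFrame({'RA': [83.82], 'Dec': [-5.39], 'j_m': [None], 'k_m': [None]})
query_survey(targets, 0, ['j_m', 'k_m'], '2MASS')
print(targets[['RA', 'Dec', 'j_m', 'k_m']])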
def test_query_region_async_polygon(polygon, patch_get):
    response = Irsa.query_region_async("m31", catalog="fp_psc",
                                       spatial="Polygon", polygon=polygon,
                                       get_query_payload=True)

    for a, b in zip(re.split("[ ,]", response["polygon"]),
                    re.split("[ ,]", "10.1 +10.1,10.0 +10.1,10.0 +10.0")):
        for a1, b1 in zip(a.split(), b.split()):
            a1 = float(a1)
            b1 = float(b1)
            np.testing.assert_almost_equal(a1, b1)

    response = Irsa.query_region_async("m31", catalog="fp_psc",
                                       spatial="Polygon", polygon=polygon)
    assert response is not None
def galaxies_moc(field, moc_field, radius, moc_order=15):
    """
    MOC with the intersection of field with galaxies included in the
    2MASS Large Galaxy Atlas.
    """
    galaxies = Irsa.query_region(field, catalog="lga_v2", spatial="Cone",
                                 radius=2 * u.deg)
    moc_galaxies = MOC()

    if galaxies:
        w = obsid_wcs(field)
        field_reg = CircleSkyRegion(center=field, radius=radius)

        for g in galaxies:
            gcoords = SkyCoord(ra=g['ra'], dec=g['dec'], unit=u.deg)
            amajor = 1.5 * 2 * g['r_ext'] * u.arcsec
            galaxy_reg = EllipseSkyRegion(center=gcoords, width=amajor,
                                          height=amajor * g['sup_ba'],
                                          angle=(90 + g['sup_pa']) * u.deg)
            region = field_reg.intersection(galaxy_reg)
            moc_galaxies += reg2moc(region, moc_field, w, moc_order)

    return moc_galaxies
def test_query_region_polygon(self):
    polygon = [(10.1, 10.1), (10.0, 10.1), (10.0, 10.0)]
    result = Irsa.query_region("m31", catalog="fp_psc", spatial="Polygon",
                               polygon=polygon, cache=False)
    assert isinstance(result, Table)
def query(self, **kwargs):
    # check for data release to use
    if "coordinates" in kwargs:
        self.__coordinates = kwargs["coordinates"]
    elif "ra" in kwargs and "dec" in kwargs:
        self.__coordinates = str(kwargs["ra"]) + "," + str(kwargs["dec"])
    else:
        raise ValueError(
            "No valid coordinates found. Use the coordinates key or the ra/dec keys")
    self.__coordinates = CoordinateParser.validateCoordinates(self.__coordinates)

    radius = 1  # arcmin
    if "radius" in kwargs:
        radius = kwargs["radius"]

    # Note: this dict is never passed to the Irsa query below, and the repeated
    # "survey" key means only the last value ("2mass") survives in the literal.
    params = {
        "bgApp": "/FinderChart/nph-finder",
        "romeserver": "ROMEDEV",
        "srchsize": 12.0,
        "outsize": 200,
        "colortbl": 1,
        "nthread": 10,
        "markercolor": "red",
        "markersize": 10,
        "mode": "cgi",
        "outtype": "single",
        "locstr": "20.48371,0.4223",
        "subsetsize": 5.0,
        "survey": "sdss",
        "survey": "dss",
        "survey": "2mass",
        "markervis_shrunk": "true"
    }

    print(Irsa.list_catalogs())
    r = Irsa.query_region(self.__coordinates, catalog='fp_psc',
                          radius=radius * u.arcmin)
    return r
def test_query_region_async_polygon(self):
    polygon = [SkyCoord(ra=10.1, dec=10.1, unit=(u.deg, u.deg)),
               SkyCoord(ra=10.0, dec=10.1, unit=(u.deg, u.deg)),
               SkyCoord(ra=10.0, dec=10.0, unit=(u.deg, u.deg))]
    response = Irsa.query_region_async("m31", catalog="fp_psc",
                                       spatial="Polygon", polygon=polygon,
                                       cache=False)
    assert response is not None
def query2mass(ra, dec):
    # Query at the requested position (the original indexed a global DataFrame
    # instead of using the ra/dec arguments).
    tmass = Irsa.query_region(SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='fk5'),
                              catalog='fp_psc', radius='0d0m2s',
                              selcols='ra,dec,j_m,j_cmsig,h_m,h_cmsig,k_m,k_cmsig')
    if tmass:
        return (tmass['ra'].data[0], tmass['dec'].data[0],
                tmass['j_m'].data[0], tmass['j_cmsig'].data[0],
                tmass['h_m'].data[0], tmass['h_cmsig'].data[0],
                tmass['k_m'].data[0], tmass['k_cmsig'].data[0])
    else:
        return ra, dec, -9999.0, -9999.0, -9999.0, -9999.0, -9999.0, -9999.0
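# Hedged usage sketch (placeholder coordinates): query2mass either returns the 2MASS point-source
# photometry of the nearest match within 2 arcsec, or echoes the input position with -9999.0
# sentinels when nothing is found.
from astroquery.irsa import Irsa
from astropy.coordinates import SkyCoord
import astropy.units as u

ra0, dec0 = 83.8221, -5.3911  # placeholder position
row = query2mass(ra0, dec0)
print("J = {}, H = {}, K = {}".format(row[2], row[4], row[6]))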
def get_cat(method):
    cwd = os.getcwd()
    try:
        os.mkdir(method)
    except OSError:
        pass
    if method == 'wise':
        from astroquery.irsa import Irsa
        Irsa.ROW_LIMIT = 1000000
    ra_factor, pos = tile(cwd + '/image_ampphase1.app.restored.fits')
    print('Downloading catalogues for', len(pos), 'sky positions')
    for i, p in enumerate(pos):
        outfile = method + '/' + method + '-' + str(i) + '.vo'
        if os.path.isfile(outfile):
            print('Catalogue at position', p, 'already present')
            continue
        print('Downloading at position', p)
        if method == 'panstarrs':
            while True:
                try:
                    r = requests.post(
                        'http://archive.stsci.edu/panstarrs/search.php',
                        data={'ra': p[0], 'dec': p[1], 'SR': CSIZE,
                              'max_records': 100000, 'nDetections': ">+5",
                              'action': 'Search',
                              'selectedColumnsCsv': 'objid,ramean,decmean'},
                        timeout=300)
                except requests.exceptions.Timeout:
                    print('Timeout, retrying!')
                else:
                    break
            f = open(outfile, 'w')
            f.writelines(r.text)
            f.close()
        elif method == 'wise':
            t = Irsa.query_region(coord.SkyCoord(p[0], p[1], unit=(u.deg, u.deg)),
                                  catalog='allwise_p3as_psd', radius='0d30m0s')
            t.write(outfile, format='votable')
        else:
            raise NotImplementedError('Method ' + method)
def queryWISE(ra, dec):
    wise = Irsa.query_region(SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='fk5'),
                             catalog='allwise_p3as_psd', radius='0d0m2s',
                             selcols='ra,dec,w1mpro,w1sigmpro,w2mpro,w2sigmpro,'
                                     'w3mpro,w3sigmpro,w4mpro,w4sigmpro')
    # print(wise['ra'].data[0])
    return (wise['ra'].data[0], wise['dec'].data[0],
            wise['w1mpro'].data[0], wise['w1sigmpro'].data[0],
            wise['w2mpro'].data[0], wise['w2sigmpro'].data[0],
            wise['w3mpro'].data[0], wise['w3sigmpro'].data[0],
            wise['w4mpro'].data[0], wise['w4sigmpro'].data[0])
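# Hedged usage sketch (placeholder position): unlike query2mass above, queryWISE assumes the
# AllWISE cone search returns at least one row; an empty result would raise an IndexError, so a
# caller may want a guard like this.
try:
    w1, w1err = queryWISE(83.8221, -5.3911)[2:4]
    print("W1 = {:.3f} +/- {:.3f}".format(w1, w1err))
except IndexError:
    print("No AllWISE source within 2 arcsec")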
def do_ppmxl(catalog):
    task_str = catalog.get_current_task_str()
    keys = list(catalog.entries.keys())

    warnings.filterwarnings("ignore")
    for oname in pbar(keys, task_str):
        # Some events may be merged in the cleanup process; skip them if
        # non-existent.
        try:
            name = catalog.add_entry(oname)
        except Exception:
            catalog.log.warning(
                '"{}" was not found, suggests merge occurred in cleanup '
                'process.'.format(oname))
            continue
        if (FASTSTARS.RA not in catalog.entries[name] or
                FASTSTARS.DEC not in catalog.entries[name]):
            continue
        else:
            radec = str(catalog.entries[name][FASTSTARS.RA][0]['value']) + \
                str(catalog.entries[name][FASTSTARS.DEC][0]['value'])
            c = coord(radec, unit=(un.hourangle, un.deg), frame='icrs')

            cnttry = 0
            foundstar = False
            while foundstar == False and cnttry < 1:
                try:
                    cnttry += 1
                    time.sleep(0.1)
                    result = Irsa.query_region(c, catalog='ppmxl',
                                               radius='0d0m10s')
                except TypeError:
                    # print(radec, cnttry)
                    continue
                if len(result) > 1:
                    foundstar = True

            if foundstar == True:
                source = (catalog.entries[name]
                          .add_source(name='The PPMXL Catalog',
                                      bibcode="2010AJ....139.2440R",
                                      url="https://irsa.ipac.caltech.edu/Missions/ppmxl.html",
                                      secondary=True))
                catalog.entries[name].add_quantity(
                    FASTSTARS.PROPER_MOTION_RA,
                    str(result['pmra'][0] * degperyrtomasperyear), source,
                    e_value=str(result['e_pmra'][0] * degperyrtomasperyear),
                    u_value='mas/yr')
                catalog.entries[name].add_quantity(
                    FASTSTARS.PROPER_MOTION_DEC,
                    str(result['pmde'][0] * degperyrtomasperyear), source,
                    e_value=str(result['e_pmde'][0] * degperyrtomasperyear),
                    u_value='mas/yr')

    catalog.journal_entries()
    return
def surveyFieldConverter(self, ra, dec, margin, need_clean=False, cat='fp_xsc'):
    '''For 2MASS: return the designation of each detected source in the search field.'''
    pos = FK5(ra * u.degree, dec * u.degree)
    tbl = Irsa.query_region(pos, catalog=cat, spatial='Box',
                            width=2 * margin * u.deg)
    lst = []
    if need_clean and len(tbl) > 0:
        for i in range(len(tbl['designation'])):
            # keep only sources with a clean contamination/confusion flag
            # (the original checked tbl[0] for every row, which looks like a bug)
            if tbl[i]['cc_flg'] == '0':
                lst.append(tbl[i]['designation'])
        return lst
    else:
        return list(tbl['designation'])
def query_cat(coords, catalog, radius=0.5 * u.arcsec, cols=None, fill_val=-99.99,
              full=False, IRSA=False):
    # one-to-one query
    if IRSA:
        results = Irsa.query_region(coords, catalog=catalog, radius=radius)
    else:
        results = Vizier.query_region(coords, catalog=catalog, radius=radius)
    if len(results) == 0:
        return None
    if full:
        return results
    results = results[0]

    if cols is not None:
        # if dict, remap colnames
        if isinstance(cols, dict):
            for k, v in cols.items():
                results.rename_column(k, v)
            names = cols.values()
        else:
            names = cols
    else:
        names = results.colnames

    # make new columns one-to-one with coords
    newtable = Table(masked=True)
    for col in names:
        oldcol = results[col]
        newcol = MaskedColumn(data=np.zeros(len(coords), dtype=oldcol.dtype),
                              unit=oldcol.unit, name=col,
                              mask=np.ones(len(coords), dtype=bool),
                              fill_value=fill_val)
        # copy data from results
        for row in results:
            if not row[col]:
                continue
            # _q uses 1-based indexing!
            newcol[row['_q'] - 1] = row[col]
            newcol.mask[row['_q'] - 1] = False
        newtable.add_column(newcol)
    return newtable
def irsa_search(folder_path, input_file, output_file, catalog, radius,
                row_range, input_cols, output_cols):
    """
    Perform an IRSA catalog search on an excel spreadsheet with RA-DEC coords
    given in degrees for the closest matches distance-wise within the catalog,
    and output the name, distance, RA-DEC coords and catalog into specified
    columns.

    Note: convert HMS and DMS coords into degrees beforehand.
    """
    from astroquery.irsa import Irsa
    from astropy.coordinates import SkyCoord
    import astropy.units as u
    from openpyxl import load_workbook
    import numpy as np

    wb = load_workbook(folder_path + input_file)
    sheet = wb.active

    for i in range(row_range[0], row_range[1]):
        ra = sheet[input_cols[0] + str(i)].value
        dec = sheet[input_cols[1] + str(i)].value
        Q = Irsa.query_region(SkyCoord(ra=ra, dec=dec, unit=(u.deg, u.deg)),
                              catalog=catalog, radius=radius * u.arcmin,
                              selcols="object,ra,dec").as_array()
        if len(Q) < 1:
            continue
        sources_cat = []
        for source in Q:
            dist_cat = np.sqrt((ra - source[1])**2 + (dec - source[2])**2)
            sources_cat.append([dist_cat, source[1], source[2], catalog,
                                str(source[0])[2:-1]])
        sorted_sources_cat = sorted(sources_cat, key=lambda x: x[0])
        sheet[output_cols[0] + str(i)] = sorted_sources_cat[0][4]
        sheet[output_cols[1] + str(i)] = sorted_sources_cat[0][0]
        sheet[output_cols[2] + str(i)] = sorted_sources_cat[0][1]
        sheet[output_cols[3] + str(i)] = sorted_sources_cat[0][2]
        sheet[output_cols[4] + str(i)] = sorted_sources_cat[0][3]

    wb.save(folder_path + output_file)
    return
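# Hedged usage sketch (hypothetical file names and columns): rows 2-11 of 'targets.xlsx' are read
# with RA in column A and Dec in column B (degrees), matched against the 2MASS extended-source
# catalog within 1 arcmin, and the best match is written to columns C-G of 'targets_matched.xlsx'.
irsa_search(folder_path='./', input_file='targets.xlsx',
            output_file='targets_matched.xlsx', catalog='fp_xsc', radius=1.0,
            row_range=(2, 12), input_cols=('A', 'B'),
            output_cols=('C', 'D', 'E', 'F', 'G'))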
def query_2MASS(ra, dec):
    '''Search 2MASS over a field with the radius defined by the user.'''
    w = WCS(fits.getheader(session['wcs']))
    r = session['r']
    orim = SkyCoord(w.wcs_pix2world([(0, 0)], 1), unit='deg')
    opr = SkyCoord(w.wcs_pix2world([(r, 0)], 1), unit='deg')
    rw = orim.separation(opr)[0]
    print('Separation', rw)

    crval = SkyCoord(ra=ra, dec=dec, unit='deg', frame='icrs')
    Q = Irsa.query_region(crval, catalog='fp_psc', spatial='Cone', radius=rw,
                          selcols=['ra', 'dec', 'j_m', 'k_m']).to_pandas()
    print(Q)
    m = SkyCoord(ra=Q['ra'], dec=Q['dec'], unit=('deg', 'deg'), frame='icrs')
    idx, _, _ = match_coordinates_sky(crval, m)
    j, k = Q.loc[idx][['j_m', 'k_m']]
    return j, k
def cmd_setup(ra_min, ra_max, dec_min, dec_max, GridSize=100, height=8, width=20):
    """Prepares a 2d histogram of HST data and scatter plot of unWISE data
    from given RA and DEC"""
    Irsa.ROW_LIMIT = 100000
    data_table = Irsa.query_region(
        "m33", catalog="unwise_2019", spatial="Polygon",
        polygon=[SkyCoord(ra=ra_min, dec=dec_min, unit=(u.deg, u.deg), frame='icrs'),
                 SkyCoord(ra=ra_max, dec=dec_min, unit=(u.deg, u.deg), frame='icrs'),
                 SkyCoord(ra=ra_max, dec=dec_max, unit=(u.deg, u.deg), frame='icrs'),
                 SkyCoord(ra=ra_min, dec=dec_max, unit=(u.deg, u.deg), frame='icrs')])
    data_table['W1'] = f_to_mag(data_table['flux_1'])
    data_table['W2'] = f_to_mag(data_table['flux_2'])
    data_table['W2 - W1'] = data_table['W2'] - data_table['W1']

    fig, (ax1, ax2) = plt.subplots(1, 2)
    fig.set_figheight(height)
    fig.set_figwidth(width)

    ax1.set_xlabel('W2 - W1', fontsize=20)
    ax1.set_ylabel('W2', fontsize=20)
    ax1.scatter(data_table['W2 - W1'], data_table['W2'], s=10, c='black')
    ax1.set_ylim(ax1.get_ylim()[::-1])
    ax1.set_title('unWISE', size=20)

    # HST sources 2d histogram
    hst_table = pd.read_csv('./small_data.csv')
    selection = hst_table.where((hst_table['RA'] > ra_min) & (hst_table['RA'] < ra_max) &
                                (hst_table['DEC'] > dec_min) & (hst_table['DEC'] < dec_max))
    color = selection['F110W_VEGA'] - selection['F160W_VEGA']
    mag = selection['F160W_VEGA']
    ax2.hexbin(color, mag, gridsize=GridSize, bins='log')
    ax2.invert_yaxis()
    ax2.invert_xaxis()
    ax2.set_xlabel('F110W - F160W', fontsize=20)
    ax2.set_ylabel('F160W', fontsize=20)
    ax2.set_title('HST', fontsize=20)
sigma_clip=True, sigma_clip_func=np.ma.median) sci_med.write(red_path + sci + '.fits', overwrite=True) with fits.open(red_path + sci + '.fits', mode='update') as hdr: hdr[0].header['RDNOISE'] = header['RDNOISE'] / len(sci_list[sci]) hdr[0].header['NFILES'] = len(sci_list[sci]) for k, n in enumerate(sci_list[sci]): hdr[0].header['FILE' + str(k + 1)] = (os.path.basename(n), 'Name of file used in median.') if args.wcs == 'True' or args.wcs == 'yes': fig, ax = plt.subplots(figsize=(7, 7)) ax = plt.subplot(projection=wcs_object) gaia = Irsa.query_region(SkyCoord(hdr[0].header['CRVAL1'] * u.deg, hdr[0].header['CRVAL2'] * u.deg, frame='fk5'), catalog="gaia_dr2_source", spatial="Cone", radius=3 * u.arcmin) if len(gaia) == 0: log.info('No GAIA stars found within 3 arcmin for starlist.') plt.close() else: ax.imshow(sci_med, cmap='gray', norm=ImageNormalize(sci_med, interval=ZScaleInterval())) _, median, std = sigma_clipped_stats(sci_med, sigma=3.0) daofind = DAOStarFinder(fwhm=7.0, threshold=5. * std) sources = daofind(np.asarray(sci_med)) for l, m in enumerate(gaia['source_id']): x, y = (wcs.WCS(hdr[0].header)).all_world2pix( gaia['ra'][l], gaia['dec'][l], 1)
def WISE_LC(obj, alldata=False, interac=False, clobber=False): # moreplots=False, # return 1 if successfully finished # return 0 if doesnt download retval = 0 # create directories that we'll need if not os.path.exists('data'): os.makedirs('data') if not os.path.exists('img'): os.makedirs('img') # the WISE tables to search cats = [ 'neowiser_p1bs_psd', 'allsky_4band_p1bs_psd', 'allsky_3band_p1bs_psd', 'allsky_2band_p1bs_psd' ] # if clobber=False (i.e. don't overwrite), # double-check the data/ dir to see if this star has already been run! doobj = True if clobber is False: #booleans of if each output file prev. exists. t1 = os.path.isfile('data/' + obj + cats[0] + '.csv') t2 = os.path.isfile('data/' + obj + cats[1] + '.csv') t3 = os.path.isfile('data/' + obj + cats[2] + '.csv') t4 = os.path.isfile('data/' + obj + cats[3] + '.csv') # if ANY of these files exists, then DONT do object again doobj = not (t1 | t2 | t3 | t4) if doobj == False: print("\x1b[31mWISE_LC: Data already pulled\x1b[0m") # should we actually Do this Object? False if prev found & clobber=False if doobj: colors = ['#1f77b4', '#ff7f0e', '#c5b0d5', '#d62728'] plt.figure(figsize=(13, 8)) totvisits = 0 for k in range(len(cats)): try: table1 = Irsa.query_region(obj, catalog=cats[k], spatial='Cone', radius=3 * u.arcsec) # table2 = Irsa.query_region(obj, catalog=cats[1], spatial='Cone', radius=3 * u.arcsec) # table3 = Irsa.query_region(obj, catalog=cats[2], spatial='Cone', radius=3 * u.arcsec) # table4 = Irsa.query_region(obj, catalog=cats[3], spatial='Cone', radius=3 * u.arcsec) table1.sort('mjd') # table2.sort('mjd') # table3.sort('mjd') # table4.sort('mjd') df1 = table1.to_pandas() # df2 = table2.to_pandas() # df3 = table3.to_pandas() # df4 = table4.to_pandas() totvisits = totvisits + len( df1) #+ len(df2) + len(df3) + len(df4) # manually fix the Python3 str=>bytestr problem... boo df1['ph_qual'] = df1['ph_qual'].str.decode('ascii') # df2['ph_qual'] = df2['ph_qual'].str.decode('ascii') # df3['ph_qual'] = df3['ph_qual'].str.decode('ascii') # df4['ph_qual'] = df4['ph_qual'].str.decode('ascii') df1['cc_flags'] = df1['cc_flags'].str.decode('ascii') # df2['cc_flags'] = df2['cc_flags'].str.decode('ascii') # df3['cc_flags'] = df3['cc_flags'].str.decode('ascii') # df4['cc_flags'] = df4['cc_flags'].str.decode('ascii') df1.to_csv('data/' + obj + cats[k] + '.csv') # df2.to_csv('data/' + obj + cats[1] + '.csv') # df3.to_csv('data/' + obj + cats[2] + '.csv') # df4.to_csv('data/' + obj + cats[3] + '.csv') #### QUALITY CUTS # can't add this to the latter 3 surveys... 
(df1['qual_frame'] > 8) if k == 0: ok1 = (df1['ph_qual'].str[0] == 'A') & (df1['nb'] == 1) & ( df1['cc_flags'].str[0:2] == '00') & ( df1['w1rchi2'] < 5) & (df1['qual_frame'] > 8) else: ok1 = (df1['ph_qual'].str[0] == 'A') & ( df1['nb'] == 1) & (df1['cc_flags'].str[0:2] == '00') & (df1['w1rchi2'] < 5) # ok3 = (df3['ph_qual'].str[0] == 'A') & (df3['nb'] == 1) & (df3['cc_flags'].str[0:2] == '00') & (df3['w1rchi2'] < 5) # ok4 = (df4['ph_qual'].str[0] == 'A') & (df4['nb'] == 1) & (df4['cc_flags'].str[0:2] == '00') & (df4['w1rchi2'] < 5) if alldata: plt.scatter(df1['mjd'], df1['w1mpro'], c='k', s=8, alpha=0.25) # plt.scatter(df2['mjd'], df2['w1mpro'], c='k', s=8, alpha=0.25) # plt.scatter(df3['mjd'], df3['w1mpro'], c='k', s=8, alpha=0.25) # plt.scatter(df4['mjd'], df4['w1mpro'], c='k', s=8, alpha=0.25) plt.errorbar(df1['mjd'][ok1], df1['w1mpro'][ok1], yerr=df1['w1sigmpro'][ok1], marker='o', linestyle='none', alpha=0.25, color=colors[k]) # plt.errorbar(df2['mjd'][ok2], df2['w1mpro'][ok2], yerr=df2['w1sigmpro'][ok2], # marker='o', linestyle='none', alpha=0.25, color=colors[1]) # plt.errorbar(df3['mjd'][ok3], df3['w1mpro'][ok3], yerr=df3['w1sigmpro'][ok3], # marker='o', linestyle='none', alpha=0.25, color=colors[2]) # plt.errorbar(df4['mjd'][ok4], df4['w1mpro'][ok4], yerr=df4['w1sigmpro'][ok4], # marker='o', linestyle='none', alpha=0.25, color=colors[3]) except Exception as eek: print( "\x1b[31mWISE_LC: ' + str(eek) + ' encountered. Huh.\x1b[0m" ) plt.ylabel('W1 (mag)') plt.xlabel('MJD (days)') plt.gca().invert_yaxis() plt.title(obj + ', N=' + str(totvisits)) plt.savefig('img/' + obj + '_W1.png', dpi=150, bbox_inches='tight', pad_inches=0.25) if interac: plt.show() else: plt.close() retval = 1 # a value to return, assuming the code makes it this far! # # 2) W1-W2 color light curve # if moreplots: # plt.figure(figsize=(13,8)) # if alldata: # plt.scatter(df1['mjd'], df1['w1mpro']-df1['w2mpro'], c='k', s=8, alpha=0.25) # plt.scatter(df2['mjd'], df2['w1mpro']-df2['w2mpro'], c='k', s=8, alpha=0.25) # plt.scatter(df3['mjd'], df3['w1mpro']-df3['w2mpro'], c='k', s=8, alpha=0.25) # plt.scatter(df4['mjd'], df4['w1mpro']-df4['w2mpro'], c='k', s=8, alpha=0.25) # # plt.errorbar(df1['mjd'][ok1], df1['w1mpro'][ok1] - df1['w2mpro'][ok1], # yerr=np.sqrt(df1['w1sigmpro'][ok1]**2 + df1['w2sigmpro'][ok1]**2), # marker='o', linestyle='none', alpha=0.25, color=colors[0]) # plt.errorbar(df2['mjd'][ok2], df2['w1mpro'][ok2] - df2['w2mpro'][ok2], # yerr=np.sqrt(df2['w1sigmpro'][ok2]**2 + df2['w2sigmpro'][ok2]**2), # marker='o', linestyle='none', alpha=0.25, color=colors[1]) # plt.errorbar(df3['mjd'][ok3], df3['w1mpro'][ok3] - df3['w2mpro'][ok3], # yerr=np.sqrt(df3['w1sigmpro'][ok3]**2 + df3['w2sigmpro'][ok3]**2), # marker='o', linestyle='none', alpha=0.25, color=colors[2]) # plt.errorbar(df4['mjd'][ok4], df4['w1mpro'][ok4] - df4['w2mpro'][ok4], # yerr=np.sqrt(df4['w1sigmpro'][ok4]**2 + df4['w2sigmpro'][ok4]**2), # marker='o', linestyle='none', alpha=0.25, color=colors[3]) # plt.xlabel('MJD (days)') # plt.ylabel('W1-W2 (mag)') # plt.title(obj + ', N=' + str(totvisits)) # plt.savefig('img/'+obj + '_W1W2.png', dpi=150, bbox_inches='tight', pad_inches=0.25) # # plt.show() # plt.close() # 3) CMD # plt.figure(figsize=(8,8)) # plt.errorbar(df1['w1mpro'] - df1['w2mpro'], df1['w1mpro'], # xerr=np.sqrt(df1['w1sigmpro']**2 + df1['w2sigmpro']**2), yerr=df1['w1sigmpro'], # marker='o', linestyle='none', alpha=0.25, color='#1f77b4') # plt.errorbar(df2['w1mpro_ep'] - df2['w2mpro_ep'], df2['w1mpro_ep'], # xerr=np.sqrt(df2['w1sigmpro_ep']**2 
+ df2['w2sigmpro_ep']**2 ), yerr=df2['w1sigmpro_ep'], # marker='o', linestyle='none', alpha=0.25, color='#ff7f0e') # # plt.ylabel('W1 (mag)') # plt.xlabel('W1-W2 (mag)') # plt.gca().invert_yaxis() # plt.savefig('img/'+obj + '_cmd.png', dpi=150, bbox_inches='tight', pad_inches=0.25) # plt.close() # bonus: RA,Dec to make sure not a blend, etc # plt.figure(figsize=(8, 8)) # plt.scatter(df1['ra'], df1['dec'], # marker='o', alpha=0.25, color='#1f77b4') # plt.scatter(df2['ra'], df2['dec'], # marker='o', alpha=0.25, color='#ff7f0e') # plt.xlabel('RA (deg)') # plt.ylabel('Dec (deg)') # plt.savefig('img/' + obj + '_radec.png', dpi=150, bbox_inches='tight', pad_inches=0.25) # plt.close() return retval
def contamVerify(RA, DEC, INSTRUMENT, APAlist, binComp=[], PDF='', web=False): """ Generates a PDF file of figures displaying a simulation of the science image for any given observation using the parameters provided. Parameter(s) ------------ RA : str The Right Ascension of your target in HH:MM:SS DEC : str The Declination of your target in DD:MM:SS INSTRUMENT : str The instrument you are observing with (case-sensitive). The software currently supports: 'MIRI', 'NIRISS', 'NIRCam F322W2', 'NIRCam F444W' APAlist : list A list of Aperture Position Angle(s). Element(s) must be in integers. Example 1: [1, 25, 181, 205] Example 2: [25] binComp : list A list containing parameters of a missing companion that is not in the 2MASS IRSA point-source catalog. The format is: [RA (arcseconds), DEC (arcseconds), J mag, H mag, K mag] [string, string, integer, integer, integer] PDF : string The path to where the PDF file will be saved. If left blank, the PDF file will be saved in your current working directory. Example: 'path/to/my/file.pdf' web : boolean Makes it easier to integrate it onto the website. Leave this as false, unless you're running this in app_exoctk.py Returns ------- A .PDF file containing a simulation of the FOV of your target in the science coordinate system. Some things to consider when reading the figures: 1. The target is circled in red 2. Stellar temperatures of all sources are plotted by color 3. The gray region oulined in blue represents the aperture for the given instrument. 4. The blue square represents the readout region, or the "origin" """ print('Generating FOV...') # Decimal degrees --> HMSDMS for Irsa.query_region() targetcrd = crd.SkyCoord(ra=RA, dec=DEC, unit='deg').to_string('hmsdms') targetRA, targetDEC = RA, DEC # Querying for neighbors with 2MASS IRSA's fp_psc (point-source catalog) rad = 2.5 print('Querying for point-sources within {} arcminutes...'.format( str(rad))) info = Irsa.query_region(targetcrd, catalog='fp_psc', spatial='Cone', radius=rad * u.arcmin) # Coordinates of all stars in FOV, including target allRA = info['ra'].data.data allDEC = info['dec'].data.data # Initiating a dictionary to hold all relevant star information stars = {} stars['RA'], stars['DEC'] = allRA, allDEC print('Total point-sources found in region: {}'.format(len(stars['RA']))) # Finding the target using relative distances sindRA = (targetRA - stars['RA']) * np.cos(targetDEC) cosdRA = targetDEC - stars['DEC'] distance = np.sqrt(sindRA**2 + cosdRA**2) targetIndex = np.argmin(distance) # Appending missing companion to the above lists (if any) if binComp != []: print('Adding missing companion...') bb = binComp[0] / 3600 / np.cos(allDEC[targetIndex] * deg2rad) allRA = np.append(allRA, (allRA[targetIndex] + bb)) allDEC = np.append(allDEC, (allDEC[targetIndex] + binComp[1] / 3600)) Jmag = np.append(Jmag, binComp[2]) Hmag = np.append(Kmag, binComp[3]) Kmag = np.append(Kmag, binComp[4]) J_Hobs = Jmag - Hmag H_Kobs = Hmag - Kmag # Restoring model parameters modelParam = readsav(os.path.join(TRACES_PATH, 'NIRISS', 'modelsInfo.sav'), verbose=False) models = modelParam['models'] modelPadX = modelParam['modelpadx'] modelPadY = modelParam['modelpady'] dimXmod = modelParam['dimxmod'] dimYmod = modelParam['dimymod'] jhMod = modelParam['jhmod'] hkMod = modelParam['hkmod'] teffMod = modelParam['teffmod'] # JHK bands of all stars in FOV, including target Jmag = info['j_m'].data.data Hmag = info['h_m'].data.data Kmag = info['k_m'].data.data # J-H band, H-K band. 
This will be used to derive the stellar Temps later J_Hobs = Jmag - Hmag H_Kobs = Hmag - Kmag # Number of stars nStars = stars['RA'].size # Find/assign Teff of each star print('Calculating effective temperatures...') starsT = np.empty(nStars) for j in range(nStars): color_separation = (J_Hobs[j] - jhMod)**2 + (H_Kobs[j] - hkMod)**2 min_separation_ind = np.argmin(color_separation) starsT[j] = teffMod[min_separation_ind] # Record keeping stars['Temp'] = starsT # Initiating a dictionary for customizability apertures = {} apertures['NIRISS'] = ['NIS_SOSSFULL', 'NIS_SOSSFULL'] apertures['NIRCam F444W'] = ['NRCA5_GRISM256_F444W', 'NRCA5_FULL'] apertures['NIRCam F322W2'] = ['NRCA5_GRISM256_F322W2', 'NRCA5_FULL'] apertures['MIRI'] = ['MIRIM_SLITLESSPRISM', 'MIRIM_FULL'] # Instantiate SIAF object siaf = pysiaf.Siaf(INSTRUMENT.split(' ')[0]) aper = siaf.apertures[apertures[INSTRUMENT][0]] full = siaf.apertures[apertures[INSTRUMENT][1]] # DET_targ -> TEL_targ -> get attitude matrix for target # -> TEL_neighbor -> DET_neighbor -> SCI_neighbor print('Converting Sky --> Science coordinates...') xSweet, ySweet = aper.reference_point('det') v2targ, v3targ = aper.det_to_tel(xSweet, ySweet) contam = {} if not web: filename = 'contam_{}_{}_{}.pdf'.format(RA, DEC, INSTRUMENT) defaultPDF = os.path.join(os.getcwd(), filename).replace(' ', '_') PDF = defaultPDF if PDF == '' else PDF elif web: filename = 'contam_{}_{}_{}.pdf'.format(RA, DEC, INSTRUMENT) PDF = os.path.join(TRACES_PATH, filename) print('Saving figures to: {}'.format(PDF)) print('This will take a second...') pdfobj = PdfPages(PDF) for APA in APAlist: attitude = pysiaf.utils.rotations.attitude_matrix( v2targ, v3targ, targetRA, targetDEC, APA) xdet, ydet = [], [] xsci, ysci = [], [] for starRA, starDEC in zip(stars['RA'], stars['DEC']): # Get the TEL coordinates of each star using the attitude # matrix of the target V2, V3 = pysiaf.utils.rotations.sky_to_tel(attitude, starRA, starDEC) # Convert to arcsec and turn to a float V2, V3 = V2.to(u.arcsec).value, V3.to(u.arcsec).value XDET, YDET = aper.tel_to_det(V2, V3) XSCI, YSCI = aper.det_to_sci(XDET, YDET) xdet.append(XDET) ydet.append(YDET) xsci.append(XSCI) ysci.append(YSCI) XDET, YDET = np.array(xdet), np.array(ydet) XSCI, YSCI = np.array(xsci), np.array(ysci) starsAPA = {'xdet': XDET, 'ydet': YDET, 'xsci': XSCI, 'ysci': YSCI} # Finding indexes of neighbor sources that land on detector rows, cols = full.corners('det') minrow, maxrow = rows.min(), rows.max() mincol, maxcol = cols.min(), cols.max() inFOV = [] for star in range(0, nStars): x, y = starsAPA['xdet'][star], starsAPA['ydet'][star] if (mincol < x) & (x < maxcol) & (minrow < y) & (y < maxrow): inFOV.append(star) inFOV = np.array(inFOV) # Making final plot fig = plt.figure(figsize=(15, 15)) aper.plot(frame='sci', fill_color='gray', color='blue') plt.scatter(XSCI[targetIndex], YSCI[targetIndex], s=400, lw=1.5, facecolor='gray', edgecolor='red') plotTemps(starsT[inFOV], XSCI[inFOV], YSCI[inFOV]) aper.plot_frame_origin(frame='sci', which='sci') # Fine-tune trace lengths start, stop = traceLength(INSTRUMENT) # Plotting the trace footprints for x, y in zip(XSCI[inFOV], YSCI[inFOV]): if 'F322W2' in INSTRUMENT: plt.plot([x - stop, x + start], [y, y], lw=40, color='white', alpha=0.2) plt.plot([x - stop, x + start], [y, y], lw=2., color='white') elif 'F444W' in INSTRUMENT: plt.plot([x - start, x + stop], [y, y], lw=40, color='white', alpha=0.2) plt.plot([x - start, x + stop], [y, y], lw=2., color='white') else: plt.plot([x, x], [y - stop, y + start], 
lw=40, color='white', alpha=0.2) plt.plot([x, x], [y - stop, y + start], lw=2., color='white') # Labeling aperstr = str(aper.AperName.replace('_', ' ')) tx, ty = str(round(XSCI[targetIndex])), str(round(YSCI[targetIndex])) plt.title( 'The FOV in SCIENCE coordinates at APA {}$^o$'.format(str(APA)) + '\n' + '{}'.format(aperstr) + '\n' + 'Target (X,Y): {}, {}'.format(tx, ty), fontsize=20) # Adding to PDF pdfobj.savefig(fig, bbox_inches='tight') pdfobj.close() if web: return PDF
def getWISE(entry):
    '''
    get IR data from AllWISE Source Catalog
    attempts to query Irsa 5 times; if they keep failing, abort
    returns updated entry
    '''
    ir_pos = coord.SkyCoord(entry['consensus']['ir_ra'],
                            entry['consensus']['ir_dec'],
                            unit=(u.deg, u.deg), frame='icrs')

    tryCount = 0
    while True:  # in case of error, wait 10 sec and try again; give up after 5 tries
        tryCount += 1
        try:
            table = Irsa.query_region(ir_pos, catalog='allwise_p3as_psd',
                                      radius=3. * u.arcsec)
            break
        except (astroquery.exceptions.TimeoutError,
                astroquery.exceptions.TableParseError) as e:
            if tryCount > 5:
                message = 'Unable to connect to IRSA; trying again in 10 min'
                logging.exception(message)
                print(message)
                raise fn.DataAccessError(message)
            logging.exception(e)
            time.sleep(10)
        except Exception as e:
            if 'Query failed' in str(e) or 'timed out' in str(e):
                if tryCount > 5:
                    message = 'Unable to connect to IRSA; trying again in 10 min'
                    logging.exception(message)
                    print(message)
                    raise fn.DataAccessError(message)
                logging.exception(e)
                time.sleep(10)
            else:
                raise

    if len(table):
        number_matches = 0
        if table[0]['w1snr'] > 5:
            match = table[0]
            dist = match['dist']
            number_matches += 1
        else:
            match = None
            dist = np.inf
        if len(table) > 1:
            for row in table:
                if row['dist'] < dist and row['w1snr'] > 5:
                    match = row
                    dist = match['dist']
                    number_matches += 1
        if match:
            wise_match = {'designation': 'WISEA' + match['designation'],
                          'ra': match['ra'], 'dec': match['dec'],
                          'number_matches': np.int16(number_matches),
                          'w1mpro': match['w1mpro'], 'w1sigmpro': match['w1sigmpro'], 'w1snr': match['w1snr'],
                          'w2mpro': match['w2mpro'], 'w2sigmpro': match['w2sigmpro'], 'w2snr': match['w2snr'],
                          'w3mpro': match['w3mpro'], 'w3sigmpro': match['w3sigmpro'], 'w3snr': match['w3snr'],
                          'w4mpro': match['w4mpro'], 'w4sigmpro': match['w4sigmpro'], 'w4snr': match['w4snr']}
        else:
            wise_match = None
    else:
        wise_match = None

    if wise_match:
        logging.info('AllWISE match found')
        # iterate over a copy of the keys so masked entries can be removed safely
        for key in list(wise_match.keys()):
            if wise_match[key] is np.ma.masked:
                wise_match.pop(key)
            elif wise_match[key] and type(wise_match[key]) is not str:
                wise_match[key] = wise_match[key].item()
            elif wise_match[key] == 0:
                wise_match[key] = 0
    else:
        logging.info('No AllWISE match found')

    return wise_match
def get_cat(method, retries=100): cwd = os.getcwd() try: os.mkdir(method) except OSError: pass if method == 'pslocal': hplist = [] if method == 'wise': from astroquery.irsa import Irsa Irsa.ROW_LIMIT = 1000000 ra_factor, pos = tile(find_fullres_image()) print 'Downloading catalogues for', len(pos), 'sky positions' for i, p in enumerate(pos): outfile = method + '/' + method + '-' + str(i) + '.vo' if os.path.isfile(outfile): print 'Catalogue at position', p, 'already present' continue print 'Downloading at position', p if method == 'panstarrs': count = 0 while True: try: r = requests.post( 'http://archive.stsci.edu/panstarrs/search.php', data={ 'ra': p[0], 'dec': p[1], 'SR': CSIZE, 'max_records': 100000, 'nDetections': ">+5", 'action': 'Search', 'selectedColumnsCsv': 'objid,ramean,decmean' }, timeout=300) except requests.exceptions.Timeout: print 'Timeout, retrying!' else: if 'Warning' not in r.text and 'Please' not in r.text: break else: # will go round the loop again print 'Bad response, retry download (%i)' % count sleep(5 + count * 15) count += 1 if count >= retries: raise RuntimeError( 'Number of retries exceeded for download') f = open(outfile, 'w') f.writelines(r.text) f.close() elif method == 'wise': t = Irsa.query_region(coord.SkyCoord(p[0], p[1], unit=(u.deg, u.deg)), catalog='allwise_p3as_psd', radius='0d30m0s') t.write(outfile, format='votable') elif method == 'pslocal': from astropy_healpix import HEALPix hp = HEALPix(nside=64) cs = hp.cone_search_lonlat(p[0] * u.deg, p[1] * u.deg, radius=CSIZE * u.deg) hplist += list(cs) if not os.path.isdir(PSBASE): # we don't have a local PS database, so download for pix in cs: outfile = method + '/' + str(pix) if not os.path.isfile(outfile): print 'Downloading healpix pixel', pix download_file( 'http://uhhpc.herts.ac.uk/panstarrs-healpix/' + str(pix), outfile) else: raise NotImplementedError('Method ' + method) if method == 'pslocal': hplist = list(set(hplist)) print 'Found', len(hplist), 'unique healpix pixels' outname = method + '/' + method + '.txt' with open(outname, 'w') as outfile: outfile.write('# RA DEC ObjID\n') for pixel in hplist: print 'Appending pixel', pixel if os.path.isdir(PSBASE): pixelfile = PSBASE + '/' + str(pixel) else: pixelfile = method + '/' + str(pixel) if not os.path.isfile(pixelfile): raise RuntimeError('Pixel file ' + pixelfile + 'does not exist') os.system('cat ' + pixelfile + ' >> ' + outname)
def get_cat(method,retries=100): cwd=os.getcwd() try: os.mkdir(method) except OSError: pass if method=='pslocal': hplist=[] if method=='wise': from astroquery.irsa import Irsa Irsa.ROW_LIMIT=1000000 ra_factor,pos=tile(cwd+'/image_ampphase1.app.restored.fits') print 'Downloading catalogues for',len(pos),'sky positions' for i,p in enumerate(pos): outfile=method+'/'+method+'-'+str(i)+'.vo' if os.path.isfile(outfile): print 'Catalogue at position',p,'already present' continue print 'Downloading at position',p if method=='panstarrs': count=0 while True: try: r = requests.post('http://archive.stsci.edu/panstarrs/search.php', data = {'ra':p[0],'dec':p[1],'SR':CSIZE,'max_records':100000,'nDetections':">+5",'action':'Search','selectedColumnsCsv':'objid,ramean,decmean'},timeout=300) except requests.exceptions.Timeout: print 'Timeout, retrying!' else: if 'Warning' not in r.text and 'Please' not in r.text: break else: # will go round the loop again print 'Bad response, retry download (%i)' % count sleep(5+count*15) count+=1 if count>=retries: raise RuntimeError('Number of retries exceeded for download') f=open(outfile,'w') f.writelines(r.text) f.close() elif method=='wise': t=Irsa.query_region(coord.SkyCoord(p[0],p[1],unit=(u.deg,u.deg)), catalog='allwise_p3as_psd', radius='0d30m0s') t.write(outfile,format='votable') elif method=='pslocal': from astropy_healpix import HEALPix hp = HEALPix(nside=64) cs = hp.cone_search_lonlat(p[0]*u.deg, p[1]*u.deg, radius=CSIZE*u.deg) hplist += list(cs) if not os.path.isdir(PSBASE): # we don't have a local PS database, so download for pix in cs: outfile=method+'/'+str(pix) if not os.path.isfile(outfile): print 'Downloading healpix pixel',pix download_file('http://uhhpc.herts.ac.uk/panstarrs-healpix/'+str(pix),outfile) else: raise NotImplementedError('Method '+method) if method=='pslocal': hplist=list(set(hplist)) print 'Found',len(hplist),'unique healpix pixels' outname=method+'/'+method+'.txt' with open(outname,'w') as outfile: outfile.write('# RA DEC ObjID\n') for pixel in hplist: print 'Appending pixel',pixel if os.path.isdir(PSBASE): pixelfile=PSBASE+'/'+str(pixel) else: pixelfile=method+'/'+str(pixel) if not os.path.isfile(pixelfile): raise RuntimeError('Pixel file '+pixelfile+'does not exist') os.system('cat '+pixelfile+' >> '+outname)
from astroquery.irsa import Irsa

print(Irsa.list_catalogs())
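# Hedged follow-up sketch: Irsa.list_catalogs() is assumed here to behave like a plain
# {identifier: description} dict, so the catalog names used throughout these snippets
# (e.g. 'fp_psc', 'allwise_p3as_psd') can be located with a simple substring search.
catalogs = Irsa.list_catalogs()
wise_catalogs = {name: desc for name, desc in catalogs.items() if 'WISE' in desc}
for name, desc in sorted(wise_catalogs.items()):
    print(name, '-', desc)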
from astropy import units as u
from astropy import coordinates
from astroquery.irsa import Irsa

from common_constants import distance

IRAS = Irsa.query_region(coordinates.SkyCoord.from_name("W51"),
                         catalog="iraspsc", radius=1 * u.arcmin)
Akari = Irsa.query_region(coordinates.SkyCoord.from_name("W51"),
                          catalog="akari_fis", radius=1 * u.arcmin)

# formulae from http://marc.sauvage.free.fr/astro_book/IRAS_pages/IRAS.html
fir_lum_iras = (3.96e5 * (2.58 * IRAS["fnu_60"][0] + IRAS["fnu_100"][0]) *
                (distance.to(u.Mpc).value)**2 * u.L_sun)
mir_lum_iras = (1.611e6 * (2.61 * IRAS["fnu_12"][0] + IRAS["fnu_25"][0]) *
                (distance.to(u.Mpc).value)**2 * u.L_sun)

print("IRAS FIR luminosity: {0}".format(fir_lum_iras))
print("IRAS MIR luminosity: {0}".format(mir_lum_iras))
print("Harvey 1986 luminosity: {0}".format(1e7 * u.L_sun))
print("Sievers 1991 luminosity: {0}".format(1.8e7 * u.L_sun * (7.5 * u.kpc / distance)**-2))
# Harvey 1986a 1986ApJ...300..737H: 10^7 Lsun

import pylab as pl

iras_wl = [12, 25, 60, 100]
akari_wl = [65, 90, 140, 160]

pl.figure(1).clf()
pl.plot(iras_wl, [IRAS["fnu_{0}".format(wl)] for wl in iras_wl], "s")
pl.plot(akari_wl, [Akari["flux{0}".format(wl)] for wl in akari_wl], "o")
def sossFieldSim(ra, dec, binComp='', dimX=256): # binComp: [deltaRA,deltaDEC,J,H,K] # stars in large field around target targetcrd = crd.SkyCoord(ra = ra, dec = dec, unit=(u.hour, u.deg)) targetRA = targetcrd.ra.value targetDEC = targetcrd.dec.value info = Irsa.query_region(targetcrd, catalog = 'fp_psc', spatial = 'Cone', radius = 2.5*u.arcmin) # coordinates of all stars in FOV, including target allRA = info['ra'].data.data allDEC = info['dec'].data.data Jmag = info['j_m'].data.data Hmag = info['h_m'].data.data Kmag = info['k_m'].data.data J_Hobs = Jmag-Hmag H_Kobs = Hmag-Kmag # target coords distance = np.sqrt( ((targetRA-allRA)*np.cos(targetDEC))**2 + (targetDEC-allDEC)**2 ) targetIndex = np.argmin(distance) # the target # add any missing companion cubeNameSuf='' if binComp!='': deg2rad = np.pi/180 allRA = np.append(allRA, (allRA[targetIndex] + binComp[0]/3600/np.cos(allDEC[targetIndex]*deg2rad))) allDEC = np.append(allDEC, (allDEC[targetIndex] + binComp[1]/3600)) Jmag = np.append(Jmag,binComp[2]) Hmag = np.append(Kmag,binComp[3]) Kmag = np.append(Kmag,binComp[4]) J_Hobs = Jmag-Hmag H_Kobs = Hmag-Kmag cubeNameSuf ='_custom' #number of stars nStars=allRA.size cooTar=crd.SkyCoord(ra=allRA[targetIndex],dec=allDEC[targetIndex], unit=(u.deg, u.deg)) #Restoring model parameters modelParam = readsav(os.path.join(idlsave_path,'modelsInfo.sav'),verbose=False) models = modelParam['models'] modelPadX = modelParam['modelpadx'] modelPadY = modelParam['modelpady'] dimXmod = modelParam['dimxmod'] dimYmod = modelParam['dimymod'] jhMod = modelParam['jhmod'] hkMod = modelParam['hkmod'] teffMod = modelParam['teffmod'] # find/assign Teff of each star starsT=np.empty(nStars) for j in range(nStars): color_separation = (J_Hobs[j]-jhMod)**2+(H_Kobs[j]-hkMod)**2 min_separation_ind = np.argmin(color_separation) starsT[j]=teffMod[min_separation_ind] radeg = 180/np.pi niriss_pixel_scale = 0.065 # arcsec sweetSpot = dict(x=856,y=107,RA=allRA[targetIndex],DEC=allDEC[targetIndex],jmag=Jmag[targetIndex]) #offset between all stars and target dRA=(allRA - sweetSpot['RA'])*np.cos(sweetSpot['DEC']/radeg)*3600 dDEC=(allDEC - sweetSpot['DEC'])*3600 # Put field stars positions and magnitudes in structured array _ = dict(RA=allRA, DEC=allDEC, dRA=dRA, dDEC=dDEC, jmag=Jmag, T=starsT, x=np.empty(nStars), y=np.empty(nStars), dx=np.empty(nStars), dy=np.empty(nStars)) stars=np.empty(nStars,dtype=[(key,val.dtype) for key,val in _.items()]) for key,val in _.items(): stars[key]=val # Initialize final fits cube that contains the modelled traces with contamination PAmin = 0 #instrument PA, degrees PAmax = 360 dPA = 1 # degrees # Set of IPA values to cover PAtab = np.arange(PAmin, PAmax, dPA) # degrees nPA = len(PAtab) # dimX=256 #2048 #########now as argument, with default to 256 dimY=2048 simuCube=np.zeros([nPA+2,dimY, dimX]) # cube of trace simulation at every degree of field rotation, +target at O1 and O2 # saveFiles = glob.glob('idlSaveFiles/*.sav')[:-1] saveFiles = glob.glob(os.path.join(idlsave_path,'*.sav'))[:-1] #pdb.set_trace() # Big loop to generate a simulation at each instrument PA for kPA in range(PAtab.size): APA= PAtab[kPA] V3PA=APA+0.57 #from APT stars['dx']= (np.cos(np.pi/2+APA/radeg)*stars['dRA']-np.sin(np.pi/2+APA/radeg)*stars['dDEC'])/niriss_pixel_scale stars['dy']= (np.sin(np.pi/2+APA/radeg)*stars['dRA']+np.cos(np.pi/2+APA/radeg)*stars['dDEC'])/niriss_pixel_scale stars['x'] = stars['dx']+sweetSpot['x'] stars['y'] = stars['dy']+sweetSpot['y'] # Display the star field (blue), target (red), subarray (green), full 
array (blue), and axes if (kPA==0 and nStars > 1) and False: print(kPA) plt.plot([0,2047,2047,0,0],[0,0,2047,2047,0], 'b') plt.plot([0,255,255,0,0],[0,0,2047,2047,0], 'g') #the order 1 & 2 traces t1=np.loadtxt('/Users/david/Documents/work/jwst/niriss/soss/data/trace_order1.txt',unpack=True) plt.plot(t1[0],t1[1],'r') t2=np.loadtxt('/Users/david/Documents/work/jwst/niriss/soss/data/trace_order2.txt',unpack=True) plt.plot(t2[0],t2[1],'r') plt.plot(stars['x'], stars['y'], 'b*') plt.plot(sweetSpot['x'], sweetSpot['y'], 'r*') plt.title("APA= {} (V3PA={})".format(APA,V3PA)) ax=plt.gca() #add V2 & V3 axes l,hw,hl=250,50,50 adx,ady=-l*np.cos(-0.57/radeg),-l*np.sin(-0.57/radeg) ax.arrow(2500, 1800, adx,ady, head_width=hw, head_length=hl, length_includes_head=True, fc='k') #V3 plt.text(2500+1.4*adx,1800+1.4*ady,"V3",va='center',ha='center') adx,ady=-l*np.cos((-0.57-90)/radeg),-l*np.sin((-0.57-90)/radeg) ax.arrow(2500, 1800, adx, ady, head_width=hw, head_length=hl, length_includes_head=True, fc='k') #V2 plt.text(2500+1.4*adx,1800+1.4*ady,"V2",va='center',ha='center') #add North and East adx,ady=-l*np.cos(APA/radeg),-l*np.sin(APA/radeg) ax.arrow(2500, 1300, adx, ady, head_width=hw, head_length=hl, length_includes_head=True, fc='k') #N plt.text(2500+1.4*adx,1300+1.4*ady,"N",va='center',ha='center') adx,ady=-l*np.cos((APA-90)/radeg),-l*np.sin((APA-90)/radeg) ax.arrow(2500, 1300, adx, ady, head_width=hw, head_length=hl, length_includes_head=True, fc='k') #E plt.text(2500+1.4*adx,1300+1.4*ady,"E",va='center',ha='center') ax.set_xlim(-400,2047+800) ax.set_ylim(-400,2047+400) ax.set_aspect('equal') plt.show() # Retain stars that are within the Direct Image NIRISS POM FOV ind, = np.where((stars['x'] >= -162) & (stars['x'] <= 2047+185) & (stars['y'] >= -154) & (stars['y'] <= 2047+174)) starsInFOV=stars[ind] for i in range(len(ind)): intx = round(starsInFOV['dx'][i]) inty = round(starsInFOV['dy'][i]) # print(intx,inty) k=np.where(teffMod == starsInFOV['T'][i])[0][0] fluxscale = 10.0**(-0.4*(starsInFOV['jmag'][i] - sweetSpot['jmag'])) #deal with subection sizes mx0=int(modelPadX-intx) mx1=int(modelPadX-intx+dimX) my0=int(modelPadY-inty) my1=int(modelPadY-inty+dimY) if (mx0 > dimXmod) or (my0 > dimYmod): continue if (mx1 < 0) or (my1 < 0): continue x0 =(mx0<0)*(-mx0) y0 =(my0<0)*(-my0) mx0 *=(mx0 >= 0) mx1 = dimXmod if mx1>dimXmod else mx1 my0 *=(my0 >= 0) my1 =dimYmod if my1>dimYmod else my1 # if target and first kPA, add target traces of order 1 and 2 in output cube if (intx == 0) & (inty == 0) & (kPA == 0): fNameModO12 = saveFiles[k] modelO12 = readsav(fNameModO12,verbose=False)['modelo12'] simuCube[0, y0:y0+my1-my0, x0:x0+mx1-mx0] = modelO12[0, my0:my1, mx0:mx1] * fluxscale # order 1 simuCube[1, y0:y0+my1-my0, x0:x0+mx1-mx0] = modelO12[1, my0:my1, mx0:mx1] * fluxscale # order 2 if (intx != 0) or (inty != 0): #field star simuCube[kPA+2, y0:y0+my1-my0, x0:x0+mx1-mx0] += models[k, my0:my1, mx0:mx1] * fluxscale # fits.writeto(cubeName, simuCube, clobber = True) # print(cubeName) return simuCube