def test_gcrs_self_transform_closeby():
    """
    Tests GCRS self transform for objects which are nearby and thus
    have reasonable parallax.

    Moon positions were originally created using JPL DE432s ephemeris.

    The two lunar positions (one geocentric, one at a defined location)
    are created via a transformation from ICRS to two different GCRS frames.

    We test that the GCRS-GCRS self transform can correctly map one GCRS
    frame onto the other.
    """
    t = Time("2014-12-25T07:00")
    moon_geocentric = SkyCoord(GCRS(318.10579159*u.deg,
                                    -11.65281165*u.deg,
                                    365042.64880308*u.km, obstime=t))

    # this is the location of the Moon as seen from La Palma
    obsgeoloc = [-5592982.59658935, -63054.1948592, 3059763.90102216]*u.m
    obsgeovel = [4.59798494, -407.84677071, 0.]*u.m/u.s
    moon_lapalma = SkyCoord(GCRS(318.7048445*u.deg,
                                 -11.98761996*u.deg,
                                 369722.8231031*u.km,
                                 obstime=t,
                                 obsgeoloc=obsgeoloc,
                                 obsgeovel=obsgeovel))

    transformed = moon_geocentric.transform_to(moon_lapalma.frame)
    delta = transformed.separation_3d(moon_lapalma)
    assert_allclose(delta, 0.0*u.m, atol=1*u.m)
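For context, the obsgeoloc/obsgeovel pair hard-coded above can be derived from an EarthLocation. A minimal sketch, assuming approximate, illustrative La Palma site values (not the exact values used in the test):

from astropy.coordinates import EarthLocation, GCRS
from astropy.time import Time
import astropy.units as u

t = Time("2014-12-25T07:00")
# illustrative site coordinates only; not authoritative
site = EarthLocation(lon=-17.88*u.deg, lat=28.76*u.deg, height=2396*u.m)
pos, vel = site.get_gcrs_posvel(t)
# a GCRS frame centred on this observer rather than the geocentre
topo_gcrs_frame = GCRS(obstime=t, obsgeoloc=pos, obsgeovel=vel)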
def proper_motion(g0, g1):
    """Proper motion from two `Geom` instances.

    Parameters
    ----------
    g0, g1 : Geom
        Two positions of the target.  `g0.date < g1.date` is assumed.

    Returns
    -------
    mu : Quantity
        The proper motion.

    phi : Angle
        The position angle of the proper motion.

    """
    from astropy.coordinates import SkyCoord
    c0 = SkyCoord(ra=g0.ra, dec=g0.dec, frame='icrs')
    c1 = SkyCoord(ra=g1.ra, dec=g1.dec, frame='icrs')
    dt = (g1.date - g0.date).jd * u.day
    mu = (c0.separation(c1) / dt).to(u.arcsec / u.hr)
    phi = c0.position_angle(c1).to(u.deg)
    return mu, phi
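A minimal usage sketch, assuming a simple hypothetical stand-in for `Geom` with `ra`, `dec` (Quantities) and `date` (a `Time`); the coordinate values are made up:

from collections import namedtuple
from astropy.time import Time
import astropy.units as u

Geom = namedtuple('Geom', ['ra', 'dec', 'date'])  # hypothetical stand-in
g0 = Geom(10.00*u.deg, 5.00*u.deg, Time('2020-01-01'))
g1 = Geom(10.01*u.deg, 5.01*u.deg, Time('2020-01-02'))
mu, phi = proper_motion(g0, g1)  # rate in arcsec/hr, position angle in deg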
def find_uniques(spec_data, remove_imacs=False, nearenough_sep=5*u.arcsec):
    import collections

    # MATCH ON COORDINATES
    scs = SkyCoord(spec_data['RA'], spec_data['DEC'], unit=u.deg)
    idx1, idx2, sep2d, _ = scs.search_around_sky(scs, nearenough_sep)

    # now construct the groups from the pairs
    grpdct = {}
    grpi = 0
    for i1, i2 in zip(idx1, idx2):
        if i1 in grpdct:
            if i2 in grpdct:
                # combine the two groups by assigning grp2 items to grp1
                # this block is by far the slowest part so if the data size
                # grows it should be optimized
                grp1 = grpdct[i1]
                grp2 = grpdct[i2]
                if grp1 != grp2:
                    to_set_to_1 = [i for i, grp in grpdct.items() if grp == grp2]
                    for i in to_set_to_1:
                        grpdct[i] = grp1
            else:
                # add i2 to the group i1 is already in
                grpdct[i2] = grpdct[i1]
        else:
            if i2 in grpdct:
                # add i1 to the group i2 is already in
                grpdct[i1] = grpdct[i2]
            else:
                # add them both to a new group
                grpdct[i1] = grpdct[i2] = grpi
                grpi += 1

    grpnum_to_group_members = collections.defaultdict(list)
    for k, v in grpdct.items():
        grpnum_to_group_members[v].append(k)

    # convert the members into arrays
    grpnum_to_group_members = {k: np.array(v)
                               for k, v in grpnum_to_group_members.items()}

    # identify which is the "best" spectrum (meaning the highest-ZQUALITY spectrum)
    idxs_to_keep = []
    new_repeats = []
    for grpnum, allmembers in grpnum_to_group_members.items():
        if remove_imacs:
            members = allmembers[spec_data['TELNAME'][allmembers] != 'IMACS']
            if len(members) == 0:
                continue
        else:
            members = allmembers
        idxs_to_keep.append(members[np.argsort(spec_data['ZQUALITY'][members])[-1]])
        new_repeats.append('+'.join(np.unique(spec_data['SPEC_REPEAT'][members])))

    # now build the output table from the input
    unique_objs = spec_data[np.array(idxs_to_keep)]
    del unique_objs['SPEC_REPEAT']
    unique_objs['SPEC_REPEAT'] = new_repeats

    return unique_objs
def _calculate_rotation_angle(reg_coordinate_frame, header):
    """Calculates the rotation angle from the region to the header's frame

    This attempts to be compatible with the implementation used by SAOImage
    DS9. In particular, this measures the rotation of the north axis as
    measured at the center of the image, and therefore requires a
    `~astropy.io.fits.Header` object with defined 'NAXIS1' and 'NAXIS2'
    keywords.

    Parameters
    ----------
    reg_coordinate_frame : str
        Coordinate frame used by the region file

    header : `~astropy.io.fits.Header` instance
        Header describing the image

    Returns
    -------
    y_axis_rot : float
        Degrees by which the north axis in the region's frame is rotated when
        transformed to pixel coordinates
    """
    new_wcs = WCS(header)

    region_frame = SkyCoord(
        '0d 0d',
        frame=reg_coordinate_frame,
        obstime='J2000')
    region_frame = SkyCoord(
        '0d 0d',
        frame=reg_coordinate_frame,
        obstime='J2000',
        equinox=region_frame.equinox)

    origin = SkyCoord.from_pixel(
        header['NAXIS1']/2,
        header['NAXIS2']/2,
        wcs=new_wcs,
        origin=1).transform_to(region_frame)

    offset = proj_plane_pixel_scales(new_wcs)[1]

    origin_x, origin_y = origin.to_pixel(new_wcs, origin=1)
    origin_lon = origin.data.lon.degree
    origin_lat = origin.data.lat.degree

    offset_point = SkyCoord(
        origin_lon, origin_lat+offset, unit='degree',
        frame=origin.frame.name, obstime='J2000')
    offset_x, offset_y = offset_point.to_pixel(new_wcs, origin=1)

    north_rot = np.arctan2(
        offset_y-origin_y,
        offset_x-origin_x) / np.pi*180.

    cdelt = new_wcs.wcs.get_cdelt()
    if (cdelt > 0).all() or (cdelt < 0).all():
        return north_rot - 90
    else:
        return -(north_rot - 90)
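A usage sketch with a toy, unrotated TAN header; all keyword values are made up for illustration, and the function's own module is assumed to provide its imports (WCS, proj_plane_pixel_scales, np). For an unrotated header the returned angle should be close to zero:

from astropy.io import fits

header = fits.Header()
header['NAXIS1'], header['NAXIS2'] = 100, 100
header['CTYPE1'], header['CTYPE2'] = 'RA---TAN', 'DEC--TAN'
header['CRVAL1'], header['CRVAL2'] = 30.0, 45.0
header['CRPIX1'], header['CRPIX2'] = 50.0, 50.0
header['CDELT1'], header['CDELT2'] = -0.001, 0.001

angle = _calculate_rotation_angle('fk5', header)  # ~0 deg for this header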
def calculate(self):
    ephem_location = ephem.Observer()
    ephem_location.lat = self.location.latitude.to(u.rad) / u.rad
    ephem_location.lon = self.location.longitude.to(u.rad) / u.rad
    ephem_location.elevation = self.location.height / u.meter
    ephem_location.date = ephem.Date(self.time.datetime)

    if self.data is None:
        self.alt = Latitude([], unit=u.deg)
        self.az = Longitude([], unit=u.deg)
        self.names = Column([], dtype=str)
        self.vmag = Column([])
    else:
        ra = Longitude(self.data["ra"], u.h)
        dec = Latitude(self.data["dec"], u.deg)
        c = SkyCoord(ra, dec, frame='icrs')
        altaz = c.transform_to(AltAz(obstime=self.time, location=self.location))
        self.alt = altaz.alt
        self.az = altaz.az
        self.names = self.data['name']
        self.vmag = self.data['mag']

    for ephemeris in self.ephemerides:
        ephemeris.compute(ephem_location)
        self.vmag = np.insert(self.vmag, [0], ephemeris.mag)
        self.alt = np.insert(self.alt, [0],
                             (ephemeris.alt.znorm * u.rad).to(u.deg))
        self.az = np.insert(self.az, [0], (ephemeris.az * u.rad).to(u.deg))
        self.names = np.insert(self.names, [0], ephemeris.name)

    return self.names, self.vmag, self.alt, self.az
def exp_dead_new(file_num, name_file, imsz, wcs, flat_list, foc_list,
                 asp_solution, dead, cut, flat_idx, step, out_path, return_dict):
    print(imsz)
    count = np.zeros(imsz)
    x_lim = imsz[0]
    y_lim = imsz[1]

    length = flat_list[0].shape[0]
    half_len = length / 2.
    print(half_len)
    l = imsz[0] // 10
    start = foc_list[0, 1] - half_len
    print(foc_list.shape)
    print(start.shape)
    ox = np.repeat(np.arange(l) + start, length + 1000)
    oy = np.tile(np.arange(length + 1000) + foc_list[0, 0] - half_len - 500, l)
    omask = (ox >= 0) & (ox < imsz[0]) & (oy >= 0) & (oy < imsz[1])
    ox = ox[omask]
    oy = oy[omask]
    gl, gb = wcs.all_pix2world(oy, ox, 0)
    c = SkyCoord(gl*u.degree, gb*u.degree, frame='galactic')
    rd = c.transform_to(FK5)
    for i in range(asp_solution.shape[0]):
        hrflat = flat_list[flat_idx[i]]
        foc = foc_list[i, :]  # wcs.sip_pix2foc(wcs.wcs_world2pix(coo,1),1)
        if (foc[1] + half_len) >= (start + l):
            print('update')
            start = foc[1] - half_len
            ox = np.repeat(np.arange(l) + start, length + 1000)
            oy = np.tile(np.arange(length + 1000) + foc[0] - half_len - 500, l)
            omask = (ox >= 0) & (ox < imsz[0]) & (oy >= 0) & (oy < imsz[1])
            if np.sum(omask) == 0:
                break
            ox = ox[omask]
            oy = oy[omask]
            gl, gb = wcs.all_pix2world(oy, ox, 0)
            c = SkyCoord(gl*u.degree, gb*u.degree, frame='galactic')
            rd = c.transform_to(FK5)
        fmask = (ox >= (foc[1] - length/2)) & (ox < (foc[1] + length/2)) & \
                (oy >= (foc[0] - length/2)) & (oy < (foc[0] + length/2))
        if np.sum(fmask) == 0:
            continue
        x = ox[fmask]
        y = oy[fmask]
        xi, eta = gn.gnomfwd_simple(rd.ra.deg[fmask], rd.dec.deg[fmask],
                                    asp_solution[i, 1], asp_solution[i, 2],
                                    -asp_solution[i, 3], 1/36000., 0.)
        px = ((xi/36000.)/(1.25/2.)*(1.25/(800*0.001666))+1.)/2.*length
        py = ((eta/36000.)/(1.25/2.)*(1.25/(800*0.001666))+1.)/2.*length
        pmask = (px >= 0) & (px < length) & (py >= 0) & (py < length)
        if np.sum(pmask) == 0:
            continue
        count[x[pmask].astype(int), y[pmask].astype(int)] += \
            hrflat[px[pmask].astype(int), py[pmask].astype(int)]*step*(1-dead[i])*cut[i]
        if i % 100 == 0:
            with open('/scratch/dw1519/galex/fits/scan_map/%s_gal_sec_exp_tmp%d.dat'
                      % (name_file, file_num), 'w') as f:
                f.write('%d' % i)
            print(i)
    print('%d done' % file_num)
    #return_dict[file_num] = count
    np.save('%s/%s_gal_sec_exp_tmp%d.npy' % (out_path, name_file, file_num), count)
def test_make_source_designation():
    # Crab pulsar position for HESS
    coordinate = SkyCoord('05h34m31.93830s +22d00m52.1758s', frame='icrs')
    strrep = coordinate_iau_format(coordinate, ra_digits=4)
    assert strrep == '0534+220'

    # PKS 2155-304 AGN position for 2FGL
    coordinate = SkyCoord('21h58m52.06511s -30d13m32.1182s', frame='icrs')
    strrep = coordinate_iau_format(coordinate, ra_digits=5)
    assert strrep == '2158.8-3013'

    # Check the example from Section 3.2.1 of the IAU spec:
    # http://cdsweb.u-strasbg.fr/Dic/iau-spec.html
    icrs = SkyCoord('00h51m09.38s -42d26m33.8s', frame='icrs')
    fk4 = icrs.transform_to('fk4')

    strrep = coordinate_iau_format(icrs, ra_digits=6)
    assert strrep == '005109-4226.5'

    strrep = coordinate_iau_format(fk4, ra_digits=6)
    assert strrep == '004848-4242.8'

    strrep = coordinate_iau_format(fk4, ra_digits=4)
    assert strrep == '0048-427'

    strrep = coordinate_iau_format(fk4, ra_digits=4, dec_digits=2)
    assert strrep == '0048-42'

    # Check that array coordinate input works
    coordinates = SkyCoord(ra=[10.68458, 83.82208],
                           dec=[41.26917, -5.39111],
                           unit=('deg', 'deg'))
    strreps = coordinate_iau_format(coordinates, ra_digits=5, prefix='HESS J')
    assert strreps == ['HESS J0042.7+4116', 'HESS J0535.2-0523']
def hextile(image, radius):
    pos = []
    hs = radius*np.sqrt(3)
    hdus = fits.open(image)
    hdu = flatten(hdus)
    maxy, maxx = hdu.data.shape
    w = WCS(hdu.header)
    print('Hex tiling image')
    # co-ords of the centre and corners of the image
    ra_c, dec_c = w.wcs_pix2world(maxx/2, maxy/2, 0)
    ra_factor = np.cos(dec_c*np.pi/180.0)
    ra_ll, dec_ll = w.wcs_pix2world(0, 0, 0)
    ra_lr, dec_lr = w.wcs_pix2world(maxx, 0, 0)
    ra_ul, dec_ul = w.wcs_pix2world(0, maxy, 0)
    c_c = SkyCoord(ra_c*u.degree, dec_c*u.degree, frame='icrs')
    c_ll = SkyCoord(ra_ll*u.degree, dec_ll*u.degree, frame='icrs')
    c_lr = SkyCoord(ra_lr*u.degree, dec_lr*u.degree, frame='icrs')
    dra, ddec = [v.value for v in c_c.spherical_offsets_to(c_ll)]
    nha = dra*2/hs
    print('Number of hexes across', nha)
    c_ul = SkyCoord(ra_ul*u.degree, dec_ul*u.degree, frame='icrs')
    dra, ddec = [v.value for v in c_c.spherical_offsets_to(c_ul)]
    nhu = 2*ddec/hs
    print('Number of hexes up', nhu)
    nha = int(0.5+nha)
    nhu = int(0.5+nhu)
    for j in range(nhu):
        for i in range(nha):
            xc = (1.0*maxx*(i+(j % 2)*0.5))/nha
            yc = (maxy*(j+0.5))/nhu
            ra_p, dec_p = w.wcs_pix2world(xc, yc, 0)
            pos.append((float(ra_p), float(dec_p)))
    return ra_factor, pos
def assign_id(file1, file2):
    """
    Preconditions: Expects 2 files read as astropy Tables.  Files must have
    RA and Dec columns.
    Postconditions: Fills the DataNum column in the second file with the
    DataNum of the closest RA/Dec match in the first file.
    """
    ra1 = file1['RA']
    dec1 = file1['Dec']
    ra2 = file2['RA']
    dec2 = file2['Dec']

    # returns two catalogs comparing file2 to file1
    c = SkyCoord(ra=ra1*u.degree, dec=dec1*u.degree)
    catalog = SkyCoord(ra=ra2*u.degree, dec=dec2*u.degree)
    idx, d2d, d3d = c.match_to_catalog_3d(catalog)

    # some of the matches are likely to be duplicates and not within a
    # reasonable distance to be the same star
    # return an array of true's and false's where match is within specified
    # range (2 arcsec)
    good_matches = d2d < 2*u.arcsec

    # get all matches that are within 2 arcsec of the target
    idx2 = idx[good_matches]

    # apply file1's dataname to file2's dataname at the indexes specified by
    # idx2
    file2['DataNum'][idx2] = file1['DataNum'][good_matches]

    # now have 2 files with the DataName column matching for stars with RA/Dec
    # close enough
    return file2
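Since neither catalogue above carries distances, match_to_catalog_3d effectively places all coordinates on the unit sphere, where the nearest 3-D neighbour is also the nearest on-sky neighbour. A small sketch of that equivalence, with made-up coordinates:

from astropy.coordinates import SkyCoord
import astropy.units as u

c = SkyCoord(ra=[10.0, 40.0]*u.deg, dec=[0.0, 20.0]*u.deg)
cat = SkyCoord(ra=[10.001, 40.0, 80.0]*u.deg, dec=[0.0, 20.001, -30.0]*u.deg)
idx3, _, _ = c.match_to_catalog_3d(cat)
idx2, _, _ = c.match_to_catalog_sky(cat)
assert (idx3 == idx2).all()  # identical nearest neighbours for distance-less coords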
def make_coreqso_table(dr14qso, ebosstarg):
    if isinstance(dr14qso, str):
        dr14qso = Table.read(dr14qso)
    if isinstance(ebosstarg, str):
        ebosstarg = Table.read(ebosstarg)

    dr14coo = SkyCoord(dr14qso['RA'], dr14qso['DEC'], unit=u.deg)
    # restrict to CORE quasar targets
    ii = np.where((ebosstarg['EBOSS_TARGET1'] & (1 << 10)) > 0)[0]
    ebosstarg = ebosstarg[ii]
    ebosstargcoo = SkyCoord(ebosstarg['RA'], ebosstarg['DEC'], unit=u.deg)

    # now identify confirmed quasars from DR14 in the target list
    m1, m2, sep, _ = dr14coo.search_around_sky(ebosstargcoo, 2*u.arcsec)
    # for some reason there is a repeated entry...
    _, ii = np.unique(m1, return_index=True)
    dr14qso = dr14qso[m2[ii]]

    # just a sanity check
    jj = np.where(dr14qso['EXTINCTION'] > 0)[0]
    assert np.allclose(dr14qso['EXTINCTION'][jj],
                       ebosstarg['EXTINCTION'][m1[ii[jj]]], atol=1e-3)

    # extract all the WISE columns from targeting
    wisecols = ['W1_MAG', 'W1_MAG_ERR',
                'W1_NANOMAGGIES', 'W1_NANOMAGGIES_IVAR',
                'W2_NANOMAGGIES', 'W2_NANOMAGGIES_IVAR',
                'HAS_WISE_PHOT']
    # overwriting the DR14Q flux fields because they have invalid entries
    for k in wisecols + ['EXTINCTION', 'PSFFLUX', 'PSFFLUX_IVAR']:
        dr14qso[k] = ebosstarg[k][m1[ii]]

    dr14qso.write('ebosscore_dr14q.fits', overwrite=True)
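The index convention of search_around_sky used above is easy to get backwards: the first returned index array points into the argument catalogue, the second into the catalogue the method is called on. A minimal sketch with made-up positions:

from astropy.coordinates import SkyCoord
import astropy.units as u

a = SkyCoord([10.0, 20.0]*u.deg, [0.0, 0.0]*u.deg)
b = SkyCoord([10.0002, 30.0]*u.deg, [0.0, 0.0]*u.deg)
idx_b, idx_a, sep2d, _ = a.search_around_sky(b, 2*u.arcsec)
# idx_b indexes into b (the argument); idx_a indexes into a
# here both are [0], with sep2d about 0.7 arcsec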
def transform(self, input_coords):
    """
    Transform one set of coordinates to another
    """
    if self.same_frames:
        return input_coords

    x_in, y_in = input_coords[:, 0], input_coords[:, 1]

    try:
        c_in = SkyCoord(x_in, y_in, unit=(u.deg, u.deg),
                        frame=self.input_system)
    except:  # Astropy < 1.0
        c_in = SkyCoord(x_in, y_in, unit=(u.deg, u.deg),
                        frame=self.input_system.name,
                        **dict((key, getattr(self.input_system, key))
                               for key in self.input_system.get_frame_attr_names().keys()))

    c_out = c_in.transform_to(self.output_system)

    if issubclass(c_out.representation, (SphericalRepresentation,
                                         UnitSphericalRepresentation)):
        lon = c_out.data.lon.deg
        lat = c_out.data.lat.deg
    else:
        lon = c_out.spherical.lon.deg
        lat = c_out.spherical.lat.deg

    return np.concatenate((lon[:, np.newaxis], lat[:, np.newaxis]), axis=1)
def match_sky(reference_data, match_data,
              reference_radec=['ra', 'dec'], match_radec=['ra', 'dec']):
    '''---Find the matches between 2 sets of ra+dec points---

    Inputs:
    -------
    reference_data: usually the catalogue we wish to match to
        (eg. galaxies in GZ).
    match_data: usually a subsidiary dataset, eg. detections in
        ALFALFA, WISE, ...
    reference_radec, match_radec: names of the columns that contain
        ra+dec (in degrees).

    Outputs:
    --------
    ids: 3 column catalogue of 'match index', 'reference index' and
        'separation' (in arcseconds).
    '''
    reference_ra, reference_dec = [np.array(reference_data[i]) for i in reference_radec]
    match_ra, match_dec = [np.array(match_data[i]) for i in match_radec]

    reference_coord = SkyCoord(ra=reference_ra*u.degree, dec=reference_dec*u.degree)
    match_coord = SkyCoord(ra=match_ra*u.degree, dec=match_dec*u.degree)
    idx, sep, _ = match_coord.match_to_catalog_sky(reference_coord)
    match_idx = np.arange(len(match_data))

    ids = Table(np.array([match_idx, idx, sep.arcsecond]).T,
                names=('match_index', 'reference_index', 'separation'))

    print('{} galaxies in the reference catalogue'.format(len(reference_data)))
    print('{} galaxies in the match catalogue'.format(len(match_data)))
    print('---> {} matches in total'.format(len(ids)))
    return ids
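A minimal usage sketch with two tiny tables; the column names follow the defaults and the values are made up:

from astropy.table import Table

reference_data = Table({'ra': [10.0, 20.0, 30.0], 'dec': [-5.0, 0.0, 5.0]})
match_data = Table({'ra': [10.001, 29.999], 'dec': [-5.001, 5.0]})
ids = match_sky(reference_data, match_data)
# each row: index into match_data, index of its nearest reference source,
# and their separation in arcseconds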
def beamcheck(beam_input, ras, decs):
    """
    Creates a box and a circle at the start and end of the obs then checks
    if the source is inside it
    """
    found = False
    # TODO: (BWM) this is just a coding preference, but try to space out big chunks of code.
    source = SkyCoord(ra=ras*u.degree, dec=decs*u.degree, frame='icrs')

    obs, ra_beam, dec_beam, time_beam = beam_input  # in degrees from metadata
    ra_initial, ra_final, dec_top, dec_bot = calcbox(ra_beam, dec_beam, time_beam)
    centrebeam_start = SkyCoord(ra=ra_initial*u.degree, dec=dec_beam*u.degree, frame='icrs')
    centrebeam_end = SkyCoord(ra=ra_final*u.degree, dec=dec_beam*u.degree, frame='icrs')

    # checks if the source is within 10 degrees of the start and end of the file
    angdiff_start = float(centrebeam_start.separation(source).degree)
    angdiff_end = float(centrebeam_end.separation(source).degree)

    # loop is inaccurate for beams near the south pole
    # check the circle beam at the start and end of the observation and the
    # rectangle connecting them
    if ra_initial > ra_final:
        if (angdiff_start < 10.) or (angdiff_end < 10.) or \
                ((((ras > ra_initial) and (ras < 360.)) or
                  ((ras > 0.) and (ras < ra_final))) and
                 ((dec_top > decs) and (dec_bot < decs))):
            found = True
            # TODO: just fyi, if you are comparing a value to two limits,
            # i.e. want to know if x>1 and x<10, the equivalent in python is
            # actually just: if 1<x<10: *do stuff*, rather than having to use
            # a million "and"s
    else:
        if (angdiff_start < 10.) or (angdiff_end < 10.) or \
                (((ras > ra_initial) and (ras < ra_final)) and
                 ((dec_top > decs) and (dec_bot < decs))):
            found = True
    return found
def _convert_radec_to_altaz(ra, dec, lon, lat, height, time):
    """Convert a single position.

    This is done for easy code sharing with other tools.
    Astropy does support arrays of positions.
    """
    radec = SkyCoord(ra, dec, unit='deg')
    location = EarthLocation(lon=Angle(lon, 'deg'), lat=Angle(lat, 'deg'),
                             height=height * u.km)
    # Pressure = 0 is the default
    obstime = Time(time, scale='utc')
    # temperature = 0 * u.deg_C
    # pressure = 0 * u.bar
    # relative_humidity = ?
    # obswl = ?
    altaz_frame = AltAz(obstime=obstime, location=location)
    # temperature=temperature, pressure=pressure)
    altaz = radec.transform_to(altaz_frame)

    az = altaz.az.deg
    alt = altaz.alt.deg

    return dict(az=az, alt=alt)
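A usage sketch with illustrative observer and target values; note the function expects the observer height in km:

pos = _convert_radec_to_altaz(ra=83.633, dec=22.014,
                              lon=-17.89, lat=28.76, height=2.4,
                              time='2020-01-01T03:00:00')
print(pos['az'], pos['alt'])  # horizontal coordinates in degrees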
def convert_catalog(cfile):
    tycho2 = pyfits.open('../data/tycho2.fits')[1].data
    sfile = re.split(r'\.', cfile)[0] + '.txt'
    print(sfile)
    try:
        df = load_data.load_catalog(sfile)
    except IOError:
        print('skip')
        return None
    c = SkyCoord(df['gl']*u.degree, df['gb']*u.degree, frame='galactic')
    catalog = SkyCoord(tycho2['Glon']*u.degree, tycho2['Glat']*u.degree, frame='galactic')
    idx, d2d, d3d = c.match_to_catalog_sky(catalog)
    mask = d2d < 0.001*u.degree
    print(np.sum(mask))
    dtype = np.dtype([('tycho_num', int), ('Glon', '>f4'), ('Glat', '>f4'),
                      ('RAJ2000', '>f4'), ('DEJ2000', '>f4'), ('flux', float),
                      ('nuv', float), ('gl', float), ('gb', float)])
    matched_tycho = tycho2[idx[mask]]
    matched_df = df[mask]
    matched_catalog = np.core.records.fromarrays(
        np.array([idx[mask], matched_tycho['Glon'], matched_tycho['Glat'],
                  matched_tycho['RAJ2000'], matched_tycho['DEJ2000'],
                  np.array(matched_df['FLUX_AUTO']), np.array(matched_df['nuv']),
                  np.array(matched_df['gl']), np.array(matched_df['gb'])]),
        dtype=dtype)
    print(matched_catalog.shape)
    np.save(cfile, matched_catalog)
    return matched_catalog
def test_wcsndmap_set_get_by_coord(npix, binsz, coordsys, proj, skydir, axes):
    geom = WcsGeom.create(npix=npix, binsz=binsz, skydir=skydir, proj=proj,
                          coordsys=coordsys, axes=axes)
    m = WcsNDMap(geom)
    coords = m.geom.get_coord()
    m.set_by_coord(coords, coords[0])
    assert_allclose(coords[0], m.get_by_coord(coords))

    if not geom.is_allsky:
        coords[1][...] = 0.0
        assert_allclose(np.nan * np.ones(coords[0].shape), m.get_by_coord(coords))

    # Test with SkyCoords
    m = WcsNDMap(geom)
    coords = m.geom.get_coord()
    skydir = SkyCoord(coords[0], coords[1], unit='deg',
                      frame=coordsys_to_frame(geom.coordsys))
    skydir_cel = skydir.transform_to('icrs')
    skydir_gal = skydir.transform_to('galactic')

    m.set_by_coord((skydir_gal,) + coords[2:], coords[0])
    assert_allclose(coords[0], m.get_by_coord(coords))
    assert_allclose(m.get_by_coord((skydir_cel,) + coords[2:]),
                    m.get_by_coord((skydir_gal,) + coords[2:]))

    # Test with MapCoord
    m = WcsNDMap(geom)
    coords = m.geom.get_coord()
    coords_dict = dict(lon=coords[0], lat=coords[1])
    if axes:
        for i, ax in enumerate(axes):
            coords_dict[ax.name] = coords[i + 2]
    map_coords = MapCoord.create(coords_dict, coordsys=coordsys)
    m.set_by_coord(map_coords, coords[0])
    assert_allclose(coords[0], m.get_by_coord(map_coords))
def box(coords, unit=None, expand=True):
    """
    Box (rectangle) containing all the `coords`

    Returns
    -------
    (center:SkyCoord, ra_size:Angle, dec_size:Angle)
    """
    unit_kwargs = {}
    if unit is not None:
        unit_kwargs['unit'] = unit
    if isinstance(coords, Table):
        try:
            try:
                coords = SkyCoord.guess_from_table(coords[['ra', 'dec']], **unit_kwargs)
            except u.UnitsError:
                coords = SkyCoord.guess_from_table(coords[['ra', 'dec']],
                                                   unit=(u.hourangle, u.deg))
        except (KeyError, AttributeError):
            coords = SkyCoord.guess_from_table(coords, **unit_kwargs)
    else:
        coords = SkyCoord(coords, **unit_kwargs)

    dra = coords.ra.max() - coords.ra.min()
    ddec = coords.dec.max() - coords.dec.min()
    cra = coords.ra.min() + dra / 2.0
    cdec = coords.dec.min() + ddec / 2.0
    if expand:
        if isinstance(expand, bool):
            expand = 1.1
        dra *= expand
        ddec *= expand
    return SkyCoord(cra, cdec), dra, ddec
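A quick usage sketch with made-up coordinates:

from astropy.coordinates import SkyCoord
import astropy.units as u

coords = SkyCoord(ra=[10.0, 10.5, 11.0]*u.deg, dec=[20.0, 20.2, 20.4]*u.deg)
center, ra_size, dec_size = box(coords, expand=False)
# center at (10.5d, 20.2d); ra_size = 1.0d, dec_size = 0.4d
# (note the RA extent is a raw coordinate difference, not scaled by cos(dec))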
def compute_output_transform(refwcs, filename, fiducial):
    """Compute a simple FITS-type WCS transform
    """
    x0, y0 = refwcs.backward_transform(*fiducial)
    x1 = x0 + 1
    y1 = y0 + 1
    ra0, dec0 = refwcs(x0, y0)
    ra_xdir, dec_xdir = refwcs(x1, y0)
    ra_ydir, dec_ydir = refwcs(x0, y1)

    position0 = SkyCoord(ra=ra0, dec=dec0, unit='deg')
    position_xdir = SkyCoord(ra=ra_xdir, dec=dec_xdir, unit='deg')
    position_ydir = SkyCoord(ra=ra_ydir, dec=dec_ydir, unit='deg')
    offset_xdir = position0.spherical_offsets_to(position_xdir)
    offset_ydir = position0.spherical_offsets_to(position_ydir)

    xscale = np.abs(position0.separation(position_xdir).value)
    yscale = np.abs(position0.separation(position_ydir).value)
    scale = np.sqrt(xscale * yscale)

    c00 = offset_xdir[0].value / scale
    c01 = offset_xdir[1].value / scale
    c10 = offset_ydir[0].value / scale
    c11 = offset_ydir[1].value / scale
    pc_matrix = AffineTransformation2D(matrix=[[c00, c01], [c10, c11]])
    cdelt = Scale(scale) & Scale(scale)

    return pc_matrix | cdelt
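The spherical_offsets_to call above returns true on-sky offsets, i.e. the longitude offset already includes the cos(dec) foreshortening. A small sketch:

from astropy.coordinates import SkyCoord
import astropy.units as u

p0 = SkyCoord(ra=30.0*u.deg, dec=45.0*u.deg)
p1 = SkyCoord(ra=30.001*u.deg, dec=45.0*u.deg)
dlon, dlat = p0.spherical_offsets_to(p1)
# dlon ~ 0.001 deg * cos(45 deg) ~ 2.5 arcsec; dlat ~ 0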
def get_tns_ra_dec(ra, dec, rad=15):
    '''
    Queries the TNS and obtains the targets reported for the specified
    RA, DEC position.  Given that ASASSN targets are reported with a
    7 arcsec positional uncertainty, by default we search within a
    15 arcsec radius.

    ra: float
        position in degrees
    dec: float
        position in degrees
    rad: float, optional
        Search radius in arcseconds.
    '''
    url = "https://wis-tns.weizmann.ac.il/search?&name=&ra={0}&decl={1}&radius={2}&coords_unit=arcsec&format=csv".format(ra, dec, rad)
    cont_url = urlopen(url)
    cont = cont_url.read().decode('utf-8')
    t = Table.read(StringIO(cont), format='ascii.csv')

    if len(t) > 0:
        coords = np.array([t["RA"], t["DEC"]]).T
        c = SkyCoord(coords, frame='icrs', unit=(u.hourangle, u.deg))
        basecoord = SkyCoord(ra, dec, frame='icrs', unit=(u.deg, u.deg))
        # In case there are several objects in the match radius,
        # we select the closest one
        dist = c.separation(basecoord)
        closest = t[np.argmin(dist)]
    else:
        closest = None

    return closest
def cam_to_tel():
    # Coordinates in any frame can be given as a numpy array of the xyz positions
    # e.g. in this case the position on pixels in the camera
    pix_x = np.ones(2048) * u.m
    pix_y = np.ones(2048) * u.m

    # first define the camera frame
    camera_frame = CameraFrame(focal_length=15 * u.m)

    # create a coordinate in that frame
    camera_coord = SkyCoord(pix_x, pix_y, frame=camera_frame)

    # then use the transform_to function to convert to a new system,
    # making sure to give the required values for the conversion
    # (these are not checked yet)
    telescope_coord = camera_coord.transform_to(TelescopeFrame())

    # Print coordinates in the new frame
    print("Telescope Coordinate", telescope_coord)

    # Transforming back is then easy
    camera_coord2 = telescope_coord.transform_to(camera_frame)

    # We can easily check the distance between 2 coordinates in the same frame
    # In this case they should be the same
    print("Separation", np.sum(camera_coord.separation_3d(camera_coord2)))
def test_get_skycoord():
    m31 = SkyCoord(10.6847083*u.deg, 41.26875*u.deg)
    m31_with_distance = SkyCoord(10.6847083*u.deg, 41.26875*u.deg, 780*u.kpc)
    subaru = Observer.at_site('subaru')
    time = Time("2016-01-22 12:00")
    pos, vel = subaru.location.get_gcrs_posvel(time)
    gcrs_frame = GCRS(obstime=Time("2016-01-22 12:00"), obsgeoloc=pos, obsgeovel=vel)
    m31_gcrs = m31.transform_to(gcrs_frame)
    m31_gcrs_with_distance = m31_with_distance.transform_to(gcrs_frame)

    coo = get_skycoord(m31)
    assert coo.is_equivalent_frame(ICRS())
    with pytest.raises(TypeError):
        len(coo)

    coo = get_skycoord([m31])
    assert coo.is_equivalent_frame(ICRS())
    assert len(coo) == 1

    coo = get_skycoord([m31, m31_gcrs])
    assert coo.is_equivalent_frame(ICRS())
    assert len(coo) == 2

    coo = get_skycoord([m31_with_distance, m31_gcrs_with_distance])
    assert coo.is_equivalent_frame(ICRS())
    assert len(coo) == 2

    coo = get_skycoord([m31, m31_gcrs, m31_gcrs_with_distance, m31_with_distance])
    assert coo.is_equivalent_frame(ICRS())
    assert len(coo) == 4

    coo = get_skycoord([m31_gcrs, m31_gcrs_with_distance])
    assert coo.is_equivalent_frame(m31_gcrs.frame)
    assert len(coo) == 2
def calculateSeparation(ra1, dec1, ra2, dec2):
    """
    Returns angular separation between two coordinates (all in degrees).

    Parameters
    ----------
    ra1 : float or numpy array
        RA of coordinate 1 in degrees
    dec1 : float or numpy array
        Dec of coordinate 1 in degrees
    ra2 : float
        RA of coordinate 2 in degrees
    dec2 : float
        Dec of coordinate 2 in degrees

    Returns
    -------
    separation : astropy Angle or numpy array
        Angular separation in degrees
    """
    from astropy.coordinates import SkyCoord
    import astropy.units as u

    coord1 = SkyCoord(ra1, dec1, unit=(u.degree, u.degree), frame='fk5')
    coord2 = SkyCoord(ra2, dec2, unit=(u.degree, u.degree), frame='fk5')

    return coord1.separation(coord2)
def test_against_pyephem():
    """Check that Astropy gives consistent results with one PyEphem example.

    PyEphem: http://rhodesmill.org/pyephem/

    See example input and output here:
    https://gist.github.com/zonca/1672906
    https://github.com/phn/pytpm/issues/2#issuecomment-3698679
    """
    obstime = Time('2011-09-18 08:50:00')
    location = EarthLocation(lon=Angle('-109d24m53.1s'),
                             lat=Angle('33d41m46.0s'),
                             height=30000. * u.m)
    # We are using the default pressure and temperature in PyEphem
    # relative_humidity = ?
    # obswl = ?
    altaz_frame = AltAz(obstime=obstime, location=location,
                        temperature=15 * u.deg_C, pressure=1.010 * u.bar)

    altaz = SkyCoord('6.8927d -60.7665d', frame=altaz_frame)
    radec_actual = altaz.transform_to('icrs')

    radec_expected = SkyCoord('196.497518d -4.569323d', frame='icrs')  # EPHEM
    # radec_expected = SkyCoord('196.496220d -4.569390d', frame='icrs')  # HORIZON
    distance = radec_actual.separation(radec_expected).to('arcsec')
    # TODO: why is this difference so large?
    # It currently is: 31.45187984720655 arcsec
    assert distance < 1e3 * u.arcsec

    # Add assert on current Astropy result so that we notice if something changes
    radec_expected = SkyCoord('196.495372d -4.560694d', frame='icrs')
    distance = radec_actual.separation(radec_expected).to('arcsec')
    # Current value: 0.0031402822944751997 arcsec
    assert distance < 1 * u.arcsec
def do_stage(self, images):
    for image in images:
        self.setup_logging(image)
        try:
            # OFST-RA/DEC is the same as CAT-RA/DEC but includes user requested offset
            requested_coords = SkyCoord(image.header['OFST-RA'], image.header['OFST-DEC'],
                                        unit=(u.hour, u.deg), frame='icrs')
        except ValueError as e:
            try:
                # Fallback to CAT-RA and CAT-DEC
                requested_coords = SkyCoord(image.header['CAT-RA'], image.header['CAT-DEC'],
                                            unit=(u.hour, u.deg), frame='icrs')
            except:
                self.logger.error(e, extra=self.logging_tags)
                continue

        # This only works assuming CRPIX is at the center of the image
        solved_coords = SkyCoord(image.header['CRVAL1'], image.header['CRVAL2'],
                                 unit=(u.deg, u.deg), frame='icrs')

        angular_separation = solved_coords.separation(requested_coords).arcsec

        logs.add_tag(self.logging_tags, 'PNTOFST', angular_separation)

        if abs(angular_separation) > self.SEVERE_THRESHOLD:
            self.logger.error('Pointing offset exceeds threshold', extra=self.logging_tags)
        elif abs(angular_separation) > self.WARNING_THRESHOLD:
            self.logger.warning('Pointing offset exceeds threshold', extra=self.logging_tags)

        image.header['PNTOFST'] = (
            angular_separation, '[arcsec] offset of requested and solved center')

    return images
def test_fk5_equinox_and_epoch_j2000_0_to_topocentric_observed():
    """
    http://phn.github.io/pytpm/conversions.html#fk5-equinox-and-epoch-j2000-0-to-topocentric-observed
    """
    # Observatory position for `kpno` from here:
    # http://idlastro.gsfc.nasa.gov/ftp/pro/astro/observatory.pro
    location = EarthLocation(lon=Angle('-111.598333d'),
                             lat=Angle('31.956389d'),
                             height=2093.093 * u.m)  # TODO: height correct?

    obstime = Time('2010-01-01 12:00:00')
    # relative_humidity = ?
    # obswl = ?
    altaz_frame = AltAz(obstime=obstime, location=location,
                        temperature=0 * u.deg_C, pressure=0.781 * u.bar)

    radec = SkyCoord('12h22m54.899s 15d49m20.57s', frame='fk5')

    altaz_actual = radec.transform_to(altaz_frame)

    altaz_expected = SkyCoord('264d55m06s 37d54m41s', frame='altaz')
    # altaz_expected = SkyCoord('343.586827647d 15.7683070508d', frame='altaz')
    # altaz_expected = SkyCoord('133.498195532d 22.0162383595d', frame='altaz')
    distance = altaz_actual.separation(altaz_expected)
    # print(altaz_actual)
    # print(altaz_expected)
    # print(distance)
    """TODO: Current output is completely incorrect ... xfailing this test for now.

    <SkyCoord (AltAz: obstime=2010-01-01 12:00:00.000, location=(-1994497.7199061865, -5037954.447348028, 3357437.2294832403) m, pressure=781.0 hPa, temperature=0.0 deg_C, relative_humidity=0, obswl=1.0 micron): az=133.4869896371561 deg, alt=67.97857990957701 deg>
    <SkyCoord (AltAz: obstime=None, location=None, pressure=0.0 hPa, temperature=0.0 deg_C, relative_humidity=0, obswl=1.0 micron): az=264.91833333333335 deg, alt=37.91138888888889 deg>
    68d02m45.732s
    """
    assert distance < 1 * u.arcsec
def calculate(self):
    ephem_location = ephem.Observer()
    ephem_location.lat = self.location.latitude.to(u.rad) / u.rad
    ephem_location.lon = self.location.longitude.to(u.rad) / u.rad
    ephem_location.elevation = self.location.height / u.meter
    ephem_location.date = ephem.Date(self.time.datetime)

    if self.data is None:
        self.alt = Latitude([], unit=u.deg)
        self.az = Longitude([], unit=u.deg)
        self.names = Column([], dtype=str)
        self.vmag = Column([])
    else:
        ra = Longitude((self.data['RAh'], self.data['RAm'], self.data['RAs']), u.h)
        dec = Latitude((np.core.defchararray.add(self.data['DE-'],
                                                 self.data['DEd'].astype(str)).astype(int),
                        self.data['DEm'], self.data['DEs']), u.deg)
        c = SkyCoord(ra, dec, frame='icrs')
        altaz = c.transform_to(AltAz(obstime=self.time, location=self.location))
        self.alt = altaz.alt
        self.az = altaz.az
        self.names = self.data['Name']
        self.vmag = self.data['Vmag']

    for ephemeris in self.ephemerides:
        ephemeris.compute(ephem_location)
        self.vmag = self.vmag.insert(0, ephemeris.mag)
        self.alt = self.alt.insert(0, (ephemeris.alt.znorm * u.rad).to(u.deg))
        self.az = self.az.insert(0, (ephemeris.az * u.rad).to(u.deg))
        self.names = self.names.insert(0, ephemeris.name)

    return self.names, self.vmag, self.alt, self.az
def test_disk_distribution(diskclass, diskpar, n_expected):
    '''This is a separate test from test_disk_radius, because it's simpler to
    write if we don't have to worry about the inner hole.

    For the test itself: The results should be Poisson distributed (or, for
    large numbers, almost normal).  That makes testing it a little awkward in
    a short run time, thus the limits are fairly loose.

    This test is run for several extended sources, including Gaussian.
    Strictly speaking it should fail for a Gaussian distribution, but if the
    sigma is large enough it will pass a loose test (and still fail if things
    go catastrophically wrong, e.g. some test circles are outside the source).
    '''
    s = diskclass(coords=SkyCoord(213., -10., unit=u.deg), **diskpar)
    photons = s.generate_photons(1e5)

    n = np.empty(20)
    for i in range(len(n)):
        circ = SkyCoord((213. + np.random.uniform(-0.1, .1)) * u.degree,
                        (-10. + np.random.uniform(-0.1, .1)) * u.degree)
        d = circ.separation(SkyCoord(photons['ra'], photons['dec'], unit='deg'))
        n[i] = (d < 5. * u.arcmin).sum()
    s, p = normaltest(n)
    # assert a p value here that is so small that it's never going to be hit
    # by chance.
    assert p > .05
    # better: Test number of expected photons matches
    # Allow large variation so that this is not triggered by chance
    assert np.isclose(n.mean(), n_expected, rtol=.2)
def mk_radecname(ra, dec, precision=0, prefix='', shortform=False):
    """
    make a radec name from ra, dec e.g. HHMMSSsDDMMSS
    """
    sep = ''
    radec = SkyCoord(ra=ra * u.degree, dec=dec * u.degree)
    if not shortform:
        radecname = radec.to_string('hmsdms', decimal=False,
                                    sep=sep, precision=precision)
    if shortform:
        radec_string = radec.to_string('hmsdms', decimal=False,
                                       sep=sep, precision=0)
        radecname = radec_string[0:4] + radec_string[7:12]

    radecname = np.core.defchararray.replace(radecname, ' ', '')
    if prefix != '':
        # radecname = prefix + radecname does not work
        radecname = np.core.defchararray.add(prefix, radecname)

    return str(radecname)
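A usage sketch; the exact string depends on rounding at the chosen precision:

name = mk_radecname(187.70593, 12.39112, prefix='J')
# -> something like 'J123049+122328'
shortname = mk_radecname(187.70593, 12.39112, shortform=True)
# -> something like '1230+1223'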
def __init__(self, obj, off=None, fits=None):
    """
    obj   Init the main object position ra,dec either degree or hh,dd
    off   same for offset star
    fits  if set read from file
    """
    # assume floats are in deg
    if isinstance(obj['ra'], float):
        self.obj = SkyCoord(obj['ra'], obj['dec'], unit='deg')
    else:
        self.obj = SkyCoord(obj['ra'], obj['dec'], unit=(u.hourangle, u.deg))

    # reference
    if off:
        if isinstance(off['ra'], float):
            self.off = SkyCoord(off['ra'], off['dec'], unit='deg')
        else:
            self.off = SkyCoord(off['ra'], off['dec'], unit=(u.hourangle, u.deg))
        try:
            self.offmag = off['mag']
        except KeyError:
            self.offmag = 0.0

    # deal with image
    if fits:
        self.loadimg(fits)
def detect_sources(file_info, cid, settings):
    from astrotoyz.detect_sources import find_stars
    import astrotoyz.viewer
    session_vars.catalogs[cid] = None
    hdulist = toyz.web.viewer.get_file(file_info)
    wcs = astrotoyz.viewer.get_wcs(file_info, hdulist)
    hdu = hdulist[int(file_info['frame'])]
    settings['img_data'] = hdu.data
    sources = find_stars(**settings)
    catalog = Catalog(cid, file_info=file_info, data=sources)
    catalog.dropna(inplace=True)
    id_name = catalog.settings['data']['id_name']
    if wcs is not None:
        from astropy.coordinates import SkyCoord
        ra_name = catalog.settings['data']['ra_name']
        dec_name = catalog.settings['data']['dec_name']
        wcs_array = wcs.all_pix2world(catalog['x'], catalog['y'], 1)
        catalog[ra_name] = wcs_array[0]
        catalog[dec_name] = wcs_array[1]
        coords = SkyCoord(ra=wcs_array[0], dec=wcs_array[1], unit='deg')
        catalog[id_name] = coords.to_string('hmsdms')
    else:
        sep = np.zeros(shape=(catalog.shape[0],), dtype='|S1')
        sep.fill(',')
        new_id = np.core.defchararray.add(catalog['x'].values.astype('|S10'), sep)
        new_id = np.core.defchararray.add(new_id, catalog['y'].values.astype('|S10'))
        catalog[id_name] = new_id
    catalog.set_index(id_name, inplace=True)
    session_vars.catalogs[cid] = catalog
    print('finished detecting sources')
    return catalog
def predict(self, hillas_dict, inst, pointing_alt, pointing_az):
    '''
    The function you want to call for the reconstruction of the
    event. It takes care of setting up the event and consecutively
    calls the functions for the direction and core position
    reconstruction.  Shower parameters not reconstructed by this
    class are set to np.nan

    Parameters
    ----------
    hillas_dict: dict
        dictionary with telescope IDs as key and
        HillasParametersContainer instances as values
    inst : ctapipe.io.InstrumentContainer
        instrumental description
    pointing_alt: dict[astropy.coordinates.Angle]
        dict mapping telescope ids to pointing altitude
    pointing_az: dict[astropy.coordinates.Angle]
        dict mapping telescope ids to pointing azimuth

    Raises
    ------
    TooFewTelescopesException
        if len(hillas_dict) < 2
    InvalidWidthException
        if any width is np.nan or 0
    '''
    # filter warnings for missing obs time. this is needed because MC data has no obs time
    warnings.filterwarnings(action='ignore', category=MissingFrameAttributeWarning)

    # stereoscopy needs at least two telescopes
    if len(hillas_dict) < 2:
        raise TooFewTelescopesException(
            "need at least two telescopes, have {}".format(len(hillas_dict)))

    # check for np.nan or 0 widths as these screw up weights
    if any([np.isnan(hillas_dict[tel]['width'].value) for tel in hillas_dict]):
        raise InvalidWidthException(
            "A HillasContainer contains an ellipse of width==np.nan")

    if any([hillas_dict[tel]['width'].value == 0 for tel in hillas_dict]):
        raise InvalidWidthException(
            "A HillasContainer contains an ellipse of width==0")

    self.initialize_hillas_planes(hillas_dict, inst.subarray, pointing_alt, pointing_az)

    # algebraic direction estimate
    direction, err_est_dir = self.estimate_direction()

    alt = u.Quantity(list(pointing_alt.values()))
    az = u.Quantity(list(pointing_az.values()))
    if np.any(alt != alt[0]) or np.any(az != az[0]):
        warnings.warn('Divergent pointing not supported')

    telescope_pointing = SkyCoord(alt=alt[0], az=az[0], frame=AltAz())
    # core position estimate using a geometric approach
    core_pos = self.estimate_core_position(hillas_dict, telescope_pointing)

    # container class for reconstructed showers
    result = ReconstructedShowerContainer()
    _, lat, lon = cartesian_to_spherical(*direction)

    # estimate max height of shower
    h_max = self.estimate_h_max()

    # astropy's coordinates system rotates counter-clockwise.
    # Apparently we assume it to be clockwise.
    result.alt, result.az = lat, -lon
    result.core_x = core_pos[0]
    result.core_y = core_pos[1]
    result.core_uncert = np.nan

    result.tel_ids = [h for h in hillas_dict.keys()]
    result.average_intensity = np.mean([h.intensity for h in hillas_dict.values()])
    result.is_valid = True

    result.alt_uncert = err_est_dir
    result.az_uncert = np.nan

    result.h_max = h_max
    result.h_max_uncert = np.nan

    result.goodness_of_fit = np.nan

    return result
def circle_distance(x, y):
    c1 = SkyCoord(x[0], x[1], frame='icrs', unit="deg")
    c2 = SkyCoord(y[0], y[1], frame='icrs', unit="deg")
    sep = c1.separation(c2)
    return sep.deg
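circle_distance takes scalar (lon, lat) pairs; because SkyCoord also accepts arrays, the same pattern vectorises directly:

from astropy.coordinates import SkyCoord

xs = SkyCoord([10.0, 20.0], [0.0, 10.0], frame='icrs', unit='deg')
ys = SkyCoord([10.5, 20.0], [0.0, 11.0], frame='icrs', unit='deg')
seps = xs.separation(ys).deg  # element-wise separations in degrees, shape (2,)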
Dir = '/1/home/heh15/workingspace/Arp240/NGC5257/12CO10/'
imageDir = Dir + 'casa5.4/'
picDir = Dir + 'picture/'
regionDir = Dir + 'region/'

mom0file = imageDir + 'NGC5257_12CO10_combine_pbcor_2rms_mom0.fits'
mom1file = imageDir + 'NGC5257_12CO10_pbcor_cube_mom1.fits'

############################################################
# basic information

galaxy = 'NGC5257'
line = '12CO10'

position = SkyCoord(dec=50.4167 * u.arcmin, ra=204.9706 * u.degree, frame='icrs')

beamra = 204 * u.degree + 58 * u.arcmin + 30 * u.arcsec
beamdec = 50 * u.arcmin + 3 * u.arcsec
beamposition = SkyCoord(dec=beamdec, ra=beamra, frame='icrs')

beammajor = 1.986 * u.arcsec / 2.0
beamminor = 1.576 * u.arcsec / 2.0
pa = -71 * u.degree

############################################################
# function

def fits_import(fitsimage, item=0):
    hdr = fits.open(fitsimage)[item].header
def addGal(self, gal_idx, mag_clm='MAG', z_clm='ZGAL', sens_galaxies=None,
           magbins=None, SPL=None, Rcom_max=None, debug=False, tbinedges=None):
    """ Adds a set of galaxies for clustering analysis from the main astropy Table
    A random sample is also generated

    Parameters
    ----------
    gal_idx : ndarray (int or bool)
        Indices of self.galaxy to add to the clustering analysis
    mag_clm : str, optional
        Name of the column for the galaxy magnitudes
    z_clm : str, optional
        Name of the column for the galaxy redshifts
    sens_galaxies : np.recarray
        array containing the shape of the sensitivity function of the galaxies being added
        Used for generating sensitivity function instead of the added galaxies
        Important to use when the subset is too small for an accurate sensitivity function
    magbins : ndarray (optional)
    SPL : dict (optional)
        Contains the sensitivity function (CubicSpline's)
    Rcom_max : float, optional
        Maximum comoving separation of the survey in Mpc
    tbinedges : ndarray
        Only used for debug=True

    Returns
    -------
    Updates self.galreal internally
    """
    # Grab the galaxies
    sub_gal = self.galaxies[gal_idx]
    # Rename any columns here
    sub_gal.rename_column(mag_clm, 'MAG')
    sub_gal.rename_column(z_clm, 'ZGAL')
    # Strip down to important columns only here, as desired
    # Convert to numpy rec array
    galnew = sub_gal.as_array().view(np.recarray)
    # Calculate randoms
    if sens_galaxies is None:
        sens_galaxies = galnew
    # Sensitivity function
    if (SPL is None) or (magbins is None):
        magbins, _, SPL = spline_sensitivity(sens_galaxies)
    # Randoms
    galnewrand = random_gal(galnew, self.igal_rand, magbins, SPL)
    # Cut on Rcom_max?
    if Rcom_max is not None:
        rcoord = SkyCoord(ra=galnewrand.RA, dec=galnewrand.DEC, unit='deg')
        angsep = self.coord.separation(rcoord)
        # R comoving
        Rcom = self.cosmo.kpc_comoving_per_arcmin(galnewrand.ZGAL) * angsep.to('arcmin')
        # Cut
        goodr = Rcom.to('Mpc').value < Rcom_max
        galnewrand = galnewrand[goodr]
        if debug:
            gcoord = SkyCoord(ra=galnew.RA, dec=galnew.DEC, unit='deg')
            gangsep = self.coord.separation(gcoord)
            gRcom = self.cosmo.kpc_comoving_per_arcmin(galnew.ZGAL) * gangsep.to('arcmin')
            faintR = galnewrand.MAG > 19.
            faintg = galnew.MAG > 19.
            #
            from matplotlib import pyplot as plt
            import matplotlib.gridspec as gridspec
            plt.clf()
            if True:
                gs = gridspec.GridSpec(2, 1)
                ax = plt.subplot(gs[0])
                ax.hist(gRcom[~faintg], color='k', bins=tbinedges, normed=1,
                        label='DD', fill=False)
                ax.hist(Rcom[goodr][~faintR], edgecolor='red', bins=tbinedges,
                        normed=1, label='RR', fill=False)
                # Faint
                ax = plt.subplot(gs[1])
                ax.hist(gRcom[faintg], color='k', bins=tbinedges, normed=1,
                        label='DD', fill=False)
                ax.hist(Rcom[goodr][faintR], edgecolor='red', bins=tbinedges,
                        normed=1, label='RR', fill=False)
                ax.set_ylabel('Faint')
                ax.set_xlabel('Rcom (Mpc)')
            else:
                ax = plt.gca()
                zbins = np.arange(0., 0.8, 0.025)
                ax.hist(galnew.ZGAL[faintg], color='k', bins=zbins, normed=1,
                        label='DD', fill=False)
                ax.hist(galnewrand.ZGAL[faintR], edgecolor='red', bins=zbins,
                        normed=1, label='RR', fill=False)
                ax.set_xlabel('zGAL')
            plt.show()
    # Load me up
    if self.galreal is None:
        self.galreal = galnew  # np rec array with galaxy properties
        self.galrand = galnewrand
    else:
        galnew = galnew.astype(self.galreal.dtype)
        self.galreal = np.append(self.galreal, galnew)
        self.galreal = np.rec.array(self.galreal)
        self.galrand = np.append(self.galrand, galnewrand)
        self.galrand = np.rec.array(self.galrand)
def sky_section(self, bounds, radius=None, wrap_at_180=True):
    """
    Extract a sub section of the survey from the sky

    Parameters
    ----------
    bounds: `list` or `Quantity` or `SkyCoord`
        if `list` or `Quantity` must be formatted as:
            [min Galactic Longitude, max Galactic Longitude,
             min Galactic Latitude, max Galactic Latitude]
        or
            [center Galactic Longitude, center Galactic Latitude]
            and requires radius keyword to be set
        default units of u.deg are assumed
        if `SkyCoord`, must be length 4 or length 1 or length 2
            length 4 specifies 4 corners of rectangular shape
            length 1 specifies center of circular region
                and requires radius keyword to be set
            length 2 specifies two corners of rectangular region
    radius: `number` or `Quantity`, optional, must be keyword
        sets radius of circular region
    wrap_at_180: `bool`, optional, must be keyword
        if True, wraps longitude angles at 180d
        use if mapping across Galactic Center
    """
    if wrap_at_180:
        wrap_at = "180d"
    else:
        wrap_at = "360d"

    if not isinstance(bounds, u.Quantity) | isinstance(bounds, SkyCoord):
        bounds *= u.deg
        logging.warning("No units provided for bounds, assuming u.deg")

    wham_coords = self.get_SkyCoord()

    if isinstance(bounds, SkyCoord):
        if len(bounds) == 1:
            if radius is None:
                raise TypeError("Radius must be provided if only a single coordinate is given")
            elif not isinstance(radius, u.Quantity):
                radius *= u.deg
                logging.warning("No units provided for radius, assuming u.deg")
            center = bounds
        elif len(bounds) >= 2:
            min_lon, max_lon = bounds.l.wrap_at(wrap_at).min(), bounds.l.wrap_at(wrap_at).max()
            min_lat, max_lat = bounds.b.min(), bounds.b.max()
    elif len(bounds) == 2:
        if radius is None:
            raise TypeError("Radius must be provided if only a single coordinate is given")
        elif not isinstance(radius, u.Quantity):
            radius *= u.deg
            logging.warning("No units provided for radius, assuming u.deg")
        center = SkyCoord(l=bounds[0], b=bounds[1], frame='galactic')
    elif len(bounds) == 4:
        min_lon, max_lon, min_lat, max_lat = Angle(bounds)
        min_lon = min_lon.wrap_at(wrap_at)
        max_lon = max_lon.wrap_at(wrap_at)
    else:
        raise TypeError("Input bounds and/or radius are not understood")

    # rectangular extraction
    if radius is None:
        # Mask of points inside rectangular region
        inside_mask = wham_coords.l.wrap_at(wrap_at) <= max_lon
        inside_mask &= wham_coords.l.wrap_at(wrap_at) >= min_lon
        inside_mask &= wham_coords.b <= max_lat
        inside_mask &= wham_coords.b >= min_lat
    else:
        # Circle extraction
        # Compute Separation
        # Warning to self: This is VERY slow
        sep = wham_coords.separation(center)

        # Mask of points inside circular region
        inside_mask = sep <= radius

    return self[inside_mask]
def create_blockvisibility_from_uvfits(fitsname, channum=None, ack=False, antnum=None):
    """ Minimal UVFITS to BlockVisibility converter

    The UVFITS format is much more general than the RASCIL BlockVisibility
    so we cut many corners.

    Creates a list of BlockVisibility's, split by field and spectral window

    :param fitsname: File name of UVFITS
    :param channum: range of channels e.g. range(17,32), default is None meaning all
    :param antnum: the number of antennas
    :return:
    """

    def ParamDict(hdul):
        "Return the dictionary of the random parameters"

        """
        The keys of the dictionary are the parameter names uppercased for
        consistency. The values are the column numbers.

        If multiple parameters have the same name (e.g., DATE) their
        columns are entered as a list.
        """
        pre = re.compile(r"PTYPE(?P<i>\d+)")
        res = {}
        for k, v in hdul.header.items():
            m = pre.match(k)
            if m:
                vu = v.upper()
                if vu in res:
                    res[vu] = [res[vu], int(m.group("i"))]
                else:
                    res[vu] = int(m.group("i"))
        return res

    # Open the file
    with fits.open(fitsname) as hdul:
        # Read Spectral Window
        nspw = hdul[0].header['NAXIS5']
        # Read Channel and Frequency Interval
        freq_ref = hdul[0].header['CRVAL4']
        mid_chan_freq = hdul[0].header['CRPIX4']
        delt_freq = hdul[0].header['CDELT4']
        # Read the number of channels in one spectral window
        channels = hdul[0].header['NAXIS4']
        freq = numpy.zeros([nspw, channels])
        # Read Frequency or IF
        freqhdulname = "AIPS FQ"
        sdhu = hdul.index_of(freqhdulname)
        if_freq = hdul[sdhu].data['IF FREQ'].ravel()
        for i in range(nspw):
            temp = numpy.array([if_freq[i] + freq_ref + delt_freq * ff
                                for ff in range(channels)])
            freq[i, :] = temp[:]
        freq_delt = numpy.ones(channels) * delt_freq
        if channum is None:
            channum = range(channels)

        primary = hdul[0].data

        # Read time
        bvtimes = Time(hdul[0].data['DATE'], hdul[0].data['_DATE'], format='jd')
        bv_times = numpy.unique(bvtimes.jd)
        ntimes = len(bv_times)

        # Get Antenna
        # blin = hdul[0].data['BASELINE']
        antennahdulname = "AIPS AN"
        adhu = hdul.index_of(antennahdulname)
        try:
            antenna_name = hdul[adhu].data['ANNAME']
            antenna_name = antenna_name.encode('ascii', 'ignore')
        except:
            antenna_name = None

        antenna_xyz = hdul[adhu].data['STABXYZ']
        antenna_mount = hdul[adhu].data['MNTSTA']
        try:
            antenna_diameter = hdul[adhu].data['DIAMETER']
        except:
            antenna_diameter = None
        # To read some UVFITS with wrong numbers of antennas
        if antnum is not None:
            if antenna_name is not None:
                antenna_name = antenna_name[:antnum]
                antenna_xyz = antenna_xyz[:antnum]
                antenna_mount = antenna_mount[:antnum]
                if antenna_diameter is not None:
                    antenna_diameter = antenna_diameter[:antnum]
        nants = len(antenna_xyz)

        # res= {}
        # for i,row in enumerate(fin[ahdul].data):
        #     res[row.field("ANNAME") ] = i + 1

        # Get polarisation info
        npol = hdul[0].header['NAXIS3']
        corr_type = numpy.arange(hdul[0].header['NAXIS3']) - (hdul[0].header['CRPIX3'] - 1)
        corr_type *= hdul[0].header['CDELT3']
        corr_type += hdul[0].header['CRVAL3']
        # xx yy xy yx
        # These correspond to the CASA Stokes enumerations
        if numpy.array_equal(corr_type, [1, 2, 3, 4]):
            polarisation_frame = PolarisationFrame('stokesIQUV')
        elif numpy.array_equal(corr_type, [-1, -2, -3, -4]):
            polarisation_frame = PolarisationFrame('circular')
        elif numpy.array_equal(corr_type, [-5, -6, -7, -8]):
            polarisation_frame = PolarisationFrame('linear')
        else:
            raise KeyError("Polarisation not understood: %s" % str(corr_type))

        configuration = Configuration(name='', data=None, location=None,
                                      names=antenna_name, xyz=antenna_xyz,
                                      mount=antenna_mount, frame=None,
                                      receptor_frame=polarisation_frame,
                                      diameter=antenna_diameter)

        # Get RA and DEC
        phase_center_ra_degrees = float(hdul[0].header['CRVAL6'])
        phase_center_dec_degrees = float(hdul[0].header['CRVAL7'])

        # Get phasecentres
        phasecentre = SkyCoord(ra=phase_center_ra_degrees * u.deg,
                               dec=phase_center_dec_degrees * u.deg,
                               frame='icrs', equinox='J2000')

        # Get UVW
        d = ParamDict(hdul[0])
        if "UU" in d:
            uu = hdul[0].data['UU']
            vv = hdul[0].data['VV']
            ww = hdul[0].data['WW']
        else:
            uu = hdul[0].data['UU---SIN']
            vv = hdul[0].data['VV---SIN']
            ww = hdul[0].data['WW---SIN']

        _vis = hdul[0].data['DATA']

        # _vis.shape = (nchan, ntimes, (nants*(nants-1)//2 ), npol, -1)
        # self.vis = -(_vis[...,0] * 1.j + _vis[...,1])
        row = 0
        nchan = len(channum)
        vis_list = list()
        for spw_index in range(nspw):
            bv_vis = numpy.zeros([ntimes, nants, nants, nchan, npol]).astype('complex')
            bv_weight = numpy.zeros([ntimes, nants, nants, nchan, npol])
            bv_uvw = numpy.zeros([ntimes, nants, nants, 3])
            for time_index, time in enumerate(bv_times):
                # restfreq = freq[channel_index]
                for antenna1 in range(nants - 1):
                    for antenna2 in range(antenna1 + 1, nants):
                        for channel_no, channel_index in enumerate(channum):
                            for pol_index in range(npol):
                                bv_vis[time_index, antenna2, antenna1, channel_no, pol_index] = \
                                    complex(_vis[row, :, :, spw_index, channel_index, pol_index, 0],
                                            _vis[row, :, :, spw_index, channel_index, pol_index, 1])
                                bv_weight[time_index, antenna2, antenna1, channel_no, pol_index] = \
                                    _vis[row, :, :, spw_index, channel_index, pol_index, 2]
                        bv_uvw[time_index, antenna2, antenna1, 0] = uu[row] * constants.c.value
                        bv_uvw[time_index, antenna2, antenna1, 1] = vv[row] * constants.c.value
                        bv_uvw[time_index, antenna2, antenna1, 2] = ww[row] * constants.c.value
                        row += 1
            vis_list.append(BlockVisibility(uvw=bv_uvw,
                                            time=bv_times,
                                            frequency=freq[spw_index][channum],
                                            channel_bandwidth=freq_delt[channum],
                                            vis=bv_vis,
                                            weight=bv_weight,
                                            imaging_weight=bv_weight,
                                            configuration=configuration,
                                            phasecentre=phasecentre,
                                            polarisation_frame=polarisation_frame))
    return vis_list
def create_blockvisibility_from_ms(msname, channum=None, start_chan=None, end_chan=None,
                                   ack=False, datacolumn='DATA', selected_sources=None,
                                   selected_dds=None):
    """ Minimal MS to BlockVisibility converter

    The MS format is much more general than the RASCIL BlockVisibility so we
    cut many corners. This requires casacore to be installed. If not, an
    exception ModuleNotFoundError is raised.

    Creates a list of BlockVisibility's, split by field and spectral window

    Reading of a subset of channels is possible using either start_chan and
    end_chan or channum. Using start_chan and end_chan is preferred since it
    only reads the channels required. Channum is more flexible and can be
    used to read a random list of channels.

    :param msname: File name of MS
    :param channum: range of channels e.g. range(17,32), default is None meaning all
    :param start_chan: Starting channel to read
    :param end_chan: End channel to read
    :return:
    """
    try:
        from casacore.tables import table  # pylint: disable=import-error
    except ModuleNotFoundError:
        raise ModuleNotFoundError("casacore is not installed")
    try:
        from rascil.processing_components.visibility import msv2
    except ModuleNotFoundError:
        raise ModuleNotFoundError("cannot import msv2")

    tab = table(msname, ack=ack)
    log.debug("create_blockvisibility_from_ms: %s" % str(tab.info()))

    if selected_sources is None:
        fields = numpy.unique(tab.getcol('FIELD_ID'))
    else:
        fieldtab = table('%s/FIELD' % msname, ack=False)
        sources = fieldtab.getcol('NAME')
        fields = list()
        for field, source in enumerate(sources):
            if source in selected_sources:
                fields.append(field)
        assert len(fields) > 0, "No sources selected"

    if selected_dds is None:
        dds = numpy.unique(tab.getcol('DATA_DESC_ID'))
    else:
        dds = selected_dds

    log.debug("create_blockvisibility_from_ms: Reading unique fields %s, unique data descriptions %s"
              % (str(fields), str(dds)))
    vis_list = list()
    for field in fields:
        ftab = table(msname, ack=ack).query('FIELD_ID==%d' % field, style='')
        for dd in dds:
            meta = {'MSV2': {'FIELD_ID': field, 'DATA_DESC_ID': dd}}
            ms = ftab.query('DATA_DESC_ID==%d' % dd, style='')
            assert ms.nrows() > 0, "Empty selection for FIELD_ID=%d and DATA_DESC_ID=%d" % (field, dd)
            log.debug("create_blockvisibility_from_ms: Found %d rows" % (ms.nrows()))
            # The TIME column has descriptor:
            # {'valueType': 'double', 'dataManagerType': 'IncrementalStMan',
            #  'dataManagerGroup': 'TIME', 'option': 0, 'maxlen': 0,
            #  'comment': 'Modified Julian Day',
            #  'keywords': {'QuantumUnits': ['s'], 'MEASINFO': {'type': 'epoch', 'Ref': 'UTC'}}}
            otime = ms.getcol('TIME')
            datacol = ms.getcol(datacolumn, nrow=1)
            datacol_shape = list(datacol.shape)
            channels = datacol.shape[-2]
            log.debug("create_blockvisibility_from_ms: Found %d channels" % (channels))
            if channum is None:
                if start_chan is not None and end_chan is not None:
                    try:
                        log.debug("create_blockvisibility_from_ms: Reading channels from %d to %d"
                                  % (start_chan, end_chan))
                        blc = [start_chan, 0]
                        trc = [end_chan, datacol_shape[-1] - 1]
                        channum = range(start_chan, end_chan + 1)
                        ms_vis = ms.getcolslice(datacolumn, blc=blc, trc=trc)
                        ms_weight = ms.getcol('WEIGHT')
                    except IndexError:
                        raise IndexError("channel number exceeds max. within ms")
                else:
                    log.debug("create_blockvisibility_from_ms: Reading all %d channels" % (channels))
                    try:
                        channum = range(channels)
                        ms_vis = ms.getcol(datacolumn)[:, channum, :]
                        ms_weight = ms.getcol('WEIGHT')
                    except IndexError:
                        raise IndexError("channel number exceeds max. within ms")
            else:
                log.debug("create_blockvisibility_from_ms: Reading channels %s " % (channum))
                try:
                    ms_vis = ms.getcol(datacolumn)[:, channum, :]
                    ms_weight = ms.getcol('WEIGHT')[:, :]
                except IndexError:
                    raise IndexError("channel number exceeds max. within ms")

            uvw = -1 * ms.getcol('UVW')
            antenna1 = ms.getcol('ANTENNA1')
            antenna2 = ms.getcol('ANTENNA2')
            integration_time = ms.getcol('INTERVAL')

            # time = Time((time-integration_time/2.0)/86400+ 2400000.5,format='jd',scale='utc').utc.value
            time = (otime - integration_time / 2.0)

            start_time = numpy.min(time) / 86400.0
            end_time = numpy.max(time) / 86400.0

            log.debug("create_blockvisibility_from_ms: Observation from %s to %s"
                      % (Time(start_time, format='mjd').iso, Time(end_time, format='mjd').iso))

            # Now get info from the subtables
            spwtab = table('%s/SPECTRAL_WINDOW' % msname, ack=False)
            cfrequency = spwtab.getcol('CHAN_FREQ')[dd][channum]
            cchannel_bandwidth = spwtab.getcol('CHAN_WIDTH')[dd][channum]
            nchan = cfrequency.shape[0]

            # Get polarisation info
            npol = 4
            poltab = table('%s/POLARIZATION' % msname, ack=False)
            corr_type = poltab.getcol('CORR_TYPE')
            # These correspond to the CASA Stokes enumerations
            if numpy.array_equal(corr_type[0], [1, 2, 3, 4]):
                polarisation_frame = PolarisationFrame('stokesIQUV')
            elif numpy.array_equal(corr_type[0], [5, 6, 7, 8]):
                polarisation_frame = PolarisationFrame('circular')
            elif numpy.array_equal(corr_type[0], [9, 10, 11, 12]):
                polarisation_frame = PolarisationFrame('linear')
            elif numpy.array_equal(corr_type[0], [9]):
                npol = 1
                polarisation_frame = PolarisationFrame('stokesI')
            else:
                raise KeyError("Polarisation not understood: %s" % str(corr_type))

            # Get configuration
            anttab = table('%s/ANTENNA' % msname, ack=False)
            nants = anttab.nrows()
            mount = anttab.getcol('MOUNT')
            names = anttab.getcol('NAME')
            diameter = anttab.getcol('DISH_DIAMETER')
            xyz = anttab.getcol('POSITION')
            configuration = Configuration(name='', data=None, location=None,
                                          names=names, xyz=xyz, mount=mount, frame=None,
                                          receptor_frame=ReceptorFrame("linear"),
                                          diameter=diameter)
            # Get phasecentres
            fieldtab = table('%s/FIELD' % msname, ack=False)
            pc = fieldtab.getcol('PHASE_DIR')[field, 0, :]
            source = fieldtab.getcol('NAME')[field]
            phasecentre = SkyCoord(ra=pc[0] * u.rad, dec=pc[1] * u.rad,
                                   frame='icrs', equinox='J2000')

            time_index_row = numpy.zeros_like(time, dtype='int')
            time_last = time[0]
            time_index = 0
            for row, _ in enumerate(time):
                if time[row] > time_last + integration_time[row]:
                    assert time[row] > time_last, "MS is not time-sorted - cannot convert"
                    time_index += 1
                    time_last = time[row]
                time_index_row[row] = time_index

            ntimes = time_index + 1

            bv_times = numpy.zeros([ntimes])
            bv_vis = numpy.zeros([ntimes, nants, nants, nchan, npol]).astype('complex')
            bv_weight = numpy.zeros([ntimes, nants, nants, nchan, npol])
            bv_imaging_weight = numpy.zeros([ntimes, nants, nants, nchan, npol])
            bv_uvw = numpy.zeros([ntimes, nants, nants, 3])
            bv_integration_time = numpy.zeros([ntimes])

            for row, _ in enumerate(time):
                time_index = time_index_row[row]
                bv_times[time_index] = time[row]
                bv_vis[time_index, antenna2[row], antenna1[row], ...] = ms_vis[row, ...]
                bv_weight[time_index, antenna2[row], antenna1[row], :, ...] = ms_weight[row, numpy.newaxis, ...]
                bv_imaging_weight[time_index, antenna2[row], antenna1[row], :, ...] = ms_weight[row, numpy.newaxis, ...]
                bv_uvw[time_index, antenna2[row], antenna1[row], :] = uvw[row, :]
                bv_integration_time[time_index] = integration_time[row]

            vis_list.append(BlockVisibility(uvw=bv_uvw,
                                            time=bv_times,
                                            frequency=cfrequency,
                                            channel_bandwidth=cchannel_bandwidth,
                                            vis=bv_vis,
                                            weight=bv_weight,
                                            integration_time=bv_integration_time,
                                            imaging_weight=bv_imaging_weight,
                                            configuration=configuration,
                                            phasecentre=phasecentre,
                                            polarisation_frame=polarisation_frame,
                                            source=source, meta=meta))
    tab.close()
    return vis_list
def main(images, regions, colors, labels, shrdsfile=None, fluxlimit=0., wisefile=None, sigma=0., levels=[5., 10., 20., 50., 100.]): """ Plot some regions on top of some images with specified colors, and create PDF. Inputs: images = list of fits image names to plot regions = list of region filenames to plot colors = what colors to plot each region labels = label for regions shrdsfile = if not None, path to SHRDS candidates data file This will plot the SHRDS candidate regions on top fluxlimit = only plot WISE regions brighter than this peak continuum flux density (mJy/beam) wisefile = if not None, path to WISE positions data file This will plot the WISE regions on top sigma = if > 0., will plot colormap with contours at levels * sigma levels = list of contour levels Returns: Nothing """ levels = np.array(levels) outimages = [] for image in images: # # Open fits file, generate WCS # hdu = fits.open(image)[0] wcs = WCS(hdu.header) # # Generate figure # plt.ioff() fig = plt.figure() wcs_celest = wcs.sub(['celestial']) ax = plt.subplot(projection=wcs_celest) ax.set_title(image.replace('.fits', '')) # image cax = ax.imshow(hdu.data[0, 0], origin='lower', interpolation='none', cmap='viridis') # contours if sigma > 0.: con = ax.contour(hdu.data[0, 0], origin='lower', levels=levels * sigma, colors='k', linewidths=0.2) xlen, ylen = hdu.data[0, 0].shape ax.coords[0].set_major_formatter('hh:mm:ss') ax.set_xlabel('RA (J2000)') ax.set_ylabel('Declination (J2000)') # # Adjust limits # ax.set_xlim(0.1 * xlen, 0.9 * xlen) ax.set_ylim(0.1 * ylen, 0.9 * ylen) # # Plot colorbar # cbar = fig.colorbar(cax, fraction=0.046, pad=0.04) cbar.set_label('Flux Density (Jy/beam)') # # Plot beam, if it is defined # pixsize = hdu.header['CDELT2'] # deg if 'BMAJ' in hdu.header.keys(): beam_maj = hdu.header['BMAJ'] / pixsize # pix beam_min = hdu.header['BMIN'] / pixsize # pix beam_pa = hdu.header['BPA'] ellipse = Ellipse((1. / 8. * xlen, 1. / 8. 
* ylen), beam_min, beam_maj, angle=beam_pa, fill=True, zorder=10, hatch='///', edgecolor='black', facecolor='white') ax.add_patch(ellipse) # # Plot regions # for reg, col, lab in zip(regions, colors, labels): if not os.path.exists(reg): continue # read second line in region file with open(reg, 'r') as f: f.readline() data = f.readline() # handle point region if 'ellipse' in data: splt = data.split(' ') RA = splt[1].replace('[[', '').replace(',', '') RA_h, RA_m, RA_s = RA.split(':') RA = '{0}h{1}m{2}s'.format(RA_h, RA_m, RA_s) dec = splt[2].replace('],', '') dec_d, dec_m, dec_s, dec_ss = dec.split('.') dec = '{0}d{1}m{2}.{3}s'.format(dec_d, dec_m, dec_s, dec_ss) coord = SkyCoord(RA, dec) ax.plot(coord.ra.value, coord.dec.value, '+', color=col, markersize=10, transform=ax.get_transform('world'), label=lab) # handle point region elif 'poly' in data: splt = data.split('[[')[1] splt = splt.split(']]')[0] parts = splt.split(' ') RAs = [] decs = [] for ind in range(0, len(parts), 2): RA = parts[ind].replace('[', '').replace(',', '') RA_h, RA_m, RA_s = RA.split(':') RA = '{0}h{1}m{2}s'.format(RA_h, RA_m, RA_s) dec = parts[ind + 1].replace('],', '') dec_d, dec_m, dec_s, dec_ss = dec.split('.') dec = '{0}d{1}m{2}.{3}s'.format( dec_d, dec_m, dec_s, dec_ss) coord = SkyCoord(RA, dec) RAs.append(coord.ra.value) decs.append(coord.dec.value) RAs.append(RAs[0]) decs.append(decs[0]) ax.plot(RAs, decs, marker=None, linestyle='solid', color=col, transform=ax.get_transform('world'), label=lab, zorder=110) # # Add regions legend # if len(regions) > 0: region_legend = plt.legend(loc='upper right', fontsize=10) ax.add_artist(region_legend) # # Plot SHRDS candidate regions # if shrdsfile is not None: shrdsdata = np.genfromtxt(shrdsfile, dtype=None, delimiter=',', encoding='UTF-8', usecols=(0, 1, 4, 5, 6, 7), skip_header=1, names=('name', 'GName', 'RA', 'Dec', 'diameter', 'flux')) RA = np.zeros(len(shrdsdata)) Dec = np.zeros(len(shrdsdata)) for i, dat in enumerate(shrdsdata): parts = [float(part) for part in dat['RA'].split(':')] RA[i] = 360. / 24. * (parts[0] + parts[1] / 60. + parts[2] / 3600.) parts = [float(part) for part in dat['Dec'].split(':')] Dec[i] = np.abs(parts[0]) + parts[1] / 60. + parts[2] / 3600. if '-' in dat['Dec']: Dec[i] = -1. * Dec[i] # limit only to regions with centers within image corners = wcs_celest.calc_footprint() min_RA = np.min(corners[:, 0]) max_RA = np.max(corners[:, 0]) RA_range = max_RA - min_RA #min_RA += RA_range #max_RA -= RA_range min_Dec = np.min(corners[:, 1]) max_Dec = np.max(corners[:, 1]) Dec_range = max_Dec - min_Dec #min_Dec += Dec_range #max_Dec -= Dec_range good = (min_RA < RA) & (RA < max_RA) & (min_Dec < Dec) & ( Dec < max_Dec) & (shrdsdata['flux'] > fluxlimit) # plot them shrdsdata = shrdsdata[good] RA = RA[good] Dec = Dec[good] for R, D, dat in zip(RA, Dec, shrdsdata): xpos, ypos = wcs_celest.wcs_world2pix(R, D, 1) size = dat['diameter'] / 3600. 
/ pixsize ell = Ellipse((xpos, ypos), size, size, color='m', fill=False, linestyle='dashed', zorder=105) ax.add_patch(ell) ax.text(R, D, dat['GName'], transform=ax.get_transform('world'), fontsize=10, zorder=105) # # Plot WISE regions # if wisefile is not None: wisedata = np.genfromtxt(wisefile, dtype=None, names=True, encoding='UTF-8') # limit only to regions with centers within image corners = wcs_celest.calc_footprint() min_RA = np.min(corners[:, 0]) max_RA = np.max(corners[:, 0]) RA_range = max_RA - min_RA #min_RA += RA_range #max_RA -= RA_range min_Dec = np.min(corners[:, 1]) max_Dec = np.max(corners[:, 1]) Dec_range = max_Dec - min_Dec #min_Dec += Dec_range #max_Dec -= Dec_range good = (min_RA < wisedata['RA']) & (wisedata['RA'] < max_RA) & ( min_Dec < wisedata['Dec']) & (wisedata['Dec'] < max_Dec) # plot them wisedata = wisedata[good] for dat in wisedata: xpos, ypos = wcs_celest.wcs_world2pix(dat['RA'], dat['Dec'], 1) size = dat['Size'] * 2. / 3600. / pixsize ell = Ellipse((xpos, ypos), size, size, color='y', fill=False, linestyle='dashed', zorder=100) ax.add_patch(ell) ax.text(dat['RA'], dat['Dec'], dat['GName'], transform=ax.get_transform('world'), fontsize=10, zorder=100) # # Add WISE+SHRDS legend # if shrdsfile is not None or wisefile is not None: patches = [] if shrdsfile is not None: ell = Ellipse((0, 0), 0.1, 0.1, color='m', fill=False, linestyle='dashed', label='SHRDS Candidates') patches.append(ell) if wisefile is not None: ell = Ellipse((0, 0), 0.1, 0.1, color='y', fill=False, linestyle='dashed', label='WISE Catalog') patches.append(ell) wise_legend = plt.legend(handles=patches, loc='lower right', fontsize=10, handler_map={Ellipse: HandlerEllipse()}) ax.add_artist(wise_legend) # # Re-scale to fit, then save # fig.savefig(image.replace('.fits', '.reg.pdf'), bbox_inches='tight') plt.close(fig) plt.ion() outimages.append(image.replace('.fits', '.reg.pdf'))
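# --- Hedged usage sketch for main() above; the file names are hypothetical,
# and each FITS image is expected to carry a celestial WCS plus two leading
# (frequency/Stokes) axes, as assumed by the hdu.data[0, 0] indexing.
main(images=['field1.fits'], regions=['field1.rgn'],
     colors=['r'], labels=['target'], sigma=1.0e-4,
     levels=[5., 10., 20.])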
def pointing_radec(self): """Pointing positions as ICRS (`~astropy.coordinates.SkyCoord`).""" return SkyCoord(self["RA_PNT"], self["DEC_PNT"], unit="deg", frame="icrs")
#ratio = np.mean((1.-dead[ratio_mask])/(tec[ratio_mask]/fec[ratio_mask]))
ratio = np.median(1. - dead[ratio_mask]) / np.median(
    tec[ratio_mask] / fec[ratio_mask])
print('ratio:{0}'.format(ratio))
dead[fec_mask] = 1. - ratio * tec[fec_mask] / fec[fec_mask]
dead_tmp = dead[ix_tmp]
dead_list.append(dead_tmp)
cut_list.append(cut[ix_cut, 1])

#asp_old = np.load('../data/photon_list/%s_asp.npy'%name)
#sky_data = SkyCoord(asp_old[:,1:3], unit='deg', frame=FK5, equinox='J2000.0')

#calculate the image size and location
#asp_map = np.load('{0}/{1}{2}_asp.npy'.format(guide_path, name, guide_suffix))
asp_map = np.load('{0}/{1}_asp.npy'.format(guide_path, name))
sky_data = SkyCoord(asp_map[:, 1:3], unit='deg', frame=FK5, equinox='J2000.0')
gal = sky_data.transform_to(Galactic)
asprta = np.concatenate(
    (np.array([gal.l.deg]).T, np.array([gal.b.deg]).T), axis=1)
gal_l.append(np.mean(asprta[:, 0]))

asp_solution = np.concatenate(asp_solution_list, axis=0)
sky_data = SkyCoord(asp_solution[:, 1:3], unit='deg', frame=FK5,
                    equinox='J2000.0')
gal = sky_data.transform_to(Galactic)
asp_solution[:, 1:3] = np.concatenate(
    (np.array([gal.l.deg]).T, np.array([gal.b.deg]).T), axis=1)
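# --- Minimal standalone sketch of the FK5 -> Galactic conversion pattern used
# above (the coordinate values are illustrative, not from real aspect files).
import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord, FK5, Galactic

radec = np.array([[150.00, 2.20], [150.05, 2.25]])  # columns: RA, Dec [deg]
sky = SkyCoord(ra=radec[:, 0] * u.deg, dec=radec[:, 1] * u.deg,
               frame=FK5, equinox='J2000.0')
gal = sky.transform_to(Galactic)
lb = np.column_stack([gal.l.deg, gal.b.deg])  # same shape as the input array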
def handle_grb(v, pretend=False): """ Handles the actual VOEvent parsing, generating observations if appropriate. :param v: string in VOEvent XML format :param pretend: Boolean, True if we don't want to actually schedule the observations. :return: None """ log.debug("processing GRB {0}".format(v.attrib['ivorn'])) # trigger = False if 'SWIFT' in v.attrib['ivorn']: # compute the trigger id trig_id = "SWIFT_" + v.attrib['ivorn'].split('_')[-1].split('-')[0] # #The following should never be hit because of the checks made in is_grb. # grbid = v.find(".//Param[@name='GRB_Identified']").attrib['value'] # if grbid != 'true': # log.debug("SWIFT alert but not a GRB") # handlers.send_email(from_address='*****@*****.**', # to_addresses=DEBUG_NOTIFY_LIST, # subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id, # msg_text=DEBUG_EMAIL_TEMPLATE % "SWIFT alert but not a GRB", # attachments=[('voevent.xml', voeventparse.dumps(v))]) # # return log.debug("SWIFT GRB trigger detected") this_trig_type = "SWIFT" # If the star tracker looses it's lock then we can't trust any of the locations so we ignore this alert. startrack_lost_lock = v.find( ".//Param[@name='StarTrack_Lost_Lock']").attrib['value'] # convert 'true' to True, and everything else to false startrack_lost_lock = startrack_lost_lock.lower() == 'true' log.debug("StarLock OK? {0}".format(not startrack_lost_lock)) if startrack_lost_lock: log.debug("The SWIFT star tracker lost it's lock") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % "SWIFT alert for GRB, but with StarTrack_Lost_Lock", attachments=[('voevent.xml', voeventparse.dumps(v))]) return # cache the event using the trigger id if trig_id not in xml_cache: grb = GRB(event=v) grb.trigger_id = trig_id xml_cache[trig_id] = grb else: grb = xml_cache[trig_id] grb.add_event(v) trig_time = float( v.find(".//Param[@name='Integ_Time']").attrib['value']) if trig_time < LONG_SHORT_LIMIT: grb.debug("Probably a short GRB: t={0} < 2".format(trig_time)) grb.short = True grb.vcsmode = SWIFT_SHORT_TRIGGERS_IN_VCSMODE trigger = True else: grb.debug("Probably a long GRB: t={0} > 2".format(trig_time)) grb.short = False grb.vcsmode = SWIFT_LONG_TRIGGERS_IN_VCSMODE trigger = True elif "Fermi" in v.attrib['ivorn']: log.debug("Fermi GRB notice detected") # cache the event using the trigger id trig_id = "Fermi_" + v.attrib['ivorn'].split('_')[-2] this_trig_type = v.attrib['ivorn'].split('_')[1] # Flt, Gnd, or Fin if trig_id not in xml_cache: grb = GRB(event=v) grb.trigger_id = trig_id xml_cache[trig_id] = grb else: grb = xml_cache[trig_id] grb.add_event(v) # Not all alerts have trigger times. 
# eg Fermi#GBM_Gnd_Pos if this_trig_type == 'Flt': trig_time = float( v.find(".//Param[@name='Trig_Timescale']").attrib['value']) if trig_time < LONG_SHORT_LIMIT: grb.short = True grb.debug("Possibly a short GRB: t={0}".format(trig_time)) else: msg = "Probably not a short GRB: t={0}".format(trig_time) grb.debug(msg) grb.debug("Not Triggering") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return # don't trigger most_likely = int( v.find(".//Param[@name='Most_Likely_Index']").attrib['value']) # ignore things that don't have GRB as best guess if most_likely == 4: grb.debug("MOST_LIKELY = GRB") prob = int( v.find( ".//Param[@name='Most_Likely_Prob']").attrib['value']) # ignore things that don't reach our probability threshold if prob > FERMI_POBABILITY_THRESHOLD: grb.debug("Prob(GRB): {0}% > {1}".format( prob, FERMI_POBABILITY_THRESHOLD)) trigger = True else: msg = "Prob(GRB): {0}% <{1}".format( prob, FERMI_POBABILITY_THRESHOLD) grb.debug(msg) grb.debug("Not Triggering") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject= 'GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return else: msg = "MOST_LIKELY != GRB" grb.debug(msg) grb.debug("Not Triggering") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return else: # for Gnd/Fin we trigger if we already triggered on the Flt position grb.debug("Gnd/Flt message -> reverting to Flt trigger") trigger = grb.triggered else: msg = "Not a Fermi or SWIFT GRB." 
log.debug(msg) log.debug("Not Triggering") handlers.send_email(from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification', msg_text=DEBUG_EMAIL_TEMPLATE % msg, attachments=[('voevent.xml', voeventparse.dumps(v)) ]) return if not trigger: grb.debug("Not Triggering") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return # get current position ra, dec, err = handlers.get_position_info(v) # add it to the list of positions grb.add_pos((ra, dec, err)) grb.debug("RA {0}, Dec {1}, err {2}".format(ra, dec, err)) if not grb.vcsmode: req_time_min = 30 else: grb.debug('Reducing request time to %d for VCS observation' % SWIFT_SHORT_VCS_TIME) req_time_min = SWIFT_SHORT_VCS_TIME # check repointing just for tests # last_pos = grb.get_pos(-2) # if None not in last_pos: # grb.info("Old position: RA {0}, Dec {1}, err {2}".format(*last_pos)) # # pos_diff = SkyCoord(ra=last_pos[0], dec=last_pos[1], unit=astropy.units.degree, frame='icrs').separation( # SkyCoord(ra=ra, dec=dec, unit=astropy.units.degree, frame='icrs')).degree # if pos_diff < REPOINTING_LIMIT: # grb.info("New position is {0} deg from previous (less than constraint of {1} deg)".format(pos_diff, # REPOINTING_LIMIT)) # grb.info("Not triggering") # handlers.send_email(from_address='*****@*****.**', # to_addresses=DEBUG_NOTIFY_LIST, # subject='GRB_fermi_swift debug notification', # msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), # attachments=[('voevent.xml', voeventparse.dumps(v))]) # return # else: # grb.info("New position is {0} deg from previous (greater than constraint of {1} deg".format(pos_diff, # REPOINTING_LIMIT)) # grb.info("Attempting trigger") # end tests # look at the schedule obslist = triggerservice.obslist(obstime=1800) if obslist is not None and len(obslist) > 0: grb.debug("Currently observing:") grb.debug(str(obslist)) # are we currently observing *this* GRB? obs = str( obslist[0][1]) # in case the obslist is returning unicode strings obs_group_id = obslist[0][ 5] # The group ID of the first observation in the list returned grb.debug("obs {0}, trig {1}".format(obs, trig_id)) # Same GRB trigger from same telescope if trig_id in obs: # if obs == trig_id: # update the schedule! 
grb.info("Already observing this GRB") last_pos = grb.get_pos(-2) grb.info( "Old position: RA {0}, Dec {1}, err {2}".format(*last_pos)) pos_diff = SkyCoord(ra=last_pos[0], dec=last_pos[1], unit=astropy.units.degree, frame='icrs').separation( SkyCoord(ra=ra, dec=dec, unit=astropy.units.degree, frame='icrs')).degree grb.info("New position is {0} deg from previous".format(pos_diff)) if pos_diff < REPOINTING_LIMIT: grb.info("(less than constraint of {0} deg)".format( REPOINTING_LIMIT)) grb.info("Not triggering") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification', msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return grb.info( "(greater than constraint of {0}deg)".format(REPOINTING_LIMIT)) if "SWIFT" in trig_id: grb.info("Updating SWIFT observation with new coords") pass elif "Fermi" in trig_id: prev_type = grb.last_trig_type if this_trig_type == 'Flt' and (prev_type in ['Gnd', 'Fin']): msg = "{0} positions have precedence over {1}".format( prev_type, this_trig_type) grb.info(msg) grb.info("Not triggering") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject= 'GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return elif this_trig_type == 'Gnd' and prev_type == 'Fin': msg = "{0} positions have precedence over {1}".format( prev_type, this_trig_type) grb.info(msg) grb.info("Not triggering") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject= 'GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return else: grb.info("Triggering {0} to replace {1}".format( this_trig_type, prev_type)) # shorten the observing time requested so we are ~30mins total (for non VCS). # If this is a VCS mode observation, don't shorten the time - if the previous trigger was # in VCS mode, we won't be able to interrupt it, and if it wasn't, we still want the normal # length of a VCS trigger. 
if (grb.first_trig_time is not None) and not grb.vcsmode: req_time_min = 30 - (Time.now() - grb.first_trig_time).sec // 60 grb.debug('Set requested time to %d' % req_time_min) # if we are observing a SWIFT trigger but not the trigger we just received elif 'SWIFT' in obs: if "SWIFT" in trig_id: if obs in xml_cache: prev_short = xml_cache[obs].short else: prev_short = False # best bet if we don't know grb.info("Curently observing a SWIFT trigger") if grb.short and not prev_short: grb.info("Interrupting with a short SWIFT GRB") else: grb.info("Not interrupting previous observation") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject= 'GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return else: grb.info("Not interrupting previous obs") handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return # if we are observing a FERMI trigger but not the trigger we just received elif 'Fermi' in obs: # SWIFT > Fermi if "SWIFT" in trig_id: grb.info("Replacing a Fermi trigger with a SWIFT trigger") else: grb.info( "Currently observing a different Fermi trigger, not interrupting" ) handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))]) return else: grb.info("Not currently observing any GRBs") else: grb.debug("Current schedule empty") emaildict = { 'triggerid': grb.trigger_id, 'trigtime': Time.now().iso, 'ra': Angle(grb.ra[-1], unit=astropy.units.deg).to_string(unit=astropy.units.hour, sep=':'), 'dec': Angle(grb.dec[-1], unit=astropy.units.deg).to_string(unit=astropy.units.deg, sep=':'), 'err': grb.err[-1] } email_text = EMAIL_TEMPLATE % emaildict email_subject = EMAIL_SUBJECT_TEMPLATE % grb.trigger_id # Do the trigger result = grb.trigger_observation( ttype=this_trig_type, obsname=trig_id, time_min=req_time_min, pretend=pretend, project_id=PROJECT_ID, secure_key=SECURE_KEY, email_tolist=NOTIFY_LIST, email_text=email_text, email_subject=email_subject, creator='VOEvent_Auto_Trigger: GRB_Fermi_swift=%s' % __version__, voevent=voeventparse.dumps(v)) if result is None: handlers.send_email( from_address='*****@*****.**', to_addresses=DEBUG_NOTIFY_LIST, subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id, msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]), attachments=[('voevent.xml', voeventparse.dumps(v))])
def pointing_galactic(self): """Pointing positions as Galactic (`~astropy.coordinates.SkyCoord`).""" return SkyCoord( self["GLON_PNT"], self["GLAT_PNT"], unit="deg", frame="galactic" )
def get_trilegal(filename, ra, dec, folder='.', galactic=False, filterset='kepler_2mass', area=1, magnum=1, maglim=27, binaries=False, trilegal_version='1.7', sigma_AV=0.1): """ Calls the TRILEGAL web form simulation and downloads the file. Parameters ---------- filename : string Output filename. If extension not provided, it will be added. ra : float Coordinate for line-of-sight simulation. dec : float Coordinate for line-of-sight simulation. folder : string, optional Folder to which to save file. filterset : string, optional Filter set for which to call TRILEGAL. area : float, optional Area of TRILEGAL simulation [sq. deg] magnum : integer, optional Bandpass number to limit source magnitudes in. maglim : integer, optional Limiting magnitude in ``magnum`` bandpass of the ``filterset``. binaries : boolean, optional Whether to have TRILEGAL include binary stars. Default ``False``. trilegal_version : float, optional Version of the TRILEGAL API to call. Default ``'1.7'``. sigma_AV : float, optional Fractional spread in A_V along the line of sight. """ if galactic: l, b = ra, dec else: try: c = SkyCoord(ra, dec) except UnitsError: c = SkyCoord(ra, dec, unit='deg') l, b = (c.galactic.l.value, c.galactic.b.value) if os.path.isabs(filename): folder = '' if not re.search(r'\.dat$', filename): outfile = '{}/{}.dat'.format(folder, filename) else: outfile = '{}/{}'.format(folder, filename) AV = get_AV_infinity(l, b, frame='galactic') trilegal_webcall(trilegal_version, l, b, area, binaries, AV, sigma_AV, filterset, magnum, maglim, outfile) return AV
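# --- Hedged usage sketch for get_trilegal() above; it submits a job to the
# TRILEGAL web form via trilegal_webcall(), so network access is required, and
# the field name and line-of-sight coordinates here are purely illustrative.
AV = get_trilegal('test_field', 280.0, -30.0, folder='.', area=0.5, maglim=24)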
def select_observations(self, selection=None):
    """Select subset of observations.

    Returns a new observation table representing the subset.

    There are 3 main kinds of selection criteria, according to the value of
    the **type** keyword in the **selection** dictionary:

    - sky regions

    - time intervals (min, max)

    - intervals (min, max) on any other parameter present in the
      observation table, that can be cast into an
      `~astropy.units.Quantity` object

    Allowed selection criteria are interpreted using the following
    keywords in the **selection** dictionary under the **type** key.

    - ``sky_circle`` is a circular region centered in the coordinate
      marked by the **lon** and **lat** keywords, and radius **radius**;
      uses `~gammapy.catalog.select_sky_circle`

    - ``time_box`` is a 1D selection criterion acting on the observation
      start time (**TSTART**); the interval is set via the **time_range**
      keyword; uses `~gammapy.data.ObservationTable.select_time_range`

    - ``par_box`` is a 1D selection criterion acting on any parameter
      defined in the observation table that can be cast into an
      `~astropy.units.Quantity` object; the parameter name and interval
      can be specified using the keywords **variable** and **value_range**
      respectively; min = max selects exact values of the parameter;
      uses `~gammapy.data.ObservationTable.select_range`

    In all cases, the selection can be inverted by activating the
    **inverted** flag, in which case, the selection is applied to keep all
    elements outside the selected range.

    A few examples of selection criteria are given below.

    Parameters
    ----------
    selection : dict
        Dictionary with a few keywords for applying selection cuts.

    Returns
    -------
    obs_table : `~gammapy.data.ObservationTable`
        Observation table after selection.

    Examples
    --------
    >>> selection = dict(type='sky_circle', frame='galactic',
    ...                  lon=Angle(0, 'deg'),
    ...                  lat=Angle(0, 'deg'),
    ...                  radius=Angle(5, 'deg'),
    ...                  border=Angle(2, 'deg'))
    >>> selected_obs_table = obs_table.select_observations(selection)

    >>> selection = dict(type='time_box',
    ...                  time_range=Time(['2012-01-01T01:00:00',
    ...                                   '2012-01-01T02:00:00']))
    >>> selected_obs_table = obs_table.select_observations(selection)

    >>> selection = dict(type='par_box', variable='ALT',
    ...                  value_range=Angle([60., 70.], 'deg'))
    >>> selected_obs_table = obs_table.select_observations(selection)

    >>> selection = dict(type='par_box', variable='OBS_ID',
    ...                  value_range=[2, 5])
    >>> selected_obs_table = obs_table.select_observations(selection)

    >>> selection = dict(type='par_box', variable='N_TELS',
    ...                  value_range=[4, 4])
    >>> selected_obs_table = obs_table.select_observations(selection)
    """
    if "inverted" not in selection:
        selection["inverted"] = False
    if "partial_overlap" not in selection:
        selection["partial_overlap"] = False

    if selection["type"] == "sky_circle":
        lon = Angle(selection["lon"], "deg")
        lat = Angle(selection["lat"], "deg")
        radius = Angle(selection["radius"])
        if "border" in selection:
            border = Angle(selection["border"])
        else:
            border = Angle(0, "deg")
        region = SphericalCircleSkyRegion(
            center=SkyCoord(lon, lat, frame=selection["frame"]),
            radius=radius + border,
        )
        mask = region.contains(self.pointing_radec)
        if selection["inverted"]:
            mask = np.invert(mask)
        return self[mask]
    elif selection["type"] == "time_box":
        return self.select_time_range(
            selection["time_range"],
            selection["partial_overlap"],
            selection["inverted"],
        )
    elif selection["type"] == "par_box":
        return self.select_range(
            selection["variable"], selection["value_range"], selection["inverted"]
        )
    else:
        raise ValueError(f"Invalid selection type: {selection['type']}")
def skycoord(self): return SkyCoord(self.lon, self.lat, unit="deg", frame=coordsys_to_frame(self.coordsys))
def batss_pointing_detect(obs_id,      # should be BATSS_slew object?
                          ra, dec,     # Source RA/Dec
                          eband_name,  # Source energy band
                          err_rad):    # Source error radius (arcmin, 90%)
    '''
    Run imaging and detection for BAT pointings before and after a given
    slew, for given sky coordinates and energy band
    '''
    # Check input parameters
    obs = []  # Initialize observation list
    if isinstance(obs_id, list):
        for obs_id0 in obs_id:
            obs.append(BATSS_slew(obs_id0))
    else:
        obs.append(BATSS_slew(obs_id))
    pos = SkyCoord(ra, dec, unit='deg')
    coord_str = ('J' + pos.ra.to_string(unit='hour', pad=True, sep='', fields=2)
                 + (10*pos.dec).to_string(pad=True, sep='', fields=1,
                                          alwayssign=True))
    coord_str_tex = coord_str[:5]+'$'+coord_str[5]+'$'+coord_str[6:]  # TeX
    eband = BATSS_eband(eband_name)
    err_rad = err_rad * u.arcmin
    # Input/Output directories
    root = BATSS_dir.root
    dataroot = './data/'
    if not os.path.exists(dataroot):
        os.makedirs(dataroot)
    # Loop over BATSS observations
    for obs0 in obs:
        t0 = datetime.now()
        ## Time object with slew date
        #obs_date = Time('20'+obs0.id[:2]+'-'+obs0.id[2:4]+'-'+obs0.id[4:6])
        obs_date = datetime(int('20'+obs0.id[:2]), int(obs0.id[2:4]),
                            int(obs0.id[4:6]))
        print(f'{70*"="} {datetime.now():%c}')
        print('BATSS Observation type and ID: ', obs0.type.upper(), obs0.id)
        print('Coordinates to search (J2000): ', pos.to_string('hmsdms'))
        print('Energy band: '+eband.name+' ('+eband.str_keV+')')
        # Output directories
        datadir = (dataroot+obs0.type+'_'+obs0.id+'_'+coord_str
                   +'_'+eband.name+'/')
        if not os.path.exists(datadir):
            os.makedirs(datadir)
        tempdir = datadir+'temp/'
        if not os.path.exists(tempdir):
            os.makedirs(tempdir)
        # Initialize output txt file
        txtfile = (datadir+obs0.type+'_'+obs0.id+'_'+coord_str
                   +'_'+eband.name+'.txt')
        f = open(txtfile, 'w')
        f.write(f'{70*"="} {datetime.now():%c}\n')
        f.write('BATSS Observation type and ID: '
                + obs0.type.upper()+' '+obs0.id+'\n')
        f.write('Coordinates to search (J2000): '
                + pos.to_string('hmsdms')+'\n')
        f.write('Energy band: '+eband.name+' ('+eband.str_keV+')'+'\n')
        f.close()
        # Input catalog file
        catfile_in = tempdir+'batss.in.cat'
        # CATNUM: Source number within catalog
        # NAME: Source name
        # RA_CAT/GLON_CAT: Catalogued source longitude
        # DEC_CAT/GLAT_CAT: Catalogued source latitude
        # RA_OBJ/GLON_OBJ: Source longitude (to be modified upon detection)
        # DEC_OBJ/GLAT_OBJ: Source latitude (to be modified upon detection)
        # ERR_RAD_BATSS: BATSS error radius (90%, deg)
        cat_in_Table = Table(
            {'CATNUM': [0],
             'NAME': ['BATSS_'+coord_str],
             'RA_OBJ': [pos.ra.value] * pos.ra.unit,
             'DEC_OBJ': [pos.dec.value] * pos.dec.unit,
             'RA_CAT': [pos.ra.value] * pos.ra.unit,
             'DEC_CAT': [pos.dec.value] * pos.dec.unit,
             'ERR_RAD_BATSS': [err_rad.to_value(u.deg)] * u.deg},
            names=('CATNUM', 'NAME', 'RA_OBJ', 'DEC_OBJ', 'RA_CAT', 'DEC_CAT',
                   'ERR_RAD_BATSS'))  # Specifies column order
        cat_in = fits.BinTableHDU(cat_in_Table, name='BAT_CATALOG')
        cat_in.header.set('HDUNAME', 'BAT_CATALOG', 'Name of extension',
                          before='TTYPE1')  # Necessary?
cat_in.header.set('HDUCLASS', 'CATALOG', 'Source catalog', before='TTYPE1') cat_in.header.comments['TTYPE1'] = 'Source number within catalog' cat_in.header.set('TNULL1', -1, 'data null value', after='TFORM1') cat_in.header.comments['TTYPE2'] = 'Source name' cat_in.header.comments['TTYPE3'] = 'Detected source longitude' cat_in.header.comments['TUNIT3'] = 'physical unit of field' cat_in.header.set('TDISP3', 'F10.4', 'column display format', after='TUNIT3') cat_in.header.comments['TTYPE4'] = 'Detected source latitude' cat_in.header.comments['TUNIT4'] = 'physical unit of field' cat_in.header.set('TDISP4', 'F10.4', 'column display format', after='TUNIT4') cat_in.header.comments['TTYPE5'] = 'Catalogued source longitude' cat_in.header.comments['TUNIT5'] = 'physical unit of field' cat_in.header.set('TDISP5', 'F10.4', 'column display format', after='TUNIT5') cat_in.header.comments['TTYPE6'] = 'Catalogued source latitude' cat_in.header.comments['TUNIT6'] = 'physical unit of field' cat_in.header.set('TDISP6', 'F10.4', 'column display format', after='TUNIT6') cat_in.header.comments['TTYPE7'] = 'BATSS cat_in. error radius (90%)' cat_in.header.comments['TUNIT7'] = 'physical unit of field' cat_in.header.set('TDISP7', 'F6.4', 'column display format', after='TUNIT7') cat_in.writeto(catfile_in, overwrite=True) # Get master FITS header for slew (archival by default) flag_realtime = False if os.path.exists(obs0.fitsfile): hdrfile = obs0.fitsfile hdrext = 0 else: print('Warning: No archival master FITS file found for' f' {obs0.type} {obs0.id}. Getting header info from queue file.') if os.path.exists(obs0.queuefile): hdrfile = obs0.queuefile hdrext = obs0.type+'_'+obs0.id else: print('Warning: No archival queue file found for' f' {obs0.type} {obs0.id}. Getting header info from' f' real-time data') flag_realtime = True if os.path.exists(obs0.fitsfile_realtime): hdrfile = obs0.fitsfile_realtime hdrext = 0 else: print('Warning: No real-time master FITS file found for' f' {obs0.type} {obs0.id}. Getting header info from' f' queue file.') if os.path.exists(obs0.queuefile_realtime): hdrfile = obs0.queuefile_realtime hdrext = obs0.type+'_'+obs0.id else: raise IOError('Neither archival nor real-time files' f' found for {obs0.type} {obs0.id}') #fitsfile = obs0.fitsfile_realtime if flag_realtime else obs0.fitsfile print('Header file: '+hdrfile) print('Extension:') print(hdrext) try: header = fits.getheader(hdrfile, hdrext) except IOError as err: raise IOError(err) except: print('Some other error! (hdrfile)') # Partial coding map pcfile = obs0.pcfile_realtime if flag_realtime else obs0.pcfile try: if not os.path.exists(pcfile): # Try getting default partial coding map print('Warning: Partial coding file (' +('realtime' if flag_realtime else 'archival') +') does not exist. Reading from default file.') pcfile = BAT_pcfile_def() print('Partial coding map file: '+pcfile) pcmap, pchdr = fits.getdata(pcfile, header=True) except IOError: raise except: print('Some other error! 
(pcfile)')
        else:
            dims_pcmap = np.shape(pcmap)
        # Attitude file
        attfile = obs0.attfile_realtime if flag_realtime else obs0.attfile
        try:
            if not os.path.exists(attfile):
                raise IOError('Attitude file ('
                              + ('realtime' if flag_realtime else 'archival')
                              + ') does not exist')
            att = fits.getdata(attfile, 1)
        except IOError:
            raise
        else:
            flag_settled = 192  # (binary) FLAGS field for settled spacecraft
        # Get time windows for preceding and following pointings
        obs_t0 = header['BEG_SLEW']  # [MET]
        gti_pre = {'start': 0, 'stop': header['BEG_SLEW']}  # [MET]
        gti_pre_sod = {'start': 0,
                       'stop': (int(obs0.id[7:9])*3600 + int(obs0.id[10:12])*60
                                + int(obs0.id[13:15]))}  # [SOD]
        gti_pos = {'start': header['END_SLEW'], 'stop': 0}  # [MET]
        gti_pos_sod = {'start': gti_pre_sod['stop'] + int(obs0.id[17:20]),
                       'stop': 0}  # [SOD]
        queuefile = obs0.queuefile_realtime if flag_realtime else obs0.queuefile
        try:
            with fits.open(queuefile) as queue_hdul:
                w = np.array([hdu.name == 'SLEW_'+obs0.id
                              for hdu in queue_hdul]).nonzero()[0]
                assert len(w) == 1
        except IOError:
            raise
        else:
            w = w[0]
        # Beginning of preceding pointing
        if w == 1:
            # Get slew from previous day
            date_pre = obs_date - timedelta(days=1)
            queuefile_pre = root + (
                f'products/{date_pre.year:04}_{date_pre.month:02}/'
                f'queue{"_realtime" if flag_realtime else ""}/'
                f'queue_{date_pre.year % 100:02}{date_pre.month:02}'
                f'{date_pre.day:02}_{obs0.type}.fits')
            try:
                with fits.open(queuefile_pre) as queue_pre_hdul:
                    wpre = len(queue_pre_hdul)
                    gti_pre_sod['start'] = -86400
            except OSError:
                print('File not found: '+queuefile_pre)
                raise
        else:
            queuefile_pre = queuefile
            queue_pre_hdul = queue_hdul
            wpre = w-1
        slew_id_pre = queue_pre_hdul[wpre].name[5:]
        gti_pre['start'] = fits.getval(queuefile_pre, 'END_SLEW', ext=wpre)
        gti_pre_sod['start'] += (int(slew_id_pre[7:9])*3600
                                 + int(slew_id_pre[10:12])*60
                                 + int(slew_id_pre[13:15])
                                 + int(slew_id_pre[17:20]))
        # End of following pointing
        if w == len(queue_hdul):
            # Get slew from following day
            date_pos = obs_date + timedelta(days=1)
            queuefile_pos = root + (
                f'products/{date_pos.year:04}_{date_pos.month:02}/'
                f'queue{"_realtime" if flag_realtime else ""}/'
                f'queue_{date_pos.year % 100:02}{date_pos.month:02}'
                f'{date_pos.day:02}_{obs0.type}.fits')
            try:
                with fits.open(queuefile_pos) as queue_pos_hdul:
                    wpos = len(queue_pos_hdul)
                    gti_pos_sod['stop'] = 86400
            except OSError:
                print('File not found: '+queuefile_pos)
                raise
        else:
            queuefile_pos = queuefile
            queue_pos_hdul = queue_hdul
            wpos = w+1
        slew_id_pos = queue_pos_hdul[wpos].name[5:]
        gti_pos['stop'] = fits.getval(queuefile_pos, 'BEG_SLEW', ext=wpos)
        gti_pos_sod['stop'] += (int(slew_id_pos[7:9])*3600
                                + int(slew_id_pos[10:12])*60
                                + int(slew_id_pos[13:15]))
        # Read AFST files for previous, current and following days
        afst_obs_id = []
        afst_yymmdd = []
        afst_start_sod = []
        afst_stop_sod = []
        for d in [-1, 0, 1]:
            date0 = obs_date + timedelta(days=d)
            yymmdd = f'{date0.year % 100:02}{date0.month:02}{date0.day:02}'
            afstfile = (root + f'products/{date0.year:04}_{date0.month:02}/'
                        f'afst/afst_{date0.year % 100:02}{date0.month:02}'
                        f'{date0.day:02}.html')
            try:
                with open(afstfile, 'r') as f0:
                    afst_soup = BeautifulSoup(f0, features='lxml')
            except OSError:
                raise
            tr = afst_soup.find_all('tr')
            for tr0 in tr:
                try:
                    afst_class = tr0['class'][0]
                except KeyError:
                    continue
                if afst_class == 'header':
                    continue
                td0 = tr0.find_all('td')
                start0 = td0[0].get_text(strip=True)
                start_sod0 = ((datetime(int(start0[:4]), int(start0[5:7]),
                                        int(start0[8:10])) - obs_date).days*86400
                              + int(start0[11:13])*3600 + int(start0[14:16])*60
                              + int(start0[17:19]))
                stop0 =
td0[1].get_text(strip=True) stop_sod0 = (datetime(int(stop0[:4]), int(stop0[5:7]), int(stop0[8:10])) - obs_date).days*86400 + int(stop0[11:13])*3600 + int(stop0[14:16])*60 + int(stop0[17:19]) afst_obs_id.append(td0[2].a.text.zfill(8) + td0[3].a.text.zfill(3)) afst_yymmdd.append(yymmdd) afst_start_sod.append(start_sod0) afst_stop_sod.append(stop_sod0) point = Table({'obs_id':afst_obs_id, 'yymmdd':afst_yymmdd, 'start_sod':afst_start_sod, 'stop_sod':afst_stop_sod}) del afst_obs_id, afst_yymmdd, afst_start_sod, afst_stop_sod # Get Observation IDs for preceding and following pointings dt_pre = point['stop_sod'].clip(max=gti_pre_sod['stop']) - point['start_sod'].clip(min=gti_pre_sod['start']) upre = np.argmax(dt_pre) dt_pre = dt_pre[upre] assert dt_pre > 0 obs_id_pre = point[upre]['obs_id'] yymmdd_pre = point[upre]['yymmdd'] dt_pos = point['stop_sod'].clip(max=gti_pos_sod['stop']) - point['start_sod'].clip(min=gti_pos_sod['start']) upos = np.argmax(dt_pos) dt_pos = dt_pos[upos] assert dt_pos > 0 obs_id_pos = point[upos]['obs_id'] yymmdd_pos = point[upos]['yymmdd'] del point # Save GTI files for preceding and following pointings gtifile_pre = tempdir+obs0.type+'_'+obs0.id+'_pre.gti' gti_pre_Table = Table({'START':[gti_pre['start']] * u.s, 'STOP':[gti_pre['stop']] * u.s}, names=('START','STOP')) gtihdr_pre = BATSS_gtihdr(gti_pre_Table) hdu_pre = fits.BinTableHDU(gti_pre_Table, header=gtihdr_pre) hdu_pre.writeto(gtifile_pre, overwrite=True) gtifile_pos = tempdir+obs0.type+'_'+obs0.id+'_pos.gti' gti_pos_Table = Table({'START':[gti_pos['start']] * u.s, 'STOP':[gti_pos['stop']] * u.s}, names=('START','STOP')) gtihdr_pos = BATSS_gtihdr(gti_pos_Table) hdu_pos = fits.BinTableHDU(gti_pos_Table, header=gtihdr_pos) hdu_pos.writeto(gtifile_pos, overwrite=True) # Perform BATSURVEY analysis on preceding and following pointings obs0.src_name = 'BATSS '+coord_str # Include BATSS source name obs0.src_name_tex = 'BATSS '+coord_str_tex # TeX formatted obs0.eband = eband for flag_pre in [True, False]: print(f'{70*"="} {datetime.now():%c}') f = open(txtfile, 'a') if flag_pre: print('PRECEDING POINTING. ',end='') f.write(f'\n{95*"="}\nPRECEDING POINTING. ') prefix = 'pre' gtifile = gtifile_pre obs_id = obs_id_pre yymmdd_point = yymmdd_pre else: print('FOLLOWING POINTING. ',end='') f.write(f'\n{95*"="}\nFOLLOWING POINTING. 
') prefix = 'pos' gtifile = gtifile_pos obs_id = obs_id_pos yymmdd_point = yymmdd_pos yyyy_mm_point = '20'+yymmdd_point[:2]+'_'+yymmdd_point[2:4] print(f'Observation ID: {obs_id}') f.write(f'Observation ID: {obs_id}\n') # Get coding fraction of source from attitude data gti = fits.getdata(gtifile,1) w = ((att['time'] >= gti['start']) & (att['time'] <= gti['stop'])).nonzero()[0] assert len(w) > 0 #print(f'Attitude records found within GTI: {len(w)}') w0 = (att[w]['flags'] == flag_settled).nonzero()[0] assert len(w0) > 0 w = w[w0] #print(f'Settled records: {len(w)}') w0 = (att[w]['obs_id'] == obs_id).nonzero()[0] if len(w0) == 0: str_out = ('WARNING: No settled attitude records found for' f' Observation {obs_id}') print(str_out) f.write('\t'+str_out+'\n') obs_id0, obs_id0_pos = np.unique(att[w]['obs_id'], return_inverse=True) obs_id0_cts = np.bincount(obs_id0_pos) imax = obs_id0_cts.argmax() str_out = (f'\tUsing most frequent Obs ID: {obs_id0[imax]}' f' ({obs_id0_cts[imax]} records)') print(str_out) f.write(str_out+'\n') obs_id = obs_id0[imax] w0 = (obs_id0_pos == imax).nonzero()[0] assert len(w0) > 0 del obs_id0, obs_id0_pos, obs_id0_cts, imax w = w[w0] w0 = w[len(w)//2] ra0 = att[w0]['pointing'][0] dec0 = att[w0]['pointing'][1] roll0 = att[w0]['pointing'][2] # Modify pchdr astrometry pchdr = BAT_astrmod(pchdr, ra=ra0, dec=dec0, roll=roll0) #fits.PrimaryHDU(pcmap, pchdr).writeto(datadir+'test_pchdr_' # +prefix+'.fits', overwrite=True) #TEMP pcwcs = wcs.WCS(pchdr) pix = pcwcs.all_world2pix([[pos.ra.deg, pos.dec.deg]], 1)[0].round().astype(int)[::-1] # For [y,x] indexing! pix = pix.clip(1, dims_pcmap) - 1 pcodefr0 = 100 * pcmap[pix[0], pix[1]] str_out = f'Source coding fraction: {pcodefr0:6.2f}%. ' print(str_out, end='') f.write(str_out) if pcodefr0 == 0: str_out = 'Pointing skipped' print(str_out) f.write(str_out+'\n') if flag_pre: obs0.cat_pre = [] else: obs0.cat_pos = [] continue print('Downloading pointing data... 
', end='') t1 = datetime.now() obsdir = datadir+prefix+'_'+obs0.type+'_'+obs0.id+'/' command = ['wget' # basic command ' -q' # turn off output ' -r -l0' # recursive retrieval (max depth 0) ' -nH' # no host-prefixed directories ' --cut-dirs=7' # also ignore 7 directories ' -np' # do not ascend to parent directory f' --directory-prefix={obsdir}' # top directory for output ' --no-check-certificate' # don't check server certificate ' -c' # continue partial downloading ' -N' # use same timestamping as remote file " -R'index*'" # reject all 'index*' files ' -erobots=off' # turn off Robots Exclusion Standard ' --retr-symlinks' # download symbolic links ' http://heasarc.gsfc.nasa.gov/FTP/swift/data/obs/' f'{yyyy_mm_point}//{obs_id}/'+s for s in ['bat/','auxil/']] for command0 in command: subp.run(command0.split(' ')) str_out = f'({(datetime.now()-t1).seconds}s)' print('done '+str_out) f.write(f'Pointing data downloaded {str_out}\n') f.close() # Loop over DPH and SNAPSHOT imaging datadir_in = obsdir cat_tex = [] for flag_dph in [False, True]: gti_ntries = 0 while gti_ntries < 2: gti_ntries += 1 print(f'{70*"-"} {datetime.now():%c}') print(('DPH' if flag_dph else 'SNAPSHOT')+' loop:') print(f' GTI loop {gti_ntries}: '+ ('Standard filtering' if gti_ntries == 1 else 'USERGTI filtering only')) datadir_out = (obsdir+'results_' +eband.name+('_dph' if flag_dph else '')+'/') # BATSURVEY command command = ['batsurvey', datadir_in, datadir_out, 'energybins='+eband.str, 'elimits='+eband.str, 'incatalog='+catfile_in, 'ncleaniter=2', #Always clean DPH # Apply DPH keyword 'timesep='+('DPH' if flag_dph else 'SNAPSHOT'), 'filtnames='+('all' if gti_ntries == 1 else ('global,pointing,filter_file,startracker,' 'st_lossfcn,data_flags,earthconstraints,' 'remove_midnight,occultation,usergti')), 'gtifile='+gtifile, # Minimum exposure threshold 'expothresh=150.0'] print(' '.join(command)) subp.run(command) # Find if master GTI file was created gtifile_out = glob.glob(datadir_out+'gti/master.gti') if len(gtifile_out) > 0: if gti_ntries == 1: gti_text = 'Standard' elif gti_ntries == 2: gti_text = 'Standard failed. USERGTI only' break else: if gti_ntries == 1: print('Standard GTI filtering failed. ', end='') if flag_dph: print('DPH binning does not work with ' 'USERGTI. Aborting') gti_text = ('Standard failed. DPH binning ' 'does not work with USERGTI filtering') break else: print('Standard GTI filtering failed.' ' Trying USERGTI only') elif gti_ntries == 2: print('Standard GTI and USERGTI filtering failed.' ' Aborting') gti_text = 'Standard and USERGTI failed' # Get output catalogs cat_out = [] catfile_out = glob.glob(datadir_out+'point_*/point_*_2.cat') catfile = (datadir+prefix+'_'+obs0.type+'_'+obs0.id +'_'+coord_str+'_'+eband.name +('_dph' if flag_dph else '')+'.cat') if len(catfile_out) > 0: print(('DPH' if flag_dph else 'SNAPSHOT') +' catalogs found:', len(catfile_out)) else: print('Warning: No '+('DPH' if flag_dph else 'SNAPSHOT') +' catalogs found. 
Skipping') for catfile_out0 in catfile_out: print('Catalog file: '+catfile_out0) t_ss = os.path.basename(catfile_out0).split('_')[1] print(f' {t_ss[:4]}-{t_ss[4:7]}-{t_ss[7:9]}' f':{t_ss[9:11]}...', end='') cat0, hdr0 = fits.getdata(catfile_out0, 1, header=True) cat0_name = cat0['name'].strip() #cat0['name'] = cat0['name'].strip() #cat0['rate'] /= 0.16 #[cts/cm2/sec] #cat0['cent_rate'] /= 0.16 #cat0['rate_err'] /= 0.16 #cat0['bkg_var'] /= 0.16 w = (cat0_name == 'BATSS_'+coord_str).nonzero()[0] if len(w) > 0: cat0 = Table(cat0) for w0 in w: if len(cat_out) == 0: cat0[w0]['CATNUM'] = 1 cat_out = Table(cat0[w0]) hdr_out = hdr0 hdr_out.remove('HISTORY', ignore_missing=True, remove_all=True) hdr_out['EXTNAME'] = 'BATSURVEY_CATALOG' hdr_out['HDUNAME'] = 'BATSURVEY_CATALOG' # Index for new sources in catalog hdr_out['NEWSRCIN'] = 2 else: cat0[w0]['CATNUM'] = hdr_out['NEWSRCIN'] hdr_out['NEWSRCIN'] += 1 cat_out.add_row(cat0[w0]) # Save catalog file n_det = len(cat_out) with open(txtfile,'a') as f: f.write(f'\n{"DPH" if flag_dph else "SNAPSHOT"}' ' processing:\n') f.write(f'GTI filtering: {gti_text}\n') f.write(f'Detections: {n_det if n_det > 0 else "NONE"}\n') if n_det > 0: fits.BinTableHDU(cat_out, hdr_out).writeto(catfile, overwrite=True) print(f'Saved {n_det} detection(s) of' f' BATSS_{coord_str} to file {catfile}') f.write(' '.join([' #', f'{"Time_start":23s}', f'{"Time_stop":23s}', f'{"Exp[s]":7s}', f'{"CF[%]":6s}', 'S/N(pix)','S/N(fit)'])+'\n') for cat0 in cat_out: f.write(' '.join([f'{cat0["CATNUM"]:2}', met2Time(cat0['TIME']).iso, met2Time(cat0['TIME_STOP']).iso, f'{cat0["EXPOSURE"]:7.1f}', f'{100*cat0["PCODEFR"]:6.2f}', f'{cat0["CENT_SNR"]:8.2f}', f'{cat0["SNR"]:8.2f}']) +'\n') cat_tex.append({ 'dt':cat0['TIME']-obs_t0, 'exp':cat0['EXPOSURE'], 'cf':100*cat0['PCODEFR'], 'cent_snr':cat0['CENT_SNR'], 'snr':cat0['SNR'] }) if flag_pre: obs0.cat_pre = cat_tex else: obs0.cat_pos = cat_tex str_out = ('\nDONE. Processing time: ' +str(datetime.now()-t0).split('.')[0]) print(str_out) with open(txtfile, 'a') as f: f.write(str_out+'\n') print('Closed output text file: ', f.name) return obs
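# --- Hedged usage sketch for batss_pointing_detect() above; the slew ID,
# energy-band name, and source position are placeholders, and the BATSS data
# tree (archival or real-time) must exist under BATSS_dir.root for it to run.
obs = batss_pointing_detect('170101_123456_100',  # hypothetical slew ID
                            ra=266.40, dec=-29.01,
                            eband_name='soft',     # assumed band name
                            err_rad=2.0)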
            response['hgc_y']) < 80.0:
        area = response['area_atdiskcenter']
        response_index = i

##############################################################################
# Next let's get the boundary of the coronal hole
ch = responses[response_index]
p1 = ch["hpc_boundcc"][9:-2]
p2 = p1.split(',')
p3 = [v.split(" ") for v in p2]
ch_date = parse_time(ch['event_starttime'])

##############################################################################
# The coronal hole was detected at a different time than the AIA image was
# taken, so we need to rotate it to the map observation time.
ch_boundary = SkyCoord([(float(v[0]), float(v[1])) * u.arcsec for v in p3],
                       obstime=ch_date, frame=frames.Helioprojective)
rotated_ch_boundary = solar_rotate_coordinate(ch_boundary, time=aia_map.date)

##############################################################################
# Now let's plot the rotated coronal hole boundary on the AIA map, and fill
# it with hatching.
fig = plt.figure()
ax = plt.subplot(projection=aia_map)
aia_map.plot(axes=ax)
ax.plot_coord(rotated_ch_boundary, color='c')
ax.set_title('{:s}\n{:s}'.format(aia_map.name, ch['frm_specificid']))
plt.colorbar()
plt.show()
def f(x, coords): """Function to minimize""" lon, lat = x center = SkyCoord(lon * u.deg, lat * u.deg) return np.sum(center.separation(coords).deg)
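# --- Hedged sketch showing how f() above can be minimized to find the point
# with the smallest summed angular separation from a list of coordinates
# (scipy is assumed to be available; the coordinates are illustrative).
import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord
from scipy.optimize import minimize

coords = SkyCoord([10., 12., 11.] * u.deg, [41., 40., 42.] * u.deg)
result = minimize(f, x0=[11., 41.], args=(coords,), method='Nelder-Mead')
best_lon, best_lat = result.x  # barycenter-like position, in degrees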
def test_docs_example():
    # Test the example in astroplan/docs/tutorials/constraints.rst
    target_table_string = """# name ra_degrees dec_degrees
    Polaris 37.95456067 89.26410897
    Vega 279.234734787 38.783688956
    Albireo 292.68033548 27.959680072
    Algol 47.042218553 40.955646675
    Rigel 78.634467067 -8.201638365
    Regulus 152.092962438 11.967208776"""

    from astroplan import Observer, FixedTarget
    from astropy.time import Time
    subaru = Observer.at_site("Subaru")
    time_range = Time(["2015-08-01 06:00", "2015-08-01 12:00"])

    # Read in the table of targets
    from astropy.io import ascii
    target_table = ascii.read(target_table_string)

    # Create astroplan.FixedTarget objects for each one in the table
    from astropy.coordinates import SkyCoord
    import astropy.units as u
    targets = [FixedTarget(coord=SkyCoord(ra=ra * u.deg, dec=dec * u.deg),
                           name=name)
               for name, ra, dec in target_table]

    from astroplan import Constraint, is_observable

    class VegaSeparationConstraint(Constraint):
        """
        Constrain the separation from Vega
        """

        def __init__(self, min=None, max=None):
            """
            min : `~astropy.units.Quantity` or `None` (optional)
                Minimum acceptable separation between Vega and target.
                `None` indicates no limit.
            max : `~astropy.units.Quantity` or `None` (optional)
                Maximum acceptable separation between Vega and target.
                `None` indicates no limit.
            """
            self.min = min if min is not None else 0 * u.deg
            self.max = max if max is not None else 180 * u.deg

        def compute_constraint(self, times, observer, targets):
            vega = SkyCoord(ra=279.23473479 * u.deg, dec=38.78368896 * u.deg)

            # Calculate separation between target and vega
            # Targets are automatically converted to SkyCoord objects
            # by __call__ before compute_constraint is called.
            vega_separation = vega.separation(targets)

            # Return an array that is True where the target is observable and
            # False where it is not
            return (self.min < vega_separation) & (vega_separation < self.max)

    constraints = [VegaSeparationConstraint(min=5 * u.deg, max=30 * u.deg)]
    observability = is_observable(constraints, subaru, targets,
                                  time_range=time_range)

    assert all(observability == [False, False, True, False, False, False])
def lonlat_to_skycoord(lon, lat, coordsys): return SkyCoord(lon, lat, frame=coordsys_to_frame(coordsys), unit="deg")
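# --- Hedged usage sketch; coordsys_to_frame() is defined elsewhere in this
# module, and the 'GAL' string is an assumed example of a supported coordsys.
gc = lonlat_to_skycoord(0.0, 0.0, 'GAL')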
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 17 21:05:15 2019

@author: souza
"""
from astropy import units as u
from astropy.coordinates import SkyCoord

# c = SkyCoord('21 33 27.02 -00 49 23.7', unit=(u.hourangle, u.deg))
# print(c)

ra_dec = input('Type RA (hour) and Dec (deg), e.g. 21 33 27.02 -00 49 23.7: ')
print(ra_dec)
c = SkyCoord(ra_dec, unit=(u.hourangle, u.deg))
print(c)
def drizzle_images(label='macs0647-jd1', ra=101.9822125, dec=70.24326667, pixscale=0.06, size=10, wcs=None, pixfrac=0.8, kernel='square', theta=0, half_optical_pixscale=False, filters=[ 'f160w', 'f140w', 'f125w', 'f105w', 'f110w', 'f098m', 'f850lp', 'f814w', 'f775w', 'f606w', 'f475w', 'f555w', 'f600lp', 'f390w', 'f350lp' ], remove=True, rgb_params=RGB_PARAMS, master='grizli-jan2019', aws_bucket='s3://grizli/CutoutProducts/', scale_ab=21, thumb_height=2.0, sync_fits=True, subtract_median=True, include_saturated=True, include_ir_psf=False, show_filters=['visb', 'visr', 'y', 'j', 'h'], combine_similar_filters=True): """ label='cp561356'; ra=150.208875; dec=1.850241667; size=40; filters=['f160w','f814w', 'f140w','f125w','f105w','f606w','f475w'] """ import glob import copy import os import numpy as np import astropy.io.fits as pyfits from astropy.coordinates import SkyCoord import astropy.units as u from drizzlepac.adrizzle import do_driz import boto3 from grizli import prep, utils from grizli.pipeline import auto_script if isinstance(ra, str): coo = SkyCoord('{0} {1}'.format(ra, dec), unit=(u.hour, u.deg)) ra, dec = coo.ra.value, coo.dec.value if label is None: try: import mastquery.utils label = mastquery.utils.radec_to_targname( ra=ra, dec=dec, round_arcsec=(1 / 15, 1), targstr='j{rah}{ram}{ras}{sign}{ded}{dem}{des}') except: label = 'grizli-cutout' #master = 'cosmos' #master = 'grizli-jan2019' if master == 'grizli-jan2019': parent = 's3://grizli/MosaicTools/' s3 = boto3.resource('s3') s3_client = boto3.client('s3') bkt = s3.Bucket('grizli') elif master == 'cosmos': parent = 's3://grizli-preprocess/CosmosMosaic/' s3 = boto3.resource('s3') s3_client = boto3.client('s3') bkt = s3.Bucket('grizli-preprocess') else: # Run on local files, e.g., "Prep" directory parent = None remove = False for ext in ['_visits.fits', '_visits.npy', '_filter_groups.npy'][-1:]: if (not os.path.exists('{0}{1}'.format(master, ext))) & (parent is not None): s3_path = parent.split('/')[-2] s3_file = '{0}{1}'.format(master, ext) print('{0}{1}'.format(parent, s3_file)) bkt.download_file(s3_path + '/' + s3_file, s3_file, ExtraArgs={"RequestPayer": "requester"}) #os.system('aws s3 cp {0}{1}{2} ./'.format(parent, master, ext)) #tab = utils.read_catalog('{0}_visits.fits'.format(master)) #all_visits = np.load('{0}_visits.npy'.format(master))[0] if parent is not None: groups = np.load('{0}_filter_groups.npy'.format(master), allow_pickle=True)[0] else: # Reformat local visits.npy into a groups file groups_files = glob.glob('*filter_groups.npy') if len(groups_files) == 0: visit_file = glob.glob('*visits.npy')[0] visits, groups, info = np.load(visit_file) visit_root = visit_file.split('_visits')[0] visit_filters = np.array( [v['product'].split('-')[-1] for v in visits]) groups = {} for filt in np.unique(visit_filters): groups[filt] = {} groups[filt]['filter'] = filt groups[filt]['files'] = [] groups[filt]['footprints'] = [] groups[filt]['awspath'] = None ix = np.where(visit_filters == filt)[0] for i in ix: groups[filt]['files'].extend(visits[i]['files']) groups[filt]['footprints'].extend(visits[i]['footprints']) np.save('{0}_filter_groups.npy'.format(visit_root), [groups]) else: groups = np.load(groups_files[0])[0] #filters = ['f160w','f814w', 'f110w', 'f098m', 'f140w','f125w','f105w','f606w', 'f475w'] has_filts = [] lower_filters = [f.lower() for f in filters] for filt in lower_filters: if filt not in groups: continue visits = [copy.deepcopy(groups[filt])] #visits[0]['reference'] = 
'CarlosGG/ak03_j1000p0228/Prep/ak03_j1000p0228-f160w_drz_sci.fits' visits[0]['product'] = label + '-' + filt if wcs is None: hdu = utils.make_wcsheader(ra=ra, dec=dec, size=size, pixscale=pixscale, get_hdu=True, theta=theta) h = hdu.header else: h = utils.to_header(wcs) if (filt[:2] in ['f0', 'f1', 'g1']) | (not half_optical_pixscale): #data = hdu.data pass else: for k in ['NAXIS1', 'NAXIS2', 'CRPIX1', 'CRPIX2']: h[k] *= 2 h['CRPIX1'] -= 0.5 h['CRPIX2'] -= 0.5 for k in ['CD1_1', 'CD1_2', 'CD2_1', 'CD2_2']: if k in h: h[k] /= 2 #data = np.zeros((h['NAXIS2'], h['NAXIS1']), dtype=np.int16) #pyfits.PrimaryHDU(header=h, data=data).writeto('ref.fits', overwrite=True, output_verify='fix') #visits[0]['reference'] = 'ref.fits' print('\n\n###\nMake filter: {0}'.format(filt)) if (filt.upper() in ['F105W', 'F125W', 'F140W', 'F160W' ]) & include_ir_psf: clean_i = False else: clean_i = remove status = utils.drizzle_from_visit(visits[0], h, pixfrac=pixfrac, kernel=kernel, clean=clean_i, include_saturated=include_saturated) if status is not None: sci, wht, outh = status if subtract_median: med = np.median(sci[sci != 0]) if not np.isfinite(med): med = 0. print('\n\nMedian {0} = {1:.3f}\n\n'.format(filt, med)) outh['IMGMED'] = (med, 'Median subtracted from the image') else: med = 0. outh['IMGMED'] = (0., 'Median subtracted from the image') pyfits.writeto('{0}-{1}_drz_sci.fits'.format(label, filt), data=sci, header=outh, overwrite=True, output_verify='fix') pyfits.writeto('{0}-{1}_drz_wht.fits'.format(label, filt), data=wht, header=outh, overwrite=True, output_verify='fix') has_filts.append(filt) if (filt.upper() in ['F105W', 'F125W', 'F140W', 'F160W' ]) & include_ir_psf: from grizli.galfit.psf import DrizzlePSF hdu = pyfits.open('{0}-{1}_drz_sci.fits'.format(label, filt), mode='update') flt_files = [] #visits[0]['files'] for i in range(1, 10000): key = 'FLT{0:05d}'.format(i) if key not in hdu[0].header: break flt_files.append(hdu[0].header[key]) dp = DrizzlePSF(flt_files=flt_files, driz_hdu=hdu[0]) psf = dp.get_psf(ra=dp.driz_wcs.wcs.crval[0], dec=dp.driz_wcs.wcs.crval[1], filter=filt.upper(), pixfrac=dp.driz_header['PIXFRAC'], kernel=dp.driz_header['KERNEL'], wcs_slice=dp.driz_wcs, get_extended=True, verbose=False, get_weight=False) psf[1].header['EXTNAME'] = 'PSF' #psf[1].header['EXTVER'] = filt hdu.append(psf[1]) hdu.flush() #psf.writeto('{0}-{1}_drz_sci.fits'.format(label, filt), # overwrite=True, output_verify='fix') #status = prep.drizzle_overlaps(visits, parse_visits=False, check_overlaps=True, pixfrac=pixfrac, skysub=False, final_wcs=True, final_wht_type='IVM', static=True, max_files=260, fix_wcs_system=True) # # if len(glob.glob('{0}-{1}*sci.fits'.format(label, filt))): # has_filts.append(filt) if combine_similar_filters: combine_filters(label=label) if remove: os.system('rm *_fl*fits') if len(has_filts) == 0: return [] if rgb_params: #auto_script.field_rgb(root=label, HOME_PATH=None, filters=has_filts, **rgb_params) show_all_thumbnails(label=label, thumb_height=thumb_height, scale_ab=scale_ab, close=True, rgb_params=rgb_params, filters=show_filters) if aws_bucket: #aws_bucket = 's3://grizli-cosmos/CutoutProducts/' #aws_bucket = 's3://grizli/CutoutProducts/' s3 = boto3.resource('s3') s3_client = boto3.client('s3') bkt = s3.Bucket(aws_bucket.split("/")[2]) aws_path = '/'.join(aws_bucket.split("/")[3:]) if sync_fits: files = glob.glob('{0}*'.format(label)) else: files = glob.glob('{0}*png'.format(label)) for file in files: print('{0} -> {1}'.format(file, aws_bucket)) bkt.upload_file(file, 
'{0}/{1}'.format(aws_path, file).replace('//', '/'), ExtraArgs={'ACL': 'public-read'}) #os.system('aws s3 sync --exclude "*" --include "{0}*" ./ {1} --acl public-read'.format(label, aws_bucket)) #os.system("""echo "<pre>" > index.html; aws s3 ls AWSBUCKETX --human-readable | sort -k 1 -k 2 | grep -v index | awk '{printf("%s %s",$1, $2); printf(" %6s %s ", $3, $4); print "<a href="$5">"$5"</a>"}'>> index.html; aws s3 cp index.html AWSBUCKETX --acl public-read""".replace('AWSBUCKETX', aws_bucket)) return has_filts
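# --- Hedged usage sketch for drizzle_images() above; the label and coordinates
# are illustrative. Any `master` value other than the two S3 presets makes the
# function look for local *_filter_groups.npy / *_visits.npy files, and
# aws_bucket=None skips the upload step.
filts = drizzle_images(label='example-cutout', ra=150.1, dec=2.2, size=8,
                       master='local', aws_bucket=None)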
def setup(self): self.region = SphericalCircleSkyRegion( center=SkyCoord(10 * u.deg, 20 * u.deg), radius=10 * u.deg )
from ..observer import Observer from ..target import FixedTarget, get_skycoord from ..constraints import ( AltitudeConstraint, AirmassConstraint, AtNightConstraint, is_observable, is_always_observable, observability_table, time_grid_from_range, GalacticLatitudeConstraint, SunSeparationConstraint, MoonSeparationConstraint, MoonIlluminationConstraint, TimeConstraint, LocalTimeConstraint, months_observable, max_best_rescale, min_best_rescale, PhaseConstraint, PrimaryEclipseConstraint, SecondaryEclipseConstraint, is_event_observable) from ..periodic import EclipsingSystem APY_LT104 = not minversion('astropy', '1.0.4') vega = FixedTarget(coord=SkyCoord(ra=279.23473479 * u.deg, dec=38.78368896 * u.deg), name="Vega") rigel = FixedTarget(coord=SkyCoord(ra=78.63446707 * u.deg, dec=8.20163837 * u.deg), name="Rigel") polaris = FixedTarget(coord=SkyCoord(ra=37.95456067 * u.deg, dec=89.26410897 * u.deg), name="Polaris") def test_at_night_basic(): subaru = Observer.at_site("Subaru") time_ranges = [ Time(['2001-02-03 04:05:06', '2001-02-04 04:05:06']), # 1 day Time(['2007-08-09 10:11:12', '2007-08-09 11:11:12']) ] # 1 hr
import math
import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.io import fits

############################################################
# basic setting
PA = 213.3
incl = 0.43
xcenter = 159.09
ycenter = 158.29  # corresponds to 13h39m57.692s, 0d49'50.838"
# ra = 13*15 + 39*15.0/60.0 + 57.692*15.0/3600.0
# dec = 49.0/60.0 + 50.838/3600.0
ra = 204.9903
dec = 0.8308

# 23 radii from 0.75" to 33.75" in 1.5" steps (np.linspace needs an int count)
steps = int((33.75 - 0.75) / 1.5 + 1)
radius_arcsec = np.linspace(0.75, 33.75, steps)
radius_kpc = radius_arcsec * 0.48
size = radius_arcsec.shape[0] - 1

position = SkyCoord(dec=dec * u.degree, ra=ra * u.degree, frame='icrs')
rings = dict.fromkeys(range(size))
rings_mask = dict.fromkeys(range(size))

pixel_area = 0.3 * 0.3  # arcsec^2 per pixel
pixel_sr = pixel_area / (60**2 * 180 / math.pi)**2  # arcsec^2 -> steradian
D = 99  # Mpc

majorbeam = 2.021  # arcsec
minorbeam = 1.610  # arcsec
beamarea = majorbeam * minorbeam * 1.1331  # Gaussian beam area, arcsec^2
beamarea_pix = beamarea / 0.09  # beam area in pixels

############################################################
# function
def fits_import(fitsimage, item=0):
    hdr = fits.open(fitsimage)[item].header
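# How the "rings" dict above is presumably filled: one elliptical annulus
# per radius step at the galaxy's PA and inclination. A hedged sketch using
# photutils (which the excerpt does not actually import); whether `incl` is
# an angle in radians or already an axis ratio is an assumption noted below.
from photutils.aperture import SkyEllipticalAnnulus

def make_ring(i):
    a_in = radius_arcsec[i] * u.arcsec
    a_out = radius_arcsec[i + 1] * u.arcsec
    # deproject assuming incl is in radians; if incl is already the
    # cos(i) axis ratio, multiply by incl directly instead
    b_out = a_out * math.cos(incl)
    return SkyEllipticalAnnulus(position, a_in=a_in, a_out=a_out,
                                b_out=b_out, theta=PA * u.deg)

# e.g. rings = {i: make_ring(i) for i in range(size)}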
def test_contains(self):
    coord = SkyCoord([20.1, 22] * u.deg, 20 * u.deg)
    mask = self.region.contains(coord)
    assert_equal(mask, [True, False])
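# A standalone sanity check of the same containment logic: a "spherical"
# circle contains points whose great-circle separation from the center is
# below the radius, which plain SkyCoord arithmetic reproduces without the
# region class (the values match the fixture in setup() above):
from astropy.coordinates import SkyCoord
import astropy.units as u

center = SkyCoord(10 * u.deg, 20 * u.deg)
coords = SkyCoord([20.1, 22] * u.deg, 20 * u.deg)
print(center.separation(coords) < 10 * u.deg)  # [ True False]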
def aia171_test_submap(aia171_test_map):
    # bottom-left and top-right corners of the cutout, in the map's frame
    bl = SkyCoord(-512 * u.arcsec, 100 * u.arcsec,
                  frame=aia171_test_map.coordinate_frame)
    ur = SkyCoord(-100 * u.arcsec, 400 * u.arcsec,
                  frame=aia171_test_map.coordinate_frame)
    return aia171_test_map.submap(bl, ur)
def get_altaz(obj_name, ipt_lon, ipt_lat, t=None):
    #for html scraping
    #from lxml import html
    #from bs4 import BeautifulSoup
    #to place requests
    import requests
    import json
    import math

    import astropy.units as u
    from astropy.time import Time
    from astropy.coordinates import SkyCoord, EarthLocation, Angle, Latitude, Longitude
    from astroplan import FixedTarget, Observer
    from astroquery.simbad import Simbad as simbad
    import ephem

    if t is None:
        t = Time.now()

    ## Set up the observer
    obs_el = 100 * u.m
    loc = EarthLocation.from_geodetic(ipt_lon, ipt_lat, obs_el)
    my_site = Observer(name='My_Site', location=loc)

    obs_lat = my_site.location.latitude
    obs_lon = my_site.location.longitude

    #observer for pyephem
    ephem_site = ephem.Observer()
    ephem_site.lon, ephem_site.lat = str(obs_lon.deg), str(obs_lat.deg)
    # pass a datetime rather than a decimal-year string, which pyephem
    # would misparse
    ephem_site.date = ephem.Date(t.datetime)

    ##Get the object
    #Check for planet-hood.
    #if planet: resolve the individual planet with pyephem.
    #else if satellite or ISS (or TIANGONG) scrape the appropriate websites and return info
    #else query simbad

    ############
    # Put in an auto-correct for kids
    ############
    #just make it lower case for now
    obj_name = obj_name.lower()

    planets = {"sun": ephem.Sun, "mercury": ephem.Mercury,
               "venus": ephem.Venus, "moon": ephem.Moon,
               "mars": ephem.Mars, "jupiter": ephem.Jupiter,
               "saturn": ephem.Saturn, "uranus": ephem.Uranus,
               "neptune": ephem.Neptune, "pluto": ephem.Pluto}

    if obj_name in planets:
        my_planet = planets[obj_name]()
        my_planet.compute(ephem_site)
        az = math.degrees(my_planet.az)
        alt = math.degrees(my_planet.alt)

    #here coded for just the ISS, but all satellites would need a similar setup, probably polling a site
    elif obj_name == "iss":
        #try a request for the ISS from the open notify site; gives current json data
        page = requests.get("http://api.open-notify.org/iss-now.json")
        issdata = page.json()
        tstamp = issdata['timestamp']
        isslat = issdata['iss_position']['latitude']
        isslon = issdata['iss_position']['longitude']

        #this data does not include the altitude of the object,
        #so here we fix it to 350 km
        issheight = 350 * u.km
        isslat = Latitude(isslat, unit=u.deg)
        isslon = Longitude(isslon, unit=u.deg)

        #we could try scraping the html for the altitude instead;
        #the issue with fullissdata is that it contains information in NASA style units (M50 Cartesian & M50 Keplerian)
        page = requests.get("http://spaceflight.nasa.gov/realdata/sightings/SSapplications/Post/JavaSSOP/orbit/ISS/SVPOST.html")
        #fullissdata = html.fromstring(page.text)

        #other satellites are also listed there; the issue is that the fields
        #are in unknown units and it is unclear which entries are useful
        page = requests.get("http://www.celestrak.com/NORAD/elements/stations.txt")
        allsatdata = page.text

        # distance must be passed by keyword; treating ground lon/lat plus a
        # fixed height as a sky position is the original code's approximation
        c = SkyCoord(isslon, isslat, distance=issheight)
        my_target = FixedTarget(name='ISS', coord=c)
        az = my_site.altaz(t, my_target).az.deg
        alt = my_site.altaz(t, my_target).alt.deg
    else:
        try:
            q = simbad.query_object(obj_name)
            c = SkyCoord(q["RA"][0], q["DEC"][0], unit=(u.hourangle, u.deg))
            my_star = FixedTarget(name='my_star', coord=c)
            az = my_site.altaz(t, my_star).az.deg
            alt = my_site.altaz(t, my_star).alt.deg
        except Exception:
            print("Couldn't find Object in Database")
            alt, az = 0, 0

    return alt, az
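# Example call (the longitude/latitude are illustrative, roughly Greenwich):
# where is Mars right now?
if __name__ == '__main__':
    alt, az = get_altaz('mars', 0.0, 51.48)
    print('alt = {0:.1f} deg, az = {1:.1f} deg'.format(alt, az))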
gs = gridspec.GridSpec(n_row, n_column)
gs.update(left=0.03, right=0.97, bottom=0.03, top=0.97, wspace=0.2, hspace=0.2)

# Now reading the HD rgb image
im_data = np.flipud(skimage.io.imread(im_rgb_hd_file))
im_size = im_data.shape
avm = AVM.from_image(im_rgb_hd_file)
w = avm.to_wcs()
w.naxis1 = im_size[1]
w.naxis2 = im_size[0]

# Sort them by magnitude
cat_ngfs.sort('m_i')
cat_ngfs_coo = SkyCoord(cat_ngfs['RA'], cat_ngfs['DEC'], unit="deg")

for i in np.arange(len(cat_ngfs)):
    print('Processing NGFS dwarf', cat_ngfs['ID'][i])
    ax = plt.subplot(gs[i])
    ax.set_aspect('equal')
    ax.axis('off')

    # pixel rows of the top and bottom edges of the zoom box
    im_crop_coo = w.wcs_world2pix(
        [[cat_ngfs_coo.ra[i].deg, (cat_ngfs_coo.dec[i] + dwarf_zoom_radius).deg],
         [cat_ngfs_coo.ra[i].deg, (cat_ngfs_coo.dec[i] - dwarf_zoom_radius).deg]], 1)
    im_crop_size = (np.abs(im_crop_coo[0, 1] - im_crop_coo[1, 1]) * np.asarray([1., 1.])).astype(int)
    im_crop_coo = (w.wcs_world2pix([[cat_ngfs_coo.ra[i].deg, cat_ngfs_coo.dec[i].deg]], 1)[0]).astype(int)

    # integer division keeps the slice indices ints under Python 3
    im_crop_data = im_data[im_crop_coo[1] - im_crop_size[1] // 2:im_crop_coo[1] + im_crop_size[1] // 2,
                           im_crop_coo[0] - im_crop_size[0] // 2:im_crop_coo[0] + im_crop_size[0] // 2]
    skimage.io.imsave('dwarf_zoom.png', np.flipud(im_crop_data))
    im_crop_size = im_crop_data.shape
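# The manual pixel slicing above can fail at the image edges; a hedged
# alternative (not in the original script) is astropy's Cutout2D, which
# takes the SkyCoord and an angular size directly. Cutout2D works on 2-D
# arrays, so an RGB image would be cropped one colour plane at a time:
from astropy.nddata import Cutout2D

def cutout_dwarf(i, plane=0):
    size = 2 * dwarf_zoom_radius  # angular width/height of the zoom box
    return Cutout2D(im_data[..., plane], cat_ngfs_coo[i], size,
                    wcs=w, mode='trim').data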
def actualSetUp(self, add_errors=False, freqwin=3, block=False, dospectral=True,
                dopol=False, zerow=False, makegcfcf=False):

    self.npixel = 256
    self.low = create_named_configuration('LOWBD2', rmax=750.0)
    self.freqwin = freqwin
    self.vis_list = list()
    self.ntimes = 5
    self.cellsize = 0.0005

    # Choose the interval so that the maximum change in w is smallish
    integration_time = numpy.pi * (24 / (12 * 60))
    self.times = numpy.linspace(-integration_time * (self.ntimes // 2),
                                integration_time * (self.ntimes // 2),
                                self.ntimes)

    if freqwin > 1:
        self.frequency = numpy.linspace(0.8e8, 1.2e8, self.freqwin)
        self.channelwidth = numpy.array(freqwin * [self.frequency[1] - self.frequency[0]])
    else:
        self.frequency = numpy.array([1.0e8])
        self.channelwidth = numpy.array([4e7])

    if dopol:
        self.vis_pol = PolarisationFrame('linear')
        self.image_pol = PolarisationFrame('stokesIQUV')
        f = numpy.array([100.0, 20.0, -10.0, 1.0])
    else:
        self.vis_pol = PolarisationFrame('stokesI')
        self.image_pol = PolarisationFrame('stokesI')
        f = numpy.array([100.0])

    if dospectral:
        flux = numpy.array([f * numpy.power(freq / 1e8, -0.7) for freq in self.frequency])
    else:
        flux = numpy.array([f])

    self.phasecentre = SkyCoord(ra=+180.0 * u.deg, dec=-60.0 * u.deg,
                                frame='icrs', equinox='J2000')

    self.bvis_list = [rsexecute.execute(ingest_unittest_visibility)(
        self.low, [self.frequency[freqwin]], [self.channelwidth[freqwin]],
        self.times, self.vis_pol, self.phasecentre, block=True, zerow=zerow)
        for freqwin, _ in enumerate(self.frequency)]

    self.vis_list = [rsexecute.execute(convert_blockvisibility_to_visibility)(bvis)
                     for bvis in self.bvis_list]

    self.model_list = [rsexecute.execute(create_unittest_model, nout=freqwin)(
        self.vis_list[freqwin], self.image_pol, cellsize=self.cellsize,
        npixel=self.npixel) for freqwin, _ in enumerate(self.frequency)]

    self.components_list = [rsexecute.execute(create_unittest_components)(
        self.model_list[freqwin], flux[freqwin, :][numpy.newaxis, :], single=True)
        for freqwin, _ in enumerate(self.frequency)]

    self.components_list = rsexecute.compute(self.components_list, sync=True)

    self.model_list = [rsexecute.execute(insert_skycomponent, nout=1)(
        self.model_list[freqwin], self.components_list[freqwin])
        for freqwin, _ in enumerate(self.frequency)]

    self.model_list = rsexecute.compute(self.model_list, sync=True)

    self.vis_list = [rsexecute.execute(predict_skycomponent_visibility)(
        self.vis_list[freqwin], self.components_list[freqwin])
        for freqwin, _ in enumerate(self.frequency)]

    centre = self.freqwin // 2

    # Calculate the model convolved with a Gaussian.
    self.model = self.model_list[centre]
    self.cmodel = smooth_image(self.model)

    if self.persist:
        export_image_to_fits(self.model, '%s/test_imaging_model.fits' % self.dir)
    if self.persist:
        export_image_to_fits(self.cmodel, '%s/test_imaging_cmodel.fits' % self.dir)

    if add_errors and block:
        self.vis_list = [rsexecute.execute(insert_unittest_errors)(self.vis_list[i])
                         for i, _ in enumerate(self.frequency)]

    self.components = self.components_list[centre]

    if makegcfcf:
        self.gcfcf = [create_awterm_convolutionfunction(self.model, nw=61, wstep=16.0,
                                                        oversampling=8, support=64,
                                                        use_aaf=True)]
        self.gcfcf_clipped = [(self.gcfcf[0][0],
                               apply_bounding_box_convolutionfunction(self.gcfcf[0][1],
                                                                      fractional_level=1e-3))]
        self.gcfcf_joint = [create_awterm_convolutionfunction(self.model, nw=11, wstep=16.0,
                                                              oversampling=8, support=64,
                                                              use_aaf=True)]
    else:
        self.gcfcf = None
        self.gcfcf_clipped = None
        self.gcfcf_joint = None