from astropy.coordinates import SkyCoord
from astropy.units import UnitTypeError


def _set_coords(ra, dec, unit):
    # Try to build the SkyCoord directly (works when the inputs carry their own units);
    # fall back to the supplied unit if the inputs are unitless.
    try:
        coords = SkyCoord(ra, dec)
    except UnitTypeError:
        coords = SkyCoord(ra, dec, unit=unit)
    return coords
def setup(self):
    kitt_peak = EarthLocation.from_geodetic(lon=-111.6 * u.deg,
                                            lat=31.963333333333342 * u.deg,
                                            height=2120 * u.m)
    self.t = Time('2014-09-25T00:00', location=kitt_peak)
    obsgeoloc, obsgeovel = kitt_peak.get_gcrs_posvel(self.t)
    self.frame = GCRS(obstime=self.t, obsgeoloc=obsgeoloc, obsgeovel=obsgeovel)
    # Results returned by JPL Horizons web interface
    self.horizons = {
        'mercury': SkyCoord(ra='13h38m58.50s', dec='-13d34m42.6s',
                            distance=c * 7.699020 * u.min, frame=self.frame),
        'moon': SkyCoord(ra='12h33m12.85s', dec='-05d17m54.4s',
                         distance=c * 0.022054 * u.min, frame=self.frame),
        'jupiter': SkyCoord(ra='09h09m55.55s', dec='+16d51m57.8s',
                            distance=c * 49.244937 * u.min, frame=self.frame)
    }
def setup(self):
    self.t = Time('1980-03-25 00:00')
    self.apparent_frame = TETE(obstime=self.t)
    # Results returned by JPL Horizons web interface
    self.horizons = {
        'mercury': SkyCoord(ra='22h41m47.78s', dec='-08d29m32.0s',
                            distance=c * 6.323037 * u.min, frame=self.apparent_frame),
        'moon': SkyCoord(ra='07h32m02.62s', dec='+18d34m05.0s',
                         distance=c * 0.021921 * u.min, frame=self.apparent_frame),
        'jupiter': SkyCoord(ra='10h17m12.82s', dec='+12d02m57.0s',
                            distance=c * 37.694557 * u.min, frame=self.apparent_frame),
        'sun': SkyCoord(ra='00h16m31.00s', dec='+01d47m16.9s',
                        distance=c * 8.294858 * u.min, frame=self.apparent_frame)
    }
def Counts(gal_id, gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    # making a list of galaxies within a redshift range of given z, in the selected field,
    # and above the mass limit
    lst_gal = []
    data_tmp = data[data['field'] == gal_field]
    # separating the potential satellites into star-forming (b) and quiescent (r) bins
    mask = ((np.abs(data_tmp['z'] - z) <= delta_z) & (data_tmp['id'] != gal_id) &
            (data_tmp['lmass'] >= min_mass))
    lst_gal = data_tmp[mask]
    lst_galr = lst_gal[(((lst_gal['vj'] < 0.92) & (lst_gal['uv'] > 1.3)) |
                        ((lst_gal['vj'] > 0.8) & (lst_gal['vj'] < 1.6) &
                         (lst_gal['uv'] > (0.88*lst_gal['vj'] + 0.49))))]
    lst_galb = lst_gal[(((lst_gal['vj'] < 0.92) & (lst_gal['uv'] < 1.3)) |
                        ((lst_gal['vj'] > 0.8) & (lst_gal['vj'] < 1.6) &
                         (lst_gal['uv'] < (0.88*lst_gal['vj'] + 0.49))) |
                        (lst_gal['vj'] > 1.5))]
    # finding the various aperture radii in arcminutes based on given z
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per_kpc = kpc_per_arcmin**(-1)
    arcmin = arcmin_per_kpc*(R*u.kpc)
    # retrieving RA and DEC data of given galaxy
    p1 = data_tmp[(data_tmp['id'] == gal_id)]
    # calculating the angular separation to each galaxy in lst_gal
    sc0 = SkyCoord(p1['ra']*u.deg, p1['dec']*u.deg)
    sc1 = SkyCoord(lst_galr['ra']*u.deg, lst_galr['dec']*u.deg)
    sc2 = SkyCoord(lst_galb['ra']*u.deg, lst_galb['dec']*u.deg)
    sep1 = sc0.separation(sc1).to(u.arcmin)
    sep2 = sc0.separation(sc2).to(u.arcmin)
    # counting how many separations fall within each aperture radius in 'arcmin'
    nnr = np.empty(len(R))
    nnb = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nnr[ii] = np.sum(sep1 <= r)
        nnb[ii] = np.sum(sep2 <= r)
    return [nnr, nnb]
def test_names():
    # First check that sesame is up
    if urllib.request.urlopen(
            "http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame").getcode() != 200:
        pytest.skip(
            "SESAME appears to be down, skipping test_name_resolve.py:test_names()...")

    with pytest.raises(NameResolveError):
        get_icrs_coordinates("m87h34hhh")

    try:
        icrs = get_icrs_coordinates("NGC 3642")
    except NameResolveError:
        ra, dec = _parse_response(_cached_ngc3642["all"])
        icrs = SkyCoord(ra=float(ra) * u.degree, dec=float(dec) * u.degree)

    icrs_true = SkyCoord(ra="11h 22m 18.014s", dec="59d 04m 27.27s")

    # use precision of only 1 decimal here and below because the result can
    # change due to Sesame server-side changes.
    np.testing.assert_almost_equal(icrs.ra.degree, icrs_true.ra.degree, 1)
    np.testing.assert_almost_equal(icrs.dec.degree, icrs_true.dec.degree, 1)

    try:
        icrs = get_icrs_coordinates("castor")
    except NameResolveError:
        ra, dec = _parse_response(_cached_castor["all"])
        icrs = SkyCoord(ra=float(ra) * u.degree, dec=float(dec) * u.degree)

    icrs_true = SkyCoord(ra="07h 34m 35.87s", dec="+31d 53m 17.8s")

    np.testing.assert_almost_equal(icrs.ra.degree, icrs_true.ra.degree, 1)
    np.testing.assert_almost_equal(icrs.dec.degree, icrs_true.dec.degree, 1)
def Counts(gal_id, gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    from astropy.coordinates.sky_coordinate import SkyCoord
    from astropy import units as u
    # making a list of galaxies within a redshift range of given z, in the selected field,
    # and above the mass limit
    lst_gal = []
    data_tmp = data_flagged[data_flagged['field'] == gal_field]
    mask = ((np.abs(data_tmp['z_peak'] - z) <= delta_z) & (data_tmp['id'] != gal_id) &
            (data_tmp['lmass'] >= min_mass))
    lst_gal = data_tmp[mask]
    # finding the various aperture radii in arcminutes based on given z
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per_kpc = kpc_per_arcmin**(-1)
    arcmin = arcmin_per_kpc*(R*u.kpc)
    # retrieving RA and DEC data of given galaxy
    p1 = data_tmp[(data_tmp['id'] == gal_id)]
    # calculating the angular separation to each galaxy in lst_gal
    sc0 = SkyCoord(p1['ra']*u.deg, p1['dec']*u.deg)
    sc = SkyCoord(lst_gal['ra']*u.deg, lst_gal['dec']*u.deg)
    sep = sc0.separation(sc)
    sep = sep.to(u.arcmin)
    # counting how many separations fall within each aperture radius in 'arcmin'
    nn = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nn[ii] = np.sum(sep <= r)
    return nn
def compute_paral_angles(header, latitude, ra_key, dec_key, lst_key, acqtime_key, date_key='DATE-OBS'): """Calculates the parallactic angle for a frame, taking coordinates and local sidereal time from fits-headers (frames taken in an alt-az telescope with the image rotator off). The coordinates in the header are assumed to be J2000 FK5 coordinates. The spherical trigonometry formula for calculating the parallactic angle is taken from Astronomical Algorithms (Meeus, 1998). Parameters ---------- header : dictionary Header of current frame. latitude : float Latitude of the observatory in degrees. The dictionaries in vip_hci/conf/param.py can be used like: latitude=LBT['latitude']. ra_key, dec_key, lst_key, acqtime_key, date_key : strings Keywords where the values are stored in the header. Returns ------- pa.value : float Parallactic angle in degrees for current header (frame). """ obs_epoch = Time(header[date_key], format='iso', scale='utc') # equatorial coordinates in J2000 ra = header[ra_key] dec = header[dec_key] coor = SkyCoord(ra=ra, dec=dec, unit=(hourangle,degree), frame=FK5, equinox='J2000.0') # recalculate for DATE-OBS (precession) coor_curr = coor.transform_to(FK5(equinox=obs_epoch)) # new ra and dec in radians ra_curr = coor_curr.ra dec_curr = coor_curr.dec lst_split = header[lst_key].split(':') lst = float(lst_split[0])+float(lst_split[1])/60+float(lst_split[2])/3600 exp_delay = (header[acqtime_key] * 0.5) / 3600 # solar to sidereal time exp_delay = exp_delay*1.0027 # hour angle in degrees hour_angle = (lst + exp_delay) * 15 - ra_curr.deg hour_angle = np.deg2rad(hour_angle) latitude = np.deg2rad(latitude) # PA formula from Astronomical Algorithms pa = -np.rad2deg(np.arctan2(-np.sin(hour_angle), np.cos(dec_curr) * \ np.tan(latitude) - np.sin(dec_curr) * np.cos(hour_angle))) #if dec_curr.value > latitude: pa = (pa.value + 360) % 360 return pa.value
def compute_paral_angles(header, latitude, ra_key, dec_key, lst_key, acqtime_key, date_key='DATE-OBS'): """Calculates the parallactic angle for a frame, taking coordinates and local sidereal time from fits-headers (frames taken in an alt-az telescope with the image rotator off). The coordinates in the header are assumed to be J2000 FK5 coordinates. The spherical trigonometry formula for calculating the parallactic angle is taken from Astronomical Algorithms (Meeus, 1998). Parameters ---------- header : dictionary Header of current frame. latitude : float Latitude of the observatory in degrees. The dictionaries in vip/conf/param.py can be used like: latitude=LBT['latitude']. ra_key, dec_key, lst_key, acqtime_key, date_key : strings Keywords where the values are stored in the header. Returns ------- pa.value : float Parallactic angle in degrees for current header (frame). """ obs_epoch = Time(header[date_key], format='iso', scale='utc') # equatorial coordinates in J2000 ra = header[ra_key] dec = header[dec_key] coor = SkyCoord(ra=ra, dec=dec, unit=(hourangle,degree), frame=FK5, equinox='J2000.0') # recalculate for DATE-OBS (precession) coor_curr = coor.transform_to(FK5(equinox=obs_epoch)) # new ra and dec in radians ra_curr = coor_curr.ra dec_curr = coor_curr.dec lst_split = header[lst_key].split(':') lst = float(lst_split[0])+float(lst_split[1])/60+float(lst_split[2])/3600 exp_delay = (header[acqtime_key] * 0.5) / 3600 # solar to sidereal time exp_delay = exp_delay*1.0027 # hour angle in degrees hour_angle = (lst + exp_delay) * 15 - ra_curr.deg hour_angle = np.deg2rad(hour_angle) latitude = np.deg2rad(latitude) # PA formula from Astronomical Algorithms pa = -np.rad2deg(np.arctan2(-np.sin(hour_angle), np.cos(dec_curr) * \ np.tan(latitude) - np.sin(dec_curr) * np.cos(hour_angle))) #if dec_curr.value > latitude: pa = (pa.value + 360) % 360 return pa.value
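# Hypothetical usage sketch for compute_paral_angles (not part of the original source). The
# header keys, their values and the observatory latitude below are invented for illustration,
# and the module-level imports used by the function (Time, SkyCoord, FK5, hourangle, degree,
# numpy as np) are assumed to be in place.
fake_header = {'DATE-OBS': '2015-01-01 05:30:00',
               'RA': '05:35:17.3',    # hours:minutes:seconds
               'DEC': '-05:23:28',    # degrees:arcminutes:arcseconds
               'LST': '06:10:00',     # local sidereal time
               'EXPTIME': 30.0}       # exposure time in seconds
pa_deg = compute_paral_angles(fake_header, latitude=-24.6, ra_key='RA',
                              dec_key='DEC', lst_key='LST', acqtime_key='EXPTIME')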
def rand_counts(gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    # picking random location for galaxy number density
    if gal_field == 'AEGIS':
        ra1 = random.uniform(3.746000, 3.756821)
        dec1 = random.uniform(0.920312, 0.925897)
    elif gal_field == 'COSMOS':
        ra1 = random.uniform(2.619737, 2.620718)
        dec1 = random.uniform(0.038741, 0.043811)
    elif gal_field == 'GOODS-N':
        ra1 = random.uniform(3.298072, 3.307597)
        dec1 = random.uniform(1.084787, 1.087936)
    elif gal_field == 'GOODS-S':
        ra1 = random.uniform(0.925775, 0.929397)
        dec1 = random.uniform(-0.487098, -0.483591)
    elif gal_field == 'UDS':
        ra1 = random.uniform(0.59815, 0.602889)
        dec1 = random.uniform(-0.091376, -0.090305)

    from astropy.coordinates.sky_coordinate import SkyCoord
    from astropy import units as u

    # switching ra and dec to degrees
    ra1 = ra1 * (180.0 / math.pi)
    dec1 = dec1 * (180.0 / math.pi)

    # making a list of galaxies within a redshift range of given z
    lst_gal = []
    data_tmp = data_flagged[data_flagged['field'] == gal_field]
    mask = (np.abs(data_tmp['z_peak'] - z) <= delta_z) & (data_tmp['lmass'] >= min_mass)
    lst_gal = data_tmp[mask]

    # converting radius R (kpc) to arcminutes at given redshift z
    kpc_per = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per = kpc_per**(-1)
    arcmin = arcmin_per * (R * u.kpc)

    # building SkyCoord objects for the random position and for each galaxy in lst_gal
    sc0 = SkyCoord(ra1 * u.deg, dec1 * u.deg)
    sc = SkyCoord(lst_gal['ra'] * u.deg, lst_gal['dec'] * u.deg)
    sep = sc.separation(sc0)
    sep = sep.to(u.arcmin)

    nn = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nn[ii] = np.sum(sep <= r)
    return nn
def rand_counts(gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    # picking random location for galaxy number density
    if gal_field == 'AEGIS':
        ra1 = random.uniform(3.746000, 3.756821)
        dec1 = random.uniform(0.920312, 0.925897)
    elif gal_field == 'COSMOS':
        ra1 = random.uniform(2.619737, 2.620718)
        dec1 = random.uniform(0.038741, 0.043811)
    elif gal_field == 'GOODS-N':
        ra1 = random.uniform(3.298072, 3.307597)
        dec1 = random.uniform(1.084787, 1.087936)
    elif gal_field == 'GOODS-S':
        ra1 = random.uniform(0.925775, 0.929397)
        dec1 = random.uniform(-0.487098, -0.483591)
    elif gal_field == 'UDS':
        ra1 = random.uniform(0.59815, 0.602889)
        dec1 = random.uniform(-0.091376, -0.090305)

    # switching ra and dec to degrees
    ra1 = ra1*(180.0/math.pi)
    dec1 = dec1*(180.0/math.pi)

    # making a list of galaxies within a redshift range of given z, in the selected field,
    # and above the mass limit
    lst_gal = []
    data_tmp = data[data['field'] == gal_field]
    mask = ((np.abs(data_tmp['z'] - z) <= delta_z) & (data_tmp['lmass'] >= min_mass))
    lst_gal = data_tmp[mask]
    lst_galr = lst_gal[(((lst_gal['vj'] < 0.92) & (lst_gal['uv'] > 1.3)) |
                        ((lst_gal['vj'] > 0.8) & (lst_gal['vj'] < 1.6) &
                         (lst_gal['uv'] > (0.88*lst_gal['vj'] + 0.49))))]
    lst_galb = lst_gal[(((lst_gal['vj'] < 0.92) & (lst_gal['uv'] < 1.3)) |
                        ((lst_gal['vj'] > 0.8) & (lst_gal['vj'] < 1.6) &
                         (lst_gal['uv'] < (0.88*lst_gal['vj'] + 0.49))) |
                        (lst_gal['vj'] > 1.5))]

    # finding the various aperture radii in arcminutes based on given z
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per_kpc = kpc_per_arcmin**(-1)
    arcmin = arcmin_per_kpc*(R*u.kpc)

    # calculating the angular separation to each galaxy in lst_gal
    sc0 = SkyCoord(ra1*u.deg, dec1*u.deg)
    sc1 = SkyCoord(lst_galr['ra']*u.deg, lst_galr['dec']*u.deg)
    sc2 = SkyCoord(lst_galb['ra']*u.deg, lst_galb['dec']*u.deg)
    sep1 = sc0.separation(sc1).to(u.arcmin)
    sep2 = sc0.separation(sc2).to(u.arcmin)

    # counting how many separations fall within each aperture radius in 'arcmin'
    nn1 = np.empty(len(R))
    nn2 = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nn1[ii] = np.sum(sep1 <= r)
        nn2[ii] = np.sum(sep2 <= r)

    # nn1 is the density list for quiescent galaxies, nn2 is for star-forming ones
    return [nn1, nn2]
def listcord2center(coord_list):
    """
    Return the center of a list of coordinates. This is done
    by averaging cartesian coordinates to avoid looping in RA-Dec.

    Parameters
    ----------
    - coord_list (SkyCoord list): list of sky coordinates

    Outputs
    --------
    - center (SkyCoord): SkyCoord object, center of the list

    """
    # Get the cartesian coordinates
    x = coord_list.cartesian.x
    y = coord_list.cartesian.y
    z = coord_list.cartesian.z

    # Average the cartesian coordinates
    x_m = np.mean(x)
    y_m = np.mean(y)
    z_m = np.mean(z)

    # Transform to sky coordinates
    r, lat, lon = cartesian_to_spherical(x_m, y_m, z_m)
    center_guess = SkyCoord(lon, lat, frame='icrs')

    # Perform distance minimisation to make sure it is ok
    def fun(par):
        c = SkyCoord(par[0] * u.deg, par[1] * u.deg, frame='icrs')
        dist = coord_list.separation(c)
        return np.sum(dist.value**2)

    p_guess = np.array([center_guess.ra.to_value('deg'),
                        center_guess.dec.to_value('deg')])
    res = minimize(fun, p_guess)
    center = SkyCoord(res.x[0] * u.deg, res.x[1] * u.deg, frame='icrs')

    # Sanity check
    if res.success is not True:
        print('!!! WARNING: not sure I found the coordinates barycenter !!!')
        print('Separation between cartesian center and my center: ')
        print(center.separation(center_guess).to_value('deg'), 'deg')
        print('Center: ')
        print(center)

    return center
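# Hypothetical usage sketch for listcord2center (not part of the original source); it assumes
# the function's module already imports numpy as np, astropy.units as u, SkyCoord,
# cartesian_to_spherical and scipy.optimize.minimize as used above.
import astropy.units as u
from astropy.coordinates import SkyCoord

ptgs = SkyCoord(ra=[150.0, 150.2, 150.4] * u.deg,
                dec=[2.0, 2.1, 1.9] * u.deg, frame='icrs')
center = listcord2center(ptgs)
print(center.to_string('hmsdms'))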
def Counts(gal_id, gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    from astropy.coordinates.sky_coordinate import SkyCoord
    from astropy import units as u
    # making a list of galaxies within a redshift range of given z, in the selected field,
    # and above the mass limit
    lst_gal = []
    data_tmp = data_flagged[data_flagged['field'] == gal_field]
    # separating the satellite galaxies into four bins based on mass
    mask = ((np.abs(data_tmp['z_peak'] - z) <= delta_z) & (data_tmp['id'] != gal_id) &
            (data_tmp['lmass'] >= min_mass))
    lst_gal = data_tmp[mask]
    lst_gal1 = lst_gal[(lst_gal['lmass'] < 9.8)]
    lst_gal2 = lst_gal[((lst_gal['lmass'] < 10.3) & (lst_gal['lmass'] > 9.8))]
    lst_gal3 = lst_gal[((lst_gal['lmass'] < 10.8) & (lst_gal['lmass'] > 10.3))]
    lst_gal4 = lst_gal[((lst_gal['lmass'] < 11.8) & (lst_gal['lmass'] > 10.8))]
    # finding the various aperture radii in arcminutes based on given z
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per_kpc = kpc_per_arcmin**(-1)
    arcmin = arcmin_per_kpc*(R*u.kpc)
    # retrieving RA and DEC data of given galaxy
    p1 = data_tmp[(data_tmp['id'] == gal_id)]
    # calculating the angular separation to each galaxy in lst_gal
    sc0 = SkyCoord(p1['ra']*u.deg, p1['dec']*u.deg)
    sc1 = SkyCoord(lst_gal1['ra']*u.deg, lst_gal1['dec']*u.deg)
    sc2 = SkyCoord(lst_gal2['ra']*u.deg, lst_gal2['dec']*u.deg)
    sc3 = SkyCoord(lst_gal3['ra']*u.deg, lst_gal3['dec']*u.deg)
    sc4 = SkyCoord(lst_gal4['ra']*u.deg, lst_gal4['dec']*u.deg)
    sep1 = sc0.separation(sc1).to(u.arcmin)
    sep2 = sc0.separation(sc2).to(u.arcmin)
    sep3 = sc0.separation(sc3).to(u.arcmin)
    sep4 = sc0.separation(sc4).to(u.arcmin)
    # counting how many separations fall within each aperture radius in 'arcmin'
    nn1 = np.empty(len(R))
    nn2 = np.empty(len(R))
    nn3 = np.empty(len(R))
    nn4 = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nn1[ii] = np.sum(sep1 <= r)
        nn2[ii] = np.sum(sep2 <= r)
        nn3[ii] = np.sum(sep3 <= r)
        nn4[ii] = np.sum(sep4 <= r)
    # returning four lists of counts per radius, with lower numbers for the lower mass bins
    return [nn1, nn2, nn3, nn4]
def _trace_back_obj(self, o, c, t):
    """
    computes minimum time and distance of object trajectory with that of c

    Arguments:
        o: object, SkyCoord
        c: object, SkyCoord, presumably cluster center
        t: time vector in years over which to perform calculations

    Returns:
        dict with keys: d_min, t_min and tol

    Reference: https://arxiv.org/pdf/2005.04762.pdf
    """
    # function to constrain dec between -90 and 90 (argument in radians)
    fix_lat = np.vectorize(lambda x: x if x <= np.pi/2.0 else
                           (np.pi - x if x <= 3.0*np.pi/2.0 else x - 2.0*np.pi))

    # get the ra's and dec's of the two objects at time t
    # relying on astropy.units to do proper conversion to degrees
    o_ra = (o.ra + o.pm_ra_cosdec * t).wrap_at(360 * u.degree)
    c_ra = (c.ra + c.pm_ra * t).wrap_at(360 * u.degree)
    o_dec = coord.Angle(
        fix_lat((o.dec + o.pm_dec * t).radian % (2.0 * np.pi)) * u.radian)
    c_dec = coord.Angle(
        fix_lat((c.dec + c.pm_dec * t).radian % (2.0 * np.pi)) * u.radian)

    # sky coords at time t for both, using constant distance:
    o_t = SkyCoord(ra=o_ra, dec=o_dec, distance=o.distance)
    c_t = SkyCoord(ra=c_ra, dec=c_dec, distance=c.distance)

    # angular separation as function of time
    sep = o_t.separation(c_t)

    # find minimum separation and time
    min_i = sep.argmin()
    d_min = o_t[min_i].separation(c_t[min_i])
    t_min = t[min_i]

    # calculate the tolerance (see equation 1 in reference)
    tol = 10 + 1.3 * c.separation(o) / (1 * u.degree)

    return {"t_min": t_min, "d_min": d_min, "tol": tol,
            'tracesback': (d_min / (1.0 * u.arcminute)) <= tol}
def rand_counts(gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    # picking random location for galaxy number density
    if gal_field == 'AEGIS':
        ra1 = random.uniform(3.746000, 3.756821)
        dec1 = random.uniform(0.920312, 0.925897)
    elif gal_field == 'COSMOS':
        ra1 = random.uniform(2.619737, 2.620718)
        dec1 = random.uniform(0.038741, 0.043811)
    elif gal_field == 'GOODS-N':
        ra1 = random.uniform(3.298072, 3.307597)
        dec1 = random.uniform(1.084787, 1.087936)
    elif gal_field == 'GOODS-S':
        ra1 = random.uniform(0.925775, 0.929397)
        dec1 = random.uniform(-0.487098, -0.483591)
    elif gal_field == 'UDS':
        ra1 = random.uniform(0.59815, 0.602889)
        dec1 = random.uniform(-0.091376, -0.090305)

    from astropy.coordinates.sky_coordinate import SkyCoord
    from astropy import units as u

    # switching ra and dec to degrees
    ra1 = ra1*(180.0/math.pi)
    dec1 = dec1*(180.0/math.pi)

    # making a list of galaxies within a redshift range of given z
    lst_gal = []
    data_tmp = data_flagged[data_flagged['field'] == gal_field]
    mask = (np.abs(data_tmp['z_peak'] - z) <= delta_z) & (data_tmp['lmass'] >= min_mass)
    lst_gal = data_tmp[mask]

    # converting radius R (kpc) to arcminutes at given redshift z
    kpc_per = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per = kpc_per**(-1)
    arcmin = arcmin_per*(R*u.kpc)

    # building SkyCoord objects for the random position and for each galaxy in lst_gal
    sc0 = SkyCoord(ra1*u.deg, dec1*u.deg)
    sc = SkyCoord(lst_gal['ra']*u.deg, lst_gal['dec']*u.deg)
    sep = sc.separation(sc0)
    sep = sep.to(u.arcmin)

    nn = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nn[ii] = np.sum(sep <= r)
    return nn
def Counts_q(gal_id, gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    # making a list of galaxies within a redshift range of given z, in the selected field,
    # and above the mass limit
    lst_gal = []
    data_tmp = data1[data1['field'] == gal_field]
    mask = ((np.abs(data_tmp['z'] - z) <= delta_z) & (data_tmp['id'] != gal_id) &
            (data_tmp['lmass'] >= min_mass))
    # making list of satellites in total and list of satellites that are quiescent
    lst_gal = data_tmp[mask]
    lst_galr = lst_gal[(((lst_gal['vj'] < 0.92) & (lst_gal['uv'] > 1.3)) |
                        ((lst_gal['vj'] > 0.8) & (lst_gal['vj'] < 1.6) &
                         (lst_gal['uv'] > (0.88 * lst_gal['vj'] + 0.49))))]
    # finding the various aperture radii in arcminutes based on given z
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per_kpc = kpc_per_arcmin**(-1)
    arcmin = arcmin_per_kpc * (R * u.kpc)
    # retrieving RA and DEC data of given galaxy
    p1 = data_tmp[(data_tmp['id'] == gal_id)]
    # calculating the angular separation to each galaxy in lst_gal
    sc0 = SkyCoord(p1['ra'] * u.deg, p1['dec'] * u.deg)
    sc1 = SkyCoord(lst_galr['ra'] * u.deg, lst_galr['dec'] * u.deg)
    sc2 = SkyCoord(lst_gal['ra'] * u.deg, lst_gal['dec'] * u.deg)
    sep1 = sc0.separation(sc1).to(u.arcmin)
    sep2 = sc0.separation(sc2).to(u.arcmin)
    # counting how many separations fall within each aperture radius in 'arcmin'
    nnr = np.empty(len(R))
    nn = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nnr[ii] = np.sum(sep1 <= r)
        nn[ii] = np.sum(sep2 <= r)
    lst_q = []
    # calculating the quiescent fraction and adding it to a list
    for i in range(len(nnr)):
        # flagging with a 5 (ignored later) the cases that would divide by zero
        if nn[i] == 0:
            lst_q.append(5)
        else:
            lst_q.append(nnr[i] / nn[i])
    return lst_q
def getClusterInfo():
    """
    returns astropy table with info for the clusters in Gaia paper:
    http://simbad.u-strasbg.fr/simbad/sim-ref?bibcode=2018A%26A...616A..10G
    (Table 1a: Nearby Open Clusters)
    """
    # from readme file at https://cdsarc.unistra.fr/ftp/J/A+A/616/A10/ReadMe
    colnames = ['cluster', 'ra', 'dec', 'U', 'e_U', 'V', 'e_V', 'W', 'e_W',
                'cUV', 'cUW', 'cVW', 'ra_conv', 'dec_conv', 'plx', 'e_plx',
                'pmra', 'e_pmra', 'pmdec', 'e_pmdec', 'NMemb', 'uwsd', 'RV', 'e_RV']
    path = 'ftp://cdsarc.u-strasbg.fr/pub/cats/J/A+A/616/A10/tablea3.dat'
    table3_df = pd.read_csv(path, delim_whitespace=True, header=None,
                            index_col=None, names=colnames)
    table3 = Table.from_pandas(table3_df)
    table3.add_index('cluster')

    cluster_coords = SkyCoord(ra=table3['ra']*u.degree,
                              dec=table3['dec']*u.degree,
                              distance=1000/table3['plx']*u.pc,
                              pm_ra_cosdec=table3['pmra']*u.mas/u.year,
                              pm_dec=table3['pmdec']*u.mas/u.year,
                              radial_velocity=table3['RV']*u.km/u.second)
    table3['coords'] = cluster_coords

    return table3
def offset_coord(coord_start, delta_ra=0 * u.arcsec, delta_dec=0 * u.arcsec):
    """ Compute a new SkyCoord entity given a starting location and offset.

    This function assumes that the sky is flat!

    Args:
        coord_start: a SkyCoord object
        delta_ra: an RA offset, in u.arcsec
        delta_dec: a Dec offset, in u.arcsec
    Returns:
        a SkyCoord object
    """
    if (delta_ra > 900 * u.arcsec) or (delta_dec > 900 * u.arcsec):
        warnings.warn(
            'offset_coord: with offsets that large, the sky is not flat anymore!')

    return SkyCoord(
        ra=coord_start.ra.deg +
        delta_ra.to(u.degree).value / np.cos(coord_start.dec.radian),
        dec=coord_start.dec.deg + delta_dec.to(u.degree).value,
        unit=(u.degree, u.degree),
        frame='icrs',
        obstime=coord_start.obstime,
        equinox=coord_start.equinox,
    )
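# Hypothetical usage sketch for offset_coord (not part of the original source). The starting
# coordinate is arbitrary and carries no obstime/equinox, so those attributes simply pass
# through as None; warnings, numpy and u are assumed to be imported in the function's module.
import astropy.units as u
from astropy.coordinates import SkyCoord

start = SkyCoord(ra=150.0 * u.deg, dec=2.2 * u.deg, frame='icrs')
shifted = offset_coord(start, delta_ra=30 * u.arcsec, delta_dec=-15 * u.arcsec)
print(start.separation(shifted).to(u.arcsec))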
def getStar(self, query, load_lc=True):
    """
    Query `Star` object

    Parameters
    ----------
    query : dict
        Database query
    load_lc : bool
        Append light curves to star objects

    Returns
    -------
    list
        List of `Star` objects
    """
    _stars = self._getStars(query, load_lc)
    if self.delta:
        nearest = query.get("nearest", False)
        checked_stars = self.coneSearch(SkyCoord(self.ra, self.dec, unit="deg"),
                                        _stars, self.delta, nearest=nearest)
        return checked_stars
    else:
        return _stars
def update_wcs(head, xpos_px, ypos_px, box):
    wcs = WCS(head)
    ra, dec = wcs.wcs_pix2world(xpos_px, ypos_px, 0)
    coord = SkyCoord(ra=ra, dec=dec, unit=(u.deg, u.deg), frame='icrs')

    newhead = deepcopy(head)

    if not hasattr(box, "__len__"):
        box = np.array([box, box], dtype=int)
    sy, sx = box

    # head["WCSAXES"] = 2
    # head["CTYPE1"] = 'RA---TAN'
    # head["CTYPE2"] = 'DEC--TAN'
    newhead["CRPIX1"] = 0.5 * sx + 0.5
    newhead["CRPIX2"] = 0.5 * sy + 0.5
    newhead["CRVAL1"] = coord.ra.deg
    newhead["CRVAL2"] = coord.dec.deg
    # head["CD1_1"] = -pfov/3600.0
    # head["CD1_2"] = 0
    # head["CD2_2"] = pfov/3600.0
    # head["CD2_1"] = 0

    return newhead
def getStar(self, query, load_lc=True):
    """
    Query `Star` object

    Parameters
    ----------
    query : dict
        Database query
    load_lc : bool
        Append light curves to star objects

    Returns
    -------
    list
        List of `Star` objects
    """
    stars = self.postQuery(query, load_lc)
    if "RA" in query and "Dec" in query and "Rad" in query:
        stars = self.coneSearch(SkyCoord(float(query["RA"]),
                                         float(query["Dec"]), unit="deg"),
                                stars, float(query["Rad"] / 3600.),
                                nearest=query.get("nearest", False))
    return stars
def test_ephemeris_at_dates(self):
    dates: Time = Time(("2020-06-01", "2020-07-01"))
    eph: List[Ephemeris] = FixedTarget(SkyCoord(
        123 * u.deg, 45.6 * u.deg)).ephemeris(dates=dates)
    assert len(eph) == 2
    assert np.allclose([e.ra for e in eph], 123)
    assert np.allclose([e.dec for e in eph], 45.6)
def getStar(self, query, load_lc=True):
    stars = self.postQuery(query, load_lc)
    if "ra" in query and "dec" in query and "delta" in query:
        stars = self.coneSearch(SkyCoord(float(query["ra"]),
                                         float(query["dec"]), unit="deg"),
                                stars, float(query["delta"] / 3600.),
                                nearest=query.get("nearest", False))
    return stars
def get_gaia_cat(ims, cat_name='gaia'):
    """Get the Gaia catalog for the area of input images"""
    from calc_bounds import bounds, get_footprints

    print('Calculating coordinate ranges for Gaia query:')
    footprint_list = list(map(get_footprints, ims))
    ras, decs = bounds(footprint_list)

    ra_midpt = (np.amax(ras) + np.amin(ras)) / 2.
    dec_midpt = (np.amax(decs) + np.amin(decs)) / 2.
    ra_width = (np.amax(ras) - np.amin(ras))
    dec_height = (np.amax(decs) - np.amin(decs))

    print('\nPerforming Gaia query:')
    coord = SkyCoord(ra=ra_midpt, dec=dec_midpt, unit=(u.degree, u.degree),
                     frame='icrs')
    width = Quantity(ra_width, u.deg)
    height = Quantity(dec_height, u.deg)
    r = Gaia.query_object_async(coordinate=coord, width=width, height=height)
    print('Sources returned: {}'.format(len(r)))
    assert len(r) > 0, 'No sources found in Gaia query\n'

    cat_file_name = '{}.cat'.format(cat_name)
    print('Writing Gaia source catalog: {}\n'.format(cat_file_name))
    Table([r['ra'], r['dec']]).write(cat_file_name,
                                     format='ascii.fast_commented_header')
    return cat_file_name
def load_target(self, dir):
    """
    Loads ra, dec, radius, parallax data from a previously created directory

    Arguments:
        dir [string]: directory in which to look

    Returns:
        ra, dec, radius, parallax
    """
    data_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    try:
        with open(data_dir + "/" + dir + "/README", 'r') as f:
            out(dir + ": " + f.readline())
            ra = f.readline().lstrip('ra=').rstrip('\n')
            out(ra)
            dec = f.readline().lstrip('dec=').rstrip('\n')
            out(dec)
            radius = float(f.readline().lstrip('radius=').rstrip(' deg\n'))
            out(radius)
            parallax = float(f.readline().lstrip('parllax=').rstrip(' mas\n'))
            out(parallax)
    except:
        out("Specified directory does not contain the required data:")
        out(dir)
        return
    return (SkyCoord(ra, dec, unit=(u.hourangle, u.deg)), radius, parallax)
def get_localfcdata_espresso(fc_params, inpars):
    '''
    Extracts all the important info to build a finding chart from a given
    ESPRESSO OB defined locally.

    Args:
        inpars: A dictionary containing the OB parameters
    Returns:
        A dictionary containing the ESPRESSO OB parameters
    '''
    # Acquisition
    fc_params['acq'] = copy.deepcopy(espresso_acq_params)
    fc_params['acq']['ins_mode'] = inpars['ins_mode']
    fc_params['acq']['bos_ra'] = inpars['bos_ra']
    fc_params['acq']['bos_dec'] = inpars['bos_dec']
    fc_params['acq']['is_gs'] = inpars['is_gs']
    fc_params['acq']['gs'] = SkyCoord(inpars['gs_ra'], inpars['gs_dec'],
                                      frame='icrs',
                                      obstime=Time(fcm_m.obsdate),
                                      equinox='J2000')

    # "Observation" ... for ESPRESSO, just to know if Fiber B is on SKY or not
    fc_params['n_sci'] = 1
    fc_params['sci1'] = copy.deepcopy(espresso_sci_params)
    fc_params['sci1']['ins_mode'] = inpars['ins_mode']
    fc_params['sci1']['calsource_B'] = inpars['calsource_B']

    return fc_params
def getGAIAKnownMembers(name_mapper=None):
    """
    returns known member list from Gaia paper:
    http://simbad.u-strasbg.fr/simbad/sim-ref?bibcode=2018A%26A...616A..10G
    (Table 1a: Nearby Open Clusters)

    Returns:
        Pandas dataframe indexed by SourceID, and list of cluster names retrieved.
    """
    known_members = pd.read_csv('ftp://cdsarc.u-strasbg.fr/pub/cats/J/A+A/616/A10/tablea1a.dat',
                                delim_whitespace=True, header=None, index_col=None,
                                names=['SourceID', 'Cluster', 'RAdeg', 'DEdeg',
                                       'Gmag', 'plx', 'e_plx'])
    known_members.set_index('SourceID', inplace=True)
    cluster_names = known_members.Cluster.unique()

    # not sure how to deal with name mapping back to SIMBAD names
    # name_mapper = {'Pleiades': 'Pleiades'}
    # members['SimbadCluster'] = members.Cluster.apply(lambda c: name_mapper[c])

    # make skycoords objects for each member, using gaia epoch of 2015.5
    known_members['coords'] = SkyCoord(
        ra=np.array(known_members.RAdeg) * u.degree,
        dec=np.array(known_members.DEdeg) * u.degree,
        obstime=Time(2015.5, format='decimalyear'),  # Gaia ref epoch is 2015.5
        distance=coord.Distance(parallax=Quantity(np.array(known_members.plx) * u.mas)))

    return (known_members, cluster_names)
def _match_anamap_to_pointing(self, extra=1.1):
    """
    Match the ClusterPipe map according to the pointing list.

    Parameters
    ----------
    - extra (float): factor to apply to the cluster extent to have
    a bit of margin on the side of the map

    Outputs
    -------
    The ClusterPipe map properties are modified
    """
    # Compute the pointing barycenter and the FoV requested size
    list_ptg_coord = SkyCoord(self.obs_setup.coord)
    list_ptg_rad = self.obs_setup.rad
    center_ptg, fov_ptg = utilities.listcord2fov(list_ptg_coord, list_ptg_rad)

    # Account for the cluster
    fov = utilities.squeeze_fov(center_ptg, fov_ptg, self.cluster.coord,
                                self.cluster.theta_truncation, extra=extra)

    # Set the cluster map to match the pointing
    self.map_coord = center_ptg
    self.map_fov = fov
def queryCoordSimbad(raw_coord, search_radius):
    # Import(s)
    import numpy as np
    from astropy import coordinates as coord
    from astropy import units as u
    from astroquery.simbad import Simbad
    from astropy.coordinates.sky_coordinate import SkyCoord

    # Action
    c = SkyCoord(raw_coord, unit=(u.hourangle, u.deg))
    c = c.to_string('hmsdms')
    result_table = Simbad.query_region(coord.SkyCoord(c, frame='icrs'),
                                       radius=('0d0m' + str(search_radius) + 's'))
    names_col = result_table['MAIN_ID']
    id = str(names_col[0])[1:]
    return id
def __init__(self, silent=False, output_dir='./KESACCO',
             cluster=model_cluster.Cluster(silent=True),
             compact_source=model_compsource.CompactSource(),
             obs_setup=setup_observations.ObsSetup()):
    """
    Initialize the ClusterPipe object.

    Parameters
    ----------
    - silent (bool): set to True in order not to print information when running
    - output_dir (str): where to save outputs
    - cluster: Cluster object can be passed here directly
    - compact_source: CompactSource object can be passed here directly
    - obs_setup: ObsSetup object can be passed here directly
    """
    #---------- Print the code header at launch
    if not silent:
        clustpipe_title.show()

    #---------- Admin
    self.silent = silent
    self.output_dir = output_dir
    cluster.output_dir = output_dir

    #---------- Sky model
    self.cluster = cluster
    self.compact_source = compact_source

    #---------- Observations (including background)
    self.obs_setup = obs_setup

    #---------- Analysis parameters
    # Likelihood method related
    self.method_stack = True
    self.method_binned = True
    self.method_stat = 'DEFAULT'  # CSTAT, WSTAT, CHI2

    # Map related
    self.map_reso = 0.02 * u.deg
    self.map_coord = SkyCoord(0.0, 0.0, frame="icrs", unit="deg")
    self.map_fov = 10 * u.deg
    self.map_UsePtgRef = True  # Re-defines coordinates/FoV using pointings

    # Spectrum related
    self.spec_edisp = False
    self.spec_ebinalg = 'LOG'
    self.spec_enumbins = 10
    self.spec_emin = 50 * u.GeV
    self.spec_emax = 100 * u.TeV

    # Time related
    self.time_tmin = None
    self.time_tmax = None
    self.time_phase = None
def Counts_q(gal_id, gal_field, z, R=10 ** np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    # making a list of galaxies within a redshift range of given z, in the selected field,
    # and above the mass limit
    lst_gal = []
    data_tmp = data1[data1["field"] == gal_field]
    mask = (np.abs(data_tmp["z"] - z) <= delta_z) & (data_tmp["id"] != gal_id) & (data_tmp["lmass"] >= min_mass)
    # making list of satellites in total and list of satellites that are quiescent
    lst_gal = data_tmp[mask]
    lst_galr = lst_gal[
        (
            ((lst_gal["vj"] < 0.92) & (lst_gal["uv"] > 1.3))
            | ((lst_gal["vj"] > 0.8) & (lst_gal["vj"] < 1.6) & (lst_gal["uv"] > (0.88 * lst_gal["vj"] + 0.49)))
        )
    ]
    # finding the various aperture radii in arcminutes based on given z
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per_kpc = kpc_per_arcmin ** (-1)
    arcmin = arcmin_per_kpc * (R * u.kpc)
    # retrieving RA and DEC data of given galaxy
    p1 = data_tmp[(data_tmp["id"] == gal_id)]
    # calculating the angular separation to each galaxy in lst_gal
    sc0 = SkyCoord(p1["ra"] * u.deg, p1["dec"] * u.deg)
    sc1 = SkyCoord(lst_galr["ra"] * u.deg, lst_galr["dec"] * u.deg)
    sc2 = SkyCoord(lst_gal["ra"] * u.deg, lst_gal["dec"] * u.deg)
    sep1 = sc0.separation(sc1).to(u.arcmin)
    sep2 = sc0.separation(sc2).to(u.arcmin)
    # counting how many separations fall within each aperture radius in 'arcmin'
    nnr = np.empty(len(R))
    nn = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nnr[ii] = np.sum(sep1 <= r)
        nn[ii] = np.sum(sep2 <= r)
    lst_q = []
    # calculating the quiescent fraction and adding it to a list
    for i in range(len(nnr)):
        # flagging with a 5 (ignored later) the cases that would divide by zero
        if nn[i] == 0:
            lst_q.append(5)
        else:
            lst_q.append(nnr[i] / nn[i])
    return lst_q
def _apparent_position_in_true_coordinates(skycoord):
    """
    Convert Skycoord in GCRS frame into one in which RA and Dec are defined
    w.r.t. the true equinox and poles of the Earth
    """
    jd1, jd2 = get_jd12(skycoord.obstime, 'tt')
    _, _, _, _, _, _, _, rbpn = erfa.pn00a(jd1, jd2)
    return SkyCoord(
        skycoord.frame.realize_frame(skycoord.cartesian.transform(rbpn)))
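# Hypothetical usage sketch (not part of the original source); it assumes the module-level
# imports used above (SkyCoord, erfa, get_jd12) are available, plus astropy.units and Time.
import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.time import Time

gcrs_coord = SkyCoord(10.68 * u.deg, 41.27 * u.deg, frame='gcrs',
                      obstime=Time('2020-01-01'))
true_coord = _apparent_position_in_true_coordinates(gcrs_coord)
print(gcrs_coord.separation(true_coord).to(u.arcsec))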
def Counts(gal_id, gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    from astropy.coordinates.sky_coordinate import SkyCoord
    from astropy import units as u
    # making a list of galaxies within a redshift range of given z, in the selected field,
    # and above the mass limit
    lst_gal = []
    data_tmp = data_flagged[data_flagged['field'] == gal_field]
    # separating the satellite galaxies into four bins based on mass
    mask = ((np.abs(data_tmp['z_peak'] - z) <= delta_z) & (data_tmp['id'] != gal_id) &
            (data_tmp['lmass'] >= min_mass))
    lst_gal = data_tmp[mask]
    lst_gal1 = lst_gal[(lst_gal['lmass'] < 9.8)]
    lst_gal2 = lst_gal[((lst_gal['lmass'] < 10.3) & (lst_gal['lmass'] > 9.8))]
    lst_gal3 = lst_gal[((lst_gal['lmass'] < 10.8) & (lst_gal['lmass'] > 10.3))]
    lst_gal4 = lst_gal[((lst_gal['lmass'] < 11.8) & (lst_gal['lmass'] > 10.8))]
    # finding the various aperture radii in arcminutes based on given z
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per_kpc = kpc_per_arcmin**(-1)
    arcmin = arcmin_per_kpc * (R * u.kpc)
    # retrieving RA and DEC data of given galaxy
    p1 = data_tmp[(data_tmp['id'] == gal_id)]
    # calculating the angular separation to each galaxy in lst_gal
    sc0 = SkyCoord(p1['ra'] * u.deg, p1['dec'] * u.deg)
    sc1 = SkyCoord(lst_gal1['ra'] * u.deg, lst_gal1['dec'] * u.deg)
    sc2 = SkyCoord(lst_gal2['ra'] * u.deg, lst_gal2['dec'] * u.deg)
    sc3 = SkyCoord(lst_gal3['ra'] * u.deg, lst_gal3['dec'] * u.deg)
    sc4 = SkyCoord(lst_gal4['ra'] * u.deg, lst_gal4['dec'] * u.deg)
    sep1 = sc0.separation(sc1).to(u.arcmin)
    sep2 = sc0.separation(sc2).to(u.arcmin)
    sep3 = sc0.separation(sc3).to(u.arcmin)
    sep4 = sc0.separation(sc4).to(u.arcmin)
    # counting how many separations fall within each aperture radius in 'arcmin'
    nn1 = np.empty(len(R))
    nn2 = np.empty(len(R))
    nn3 = np.empty(len(R))
    nn4 = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nn1[ii] = np.sum(sep1 <= r)
        nn2[ii] = np.sum(sep2 <= r)
        nn3[ii] = np.sum(sep3 <= r)
        nn4[ii] = np.sum(sep4 <= r)
    # returning four lists of counts per radius, with lower numbers for the lower mass bins
    return [nn1, nn2, nn3, nn4]
def __init__(self, ra, dec, radius, parallax=None, load=None):
    """
    Initialize a CatalogProcessing object.
    Performs catalog queries of Gaia, 2MASS, and WISE.

    Arguments:
        ra: Right Ascension of the target
        dec: Declination of the target
        radius: radius of the target (deg if @parallax is not supplied, pc if it is)
        parallax: Parallax of target, in mas
    """
    if load is not None:
        info_out("Loading data from directory: " + load)
        try:
            self.skycoord, self.radius, self.parallax = self.load_target(load)
            self.gaia, self.tmass, self.allwise = self.load_tables(load)
        except:
            info_out("The specified directory does not exist:")
            data_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            info_out(data_dir + "/" + load)
    else:
        self.skycoord = SkyCoord(ra, dec)
        self.radius = radius
        self.parallax = parallax
        if self.parallax is not None:
            self.radius = self.get_radius()

        out("Initializing CatalogProcessing:")
        info_out("ra={}".format(ra))
        info_out("dec={}".format(dec))
        info_out("radius={} deg".format(self.radius))
        info_out("parallax={} mas".format(parallax))

        # Query catalogs and save the data to file
        info_out("Querying Gaia...")
        self.gaia = self.gaia_query()
        self.gaia.catalogs = ["gaia"]
        out("Done querying Gaia!\n")

        info_out("Querying AllWISE...")
        self.allwise = self.ir_query("allwise")
        out("Done querying AllWISE!\n")

        info_out("Querying 2MASS...")
        self.tmass = self.ir_query("tmass")
        out("Done querying 2MASS!\n")
def squeeze_fov(center_fov, fov_ini, center_cluster, theta_cluster, extra=1.1):
    """
    Take a map center and fov definition, and squeeze it so that the
    cluster inside remains entirely inside the map. In case the cluster
    is already bigger than the map, it should do nothing.

    Parameters
    ----------
    - center_fov (SkyCoord): center of the field of view
    - fov_ini (quantity, deg): initial FoV size
    - center_cluster (SkyCoord): center of the cluster
    - theta_cluster (quantity, deg): cluster max extent

    Outputs
    --------
    - fov (quantity, deg): size of the FoV

    """
    # Distance along RA between cluster center and map center
    sep_x = SkyCoord(center_cluster.icrs.ra, center_fov.icrs.dec,
                     frame='icrs').separation(center_fov)
    # Distance along Dec between cluster center and map center
    sep_y = SkyCoord(center_fov.icrs.ra, center_cluster.icrs.dec,
                     frame='icrs').separation(center_fov)

    # Compute FoV along x
    fov_x = 2 * np.amin([fov_ini.to_value('deg') / 2.0,
                         (sep_x + extra * theta_cluster).to_value('deg')])

    # Compute FoV along y
    fov_y = 2 * np.amin([fov_ini.to_value('deg') / 2.0,
                         (sep_y + extra * theta_cluster).to_value('deg')])

    # The FoV is squared, so take the max
    fov = np.amax([fov_x, fov_y]) * u.deg

    return fov
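# Hypothetical usage sketch for squeeze_fov (not part of the original source); the coordinates
# and sizes are arbitrary, and numpy and u are assumed to be imported in the function's module.
import astropy.units as u
from astropy.coordinates import SkyCoord

fov_center = SkyCoord(83.8 * u.deg, -5.4 * u.deg, frame='icrs')
cluster_center = SkyCoord(84.2 * u.deg, -5.1 * u.deg, frame='icrs')
fov = squeeze_fov(fov_center, 10 * u.deg, cluster_center, 1.5 * u.deg, extra=1.1)
print(fov)  # squeezed (square) field of view, in degrees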
def test_query_object(self):
    connHandler = DummyConnHandler()
    tapplus = TapPlus("http://test:1111/tap", connhandler=connHandler)
    tap = GaiaClass(connHandler, tapplus)
    # Launch response: we use default response because the query contains
    # decimals
    responseLaunchJob = DummyResponse()
    responseLaunchJob.set_status_code(200)
    responseLaunchJob.set_message("OK")
    jobDataFile = data_path('job_1.vot')
    jobData = utils.read_file_content(jobDataFile)
    responseLaunchJob.set_data(method='POST',
                               context=None,
                               body=jobData,
                               headers=None)
    # The query contains decimals: force default response
    connHandler.set_default_response(responseLaunchJob)
    sc = SkyCoord(ra=29.0, dec=15.0, unit=(u.degree, u.degree), frame='icrs')
    with pytest.raises(ValueError) as err:
        tap.query_object(sc)
    assert "Missing required argument: 'width'" in err.value.args[0]

    width = Quantity(12, u.deg)

    with pytest.raises(ValueError) as err:
        tap.query_object(sc, width=width)
    assert "Missing required argument: 'height'" in err.value.args[0]

    height = Quantity(10, u.deg)
    table = tap.query_object(sc, width=width, height=height)
    assert len(table) == 3, \
        "Wrong job results (num rows). Expected: %d, found %d" % \
        (3, len(table))
    self.__check_results_column(table, 'alpha', 'alpha', None, np.float64)
    self.__check_results_column(table, 'delta', 'delta', None, np.float64)
    self.__check_results_column(table, 'source_id', 'source_id', None,
                                np.object)
    self.__check_results_column(table, 'table1_oid', 'table1_oid', None,
                                np.int32)

    # by radius
    radius = Quantity(1, u.deg)
    table = tap.query_object(sc, radius=radius)
    assert len(table) == 3, \
        "Wrong job results (num rows). Expected: %d, found %d" % \
        (3, len(table))
    self.__check_results_column(table, 'alpha', 'alpha', None, np.float64)
    self.__check_results_column(table, 'delta', 'delta', None, np.float64)
    self.__check_results_column(table, 'source_id', 'source_id', None,
                                np.object)
    self.__check_results_column(table, 'table1_oid', 'table1_oid', None,
                                np.int32)
def test_database_specify(name, db_dict):
    # First check that at least some sesame mirror is up
    for url in sesame_url.get():
        if urllib.request.urlopen(url).getcode() == 200:
            break
    else:
        pytest.skip("All SESAME mirrors appear to be down, skipping "
                    "test_name_resolve.py:test_database_specify()...")

    for db in db_dict.keys():
        with sesame_database.set(db):
            icrs = SkyCoord.from_name(name)

        time.sleep(1)
def rand_counts(gal_field, z, R=10**np.linspace(1.2, 3.6, 13), delta_z=0.1, min_mass=9.415):
    # picking random location for galaxy number density
    if gal_field == 'AEGIS':
        ra1 = random.uniform(3.746000, 3.756821)
        dec1 = random.uniform(0.920312, 0.925897)
    elif gal_field == 'COSMOS':
        ra1 = random.uniform(2.619737, 2.620718)
        dec1 = random.uniform(0.038741, 0.043811)
    elif gal_field == 'GOODS-N':
        ra1 = random.uniform(3.298072, 3.307597)
        dec1 = random.uniform(1.084787, 1.087936)
    elif gal_field == 'GOODS-S':
        ra1 = random.uniform(0.925775, 0.929397)
        dec1 = random.uniform(-0.487098, -0.483591)
    elif gal_field == 'UDS':
        ra1 = random.uniform(0.59815, 0.602889)
        dec1 = random.uniform(-0.091376, -0.090305)

    from astropy.coordinates.sky_coordinate import SkyCoord
    from astropy import units as u

    # switching ra and dec to degrees
    ra1 = ra1*(180.0/math.pi)
    dec1 = dec1*(180.0/math.pi)

    # making a list of galaxies within a redshift range of given z, in the selected field,
    # and above the mass limit
    lst_gal = []
    data_tmp = data_flagged[data_flagged['field'] == gal_field]

    # binning the satellites based on mass
    mask = ((np.abs(data_tmp['z_peak'] - z) <= delta_z) & (data_tmp['lmass'] >= min_mass))
    lst_gal = data_tmp[mask]
    lst_gal1 = lst_gal[(lst_gal['lmass'] < 9.8)]
    lst_gal2 = lst_gal[((lst_gal['lmass'] < 10.3) & (lst_gal['lmass'] > 9.8))]
    lst_gal3 = lst_gal[((lst_gal['lmass'] < 10.8) & (lst_gal['lmass'] > 10.3))]
    lst_gal4 = lst_gal[((lst_gal['lmass'] < 11.8) & (lst_gal['lmass'] > 10.8))]

    # finding the various aperture radii in arcminutes based on given z
    kpc_per_arcmin = cosmo.kpc_proper_per_arcmin(z)
    arcmin_per_kpc = kpc_per_arcmin**(-1)
    arcmin = arcmin_per_kpc*(R*u.kpc)

    # calculating the angular separation to each galaxy in lst_gal
    sc0 = SkyCoord(ra1*u.deg, dec1*u.deg)
    sc1 = SkyCoord(lst_gal1['ra']*u.deg, lst_gal1['dec']*u.deg)
    sc2 = SkyCoord(lst_gal2['ra']*u.deg, lst_gal2['dec']*u.deg)
    sc3 = SkyCoord(lst_gal3['ra']*u.deg, lst_gal3['dec']*u.deg)
    sc4 = SkyCoord(lst_gal4['ra']*u.deg, lst_gal4['dec']*u.deg)
    sep1 = sc0.separation(sc1).to(u.arcmin)
    sep2 = sc0.separation(sc2).to(u.arcmin)
    sep3 = sc0.separation(sc3).to(u.arcmin)
    sep4 = sc0.separation(sc4).to(u.arcmin)

    # counting how many separations fall within each aperture radius in 'arcmin'
    nn1 = np.empty(len(R))
    nn2 = np.empty(len(R))
    nn3 = np.empty(len(R))
    nn4 = np.empty(len(R))
    for ii, r in enumerate(arcmin):
        nn1[ii] = np.sum(sep1 <= r)
        nn2[ii] = np.sum(sep2 <= r)
        nn3[ii] = np.sum(sep3 <= r)
        nn4[ii] = np.sum(sep4 <= r)

    # returning four lists of counts per radius with low end number for low mass bin
    return [nn1, nn2, nn3, nn4]