def axial_ratio_to_inclination_with_intrinsic(ratio, intrinsic_ratio):

    """
    This function converts an observed axial ratio into an inclination angle,
    assuming an oblate spheroid with the given intrinsic (edge-on) flattening.
    :param ratio: observed axial ratio (minor axis length / major axis length)
    :param intrinsic_ratio: intrinsic axial ratio (scale height / scale length)
    :return: the inclination angle as an Angle in degrees
    """

    # Check ratios
    if ratio >= 1.:
        raise ValueError("Axial ratio must be ratio of minor axis to major axis length")
    if intrinsic_ratio >= 1:
        raise ValueError("Intrinsic axial ratio must be ratio of scale height (smaller) to scale length (larger)")

    # Calculate logq and logq0
    logq = np.log10(ratio)
    logq0 = np.log10(intrinsic_ratio)

    # Calculate numerator and denominator of the formula
    numerator = 1. - 10 ** (2. * logq)
    denominator = 1. - 10 ** (2. * logq0)

    # Calculate the inclination angle
    sin2i = numerator / denominator
    inclination_radians = np.arcsin(np.sqrt(sin2i))
    inclination = Angle(inclination_radians, unit="rad")
    return inclination.to("deg")
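# A minimal usage sketch for axial_ratio_to_inclination_with_intrinsic above,
# assuming numpy and astropy.coordinates.Angle are imported as in the function.
# The ratio values are purely illustrative.
#
#   inclination = axial_ratio_to_inclination_with_intrinsic(ratio=0.5, intrinsic_ratio=0.1)
#   print(inclination)  # ~60.5 deg for these inputs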
def deg_to_sex(ra, dec):
    """
    Convert an RA and Dec position in decimal degrees to sexagesimal

    Parameters
    ----------
    ra : float
        RA in decimal degrees
    dec : float
        Dec in decimal degrees

    Returns
    -------
    ra : str
        RA in sexagesimal HH:MM:SS
    dec : str
        Dec in sexagesimal DD:MM:SS
    """
    from astropy import units as u
    from astropy.coordinates import Angle

    rad = Angle(ra * u.deg)
    decd = Angle(dec * u.deg)
    ra = rad.to_string(unit=u.hour, sep=':')
    dec = decd.to_string(unit=u.deg, sep=':')
    return ra, dec
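# Quick usage sketch for deg_to_sex above; the coordinates are the M31
# position, used purely as an example.
#
#   ra_str, dec_str = deg_to_sex(10.68458, 41.26917)
#   print(ra_str, dec_str)  # e.g. '0:42:44.2992' '41:16:09.012'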
def test_3rd_body_Curtis(test_params):
    # based on example 12.11 from Howard Curtis
    body = test_params['body']
    with solar_system_ephemeris.set('builtin'):
        j_date = 2454283.0 * u.day
        tof = (test_params['tof']).to(u.s).value
        body_r = build_ephem_interpolant(
            body, test_params['period'],
            (j_date, j_date + test_params['tof']), rtol=1e-2)

        epoch = Time(j_date, format='jd', scale='tdb')
        initial = Orbit.from_classical(Earth, *test_params['orbit'], epoch=epoch)
        r, v = cowell(initial, np.linspace(0, tof, 400), rtol=1e-10, ad=third_body,
                      k_third=body.k.to(u.km**3 / u.s**2).value,
                      third_body=body_r)

        incs, raans, argps = [], [], []
        for ri, vi in zip(r, v):
            angles = Angle(rv2coe(Earth.k.to(u.km**3 / u.s**2).value, ri, vi)[2:5] * u.rad)  # inc, raan, argp
            angles = angles.wrap_at(180 * u.deg)
            incs.append(angles[0].value)
            raans.append(angles[1].value)
            argps.append(angles[2].value)

        # averaging over 5 last values in the way Curtis does
        inc_f, raan_f, argp_f = np.mean(incs[-5:]), np.mean(raans[-5:]), np.mean(argps[-5:])

        assert_quantity_allclose(
            [(raan_f * u.rad).to(u.deg) - test_params['orbit'][3],
             (inc_f * u.rad).to(u.deg) - test_params['orbit'][2],
             (argp_f * u.rad).to(u.deg) - test_params['orbit'][4]],
            [test_params['raan'], test_params['inc'], test_params['argp']],
            rtol=1e-1)
def __init__(self, distance, inclination, azimuth, position_angle, pixels_x,
             pixels_y, center_x, center_y, field_x, field_y):

    """
    This function creates the projection from its basic attributes.
    :param distance: distance to the object
    :param inclination: inclination angle (in degrees)
    :param azimuth: azimuth angle (in degrees)
    :param position_angle: position angle (in degrees)
    :param pixels_x: number of pixels in the x direction
    :param pixels_y: number of pixels in the y direction
    :param center_x: x coordinate of the center
    :param center_y: y coordinate of the center
    :param field_x: physical size of the field in the x direction
    :param field_y: physical size of the field in the y direction
    """

    self.distance = distance
    self.inclination = Angle(inclination, "deg")
    self.azimuth = Angle(azimuth, "deg")
    self.position_angle = Angle(position_angle, "deg")
    self.pixels_x = pixels_x
    self.pixels_y = pixels_y
    self.center_x = center_x
    self.center_y = center_y
    self.field_x_physical = field_x
    self.field_y_physical = field_y
def reformat_catalog(coordinates=True, chi=True, sharp=True, bright=True):
    """Format RA, Dec as floats in the final catalog"""
    t = Table.read('../data/extract.txt', format='ascii.commented_header')

    if coordinates:
        ra = Angle((t['rah'], t['ram'], t['ras']), unit=u.hour)
        dec = Angle((t['decd'], t['decm'], t['decs']), unit=u.deg)
        t.remove_columns(['rah', 'ram', 'ras', 'decd', 'decm', 'decs'])
        add_npcolumn(t, 'ra', ra.to(u.deg), index=0)
        add_npcolumn(t, 'dec', dec, index=1)

    if bright:
        ind = ((t['g']<18) | (t['i']<18)) & (t['ichi']<90)
        t_bright = t[ind]
        t = t[~ind]

    if chi:
        #ind = (t['gchi']<1.4) & (t['ichi']<1.4)
        ind = (t['gchi']<1.5) & (t['ichi']<1.5)
        t = t[ind]

    if sharp:
        #ind = (np.abs(t['gsharp'])<0.2) & (np.abs(t['isharp'])<0.2)
        ind = (np.abs(t['gsharp'])<0.3) & (np.abs(t['isharp'])<0.3)
        t = t[ind]

    if bright:
        t = astropy.table.vstack((t, t_bright))

    t.pprint()
    t.write('../data/ext_catalog.txt', format='ascii.commented_header')
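# Hedged sketch of the sexagesimal-combination idea used in reformat_catalog:
# hour/minute/second pieces can also be joined into a string and parsed by
# Angle. The values below are made up.
#
#   import astropy.units as u
#   from astropy.coordinates import Angle
#
#   rah, ram, ras = 2, 40, 0.5
#   ra = Angle(f"{rah}h{ram}m{ras}s")
#   print(ra.to(u.deg))  # ~40.002 deg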
def axial_ratio_to_inclination_mosenkov(ratio, hubble_stage):

    """
    This function converts an observed axial ratio into an inclination angle,
    using the intrinsic flattening appropriate for the given Hubble stage.
    :param ratio: observed axial ratio (minor axis length / major axis length)
    :param hubble_stage: Hubble stage (T) of the galaxy
    :return: the inclination angle as an Angle in degrees
    """

    # Check that axial ratio is smaller than one!
    if ratio >= 1.:
        raise ValueError("Axial ratio must be ratio of minor axis to major axis length")

    # From Mosenkov et al., 2017 (Appendix)

    # Calculate logq and logq0
    logq = np.log10(ratio)
    logq0 = get_logq0_mosenkov(hubble_stage)

    # Calculate numerator and denominator of the formula
    numerator = 1. - 10**(2. * logq)
    denominator = 1. - 10**(2. * logq0)

    # Calculate the inclination angle
    sin2i = numerator / denominator
    inclination_radians = np.arcsin(np.sqrt(sin2i))
    inclination = Angle(inclination_radians, unit="rad")
    return inclination.to("deg")
def plot_vhs_tiles(table=None, ra=None, dec=None, wrap_ra24hr=False, PA=90.0,
                   overplot=True, savefig=True):
    """
    table is astropy table
    ra, dec in degrees
    """
    plot_polygon = True

    # NOTE: the original snippet used xdata/ydata before assigning them; they
    # are assumed here to be the tile centre coordinates passed in as ra/dec.
    xdata = ra
    ydata = dec

    if wrap_ra24hr:
        angle = Angle(xdata * u.deg)
        angle.wrap_at('180d', inplace=True)
        xdata = angle.degree

    # assumes filename is in table.meta
    print('plotting: ', table.meta['filename'])
    filename = table.meta['filename']

    if not plot_polygon:
        plt.plot(xdata, ydata, 's', ms=7.0, color='yellow',
                 markeredgecolor='yellow', alpha=0.1,
                 label='OB Progress (submitted)\n' + filename)

    if plot_polygon:
        plt.plot(xdata, ydata, '.', ms=7.0, color='yellow',
                 markeredgecolor='yellow', alpha=0.1,
                 label='OB Progress (submitted):' + filename)

        # filled polygon
        ra = xdata
        dec = ydata
        for i in range(len(ra)):
            if i == 0 or i == len(ra):
                print('i: ', i)
            ra_poly, dec_poly = plt_tile.mk_polytile(ra_cen=ra[i], dec_cen=dec[i],
                                                     coverage='twice', PA=PA)
            xypolygon = np.column_stack((ra_poly, dec_poly))
            if i == 0 or i == len(ra):
                print('xypolygon.shape: ', xypolygon.shape)
                print('xypolygon: ', xypolygon)
            polygon = Polygon(xypolygon, True, color='green', alpha=0.1)
            plt.gca().add_patch(polygon)

    #print(ob_table.meta)
    plt.suptitle(dqcfile_vhs)
    plt.legend(fontsize='small')

    figname = 'vhs_des_check_progress_vhs_obprogress_' + datestamp + '.png'
    print('Saving: ' + figname)
    plt.savefig(plotdir + figname)
    plt.suptitle('')
def deg_to_sex(ra, dec):
    from astropy import units as u
    from astropy.coordinates import Angle

    rad = Angle(ra * u.deg)
    decd = Angle(dec * u.deg)
    ra = rad.to_string(unit=u.hour, sep=':')
    dec = decd.to_string(unit=u.deg, sep=':')
    return ra, dec
def add_object_pos_airmass(header, history=False):
    """
    Add object information, such as RA/Dec and airmass.

    Parameters
    ----------
    header : astropy.io.fits.Header
        FITS header to be modified.
    history : bool
        If `True`, write history for each keyword changed.

    Notes
    -----
    Has side effect of setting feder site JD to JD-OBS, which means it
    also assumes JD.value has been set.
    """
    # not sure why coverage is not picking up both branches, but it is not, so
    # marking it no cover
    if feder.JD_OBS.value is None:  # pragma: no cover
        raise ValueError('Need to set JD_OBS.value before calling.')

    try:
        feder.RA.set_value_from_header(header)
    except ValueError:
        raise ValueError("No RA is present.")

    feder.DEC.set_value_from_header(header)
    feder.RA.value = feder.RA.value.replace(' ', ':')
    feder.DEC.value = feder.DEC.value.replace(' ', ':')

    obj_coord2 = SkyCoord(feder.RA.value, feder.DEC.value,
                          unit=(u.hour, u.degree), frame='fk5')

    obstime = Time(feder.MJD_OBS.value, format='mjd')
    alt_az = obj_coord2.transform_to(AltAz(obstime=obstime, location=feder.site))

    feder.ALT_OBJ.value = round(alt_az.alt.degree, 5)
    feder.AZ_OBJ.value = round(alt_az.az.degree, 5)
    feder.AIRMASS.value = round(1 / np.cos(np.pi / 2 - alt_az.alt.radian), 3)

    # TODO: replace the LST calculation
    LST = _lst_from_obstime(obstime)
    HA = LST.hour - obj_coord2.ra.hour
    HA = Angle(HA, unit=u.hour)

    feder.HA.value = HA.to_string(unit=u.hour, sep=':')

    for keyword in feder.keywords_for_light_files:
        if keyword.value is not None:
            keyword.add_to_header(header, history=history)
            logger.info(keyword.history_comment())
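# Minimal sketch of the hour-angle step inside add_object_pos_airmass, with
# hard-coded LST and RA stand-ins (the real code derives LST from the
# observation time via _lst_from_obstime).
#
#   import astropy.units as u
#   from astropy.coordinates import Angle
#
#   lst = Angle('13h10m00s')
#   ra = Angle('11h30m00s')
#   ha = Angle(lst.hour - ra.hour, unit=u.hour)
#   print(ha.to_string(unit=u.hour, sep=':'))  # 1:40:00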
def test_3rd_body_Curtis(test_params):
    # based on example 12.11 from Howard Curtis
    body = test_params["body"]
    with solar_system_ephemeris.set("builtin"):
        j_date = 2454283.0 * u.day
        tof = (test_params["tof"]).to(u.s).value
        body_r = build_ephem_interpolant(
            body,
            test_params["period"],
            (j_date, j_date + test_params["tof"]),
            rtol=1e-2,
        )

        epoch = Time(j_date, format="jd", scale="tdb")
        initial = Orbit.from_classical(Earth, *test_params["orbit"], epoch=epoch)
        rr, vv = cowell(
            Earth.k,
            initial.r,
            initial.v,
            np.linspace(0, tof, 400) * u.s,
            rtol=1e-10,
            ad=third_body,
            k_third=body.k.to(u.km ** 3 / u.s ** 2).value,
            third_body=body_r,
        )

        incs, raans, argps = [], [], []
        for ri, vi in zip(rr.to(u.km).value, vv.to(u.km / u.s).value):
            angles = Angle(
                rv2coe(Earth.k.to(u.km ** 3 / u.s ** 2).value, ri, vi)[2:5] * u.rad
            )  # inc, raan, argp
            angles = angles.wrap_at(180 * u.deg)
            incs.append(angles[0].value)
            raans.append(angles[1].value)
            argps.append(angles[2].value)

        # averaging over 5 last values in the way Curtis does
        inc_f, raan_f, argp_f = (
            np.mean(incs[-5:]),
            np.mean(raans[-5:]),
            np.mean(argps[-5:]),
        )

        assert_quantity_allclose(
            [
                (raan_f * u.rad).to(u.deg) - test_params["orbit"][3],
                (inc_f * u.rad).to(u.deg) - test_params["orbit"][2],
                (argp_f * u.rad).to(u.deg) - test_params["orbit"][4],
            ],
            [test_params["raan"], test_params["inc"], test_params["argp"]],
            rtol=1e-1,
        )
def formatter(self, values, spacing):
    if not isinstance(values, u.Quantity) and values is not None:
        raise TypeError("values should be a Quantities array")

    if len(values) > 0:
        if self.format is None:
            spacing = spacing.to(u.arcsec).value
            if spacing > 3600:
                fields = 1
                precision = 0
            elif spacing > 60:
                fields = 2
                precision = 0
            elif spacing > 1:
                fields = 3
                precision = 0
            else:
                fields = 3
                precision = -int(np.floor(np.log10(spacing)))
            decimal = False
            unit = u.degree
        else:
            fields = self._fields
            precision = self._precision
            decimal = self._decimal
            unit = self._unit

        if decimal:
            sep = None
        elif self._sep is not None:
            sep = self._sep
        else:
            if unit == u.degree:
                if rcParams['text.usetex']:
                    deg = r'$^\circ$'
                else:
                    deg = six.u('\xb0')
                sep = (deg, "'", '"')
            else:
                sep = ('h', 'm', 's')

        angles = Angle(values)
        string = angles.to_string(unit=unit,
                                  precision=precision,
                                  decimal=decimal,
                                  fields=fields,
                                  sep=sep).tolist()
        return string
    else:
        return []
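# Standalone sketch of the Angle.to_string behaviour the formatter above
# relies on; the separator and precision values here are illustrative.
#
#   import astropy.units as u
#   from astropy.coordinates import Angle
#
#   angles = Angle([10.2345, -3.5] * u.deg)
#   print(angles.to_string(unit=u.deg, sep=('d', 'm', 's'), precision=1))
#   # ['10d14m04.2s' '-3d30m00.0s']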
def __init__(self, skyevent_dict):
    d = skyevent_dict
    self.position = SkyCoord(d['ra'], d['dec'], unit='deg')
    self.position_error = Angle(d['error'], unit='deg')
    self.timestamp = None
    if d.get('time'):
        self.timestamp = iso8601.parse_date(d['time'])
def __init__(self, energy, offset, data=None, data_units=""):
    self.energy = EnergyBounds(energy)
    self.offset = Angle(offset)
    if data is None:
        self.data = Quantity(np.zeros((len(energy) - 1, len(offset) - 1)), data_units)
    else:
        self.data = Quantity(data, data_units)
def time_scatterplot(time, peakr, flux, proto, num, t_init, posx, posy):
    #Convert the decimal degrees to HMS strings.
    ra_deg = Angle(str(posx)+'d')
    dec_deg = Angle(str(posy)+'d')
    ra_hms = ra_deg.to_string(unit=units.hour, sep=':', precision=2)
    dec_dms = dec_deg.to_string(unit=units.degree, sep=':', precision=2)

    #Set the x-range.
    x_hi = max(time)*1.1
    x_lo = -max(time)*0.1
    plt.xlim(x_lo, x_hi)

    #Set the y-range.
    y_hi = 1.25
    y_lo = 0.75
    if np.amax(peakr) > 1.25:
        y_hi = np.amax(peakr)*1.5
    ##fi
    if np.amax(peakr) < 0.75:
        y_lo = min(peakr)*0.7
    ##fi
    plt.ylim(y_lo, y_hi)

    #Define the labels.
    plt.xlabel('Time (days + '+t_init+')')

    #Write important details about the source on the plot.
    plt.annotate('Source Number: '+str(num), xy = (0.05,0.2),
                 xycoords = 'axes fraction', size = 'small')
    plt.annotate('Peak Flux: '+'{:.4}'.format(flux)+' (Jy/Beam)', xy = (0.05,0.15),
                 xycoords = 'axes fraction', size = 'small')
    plt.annotate('Class: '+proto, xy = (0.05,0.1),
                 xycoords = 'axes fraction', size = 'small')
    plt.annotate('RA: '+ra_hms+', Dec: '+dec_dms, xy = (0.05,0.05),
                 xycoords = 'axes fraction', size = 'small')

    #Plot the data.
    plt.scatter(time, peakr, alpha = 0.6, s = 60, marker = 'o', facecolor = 'r')

    #Plot reference lines.
    plt.plot((x_lo,x_hi),(1.0,1.0), '-k')
    plt.plot((x_lo,x_hi),(0.9,0.9), '--k')
    plt.plot((x_lo,x_hi),(1.1,1.1), '--k')
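# The decimal-degree to sexagesimal-string conversion used at the top of
# time_scatterplot, shown in isolation; the position is arbitrary.
#
#   from astropy import units
#   from astropy.coordinates import Angle
#
#   ra_hms = Angle('83.63d').to_string(unit=units.hour, sep=':', precision=2)
#   dec_dms = Angle('22.01d').to_string(unit=units.degree, sep=':', precision=2)
#   print(ra_hms, dec_dms)  # 5:34:31.20 22:00:36.00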
def sky_time(coord, time, rise_set=False, limalt=0*u.deg,
             site=EarthLocation(0.0, 0.0, 0.0),
             fuse=TimeDelta(0, format='sec', scale='tai')):
    """Compute the culmination (and optionally rise/set) times of coordinates at a site."""
    if type(limalt) != u.quantity.Quantity:
        limalt = limalt*u.deg
    if time.isscalar:
        time = Time([time.iso], format='iso', scale='utc')
    coord = coord_pack(coord)
    timeut = time - fuse
    if len(time.shape) == 1:
        timeut = Time([[i] for i in timeut.jd], format='jd', scale='utc')
    timeut.delta_ut1_utc = 0
    timeut.location = site
    ra, ts = mesh_coord(coord, timeut[:,0])
    dif_h_sid = Angle(ra-ts)
    dif_h_sid.wrap_at('180d', inplace=True)
    dif_h_sol = dif_h_sid * (23.0 + 56.0/60.0 + 4.0916/3600.0) / 24.0
    dif = TimeDelta(dif_h_sol.hour*u.h, scale='tai')
    culminacao = timeut + dif
    culminacao.delta_ut1_utc = 0
    culminacao.location = site
    if (site.latitude > 0*u.deg):
        alwaysup = np.where(coord.dec >= 90*u.deg - site.latitude + limalt)
        neverup = np.where(coord.dec <= -(90*u.deg - site.latitude - limalt))
    else:
        alwaysup = np.where(coord.dec <= -(90*u.deg + site.latitude + limalt))
        neverup = np.where(coord.dec >= 90*u.deg + site.latitude - limalt)
    if rise_set:
        hangle_lim = np.arccos((np.cos(90.0*u.deg-limalt) - np.sin(coord.dec)*np.sin(site.latitude)) /
                               (np.cos(coord.dec)*np.cos(site.latitude)))
        tsg_lim = Angle(ra + hangle_lim)
        dtsg_lim = tsg_lim - culminacao.sidereal_time('mean')
        dtsg_lim.wrap_at(360 * u.deg, inplace=True)
        dtsg_lim_sol = dtsg_lim * (23.0 + 56.0/60.0 + 4.0916/3600.0) / 24.0
        a = np.where(np.isnan(dtsg_lim_sol))
        dtsg_lim_sol[a] = Angle([48.0]*len(a[0])*u.hour)
        dtsg_np = TimeDelta((dtsg_lim_sol.hour*u.h))
        sunrise = culminacao - dtsg_np
        sunset = culminacao + dtsg_np
        culminacao = culminacao + fuse
        sunrise = sunrise + fuse
        sunset = sunset + fuse
        return culminacao, sunrise, sunset, alwaysup, neverup
    culminacao = culminacao + fuse
    return culminacao, alwaysup, neverup
def __init__(self, energy_lo, energy_hi, offset, rad_lo, rad_hi, psf_value,
             energy_thresh_lo=Quantity(0.1, 'TeV'),
             energy_thresh_hi=Quantity(100, 'TeV')):
    self.energy_lo = energy_lo.to('TeV')
    self.energy_hi = energy_hi.to('TeV')
    self.offset = Angle(offset)
    self.rad_lo = Angle(rad_lo)
    self.rad_hi = Angle(rad_hi)
    self.psf_value = psf_value.to('sr^-1')
    self.energy_thresh_lo = energy_thresh_lo.to('TeV')
    self.energy_thresh_hi = energy_thresh_hi.to('TeV')
def averaging_params(max_decorr=0.1, frequency=(250. * 1e6 * units.Hz),
                     baseline=(870 * units.m),
                     corr_FoV=Angle(90., units.degree),
                     latitude=hera_latitude, verbose=False,
                     wrap_len=default_wrap_len):
    wavelength = const.c / frequency.to(1 / units.s)
    earth_rot_speed = (Angle(360, units.deg) / units.sday).to(units.arcminute / units.s)

    params = {}

    max_resolution = Angle(np.arcsin(wavelength / baseline), units.radian)
    params['max_resolution'] = max_resolution
    if verbose:
        print_desc_val('Max resolution:', max_resolution.to(units.arcminute), wrap_len)

    integration_time = (max_resolution * max_decorr).to(units.arcminute) / earth_rot_speed
    params['integration_time'] = integration_time
    if verbose:
        print_desc_val('Max integration time required to keep the decorrelation '
                       'due to time integrating under max_decorr on the longest '
                       'baselines:', integration_time, wrap_len)

    channel_width = ((const.c * max_decorr)
                     / (baseline * np.sin(corr_FoV.to(units.rad)))).to(units.kHz)
    params['channel_width'] = channel_width
    if verbose:
        print_desc_val('Max channel width to keep the decorrelation due to channel '
                       'width under max_decorr for a {fov} degree correlator '
                       'FoV on the longest baselines:'.format(fov=corr_FoV.degree),
                       channel_width, wrap_len)

    # After fringe stopping, the rotation is from the sky rotating in the beam
    # This is slower, so we can sum in time to longer integrations
    # (this summing does cause more decorrelation but it's better than without
    # fringe stopping and it decreases data rates)
    # NOTE: the original body referenced the module-level hera_latitude here;
    # the latitude argument is used instead so the parameter actually takes effect.
    fringe_stopped_int_time = ((max_resolution.to(units.arcminute) * max_decorr)
                               / (np.sin(corr_FoV.radian) * earth_rot_speed
                                  * abs(np.sin(latitude))))
    params['fringe_stopped_int_time'] = fringe_stopped_int_time
    if verbose:
        print_desc_val('Max integration time to keep the decorrelation due to time '
                       'integrating after fringe stopping under max_decorr for a {fov} '
                       'degree correlator FoV on the longest baselines:'.
                       format(fov=corr_FoV.degree), fringe_stopped_int_time, wrap_len)

    return params
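# Sanity-check sketch for the sidereal rotation rate used in averaging_params;
# only astropy is required, and u.sday is the sidereal-day unit.
#
#   from astropy import units
#   from astropy.coordinates import Angle
#
#   earth_rot_speed = (Angle(360, units.deg) / units.sday).to(units.arcminute / units.s)
#   print(earth_rot_speed)  # ~0.2507 arcmin / s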
def formatter(self, values, spacing):
    if len(values) > 0:
        if self.format is None:
            spacing = spacing.to(u.arcsec).value
            if spacing > 3600:
                fields = 1
                precision = 0
            elif spacing > 60:
                fields = 2
                precision = 0
            elif spacing > 1:
                fields = 3
                precision = 0
            else:
                fields = 3
                precision = -int(np.floor(np.log10(spacing)))
            decimal = False
            unit = u.degree
        else:
            fields = self._fields
            precision = self._precision
            decimal = self._decimal
            unit = self._unit

        if decimal:
            sep = None
        else:
            if unit == u.degree:
                sep = (six.u('\xb0'), "'", '"')[:fields]
            else:
                sep = ('h', 'm', 's')[:fields]

        angles = Angle(np.asarray(values), unit=u.deg)
        string = angles.to_string(unit=unit,
                                  precision=precision,
                                  decimal=decimal,
                                  fields=fields,
                                  sep=sep).tolist()
        return string
    else:
        return []
def get_dispersion_PA(self):
    from astropy.coordinates import Angle
    import astropy.units as u

    ### extra tilt of the 1st order grism spectra
    x0 = self.conf.conf['BEAMA']
    dy_trace, lam_trace = self.conf.get_beam_trace(x=507, y=507, dx=x0, beam='A')
    extra = np.arctan2(dy_trace[1]-dy_trace[0], x0[1]-x0[0])/np.pi*180

    # h = self.im_header
    # pa = Angle((-np.arctan2(h['CD2_2'], h['CD2_1'])/np.pi*180-extra)*u.deg)

    ### Distorted WCS
    crpix = self.flt_wcs.wcs.crpix
    xref = [crpix[0], crpix[0]+1]
    yref = [crpix[1], crpix[1]]
    r, d = self.all_pix2world(xref, yref)
    pa = Angle((extra+np.arctan2(np.diff(r), np.diff(d))[0]/np.pi*180)*u.deg)

    self.dispersion_PA = pa.wrap_at(360*u.deg).value
def test_angle_arrays():
    """
    Test arrays values with Angle objects.
    """
    # Tests incomplete
    a1 = Angle([0, 45, 90, 180, 270, 360, 720.], unit=u.degree)
    npt.assert_almost_equal([0., 45., 90., 180., 270., 360., 720.], a1.value)

    a2 = Angle(np.array([-90, -45, 0, 45, 90, 180, 270, 360]), unit=u.degree)
    npt.assert_almost_equal([-90, -45, 0, 45, 90, 180, 270, 360], a2.value)

    a3 = Angle(["12 degrees", "3 hours", "5 deg", "4rad"])
    npt.assert_almost_equal([12., 45., 5., 229.18311805], a3.value)
    assert a3.unit == u.degree

    a4 = Angle(["12 degrees", "3 hours", "5 deg", "4rad"], u.radian)
    npt.assert_almost_equal(a4.degree, a3.value)
    assert a4.unit == u.radian

    a5 = Angle([0, 45, 90, 180, 270, 360], unit=u.degree)
    a6 = a5.sum()
    npt.assert_almost_equal(a6.value, 945.0)
    assert a6.unit is u.degree

    with pytest.raises(TypeError):
        # Arrays where the elements are Angle objects are not supported -- it's
        # really tricky to do correctly, if at all, due to the possibility of
        # nesting.
        a7 = Angle([a1, a2, a3], unit=u.degree)

    a8 = Angle(["04:02:02", "03:02:01", "06:02:01"], unit=u.degree)
    npt.assert_almost_equal(a8.value, [4.03388889, 3.03361111, 6.03361111])

    a9 = Angle(np.array(["04:02:02", "03:02:01", "06:02:01"]), unit=u.degree)
    npt.assert_almost_equal(a9.value, a8.value)

    with pytest.raises(u.UnitsError):
        a10 = Angle(["04:02:02", "03:02:01", "06:02:01"])
def add_plugmap_info(plate_info, plugmap_keys):
    import yanny

    plate = plate_info['plate']
    mjd = plate_info['mjd']

    plan_name = os.path.join(plate_info['bossdir'], str(plate),
                             'spPlancomb-%s-%s.par' % (plate, mjd))
    plan = yanny.yanny(plan_name)

    # look up plugmap filename
    unique_mapnames = set(zip(plan['SPEXP']['mjd'], plan['SPEXP']['mapname']))
    (mapmjd, mapname) = unique_mapnames.pop()
    plugmap_name = os.path.join(plate_info['speclog'], str(mapmjd),
                                'plPlugMapM-%s.par' % mapname)
    plugmap = yanny.yanny(plugmap_name)

    plate_info['mapmjd'] = mapmjd
    plate_info['mapname'] = mapname

    # Process plugmap header keywords
    for keyword in plugmap_keys:
        plate_info[keyword] = plugmap[keyword]

    design_ra = Angle(float(plugmap['raCen']), unit=u.degree)
    design_dec = Angle(float(plugmap['decCen']), unit=u.degree)
    design_ha = Angle(float(plugmap['haMin']), unit=u.degree)
    design_alt, design_az = equatorial_to_horizontal(design_ra, design_dec,
                                                     apolat, design_ha)

    plate_info['design_ra'] = design_ra.to(u.degree).value
    plate_info['design_dec'] = design_dec.to(u.degree).value
    plate_info['design_ha'] = design_ha.to(u.degree).value
    plate_info['design_alt'] = design_alt.to(u.degree).value
    plate_info['design_az'] = design_az.to(u.degree).value
def __init__(self, offset, dp_domega, spline_kwargs=DEFAULT_PSF_SPLINE_KWARGS):
    self._offset = Angle(offset).to("radian")
    self._dp_domega = Quantity(dp_domega).to("sr^-1")

    assert self._offset.ndim == self._dp_domega.ndim == 1
    assert self._offset.shape == self._dp_domega.shape

    # Store input arrays as quantities in default internal units
    self._dp_dtheta = (2 * np.pi * self._offset * self._dp_domega).to("radian^-1")
    self._spline_kwargs = spline_kwargs

    self._compute_splines(spline_kwargs)
def __init__(self, rad, dp_domega, spline_kwargs=DEFAULT_PSF_SPLINE_KWARGS):
    self._rad = Angle(rad).to('radian')
    self._dp_domega = Quantity(dp_domega).to('sr^-1')

    assert self._rad.ndim == self._dp_domega.ndim == 1
    assert self._rad.shape == self._dp_domega.shape

    # Store input arrays as quantities in default internal units
    self._dp_dr = (2 * np.pi * self._rad * self._dp_domega).to('radian^-1')
    self._spline_kwargs = spline_kwargs

    self._compute_splines(spline_kwargs)
def step(self, uniform_factor=1):
    """
    Randomly steps all parameters with uncertainties in parfile.
    """
    n_dim = len(self.fit_parameters)
    for parameter in self.fit_parameters:
        if hasattr(self, parameter + 'err'):
            value = getattr(self, parameter)
            err = getattr(self, parameter + 'err') * uniform_factor / n_dim
            if err != 0.:
                if parameter == 'RAJ':
                    ra = Angle(getattr(self, 'RAJ'), unit=u.hour)
                    err = getattr(self, 'RAJerr') / 3600
                    ra_new = ra.deg + err * np.random.uniform(-1., 1.)
                    ra_new = Angle(ra_new, unit=u.deg)
                    setattr(self, parameter,
                            str(ra_new.to_string(unit=u.hour, sep=':', precision=10)))
                elif parameter == 'DECJ':
                    dec = Angle(getattr(self, 'DECJ'), unit=u.deg)
                    err = getattr(self, 'DECJerr') / 3600
                    dec_new = dec.deg + err * np.random.uniform(-1., 1.)
                    dec_new = Angle(dec_new, unit=u.deg)
                    setattr(self, parameter,
                            str(dec_new.to_string(unit=u.deg, sep=':', precision=10)))
                elif parameter == 'SINI':
                    cosi = np.sqrt(1 - getattr(self, 'SINI')**2)
                    # note: the original reuses 'DECJerr' here; 'SINIerr' may
                    # have been intended
                    err = getattr(self, 'DECJerr') / 3600
                    cosi += err * np.random.uniform(-1., 1.)
                    setattr(self, parameter, np.sqrt(1.-cosi**2))
                else:
                    setattr(self, parameter, value + err * np.random.uniform(-1., 1.))
                setattr(self, parameter + 'flag', 0)
def fit_arrays(sci, wht, seg, psf, id=None, platescale=0.06, exptime=0,
               path='/tmp/', galfit_exec='galfit', gaussian_guess=False,
               components=[GalfitSersic()], recenter=True, psf_sample=1):

    rms = 1/np.sqrt(wht)#*exptime
    if exptime > 0:
        rms = np.sqrt((rms*exptime)**2+sci*exptime*(sci > 0))/exptime

    rms[wht == 0] = 1e30

    if id is not None:
        mask = ((seg > 0) & (seg != id)) | (wht == 0)
    else:
        mask = wht == 0

    sh = sci.shape[0]

    fp = open(path+'galfit.feedme','w')
    fp.write(GALFIT_IMAGES.format(input=path+'gf_sci.fits',
                                  output=path+'gf_out.fits',
                                  sigma=path+'gf_rms.fits',
                                  psf=path+'gf_psf.fits',
                                  mask=path+'gf_mask.fits',
                                  xmax=sh, ymax=sh, sh=sh,
                                  ps=platescale, psf_sample=psf_sample))

    if gaussian_guess:
        fit, q, theta = fit_gauss(sci)
        from astropy.coordinates import Angle
        import astropy.units as u

    for comp in components:
        if recenter:
            comp.set(pos=[sh/2., sh/2.])

        if gaussian_guess:
            comp.set(q=q, pa=Angle.wrap_at(theta*u.rad, 360*u.deg).to(u.deg).value)

        fp.write(str(comp))

    fp.close()

    pyfits.writeto(path+'gf_sci.fits', data=sci, overwrite=True)
    pyfits.writeto(path+'gf_rms.fits', data=rms, overwrite=True)
    pyfits.writeto(path+'gf_mask.fits', data=mask*1, overwrite=True)
    pyfits.writeto(path+'gf_psf.fits', data=psf, overwrite=True)

    for ext in ['out', 'model']:
        if os.path.exists(path+'gf_{0}.fits'.format(ext)):
            os.remove(path+'gf_{0}.fits'.format(ext))

    os.system('{0} {1}/galfit.feedme'.format(galfit_exec, path))
class SkyEvent(object):
    """
    Represents universal attributes of an observed event on sky.

    I.e. the most basic details that we expect to find in all packets
    reporting events with sky-positions.

    Attributes:
        position (:class:`astropy.coordinates.SkyCoord`): Best-estimate
            sky-coordinates of the event being reported.
        position error (:class:`astropy.coordinates.Angle`): Error-cone on the
            position estimate.
        timestamp of event (:class:`datetime.datetime`): Timestamp for the
            reported event (UTC timezone).
    """

    def __init__(self, skyevent_dict):
        d = skyevent_dict
        self.position = SkyCoord(d['ra'], d['dec'], unit='deg')
        self.position_error = Angle(d['error'], unit='deg')
        self.timestamp = None
        if d.get('time'):
            self.timestamp = iso8601.parse_date(d['time'])

    def __repr__(self):
        return (str(self.position)
                + ' +/- ' + self.position_error.to_string(decimal=True)
                + ' @ ' + self.timestamp.isoformat())

    @staticmethod
    def _parse_coords_dict(coords_dict):
        d = coords_dict
        posn = SkyCoord(d['ra'], d['dec'], unit='deg')
        posn_error = Angle(d['error'], unit='deg')
        time = iso8601.parse_date(d['time'])
        return SkyEvent(sky_position=posn,
                        sky_position_error=posn_error,
                        event_timestamp=time)
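# Hedged construction sketch for SkyEvent; the dict keys follow __init__ above
# and the values are invented. Requires the iso8601 package used by the class.
#
#   event = SkyEvent({'ra': 83.63, 'dec': 22.01, 'error': 0.05,
#                     'time': '2015-01-01T12:00:00Z'})
#   print(event.position_error.to_string(decimal=True))  # 0.05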
def __init__(
    self,
    energy_lo,
    energy_hi,
    offset,
    rad_lo,
    rad_hi,
    psf_value,
    energy_thresh_lo=u.Quantity(0.1, "TeV"),
    energy_thresh_hi=u.Quantity(100, "TeV"),
    interp_kwargs=None,
):
    self.energy_lo = energy_lo.to("TeV")
    self.energy_hi = energy_hi.to("TeV")
    self.offset = Angle(offset)
    self.rad_lo = Angle(rad_lo)
    self.rad_hi = Angle(rad_hi)
    self.psf_value = psf_value.to("sr^-1")
    self.energy_thresh_lo = energy_thresh_lo.to("TeV")
    self.energy_thresh_hi = energy_thresh_hi.to("TeV")
    self._interp_kwargs = interp_kwargs or {}
from cdshealpix import cone_search
import astropy.units as u

ipix, depth, fully_covered = cone_search(lon=0 * u.deg, lat=0 * u.deg, radius=10 * u.deg, depth=10)

from mocpy import MOC, WCS
from astropy.coordinates import SkyCoord, Angle

moc = MOC.from_healpix_cells(ipix, depth, fully_covered)

# Plot the MOC using matplotlib
import matplotlib.pyplot as plt

fig = plt.figure(111, figsize=(10, 10))
# Define a astropy WCS from the mocpy.WCS class
with WCS(fig,
         fov=30 * u.deg,
         center=SkyCoord(0, 0, unit='deg', frame='icrs'),
         coordsys="icrs",
         rotation=Angle(0, u.degree),
         projection="AIT") as wcs:
    ax = fig.add_subplot(1, 1, 1, projection=wcs)
    # Call fill with a matplotlib axe and the `~astropy.wcs.WCS` wcs object.
    moc.fill(ax=ax, wcs=wcs, alpha=0.5, fill=True, color="green")
    # Draw the perimeter of the MOC in black
    moc.border(ax=ax, wcs=wcs, alpha=0.5, color="black")

plt.xlabel('ra')
plt.ylabel('dec')
plt.title('Cone search')
plt.grid(color="black", linestyle="dotted")
plt.show()
def common_sky_region_select_test_routines(obs_table, selection):
    """Common routines for the tests of sky_box/sky_circle selection of obs tables."""
    type = selection['type']
    if type not in ['sky_box', 'sky_circle']:
        raise ValueError("Invalid type: {}".format(type))

    if type == 'sky_box':
        lon_range_eff = (selection['lon'][0] - selection['border'],
                         selection['lon'][1] + selection['border'])
        lat_range_eff = (selection['lat'][0] - selection['border'],
                         selection['lat'][1] + selection['border'])
    elif type == 'sky_circle':
        lon_cen = selection['lon']
        lat_cen = selection['lat']
        center = SkyCoord(lon_cen, lat_cen, frame=selection['frame'])
        radius_eff = selection['radius'] + selection['border']

    do_wrapping = False  # not needed in the case of sky_circle
    if type == 'sky_box' and any(l < Angle(0., 'deg') for l in lon_range_eff):
        do_wrapping = True

    # observation table
    skycoord = skycoord_from_table(obs_table)

    # test on the selection
    selected_obs_table = obs_table.select_observations(selection)
    skycoord = skycoord_from_table(selected_obs_table)
    if type == 'sky_box':
        skycoord = skycoord.transform_to(selection['frame'])
        lon = skycoord.data.lon
        lat = skycoord.data.lat
        if do_wrapping:
            lon = lon.wrap_at(Angle(180, 'deg'))
        assert ((lon_range_eff[0] < lon) & (lon < lon_range_eff[1]) &
                (lat_range_eff[0] < lat) & (lat < lat_range_eff[1])).all()
    elif type == 'sky_circle':
        ang_distance = skycoord.separation(center)
        assert (ang_distance < radius_eff).all()

    # test on the inverted selection
    selection['inverted'] = True
    inv_selected_obs_table = obs_table.select_observations(selection)
    skycoord = skycoord_from_table(inv_selected_obs_table)
    if type == 'sky_box':
        skycoord = skycoord.transform_to(selection['frame'])
        lon = skycoord.data.lon
        lat = skycoord.data.lat
        if do_wrapping:
            lon = lon.wrap_at(Angle(180, 'deg'))
        assert ((lon_range_eff[0] >= lon) | (lon >= lon_range_eff[1]) |
                (lat_range_eff[0] >= lat) | (lat >= lat_range_eff[1])).all()
    elif type == 'sky_circle':
        ang_distance = skycoord.separation(center)
        assert (ang_distance >= radius_eff).all()

    # the sum of number of entries in both selections should be the total
    # number of entries
    assert len(selected_obs_table) + len(inv_selected_obs_table) == len(obs_table)
def test_conversion(self):
    degrees = self.phase.to(u.degree)
    assert_equal(degrees, Angle(self.phase1 + self.phase2, u.deg))
def db_ingest(filepath, filename, force=False):
    '''Read an image header and add a row to the database'''
    global telescopeids, instrumentids
    if '-en0' in filename:
        table = 'speclcoraw'
        db_to_hdrkey = speclcoraw_to_hdrkey
    else:
        table = 'photlcoraw'
        db_to_hdrkey = photlcoraw_to_hdrkey
    fileindb = lsc.mysqldef.getfromdataraw(conn, table, 'filename', filename,
                                           column2='filepath')
    if fileindb:
        filepath = fileindb[0]['filepath'] # could be marked as bad
    if not fileindb or force:
        if filename[-3:] == '.fz':
            hdr = fits.getheader(filepath + filename, 1)
        else:
            hdr = fits.getheader(filepath + filename)
        groupidcode, targetid = get_groupidcode(hdr)
        dbdict = {'filename': filename,
                  'filepath': filepath,
                  'groupidcode': groupidcode,
                  'targetid': targetid}
        for dbcol, hdrkey in db_to_hdrkey.items():
            if hdrkey in hdr and hdr[hdrkey] not in ['NaN', 'UNKNOWN', None, '']:
                if hdrkey in ['RA', 'CAT-RA']:
                    dbdict[dbcol] = Angle(hdr[hdrkey], u.hourangle).to_string(u.deg, decimal=True, precision=7)
                elif hdrkey in ['DEC', 'CAT-DEC']:
                    dbdict[dbcol] = Angle(hdr[hdrkey], u.deg).to_string(decimal=True, precision=7)
                else:
                    dbdict[dbcol] = hdr[hdrkey]
        if hdr['TELESCOP'] not in telescopeids:
            print hdr['TELESCOP'], 'not recognized. Adding to telescopes table.'
            lsc.mysqldef.insert_values(conn, 'telescopes', {'name': hdr['TELESCOP']})
            telescopes = lsc.mysqldef.query(['select id, name from telescopes'], conn)
            telescopeids = {tel['name']: tel['id'] for tel in telescopes}
        dbdict['telescopeid'] = telescopeids[hdr['TELESCOP']]
        if hdr['INSTRUME'] not in instrumentids:
            print hdr['INSTRUME'], 'not recognized. Adding to instruments table.'
            lsc.mysqldef.insert_values(conn, 'instruments', {'name': hdr['INSTRUME']})
            instruments = lsc.mysqldef.query(['select id, name from instruments'], conn)
            instrumentids = {inst['name']: inst['id'] for inst in instruments}
        dbdict['instrumentid'] = instrumentids[hdr['INSTRUME']]
        if fileindb:
            lsc.mysqldef.query(["delete from " + table + " where filename='" + filename + "'"], conn)
        print 'ingesting', filename
        lsc.mysqldef.insert_values(conn, table, dbdict)
    else:
        print filename, 'already ingested'
def on_region():
    """Example on_region for testing."""
    pos = SkyCoord("08h35m20.65525s", "-45d10m35.1545s", frame="icrs")
    radius = Angle(0.2, "deg")
    return CircleSkyRegion(pos, radius)
def test_init_basics(self):
    phase = Phase(1., 0.25)
    assert isinstance(phase, Phase)
    assert_equal(phase['int'], Angle(1. * u.cycle))
    assert_equal(phase['frac'], FractionalPhase(0.25 * u.cycle))
    assert_equal(phase.cycle, Angle(1.25 * u.cycle))
@pytest.fixture(scope="session")
def bkg_estimator(observations, exclusion_mask, on_region):
    """Example background estimator for testing."""
    maker = ReflectedRegionsBackgroundEstimator(
        observations=observations,
        on_region=on_region,
        exclusion_mask=exclusion_mask,
        min_distance_input="0.2 deg",
    )
    maker.run()
    return maker


region_finder_param = [
    (SkyCoord(83.2, 22.5, unit="deg"), 15, Angle("82.592 deg"), 17, 17),
    (SkyCoord(84.2, 22.5, unit="deg"), 17, Angle("83.636 deg"), 19, 19),
    (SkyCoord(83.2, 21.5, unit="deg"), 15, Angle("83.672 deg"), 17, 17),
]


@requires_data()
@pytest.mark.parametrize(
    "pointing_pos, nreg1, reg3_ra, nreg2, nreg3", region_finder_param
)
def test_find_reflected_regions(
    exclusion_mask, on_region, pointing_pos, nreg1, reg3_ra, nreg2, nreg3
):
    pointing = pointing_pos
    finder = ReflectedRegionsFinder(
        center=pointing,
        region=on_region,
        exclusion_mask=exclusion_mask,
def on_region():
    """Example on_region for testing."""
    pos = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)
    return region
class EventListChecker(Checker):
    """Event list checker.

    Data format specification: ref:`gadf:iact-events`

    Parameters
    ----------
    event_list : `~gammapy.data.EventList`
        Event list
    """

    CHECKS = {
        "meta": "check_meta",
        "columns": "check_columns",
        "times": "check_times",
        "coordinates_galactic": "check_coordinates_galactic",
        "coordinates_altaz": "check_coordinates_altaz",
    }

    accuracy = {"angle": Angle("1 arcsec"), "time": Quantity(1, "microsecond")}

    # https://gamma-astro-data-formats.readthedocs.io/en/latest/events/events.html#mandatory-header-keywords
    meta_required = [
        "HDUCLASS",
        "HDUDOC",
        "HDUVERS",
        "HDUCLAS1",
        "OBS_ID",
        "TSTART",
        "TSTOP",
        "ONTIME",
        "LIVETIME",
        "DEADC",
        "RA_PNT",
        "DEC_PNT",
        # TODO: what to do about these?
        # They are currently listed as required in the spec,
        # but I think we should just require ICRS and those
        # are irrelevant, should not be used.
        # 'RADECSYS',
        # 'EQUINOX',
        "ORIGIN",
        "TELESCOP",
        "INSTRUME",
        "CREATOR",
        # https://gamma-astro-data-formats.readthedocs.io/en/latest/general/time.html#time-formats
        "MJDREFI",
        "MJDREFF",
        "TIMEUNIT",
        "TIMESYS",
        "TIMEREF",
        # https://gamma-astro-data-formats.readthedocs.io/en/latest/general/coordinates.html#coords-location
        "GEOLON",
        "GEOLAT",
        "ALTITUDE",
    ]

    _col = collections.namedtuple("col", ["name", "unit"])
    columns_required = [
        _col(name="EVENT_ID", unit=""),
        _col(name="TIME", unit="s"),
        _col(name="RA", unit="deg"),
        _col(name="DEC", unit="deg"),
        _col(name="ENERGY", unit="TeV"),
    ]

    def __init__(self, event_list):
        self.event_list = event_list

    def _record(self, level="info", msg=None):
        obs_id = self.event_list.table.meta["OBS_ID"]
        return {"level": level, "obs_id": obs_id, "msg": msg}

    def check_meta(self):
        meta_missing = sorted(set(self.meta_required) - set(self.event_list.table.meta))
        if meta_missing:
            yield self._record(level="error", msg=f"Missing meta keys: {meta_missing!r}")

    def check_columns(self):
        t = self.event_list.table

        if len(t) == 0:
            yield self._record(level="error", msg="Events table has zero rows")

        for name, unit in self.columns_required:
            if name not in t.colnames:
                yield self._record(level="error", msg=f"Missing table column: {name!r}")
            else:
                if Unit(unit) != (t[name].unit or ""):
                    yield self._record(level="error", msg=f"Invalid unit for column: {name!r}")

    def check_times(self):
        dt = (self.event_list.time - self.event_list.observation_time_start).sec
        if dt.min() < self.accuracy["time"].to_value("s"):
            yield self._record(level="error", msg="Event times before obs start time")

        dt = (self.event_list.time - self.event_list.observation_time_end).sec
        if dt.max() > self.accuracy["time"].to_value("s"):
            yield self._record(level="error", msg="Event times after the obs end time")

        if np.min(np.diff(dt)) <= 0:
            yield self._record(level="error", msg="Events are not time-ordered.")

    def check_coordinates_galactic(self):
        """Check if RA / DEC matches GLON / GLAT."""
        t = self.event_list.table

        if "GLON" not in t.colnames:
            return

        galactic = SkyCoord(t["GLON"], t["GLAT"], unit="deg", frame="galactic")
        separation = self.event_list.radec.separation(galactic).to("arcsec")
        if separation.max() > self.accuracy["angle"]:
            yield self._record(level="error", msg="GLON / GLAT not consistent with RA / DEC")

    def check_coordinates_altaz(self):
        """Check if ALT / AZ matches RA / DEC."""
        t = self.event_list.table

        if "AZ" not in t.colnames:
            return

        altaz_astropy = self.event_list.altaz
        separation = angular_separation(
            altaz_astropy.data.lon,
            altaz_astropy.data.lat,
            t["AZ"].quantity,
            t["ALT"].quantity,
        )
        if separation.max() > self.accuracy["angle"]:
            yield self._record(level="error", msg="ALT / AZ not consistent with RA / DEC")
def exclusion_mask():
    """Example mask for testing."""
    pos = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")
    exclusion_region = CircleSkyRegion(pos, Angle(0.3, "deg"))
    geom = WcsGeom.create(skydir=pos, binsz=0.02, width=10.0)
    return ~geom.region_mask([exclusion_region])
def calculate_airmass(ctext):
    """
    Calculates AIRMASS for the list of all FITS files and appends respective
    details in the headers.
    Args:
        ctext : Common text of all FITS files whose headers have to be edited
    Returns:
        None
    """
    list_files = group_similar_files('', common_text=ctext)

    for file_name in list_files:
        hdulist = fits.open(file_name, mode='update')
        file_header = hdulist[0].header

        object_str = file_header[OBJECT_keyword]
        date_obs = file_header[DATE_keyword]
        time_start = file_header[TIMESTART_keyword]

        object_ra = OBJECT_RA
        object_dec = OBJECT_DEC
        if object_str == 'Feige110':
            object_ra = RA_Feige110
            object_dec = DEC_Feige110
        elif object_str == 'Feige34':
            object_ra = RA_Feige34
            object_dec = DEC_Feige34
        elif object_str == 'Feige66':
            object_ra = RA_Feige66
            object_dec = DEC_Feige66

        if RA_keyword in file_header:
            file_header.set(RA_keyword, object_ra)
        else:
            file_header.append((RA_keyword, object_ra))

        if DEC_keyword in file_header:
            file_header.set(DEC_keyword, object_dec)
        else:
            file_header.append((DEC_keyword, object_dec))

        time_obs = str(datetime.timedelta(seconds=int(time_start)))
        time_utc = date_obs + ' ' + time_obs
        julian_day = ephem.julian_date(time_utc)

        telescope = ephem.Observer()
        telescope.lon = OBS_LONG
        telescope.lat = OBS_LAT
        telescope.elevation = OBS_ALT
        telescope.pressure = 0
        telescope.epoch = ephem.J2000
        telescope.date = time_utc

        obj_pos = ephem.FixedBody()
        obj_pos._ra = object_ra
        obj_pos._dec = object_dec
        obj_pos._epoch = ephem.J2000
        obj_pos.compute(telescope)

        time_sidereal = telescope.sidereal_time()
        object_alt = Angle(str(obj_pos.alt) + ' degrees').degree
        airmass = 1 / math.cos(math.radians(90 - object_alt))

        list_keywords = ['OBSERVAT', 'OBS_LAT', 'OBS_LONG', 'OBS_ALT',
                         'TIMEZONE', 'DATE_OBS', 'UT', 'JD', 'ST', 'RA',
                         'DEC', 'ALT', 'AZ', 'AIRMASS']

        dict_header = {'OBSERVAT': OBS_NAME, 'OBS_LAT': OBS_LAT,
                       'OBS_LONG': OBS_LONG, 'OBS_ALT': OBS_ALT,
                       'TIMEZONE': OBS_TIMEZONE, 'DATE_OBS': date_obs,
                       'UT': time_utc, 'JD': julian_day, 'ST': time_sidereal,
                       'RA': object_ra, 'DEC': object_dec, 'ALT': obj_pos.alt,
                       'AZ': obj_pos.az, 'AIRMASS': airmass}

        for keyword in list_keywords:
            if keyword in file_header.keys():
                file_header.remove(keyword, remove_all=True)
            file_header.append(card=(keyword, str(dict_header[keyword])))

        hdulist.flush()
        hdulist.close()
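# The sec(z) airmass formula from calculate_airmass, isolated; the altitude
# value is arbitrary.
#
#   import math
#   from astropy.coordinates import Angle
#
#   alt = Angle('45d30m00s').degree
#   airmass = 1 / math.cos(math.radians(90 - alt))
#   print(round(airmass, 3))  # 1.402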
def _info_map(self):
    """Print info from map analysis."""
    d = self.data
    ss = "\n*** Info from map analysis ***\n\n"

    ra_str = Angle(d["RAJ2000"], "deg").to_string(unit="hour", precision=0)
    dec_str = Angle(d["DEJ2000"], "deg").to_string(unit="deg", precision=0)
    ss += "{:<20s} : {:8.3f} = {}\n".format("RA", d["RAJ2000"], ra_str)
    ss += "{:<20s} : {:8.3f} = {}\n".format("DEC", d["DEJ2000"], dec_str)

    ss += "{:<20s} : {:8.3f} +/- {:.3f} deg\n".format(
        "GLON", d["GLON"].value, d["GLON_Err"].value
    )
    ss += "{:<20s} : {:8.3f} +/- {:.3f} deg\n".format(
        "GLAT", d["GLAT"].value, d["GLAT_Err"].value
    )

    ss += "{:<20s} : {:.3f}\n".format("Position Error (68%)", d["Pos_Err_68"])
    ss += "{:<20s} : {:.3f}\n".format("Position Error (95%)", d["Pos_Err_95"])

    ss += "{:<20s} : {:.0f}\n".format("ROI number", d["ROI_Number"])
    ss += "{:<20s} : {}\n".format("Spatial model", d["Spatial_Model"])
    ss += "{:<20s} : {}\n".format("Spatial components", d["Components"])

    ss += "{:<20s} : {:.1f}\n".format("TS", d["Sqrt_TS"] ** 2)
    ss += "{:<20s} : {:.1f}\n".format("sqrt(TS)", d["Sqrt_TS"])

    ss += "{:<20s} : {:.3f} +/- {:.3f} (UL: {:.3f}) deg\n".format(
        "Size", d["Size"].value, d["Size_Err"].value, d["Size_UL"].value
    )

    ss += "{:<20s} : {:.3f}\n".format("R70", d["R70"])
    ss += "{:<20s} : {:.3f}\n".format("RSpec", d["RSpec"])

    ss += "{:<20s} : {:.1f}\n".format("Total model excess", d["Excess_Model_Total"])
    ss += "{:<20s} : {:.1f}\n".format("Excess in RSpec", d["Excess_RSpec"])
    ss += "{:<20s} : {:.1f}\n".format(
        "Model Excess in RSpec", d["Excess_RSpec_Model"]
    )
    ss += "{:<20s} : {:.1f}\n".format("Background in RSpec", d["Background_RSpec"])

    ss += "{:<20s} : {:.1f} hours\n".format("Livetime", d["Livetime"].value)

    ss += "{:<20s} : {:.2f}\n".format("Energy threshold", d["Energy_Threshold"])

    val, err = d["Flux_Map"].value, d["Flux_Map_Err"].value
    ss += "{:<20s} : ({:.3f} +/- {:.3f}) x 10^-12 cm^-2 s^-1 = ({:.2f} +/- {:.2f}) % Crab\n".format(
        "Source flux (>1 TeV)",
        val / FF,
        err / FF,
        val * FLUX_TO_CRAB,
        err * FLUX_TO_CRAB,
    )

    ss += "\nFluxes in RSpec (> 1 TeV):\n"

    ss += "{:<30s} : {:.3f} x 10^-12 cm^-2 s^-1 = {:5.2f} % Crab\n".format(
        "Map measurement",
        d["Flux_Map_RSpec_Data"].value / FF,
        d["Flux_Map_RSpec_Data"].value * FLUX_TO_CRAB,
    )

    ss += "{:<30s} : {:.3f} x 10^-12 cm^-2 s^-1 = {:5.2f} % Crab\n".format(
        "Source model",
        d["Flux_Map_RSpec_Source"].value / FF,
        d["Flux_Map_RSpec_Source"].value * FLUX_TO_CRAB,
    )

    ss += "{:<30s} : {:.3f} x 10^-12 cm^-2 s^-1 = {:5.2f} % Crab\n".format(
        "Other component model",
        d["Flux_Map_RSpec_Other"].value / FF,
        d["Flux_Map_RSpec_Other"].value * FLUX_TO_CRAB,
    )

    ss += "{:<30s} : {:.3f} x 10^-12 cm^-2 s^-1 = {:5.2f} % Crab\n".format(
        "Large scale component model",
        d["Flux_Map_RSpec_LS"].value / FF,
        d["Flux_Map_RSpec_LS"].value * FLUX_TO_CRAB,
    )

    ss += "{:<30s} : {:.3f} x 10^-12 cm^-2 s^-1 = {:5.2f} % Crab\n".format(
        "Total model",
        d["Flux_Map_RSpec_Total"].value / FF,
        d["Flux_Map_RSpec_Total"].value * FLUX_TO_CRAB,
    )

    ss += "{:<35s} : {:5.1f} %\n".format(
        "Containment in RSpec", 100 * d["Containment_RSpec"]
    )
    ss += "{:<35s} : {:5.1f} %\n".format(
        "Contamination in RSpec", 100 * d["Contamination_RSpec"]
    )

    label, val = (
        "Flux correction (RSpec -> Total)",
        100 * d["Flux_Correction_RSpec_To_Total"],
    )
    ss += f"{label:<35s} : {val:5.1f} %\n"

    label, val = (
        "Flux correction (Total -> RSpec)",
        100 * (1 / d["Flux_Correction_RSpec_To_Total"]),
    )
    ss += f"{label:<35s} : {val:5.1f} %\n"

    return ss
def calcfaixa(vel, data, star, dist, ca, pa, tamanho, step, erro=None, ring=None, atm=None):
    vec = np.arange(0, int(8000/(np.absolute(vel.value))), step)
    g = np.sort(np.concatenate((vec, -vec[1:]), axis=0))
    latlon = {'clat': {'lon': [], 'lat': [], 'lab': [], 'x': [], 'y': [], 'labx': []},
              'lats': {'lon': [], 'lat': [], 'lon2': [], 'lat2': [],
                       'x': [], 'y': [], 'x2': [], 'y2': []}}
    if erro is not None:
        latlon['erro'] = {'lon': [], 'lat': [], 'lon2': [], 'lat2': []}
        err = erro*u.mas
        errd = (dist.to(u.km)*err.to(u.rad)).value*u.km
    if ring is not None:
        latlon['ring'] = {'lon': [], 'lat': [], 'lon2': [], 'lat2': []}
    if atm is not None:
        latlon['atm'] = {'lon': [], 'lat': [], 'lon2': [], 'lat2': []}
    pa = Angle(pa)
    pa.wrap_at('180d', inplace=True)
    if pa > 90*u.deg:
        paplus = pa - 180*u.deg
    elif pa < -90*u.deg:
        paplus = pa + 180*u.deg
    else:
        paplus = pa
    for delt in g:
        deltatime = delt*u.s
        datas1 = data + TimeDelta(deltatime)
        datas1.delta_ut1_utc = 0
        lon = star.ra - datas1.sidereal_time('mean', 'greenwich')
        m = Basemap(projection='ortho', lat_0=star.dec.value, lon_0=lon.value, resolution=None)
        a, b = m(lon.value, star.dec.value)
        a = a*u.m
        b = b*u.m
        dista = (dist.to(u.km)*ca.to(u.rad)).value*u.km
        ax = a + dista*np.sin(pa) + (deltatime*vel)*np.cos(paplus)
        by = b + dista*np.cos(pa) - (deltatime*vel)*np.sin(paplus)
        ax2 = ax - tamanho/2*np.sin(paplus)
        by2 = by - tamanho/2*np.cos(paplus)
        ax3 = ax + tamanho/2*np.sin(paplus)
        by3 = by + tamanho/2*np.cos(paplus)
        clon1, clat1 = m(ax.value, by.value, inverse=True)
        if delt == 0:
            latlon['clat']['cxy'] = [ax.value, by.value]
        if clon1 < 1e+30:
            latlon['clat']['lon'].append(clon1)
            latlon['clat']['lat'].append(clat1)
            latlon['clat']['lab'].append(datas1.iso)
        else:
            latlon['clat']['x'].append(ax.value)
            latlon['clat']['y'].append(by.value)
            latlon['clat']['labx'].append(datas1.iso)
        lon1, lat1 = m(ax2.value, by2.value, inverse=True)
        if lon1 < 1e+30:
            latlon['lats']['lon'].append(lon1)
            latlon['lats']['lat'].append(lat1)
        else:
            latlon['lats']['x'].append(ax2.value)
            latlon['lats']['y'].append(by2.value)
        lon2, lat2 = m(ax3.value, by3.value, inverse=True)
        if lon2 < 1e+30:
            latlon['lats']['lon2'].append(lon2)
            latlon['lats']['lat2'].append(lat2)
        else:
            latlon['lats']['x2'].append(ax3.value)
            latlon['lats']['y2'].append(by3.value)
        if erro is not None:
            ax2 = ax - errd*np.sin(paplus)
            by2 = by - errd*np.cos(paplus)
            ax3 = ax + errd*np.sin(paplus)
            by3 = by + errd*np.cos(paplus)
            lon1, lat1 = m(ax2.value, by2.value, inverse=True)
            if lon1 < 1e+30:
                latlon['erro']['lon'].append(lon1)
                latlon['erro']['lat'].append(lat1)
            lon2, lat2 = m(ax3.value, by3.value, inverse=True)
            if lon2 < 1e+30:
                latlon['erro']['lon2'].append(lon2)
                latlon['erro']['lat2'].append(lat2)
        if ring is not None:
            rng = ring*u.km
            ax2 = ax - rng*np.sin(paplus)
            by2 = by - rng*np.cos(paplus)
            ax3 = ax + rng*np.sin(paplus)
            by3 = by + rng*np.cos(paplus)
            lon1, lat1 = m(ax2.value, by2.value, inverse=True)
            if lon1 < 1e+30:
                latlon['ring']['lon'].append(lon1)
                latlon['ring']['lat'].append(lat1)
            lon2, lat2 = m(ax3.value, by3.value, inverse=True)
            if lon2 < 1e+30:
                latlon['ring']['lon2'].append(lon2)
                latlon['ring']['lat2'].append(lat2)
        if atm is not None:
            atmo = atm*u.km
            ax2 = ax - atmo*np.sin(paplus)
            by2 = by - atmo*np.cos(paplus)
            ax3 = ax + atmo*np.sin(paplus)
            by3 = by + atmo*np.cos(paplus)
            lon1, lat1 = m(ax2.value, by2.value, inverse=True)
            if lon1 < 1e+30:
                latlon['atm']['lon'].append(lon1)
                latlon['atm']['lat'].append(lat1)
            lon2, lat2 = m(ax3.value, by3.value, inverse=True)
            if lon2 < 1e+30:
                latlon['atm']['lon2'].append(lon2)
                latlon['atm']['lat2'].append(lat2)
    return latlon
def exclusion_mask(geom):
    """Example mask for testing."""
    pos = SkyCoord(83.633, 22.014, unit="deg", frame="icrs")
    region = CircleSkyRegion(pos, Angle(0.3, "deg"))
    return ~geom.region_mask([region])
def _info_map(self):
    """Print info from map analysis."""
    d = self.data
    ss = '\n*** Info from map analysis ***\n\n'

    ra_str = Angle(d['RAJ2000'], 'deg').to_string(unit='hour', precision=0)
    dec_str = Angle(d['DEJ2000'], 'deg').to_string(unit='deg', precision=0)
    ss += '{:<20s} : {:8.3f} = {}\n'.format('RA', d['RAJ2000'], ra_str)
    ss += '{:<20s} : {:8.3f} = {}\n'.format('DEC', d['DEJ2000'], dec_str)

    ss += '{:<20s} : {:8.3f} +/- {:.3f} deg\n'.format(
        'GLON', d['GLON'].value, d['GLON_Err'].value)
    ss += '{:<20s} : {:8.3f} +/- {:.3f} deg\n'.format(
        'GLAT', d['GLAT'].value, d['GLAT_Err'].value)

    ss += '{:<20s} : {:.3f}\n'.format('Position Error (68%)', d['Pos_Err_68'])
    ss += '{:<20s} : {:.3f}\n'.format('Position Error (95%)', d['Pos_Err_95'])

    ss += '{:<20s} : {:.0f}\n'.format('ROI number', d['ROI_Number'])
    ss += '{:<20s} : {}\n'.format('Spatial model', d['Spatial_Model'])
    ss += '{:<20s} : {}\n'.format('Spatial components', d['Components'])

    ss += '{:<20s} : {:.1f}\n'.format('TS', d['Sqrt_TS']**2)
    ss += '{:<20s} : {:.1f}\n'.format('sqrt(TS)', d['Sqrt_TS'])

    ss += '{:<20s} : {:.3f} +/- {:.3f} (UL: {:.3f}) deg\n'.format(
        'Size', d['Size'].value, d['Size_Err'].value, d['Size_UL'].value)

    ss += '{:<20s} : {:.3f}\n'.format('R70', d['R70'])
    ss += '{:<20s} : {:.3f}\n'.format('RSpec', d['RSpec'])

    ss += '{:<20s} : {:.1f}\n'.format('Total model excess', d['Excess_Model_Total'])
    ss += '{:<20s} : {:.1f}\n'.format('Excess in RSpec', d['Excess_RSpec'])
    ss += '{:<20s} : {:.1f}\n'.format('Model Excess in RSpec', d['Excess_RSpec_Model'])
    ss += '{:<20s} : {:.1f}\n'.format('Background in RSpec', d['Background_RSpec'])

    ss += '{:<20s} : {:.1f} hours\n'.format('Livetime', d['Livetime'].value)

    ss += '{:<20s} : {:.1f}\n'.format('Energy threshold', d['Energy_Threshold'])

    val, err = d['Flux_Map'].value, d['Flux_Map_Err'].value
    ss += '{:<20s} : ({:.2f} +/- {:.2f}) x 10^-12 cm^-2 s^-1 = ({:.1f} +/- {:.1f}) % Crab\n'.format(
        'Source flux (>1 TeV)', val / FF, err / FF,
        val * FLUX_TO_CRAB, err * FLUX_TO_CRAB)

    ss += '\nFluxes in RSpec (> 1 TeV):\n'

    ss += '{:<30s} : {:.2f} x 10^-12 cm^-2 s^-1 = {:5.1f} % Crab\n'.format(
        'Map measurement',
        d['Flux_Map_RSpec_Data'].value / FF,
        d['Flux_Map_RSpec_Data'].value * FLUX_TO_CRAB)

    ss += '{:<30s} : {:.2f} x 10^-12 cm^-2 s^-1 = {:5.1f} % Crab\n'.format(
        'Source model',
        d['Flux_Map_RSpec_Source'].value / FF,
        d['Flux_Map_RSpec_Source'].value * FLUX_TO_CRAB)

    ss += '{:<30s} : {:.2f} x 10^-12 cm^-2 s^-1 = {:5.1f} % Crab\n'.format(
        'Other component model',
        d['Flux_Map_RSpec_Other'].value / FF,
        d['Flux_Map_RSpec_Other'].value * FLUX_TO_CRAB)

    ss += '{:<30s} : {:.2f} x 10^-12 cm^-2 s^-1 = {:5.1f} % Crab\n'.format(
        'Large scale component model',
        d['Flux_Map_RSpec_LS'].value / FF,
        d['Flux_Map_RSpec_LS'].value * FLUX_TO_CRAB)

    ss += '{:<30s} : {:.2f} x 10^-12 cm^-2 s^-1 = {:5.1f} % Crab\n'.format(
        'Total model',
        d['Flux_Map_RSpec_Total'].value / FF,
        d['Flux_Map_RSpec_Total'].value * FLUX_TO_CRAB)

    ss += '{:<35s} : {:5.1f} %\n'.format('Containment in RSpec',
                                         100 * d['Containment_RSpec'])
    ss += '{:<35s} : {:5.1f} %\n'.format('Contamination in RSpec',
                                         100 * d['Contamination_RSpec'])

    label, val = 'Flux correction (RSpec -> Total)', 100 * d['Flux_Correction_RSpec_To_Total']
    ss += '{:<35s} : {:5.1f} %\n'.format(label, val)

    label, val = 'Flux correction (Total -> RSpec)', 100 * (1 / d['Flux_Correction_RSpec_To_Total'])
    ss += '{:<35s} : {:5.1f} %\n'.format(label, val)

    return ss
            if metacat is not None:
                # Appends metadata
                sources[distbin][0].append(distbin+1)
                sources[distbin][1].append(cat[m,0])
                sources[distbin][2].append(cat[m,1])
                sources[distbin][4].append(z[infield][i])
                # appends redshift if reliability > threshold
                if metacat[m,2] >= rel_thres:
                    sources[distbin][3].append(metacat[m,3])
                else:
                    sources[distbin][3].append(-1)
            break
    #"""

    ## Random Sampling ##
    R_map_deg = Angle(R_map[i]).deg  # converts to deg
    extract = rnd.randint(MC_repeat)  # random simulation extraction

    for sim in range(MC_repeat):  # loops MC_repeat times for the MC simulations
        while True:
            # random RA/Dec for random sampling (RA accounts for PBCs in ICRS frame)
            ra_rand = rnd.uniform(RA_min + R_map_deg, RA_max + 360 - R_map_deg)
            dec_rand = rnd.uniform(DEC_min + R_map_deg, DEC_max - R_map_deg)
            coords_rand = SkyCoord(ra_rand % 360, dec_rand, unit="deg")

            # cuts random sample
            try:  # catches exception if cluster is at edge
                boxrand = Cutout2D(rebin, coords_rand, 2*R_map[i], wcs=w_new, mode="strict")
                cutrand = boxrand.data  # image data of random cutout
            except (PartialOverlapError, NoOverlapError, ValueError):
                continue

            if np.count_nonzero(cutrand) == len(cutrand)**2:
def align_by_wcs(files, target=None, observer=None, time_key='DATE-OBS', **kwargs):
    """Align a set of images using their world coordinate systems.

    Parameters
    ----------
    files : list
        The list of FITS files to align.
    target : SolarSysObject
        Align in the reference frame of this object.
    observer : SolarSysObject
        Observe `target` with this observer.
    time_key : string
        The header keyword for the observation time.
    **kwargs
        Keyword arguments for `imshift`.

    Returns
    -------
    stack : ndarray
        The aligned images.
    dyx : ndarray
        The offsets.
    """
    import astropy.units as u
    from astropy.io import fits
    from astropy.wcs import WCS
    from astropy.coordinates import Angle

    im, h0 = fits.getdata(files[0], header=True)
    wcs0 = WCS(h0)
    stack = np.zeros((len(files), ) + im.shape)
    stack[0] = im
    y0, x0 = np.array(im.shape) / 2.0

    if target is not None:
        assert observer is not None, "observer required"
        g0 = observer.observe(target, h0[time_key])
        xt, yt = wcs0.wcs_world2pix(np.c_[g0.ra, g0.dec], 0)[0]

    ra0, dec0 = Angle(wcs0.wcs_pix2world(np.c_[x0, y0], 0)[0] * u.deg)
    dra = 0 * u.deg
    ddec = 0 * u.deg
    dyx = np.zeros((len(files), 2))
    for i in range(1, len(files)):
        im, h = fits.getdata(files[i], header=True)
        wcs = WCS(h)
        if target is not None:
            g = observer.observe(target, h[time_key])
            dra = g.ra - g0.ra
            ddec = g.dec - g0.dec

        x, y = wcs.wcs_world2pix(np.c_[ra0 + dra, dec0 + ddec], 0)[0]
        dyx[i] = y0 - y, x0 - x
        stack[i] = core.imshift(im, dyx[i], **kwargs)
        if int(dyx[i, 0]) != 0:
            if int(dyx[i, 0]) < 0:
                stack[i, :, int(dyx[i, 0]):] = np.nan
            else:
                stack[i, :, :int(dyx[i, 0])] = np.nan
        if int(dyx[i, 1]) != 0:
            if int(dyx[i, 1]) < 0:
                stack[i, int(dyx[i, 1]):] = np.nan
            else:
                stack[i, :int(dyx[i, 1])] = np.nan

    return stack, dyx
def test_sample_sphere():
    random_state = np.random.RandomState(seed=0)

    # test general case
    lon, lat = sample_sphere(size=2, random_state=random_state)
    assert_quantity_allclose(lon, Angle([3.44829694, 4.49366732], "radian"))
    assert_quantity_allclose(lat, Angle([0.20700192, 0.08988736], "radian"))

    # test specify a limited range
    lon_range = Angle([40.0, 45.0], "deg")
    lat_range = Angle([10.0, 15.0], "deg")
    lon, lat = sample_sphere(
        size=10, lon_range=lon_range, lat_range=lat_range, random_state=random_state
    )
    assert ((lon_range[0] <= lon) & (lon < lon_range[1])).all()
    assert ((lat_range[0] <= lat) & (lat < lat_range[1])).all()

    # test lon within (-180, 180) deg range
    lon_range = Angle([-40.0, 0.0], "deg")
    lon, lat = sample_sphere(size=10, lon_range=lon_range, random_state=random_state)
    assert ((lon_range[0] <= lon) & (lon < lon_range[1])).all()
    lat_range = Angle([-90.0, 90.0], "deg")
    assert ((lat_range[0] <= lat) & (lat < lat_range[1])).all()

    # test lon range explicitly (0, 360) deg
    lon_range = Angle([0.0, 360.0], "deg")
    lon, lat = sample_sphere(size=100, lon_range=lon_range, random_state=random_state)

    # test values in the desired range
    lat_range = Angle([-90.0, 90.0], "deg")
    assert ((lon_range[0] <= lon) & (lon < lon_range[1])).all()
    assert ((lat_range[0] <= lat) & (lat < lat_range[1])).all()

    # test if values are distributed along the whole range
    nbins = 4
    lon_delta = (lon_range[1] - lon_range[0]) / nbins
    lat_delta = (lat_range[1] - lat_range[0]) / nbins
    for i in np.arange(nbins):
        assert (
            (lon_range[0] + i * lon_delta <= lon)
            & (lon < lon_range[0] + (i + 1) * lon_delta)
        ).any()
        assert (
            (lat_range[0] + i * lat_delta <= lat)
            & (lat < lat_range[0] + (i + 1) * lat_delta)
        ).any()

    # test lon range explicitly (-180, 180) deg
    lon_range = Angle([-180.0, 180.0], "deg")
    lon, lat = sample_sphere(size=100, lon_range=lon_range, random_state=random_state)

    # test values in the desired range
    lat_range = Angle([-90.0, 90.0], "deg")
    assert ((lon_range[0] <= lon) & (lon < lon_range[1])).all()
    assert ((lat_range[0] <= lat) & (lat < lat_range[1])).all()

    # test if values are distributed along the whole range
    nbins = 4
    lon_delta = (lon_range[1] - lon_range[0]) / nbins
    lat_delta = (lat_range[1] - lat_range[0]) / nbins
    for i in np.arange(nbins):
        assert (
            (lon_range[0] + i * lon_delta <= lon)
            & (lon < lon_range[0] + (i + 1) * lon_delta)
        ).any()
        assert (
            (lat_range[0] + i * lat_delta <= lat)
            & (lat < lat_range[0] + (i + 1) * lat_delta)
        ).any()

    # test box around Galactic center
    lon_range = Angle([-5.0, 5.0], "deg")
    lon, lat = sample_sphere(size=10, lon_range=lon_range, random_state=random_state)
    # test if values are distributed along the whole range
    nbins = 2
    lon_delta = (lon_range[1] - lon_range[0]) / nbins
    for i in np.arange(nbins):
        assert (
            (lon_range[0] + i * lon_delta <= lon)
            & (lon < lon_range[0] + (i + 1) * lon_delta)
        ).any()

    # test box around Galactic anticenter
    lon_range = Angle([175.0, 185.0], "deg")
    lon, lat = sample_sphere(size=10, lon_range=lon_range, random_state=random_state)
    # test if values are distributed along the whole range
    nbins = 2
    lon_delta = (lon_range[1] - lon_range[0]) / nbins
    for i in np.arange(nbins):
        assert (
            (lon_range[0] + i * lon_delta <= lon)
            & (lon < lon_range[0] + (i + 1) * lon_delta)
        ).any()
def test_cube_pipe(tmpdir):
    """Example how to make a Cube analysis from a 2D background model."""
    tmpdir = str(tmpdir)
    outdir = tmpdir
    outdir2 = outdir + '/background'
    Path(outdir2).mkdir()

    ds = DataStore.from_dir("$GAMMAPY_EXTRA/datasets/hess-crab4-hd-hap-prod2")
    ds.copy_obs(ds.obs_table, tmpdir)
    data_store = DataStore.from_dir(tmpdir)

    # Create the background model from the 4 Crab observations
    bgmaker = OffDataBackgroundMaker(data_store, outdir=outdir2)
    bgmaker.select_observations(selection='all')
    bgmaker.group_observations()
    bgmaker.make_model("2D")
    bgmaker.save_models("2D")

    fn = outdir2 + '/group-def.fits'

    # New hdu table that contains the link to the background model
    hdu_index_table = bgmaker.make_total_index_table(
        data_store=data_store,
        modeltype='2D',
        out_dir_background_model=outdir2,
        filename_obs_group_table=fn)

    fn = outdir + '/hdu-index.fits.gz'
    hdu_index_table.write(fn, overwrite=True)

    offset_band = Angle([0, 2.49], 'deg')

    ref_cube_images = make_empty_cube(emin=0.5, emax=100, enumbins=5)
    ref_cube_exposure = make_empty_cube(emin=0.1, emax=120, enumbins=80,
                                        data_unit="m2 s")
    ref_cube_skymask = make_empty_cube(emin=0.5, emax=100, enumbins=5)

    data_store = DataStore.from_dir(tmpdir)

    refheader = ref_cube_images.sky_image_ref.to_image_hdu().header
    exclusion_mask = SkyImage.read('$GAMMAPY_EXTRA/datasets/exclusion_masks/tevcat_exclusion.fits')
    exclusion_mask = exclusion_mask.reproject(reference=refheader)
    ref_cube_skymask.data = np.tile(exclusion_mask.data, (5, 1, 1))

    # TODO: Problem with the load psftable for one of the run that is not implemented yet...
    data_store.hdu_table.remove_row(14)

    # Cube Analysis
    cube_maker = StackedObsCubeMaker(
        empty_cube_images=ref_cube_images,
        empty_exposure_cube=ref_cube_exposure,
        offset_band=offset_band,
        data_store=data_store,
        obs_table=data_store.obs_table,
        exclusion_mask=ref_cube_skymask,
        save_bkg_scale=True,
    )
    cube_maker.make_cubes(make_background_image=True, radius=10.)

    assert_allclose(cube_maker.counts_cube.data.sum(), 4898.0, atol=3)
    assert_allclose(cube_maker.bkg_cube.data.sum(), 4260.120595293951, atol=3)

    # Note: the tolerance in the following assert is low to pass here:
    # https://travis-ci.org/gammapy/gammapy/jobs/234062946#L2112
    cube_maker.significance_cube.data[np.where(np.isinf(cube_maker.significance_cube.data))] = 0
    actual = np.nansum(cube_maker.significance_cube.data)
    assert_allclose(actual, 65777.69960178432, rtol=0.1)

    actual = cube_maker.excess_cube.data.sum()
    assert_allclose(actual, 637.8794047060486, rtol=1e-2)

    actual = np.nansum(cube_maker.exposure_cube.data.to('m2 s').value)
    assert_allclose(actual, 5399539029926424.0, rtol=1e-2)

    assert_allclose(cube_maker.table_bkg_scale[0]["bkg_scale"],
                    0.8996676356375191, rtol=0.03)

    assert len(cube_maker.counts_cube.energies()) == 5
    assert len(cube_maker.bkg_cube.energies()) == 5
    assert len(cube_maker.significance_cube.energies()) == 5
    assert len(cube_maker.excess_cube.energies()) == 5
    assert len(cube_maker.exposure_cube.energies()) == 80
class TestPhaseInit: def test_init_basics(self): phase = Phase(1., 0.25) assert isinstance(phase, Phase) assert_equal(phase['int'], Angle(1. * u.cycle)) assert_equal(phase['frac'], FractionalPhase(0.25 * u.cycle)) assert_equal(phase.cycle, Angle(1.25 * u.cycle)) @pytest.mark.parametrize('phase1, phase2', ((1., 0.1), (1. * u.cycle, 0.125 * u.cycle), (0., np.arange(-90., 170., 45.) * u.deg), (Angle(720., 'deg'), FractionalPhase(0.25)), ('720d', '90d'), ('0h', '3h00m00s'))) def test_init(self, phase1, phase2): phase = Phase(phase1, phase2) assert isinstance(phase, Phase) assert_equal(phase['int'], Angle(phase1, u.cycle)) assert_equal(phase['frac'], FractionalPhase(phase2, u.cycle)) expected_cycle = Angle(phase1, u.cycle) + Angle(phase2, u.cycle) assert_equal(phase.cycle, expected_cycle) def test_init_with_phase(self): phase = Phase(1., 0.125) phase2 = Phase(phase) assert_equal(phase2, phase) assert phase2 is not phase assert not np.may_share_memory(phase2, phase) phase3 = Phase(phase, copy=False) assert phase3 is phase phase4 = Phase(phase, 0., copy=False) assert phase4 is not phase assert_equal(phase4, phase) phase5 = Phase(0., phase) assert phase5 is not phase assert_equal(phase5, phase) phase6 = Phase(phase, phase) assert_equal(phase6, Phase(2., 0.25)) def test_init_with_subclass(self): class MyPhase(Phase): pass my_phase = MyPhase(1., 0.25) assert type(my_phase) is MyPhase phase2 = Phase(my_phase) assert type(phase2) is Phase phase3 = Phase(my_phase, subok=True) assert type(phase3) is MyPhase assert phase3 is not my_phase assert not np.may_share_memory(phase3, my_phase) phase4 = Phase(my_phase, copy=False) assert type(phase4) is Phase assert np.may_share_memory(phase4, my_phase) phase5 = Phase(my_phase, copy=False, subok=True) assert phase5 is my_phase phase6 = Phase(my_phase, 0., copy=False, subok=True) assert type(phase6) is MyPhase assert not np.may_share_memory(phase6, my_phase) phase7 = Phase(my_phase, phase2, copy=False, subok=True) assert type(phase7) is MyPhase phase8 = Phase(phase2, my_phase, copy=False, subok=True) assert type(phase8) is MyPhase def test_init_complex(self): phase = Phase(1j) assert isinstance(phase, Phase) assert phase.imaginary assert_equal(phase.int, Angle(1j, u.cycle)) assert_equal(phase.frac, Angle(0j, u.cycle)) assert_equal(phase.cycle, Angle(1j, u.cycle)) assert '1j cycle' in repr(phase) phase2 = Phase(1 + 0j) assert isinstance(phase2, Phase) assert not phase2.imaginary assert_equal(phase2, Phase(1)) assert '1j cycle' not in repr(phase2) with pytest.raises(ValueError): Phase(1., 0.0001j)
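# A sketch of why Phase (as exercised in the tests above) stores its value in
# two parts, an integer and a fractional cycle count: a single float64 cannot
# hold a large cycle count and a fine fractional phase at the same time.
# Pure-numpy illustration; Phase itself is assumed to come from the package
# under test and to expose the phase['int'], phase['frac'], and phase.cycle
# accessors shown in the tests.
import numpy as np

total = np.float64(123456789.0) + np.float64(0.0001234567)
frac_recovered = total - np.round(total)
# The recovered fraction is only good to a few 1e-8 at this magnitude,
print(abs(frac_recovered - 0.0001234567))
# whereas Phase(123456789., 0.0001234567) keeps the fraction at full
# float64 precision in its 'frac' part.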
def degrees_to_ddmmss(dec):
    """Format a declination in decimal degrees as a sexagesimal string.

    Note: ``fields=2`` truncates the output to degrees and arcminutes
    (e.g. ``-12d30m``), so no seconds field is produced despite the name.
    """
    return Angle(dec, u.degree).to_string(unit=u.degree, fields=2)
for j in range(len(NUMBER_ID)):
    # zero-pad the running number so all detection IDs have the same width
    DETECTION_ID[j] = 'CAHA_CAFOS_BBI_DR1_' + CAHA_ID[0] + '_' + '0' * (3 - int(np.log10(1 + j))) + NUMBER_ID[j]
c2 = fits.Column(name='Detection_ID', array=DETECTION_ID, format='50A')
MJD_array = np.zeros(len(final_objects[:, 0]))
MJD_array[:] = MJD
c3 = fits.Column(name='MJD', array=MJD_array, format='D')
c4 = fits.Column(name='SNR_WIN', array=final_objects[:, SNR_WIN], format='E')
c5 = fits.Column(name='RAJ2000', unit='deg', array=final_objects[:, ALPHA_J2000], format='E')
c6 = fits.Column(name='DEJ2000', unit='deg', array=final_objects[:, DELTA_J2000], format='E')
c7 = fits.Column(name='e_RAJ2000', unit='arcsec', array=3600 * final_objects[:, ERRX2_WORLD] ** 0.5, format='E')
c8 = fits.Column(name='e_DEJ2000', unit='arcsec', array=3600 * final_objects[:, ERRY2_WORLD] ** 0.5, format='E')
RA = Angle(final_objects[:, ALPHA_J2000] * u.deg)
DEC = Angle(final_objects[:, DELTA_J2000] * u.deg)
e_RA = Angle(final_objects[:, ERRX2_WORLD] ** 0.5 * u.deg)
e_DEC = Angle(final_objects[:, ERRY2_WORLD] ** 0.5 * u.deg)
c9 = fits.Column(name='RA_hms', unit='hh:mm:ss', array=RA.to_string(unit=u.hourangle, sep=(':', ':')), format='20A')
c10 = fits.Column(name='DE_dms', unit='dd:mm:ss', array=DEC.to_string(unit=u.deg, sep=(':', ':')), format='20A')
c11 = fits.Column(name='e_RA_hms', unit='hh:mm:ss', array=e_RA.to_string(unit=u.hourangle, sep=(':', ':')), format='20A')
c12 = fits.Column(name='e_DE_dms', unit='dd:mm:ss', array=e_DEC.to_string(unit=u.deg, sep=(':', ':')), format='20A')
c13 = fits.Column(name='MAG', array=np.around(calibration_mag, 3), format='E')
c14 = fits.Column(name='e_MAG', array=np.around(calibration_mag_error, 3), format='E')
c15 = fits.Column(name='MAG_sex', array=np.around(final_objects[:, MAG_PSF], 3), format='E')
c16 = fits.Column(name='e_MAG_sex', array=np.around(final_objects[:, MAGERR_PSF], 3), format='E')
c17 = fits.Column(name='cl_SDSS', array=cl_SDSS, format='E')
c18 = fits.Column(name='SPREAD_MODEL', array=np.around(SM_flag, 2), format='E')
c19 = fits.Column(name='flag_calib', array=extrapolation_mag, format='3A')
c20 = fits.Column(name='Filter', array=np.array([name_filter] * len(final_objects[:, 0])), format='10A')
def degrees_to_hhmmss(ra):
    """Format a right ascension in decimal degrees as a sexagesimal string.

    Note: ``fields=2`` truncates the output to hours and minutes
    (e.g. ``12h30m``), so no seconds field is produced despite the name.
    """
    return Angle(ra, u.degree).to_string(unit=u.hour, fields=2)
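# Quick check of the two helpers above (exact output strings depend on the
# astropy version). With fields=2 the seconds field is dropped, so the names
# ddmmss/hhmmss slightly overstate the precision of the output.
from astropy import units as u
from astropy.coordinates import Angle

print(Angle(-12.5, u.degree).to_string(unit=u.degree, fields=2))  # -12d30m
print(Angle(187.5, u.degree).to_string(unit=u.hour, fields=2))    # 12h30m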
def solve_file(self, fname, solve_params, downsample=2, search_rad=10):
    """ Plate solve the specified file using solve-field

    :param str fname: Filename of the file to be solved.
    :param PlateSolveParameters solve_params: Parameters for plate solver.
    :param int downsample: Downsample factor for image.
    :param float search_rad: Number of degrees to search.
    :returns:
        solved_position (SkyCoord)
            The J2000 sky coordinate of the plate solve match,
            or None if no match was found.
        angle (Angle)
            Position angle of Y axis expressed as East of North.
    """
    # determine installed version of solve-field
    rev = self.probe_solve_field_revision()

    # example cmdline:
    # /usr/bin/solve-field -O --no-plots --no-verify --resort --no-fits2fits
    #     --downsample 2 -3 310.521 -4 45.3511 -5 10
    #     --config /etc/astrometry.cfg -W /tmp/solution.wcs plate_solve_image.fits

    cmd_line = self.exec_path
    cmd_line += ' -O --no-plots --no-verify --resort'
    cmd_line += f' --downsample {downsample}'

    # this is only needed for rev 0.67 or earlier
    if rev <= 0.67:
        cmd_line += ' --no-fits2fits'

    cmd_line += f' -3 {solve_params.radec.ra.degree}'
    cmd_line += f' -4 {solve_params.radec.dec.degree}'

    # give a guess of the pixel scale unless given as 0
    if solve_params.pixel_scale is not None and solve_params.pixel_scale > 0:
        scale = solve_params.pixel_scale
        cmd_line += ' -u arcsecperpix'
        cmd_line += f' -L {0.9*scale} -H {1.1*scale}'

    # search radius - default to 10 if not given
    if search_rad is None:
        search_rad = 10

    cmd_line += f' -5 {search_rad}'
    cmd_line += ' --config /etc/astrometry.cfg '
    cmd_line += ' --crpix-center '

    # disable most output files
    cmd_line += '-U none '
    cmd_line += '-I none '
    cmd_line += '-M none '
    cmd_line += '-R none '
    cmd_line += '-B none '

    with tempfile.TemporaryDirectory() as tmpdirname:
        logging.debug(f'Created temp dir {tmpdirname}')

        # put solve-field files in this temp dir
        new_fits_name = os.path.join(tmpdirname, "solved.fit")
        solved_name = os.path.join(tmpdirname, "solved")
        cmd_line += f'-D {tmpdirname} '
        cmd_line += f'-N {new_fits_name} '
        cmd_line += f'-S {solved_name} '
        cmd_line += ' ' + fname

        import shlex
        cmd_args = shlex.split(cmd_line)
        logging.debug(f'cmd_line for astrometry.net local = "{cmd_line}"')
        logging.debug(f'cmd_args for astrometry.net local = "{cmd_args}"')

        with subprocess.Popen(cmd_args,
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              universal_newlines=True) as net_proc:
            for l in net_proc.stdout:
                logging.debug(f'astrometrynetlocal: {l.strip()}')

        # see if solve succeeded
        if os.path.isfile(solved_name):
            logging.info('Solved file found!')
        else:
            logging.error('No solved file - solve failed!')
            return None

        # example solve-field output:
        # Field center: (RA,Dec) = (2.101258, 29.091103) deg.
        # Field center: (RA H:M:S, Dec D:M:S) = (00:08:24.302, +29:05:27.971).
        # Field size: 76.07 x 57.4871 arcminutes
        # Field rotation angle: up is 1.12149 degrees E of N

        # parse the WCS solution written by solve-field
        from astropy import wcs
        import astropy.io.fits as pyfits

        wcs_hdulist = pyfits.open(new_fits_name)
        w = wcs.WCS(wcs_hdulist[0].header)
        wcs_hdulist.close()

        solved_ra, solved_dec = w.wcs.crval
        logging.info(f'solved_ra solved_dec = {solved_ra} {solved_dec}')

        # derive pixel scales from the CD matrix
        # (note: by convention cdelt1 would be negative when det(CD) < 0,
        # but the sign is not needed here)
        cd_1_1 = w.wcs.cd[0][0]
        cd_1_2 = w.wcs.cd[0][1]
        cd_2_1 = w.wcs.cd[1][0]
        cd_2_2 = w.wcs.cd[1][1]

        cdelt1 = math.sqrt(cd_1_1**2 + cd_2_1**2)
        cdelt2 = math.sqrt(cd_1_2**2 + cd_2_2**2)

        # compute angle between North and the positive Y axis of sensor
        # positive is CCW
        crota = math.atan2(cd_2_1, cd_1_1)
        crota_deg = np.rad2deg(crota)
        logging.debug(
            f'cdelt = {cdelt1*3600:5.2f} {cdelt2*3600:5.2f} arcsec/pixel')
        logging.debug(f'crota/crota_deg = {crota} {crota_deg}')

        # get roll angle
        roll_angle_deg = -crota_deg
        logging.info(f'roll_angle_deg = {roll_angle_deg:5.2f}')

        solved_scale = cdelt1 * 3600
        solved_angle = roll_angle_deg

        radec = SkyCoord(ra=solved_ra * u.degree, dec=solved_dec * u.degree,
                         frame='fk5', equinox='J2000')
        logging.info(
            f"AstrometryNetLocal solved coordinates: {radec.to_string('hmsdms', sep=':')}"
        )

        return PlateSolveSolution(radec, pixel_scale=solved_scale,
                                  angle=Angle(solved_angle * u.degree),
                                  binning=solve_params.bin_x)
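# Hedged usage sketch for solve_file above. PlateSolveParameters, the solver
# instance, and the attribute names are assumed from the method body and are
# project-specific; solve-field itself must be installed with index files.
from astropy.coordinates import SkyCoord
import astropy.units as u

params = PlateSolveParameters()  # hypothetical parameter container
params.radec = SkyCoord(ra=310.521 * u.deg, dec=45.3511 * u.deg)
params.pixel_scale = 1.4         # arcsec/pixel guess, or 0 to skip the hint
params.bin_x = 1

solution = solver.solve_file('plate_solve_image.fits', params,
                             downsample=2, search_rad=10)
if solution is not None:
    print(solution.radec.to_string('hmsdms', sep=':'), solution.angle)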
class SdssTranslator(FitsTranslator):
    """Metadata translator for SDSS standard headers.

    NB: calibration data is not handled, as calibration frames were not
    available to the author at the time of writing.
    """

    name = "SDSS"
    """Name of this translation class"""

    supported_instrument = "Imager"
    """Supports the SDSS imager instrument."""

    default_resource_root = posixpath.join(CORRECTIONS_RESOURCE_ROOT, "SDSS")
    """Default resource path root to use to locate header correction files."""

    # SDSS has a rotator, but in drift scan mode, the instrument
    # angle on sky is set to +X=East, +Y=North, which we define as a
    # 0 degree rotation.
    _const_map = {"boresight_rotation_angle": Angle(0*u.deg),
                  "boresight_rotation_coord": "sky",
                  "dark_time": 0.0*u.s,  # Drift scan implies no dark time
                  "instrument": "Imager on SDSS 2.5m",  # We only ever ingest data from the imager
                  "telescope": "SDSS 2.5m",  # Value of TELESCOP in header is ambiguous
                  "relative_humidity": None,
                  "temperature": None,
                  "pressure": None,
                  "detector_serial": "UNKNOWN",
                  }

    _trivial_map = {"exposure_time": ("EXPTIME", dict(unit=u.s)),
                    "object": "OBJECT",
                    "physical_filter": "FILTER",
                    "exposure_id": "RUN",
                    "visit_id": "RUN",
                    "science_program": "OBJECT",  # This is the closest thing to a useful program
                    "detector_name": "CCDLOC",  # This is a numeric encoding of the "slot", i.e. filter+camcol
                    }

    # Need a mapping from unique name to index. The order is arbitrary.
    detector_name_id_map = {"g1": 0, "z1": 1, "u1": 2, "i1": 3, "r1": 4,
                            "g2": 5, "z2": 6, "u2": 7, "i2": 8, "r2": 9,
                            "g3": 10, "z3": 11, "u3": 12, "i3": 13, "r3": 14,
                            "g4": 15, "z4": 16, "u4": 17, "i4": 18, "r4": 19,
                            "g5": 20, "z5": 21, "u5": 22, "i5": 23, "r5": 24,
                            "g6": 25, "z6": 26, "u6": 27, "i6": 28, "r6": 29}

    @classmethod
    def can_translate(cls, header, filename=None):
        """Indicate whether this translation class can translate the
        supplied header.

        Parameters
        ----------
        header : `dict`-like
            Header to convert to standardized form.
        filename : `str`, optional
            Name of file being translated.

        Returns
        -------
        can : `bool`
            `True` if the header is recognized by this class. `False`
            otherwise.
        """
        if (cls.is_keyword_defined(header, "ORIGIN")
                and cls.is_keyword_defined(header, "CCDMODE")
                and cls.is_keyword_defined(header, "TELESCOP")
                and "2.5m" in header["TELESCOP"]
                and "SDSS" in header["ORIGIN"]
                and "DRIFT" in header["CCDMODE"]):
            return True
        return False

    @cache_translation
    def to_detector_unique_name(self):
        # Docstring will be inherited. Property defined in properties.py
        if self.is_key_ok("CAMCOL"):
            return self.to_physical_filter() + str(self._header["CAMCOL"])
        else:
            raise ValueError(f"{self._log_prefix}: CAMCOL key is not defined")

    @cache_translation
    def to_detector_num(self):
        # Docstring will be inherited. Property defined in properties.py
        return self.detector_name_id_map[self.to_detector_unique_name()]

    @cache_translation
    def to_observation_id(self):
        """Calculate the observation ID.

        Returns
        -------
        observation_id : `str`
            A string uniquely describing the observation.
            This incorporates the run, camcol, filter and frame.
        """
        return " ".join([str(self._header[el]) for el in ["RUN", "CAMCOL", "FILTER", "FRAME"]])

    @cache_translation
    def to_datetime_begin(self):
        # Docstring will be inherited. Property defined in properties.py
        # We know it is UTC
        value = self._from_fits_date_string(self._header["DATE-OBS"],
                                            time_str=self._header["TAIHMS"], scale="tai")
        self._used_these_cards("DATE-OBS", "TAIHMS")
        return value

    @cache_translation
    def to_datetime_end(self):
        # Docstring will be inherited. Property defined in properties.py
        return self.to_datetime_begin() + self.to_exposure_time()

    @cache_translation
    def to_location(self):
        """Calculate the observatory location.

        Returns
        -------
        location : `astropy.coordinates.EarthLocation`
            An object representing the location of the telescope.
        """
        # Look up the value since files do not have location
        value = EarthLocation.of_site("apo")
        return value

    @cache_translation
    def to_observation_type(self):
        """Calculate the observation type.

        Returns
        -------
        typ : `str`
            Observation type. Normalized to standard set.
        """
        obstype_key = "FLAVOR"
        if not self.is_key_ok(obstype_key):
            return "none"
        obstype = self._header[obstype_key].strip().lower()
        self._used_these_cards(obstype_key)
        return obstype

    @cache_translation
    def to_tracking_radec(self):
        # Docstring will be inherited. Property defined in properties.py
        radecsys = ("RADECSYS",)
        radecpairs = (("RA", "DEC"),)
        return tracking_from_degree_headers(self, radecsys, radecpairs, unit=u.deg)

    @cache_translation
    def to_altaz_begin(self):
        # Docstring will be inherited. Property defined in properties.py
        try:
            az = self._header["AZ"]
            alt = self._header["ALT"]
            # SDSS appears to define azimuth as increasing from South
            # through East; convert to the standard North-through-East
            # convention.
            az = (-az + 180.) % 360.
            altaz = AltAz(az * u.deg, alt * u.deg,
                          obstime=self.to_datetime_begin(), location=self.to_location())
            self._used_these_cards("AZ", "ALT")
            return altaz
        except Exception:
            if self.to_observation_type() != "science":
                return None  # Allow Alt/Az not to be set for calibrations
            raise

    @cache_translation
    def to_boresight_airmass(self):
        # Docstring will be inherited. Property defined in properties.py
        altaz = self.to_altaz_begin()
        if altaz is not None:
            return altaz.secz.value  # This is an estimate

    @cache_translation
    def to_detector_exposure_id(self):
        # Docstring will be inherited. Property defined in properties.py
        try:
            frame_field_map = dict(r=0, i=2, u=4, z=6, g=8)
            run = self._header["RUN"]
            filt = self._header["FILTER"]
            camcol = self._header["CAMCOL"]
            field = self._header["FRAME"] - frame_field_map[filt]
            self._used_these_cards("RUN", "FILTER", "CAMCOL", "FRAME")
        except Exception:
            if self.to_observation_type() != "science":
                return None
            raise
        filter_id_map = dict(u=0, g=1, r=2, i=3, z=4)
        return ((int(run) * 10 + filter_id_map[filt]) * 10 + int(camcol)) * 10000 + int(field)

    @cache_translation
    def to_detector_group(self):
        # Docstring will be inherited. Property defined in properties.py
        if self.is_key_ok("CAMCOL"):
            return str(self._header["CAMCOL"])
        else:
            raise ValueError(f"{self._log_prefix}: CAMCOL key is not defined")
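# Worked check of the azimuth-convention comment in to_altaz_begin above:
# SDSS azimuth increases from South through East, while AltAz expects
# North-through-East, so az_ne = (-az_se + 180) % 360.
for az_se, expected_ne in [(0.0, 180.0),    # South maps to 180 deg
                           (90.0, 90.0),    # East stays East
                           (180.0, 0.0),    # North maps to 0 deg
                           (270.0, 270.0)]:  # West stays West
    assert (-az_se + 180.) % 360. == expected_ne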
def test_select_sky_regions(): # create random observation table with many entries random_state = np.random.RandomState(seed=0) obs_table = make_test_observation_table(n_obs=100, random_state=random_state) # test sky box selection in gal coordinates lon_range = Angle([-100., 50.], 'deg') lat_range = Angle([-25., 25.], 'deg') frame = 'galactic' border = Angle(2., 'deg') selection = dict(type='sky_box', frame=frame, lon=lon_range, lat=lat_range, border=border) common_sky_region_select_test_routines(obs_table, selection) # test sky box selection in radec coordinates lon_range = Angle([150., 300.], 'deg') lat_range = Angle([-50., 0.], 'deg') frame = 'icrs' border = Angle(2., 'deg') selection = dict(type='sky_box', frame=frame, lon=lon_range, lat=lat_range, border=border) common_sky_region_select_test_routines(obs_table, selection) # test sky circle selection in gal coordinates lon_cen = Angle(0., 'deg') lat_cen = Angle(0., 'deg') radius = Angle(50., 'deg') frame = 'galactic' border = Angle(2., 'deg') selection = dict(type='sky_circle', frame=frame, lon=lon_cen, lat=lat_cen, radius=radius, border=border) common_sky_region_select_test_routines(obs_table, selection) # test sky circle selection in radec coordinates lon_cen = Angle(130., 'deg') lat_cen = Angle(-40., 'deg') radius = Angle(50., 'deg') frame = 'icrs' border = Angle(2., 'deg') selection = dict(type='sky_circle', frame=frame, lon=lon_cen, lat=lat_cen, radius=radius, border=border) common_sky_region_select_test_routines(obs_table, selection)
def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # make sure telescope coordinate is in range [-180°, 180°] if isinstance(self._data, UnitSphericalRepresentation): self._data.lon.wrap_angle = Angle(180, unit=u.deg)
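# Quick demonstration of the wrap_angle trick used above: a Longitude's
# wrap_angle sets the branch cut, so 180 deg re-expresses values in the
# [-180, 180) deg range (wrap_at does the same for a plain Angle).
from astropy.coordinates import Angle, Longitude
import astropy.units as u

lon = Longitude(300.0, unit=u.deg)
lon.wrap_angle = Angle(180, unit=u.deg)
print(lon)                                            # -60 deg
print(Angle(300.0, unit=u.deg).wrap_at(180 * u.deg))  # same result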
def _generator(self):
    with self.pyhessio.open_hessio(self.input_url) as file:
        # the container is initialized once, and data is replaced within
        # it after each yield
        counter = 0
        eventstream = file.move_to_next_event()
        data = DataContainer()
        data.meta['origin'] = "hessio"

        # some hessio_event_source specific parameters
        data.meta['input_url'] = self.input_url
        data.meta['max_events'] = self.max_events

        for event_id in eventstream:

            if counter == 0:
                # subarray info is only available when an event is loaded,
                # so load it on the first event.
                data.inst.subarray = self._build_subarray_info(file)

            obs_id = file.get_run_number()
            tels_with_data = set(file.get_teldata_list())
            data.count = counter
            data.r0.obs_id = obs_id
            data.r0.event_id = event_id
            data.r0.tels_with_data = tels_with_data
            data.r1.obs_id = obs_id
            data.r1.event_id = event_id
            data.r1.tels_with_data = tels_with_data
            data.dl0.obs_id = obs_id
            data.dl0.event_id = event_id
            data.dl0.tels_with_data = tels_with_data

            # handle telescope filtering by taking the intersection of
            # tels_with_data and allowed_tels
            if len(self.allowed_tels) > 0:
                selected = tels_with_data & self.allowed_tels
                if len(selected) == 0:
                    continue  # skip event
                data.r0.tels_with_data = selected
                data.r1.tels_with_data = selected
                data.dl0.tels_with_data = selected

            data.trig.tels_with_trigger = (
                file.get_central_event_teltrg_list())
            time_s, time_ns = file.get_central_event_gps_time()
            data.trig.gps_time = Time(time_s * u.s, time_ns * u.ns,
                                      format='unix', scale='utc')
            data.mc.energy = file.get_mc_shower_energy() * u.TeV
            data.mc.alt = Angle(file.get_mc_shower_altitude(), u.rad)
            data.mc.az = Angle(file.get_mc_shower_azimuth(), u.rad)
            data.mc.core_x = file.get_mc_event_xcore() * u.m
            data.mc.core_y = file.get_mc_event_ycore() * u.m
            first_int = file.get_mc_shower_h_first_int() * u.m
            data.mc.h_first_int = first_int
            data.mc.x_max = file.get_mc_shower_xmax() * u.g / (u.cm**2)
            data.mc.shower_primary_id = file.get_mc_shower_primary_id()

            # mc run header data
            data.mcheader.run_array_direction = Angle(
                file.get_mc_run_array_direction() * u.rad)

            # this should be done in a nicer way to not re-allocate the
            # data each time (right now it's just deleted and garbage
            # collected)
            data.r0.tel.clear()
            data.r1.tel.clear()
            data.dl0.tel.clear()
            data.dl1.tel.clear()
            data.mc.tel.clear()  # clear the previous telescopes

            for tel_id in tels_with_data:
                mc = data.mc.tel[tel_id]
                r0 = data.r0.tel[tel_id]
                r1 = data.r1.tel[tel_id]
                pointing = data.pointing[tel_id]

                adc_samples = file.get_adc_sample(tel_id)
                if adc_samples.size == 0:
                    adc_samples = file.get_adc_sum(tel_id)[..., None]
                dc_to_pe = file.get_calibration(tel_id)
                pedestal = file.get_pedestal(tel_id)
                r0.waveform = adc_samples
                r1.waveform = ((adc_samples - pedestal[..., np.newaxis])
                               * dc_to_pe[..., np.newaxis])

                mc.dc_to_pe = dc_to_pe
                mc.pedestal = pedestal
                r0.num_trig_pix = file.get_num_trig_pixels(tel_id)
                r0.trig_pix_id = file.get_trig_pixels(tel_id)
                mc.reference_pulse_shape = file.get_ref_shapes(tel_id)

                # load the data per telescope/pixel
                hessio_mc_npe = file.get_mc_number_photon_electron(tel_id)
                mc.photo_electron_image = hessio_mc_npe
                mc.meta['refstep'] = file.get_ref_step(tel_id)
                mc.time_slice = file.get_time_slice(tel_id)
                mc.azimuth_raw = file.get_azimuth_raw(tel_id)
                mc.altitude_raw = file.get_altitude_raw(tel_id)
                azimuth_cor = file.get_azimuth_cor(tel_id)
                altitude_cor = file.get_altitude_cor(tel_id)

                # hessio returns 0 when there is no altitude/azimuth
                # correction, so fall back to the raw pointing in that case
                if azimuth_cor == 0 and mc.azimuth_raw != 0:
                    mc.azimuth_cor = np.nan
                    pointing.azimuth = u.Quantity(mc.azimuth_raw, u.rad)
                else:
                    mc.azimuth_cor = azimuth_cor
                    pointing.azimuth = u.Quantity(azimuth_cor, u.rad)

                if altitude_cor == 0 and mc.altitude_raw != 0:
                    mc.altitude_cor = np.nan
                    pointing.altitude = u.Quantity(mc.altitude_raw, u.rad)
                else:
                    mc.altitude_cor = altitude_cor
                    pointing.altitude = u.Quantity(mc.altitude_cor, u.rad)

            yield data
            counter += 1

    return
def make_theta_squared_table(observations, theta_squared_axis, position, position_off=None):
    """Make theta squared distribution in the same FoV for a list of `Observation` objects.

    The ON theta2 profile is computed from the given ``position``. By default,
    the OFF theta2 profile is extracted from a mirror position radially
    symmetric in the FoV with respect to the pointing position.

    The ON and OFF regions are assumed to be of the same size, so the
    normalisation factor between both regions is alpha = 1.

    Parameters
    ----------
    observations: `~gammapy.data.Observations`
        List of observations
    theta_squared_axis : `~gammapy.maps.geom.MapAxis`
        Axis of the theta2 bin edges used to compute the distribution
    position : `~astropy.coordinates.SkyCoord`
        Position from which the ON theta^2 distribution is computed
    position_off : `astropy.coordinates.SkyCoord`
        Position from which the OFF theta^2 distribution is computed.
        Default: reflected position w.r.t. the pointing position

    Returns
    -------
    table : `~astropy.table.Table`
        Table containing the ON counts, the OFF counts, acceptance,
        OFF acceptance and alpha for each theta squared bin.
    """
    if not theta_squared_axis.edges.unit.is_equivalent("deg2"):
        raise ValueError("The theta2 axis should be equivalent to deg2")

    table = Table()

    table["theta2_min"] = theta_squared_axis.edges[:-1]
    table["theta2_max"] = theta_squared_axis.edges[1:]
    table["counts"] = 0
    table["counts_off"] = 0
    table["acceptance"] = 0.0
    table["acceptance_off"] = 0.0

    alpha_tot = np.zeros(len(table))
    livetime_tot = 0

    create_off = position_off is None
    for observation in observations:
        separation = position.separation(observation.events.radec)
        counts, _ = np.histogram(separation**2, theta_squared_axis.edges)
        table["counts"] += counts

        if create_off:
            # Estimate the mirror position: the ON position reflected
            # through the pointing position
            pos_angle = observation.pointing_radec.position_angle(position)
            sep_angle = observation.pointing_radec.separation(position)
            position_off = observation.pointing_radec.directional_offset_by(
                pos_angle + Angle(np.pi, "rad"), sep_angle)

        # Angular distance of the events from the mirror position
        separation_off = position_off.separation(observation.events.radec)

        # Extract the OFF theta2 distribution from the mirror position
        counts_off, _ = np.histogram(separation_off**2, theta_squared_axis.edges)
        table["counts_off"] += counts_off

        # Normalisation between ON and OFF is one
        acceptance = np.ones(theta_squared_axis.nbin)
        acceptance_off = np.ones(theta_squared_axis.nbin)

        table["acceptance"] += acceptance
        table["acceptance_off"] += acceptance_off
        alpha = acceptance / acceptance_off
        alpha_tot += alpha * observation.observation_live_time_duration.to_value("s")
        livetime_tot += observation.observation_live_time_duration.to_value("s")

    alpha_tot /= livetime_tot

    table["alpha"] = alpha_tot

    stat = WStatCountsStatistic(table["counts"], table["counts_off"], table["alpha"])
    table["excess"] = stat.n_sig
    table["sqrt_ts"] = stat.sqrt_ts
    table["excess_errn"] = stat.compute_errn()
    table["excess_errp"] = stat.compute_errp()

    table.meta["ON_RA"] = position.icrs.ra
    table.meta["ON_DEC"] = position.icrs.dec
    return table
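# Hedged usage sketch for make_theta_squared_table above. The MapAxis API is
# assumed to match gammapy (from_bounds signature as in recent releases, which
# may differ in older versions); `observations` is assumed to be a
# gammapy.data.Observations instance loaded elsewhere.
import astropy.units as u
from astropy.coordinates import SkyCoord
from gammapy.maps import MapAxis

theta2_axis = MapAxis.from_bounds(0, 0.2, nbin=20, interp="lin", unit="deg2")
position = SkyCoord(83.633, 22.014, unit="deg", frame="icrs")  # Crab nebula

table = make_theta_squared_table(observations, theta2_axis, position)
print(table["theta2_min", "theta2_max", "counts", "counts_off", "sqrt_ts"])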
def factorDirectionsWriter(table, fileName):
    """
    Writes patches to a Factor directions file.

    Note that Factor respects the order of patches; they are sorted here by
    apparent flux from brightest to faintest.

    Parameters
    ----------
    table : astropy.table.Table object
        Input sky model table; must have patches defined
    fileName : str
        Output file to which the sky model is written
    """
    log = logging.getLogger('LSMTool.Write')
    regionFile = open(fileName, 'w')
    log.debug('Writing Factor directions file to {0}'.format(fileName))

    outLines = []
    outLines.append('# name position atrous_do mscale_field_do cal_imsize '
                    'solint_ph solint_amp dynamic_range region_selfcal '
                    'region_field peel_skymodel outlier_source cal_radius_deg cal_flux\n')
    if 'History' in table.meta:
        outLines.append('\n# LSMTool history:\n# ')
        outLines.append('\n# '.join(table.meta['History']))
    outLines.append('\n')
    outLines.append('\n')

    # Make sure all columns have the correct units
    for colName in table.columns:
        units = allowedColumnUnits[colName.lower()]
        if units is not None:
            table[colName].convert_unit_to(units)

    table = table.group_by('Patch')
    patchNames = table.groups.keys['Patch']
    if 'patch_order' in table.meta:
        indx = table.meta['patch_order']
    else:
        indx = range(len(table.groups))
    if 'patch_size' in table.meta:
        sizes = table.meta['patch_size']
    else:
        sizes = [''] * len(table.groups)
    if 'patch_flux' in table.meta:
        fluxes = table.meta['patch_flux']
    else:
        fluxes = [''] * len(table.groups)
    for patchName, size, flux in zip(patchNames[indx], sizes[indx], fluxes[indx]):
        if patchName in table.meta:
            gRA, gDec = table.meta[patchName]
        else:
            # Angle requires a unit, so default to 0 deg when the patch
            # has no stored position
            gRA = Angle(0.0, unit='deg')
            gDec = Angle(0.0, unit='deg')
        outLines.append('{0} {1},{2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} '
                        '{13} {14}\n'.format(patchName,
                                             gRA.to_string(unit='hourangle', sep='hms'),
                                             gDec.to_string(sep='dms'), 'empty', 'empty',
                                             0, 0, 0, 'LD', 'empty', 'empty', 'empty',
                                             False, size, flux))

    regionFile.writelines(outLines)
    regionFile.close()
def validate(cls, v): return Angle(v)
def create_bg_observation_list(indir, scheme, outdir, overwrite, test):
    """Make total observation list and filter the observations.

    In this first version, all observations taken within 3 deg of a known
    source are rejected. If a source is extended, twice its extension is
    added to the corresponding exclusion region radius of 3 deg.

    Parameters
    ----------
    indir : str
        Input directory (that contains the event lists)
    scheme : str
        Scheme of file naming.
    outdir : str
        Dir path to store the results.
    overwrite : bool
        If true, overwrite existing output files.
    test : bool
        If true, run fast: skip many runs and catalog sources.
    """
    log.info(' ')
    log.info("#######################################")
    log.info("# Starting create_bg_observation_list #")
    log.info("#######################################")

    # get full list of observations
    data_store = DataStore(dir=indir, scheme=scheme)
    observation_table = data_store.make_observation_table()

    # for testing, only process a small subset of observations
    if test and len(observation_table) > 100:
        observation_table = observation_table.select_linspace_subset(num=100)
    log.debug(' ')
    log.debug("Full observation table:")
    log.debug(observation_table)

    # filter observations: load catalog and reject obs too close to sources

    # load catalog: TeVCAT (no H.E.S.S. catalog)
    catalog = load_catalog_tevcat()

    # for testing, only process a small subset of sources
    if test:
        catalog = catalog[:5]

    # sources coordinates
    sources_coord = SkyCoord(catalog['coord_ra'], catalog['coord_dec'])

    # sources sizes (x, y): radius
    sources_size = Angle([catalog['size_x'], catalog['size_y']])
    sources_size = sources_size.reshape(len(catalog), 2)
    # substitute nan with 0
    sources_size[np.isnan(sources_size)] = 0
    # sources max size
    sources_max_size = np.amax(sources_size, axis=1)

    # sources exclusion radius = 2x max size + 3 deg (fov + 0.5 deg?)
    sources_excl_radius = 2 * sources_max_size + Angle(3., 'deg')

    # mask all obs taken within the excl radius of any of the sources
    # loop over sources
    obs_coords = SkyCoord(observation_table['RA'], observation_table['DEC'])
    for i_source in range(len(catalog)):
        selection = dict(type='sky_circle', frame='icrs',
                         lon=sources_coord[i_source].ra,
                         lat=sources_coord[i_source].dec,
                         radius=sources_excl_radius[i_source],
                         inverted=True,
                         border=Angle(0., 'deg'))
        observation_table = observation_table.select_observations(selection)

    # save the bg observation list to a fits file
    outfile = Path(outdir) / 'bg_observation_table.fits.gz'
    log.info("Writing {}".format(outfile))
    observation_table.write(str(outfile), overwrite=overwrite)
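# Worked example of the exclusion-radius rule used above
# (radius = 2 * max(size_x, size_y) + 3 deg), using the same Angle
# operations as the function body:
import numpy as np
from astropy.coordinates import Angle

sources_size = Angle([[0.3, 0.1], [np.nan, np.nan]], 'deg')  # (x, y) per source
sources_size[np.isnan(sources_size)] = 0
sources_max_size = np.amax(sources_size, axis=1)
sources_excl_radius = 2 * sources_max_size + Angle(3., 'deg')
print(sources_excl_radius)  # [3.6, 3.0] deg: extended source, point-like source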
def offset(self): """Event offset from the array pointing position (`~astropy.coordinates.Angle`).""" position = self.radec center = self.pointing_radec offset = center.separation(position) return Angle(offset, unit="deg")
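# Hedged usage sketch for the offset property above (gammapy EventList-style
# API; the `events` instance and its attributes are assumed from the property
# body, not shown here).
import numpy as np
from astropy.coordinates import Angle

offsets = events.offset  # Angle array, one entry per event
# e.g. select events within 2 deg of the array pointing position:
mask = offsets < Angle(2.0, unit="deg")
print(np.sum(mask), "events inside the 2 deg offset cut")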