bandpass = dorado.sensitivity.bandpasses.NUV_D

st.sidebar.markdown('## Background')

night = st.sidebar.checkbox('Orbit night (low airglow background)', True)

zodi = st.sidebar.radio('Zodiacal light background',
                        ('Low', 'Medium', 'High', 'Specific time and place'))

if zodi == 'Specific time and place':
    time = st.sidebar.text_input(
        """Time (e.g. '2025-03-01 12:00:00')""", '2025-03-01 12:00:00')
    try:
        time = Time(time)
    except ValueError:
        st.sidebar.error('Did not understand time format')
        st.stop()
    st.sidebar.success(f'Resolved to {time.isot}')

    coord = st.sidebar.text_input(
        """Coordinates (e.g. 'NGC 4993', '13h09m47.706s -23d23'01.79"')""",
        'NGC 4993')
    try:
        coord = SkyCoord.from_name(coord)
    except NameResolveError:
        try:
            coord = SkyCoord(coord)
        except ValueError:
            st.sidebar.error('Did not understand coordinate format')
# corr[0] = XX, corr[1] = XY, corr[2] = YX, corr[3] = YY
c = 299792458.0  # speed of light (m/s)

spw_table = '::'.join((myms, 'SPECTRAL_WINDOW'))
for a in xds_from_table(spw_table):
    chan = a.CHAN_FREQ.data.squeeze(0).compute()
    wavel = c / chan

# msdata = xms.xds_from_ms(myms, columns=['TIME', 'FLAG', 'FIELD_ID', 'UVW', 'CORRECTED_DATA', 'DATA'])
for i in datasets:
    mjd = i.TIME.values / 86400  # MS TIME is in seconds; convert to days
    t = Time(mjd, format='mjd')
    st_time = t.iso
    v = i.UVW.values[:, 1]
    u = i.UVW.values[:, 0]
    phase = numpy.angle(i.DATA.values[:, 0])  # radians
    amp = numpy.abs(i.DATA.values[:, 0])
    real = numpy.real(i.DATA.values[:, 0])
    imag = numpy.imag(i.DATA.values[:, 0])
    # corr = p.POLARIZATION.CORR_PRODUCT.data
    # print(st_time)

    # UV distance
    uvdist = ((u**2.0) + (v**2.0))**0.5

    # convert the xarray into a pandas dataframe to prepare it for plotting ...
    # frame = i.to_dataframe()
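
# A minimal, self-contained check of the two conversions above (not from the
# original source): Measurement Set TIME values are seconds on the MJD scale,
# as the loop above assumes, so dividing by 86400 yields MJD days, and the UV
# distance is the quadrature sum of u and v. The values below are made up for
# illustration.
import numpy
from astropy.time import Time

sample_time_s = numpy.array([5.0e9])   # hypothetical MS TIME value (seconds)
print(Time(sample_time_s / 86400, format='mjd').iso)

sample_u, sample_v = numpy.array([100.0]), numpy.array([50.0])  # metres
print(numpy.hypot(sample_u, sample_v))  # same as ((u**2.0) + (v**2.0))**0.5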
def cone_search_async(self, coo, rad, epoch, location='500',
                      position_error=120, find_planets=True,
                      find_asteroids=True, find_comets=True,
                      get_query_payload=False, get_raw_response=False,
                      cache=True):
    """
    This method queries the IMCCE
    `SkyBoT <http://vo.imcce.fr/webservices/skybot/?conesearch>`_
    cone search service and produces a `~astropy.table.QTable` object
    containing all Solar System bodies that might be in the cone
    defined by the cone center coordinates and epoch provided.

    Parameters
    ----------
    coo : `~astropy.coordinates.SkyCoord` object or tuple
        Center coordinates of the search cone in ICRS coordinates. If
        provided as a tuple, the input is expected as (right ascension
        in degrees, declination in degrees).
    rad : `~astropy.units.Quantity` object or float
        Radius of the search cone. If no units are provided (input as
        float), degrees are assumed. The maximum search radius is 10
        degrees; if this maximum radius is exceeded, it will be clipped
        and a warning will be provided to the user.
    epoch : `~astropy.time.Time` object, float, or string
        Epoch of search process in UT. If provided as float, it is
        interpreted as Julian Date; if provided as string, it is
        interpreted as a date in the form ``'YYYY-MM-DD HH:MM:SS'``.
    location : int or str, optional
        Location of the observer on Earth as defined in the official
        `list of IAU codes
        <https://www.minorplanetcenter.net/iau/lists/ObsCodes.html>`_.
        Default: geocentric location (``'500'``)
    position_error : `~astropy.units.Quantity` or float, optional
        Maximum positional error for targets to be queried. If no unit
        is provided, arcseconds are assumed. The maximum positional
        error is 120 arcseconds; larger values are clipped and a
        warning will be provided to the user. Default: 120 arcseconds
    find_planets : boolean, optional
        If ``True``, planets will be included in the search. Default:
        ``True``
    find_asteroids : boolean, optional
        If ``True``, asteroids will be included in the search.
        Default: ``True``
    find_comets : boolean, optional
        If ``True``, comets will be included in the search. Default:
        ``True``
    get_query_payload : boolean, optional
        Returns the query payload only and performs no query.
        Default: ``False``
    get_raw_response : boolean, optional
        Returns the raw response as provided by the IMCCE server
        instead of the parsed output. Default: ``False``
    cache : boolean, optional
        Cache this specific query so it might be retrieved faster in
        the future.
        Default: ``True``

    Notes
    -----
    The following parameters are queried from the SkyBoT service:

    +------------------+-----------------------------------------------+
    | Column Name      | Definition                                    |
    +==================+===============================================+
    | ``'Number'``     | Target Number (``-1`` if none provided, int) |
    +------------------+-----------------------------------------------+
    | ``'Name'``       | Target Name (str)                             |
    +------------------+-----------------------------------------------+
    | ``'RA'``         | Target RA (J2000, deg, float)                 |
    +------------------+-----------------------------------------------+
    | ``'DEC'``        | Target declination (J2000, deg, float)        |
    +------------------+-----------------------------------------------+
    | ``'Type'``       | Target dynamical/physical type (str)          |
    +------------------+-----------------------------------------------+
    | ``'V'``          | Target apparent brightness (V-band, mag,      |
    |                  | float)                                        |
    +------------------+-----------------------------------------------+
    | ``'posunc'``     | Positional uncertainty (arcsec, float)        |
    +------------------+-----------------------------------------------+
    | ``'centerdist'`` | Angular distance of target from cone center   |
    |                  | (arcsec, float)                               |
    +------------------+-----------------------------------------------+
    | ``'RA_rate'``    | RA rate of motion (arcsec/hr, float)          |
    +------------------+-----------------------------------------------+
    | ``'DEC_rate'``   | Declination rate of motion (arcsec/hr, float) |
    +------------------+-----------------------------------------------+
    | ``'geodist'``    | Geocentric distance of target (au, float)     |
    +------------------+-----------------------------------------------+
    | ``'heliodist'``  | Heliocentric distance of target (au, float)   |
    +------------------+-----------------------------------------------+
    | ``'alpha'``      | Solar phase angle (deg, float)                |
    +------------------+-----------------------------------------------+
    | ``'elong'``      | Solar elongation angle (deg, float)           |
    +------------------+-----------------------------------------------+
    | ``'x'``          | Target equatorial vector x (au, float)        |
    +------------------+-----------------------------------------------+
    | ``'y'``          | Target equatorial vector y (au, float)        |
    +------------------+-----------------------------------------------+
    | ``'z'``          | Target equatorial vector z (au, float)        |
    +------------------+-----------------------------------------------+
    | ``'vx'``         | Target velocity vector x (au/d, float)        |
    +------------------+-----------------------------------------------+
    | ``'vy'``         | Target velocity vector y (au/d, float)        |
    +------------------+-----------------------------------------------+
    | ``'vz'``         | Target velocity vector z (au/d, float)        |
    +------------------+-----------------------------------------------+
    | ``'epoch'``      | Ephemerides epoch (JD, float)                 |
    +------------------+-----------------------------------------------+

    Examples
    --------
    >>> from astroquery.imcce import Skybot
    >>> from astropy.coordinates import SkyCoord
    >>> from astropy.time import Time
    >>> import astropy.units as u
    >>> field = SkyCoord(1*u.deg, 1*u.deg)
    >>> epoch = Time('2019-05-29 21:42', format='iso')
    >>> Skybot.cone_search(field, 0.1*u.deg, epoch)  # doctest: +SKIP
    <QTable length=2>
    Number   Name          RA         ...      vy          vz       epoch
                          deg         ...    AU / d      AU / d       d
    int64    str9       float64       ...   float64     float64    float64
    ------ --------- ------------------ ... ----------- ----------- ---------
    180969 2005 MM39 1.0019566666666666 ...  0.00977568 0.003022634 2458630.0
    107804 2001 FV58 1.0765258333333332 ... 0.006551369 0.003846177 2458630.0
    """
    URL = conf.skybot_server
    TIMEOUT = conf.timeout

    # check for types and units
    if not isinstance(coo, SkyCoord):
        coo = SkyCoord(ra=coo[0]*u.degree, dec=coo[1]*u.degree,
                       frame='icrs')
    if isinstance(rad, u.Quantity):
        rad = Angle(rad.value, unit=rad.unit)
    if not isinstance(rad, u.Quantity):
        rad = Angle(rad, unit=u.degree)
    if rad > Angle(10, unit=u.degree):
        rad = Angle(10, unit=u.degree)
        warnings.warn('search cone radius set to maximum: 10 deg',
                      UserWarning)
    if isinstance(epoch, (int, float)):
        epoch = Time(epoch, format='jd')
    elif isinstance(epoch, str):
        epoch = Time(epoch, format='iso')
    if isinstance(position_error, u.Quantity):
        position_error = Angle(position_error.value,
                               unit=position_error.unit)
    if not isinstance(position_error, u.Quantity):
        position_error = Angle(position_error, unit=u.arcsec)
    if position_error > Angle(120, unit=u.arcsec):
        position_error = Angle(120, unit=u.arcsec)
        warnings.warn('positional error set to maximum: 120 arcsec',
                      UserWarning)

    # assemble payload
    request_payload = {'-ra': coo.ra.deg,
                       '-dec': coo.dec.deg,
                       '-rd': rad.deg,
                       '-ep': str(epoch.jd),
                       '-loc': str(location),
                       '-filter': position_error.arcsec,
                       '-objFilter': (str(int(find_asteroids)) +
                                      str(int(find_planets)) +
                                      str(int(find_comets))),
                       '-refsys': 'EQJ2000',
                       '-output': 'all',
                       '-mime': 'text'}

    # check for diagnostic flags
    if get_query_payload:
        return request_payload

    self._get_raw_response = get_raw_response

    response = self._request(method='GET', url=URL,
                             params=request_payload,
                             timeout=TIMEOUT, cache=cache)
    self._uri = response.url

    return response
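
# Usage sketch (not part of the original source): inspect the assembled query
# payload without contacting the server by passing get_query_payload=True
# through the public Skybot cone-search interface, then check two of the keys
# built by the method above.
from astroquery.imcce import Skybot
from astropy.coordinates import SkyCoord
from astropy.time import Time
import astropy.units as u

field = SkyCoord(10 * u.deg, 5 * u.deg)
payload = Skybot.cone_search(field, 0.5 * u.deg,
                             Time('2019-05-29 21:42'),
                             get_query_payload=True)
print(payload['-ra'], payload['-rd'])  # 10.0 0.5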
def readsdofile(datadir=None, wavelength=None, jdtime=None, isexists=False,
                timtol=1):
    '''
    Read an SDO file from the local database.

    :param datadir: root directory of the local SDO database
    :param wavelength: AIA passband of the file(s) to read
    :param jdtime: the timestamp or timerange in Julian days. If a timerange,
        return a list of files in the timerange.
    :param isexists: check if the file exists. If files exist, return the
        file name(s).
    :param timtol: time difference tolerance in days for considering data as
        the same timestamp
    :return:
    '''
    from astropy.time import Time
    import sunpy.map
    from datetime import date
    from datetime import timedelta as td

    if timtol < 12. / 3600 / 24:
        timtol = 12. / 3600 / 24
    if isinstance(jdtime, (list, tuple)) or type(jdtime) == np.ndarray:
        if len(jdtime) != 2:
            raise ValueError(
                'jdtime must be a number or a two-element array/list/tuple')
        if jdtime[1] < jdtime[0]:
            raise ValueError('start time must occur earlier than end time!')
        sdofitspath = []
        jdtimestr = [Time(ll, format='jd').iso for ll in jdtime]
        ymd = [ll.split(' ')[0].split('-') for ll in jdtimestr]
        d1 = date(int(ymd[0][0]), int(ymd[0][1]), int(ymd[0][2]))
        d2 = date(int(ymd[1][0]), int(ymd[1][1]), int(ymd[1][2]))
        delta = d2 - d1
        for i in range(delta.days + 1):  # xrange is Python 2 only
            ymd = d1 + td(days=i)
            sdofitspathtmp = glob.glob(
                datadir + '/{:04d}/{:02d}/{:02d}/aia.lev1_*Z.{}.image_lev1.fits'.format(
                    ymd.year, ymd.month, ymd.day, wavelength))
            if len(sdofitspathtmp) > 0:
                sdofitspath = sdofitspath + sdofitspathtmp
        if len(sdofitspath) == 0:
            if isexists:
                return sdofitspath
            else:
                raise ValueError(
                    'No SDO file found under {} in the time range {} to {}. '
                    'Download the data with EvtBrowser first.'.format(
                        datadir, jdtimestr[0], jdtimestr[1]))
        sdofits = [os.path.basename(ll) for ll in sdofitspath]
        sdotimeline = Time([
            insertchar(insertchar(
                ll.split('.')[2].replace('T', ' ').replace('Z', ''),
                ':', -4), ':', -2) for ll in sdofits],
            format='iso', scale='utc')
        sdofitspathnew = [x for (y, x) in sorted(zip(sdotimeline.jd,
                                                     sdofitspath))]
        sdofitsnew = [os.path.basename(ll) for ll in sdofitspathnew]
        sdotimelinenew = Time([
            insertchar(insertchar(
                ll.split('.')[2].replace('T', ' ').replace('Z', ''),
                ':', -4), ':', -2) for ll in sdofitsnew],
            format='iso', scale='utc')
        sdofile = list(np.array(sdofitspathnew)[np.where(
            np.logical_and(jdtime[0] < sdotimelinenew.jd,
                           sdotimelinenew.jd < jdtime[1]))[0]])
        return sdofile
    else:
        jdtimstr = Time(jdtime, format='jd').iso
        ymd = jdtimstr.split(' ')[0].split('-')
        sdofitspath = glob.glob(
            datadir + '/{}/{}/{}/aia.lev1_*Z.{}.image_lev1.fits'.format(
                ymd[0], ymd[1], ymd[2], wavelength))
        if len(sdofitspath) == 0:
            raise ValueError('No SDO file found under {}.'.format(datadir))
        sdofits = [os.path.basename(ll) for ll in sdofitspath]
        sdotimeline = Time([
            insertchar(insertchar(
                ll.split('.')[2].replace('T', ' ').replace('Z', ''),
                ':', -4), ':', -2) for ll in sdofits],
            format='iso', scale='utc')
        if timtol < np.min(np.abs(sdotimeline.jd - jdtime)):
            raise ValueError('No SDO file found at the selected timestamp. '
                             'Download the data with EvtBrowser first.')
        idxaia = np.argmin(np.abs(sdotimeline.jd - jdtime))
        sdofile = sdofitspath[idxaia]
        if isexists:
            return sdofile
        else:
            try:
                sdomap = sunpy.map.Map(sdofile)
                return sdomap
            except Exception:
                raise ValueError('File not found or invalid input')
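
# Usage sketch (not from the original source; the database root below is
# hypothetical): a single Julian Date returns the nearest file as a sunpy
# Map, while a two-element timerange returns the matching file paths.
from astropy.time import Time

jd0 = Time('2014-11-01 16:40:00').jd
aiamap = readsdofile(datadir='/data/SDO/aia', wavelength='171', jdtime=jd0)
files = readsdofile(datadir='/data/SDO/aia', wavelength='171',
                    jdtime=[jd0, jd0 + 0.5 / 24])  # half-hour window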
if use_fnu:
    output_filename = core_name + '_fnu_sensitivity_curve.txt'
else:
    pass

obs_fits = fits.open(observed_file)
header = fits.getheader(observed_file)
obs_waves1 = obs_fits[0].data
obs_flux1 = obs_fits[1].data

airmass = header['AIRMASS']
obs_time = header['OPENTIME']
obs_date = header['OPENDATE']
obs_time = obs_date + 'T' + obs_time
obs_time = Time(obs_time, format='isot', scale='utc').mjd
exptime = header['EXPTIME']

obs_spec = np.vstack([obs_waves1, obs_flux1])
# Read extension 4 inside the try block so a missing dlambda extension is
# actually caught as an IndexError
try:
    dlambda = obs_fits[4].data
    obs_spec = np.copy(spt.counts_to_flambda(obs_spec, dlambda))
    print('Observed spectrum in units of erg/s/cm^2/angstrom')
except IndexError:
    print('No dlambda extension in observed FITS file. '
          'You need to redo wave_cal.py to include that extension.')
    print('This file was most likely generated before 2019-07-16, '
          'when this change was implemented.')
    sys.exit()
def inttag(tagfile, output, starttime=None, increment=None, rcount=1,
           highres=False, allevents=False, verbose=True):
    """Convert an events table of TIMETAG into an integrated ACCUM image.

    Parameters
    ----------
    tagfile: str
        input file that contains TIMETAG event stream. This is ordinarily a
        FITS file containing two tables. The TIMETAG data are in the table
        with EXTNAME = "EVENTS", and the "good time intervals" are in the
        table with EXTNAME = "GTI". If the GTI table is missing or empty,
        all times will be considered "good".
    output: str
        Name of the output FITS file.
    starttime: float
        Start time for integrating events, in units of seconds since the
        beginning of the exposure. The default value of None means that the
        start time will be set to the first START time in the GTI table.
    increment: float
        Time interval in seconds. The default value of None means integrate
        to the last STOP time in the GTI table, divided by rcount.
    rcount: int
        Repeat count, the number of output image sets to create. If rcount
        is greater than 1 and increment is not specified, the total exposure
        time will be subdivided by rcount.
    highres: bool
        Create a high resolution output image? Default is False.
    allevents: bool
        If allevents is set to True, all events in the input EVENTS table
        will be accumulated into the output image. The TIME column in the
        EVENTS table will only be used to determine the exposure time, and
        the GTI table will be ignored.
    verbose: bool
        Print additional info?

    Returns
    -------
    """
    # Open input file (_tag.fits)
    with fits.open(tagfile) as tag_hdr:

        # Read in TIME-TAG data (events data and GTI data)
        events_data = tag_hdr[1].data
        if allevents:
            # If allevents, ignore the GTI table and generate gti_data based
            # on the times of the first and last event
            gti_data = np.rec.array(
                [(events_data['TIME'][0], events_data['TIME'][-1])],
                formats=">f8,>f8", names='START, STOP')
        else:
            # Otherwise, retrieve the GTIs from the GTI extension
            gti_data = tag_hdr['GTI'].data

        # Read in relevant header info
        tag_pri_hdr = tag_hdr[0].header
        cenx = tag_pri_hdr['CENTERA1']     # xcenter in c code
        ceny = tag_pri_hdr['CENTERA2']     # ycenter in c code
        siz_axx = tag_pri_hdr['SIZAXIS1']  # nx in c code
        siz_axy = tag_pri_hdr['SIZAXIS2']  # ny in c code
        tag_sci_hdr = tag_hdr[1].header
        tzero_mjd = tag_sci_hdr['EXPSTART']  # MJD zero point

        # Determine start and stop times for counting events
        gti_start = gti_data['START'][0]
        gti_stop = gti_data['STOP'][-1]

        # Calculate corners from array size and centers
        xcorner = ((cenx - siz_axx / 2.) - 1) * 2
        ycorner = ((ceny - siz_axy / 2.) - 1) * 2

        # Adjust axis sizes for highres, determine binning
        bin_n = 2
        if highres:
            siz_axx *= 2
            siz_axy *= 2
            bin_n = 1

        ltvx = ((bin_n - 2.) / 2. - xcorner) / bin_n
        ltvy = ((bin_n - 2.) / 2. - ycorner) / bin_n
        ltm = 2. / bin_n

        # Read in start and stop time parameters
        if starttime is None or starttime < gti_start:
            starttime = gti_start  # The first START time in the GTI (or first event)

        if increment is None:
            increment = (gti_stop - gti_start) / rcount

        stoptime = starttime + increment

        imset_hdr_ver = 0  # output header value corresponding to imset
        texptime = 0       # total exposure time
        hdu_list = []
        for imset in range(rcount):
            # Truncate stoptime at the last available event time (GTI or
            # allevents) if it exceeds that
            if stoptime > gti_stop:
                stoptime = gti_stop

            # Get exposure times
            exp_time, expstart, expstop, good_events = exp_range(
                starttime, stoptime, events_data, gti_data, tzero_mjd)
            if len(good_events) == 0:
                if verbose:
                    print("Skipping imset, due to no overlap with GTI\n",
                          starttime, stoptime)
                starttime = stoptime
                stoptime += increment
                continue

            imset_hdr_ver += 1

            if imset_hdr_ver == 1:
                # If first science header, texpstart keyword value is expstart
                texpstart = expstart
            texpend = expstop  # texpend will be expstop of the last imset

            if verbose:
                print("imset: {}, start: {}, stop: {}, exposure time: {}".format(
                    imset_hdr_ver, starttime, stoptime, exp_time))

            # Convert events table to accum image
            accum = events_to_accum(good_events, siz_axx, siz_axy, highres)

            # Calculate errors from the accum image
            # Note: the C version takes the square root of the counts;
            # inttag.py uses a more robust confidence interval
            conf_int = astropy.stats.poisson_conf_interval(
                accum, interval='sherpagehrels', sigma=1)
            # error is the difference between the upper confidence boundary
            # and the data
            err = conf_int[1] - accum

            # Copy EVENTS extension header to SCI, ERR, DQ extensions
            sci_hdu = fits.ImageHDU(data=accum, header=tag_sci_hdr.copy(),
                                    name='SCI')
            err_hdu = fits.ImageHDU(data=err, header=tag_sci_hdr.copy(),
                                    name='ERR')
            dq_hdu = fits.ImageHDU(header=tag_sci_hdr.copy(), name='DQ')

            # Generate datetime for 'DATE' header keyword
            dtstr = str(dt.utcnow())
            date, h, m, s = [dtstr.split()[0],
                             dtstr.split()[1].split(':')[0],
                             dtstr.split()[1].split(':')[1],
                             str(round(float(dtstr.split()[1].split(':')[-1])))]
            if len(s) == 1:
                s = '0' + s
            dtval = date + 'T' + h + ':' + m + ':' + s

            # Populate extensions
            for idx, hdu in enumerate([sci_hdu, err_hdu, dq_hdu]):
                hdu.header['EXPTIME'] = exp_time
                hdu.header['EXPSTART'] = expstart
                hdu.header['EXPEND'] = expstop

                date_obs, time_obs = Time(float(expstart),
                                          format='mjd').isot.split('T')
                hdu.header['DATE-OBS'] = date_obs
                hdu.header['TIME-OBS'] = time_obs
                hdu.header['EXTVER'] = imset_hdr_ver
                hdu.header['DATE'] = (dtval, "Date FITS file was generated")
                hdu.header['ORIGIN'] = "stistools inttag.py"

                # Check if image-specific WCS keywords already exist in the
                # tag file (older tag files do)
                keyword_list = list(hdu.header.keys())
                if not any("CTYPE" in keyword for keyword in keyword_list):
                    n, k = [keyword[-1] for keyword in keyword_list
                            if "TCTYP" in keyword]

                    # Rename keywords
                    for val, i in zip([n, k], ['1', '2']):
                        hdu.header.rename_keyword('TCTYP' + val, 'CTYPE' + i)
                        hdu.header.rename_keyword('TCRPX' + val, 'CRPIX' + i)
                        hdu.header.rename_keyword('TCRVL' + val, 'CRVAL' + i)
                        hdu.header.rename_keyword('TCUNI' + val, 'CUNIT' + i)
                    hdu.header.rename_keyword('TC{}_{}'.format(n, n),
                                              'CD{}_{}'.format(1, 1))
                    hdu.header.rename_keyword('TC{}_{}'.format(n, k),
                                              'CD{}_{}'.format(1, 2))
                    hdu.header.rename_keyword('TC{}_{}'.format(k, n),
                                              'CD{}_{}'.format(2, 1))
                    hdu.header.rename_keyword('TC{}_{}'.format(k, k),
                                              'CD{}_{}'.format(2, 2))

                # Time tag events table keywords
                hdu.header['WCSAXES'] = 2
                hdu.header['LTM1_1'] = ltm
                hdu.header['LTM2_2'] = ltm
                hdu.header['LTV1'] = ltvx
                hdu.header['LTV2'] = ltvy

                # Convert keyword values to lowres scale if not highres
                if not highres:
                    hdu.header['CD1_1'] *= 2
                    hdu.header['CD1_2'] *= 2
                    hdu.header['CD2_1'] *= 2
                    hdu.header['CD2_2'] *= 2
                    hdu.header['CRPIX1'] = (hdu.header['CRPIX1'] + 0.5) / 2.
                    hdu.header['CRPIX2'] = (hdu.header['CRPIX2'] + 0.5) / 2.

                # Populate DQ header with dq-specific keywords
                if idx == 2:
                    hdu.header['NPIX1'] = siz_axx
                    hdu.header['NPIX2'] = siz_axy
                    # Fixes issue with calstis not running on raw output files
                    hdu.header['PIXVALUE'] = 0

            # Append imset extensions to header list
            hdu_list.append(sci_hdu)
            hdu_list.append(err_hdu)
            hdu_list.append(dq_hdu)

            # Prepare start and stop times for the next image in the imset
            starttime = stoptime
            stoptime += increment

            texptime += exp_time

    # Copy tag file primary header to output header
    pri_hdu = fits.PrimaryHDU(header=tag_pri_hdr.copy())

    # Add/modify primary header keywords
    # Three extensions per imset (SCI, ERR, DQ)
    pri_hdu.header['NEXTEND'] = imset_hdr_ver * 3
    pri_hdu.header['NRPTEXP'] = imset_hdr_ver
    pri_hdu.header['TEXPSTRT'] = texpstart
    pri_hdu.header['TEXPEND'] = texpend
    pri_hdu.header['TEXPTIME'] = texptime
    pri_hdu.header['BINAXIS1'] = bin_n
    pri_hdu.header['BINAXIS2'] = bin_n
    pri_hdu.header['FILENAME'] = output.split('/')[-1]
    pri_hdu.header['DATE'] = (dtval, "Date FITS file was generated")
    pri_hdu.header['ORIGIN'] = "stistools inttag.py"
    if not highres:
        # Corr flag detailing MAMA data conversion to low res
        pri_hdu.header['LORSCORR'] = "COMPLETE"

    # Write output file
    hdu_list = [pri_hdu] + hdu_list
    out_hdul = fits.HDUList(hdu_list)
    out_hdul.writeto(output, overwrite=True)
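
# Usage sketch (not from the original source; the file names below are
# hypothetical): integrate a STIS time-tag exposure into four equal-length
# ACCUM imsets at high resolution.
inttag('oabc01010_tag.fits', 'oabc01010_accum.fits', rcount=4, highres=True)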
def sex_phot(zero, error, stackcat):
    src = ascii.read(stackcat)
    srccoords = SkyCoord(src["ALPHA_J2000"], src["DELTA_J2000"],
                         unit="degree")
    target = SkyCoord(287.63114, 7.8977429, unit="degree")
    sample1 = SkyCoord(287.62955, 7.8983132, unit="degree")
    sample2 = SkyCoord(287.65794, 7.8851428, unit="degree")
    sample3 = SkyCoord(287.61966, 7.8913336, unit="degree")

    real_ra = np.array([target.ra.degree, sample1.ra.degree,
                        sample2.ra.degree, sample3.ra.degree])
    real_dec = np.array([target.dec.degree, sample1.dec.degree,
                         sample2.dec.degree, sample3.dec.degree])
    real_wcs = np.array([real_ra, real_dec])

    dist = target.separation(srccoords)
    dist1 = sample1.separation(srccoords)
    dist2 = sample2.separation(srccoords)
    dist3 = sample3.separation(srccoords)

    # catalog rows closest on the sky to each star
    target_idx = src[np.argmin(dist)]
    sample1_idx = src[np.argmin(dist1)]
    sample2_idx = src[np.argmin(dist2)]
    sample3_idx = src[np.argmin(dist3)]
    data = [target_idx, sample1_idx, sample2_idx, sample3_idx]

    target_mag = data[0]["MAG_AUTO"] + zero
    sample1_mag = data[1]["MAG_AUTO"] + zero
    sample2_mag = data[2]["MAG_AUTO"] + zero
    sample3_mag = data[3]["MAG_AUTO"] + zero
    mag = np.round(np.array([target_mag, sample1_mag, sample2_mag,
                             sample3_mag]), decimals=2)

    target_magerr = data[0]["MAGERR_AUTO"]
    sample1_magerr = data[1]["MAGERR_AUTO"]
    sample2_magerr = data[2]["MAGERR_AUTO"]
    sample3_magerr = data[3]["MAGERR_AUTO"]
    magerr_ls = np.sqrt(
        np.array([target_magerr, sample1_magerr, sample2_magerr,
                  sample3_magerr])**2 + error**2)
    magerr = np.round(magerr_ls, decimals=3)

    target_FWHM = data[0]["FWHM_WORLD"] * 3600
    sample1_FWHM = data[1]["FWHM_WORLD"] * 3600
    sample2_FWHM = data[2]["FWHM_WORLD"] * 3600
    sample3_FWHM = data[3]["FWHM_WORLD"] * 3600
    FWHM_ls = np.array([target_FWHM, sample1_FWHM, sample2_FWHM,
                        sample3_FWHM])
    FWHM = np.round(FWHM_ls, 2)

    sys_ra = np.array([data[0]["ALPHA_J2000"], data[1]["ALPHA_J2000"],
                       data[2]["ALPHA_J2000"], data[3]["ALPHA_J2000"]])
    sys_dec = np.array([data[0]["DELTA_J2000"], data[1]["DELTA_J2000"],
                        data[2]["DELTA_J2000"], data[3]["DELTA_J2000"]])
    sys_wcs = np.array([sys_ra, sys_dec])

    factor_name = ["object", "real_ra", "real_dec", "sys_ra", "sys_dec",
                   "mag", "magerr", "fwhm"]
    sokko_object = ["target", "comp1", "comp2", "comp3"]
    ascii.write((sokko_object, real_wcs[0], real_wcs[1], sys_wcs[0],
                 sys_wcs[1], mag, magerr, FWHM),
                "sample.dat", names=factor_name)

    g = pyfits.open("GL191032+075314_Hw.fits")
    date = g[0].header["DATE-OBS"]
    # astro_t = Time(date, format="isot", scale="utc")
    # mjd = astro_t.mjd
    mjd = np.array([Time(date, format="isot", scale="utc").mjd])
    ascii.write([mjd], "mjd.dat", names=["mjd"])
def _parse_netcdf(filepath):
    """
    Parses the netCDF GOES files to return the data, header and associated
    units.

    Parameters
    ----------
    filepath : `str`
        The path of the file to parse.
    """
    with h5netcdf.File(filepath, mode="r",
                       **XRSTimeSeries._netcdf_read_kw) as h5nc:
        header = MetaDict(OrderedDict(h5nc.attrs))
        flux_name = (h5nc.variables.get("a_flux")
                     or h5nc.variables.get("xrsa_flux"))
        if flux_name is None:
            raise ValueError(
                f"No flux data (either a_flux or xrsa_flux) found in file: {filepath}")
        flux_name_a = flux_name.name
        flux_name_b = flux_name_a.replace("a", "b")
        xrsa = np.array(h5nc[flux_name_a])
        xrsb = np.array(h5nc[flux_name_b])
        xrsa_quality = np.array(h5nc[flux_name_a.replace("flux", "flags")])
        xrsb_quality = np.array(h5nc[flux_name_b.replace("flux", "flags")])
        start_time_str = h5nc["time"].attrs["units"]
        # h5netcdf < 0.14 returns bytes instead of a str
        if isinstance(start_time_str, bytes):
            start_time_str = start_time_str.decode("utf-8")
        start_time_str = start_time_str.lstrip("seconds since").rstrip("UTC").strip()
        times = Time(parse_time(start_time_str).unix + h5nc["time"],
                     format="unix")
        try:
            times = times.datetime
        except ValueError:
            # We do not make the assumption that the leap second occurs at
            # the end of the file. Therefore, we need to find it: we convert
            # the times to isot strings, use numpy to find the leap-second
            # string, then use that to work out the index of the leap
            # timestamp.
            idx = np.argwhere(np.char.find(times.isot, ":60.") != -1)
            # We only handle the case where there is exactly one leap second
            # in the file. I don't think there would ever be a case with
            # more than one.
            if len(idx) != 1:
                raise ValueError(
                    f"More than one leap second was found in: {Path(filepath).name}")
            warn_user(
                f"There is one leap second timestamp present in: {Path(filepath).name}; "
                "this timestamp has been rounded to `:59.999` to allow its "
                "conversion into a Python datetime. "
                f"The leap second timestamp was: {times.isot[idx]}")
            times[idx] = Time(times[idx].isot.tolist()[0][0][:17] + "59.999").unix
            times = times.datetime
    data = DataFrame(
        {
            "xrsa": xrsa,
            "xrsb": xrsb,
            "xrsa_quality": xrsa_quality,
            "xrsb_quality": xrsb_quality,
        },
        index=times)
    data = data.replace(-9999, np.nan)
    units = OrderedDict([
        ("xrsa", u.W / u.m**2),
        ("xrsb", u.W / u.m**2),
        ("xrsa_quality", int),
        ("xrsb_quality", int),
    ])
    return data, header, units
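
# A minimal illustration (not from the original source) of the leap-second
# workaround above: a ":60" timestamp cannot become a Python datetime, so it
# is truncated at the minute and re-labelled ":59.999" before conversion.
from astropy.time import Time

leap = Time("2015-06-30T23:59:60.000")
rounded = Time(leap.isot[:17] + "59.999")
print(rounded.datetime)  # 2015-06-30 23:59:59.999000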
# plt.show()
plt.savefig('uranus_skymodel/ortho_{}'.format(i))
plt.close()


if __name__ == "__main__":
    # sbs = np.arange(76, 198)[:78]
    sbs = np.arange(76, 198)
    freqs = sb_to_f(sbs, 3)
    start = '2020-12-15T20:04:00'  # isot format
    duration = 176 * 60 * u.second
    n_elem = 96
    myobs = Observation('uranus', start, 'isot', duration.value, freqs)

    t = Time('2020-12-15T20:04:00', format='isot')
    c = coords.get_body('uranus', t)
    ilofar = coords.EarthLocation(lat='53.095', lon='-7.9218',
                                  height=100 * u.m)
    aa = coords.AltAz(location=ilofar, obstime=t)
    altaz_coords = c.transform_to(aa)  # at I-LOFAR
    gal_coords = c.transform_to(coords.Galactic())

    print(myobs)
    print('AltAz Coordinates\n' + 'alt: {}\taz: {}'.format(
        altaz_coords.alt, altaz_coords.az))
    print('Galactic Coordinates\n' + 'l: {}\tb: {}\n'.format(
        gal_coords.l, gal_coords.b))
def telemetry():
    path = '/Users/christoph/OneDrive - UNSW/telemetry/'

    # # tau Ceti observation times
    # jd_sep = readcol('/Volumes/BERGRAID/data/veloce/reduced/tauceti/tauceti_with_LFC/sep2018/' + 'tauceti_all_jds.dat')
    # jd_nov = readcol('/Volumes/BERGRAID/data/veloce/reduced/tauceti/tauceti_with_LFC/nov2018/' + 'tauceti_all_jds.dat')
    # jd = list(jd_sep) + list(jd_nov)

    # # read detector temperature file(s)
    # with open(path + 'arccamera.txt') as det_temp_file:
    #     for line in det_temp_file:
    #         if line[0] == '#':
    #             column_names = line.lstrip('#').split()
    #         else:
    #             timestamp, det_temp, cryohead_temp, perc_heater = line.rstrip('\n')

    timestamp_cam, det_temp, cryohead_temp, perc_heater = readcol(
        path + 'arccamera.txt', skipline=1, twod=False)
    timestamp_cam_0, det_temp_0, cryohead_temp_0, perc_heater_0 = readcol(
        path + 'arccamera.txt.0', skipline=1, twod=False)
    timestamp_cam_1, det_temp_1, cryohead_temp_1, perc_heater_1 = readcol(
        path + 'arccamera.txt.1', skipline=1, twod=False)
    timestamp_cam_2, det_temp_2, cryohead_temp_2, perc_heater_2 = readcol(
        path + 'arccamera.txt.2', skipline=1, twod=False)

    cam_time = Time(timestamp_cam, format='unix')      # from 17/10/2018 - 27/11/2018
    cam_time_0 = Time(timestamp_cam_0, format='unix')  # from 19/04/2018 - 17/10/2018
    cam_time_1 = Time(timestamp_cam_1, format='unix')
    cam_time_2 = Time(timestamp_cam_2, format='unix')

    # combine all the subsets
    t = np.array(list(cam_time_2.jd) + list(cam_time_1.jd) +
                 list(cam_time_0.jd) + list(cam_time.jd)) - 2.458e6
    tobj = Time(t + 2.458e6, format='jd')
    dtemp = np.array(list(det_temp_2) + list(det_temp_1) +
                     list(det_temp_0) + list(det_temp))
    cryo = np.array(list(cryohead_temp_2) + list(cryohead_temp_1) +
                    list(cryohead_temp_0) + list(cryohead_temp))
    heater_load = np.array(list(perc_heater_2) + list(perc_heater_1) +
                           list(perc_heater_0) + list(perc_heater))

    # # for showing tau Ceti obstimes
    # ix = np.zeros(len(jd))
    # # I checked, and the maximum diff in time is less than ~30s :)
    # for i in range(len(jd)):
    #     ix[i] = find_nearest(t, jd[i] - 2.458e6, return_index=True)
    # # now len(ix) = 138, whereas len(x_shift) = 133, b/c the LFC peaks were
    # # not successfully measured for the first 5 tau Ceti observations;
    # # hence cut away the first 5 entries
    # ix = ix[5:]
    # ix = ix.astype(int)

    # observing runs were:
    # 20180917 - 20180926
    tstart_sep = 2458378.0
    tend_sep = 2458388.0
    # 20181115 - 20181127
    tstart_nov = 2458437.0
    tend_nov = 2458450.0
    # 20190121 - 20190203
    tstart_janfeb = 2458504.0
    tend_janfeb = 2458518.0
    # 20190408 - 20190415
    tstart_apr = 2458581.0
    tend_apr = 2458589.0
    # 20190503 - 20190512
    tstart_may01 = 2458606.0
    tend_may01 = 2458616.0
    # 20190517 - 20190528
    tstart_may02 = 2458620.0
    tend_may02 = 2458632.0
    # 20190531 - 20190605
    tstart_jun01 = 2458634.0
    tend_jun01 = 2458640.0
    # 20190619 - 20190625
    tstart_jun02 = 2458653.0
    tend_jun02 = 2458660.0
    # 20190722 - 20190724
    tstart_jul = 2458686.0
    tend_jul = 2458689.0

    starts = np.array([tstart_sep, tstart_nov, tstart_janfeb, tstart_apr,
                       tstart_may01, tstart_may02, tstart_jun01,
                       tstart_jun02, tstart_jul]) - 2.458e6
    ends = np.array([tend_sep, tend_nov, tend_janfeb, tend_apr, tend_may01,
                     tend_may02, tend_jun01, tend_jun02, tend_jul]) - 2.458e6

    ##### make a nice stacked plot #####
    run = 'sep'
    # runs = np.array(['all', 'sep', 'nov', 'jan', 'apr', 'may', 'jun', 'jul'])
    # runix = np.argwhere(runs == run)[0]
    # tlow, thigh = (starts[runix] - 3, ends[runix] + 3)
    if run == 'all':
        tlow, thigh = (np.min(starts) - 3, np.max(ends) + 3)
    elif run == 'sep':
        tlow, thigh = (tstart_sep - 2.458e6 - 3, tend_sep - 2.458e6 + 3)
    elif run == 'nov':
        tlow, thigh = (tstart_nov - 2.458e6 - 3, tend_nov - 2.458e6 + 3)
    elif run == 'jan':
        tlow, thigh = (tstart_janfeb - 2.458e6 - 3, tend_janfeb - 2.458e6 + 3)
    elif run == 'apr':
        tlow, thigh = (tstart_apr - 2.458e6 - 3, tend_apr - 2.458e6 + 3)
    elif run == 'may':
        tlow, thigh = (tstart_may01 - 2.458e6 - 3, tend_may01 - 2.458e6 + 3)
    elif run == 'jun':
        tlow, thigh = (tstart_may02 - 2.458e6 - 3, tend_jun02 - 2.458e6 + 3)
    elif run == 'jul':
        tlow, thigh = (tstart_jul - 2.458e6 - 3, tend_jul - 2.458e6 + 3)

    # t_plot = tobj.datetime  # for plotting nice calendar dates
    t_plot = t.copy()  # for plotting JD

    fig, axarr = plt.subplots(3, sharex=True, figsize=(12, 6.75))

    # plot 3 subplots
    axarr[0].plot(t_plot, dtemp, 'b')
    axarr[1].plot(t_plot, cryo, 'b')
    axarr[2].plot(t_plot, heater_load, 'b')

    # set (global) x-range
    # axarr[0].set_xlim(370, 455)
    # axarr[0].set_xlim(500, 692)  # ~17 Jan - 28 July 2019
    # axarr[0].set_xlim(375, 692)  # ~14 Sep 2018 - 28 July 2019
    axarr[0].set_xlim(tlow, thigh)

    # set y-ranges
    axarr[0].set_ylim(138, 150)
    axarr[1].set_ylim(82, 88)
    axarr[2].set_ylim(-5, 30)

    # set titles
    axarr[0].set_title('detector temp')
    axarr[1].set_title('cryohead temp')
    axarr[2].set_title('heater load')

    # set x-axis label
    axarr[2].set_xlabel('JD - 2458000.0')

    # set y-axis labels
    axarr[0].set_ylabel('T [K]')
    axarr[1].set_ylabel('T [K]')
    axarr[2].set_ylabel('[%]')

    # indicate when Veloce was actually observing
    for x1, x2 in zip(starts, ends):
        axarr[0].axvspan(x1, x2, alpha=0.3, color='green')
        axarr[1].axvspan(x1, x2, alpha=0.3, color='green')
        axarr[2].axvspan(x1, x2, alpha=0.3, color='green')

    # # indicate tau Ceti obstimes with dashed vertical lines
    # for tobs in jd:
    #     axarr[0].axvline(tobs - 2.458e6, color='gray', linestyle='--')
    #     axarr[1].axvline(tobs - 2.458e6, color='gray', linestyle='--')
    #     axarr[2].axvline(tobs - 2.458e6, color='gray', linestyle='--')

    # save to file
    # plt.savefig(...)

    timestamp_mech, temp_internal, temp_external, temp_room, temp_elec_cabinet, p_internal, p_room, p_regulator, p_set_point, p_rosso_cryo, h_internal, h_external, h_room, focus = readcol(
        path + 'velocemech.txt', skipline=1, twod=False)
    timestamp_mech_0, temp_internal_0, temp_external_0, temp_room_0, temp_elec_cabinet_0, p_internal_0, p_room_0, p_regulator_0, p_set_point_0, p_rosso_cryo_0, h_internal_0, h_external_0, h_room_0, focus_0 = readcol(
        path + 'velocemech.txt.0', skipline=1, twod=False)
    timestamp_mech_1, temp_internal_1, temp_external_1, temp_room_1, temp_elec_cabinet_1, p_internal_1, p_room_1, p_regulator_1, p_set_point_1, p_rosso_cryo_1, h_internal_1, h_external_1, h_room_1, focus_1 = readcol(
        path + 'velocemech.txt.1', skipline=1, twod=False)

    mech_time = Time(timestamp_mech, format='unix')      # from 26/10/2018 - 27/11/2018
    mech_time_0 = Time(timestamp_mech_0, format='unix')  # from 23/09/2018 - 26/10/2018
    mech_time_1 = Time(timestamp_mech_1, format='unix')  # from 13/08/2018 - 23/09/2018

    timestamp_therm, temp_mc_setpoint, temp_mc_int, temp_extencl, temp_extencl_setpoint, state_mc = readcol(
        path + 'velocetherm.txt', skipline=1, twod=False)
    therm_time = Time(timestamp_therm, format='unix')

    timestamp_int_therm, temp_enc_setpoint, temp_enc_target, temp_cryo_setpoint, temp_sensor_1, temp_sensor_2, temp_sensor_3, temp_sensor_4, temp_sensor_5, temp_sensor_6, temp_sensor_7, \
        pwm_1, pwm_2, pwm_3, pwm_4, pwm_5 = readcol(
            path + 'veloceinttherm.txt', skipline=1, twod=False)
    timestamp_int_therm_0, temp_enc_setpoint_0, temp_enc_target_0, temp_cryo_setpoint_0, temp_sensor_1_0, temp_sensor_2_0, temp_sensor_3_0, temp_sensor_4_0, temp_sensor_5_0, temp_sensor_6_0, temp_sensor_7_0, \
        pwm_1_0, pwm_2_0, pwm_3_0, pwm_4_0, pwm_5_0 = readcol(
            path + 'veloceinttherm.txt.0', skipline=1, twod=False)
    # NB: the last name in the next unpacking was 'pwm_5_0' in the original,
    # which silently clobbered the value read from veloceinttherm.txt.0
    timestamp_int_therm_1, temp_enc_setpoint_1, temp_enc_target_1, temp_cryo_setpoint_1, temp_sensor_1_1, temp_sensor_2_1, temp_sensor_3_1, temp_sensor_4_1, temp_sensor_5_1, temp_sensor_6_1, temp_sensor_7_1, \
        pwm_1_1, pwm_2_1, pwm_3_1, pwm_4_1, pwm_5_1 = readcol(
            path + 'veloceinttherm.txt.1', skipline=1, twod=False)

    int_therm_time = Time(timestamp_int_therm, format='unix')      # from 26/11/2018 - 27/11/2018
    int_therm_time_0 = Time(timestamp_int_therm_0, format='unix')  # from 25/11/2018 - 26/11/2018
    int_therm_time_1 = Time(timestamp_int_therm_1, format='unix')  # from 24/11/2018 - 25/11/2018

    return
ephcoord = np.loadtxt(arq[4].strip(), skiprows=3,
                      usecols=(2, 4, 5, 6, 7, 8, 9), unpack=True,
                      dtype='S20', ndmin=1)

coor = coords[1]
for i in np.arange(len(ephcoord))[2:]:
    coor = np.core.defchararray.add(coor, ' ')
    coor = np.core.defchararray.add(coor, ephcoord[i])
jpl = SkyCoord(coor, frame='icrs', unit=(u.hourangle, u.degree))

dalfa = jpl.ra * np.cos(jpl.dec) - lau.ra * np.cos(lau.dec)
ddelta = jpl.dec - lau.dec

a = np.where(tempo > Time('2015-01-01 00:00:00').jd)

print('RA: ', np.absolute(dalfa[a].mas).max(),
      ', Dec: ', np.absolute(ddelta[a].mas).max())

r = []
for i in np.arange(2015, 2019, 1):
    r.append(Time('{}-01-01 00:00:00'.format(i), format='iso').jd - 2451544.5)
# print(r)
r = np.array(r)

############## Declination ############################################

plt.plot(tempo - 2451544.5, dalfa.mas, label=r'$\Delta\alpha\cos\delta$')
plt.plot(tempo - 2451544.5, ddelta.mas, '--', label=r'$\Delta\delta$')
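
# A minimal cross-check (not from the original source): astropy can compute
# the same on-sky offsets directly. SkyCoord.spherical_offsets_to returns
# (delta_alpha * cos(delta), delta_delta) without forming the products by
# hand; the two coordinates below are made up for illustration.
from astropy.coordinates import SkyCoord
import astropy.units as u

p1 = SkyCoord('13h09m47.7s -23d23m02s', frame='icrs')
p2 = SkyCoord('13h09m47.8s -23d23m01s', frame='icrs')
dra_cosdec, ddec = p1.spherical_offsets_to(p2)
print(dra_cosdec.to(u.mas), ddec.to(u.mas))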
def setup(self):
    etrue = np.logspace(-1, 1, 10) * u.TeV
    self.e_true = MapAxis.from_energy_edges(etrue, name="energy_true")
    ereco = np.logspace(-1, 1, 5) * u.TeV
    elo = ereco[:-1]
    ehi = ereco[1:]
    self.e_reco = MapAxis.from_energy_edges(ereco, name="energy")

    start = u.Quantity([0], "s")
    stop = u.Quantity([1000], "s")
    time_ref = Time("2010-01-01 00:00:00.0")
    self.gti = GTI.create(start, stop, time_ref)
    self.livetime = self.gti.time_sum

    self.on_region = make_region("icrs;circle(0.,1.,0.1)")
    off_region = make_region("icrs;box(0.,1.,0.1, 0.2,30)")
    self.off_region = off_region.union(
        make_region("icrs;box(-1.,-1.,0.1, 0.2,150)"))
    self.wcs = WcsGeom.create(npix=300, binsz=0.01, frame="icrs").wcs

    self.aeff = RegionNDMap.create(
        region=self.on_region, wcs=self.wcs, axes=[self.e_true], unit="cm2")
    self.aeff.data += 1

    data = np.ones(elo.shape)
    data[-1] = 0  # to test stats calculation with empty bins

    axis = MapAxis.from_edges(ereco, name="energy", interp="log")
    self.on_counts = RegionNDMap.create(
        region=self.on_region,
        wcs=self.wcs,
        axes=[axis],
        meta={"EXPOSURE": self.livetime.to_value("s")},
    )
    self.on_counts.data += 1
    self.on_counts.data[-1] = 0

    self.off_counts = RegionNDMap.create(
        region=self.off_region, wcs=self.wcs, axes=[axis])
    self.off_counts.data += 10

    acceptance = RegionNDMap.from_geom(self.on_counts.geom)
    acceptance.data += 1

    data = np.ones(elo.shape)
    data[-1] = 0

    acceptance_off = RegionNDMap.from_geom(self.off_counts.geom)
    acceptance_off.data += 10

    self.edisp = EDispKernelMap.from_diagonal_response(
        self.e_reco, self.e_true, self.on_counts.geom.to_image())

    exposure = self.aeff * self.livetime
    exposure.meta["livetime"] = self.livetime

    mask_safe = RegionNDMap.from_geom(self.on_counts.geom, dtype=bool)
    mask_safe.data += True

    self.dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        exposure=exposure,
        edisp=self.edisp,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        name="test",
        gti=self.gti,
        mask_safe=mask_safe,
    )
def fullstack_times(request):
    return Time(request.param)
def inverse(self, value):
    return Time(value, scale=self.scale, format=self.format, copy=False)
def from_miriade(cls, targetids, objtype='asteroid', epochs=None,
                 location='500', **kwargs):
    """Load target ephemerides from
    `IMCCE Miriade <http://vo.imcce.fr/webservices/miriade/>`_ using
    `astroquery.imcce.MiriadeClass.get_ephemerides`

    Parameters
    ----------
    targetids : str or iterable of str
        Target identifier, i.e., a number, name, designation, or JPL
        Horizons record number, for one or more targets.
    objtype : str, optional
        The nature of ``targetids`` provided; possible values are
        ``'asteroid'``, ``'comet'``, ``'dwarf planet'``, ``'planet'``,
        or ``'satellite'``. Default: ``'asteroid'``
    epochs : `~astropy.time.Time` object, or dictionary, optional
        Epochs of elements to be queried; `~astropy.time.Time` objects
        support single and multiple epochs; a dictionary including
        keywords ``start`` and ``stop``, as well as either ``step`` or
        ``number``, can be used to generate a range of epochs. ``start``
        and ``stop`` have to be `~astropy.time.Time` objects (see
        :ref:`epochs`). If ``step`` is provided, a range of epochs will
        be queried starting at ``start`` and ending at ``stop`` in
        steps of ``step``; ``step`` has to be provided as a
        `~astropy.units.Quantity` object with integer value and a unit
        of either seconds, minutes, hours, or days. If ``number`` is
        provided as an integer, the interval defined by ``start`` and
        ``stop`` is split into ``number`` equidistant intervals. If
        ``None`` is provided, current date and time are used. All
        epochs should be provided in UTC; if not, they will be
        converted to UTC and a `~sbpy.data.TimeScaleWarning` will be
        raised. Default: ``None``
    location : str or `~astropy.coordinates.EarthLocation`, optional
        Location of the observer using IAU observatory codes (see `IAU
        observatory codes
        <https://www.minorplanetcenter.net/iau/lists/ObsCodesF.html>`__)
        or as `~astropy.coordinates.EarthLocation`. Default: ``'500'``
        (geocentric)
    **kwargs : optional
        Arguments that will be provided to
        `astroquery.imcce.MiriadeClass.get_ephemerides`.

    Notes
    -----
    * For detailed explanations of the queried fields, refer to
      `astroquery.imcce.MiriadeClass.get_ephemerides` and the `Miriade
      documentation
      <http://vo.imcce.fr/webservices/miriade/?documentation>`_.
    * By default, all properties are provided in the J2000.0 reference
      system. Different settings can be chosen using additional keyword
      arguments as used by
      `astroquery.imcce.MiriadeClass.get_ephemerides`.

    Returns
    -------
    `~Ephem` object
        The resulting object will be populated with columns as defined
        in `~astroquery.imcce.MiriadeClass.get_ephemerides`; refer to
        that document for information on how to modify the list of
        queried parameters.
    Examples
    --------
    >>> from sbpy.data import Ephem
    >>> from astropy.time import Time
    >>> epoch = Time('2018-05-14', scale='utc')
    >>> eph = Ephem.from_miriade('ceres', epochs=epoch)  # doctest: +SKIP
    """

    # modify epoch input to make it work with astroquery.imcce.Miriade
    if epochs is None:
        epochs = {'start': Time.now().utc.jd}
    elif isinstance(epochs, Time):
        if epochs.scale != 'utc':
            warn(('converting {} epochs to utc for use in '
                  'astroquery.imcce').format(epochs.scale),
                 TimeScaleWarning)
            epochs = epochs.utc
        epochs = {'start': epochs}
    elif isinstance(epochs, dict):
        if epochs['start'].scale != 'utc':
            warn(('converting {} start epoch to utc for use in '
                  'astroquery.imcce').format(epochs['start'].scale),
                 TimeScaleWarning)
            epochs['start'] = epochs['start'].utc
        if 'stop' in epochs and epochs['stop'].scale != 'utc':
            warn(('converting {} stop epoch to utc for use in '
                  'astroquery.imcce').format(epochs['stop'].scale),
                 TimeScaleWarning)
            epochs['stop'] = epochs['stop'].utc
        if 'number' in epochs:
            # turn interval/number into step size based on full minutes
            epochs['step'] = int(
                (Time(epochs['stop']) - Time(epochs['start'])).jd *
                86400 / (epochs['number'] - 1)) * u.s
        elif 'step' in epochs:
            epochs['number'] = (
                (Time(epochs['stop']) - Time(epochs['start'])).jd *
                86400 / epochs['step'].to('s').value) + 1
        if 'step' in epochs:
            epochs['step'] = '{:f}{:s}'.format(
                epochs['step'].value,
                {u.s: 's', u.minute: 'm', u.hour: 'h',
                 u.day: 'd'}[epochs['step'].unit])

    # if targetids is a list, run separate Miriade queries and append
    if not isinstance(targetids, (list, ndarray, tuple)):
        targetids = [targetids]

    # turn EarthLocation into a string of coordinates as used by
    # astroquery.imcce
    if isinstance(location, EarthLocation):
        location = '{:+f} {:+f} {:.1f}'.format(
            location.lon.deg, location.lat.deg,
            location.height.to('m').value)

    # append ephemerides table for each targetid
    all_eph = None
    for targetid in targetids:
        query = Miriade()
        try:
            if 'step' not in epochs and 'number' not in epochs:
                if not iterable(epochs['start']):
                    # single epoch
                    eph = query.get_ephemerides(targetname=targetid,
                                                objtype=objtype,
                                                location=location,
                                                epoch=epochs['start'],
                                                **kwargs)
                else:
                    # multiple epochs
                    eph = []
                    for i in range(len(epochs['start'])):
                        e = query.get_ephemerides(
                            targetname=targetid,
                            objtype=objtype,
                            location=location,
                            epoch=epochs['start'][i],
                            **kwargs)
                        e['epoch'] = Time(e['epoch'], format='jd',
                                          scale='utc').iso
                        eph.append(e)
                    eph = vstack(eph)
                    eph['epoch'] = Time(eph['epoch'], scale='utc',
                                        format='iso')
            else:
                # dictionary
                eph = query.get_ephemerides(targetname=targetid,
                                            objtype=objtype,
                                            location=location,
                                            epoch=epochs['start'],
                                            epoch_step=epochs['step'],
                                            epoch_nsteps=epochs['number'],
                                            **kwargs)
        except RuntimeError as e:
            raise QueryError(
                ('Error raised by astroquery.imcce: {:s}\n'
                 'The following query was attempted: {:s}').format(
                     str(e), query.uri))

        if all_eph is None:
            all_eph = eph
        else:
            all_eph = vstack([all_eph, eph])

    self = cls.from_table(all_eph)

    # turn epochs into astropy.time.Time and apply timescale
    self.table['epoch'] = Time(self.table['epoch'], format='jd',
                               scale='utc')

    return self
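
# Usage sketch (not from the original source): query a range of epochs by
# passing a start/stop/step dictionary, as the docstring above describes;
# this hits the Miriade server when executed.
from astropy.time import Time
import astropy.units as u

eph = Ephem.from_miriade('ceres',
                         epochs={'start': Time('2018-05-14', scale='utc'),
                                 'stop': Time('2018-05-15', scale='utc'),
                                 'step': 1 * u.hour})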
                     usecols=[1, 2])

# In[36]:

# adjust data to plot: strip the prefix and suffix from the file names to
# recover the star IDs
starid = list([s.replace('fsubbfrmaster', '') for s in starid])
starid = list([s.replace('.fits', '') for s in starid])
starid = np.asarray([float(i) for i in starid])

x = data[:, 1]
magnitude = data[:, 0]

# need to convert observation dates from date to MJD
date = Time(dateobs, format='isot', scale='utc').mjd

time = []
for i in range(0, len(date)):
    for j in range(0, 30):
        time = np.append(time, x[(j + (i * 30))] + date[i])

# In[39]:

SI = np.tile(np.arange(30), 53)
plt.scatter(time, magnitude, s=1, c=SI)
plt.gca().invert_yaxis()
plt.title('Eclipsing Binary Star Magnitude versus Time')
plt.xlabel('Time (mjd)')
plt.ylabel('Magnitude')

# In[ ]:
def from_oo(self, orbit, epochs=None, location='500', scope='full',
            dynmodel='N', ephfile='de430'):
    """Uses pyoorb to derive ephemerides from an `~Orbit` object. For a
    list of output parameters, please read the `pyoorb documentation
    <https://github.com/oorb/oorb/tree/master/python>`_.

    Parameters
    ----------
    orbit : `~Orbit` object
        Can contain any number of orbits; ephemerides will be calculated
        for each orbit. Required fields are:

        * target identifier (``'targetname'``)
        * semi-major axis (``'a'``, for Keplerian orbit) or perihelion
          distance (``'q'``, for cometary orbit), typically in au, or
          x-component of state vector (``'x'``, for cartesian orbit),
          typically in au
        * eccentricity (``'e'``, for Keplerian or cometary orbit) or
          y-component of state vector (``'y'``, for cartesian orbit)
          in au
        * inclination (``'i'``, for Keplerian or cometary orbit) in
          degrees or z-component of state vector (``'z'``, for cartesian
          orbit) in au
        * longitude of the ascending node (``'Omega'``, for Keplerian or
          cometary orbit) in degrees or x-component of velocity vector
          (``'vx'``, for cartesian orbit) in au/day
        * argument of the periapsis (``'w'``, for Keplerian or cometary
          orbit) in degrees or y-component of velocity vector (``'vy'``,
          for cartesian orbit) in au/day
        * mean anomaly (``'M'``, for Keplerian orbits) in degrees or
          perihelion epoch (``'Tp_jd'``, for cometary orbits) in JD or
          z-component of velocity vector (``'vz'``, for cartesian orbit)
          in au/day
        * epoch (``'epoch'``) as `~astropy.time.Time`
        * absolute magnitude (``'H'``) in mag
        * photometric phase slope (``'G'``)
    epochs : `~astropy.time.Time` object, optional
        Epochs of elements to be queried; must be a `~astropy.time.Time`
        object holding a single or multiple epochs (see :ref:`epochs`).
        If ``None`` is provided, current date and time are used. The
        same time scale that is used in ``epochs`` will be applied to
        the results. Default: ``None``
    location : str, optional, default ``'500'`` (geocentric)
        Location of the observer.
    scope : str
        Scope of data to be determined: ``'full'`` obtains all available
        properties, ``'basic'`` obtains only a limited amount of data.
        Default: ``'full'``
    dynmodel : str, optional
        The dynamical model to be used in the propagation: ``'N'`` for
        n-body simulation or ``'2'`` for a 2-body simulation. Default:
        ``'N'``
    ephfile : str, optional
        Planet and Lunar ephemeris file version as provided by JPL to be
        used in the propagation. Default: ``'de430'``

    Returns
    -------
    `~Ephem` object

    Examples
    --------
    Compute ephemerides for Ceres as seen from the Discovery Channel
    Telescope for the next 10 days at 1hr intervals:

    >>> import numpy as np
    >>> from sbpy.data import Orbit, Ephem
    >>> from astropy.time import Time
    >>> epochs = Time(Time.now().jd + np.arange(0, 10, 1/24), format='jd')
    >>> ceres = Orbit.from_horizons('1')  # doctest: +REMOTE_DATA
    >>> eph = Ephem.from_oo(ceres, epochs, 'G37')  # doctest: +REMOTE_DATA
    >>> eph  # doctest: +SKIP
    <QTable length=240>
    targetname       epoch        ...          obsz              trueanom
                       d          ...           AU                 deg
       str7         float64       ...         float64            float64
    ---------- ------------------ ... ----------------------- -----------------
       1 Ceres  2458519.316966272 ...  3.2083678848104924e-06  68.0863831954328
       1 Ceres 2458519.3586329385 ...  2.7022422510736277e-07 68.09589266358881
       1 Ceres 2458519.4002996054 ...  -3.111046209036683e-06 68.10540191585879
       1 Ceres  2458519.441966272 ...  -6.700369254264427e-06 68.11491095202307
       1 Ceres 2458519.4836329385 ... -1.0248419404668141e-05 68.12441977218093
       1 Ceres 2458519.5252996054 ... -1.3508703580356052e-05 68.13392837643161
           ...                ... ...                     ...               ...
       1 Ceres  2458529.066966272 ...  1.2522500440509399e-05 70.30569661787204
       1 Ceres 2458529.1086329385 ...  1.4101698473351076e-05 70.31515536712485
       1 Ceres 2458529.1502996054 ...  1.4771304981564537e-05  70.3246138990413
       1 Ceres  2458529.191966272 ...   1.448582020449618e-05 70.33407221340468
       1 Ceres 2458529.2336329385 ...   1.326517587380005e-05 70.34353031031534
       1 Ceres 2458529.2752996054 ...  1.1193369555934085e-05 70.35298818987367
    """
    import pyoorb

    # create a copy of orbit
    from . import Orbit
    orb = Orbit.from_table(orbit.table)

    if epochs is None:
        epochs = Time.now()

    # extract time scale
    timescale = epochs.scale.upper()

    # initialize pyoorb
    if os.getenv('OORB_DATA') is None:
        # oorb installed using conda
        pyoorb.pyoorb.oorb_init()
    else:
        ephfile = os.path.join(os.getenv('OORB_DATA'), ephfile + '.dat')
        pyoorb.pyoorb.oorb_init(ephfile)

    # identify orbit type based on available table columns
    orbittype = None
    for testtype in ['KEP', 'COM', 'CART']:
        try:
            orb._translate_columns(conf.oorb_orbit_fields[testtype][1:6])
            orbittype = testtype
            break
        except KeyError:
            pass

    if orbittype is None:
        raise OpenOrbError('orbit type cannot be determined from elements')

    # add/update orbittype column
    orb['orbittype'] = [orbittype] * len(orb)

    # derive and apply default units
    default_units = {}
    for idx, field in enumerate(conf.oorb_orbit_fields[orbittype]):
        try:
            default_units[orb._translate_columns(field)[0]] = \
                conf.oorb_orbit_units[orbittype][idx]
        except KeyError:
            pass
    for colname in orb.field_names:
        if (colname in default_units.keys() and
                not isinstance(orb[colname],
                               (u.Quantity, u.CompositeUnit, Time))):
            orb[colname].unit = default_units[colname]

    # convert epochs to TT
    orb['epoch'] = orb['epoch'].tt
    epochs = epochs.tt

    try:
        epochs = list(zip(epochs.mjd,
                          [conf.oorb_timeScales['TT']] * len(epochs)))
    except TypeError:
        epochs = [(epochs.mjd, conf.oorb_timeScales['TT'])]

    if scope == 'full':
        oo_eph, err = pyoorb.pyoorb.oorb_ephemeris_full(
            orb._to_oo(), location, epochs, dynmodel)
    elif scope == 'basic':
        oo_eph, err = pyoorb.pyoorb.oorb_ephemeris_basic(
            orb._to_oo(), location, epochs, dynmodel)
    if err != 0:
        raise OpenOrbError(
            'pyoorb failed with error code {:d}'.format(err))

    # reorder data on a per-column basis and apply units
    oo_eph_col = hstack([oo_eph.transpose()[:, :, i]
                         for i in range(oo_eph.shape[0])]).tolist()
    oo_eph_col_u = []
    if scope == 'full':
        for i, col in enumerate(oo_eph_col):
            oo_eph_col_u.append(
                Ephem._unit_apply(col, conf.oorb_ephem_full_units[i]))
        ephem = self.from_columns(oo_eph_col_u,
                                  names=conf.oorb_ephem_full_fields)
    elif scope == 'basic':
        for i, col in enumerate(oo_eph_col):
            oo_eph_col_u.append(
                Ephem._unit_apply(col, conf.oorb_ephem_basic_units[i]))
        ephem = self.from_columns(oo_eph_col_u,
                                  names=conf.oorb_ephem_basic_fields)

    # add targetname column
    ephem.table.add_column(
        Column(data=sum([[orb['targetname'][i]] * len(epochs)
                         for i in range(len(orb.table))], []),
               name='targetname'),
        index=0)

    # convert MJD epochs to astropy.time.Time, expressed as Julian Date
    ephem.table['epoch'] = Time(Time(ephem['MJD'], format='mjd',
                                     scale=timescale.lower()),
                                format='jd')
    ephem.table.remove_column('MJD')

    return ephem
HD142527_sampler.run_sampler(total_orbits, burn_steps=burn_steps, thin=thin)
myResults = HD142527_sampler.results

# save posterior to disk
savefile = 'paperdraft_posterior.hdf5'
myResults.save_results(savefile)
print('Saved Posterior!')

plt.rcParams['font.family'] = 'monospace'  # fonts
plt.rcParams['font.monospace'] = 'DejaVu Sans Mono'
sns.set_context("talk")

starttime = Time(datetime.strptime('1990 January 1',
                                   '%Y %B %d')).to_value('mjd', 'long')

# posterior plot
median_values = np.median(myResults.post, axis=0)  # median of each parameter
range_values = np.ones_like(median_values) * 0.95  # plot only the 95% range for each parameter
corner_figure_median_95 = myResults.plot_corner(range=range_values,
                                                truths=median_values)
corner_figure_median_95.savefig('hd142_postplot.png')
print('Saved basic corner plot!')

# seaborn posterior plot
params = [
    'a$_{1}$ [au]', 'e$_{1}$', 'i$_{1}$ [rad]', '$\\omega_{0}$ [rad]',
    '$\\Omega_{0}$ [rad]', '$\\tau_{1}$', '$\\pi$ [mas]', '$\\mu_\\alpha$',
def get_from_flatdb(raw_file, mask=False):
    '''
    Retrieve a master flat and bad pixel map from the database. Uses the
    difference in time to find the most suitable flat.

    Params:
    - raw_file: The file to be calibrated with the master flat
    - mask: Indicates whether or not the program is searching for a mask flat

    Return:
    - masterFlat: The closest master flat
    - bp_map: The corresponding bad pixel map
    '''
    # Establish database connection
    connection = sql.MySQLConnection()
    connection.connect(buffered=True, host=host, port=port, user=user,
                       passwd=passwd)
    cursor = connection.cursor()
    cursor.execute('USE WIRC_POL')

    masterFlat = ''
    bp_map = ''
    fore = fits.getheader(raw_file)['FORE']

    cursor.execute('SELECT COUNT(*) FROM information_schema.tables '
                   'WHERE table_name = "master_flats"')
    if cursor.fetchone()[0] > 0:
        cursor.execute('SELECT COUNT(*) FROM master_flats WHERE FORE = "PG"')
        if (not mask) and fore == 'PG' and cursor.fetchone()[0] > 0:
            cursor.execute('SELECT File_Path, BP_MAP, UTSHUT '
                           'FROM master_flats WHERE FORE = "PG"')
        else:
            cursor.execute('SELECT File_Path, BP_MAP, UTSHUT '
                           'FROM master_flats WHERE NOT FORE = "PG"')
        files = cursor.fetchall()

        if len(files) > 0:
            # Retrieve the time in modified Julian form for comparison
            time_0 = Time([fits.getheader(raw_file)['UTSHUT']])
            time_0 = time_0.mjd[0]

            # If finding a mask flat, attempt to identify all of the mask
            # flats, removing anything else. If there are none, return
            # empty strings.
            if mask:
                files_temp = []
                for file in files:
                    hdu = fits.open(file[0])
                    header = hdu[0].header
                    try:
                        if 'OUT' in header['MASKPOS']:
                            files_temp.append(file)
                    except KeyError:
                        pass
                    hdu.close()
                if files_temp == []:
                    return '', ''
                else:
                    files = files_temp

            # Identify the times in modified Julian form for comparison.
            # When attempting to match by filters as well, the program too
            # often returned no results, so only time is matched.
            times = [x[2] for x in files]
            times = Time(times)
            times = times.mjd

            # Create a list of differences in time
            diff = []
            for time in times:
                diff.append(abs(float(time) - float(time_0)))
            diff = np.array(diff)

            # Identify the closest master flat in time
            ind = np.where(diff == min(diff))[0][0]

            connection.commit()
            connection.close()

            # Retrieve the filepaths
            masterFlat = files[ind][0]
            bp_map = files[ind][1]

    return masterFlat, bp_map
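
# Usage sketch (not from the original source; the file name is hypothetical
# and a reachable WIRC_POL MySQL database with the host/port/user/passwd
# globals is assumed): fetch the master flat and bad pixel map closest in
# time to a raw exposure, and the mask-flat equivalents.
flat_path, bp_path = get_from_flatdb('wirc0042.fits')
mask_flat, mask_bp = get_from_flatdb('wirc0042.fits', mask=True)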
                                             lat='25.995789d',
                                             height=12 * u.m)},
    {'name': "Guillermo Haro",
     'location': EarthLocation.from_geodetic(lon='-110.384722d',
                                             lat='31.052778d',
                                             height=2480 * u.m)},
]

from astropy.time import Time

# complete with Apr 10, 2019 and whatever time here
date_time = Time(...)

# Name, RA (in hour angle!!) and Dec (in degrees)
from astropy.io import ascii
target_list = ascii.read("""
PGC003183 0.90109 73.08478
UGC03858 7.51289 73.63019
UGC03859 7.51347 73.70633
UGC03889 7.56557 73.64353
PGC616899 14.55716 -37.83552
PGC021381 7.61031 74.44653
PGC021386 7.61209 74.45029
UGC03929 7.66532 75.42469
ESO336-006 18.60201 -37.94586
ESO336-001 18.54346 -39.45677
ESO327-012 14.71358 -40.60005
def photometry(image_paths, master_dark_path, master_flat_path, star_positions, aperture_radii, centroid_stamp_half_width, psf_stddev_init, aperture_annulus_radius): """ Parameters ---------- master_dark_path : str Path to master dark frame master_flat_path :str Path to master flat field target_centroid : `~numpy.ndarray` position of centroid, with shape (2, 1) comparison_flux_threshold : float Minimum fraction of the target star flux required to accept for a comparison star to be included aperture_radii : `~numpy.ndarray` Range of aperture radii to use centroid_stamp_half_width : int Centroiding is done within image stamps centered on the stars. This parameter sets the half-width of the image stamps. psf_stddev_init : float Initial guess for the width of the PSF stddev parameter, used for fitting 2D Gaussian kernels to the target star's PSF. aperture_annulus_radius : int For each aperture in ``aperture_radii``, measure the background in an annulus ``aperture_annulus_radius`` pixels bigger than the aperture radius """ master_dark = fits.getdata(master_dark_path) master_flat = fits.getdata(master_flat_path) star_positions = np.array(star_positions)#.T # Initialize some empty arrays to fill with data: times = np.zeros(len(image_paths)) fluxes = np.zeros((len(image_paths), len(star_positions), len(aperture_radii))) errors = np.zeros((len(image_paths), len(star_positions), len(aperture_radii))) xcentroids = np.zeros((len(image_paths), len(star_positions))) ycentroids = np.zeros((len(image_paths), len(star_positions))) airmass = np.zeros(len(image_paths)) psf_stddev = np.zeros(len(image_paths)) medians = np.zeros(len(image_paths)) with ProgressBar(len(image_paths)) as bar: for i in range(len(image_paths)): bar.update() # Subtract image by the dark frame, normalize by flat field imagedata = (fits.getdata(image_paths[i]) - master_dark) / master_flat from scipy.ndimage import gaussian_filter smoothed_image = gaussian_filter(imagedata, 3) brightest_star_coords = np.unravel_index(np.argmax(smoothed_image), smoothed_image.shape) if i == 0: brightest_start_coords_init = brightest_star_coords offset = np.array(brightest_start_coords_init) - np.array(brightest_star_coords) print('offset', offset) # Collect information from the header imageheader = fits.getheader(image_paths[i]) exposure_duration = imageheader['EXPTIME'] times[i] = Time(imageheader['DATE-OBS'], format='isot', scale=imageheader['TIMESYS'].lower()).jd medians[i] = np.median(imagedata) airmass[i] = imageheader['AIRMASS'] # Initial guess for each stellar centroid informed by previous centroid for j in range(len(star_positions)): init_x = star_positions[j][0] + offset[0] init_y = star_positions[j][1] + offset[1] # Cut out a stamp of the full image centered on the star image_stamp = imagedata[int(init_y) - centroid_stamp_half_width: int(init_y) + centroid_stamp_half_width, int(init_x) - centroid_stamp_half_width: int(init_x) + centroid_stamp_half_width] x_stamp_centroid, y_stamp_centroid = np.unravel_index(np.argmax(image_stamp), image_stamp.shape) x_centroid = x_stamp_centroid + init_x - centroid_stamp_half_width y_centroid = y_stamp_centroid + init_y - centroid_stamp_half_width # plt.imshow(image_stamp, origin='lower') # plt.scatter(y_stamp_centroid, x_stamp_centroid) # plt.show() # plt.imshow(imagedata, origin='lower') # plt.scatter(x_centroid, y_centroid) # xcentroids[i, j] = x_centroid ycentroids[i, j] = y_centroid # For the target star, measure PSF: if j == 0: psf_model_init = models.Gaussian2D(amplitude=np.max(image_stamp), 
x_mean=centroid_stamp_half_width, y_mean=centroid_stamp_half_width, x_stddev=psf_stddev_init, y_stddev=psf_stddev_init) fit_p = fitting.LevMarLSQFitter() y, x = np.mgrid[:image_stamp.shape[0], :image_stamp.shape[1]] best_psf_model = fit_p(psf_model_init, x, y, image_stamp - np.median(image_stamp)) psf_stddev[i] = 0.5*(best_psf_model.x_stddev.value + best_psf_model.y_stddev.value) positions = np.vstack([ycentroids[i, :], xcentroids[i, :]]).T for k, aperture_radius in enumerate(aperture_radii): target_apertures = CircularAperture(positions, aperture_radius) background_annuli = CircularAnnulus(positions, r_in=aperture_radius + aperture_annulus_radius, r_out=aperture_radius + 2 * aperture_annulus_radius) flux_in_annuli = aperture_photometry(imagedata, background_annuli)['aperture_sum'].data background = flux_in_annuli/background_annuli.area() flux = aperture_photometry(imagedata, target_apertures)['aperture_sum'].data background_subtracted_flux = (flux - background * target_apertures.area()) print(background, flux) # plt.imshow(smoothed_image, origin='lower') # target_apertures.plot() # background_annuli.plot() # plt.show() fluxes[i, :, k] = background_subtracted_flux/exposure_duration errors[i, :, k] = np.sqrt(flux) # Save some values results = PhotometryResults(times, fluxes, errors, xcentroids, ycentroids, airmass, medians, psf_stddev, aperture_radii) return results
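# Hypothetical driver for photometry() above; every path and star position
# here is a placeholder, not part of the original pipeline.
from glob import glob
import numpy as np

image_paths = sorted(glob('data/night1/*.fits'))                   # placeholder
results = photometry(image_paths,
                     master_dark_path='calib/master_dark.fits',    # placeholder
                     master_flat_path='calib/master_flat.fits',    # placeholder
                     star_positions=[(512.0, 512.0), (430.0, 650.0)],  # target first
                     aperture_radii=np.arange(5, 12),
                     centroid_stamp_half_width=10,
                     psf_stddev_init=2.0,
                     aperture_annulus_radius=10)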
def hdrcheck(imlist_name='a*.fit', camera='KCT_STX16803'):
    """
    1. Description
    : Header keywords are sometimes entered incorrectly at the telescope,
    which leads to inconsistently named calibrated files in the IMSNG
    survey. To resolve this, the 'OBJECT' keyword is replaced with the
    name from the IMSNG target catalog (alltarget.dat) whenever the image
    center lies within fov/2 of a catalog target for KCT STX16803. In
    addition, MJD is written to the header, so images that are not IMSNG
    targets are still processed by this function.

    2. Usage
    >>> hdrcheck()

    3. History
    2018.       Firstly made.
    2020.03.01  Edited for KL4040.
    2020.03.06  Modified for KCT STX16803 process.
    """
    import os
    import sys
    import glob
    import astropy.units as u
    from astropy.time import Time
    from astropy.io import ascii
    from astropy.io import fits
    from lgpy.hdrcheck import wcscenter
    from astropy.coordinates import SkyCoord

    KCT_fov = 49.4  # arcmin
    all_catname = '/data1/code/alltarget.dat'
    all_cat = ascii.read(all_catname)
    ra, dec = all_cat['ra'], all_cat['dec']
    radeg, decdeg = [], []
    for i in range(len(all_cat)):
        c = SkyCoord(str(ra[i]) + ' ' + str(dec[i]),
                     unit=(u.hourangle, u.deg))
        radeg.append(c.ra.deg)
        decdeg.append(c.dec.deg)
    all_cat['radeg'] = radeg
    all_cat['decdeg'] = decdeg
    coo_all = SkyCoord(radeg, decdeg, unit=(u.deg, u.deg))

    imlist = glob.glob(imlist_name)
    imlist.sort()
    for i in range(len(imlist)):
        inim = imlist[i]
        print(inim)
        data, hdr = fits.getdata(inim, header=True)
        CRVAL1, CRVAL2 = wcscenter(inim)
        #camera = inim.split('-')[1]
        mjd0 = 2400000.5
        if camera == 'MAO_SNUCAM':
            t = Time(hdr['utdate'] + 'T' + hdr['utstart'],
                     format='isot', scale='utc')
            jd = t.jd
            mjd = t.mjd
            hdr['JD'] = round(jd, 5)
        elif camera == 'KCT_STX16803':
            t = Time(hdr['DATE-OBS'], format='isot', scale='utc')
            jd = t.jd
            mjd = t.mjd
        else:
            jd = hdr['jd']
            mjd = jd - mjd0
        hdr['MJD'] = round(mjd, 5)
        coo_target = SkyCoord(CRVAL1, CRVAL2, unit=(u.deg, u.deg))
        indx, d2d, d3d = coo_target.match_to_catalog_sky(coo_all)

        if d2d.arcmin > KCT_fov / 2.:
            print('Coordinates of the image are not in the IMSNG catalog: '
                  'no match. You may have observed the wrong field, or a '
                  'non-IMSNG target.')
            fits.writeto(inim, data, header=hdr, overwrite=True)
            print('Only MJD is entered in the image header.')
        else:
            obj = all_cat[indx]['obj']
            print('======================================')
            print(obj + ' is matched.')
            print(str(round(float(d2d.arcmin), 3)) + ' arcmin apart')
            print('======================================')
            hdr['object'] = obj
            fits.writeto(inim, data, header=hdr, overwrite=True)

    print('Header info inspection is finished.')
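# Standalone illustration of the matching step above: find the nearest
# catalog entry to an image center with match_to_catalog_sky and compare
# the separation against half the field of view (all values made up).
import astropy.units as u
from astropy.coordinates import SkyCoord

coo_all = SkyCoord([150.0, 151.2], [2.2, 2.4], unit=(u.deg, u.deg))
coo_target = SkyCoord(150.01, 2.21, unit=(u.deg, u.deg))
indx, d2d, d3d = coo_target.match_to_catalog_sky(coo_all)
print(indx, d2d.arcmin, d2d.arcmin < 49.4 / 2.)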
def t_DATE(self, t):
    r'\d\d\d\d-\d\d-\d\d(T\d\d:\d\d(:\d\d(\.\d+)?)?)?Z?'
    # The decimal point in the fractional-seconds group must be escaped;
    # an unescaped '.' would match any character.
    from astropy.time import Time
    t.value = Time(t.value, scale='utc')
    return t
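# A quick sanity check (separate from the lexer) that the DATE pattern
# above matches the intended ISO-8601 variants once the point is escaped.
import re

_DATE_RE = re.compile(r'\d\d\d\d-\d\d-\d\d(T\d\d:\d\d(:\d\d(\.\d+)?)?)?Z?')
for s in ('2019-01-01', '2019-01-01T12:30', '2019-01-01T12:30:45.5Z'):
    assert _DATE_RE.fullmatch(s), s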
    # units must simply all be the same. For proper motion (pmxi, pmxn) they
    # must be [angular] per year.

    # coords in radians
    a = coords.ra.rad
    d = coords.dec.rad

    # parallax factors in xi and xn
    Fxi = R(t).dot(W(a))
    Fxn = R(t).dot(N(a, d))

    # time difference in years
    dt = t.jyear - t0.jyear

    # coordinates at times in 't'
    xix = xi0 + dt * pmxi + plx * Fxi
    xnx = xn0 + dt * pmxn + plx * Fxn

    # format output
    if isinstance(xix, float):
        return np.array([xix, xnx])
    else:
        # np.array(zip(...)) yields a useless 0-d object array on Python 3;
        # stack the coordinate pairs explicitly instead
        return np.column_stack([xix, xnx])


if __name__ == "__main__":
    t0 = Time(2012.0, format='jyear')
    t = Time([2010.0, 2010.5, 2011.3, 2015.0], format='jyear')
    coord = SkyCoord(ra=180., dec=45., unit='deg', frame='icrs')
    # print(T_pos(t, t0, coord, 0, 0, 2, 1, 0.5))
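# A tiny numeric illustration of the model above, using made-up parallax
# factors instead of the module's R/W/N helpers: position = reference
# position + proper motion * elapsed years + parallax * parallax factor.
import numpy as np

dt = np.array([-2.0, -1.5, -0.7, 3.0])   # t - t0 in Julian years
Fxi = np.array([0.3, -0.9, 0.5, 0.3])    # stand-in parallax factors
xix = 0.0 + dt * 2.0 + 0.5 * Fxi         # xi0=0, pmxi=2/yr, plx=0.5
print(xix)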
        val = float(s)
        return True
    except ValueError:
        return False


if __name__ == "__main__":
    sys.exit("Not prepared yet. Under construction!")

    thetamin = 100.0   # mas
    thetamax = 2000.0  # mas
    maxalt = 30.0      # maximum altitude
    lat_subaru = 19.828611
    lon_subaru = 204.51945
    height_subaru = 4139.0
    utcoffset = 10.0 * u.hour
    midlocal = Time('2019-07-16 00:00:00')  # zero-padded ISO for astropy
    midnight = midlocal + utcoffset
    print(midlocal.iso)
    location_subaru = EarthLocation(lat=lat_subaru * u.deg,
                                    lon=lon_subaru * u.deg,
                                    height=height_subaru * u.m)
    delta_midnight = np.linspace(-12, 12, 1000) * u.hour
    times_obs = midnight + delta_midnight
    frame = AltAz(obstime=midnight + delta_midnight,
                  location=location_subaru)
    sunaltazs = get_sun(midnight + delta_midnight).transform_to(frame)
    nightmask = (sunaltazs.alt < 0 * u.deg)

    dat = pd.read_csv("../database/sixth/sixth.dat", delimiter="|",
                      comment="#")
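    # A sketch of where this setup usually leads: evaluate one catalog
    # row's altitude over the night and keep the night-time samples. The
    # 'ra'/'dec' column names (in degrees) are assumptions about sixth.dat.
    from astropy.coordinates import SkyCoord

    target = SkyCoord(ra=dat['ra'][0] * u.deg, dec=dat['dec'][0] * u.deg)
    target_altaz = target.transform_to(frame)
    print(target_altaz.alt[nightmask].max())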
def from_mpc(cls, targetids, epochs=None, location='500', **kwargs):
    """Load ephemerides from the
    `Minor Planet Center <http://minorplanetcenter.net>`_.

    Parameters
    ----------
    targetids : str or iterable of str
        Target identifier, resolvable by the Minor Planet Ephemeris
        Service [MPES]_, e.g., 2P, C/1995 O1, P/Encke, (1), 3200, Ceres,
        and packed designations, for one or more targets.

    epochs : `~astropy.time.Time` object, or dictionary, optional
        Request ephemerides at these epochs. May be a single epoch or
        multiple epochs as `~astropy.time.Time` (see :ref:`epochs`) or a
        dictionary describing a linearly-spaced array of epochs. All
        epochs should be provided in UTC; if not, they will be converted
        to UTC and a `~sbpy.data.TimeScaleWarning` will be raised. If
        ``None`` (default), the current date and time will be used.

        For the dictionary format, the keys ``start`` (start epoch),
        ``step`` (step size), ``stop`` (end epoch), and/or ``number``
        (total number of epochs) are used. Only one of ``stop`` and
        ``number`` may be specified at a time. ``step``, ``stop``, and
        ``number`` are optional. The values of ``start`` and ``stop``
        must be `~astropy.time.Time` objects. ``number`` should be an
        integer value; ``step`` should be a `~astropy.units.Quantity`
        with an integer value and units of seconds, minutes, hours, or
        days.

    location : various, optional
        Location of the observer as an IAU observatory code [OBSCODES]_
        (string), a 3-element array of Earth longitude, latitude,
        altitude, or an `~astropy.coordinates.EarthLocation`. Longitude
        and latitude should be parseable by
        `~astropy.coordinates.Angle`, and altitude should be parsable by
        `~astropy.units.Quantity` (with units of length). If ``None``,
        then the geocenter (code 500) is used.

    **kwargs
        Additional keyword arguments are passed to
        `~astroquery.mpc.MPC.get_ephemerides`: ``eph_type``,
        ``ra_format``, ``dec_format``, ``proper_motion``,
        ``proper_motion_unit``, ``suppress_daytime``, ``suppress_set``,
        ``perturbed``, ``unc_links``, ``cache``.

    Returns
    -------
    `~Ephem` object
        The resulting object will be populated with columns as defined
        in `~astroquery.mpc.get_ephemerides`; refer to that document for
        information on how to modify the list of queried parameters.

    Examples
    --------
    Query a single set of ephemerides of Ceres as observed from
    Maunakea:

    >>> from sbpy.data import Ephem
    >>> from astropy.time import Time
    >>> epoch = Time('2018-05-14', scale='utc')
    >>> eph = Ephem.from_mpc('ceres', epoch, 568)  # doctest: +REMOTE_DATA

    Query a range of ephemerides of comet 2P/Encke as observed from
    Maunakea:

    >>> epochs = {'start': Time('2019-01-01'),
    ...           'step': 1*u.d, 'number': 365}
    >>> eph = Ephem.from_mpc('2P', epochs, 568)  # doctest: +REMOTE_DATA

    Notes
    -----
    * All properties are provided in the J2000.0 reference system.
    * See `astroquery.mpc.MPC.get_ephemerides` and the Minor Planet
      Ephemeris Service user's guide [MPES]_ for details, including
      acceptable target names.

    References
    ----------
    .. [MPES] Williams, G. The Minor Planet Ephemeris Service.
       https://minorplanetcenter.org/iau/info/MPES.pdf

    .. [OBSCODES] IAU Minor Planet Center. List of observatory codes.
       https://minorplanetcenter.org/iau/lists/ObsCodesF.html
    """

    # parameter check

    # if targetids is a list, run separate MPC queries and append
    if not isinstance(targetids, (list, ndarray, tuple)):
        targetids = [targetids]

    if isinstance(epochs, Time):
        if epochs.scale != 'utc':
            warn(('converting {} epochs to utc for use in '
                  'astroquery.mpc').format(epochs.scale),
                 TimeScaleWarning)
            epochs = epochs.utc
        start = None
    elif isinstance(epochs, dict):
        start = epochs['start']  # required
        if start.scale != 'utc':
            warn(('converting {} start epoch to utc for use in '
                  'astroquery.mpc').format(start.scale),
                 TimeScaleWarning)
            start = start.utc
        step = epochs.get('step')
        stop = epochs.get('stop')
        if stop is not None and stop.scale != 'utc':
            warn(('converting {} stop epoch to utc for use in '
                  'astroquery.mpc').format(stop.scale),
                 TimeScaleWarning)
            stop = stop.utc
        number = epochs.get('number')

        if step is not None and stop is None:
            step = u.Quantity(step)
            if step.unit not in (u.d, u.h, u.min, u.s):
                raise QueryError(
                    'step must have units of days, hours, minutes,'
                    ' or seconds')
        if stop is not None:
            if step is not None and number is None:
                # start and stop both defined, estimate number of steps
                dt = (Time(stop).jd - Time(start).jd) * u.d
                number = int((dt / step).decompose()) + 1
            elif step is None and number is not None:
                step = int((stop - start).jd * 1440 /
                           (number - 1)) * u.minute
            else:
                raise QueryError(
                    ('epoch definition unclear; step xor number '
                     'must be provided with start and stop'))
    else:
        start = None

    if epochs is None:
        epochs = Time.now()

    if not iterable(epochs):
        epochs = [epochs]

    # append ephemerides table for each targetid
    all_eph = None
    for targetid in targetids:
        try:
            # get ephemeris
            if start is None:
                eph = []
                for i in range(len(epochs)):
                    e = MPC.get_ephemeris(targetid, location=location,
                                          start=Time(epochs[i],
                                                     scale='utc'),
                                          number=1, **kwargs)
                    e['Date'] = e['Date'].iso  # for vstack to work
                    eph.append(e)
                eph = vstack(eph)
                eph['Date'] = Time(eph['Date'], scale='utc')
            else:
                eph = MPC.get_ephemeris(targetid, location=location,
                                        start=start, step=step,
                                        number=number, **kwargs)
        except InvalidQueryError as e:
            raise QueryError(
                'Error raised by astroquery.mpc: {:s}'.format(str(e)))

        # add targetname column
        eph.add_column(Column([targetid] * len(eph),
                              name='Targetname'), index=0)

        if all_eph is None:
            all_eph = eph
        else:
            all_eph = vstack([all_eph, eph])

    # if ra_format or dec_format is defined, then units must be
    # dropped or else QTable will raise an exception because
    # strings cannot have units
    if 'ra_format' in kwargs:
        all_eph['RA'].unit = None
    if 'dec_format' in kwargs:
        all_eph['Dec'].unit = None

    return cls.from_table(all_eph)
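# Worked example of the epoch-range bookkeeping above, independent of the
# MPC query itself: with start and stop given and a 1-day step, the code
# derives number = int(dt / step) + 1.
import astropy.units as u
from astropy.time import Time

start, stop, step = Time('2019-01-01'), Time('2019-01-11'), 1 * u.d
dt = (stop.jd - start.jd) * u.d
number = int((dt / step).decompose()) + 1
print(number)  # 11 ephemeris epochs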
def get_ephemerides_async(self, targetname, objtype='asteroid',
                          epoch=None, epoch_step='1d', epoch_nsteps=1,
                          location=500, coordtype=1,
                          timescale='UTC',
                          planetary_theory='INPOP',
                          ephtype=1,
                          refplane='equator',
                          elements='ASTORB',
                          radial_velocity=False,
                          get_query_payload=False,
                          get_raw_response=False, cache=True):
    """
    Query the `IMCCE Miriade <http://vo.imcce.fr/webservices/miriade/>`_
    `ephemcc <http://vo.imcce.fr/webservices/miriade/?ephemcc>`_
    service.

    Parameters
    ----------
    targetname : str
        Name of the target to be queried.

    objtype : str, optional
        Type of the object to be queried. Available are: ``'asteroid'``,
        ``'comet'``, ``'dwarf planet'``, ``'planet'``, ``'satellite'``.
        Default: ``'asteroid'``

    epoch : `~astropy.time.Time` object, float, str, ``None``, optional
        Start epoch of the query. If a float is provided, it is
        expected to be a Julian Date; if a str is provided, it is
        expected to be an iso date of the form
        ``'YYYY-MM-DD HH-MM-SS'``. If ``None`` is provided, the current
        date and time are used as epoch. Default: ``None``

    epoch_step : str, optional
        Step size for ephemerides calculation. Must consist of a decimal
        number followed by a single character: (d)ays, (h)ours,
        (m)inutes or (s)econds. Default: ``'1d'``

    epoch_nsteps : int, optional
        Number of increments of ``epoch_step`` starting from ``epoch``
        for which ephemerides are calculated. Maximum number of steps
        is 5000. Default: 1

    location : str, optional
        Location of the observer on Earth as a code or a set of
        coordinates. See the `Miriade manual
        <http://vo.imcce.fr/webservices/miriade/?documentation#field_7>`_
        for details. Default: geocentric location (``'500'``)

    coordtype : int, optional
        Type of coordinates to be calculated: ``1``: spherical, ``2``:
        rectangular, ``3``: local coordinates (azimuth and elevation),
        ``4``: hour angle coordinates, ``5``: dedicated to observation,
        ``6``: dedicated to AO observation. Default: ``1``

    timescale : str, optional
        The time scale used in the computation of the ephemerides:
        ``'UTC'`` or ``'TT'``. Default: ``'UTC'``

    planetary_theory : str, optional
        Planetary ephemerides set to be utilized in the calculations:
        ``'INPOP'``, ``'DE405'``, ``'DE406'``. Default: ``'INPOP'``

    ephtype : int, optional
        Type of ephemerides to be calculated: ``1``: astrometric J2000,
        ``2``: apparent of the date, ``3``: mean of the date, ``4``:
        mean J2000. Default: ``1``

    refplane : str, optional
        Reference plane: ``'equator'`` or ``'ecliptic'``. Default:
        ``'equator'``

    elements : str, optional
        Set of osculating elements to be used in the calculations:
        ``'ASTORB'`` or ``'MPCORB'``. Default: ``'ASTORB'``

    radial_velocity : bool, optional
        Calculate additional information on the target's radial
        velocity. Default: ``False``

    get_query_payload : bool, optional
        When set to ``True`` the method returns the HTTP request
        parameters as a dict, default: ``False``

    get_raw_response : bool, optional
        Return raw data as obtained by Miriade without parsing the data
        into a table, default: ``False``

    cache : bool, optional
        If ``True`` the query will be cached. Default: ``True``

    Notes
    -----
    The following parameters can be queried using this function. Note
    that different ``coordtype`` settings provide different sets of
    parameters; numbers in parentheses denote which ``coordtype``
    settings include the parameters.
+------------------+-----------------------------------------------+ | Column Name | Definition | +==================+===============================================+ | ``target`` | Target name (str, 1, 2, 3, 4, 5, 6 ) | +------------------+-----------------------------------------------+ | ``epoch`` | Ephemerides epoch (JD, float, 1, 2, 3, 4, 5, | | | 6) | +------------------+-----------------------------------------------+ | ``RA`` | Target RA at ``ephtype`` (deg, float, 1) | +------------------+-----------------------------------------------+ | ``DEC`` | Target declination at ``ephtype`` (deg, | | | float, 1, 4, 5) | +------------------+-----------------------------------------------+ | ``RAJ2000`` | Target RA at J2000 (deg, float, 5, 6) | +------------------+-----------------------------------------------+ | ``DECJ2000`` | Target declination at J2000 (deg, float, 5, 6)| +------------------+-----------------------------------------------+ | ``AZ`` | Target azimuth (deg, float, 3, 5) | +------------------+-----------------------------------------------+ | ``EL`` | Target elevation (deg, float, 3, 5) | +------------------+-----------------------------------------------+ | ``delta`` | Distance from observer (au, float, 1, 2, 3, | | | 4, 5, 6) | +------------------+-----------------------------------------------+ | ``delta_rate`` | Rate in observer distance (km/s, float, | | | 1, 5, 6) | +------------------+-----------------------------------------------+ | ``V`` | Apparent visual magnitude (mag, float, 1, 2, | | | 3, 4, 5, 6) | +------------------+-----------------------------------------------+ | ``alpha`` | Solar phase angle (deg, 1, 2, 3, 4, 5, 6) | +------------------+-----------------------------------------------+ | ``elong`` | Solar elongation angle (deg, 1, 2, 3, 4, 5, 6)| +------------------+-----------------------------------------------+ | ``RAcosD_rate`` | Rate of motion in RA * cos(DEC) (arcsec/min, | | | float, 1, 5, 6) | +------------------+-----------------------------------------------+ | ``DEC_rate`` | Rate of motion in DEC (arcsec/min, float, 1, | | | 5, 6) | +------------------+-----------------------------------------------+ | ``x`` | X position state vector (au, float, 2) | +------------------+-----------------------------------------------+ | ``y`` | Y position state vector (au, float, 2) | +------------------+-----------------------------------------------+ | ``z`` | Z position state vector (au, float, 2) | +------------------+-----------------------------------------------+ | ``vx`` | X velocity state vector (au/d, float, 2) | +------------------+-----------------------------------------------+ | ``vy`` | Y velocity state vector (au/d, float, 2) | +------------------+-----------------------------------------------+ | ``vz`` | Z velocity state vector (au/d, float, 2) | +------------------+-----------------------------------------------+ | ``rv`` | Radial velocity (km/s, float, 2) | +------------------+-----------------------------------------------+ | ``heldist`` | Target heliocentric distance (au, float, 2, | | | 5, 6) | +------------------+-----------------------------------------------+ | ``x_h`` | X heliocentric position vector (au, float, 2) | +------------------+-----------------------------------------------+ | ``y_h`` | Y heliocentric position vector (au, float, 2) | +------------------+-----------------------------------------------+ | ``z_h`` | Z heliocentric position vector (au, float, 2) | 
+------------------+-----------------------------------------------+ | ``vx_h`` | X heliocentric vel. vector (au/d, float, 2) | +------------------+-----------------------------------------------+ | ``vy_h`` | Y heliocentric vel. vector (au/d, float, 2) | +------------------+-----------------------------------------------+ | ``vz_h`` | Z heliocentric vel. vector (au/d, float, 2) | +------------------+-----------------------------------------------+ | ``hourangle`` | Target hour angle (deg, float, 4, 5) | +------------------+-----------------------------------------------+ | ``siderealtime`` | Local sidereal time (hr, float, 5, 6) | +------------------+-----------------------------------------------+ | ``refraction`` | Atmospheric refraction (arcsec, float, 5, 6) | +------------------+-----------------------------------------------+ | ``airmass`` | Target airmass (float, 5, 6) | +------------------+-----------------------------------------------+ | ``posunc`` | Positional uncertainty (arcsec, float, 5, 6) | +------------------+-----------------------------------------------+ Examples -------- >>> from astroquery.imcce import Miriade >>> from astropy.time import Time >>> epoch = Time('2019-01-01', format='iso') >>> Miriade.get_ephemerides('3552', epoch=epoch) # doctest: +SKIP <Table masked=True length=1> target epoch RA ... DEC_rate delta_rate d deg ... arcs / min km / s bytes20 float64 float64 ... float64 float64 ----------- -------------------- ------------------ ... ---------- ------------ Don Quixote 2458484.5 16.105294999999998 ... -0.25244 31.4752734 """ URL = conf.ephemcc_server TIMEOUT = conf.timeout if isinstance(epoch, (int, float)): epoch = Time(epoch, format='jd') elif isinstance(epoch, str): epoch = Time(epoch, format='iso') elif epoch is None: epoch = Time.now() request_payload = OrderedDict([ ('-name', targetname), ('-type', objtype[0].upper()+objtype[1:]), ('-ep', str(epoch.jd)), ('-step', epoch_step), ('-nbd', epoch_nsteps), ('-observer', location), ('-output', '--jul'), ('-tscale', timescale), ('-theory', planetary_theory), ('-teph', ephtype), ('-tcoor', coordtype), ('-rplane', {'equator': 1, 'ecliptic': 2}[refplane]), ('-oscelem', elements), ('-mime', 'votable')]) if radial_velocity: request_payload['-output'] += ',--rv' if get_query_payload: return request_payload # query and parse response = self._request('GET', URL, params=request_payload, timeout=TIMEOUT, cache=cache) self._query_uri = response.url self._get_raw_response = get_raw_response return response
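# Since get_query_payload=True returns the request parameters instead of
# querying (per the docstring above), the payload this method builds can
# be inspected directly; a sketch:
from astroquery.imcce import Miriade

payload = Miriade.get_ephemerides('3552', epoch='2019-01-01 00:00:00',
                                  get_query_payload=True)
print(payload['-ep'], payload['-tcoor'])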
def from_horizons(cls, targetids, id_type='smallbody',
                  epochs=None, location='500', **kwargs):
    """Load target ephemerides from
    `JPL Horizons <https://ssd.jpl.nasa.gov/horizons.cgi>`_ using
    `astroquery.jplhorizons.HorizonsClass.ephemerides`

    Parameters
    ----------
    targetids : str or iterable of str
        Target identifier, i.e., a number, name, designation, or JPL
        Horizons record number, for one or more targets.

    id_type : str, optional
        The nature of ``targetids`` provided; possible values are
        ``'smallbody'`` (asteroid or comet), ``'majorbody'`` (planet or
        satellite), ``'designation'`` (asteroid or comet designation),
        ``'name'`` (asteroid or comet name), ``'asteroid_name'``,
        ``'comet_name'``, ``'id'`` (Horizons id).
        Default: ``'smallbody'``

    epochs : `~astropy.time.Time` object, or dictionary, optional
        Epochs of ephemerides to be queried; `~astropy.time.Time`
        objects support single and multiple epochs; a dictionary
        including keywords ``start`` and ``stop``, as well as either
        ``step`` or ``number``, can be used to generate a range of
        epochs. ``start`` and ``stop`` have to be `~astropy.time.Time`
        objects (see :ref:`epochs`). If ``step`` is provided, a range of
        epochs will be queried starting at ``start`` and ending at
        ``stop`` in steps of ``step``; ``step`` has to be provided as a
        `~astropy.units.Quantity` object with integer value and a unit
        of either minutes, hours, days, or years. If ``number`` is
        provided as an integer, the interval defined by ``start`` and
        ``stop`` is split into ``number`` equidistant intervals. If
        ``None`` is provided, current date and time are used. All epochs
        should be provided in UTC; if not, they will be converted to UTC
        and a `~sbpy.data.TimeScaleWarning` will be raised.
        Default: ``None``

    location : str or `~astropy.coordinates.EarthLocation`, optional
        Location of the observer using IAU observatory codes (see `IAU
        observatory codes
        <https://www.minorplanetcenter.net/iau/lists/ObsCodesF.html>`__)
        or as `~astropy.coordinates.EarthLocation`.
        Default: ``'500'`` (geocentric)

    **kwargs : optional
        Arguments that will be provided to
        `astroquery.jplhorizons.HorizonsClass.ephemerides`.

    Notes
    -----
    * For detailed explanations of the queried fields, refer to
      `astroquery.jplhorizons.HorizonsClass.ephemerides` and the `JPL
      Horizons documentation <https://ssd.jpl.nasa.gov/?horizons_doc>`_.
    * By default, all properties are provided in the J2000.0 reference
      system. Different settings can be chosen using additional keyword
      arguments as used by
      `astroquery.jplhorizons.HorizonsClass.ephemerides`.

    Returns
    -------
    `~Ephem` object
        The resulting object will be populated with columns as defined
        in `~astroquery.jplhorizons.HorizonsClass.ephemerides`; refer to
        that document for information on how to modify the list of
        queried parameters.

    Examples
    --------
    >>> from sbpy.data import Ephem
    >>> from astropy.time import Time
    >>> epoch = Time('2018-05-14', scale='utc')
    >>> eph = Ephem.from_horizons('ceres', epochs=epoch)  # doctest: +SKIP
    """

    # modify epoch input to make it work with astroquery.jplhorizons
    # maybe this stuff should really go into that module....
    if epochs is None:
        epochs = [Time.now().utc.jd]
    elif isinstance(epochs, Time):
        if epochs.scale != 'utc':
            warn(('converting {} epochs to utc for use in '
                  'astroquery.jplhorizons').format(epochs.scale),
                 TimeScaleWarning)
        epochs = epochs.utc.jd
    elif isinstance(epochs, dict):
        if 'start' in epochs and 'stop' in epochs and 'number' in epochs:
            epochs['step'] = epochs['number'] * u.dimensionless_unscaled
        # convert to utc and iso for astroquery.jplhorizons
        epochs['start'] = epochs['start'].utc.iso
        epochs['stop'] = epochs['stop'].utc.iso
        if 'step' in epochs:
            if epochs['step'].unit != u.dimensionless_unscaled:
                epochs['step'] = '{:d}{:s}'.format(
                    int(epochs['step'].value),
                    {u.minute: 'm', u.hour: 'h', u.d: 'd',
                     u.year: 'y'}[epochs['step'].unit])
            else:
                epochs['step'] = '{:d}'.format(
                    int(epochs['step'].value - 1))

    # if targetids is a list, run separate Horizons queries and append
    if not isinstance(targetids, (list, ndarray, tuple)):
        targetids = [targetids]

    # turn EarthLocation into dictionary of strings as used by
    # astroquery.jplhorizons
    if isinstance(location, EarthLocation):
        location = {'lon': location.lon.deg,
                    'lat': location.lat.deg,
                    'elevation': location.height.to('km')}

    # append ephemerides table for each targetid
    all_eph = None
    for targetid in targetids:

        # load ephemerides using astroquery.jplhorizons
        obj = Horizons(id=targetid, id_type=id_type,
                       location=location, epochs=epochs)
        try:
            eph = obj.ephemerides(**kwargs)
        except ValueError as e:
            raise QueryError(
                ('Error raised by astroquery.jplhorizons: {:s}\n'
                 'The following query was attempted: {:s}').format(
                     str(e), obj.uri))

        # workaround for current version of astroquery to make
        # column units compatible with astropy.table.QTable
        # should really change '---' units to None in
        # astroquery.jplhorizons.__init__.py
        for column_name in eph.columns:
            if eph[column_name].unit == '---':
                eph[column_name].unit = None

        # workaround for astroquery 0.3.9.dev5056 and earlier,
        # Horizons column named RA_rate always includes the
        # cos(Dec) term:
        if 'RA_rate' in eph.colnames:
            eph['RA_rate'].name = 'RA*cos(Dec)_rate'

        if all_eph is None:
            all_eph = eph
        else:
            all_eph = vstack([all_eph, eph])

    # turn epochs into astropy.time.Time and apply timescale
    # convert ut1 epochs to utc
    # https://ssd.jpl.nasa.gov/?horizons_doc
    if any(all_eph['datetime_jd'] < 2437665.5):
        all_eph['datetime_jd'][all_eph['datetime_jd'] < 2437665.5] = Time(
            all_eph['datetime_jd'][all_eph['datetime_jd'] < 2437665.5],
            scale='ut1', format='jd').utc.jd
    all_eph['epoch'] = Time(all_eph['datetime_jd'], format='jd',
                            scale='utc')
    all_eph['siderealtime'].unit = u.Unit('hour')
    all_eph.remove_column('datetime_jd')
    all_eph.remove_column('datetime_str')

    return cls.from_table(all_eph)
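# Hypothetical use of the dictionary branch above: 11 epochs spanning ten
# days. Internally, 'number' becomes a dimensionless 'step', the Time
# objects are converted to UTC ISO strings, and the step is rendered as
# the string '10' (number - 1 intervals) for astroquery.jplhorizons.
from astropy.time import Time
from sbpy.data import Ephem

eph = Ephem.from_horizons('Ceres',
                          epochs={'start': Time('2018-05-14', scale='utc'),
                                  'stop': Time('2018-05-24', scale='utc'),
                                  'number': 11})  # doctest: +SKIP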
def from_hdu(self, hdu=None, module=None, output=None, **kwargs):
    """Class method to instantiate a KeplerCotrendingBasisVectors object
    from a CBV FITS HDU.

    Kepler/K2 CBVs are all in the same FITS file for each
    quarter/campaign, so, when instantiating the CBV object, we must
    specify which module and output we desire. Only Single-Scale CBVs
    are stored for Kepler.

    Parameters
    ----------
    hdu : astropy.io.fits.hdu.hdulist.HDUList
        An opened FITS file containing the CBVs
    module : int
        Kepler CCD module 2 - 84
    output : int
        Kepler CCD output 1 - 4
    **kwargs : Optional arguments
        Passed to the TimeSeries superclass
    """

    assert module > 1 and module < 85, 'Invalid module number'
    assert output > 0 and output < 5, 'Invalid output number'

    # Get the mission: Kepler or K2
    # Sadly, the HDU does not explicitly say if this is Kepler or K2 CBVs.
    if 'QUARTER' in hdu['PRIMARY'].header:
        mission = 'Kepler'
    elif 'CAMPAIGN' in hdu['PRIMARY'].header:
        mission = 'K2'
    else:
        raise Exception(
            'This does not appear to be a Kepler or K2 FITS HDU')

    extName = 'MODOUT_{0}_{1}'.format(module, output)
    try:
        # Read the columns and meta data
        dataTbl = Table.read(hdu[extName], format="fits")
        dataTbl.meta.update(hdu[0].header)
        dataTbl.meta.update(hdu[extName].header)

        # TimeSeries-based objects require a dedicated time column
        # Replace NaNs with default time '2000-01-01', otherwise,
        # astropy.time.Time complains
        nanHere = np.nonzero(np.isnan(dataTbl['TIME_MJD'].data))[0]
        timeData = dataTbl['TIME_MJD'].data
        timeData[nanHere] = Time(['2000-01-01'], scale='utc').mjd
        cbvTime = Time(timeData, format='mjd')
        dataTbl.remove_column('TIME_MJD')

        # Gaps are labelled as 'GAPFLAG' so rename!
        dataTbl['GAP'] = dataTbl['GAPFLAG']
        dataTbl.remove_column('GAPFLAG')

        dataTbl.meta['MISSION'] = mission
        dataTbl.meta['CBV_TYPE'] = 'SingleScale'
    except Exception:
        dataTbl = None
        cbvTime = None

    # Here we instantiate the actual object
    return self(data=dataTbl, time=cbvTime, **kwargs)
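# Hypothetical usage of from_hdu() above: open a quarterly Kepler CBV file
# (the file name is a placeholder) and pull out the basis vectors for one
# module/output; the class name follows the docstring.
from astropy.io import fits

with fits.open('kplr-q17-d25_lcbv.fits') as hdu:   # placeholder file name
    cbvs = KeplerCotrendingBasisVectors.from_hdu(hdu=hdu, module=13,
                                                 output=2)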
def test_time2et(self):
    from astropy.time import Time
    core.time2et(Time('2000-01-01', scale='utc'))