sun = e['sun'] print('Earth:', type(earth)) print(' Sun:', type(sun)) t = ts.utc(datetime.datetime.now(datetime.timezone.utc)) #Now print(sun.at(t)) print(sun.at(t).observe(earth)) #sys.exit() epochs = { 'start': '2020-03-21 00:00:00', 'stop': '2020-03-21 01:00:00', 'step': '1m' } obj = Horizons(id='LRO', location='500@0', id_type='majorbody', epochs=epochs) vec = obj.vectors() print(vec) print(vec.columns) column_keys = ['datetime_jd', 'x', 'y', 'z', 'vx', 'vy', 'vz'] pos_keys = ['x', 'y', 'z'] vel_keys = ['vx', 'vy', 'vz'] t_key = 'datetime_jd' print(np.array(vec[pos_keys])[0]) lro = skyfield.positionlib.Barycentric(np.array(vec[pos_keys]), np.array(vec[vel_keys]), t=np.array(vec[t_key]), center=0, target=-85) print(lro.distance())
def from_horizons( cls, name, epochs, *, attractor=None, plane=Planes.EARTH_EQUATOR, id_type="smallbody", ): """Return `Ephem` for an object using JPLHorizons module of Astroquery. Parameters ---------- name : string Name of the body to query for. epochs: ~astropy.time.Time Epochs to sample the body positions. attractor : ~poliastro.bodies.SolarSystemPlanet, optional Body to use as central location, if not given the Solar System Barycenter will be used. plane : ~poliastro.frames.Planes, optional Fundamental plane of the frame, default to Earth Equator. id_type : string, optional Use "smallbody" for Asteroids and Comets (default), and "majorbody" for Planets and Satellites. """ if epochs.isscalar: epochs = epochs.reshape(1) refplanes_dict = { Planes.EARTH_EQUATOR: "earth", Planes.EARTH_ECLIPTIC: "ecliptic", } refplane = refplanes_dict[plane] if attractor is not None: bodies_dict = { "sun": 10, "mercury": 199, "venus": 299, "earth": 399, "mars": 499, "jupiter": 599, "saturn": 699, "uranus": 799, "neptune": 899, } location = "500@{}".format(bodies_dict[attractor.name.lower()]) else: location = "@ssb" obj = Horizons(id=name, location=location, epochs=epochs.jd, id_type=id_type).vectors(refplane=refplane) x = obj["x"] y = obj["y"] z = obj["z"] d_x = obj["vx"] d_y = obj["vy"] d_z = obj["vz"] coordinates = CartesianRepresentation( x, y, z, differentials=CartesianDifferential(d_x, d_y, d_z)) return cls(coordinates, epochs, plane)
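# Hedged usage sketch for the classmethod above. It assumes the method is
# exposed on poliastro's Ephem class and that the import paths below match the
# installed poliastro version; an Internet connection is required for the query.
from astropy.time import Time
from poliastro.bodies import Earth
from poliastro.ephem import Ephem
from poliastro.frames import Planes

epochs = Time(["2020-03-21 00:00", "2020-03-21 01:00"], scale="tdb")
# Query the Moon (major body 301) with the Earth as attractor
moon = Ephem.from_horizons("301", epochs, attractor=Earth,
                           plane=Planes.EARTH_EQUATOR, id_type="majorbody")
print(moon)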
def get_horizons_coord(body, time='now', id_type='majorbody'): """ Queries JPL HORIZONS and returns a `~astropy.coordinates.SkyCoord` for the location of a solar-system body at a specified time. This location is the instantaneous or "true" location, and is not corrected for light travel time or observer motion. .. note:: This function requires the Astroquery package to be installed and requires an Internet connection. Parameters ---------- body : `str` The solar-system body for which to calculate positions. One can also use the search form linked below to find valid names or ID numbers. id_type : `str` If 'majorbody', search by name for planets, satellites, or other major bodies. If 'smallbody', search by name for asteroids or comets. If 'id', search by ID number. time : {parse_time_types} Time to use in a parse_time-compatible format Returns ------- `~astropy.coordinates.SkyCoord` Location of the solar-system body Notes ----- Be aware that there can be discrepancies between the coordinates returned by JPL HORIZONS, the coordinates reported in mission data files, and the coordinates returned by `~sunpy.coordinates.get_body_heliographic_stonyhurst`. References ---------- * `JPL HORIZONS <https://ssd.jpl.nasa.gov/?horizons>`_ * `JPL HORIZONS form to search bodies <https://ssd.jpl.nasa.gov/horizons.cgi?s_target=1#top>`_ * `Astroquery <https://astroquery.readthedocs.io/en/latest/>`_ Examples -------- .. Run these tests with a temp cache dir .. testsetup:: >>> from astropy.config.paths import set_temp_cache >>> import tempfile >>> c = set_temp_cache(tempfile.mkdtemp()) >>> _ = c.__enter__() >>> from sunpy.coordinates import get_horizons_coord Query the location of Venus >>> get_horizons_coord('Venus barycenter', '2001-02-03 04:05:06') # doctest: +REMOTE_DATA INFO: Obtained JPL HORIZONS location for Venus Barycenter (2) [sunpy.coordinates.ephemeris] <SkyCoord (HeliographicStonyhurst: obstime=2001-02-03T04:05:06.000): (lon, lat, radius) in (deg, deg, AU) (-33.93155883, -1.64998481, 0.71915147)> Query the location of the SDO spacecraft >>> get_horizons_coord('SDO', '2011-11-11 11:11:11') # doctest: +REMOTE_DATA INFO: Obtained JPL HORIZONS location for Solar Dynamics Observatory (spac [sunpy.coordinates.ephemeris] <SkyCoord (HeliographicStonyhurst: obstime=2011-11-11T11:11:11.000): (lon, lat, radius) in (deg, deg, AU) (0.01018888, 3.29640407, 0.99011042)> Query the location of the SOHO spacecraft via its ID number (-21) >>> get_horizons_coord(-21, '2004-05-06 11:22:33', 'id') # doctest: +REMOTE_DATA INFO: Obtained JPL HORIZONS location for SOHO (spacecraft) (-21) [sunpy.coordinates.ephemeris] <SkyCoord (HeliographicStonyhurst: obstime=2004-05-06T11:22:33.000): (lon, lat, radius) in (deg, deg, AU) (0.2523461, -3.55863351, 0.99923086)> .. 
testcleanup:: >>> _ = c.__exit__() """ obstime = parse_time(time) array_time = np.reshape(obstime, (-1, )) # Convert to an array, even if scalar # Import here so that astroquery is not a module-level dependency from astroquery.jplhorizons import Horizons query = Horizons( id=body, id_type=id_type, location='500@10', # Heliocentric (mean ecliptic) epochs=array_time.tdb.jd.tolist()) # Time must be provided in JD TDB try: result = query.vectors() except Exception: # Catch and re-raise all exceptions, and also provide query URL if generated if query.uri is not None: log.error( f"See the raw output from the JPL HORIZONS query at {query.uri}" ) raise log.info(f"Obtained JPL HORIZONS location for {result[0]['targetname']}") log.debug(f"See the raw output from the JPL HORIZONS query at {query.uri}") # JPL HORIZONS results are sorted by observation time, so this sorting needs to be undone. # Calling argsort() on an array returns the sequence of indices of the unsorted list to put the # list in order. Calling argsort() again on the output of argsort() reverses the mapping: # the output is the sequence of indices of the sorted list to put that list back in the # original unsorted order. unsorted_indices = obstime.argsort().argsort() result = result[unsorted_indices] vector = CartesianRepresentation(result['x'], result['y'], result['z']) coord = SkyCoord(vector, frame=HeliocentricMeanEcliptic, obstime=obstime) return coord.transform_to(HGS).reshape(obstime.shape)
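# Minimal self-contained sketch of the double-argsort trick described in the
# comment above: argsort() gives the permutation that sorts the array, and
# applying argsort() to that permutation gives the inverse permutation, which
# maps the time-sorted HORIZONS results back into the original request order.
import numpy as np

obstime_jd = np.array([2459000.5, 2458000.5, 2460000.5])   # unsorted request times
sorted_results = np.sort(obstime_jd)                       # what HORIZONS returns (time-sorted)
unsorted_indices = obstime_jd.argsort().argsort()          # inverse permutation
restored = sorted_results[unsorted_indices]
assert np.array_equal(restored, obstime_jd)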
"y": [], "z": [], "vx": [], "vy": [], "vz": [] } ids_planets = ["Sun", 199, 299, 399, 499, 599, 699, 799, 899] masses_planets = [1, 1.6601367952719304e-07, 2.447838938855945e-06, 3.0034895963231186e-06, 3.2271514450538743e-07, 0.0002858859806661029, 0.0009547919384243222, 4.3662440433515637e-05, 5.151389020535497e-05] for i, m in zip(ids_planets, masses_planets): print(str(i)) try: obj = Horizons(id=i, location='@sun', epochs=2458133.33546, id_type='majorbody') row = obj.vectors()[0] except ValueError as e: print(e) continue dic["name"].append(row["targetname"]) dic["m"].append(m) dic["x"].append(row["x"]) dic["y"].append(row["y"]) dic["z"].append(row["z"]) dic["vx"].append(row["vx"]) dic["vy"].append(row["vy"]) dic["vz"].append(row["vz"]) df = pd.read_csv("jfc_comets.csv") jfc_ids = np.random.choice(np.array(df.spkid, dtype=np.int), 100)
def get_horizons_coord(body, time='now', id_type='majorbody', *, include_velocity=False): """ Queries JPL HORIZONS and returns a `~astropy.coordinates.SkyCoord` for the location of a solar-system body at a specified time. This location is the instantaneous or "true" location, and is not corrected for light travel time or observer motion. .. note:: This function requires the Astroquery package to be installed and requires an Internet connection. Parameters ---------- body : `str` The solar-system body for which to calculate positions. One can also use the search form linked below to find valid names or ID numbers. id_type : `str` If 'majorbody', search by name for planets, satellites, or other major bodies. If 'smallbody', search by name for asteroids or comets. If 'id', search by ID number. time : {parse_time_types}, `dict` Time to use in a parse_time-compatible format. Alternatively, this can be a dictionary defining a range of times and dates; the range dictionary has to be of the form {{'start': start_time, 'stop': stop_time, 'step':’n[y|d|m|s]’}}. ``start_time`` and ``stop_time`` must be in a parse_time-compatible format, and are interpreted as UTC time. ``step`` must be a string with either a number and interval length (e.g. for every 10 seconds, ``'10s'``), or a plain number for a number of evenly spaced intervals. For more information see the docstring of `astroquery.jplhorizons.HorizonsClass`. Keyword Arguments ----------------- include_velocity : `bool` If True, include the body's velocity in the output coordinate. Defaults to False. Returns ------- `~astropy.coordinates.SkyCoord` Location of the solar-system body Notes ----- Be aware that there can be discrepancies between the coordinates returned by JPL HORIZONS, the coordinates reported in mission data files, and the coordinates returned by `~sunpy.coordinates.get_body_heliographic_stonyhurst`. 
References ---------- * `JPL HORIZONS <https://ssd.jpl.nasa.gov/?horizons>`_ * `JPL HORIZONS form to search bodies <https://ssd.jpl.nasa.gov/horizons.cgi?s_target=1#top>`_ * `Astroquery <https://astroquery.readthedocs.io/en/latest/>`_ Examples -------- >>> from sunpy.coordinates.ephemeris import get_horizons_coord Query the location of Venus >>> get_horizons_coord('Venus barycenter', '2001-02-03 04:05:06') # doctest: +REMOTE_DATA INFO: Obtained JPL HORIZONS location for Venus Barycenter (2) [sunpy.coordinates.ephemeris] <SkyCoord (HeliographicStonyhurst: obstime=2001-02-03T04:05:06.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU) (-33.93155836, -1.64998443, 0.71915147)> Query the location of the SDO spacecraft >>> get_horizons_coord('SDO', '2011-11-11 11:11:11') # doctest: +REMOTE_DATA INFO: Obtained JPL HORIZONS location for Solar Dynamics Observatory (spac [sunpy.coordinates.ephemeris] <SkyCoord (HeliographicStonyhurst: obstime=2011-11-11T11:11:11.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU) (0.01019118, 3.29640728, 0.99011042)> Query the location of the SOHO spacecraft via its ID number (-21) >>> get_horizons_coord(-21, '2004-05-06 11:22:33', 'id') # doctest: +REMOTE_DATA INFO: Obtained JPL HORIZONS location for SOHO (spacecraft) (-21) [sunpy.coordinates.ephemeris] <SkyCoord (HeliographicStonyhurst: obstime=2004-05-06T11:22:33.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU) (0.25234902, -3.55863633, 0.99923086)> Query the location and velocity of the asteroid Juno >>> get_horizons_coord('Juno', '1995-07-18 07:17', 'smallbody', include_velocity=True) # doctest: +REMOTE_DATA INFO: Obtained JPL HORIZONS location for 3 Juno (A804 RA) [sunpy.coordinates.ephemeris] <SkyCoord (HeliographicStonyhurst: obstime=1995-07-18T07:17:00.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU) (-25.16107532, 14.59098438, 3.17667664) (d_lon, d_lat, d_radius) in (arcsec / s, arcsec / s, km / s) (-0.03306548, 0.00052415, -2.66709222)> Query the location of Solar Orbiter at a set of 12 regularly sampled times >>> get_horizons_coord('Solar Orbiter', ... time={{'start': '2020-12-01', ... 'stop': '2020-12-02', ... 'step': '12'}}) # doctest: +REMOTE_DATA INFO: Obtained JPL HORIZONS location for Solar Orbiter (spacecraft) (-144 [sunpy.coordinates.ephemeris] ... 
""" if isinstance(time, dict): if set(time.keys()) != set(['start', 'stop', 'step']): raise ValueError('time dictionary must have the keys ["start", "stop", "step"]') epochs = time jpl_fmt = '%Y-%m-%d %H:%M:%S' epochs['start'] = parse_time(epochs['start']).tdb.strftime(jpl_fmt) epochs['stop'] = parse_time(epochs['stop']).tdb.strftime(jpl_fmt) else: obstime = parse_time(time) array_time = np.reshape(obstime, (-1,)) # Convert to an array, even if scalar epochs = array_time.tdb.jd.tolist() # Time must be provided in JD TDB # Import here so that astroquery is not a module-level dependency from astroquery.jplhorizons import Horizons query = Horizons(id=body, id_type=id_type, location='500@10', # Heliocentric (mean ecliptic) epochs=epochs) try: result = query.vectors() except Exception as e: # Catch and re-raise all exceptions, and also provide query URL if generated if query.uri is not None: log.error(f"See the raw output from the JPL HORIZONS query at {query.uri}") raise e finally: query._session.close() log.info(f"Obtained JPL HORIZONS location for {result[0]['targetname']}") log.debug(f"See the raw output from the JPL HORIZONS query at {query.uri}") if isinstance(time, dict): obstime = parse_time(result['datetime_jd'], format='jd', scale='tdb') else: # JPL HORIZONS results are sorted by observation time, so this sorting needs to be undone. # Calling argsort() on an array returns the sequence of indices of the unsorted list to put the # list in order. Calling argsort() again on the output of argsort() reverses the mapping: # the output is the sequence of indices of the sorted list to put that list back in the # original unsorted order. unsorted_indices = obstime.argsort().argsort() result = result[unsorted_indices] vector = CartesianRepresentation(result['x'], result['y'], result['z']) if include_velocity: velocity = CartesianDifferential(result['vx'], result['vy'], result['vz']) vector = vector.with_differentials(velocity) coord = SkyCoord(vector, frame=HeliocentricEclipticIAU76, obstime=obstime) return coord.transform_to(HeliographicStonyhurst).reshape(obstime.shape)
def from_horizons(cls, targetids, id_type='smallbody', epochs=None, center='500@10', **kwargs): """Load target orbital elements from `JPL Horizons <https://ssd.jpl.nasa.gov/horizons.cgi>`_ using `astroquery.jplhorizons.HorizonsClass.elements` Parameters ---------- targetids : str or iterable of str Target identifier, i.e., a number, name, designation, or JPL Horizons record number, for one or more targets. id_type : str, optional The nature of ``targetids`` provided; possible values are ``'smallbody'`` (asteroid or comet), ``'majorbody'`` (planet or satellite), ``'designation'`` (asteroid or comet designation), ``'name'`` (asteroid or comet name), ``'asteroid_name'``, ``'comet_name'``, ``'id'`` (Horizons id). Default: ``'smallbody'`` epochs : `~astropy.time.Time` or dict, optional Epochs of elements to be queried; requires a `~astropy.time.Time` object with a single or multiple epochs. A dictionary including keywords ``start`` and ``stop``, as well as either ``step`` or ``number``, can be used to generate a range of epochs. ``start`` and ``stop`` have to be `~astropy.time.Time` objects (see :ref:`epochs`). If ``step`` is provided, a range of epochs will be queries starting at ``start`` and ending at ``stop`` in steps of ``step``; ``step`` has to be provided as a `~astropy.units.Quantity` object with integer value and a unit of either minutes, hours, days, or years. If ``number`` is provided as an integer, the interval defined by ``start`` and ``stop`` is split into ``number`` equidistant intervals. If ``None`` is provided, current date and time are used. All epochs should be provided in TDB; if not, they will be converted to TDB and a `~sbpy.data.TimeScaleWarning` will be raised. Default: ``None`` center : str, optional, default ``'500@10'`` (center of the Sun) Elements will be provided relative to this position. **kwargs : optional Arguments that will be provided to `astroquery.jplhorizons.HorizonsClass.elements`. Notes ----- * For detailed explanations of the queried fields, refer to `astroquery.jplhorizons.HorizonsClass.elements` and the `JPL Horizons documentation <https://ssd.jpl.nasa.gov/?horizons_doc>`_. * By default, elements are provided in the J2000.0 reference system and relative to the ecliptic and mean equinox of the reference epoch. Different settings can be chosen using additional keyword arguments as used by `astroquery.jplhorizons.HorizonsClass.elements`. Returns ------- `~Orbit` object Examples -------- >>> from sbpy.data import Orbit >>> from astropy.time import Time >>> epoch = Time('2018-05-14', scale='tdb') >>> eph = Orbit.from_horizons('Ceres', epochs=epoch) # doctest: +REMOTE_DATA """ # modify epoch input to make it work with astroquery.jplhorizons # maybe this stuff should really go into that module.... 
if epochs is None: epochs = [Time.now().tdb.jd] elif isinstance(epochs, Time): if epochs.scale != 'tdb': warn(('converting {} epochs to tdb for use in ' 'astroquery.jplhorizons').format(epochs.scale), TimeScaleWarning) epochs = epochs.tdb.jd elif isinstance(epochs, dict): if 'start' in epochs and 'stop' in epochs and 'number' in epochs: epochs['step'] = epochs['number'] * u.dimensionless_unscaled # convert to tdb and iso for astroquery.jplhorizons epochs['start'] = epochs['start'].tdb.iso epochs['stop'] = epochs['stop'].tdb.iso if 'step' in epochs: if epochs['step'].unit is not u.dimensionless_unscaled: epochs['step'] = '{:d}{:s}'.format( int(epochs['step'].value), { u.minute: 'm', u.hour: 'h', u.d: 'd', u.year: 'y' }[epochs['step'].unit]) else: epochs['step'] = '{:d}'.format( int(epochs['step'].value - 1)) # if targetids is a list, run separate Horizons queries and append if not isinstance(targetids, (list, ndarray, tuple)): targetids = [targetids] # append elements table for each targetid all_elem = None for targetid in targetids: # load elements using astroquery.jplhorizons obj = Horizons(id=targetid, id_type=id_type, location=center, epochs=epochs) try: elem = obj.elements(**kwargs) except ValueError as e: raise QueryError( ('Error raised by astroquery.jplhorizons: {:s}\n' 'The following query was attempted: {:s}').format( str(e), obj.uri)) # workaround for current version of astroquery to make # column units compatible with astropy.table.QTable # should really change '---' units to None in # astroquery.jplhorizons.__init__.py for column_name in elem.columns: if elem[column_name].unit == '---': elem[column_name].unit = None if all_elem is None: all_elem = elem else: all_elem = vstack([all_elem, elem]) # turn epochs into astropy.time.Time and apply timescale # https://ssd.jpl.nasa.gov/?horizons_doc all_elem['epoch'] = Time(all_elem['datetime_jd'], format='jd', scale='tdb') all_elem['Tp'] = Time(all_elem['Tp_jd'], format='jd', scale='tdb') all_elem.remove_column('datetime_jd') all_elem.remove_column('datetime_str') all_elem.remove_column('Tp_jd') return cls.from_table(all_elem)
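# Hedged illustration of the epoch-dictionary conversion performed above: a
# start/stop pair of astropy Time objects plus a step Quantity is turned into
# the {'start': iso, 'stop': iso, 'step': 'N<unit>'} strings that
# astroquery.jplhorizons.HorizonsClass expects. The epoch values are illustrative.
import astropy.units as u
from astropy.time import Time

epochs = {'start': Time('2018-05-14', scale='tdb'),
          'stop': Time('2018-05-24', scale='tdb'),
          'step': 1 * u.d}
converted = {'start': epochs['start'].tdb.iso,
             'stop': epochs['stop'].tdb.iso,
             'step': '{:d}{:s}'.format(int(epochs['step'].value),
                                       {u.minute: 'm', u.hour: 'h',
                                        u.d: 'd', u.year: 'y'}[epochs['step'].unit])}
print(converted)   # {'start': '2018-05-14 00:00:00.000', 'stop': ..., 'step': '1d'}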
def targetStatesFromHorizons(target_id, observer_id, tstart, tstop,
                             ephemeris_dt='1h', frame='ecliptic'):
    """Query JPL Horizons via astroquery to get observer-to-target state vectors.

    Parameters:
    -----------
    target_id    ... Horizons identifier of target, e.g. 'Ceres'
    observer_id  ... Horizons identifier of observer, e.g. 'I11'
    tstart       ... start time for ephemeris in Horizons format, e.g. 'JD2456789.5'
    tstop        ... end time for ephemeris in Horizons format, e.g. 'JD2456799.5'
    ephemeris_dt ... Time step for ephemeris query. Typically 1h since the actual
                     times will be interpolated later.
    frame        ... coordinate frame ('ecliptic' or 'icrf')

    Returns:
    --------
    target_mjd   ... Gridded ephemeris epochs (MJD / TDB)
    target_xyz   ... Target positions relative to the observer at gridded epochs [au]
    target_vxyz  ... Target velocities relative to the observer at gridded epochs [au/day]

    External Function Requirements:
    -------------------------------
    # External API's
    from astroquery.jplhorizons import Horizons
    """
    try:
        # Get target states relative to the observer (caution: choose the right
        # plane of reference and direction of the vectors!)
        # Check the query by copy/pasting the output of print(obs2target.uri)
        # into a web browser if there are problems.
        obs2target = Horizons(id=target_id,
                              location=observer_id,
                              epochs={'start': tstart,
                                      'stop': tstop,
                                      'step': ephemeris_dt})

        if (frame == 'ecliptic'):
            vec = obs2target.vectors(refplane='ecliptic')
        elif (frame == 'ICRF' or frame == 'J2000'
              or frame == 'earth' or frame == 'icrf'):
            vec = obs2target.vectors(refplane='earth')
        else:
            raise Exception('Error: requested frame unknown.')

        target_jd = np.array(vec['datetime_jd']).astype('float')
        # Target position and velocity relative to the observer (no sign flip needed)
        target_xyz = np.array([vec['x'], vec['y'], vec['z']]).astype('float')
        target_vxyz = np.array([vec['vx'], vec['vy'], vec['vz']]).astype('float')

    except Exception:
        print("Error: potential online ephemeris query failure. "
              "Make sure internet connectivity is available.")
        raise

    target_mjd = tr.jd2mjd(target_jd)

    return target_mjd, target_xyz, target_vxyz
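# Hedged usage sketch for targetStatesFromHorizons above. It assumes the
# surrounding module provides numpy as np and the project-specific time-transform
# helper 'tr' (for tr.jd2mjd), exactly as used inside the function body.
target_mjd, target_xyz, target_vxyz = targetStatesFromHorizons(
    'Ceres', 'I11', 'JD2456789.5', 'JD2456799.5',
    ephemeris_dt='1h', frame='ecliptic')
print(target_xyz.shape, target_vxyz.shape)   # (3, N), (3, N), with N gridded epochs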
if __name__ == "__main__": ## Parameters from datetime import datetime, timezone startDate = datetime(2012, 10, 1, tzinfo=timezone.utc) endDate = datetime(2013, 6, 1, tzinfo=timezone.utc) ## Getting Integral position integralSpiceId = '-198' observer = '500' # Earth from astroquery.jplhorizons import Horizons epochs = {'start': '{:4d}-{:02d}-{:02d}'.format(startDate.year, startDate.month, startDate.day), 'stop': '{:4d}-{:02d}-{:02d}'.format(endDate.year, endDate.month, endDate.day), 'step': '10m'} obj = Horizons(id=integralSpiceId, location=observer, epochs=epochs, id_type='id') vec = obj.vectors() # By default, coordinates should be in J2000 ecliptic coordinates, but not sure... import numpy as np integralDate_jd = np.array([row[1] for row in vec]) # Date is defined in Julian date format integralPosition_au = np.array([[row[3], row[4], row[5]] for row in vec]) au2km = 149597870.7 integralPosition_km = au2km * integralPosition_au #print(integralPosition_km) #print(integralDate_jd) ## Getting WIND position import cdflib from pathlib import Path windPosition_km = None
def curve_of_growth_analysis(filenames, parameters, nodeblending=False, display=False, diagnostics=False): output = {} obsparam = parameters['obsparam'] logging.info('starting photometry with parameters: %s' % (', '.join([('%s: %s' % (var, str(val))) for var, val in list(locals().items())]))) # re-extract sources for curve-of-growth analysis aprads = parameters['aprad'] if not isinstance(aprads, list) and not isinstance(aprads, numpy.ndarray): print('need a list of aprads...') os.abort() logging.info('run pp_extract using %d apertures' % len(aprads)) print('* extract sources from %d images using %d apertures' % (len(filenames), len(aprads))) extractparameters = { 'sex_snr': parameters['sex_snr'], 'source_minarea': parameters['source_minarea'], 'paramfile': _pp_conf.rootpath + '/setup/twentyapertures.sexparam', 'aprad': aprads, 'telescope': parameters['telescope'], 'nodeblending': nodeblending, 'quiet': False } extraction = pp_extract.extract_multiframe(filenames, extractparameters) extraction = [e for e in extraction if len(e) > 0] # curve-of-growth analysis # arrays for accumulating source information as a function of aprad background_flux = [] # numpy.zeros(len(aprads)) target_flux = [] # numpy.zeros(len(aprads)) background_snr = [] # numpy.zeros(len(aprads)) target_snr = [] # numpy.zeros(len(aprads)) for filename in filenames: if display: print('processing curve-of-growth for frame %s' % filename) if not parameters['background_only']: hdu = fits.open(filename, ignore_missing_end=True) # pull target coordinates from Horizons targetname = hdu[0].header[obsparam['object']] if parameters['manobjectname'] is not None: targetname = parameters['manobjectname'].translate( _pp_conf.target2filename) image = hdu[0].data # derive MIDTIMJD, if not yet in the FITS header obsparam = parameters['obsparam'] if not 'MIDTIMJD' in hdu[0].header: exptime = float(hdu[0].header[obsparam['exptime']]) if obsparam['date_keyword'].find('|') == -1: date = hdu[0].header[obsparam['date_keyword']] date = dateobs_to_jd(date) + exptime / 2. / 86400. else: date_key = obsparam['date_keyword'].split('|')[0] time_key = obsparam['date_keyword'].split('|')[1] date = hdu[0].header[date_key]+'T' +\ hdu[0].header[time_key] date = dateobs_to_jd(date) + exptime / 2. / 86400. else: date = hdu[0].header['MIDTIMJD'] # call HORIZONS to get target coordinates obj = Horizons(targetname.replace('_', ' '), epochs=date, location=str(obsparam['observatory_code'])) try: eph = obj.ephemerides() n = len(eph) except ValueError: print('Target (%s) not a small body' % targetname) logging.warning('Target (%s) not a small body' % targetname) n = None if n is None or n == 0: logging.warning('WARNING: No position from Horizons!' + 'Name (%s) correct?' 
% targetname) logging.warning('HORIZONS call: %s' % obj.uri) logging.info('proceeding with background sources analysis') parameters['background_only'] = True else: logging.info('ephemerides for %s pulled from Horizons' % targetname) target_ra, target_dec = eph[0]['RA'], eph[0]['DEC'] # pull data from LDAC file ldac_filename = filename[:filename.find('.fit')] + '.ldac' data = catalog('Sextractor_LDAC') data.read_ldac(ldac_filename, maxflag=3) if data.shape[0] == 0: continue # identify target and extract its curve-of-growth n_target_identified = 0 if not parameters['background_only']: residuals = numpy.sqrt((data['ra_deg'] - target_ra)**2 + (data['dec_deg'] - target_dec)**2) target_idx = numpy.argmin(residuals) if residuals[target_idx] > _pp_conf.pos_epsilon / 3600: logging.warning(('WARNING: frame %s, large residual to ' + 'HORIZONS position of %s: %f arcsec; ' + 'ignore this frame') % (filename, targetname, residuals[numpy.argmin(residuals)] * 3600.)) else: target_flux.append( data[target_idx]['FLUX_' + _pp_conf.photmode] / max(data[target_idx]['FLUX_' + _pp_conf.photmode])) target_snr.append( data[target_idx]['FLUX_' + _pp_conf.photmode] / data[target_idx]['FLUXERR_' + _pp_conf.photmode] / max(data[target_idx]['FLUX_' + _pp_conf.photmode] / data[target_idx]['FLUXERR_' + _pp_conf.photmode])) n_target_identified += 1 # extract background source fluxes and snrs # assume n_background_sources >> 1, do not reject target if not parameters['target_only']: # n_src = data.shape[0] # use all sources n_src = 50 # use only 50 sources for idx, src in enumerate(data.data[:n_src]): if (numpy.any(numpy.isnan(src['FLUX_' + _pp_conf.photmode])) or numpy.any( numpy.isnan(src['FLUXERR_' + _pp_conf.photmode])) or src['FLAGS'] > 3): continue # create growth curve background_flux.append(src['FLUX_' + _pp_conf.photmode] / max(src['FLUX_' + _pp_conf.photmode])) background_snr.append(src['FLUX_' + _pp_conf.photmode] / src['FLUXERR_' + _pp_conf.photmode] / max(src['FLUX_' + _pp_conf.photmode] / src['FLUXERR_' + _pp_conf.photmode])) # investigate curve-of-growth logging.info('investigate curve-of-growth based on %d frames' % len(filenames)) # combine results n_target = len(target_flux) if n_target > 0: target_flux = (numpy.median(target_flux, axis=0), numpy.std(target_flux, axis=0) / numpy.sqrt(n_target)) target_snr = numpy.median(target_snr, axis=0) else: target_flux = (numpy.zeros(len(aprads)), numpy.zeros(len(aprads))) target_snr = numpy.zeros(len(aprads)) n_background = len(background_flux) if n_background > 0: background_flux = (numpy.median(background_flux, axis=0), numpy.std(background_flux, axis=0) / numpy.sqrt(n_background)) background_snr = numpy.median(background_snr, axis=0) else: background_flux = (numpy.zeros(len(aprads)), numpy.zeros(len(aprads))) background_snr = numpy.zeros(len(aprads)) if n_target == 0: logging.info('No target fluxes available, using background sources, ' + 'only') parameters['background_only'] = True if n_background == 0: logging.info('No background fluxes available, using target, only') parameters['target_only'] = True # find optimum aperture radius if parameters['target_only']: aprad_strategy = 'smallest target aprad that meets fluxlimit criterion' optimum_aprad_idx = numpy.argmin( numpy.fabs(target_flux[0] - _pp_conf.fluxlimit_aprad)) elif parameters['background_only']: aprad_strategy = 'smallest background aprad that meets fluxlimit ' + \ 'criterion' optimum_aprad_idx = numpy.argmin( numpy.fabs(background_flux[0] - _pp_conf.fluxlimit_aprad)) else: # flux_select: indices where 
target+background fluxes > fluxlimit flux_select = numpy.where( (target_flux[0] > _pp_conf.fluxlimit_aprad) & (background_flux[0] > _pp_conf.fluxlimit_aprad))[0] flux_res = numpy.fabs(target_flux[0][flux_select] - background_flux[0][flux_select]) if numpy.min(flux_res) < _pp_conf.fluxmargin_aprad: aprad_strategy = 'target+background fluxes > fluxlimit, ' + \ 'flux difference < margin' optimum_aprad_idx = flux_select[numpy.where( flux_res < _pp_conf.fluxmargin_aprad)[0][0]] else: aprad_strategy = 'target+background fluxes > fluxlimit, ' + \ 'flux difference minimal' optimum_aprad_idx = flux_select[numpy.argmin(flux_res)] optimum_aprad = parameters['aprad'][optimum_aprad_idx] output['aprad_strategy'] = aprad_strategy output['optimum_aprad'] = optimum_aprad output['pos_epsilon'] = _pp_conf.pos_epsilon output['fluxlimit_aprad'] = _pp_conf.fluxlimit_aprad output['fluxmargin_aprad'] = _pp_conf.fluxmargin_aprad output['n_target'] = len(target_flux[0]) output['n_bkg'] = len(background_flux[0]) output['target_flux'] = target_flux output['target_snr'] = target_snr output['background_flux'] = background_flux output['background_snr'] = background_snr output['parameters'] = parameters # write results to file outf = open('aperturephotometry_curveofgrowth.dat', 'w') outf.writelines('# background target flux\n' + '# rad flux sigma snr flux sigma snr residual\n') for i in range(len(parameters['aprad'])): outf.writelines( ('%5.2f %5.3f %5.3f %4.2f %6.3f %5.3f %4.2f ' + '%6.3f\n') % (parameters['aprad'][i], background_flux[0][i], background_flux[1][i], background_snr[i], target_flux[0][i], target_flux[1][i], target_snr[i], target_flux[0][i] - background_flux[0][i])) outf.close() # extraction content # # -> see pp_extract.py # ### # output content # # { 'aprad_strategy' : optimum aperture finding strategy, # 'optimum_aprad' : optimum aperature radius, # 'pos_epsilon' : required positional uncertainty ("), # 'fluxlimit_aprad' : min flux for both target and background, # 'fluxmargin_aprad': max flux difference between target and background, # 'n_target' : number of frames with target flux measurements, # 'n_bkg' : number of frames with background measurements, # 'target_flux' : target fluxes as a function of aprad, # 'target_snr' : target snrs as a function of aprad, # 'background_flux' : background fluxes as a function of aprad, # 'background_snr' : background snrs as a function of aprad, # 'parameters' : source extractor parameters # } ### # diagnostics if diagnostics: if display: print('creating diagnostic output') logging.info(' ~~~~~~~~~ creating diagnostic output') diag.add_photometry(output, extraction) # update image headers for filename in filenames: hdu = fits.open(filename, mode='update', ignore_missing_end=True) hdu[0].header['APRAD'] = (optimum_aprad, 'aperture phot radius (px)') hdu[0].header['APIDX'] = (optimum_aprad_idx, 'optimum aprad index') hdu.flush() hdu.close() # display results if display: print( '\n#################################### PHOTOMETRY SUMMARY:\n###') print('### best-fit aperture radius %5.2f (px)' % (optimum_aprad)) print('###\n#####################################################\n') logging.info('==> best-fit aperture radius: %3.1f (px)' % (optimum_aprad)) return output
w.wcs.crval = [crvalx, crvaly] w.wcs.crpix = [crpixx, crpixy] w.wcs.cdelt = [cdeltx, cdelty] #Define start and stop times for ephemeris data; since jpl does not accept seconds, #all times are in YY:MM:DD hh:mm format;dt is added to stop time to ensure ephemeris #data range extends beyond exposure time eph_tstart = Time(tstart, out_subfmt='date_hm') dt = TimeDelta(0.125, format='jd') eph_tstop = Time(tstop + dt, out_subfmt='date_hm') if name_flag == 0: obj = Horizons(id=obj_id, location=telescope, epochs={ 'start': eph_tstart.iso, 'stop': eph_tstop.iso, 'step': '5m' }, id_type=jpl_id_type) eph = obj.ephemerides() if name_flag == 1: obj = Horizons(id=obj_id, location=telescope, epochs={ 'start': eph_tstart.iso, 'stop': eph_tstop.iso, 'step': '5m' }) eph = obj.ephemerides() #Create interpolation function for RA and DEC based on ephemeris data
def from_horizons( cls, name, attractor, epoch=None, plane=Planes.EARTH_EQUATOR, id_type="smallbody", ): """Return osculating `Orbit` of a body using JPLHorizons module of Astroquery. Parameters ---------- name : string Name of the body to query for. epoch : ~astropy.time.Time, optional Epoch, default to None. plane : ~poliastro.frames.Planes Fundamental plane of the frame. id_type : string, optional Use "smallbody" for Asteroids and Comets, and "majorbody" for Planets and Satellites. """ if not epoch: epoch = time.Time.now() if plane == Planes.EARTH_EQUATOR: refplane = "earth" elif plane == Planes.EARTH_ECLIPTIC: refplane = "ecliptic" bodies_dict = { "sun": 10, "mercury": 199, "venus": 299, "earth": 399, "mars": 499, "jupiter": 599, "saturn": 699, "uranus": 799, "neptune": 899, } location = "500@{}".format(bodies_dict[attractor.name.lower()]) obj = Horizons(id=name, location=location, epochs=epoch.jd, id_type=id_type).elements(refplane=refplane) a = obj["a"][0] * u.au ecc = obj["e"][0] * u.one inc = obj["incl"][0] * u.deg raan = obj["Omega"][0] * u.deg argp = obj["w"][0] * u.deg nu = obj["nu"][0] * u.deg ss = cls.from_classical(attractor, a, ecc, inc, raan, argp, nu, epoch=epoch.tdb, plane=plane) return ss
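# Hedged usage sketch for the classmethod above, assuming it is exposed as
# poliastro.twobody.Orbit.from_horizons and that the Sun is a valid attractor;
# network access is required for the Horizons query.
from astropy.time import Time
from poliastro.bodies import Sun
from poliastro.twobody import Orbit

epoch = Time("2018-05-14", scale="tdb")
ceres = Orbit.from_horizons("Ceres", Sun, epoch=epoch, id_type="smallbody")
print(ceres.a, ceres.ecc, ceres.inc)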
def find_star_path(self, index, pi_trig, mu_a, mu_d, start, end, step='1month'): # creating an empty pandas dataframe bc easiest to work with coord_df = pd.DataFrame(columns=['time', 'ra', 'dec']) #first need to pull general data on star and convert to arcseconds a_0 = list(self.stars.ra)[index] * 3600 d_0 = list(self.stars.dec)[index] * 3600 mu_a = mu_a / 1000 mu_d = mu_d / 1000 # make inputted times into jd -- note that t_split is temp, so i reuse for the observed date and the start date # initial time if type(self.bd.observ_date) == float: t_0 = self.bd.observ_date else: t_split = self.bd.observ_date.split('-') t_0 = float( t_split[0]) + (strptime(t_split[1], '%b').tm_mon / 12) + ( float(t_split[2]) / 365) #when observations happened # start time if type(start) == float: t_start = start else: t_split = start.split('-') t_start = float( t_split[0]) + (strptime(t_split[1], '%b').tm_mon / 12) + ( float(t_split[2]) / 365) #when observations happened # grab ephemerides in vector form obj = Horizons(id='399', id_type='majorbody', epochs={ 'start': self.bd.observ_date, 'stop': end, 'step': step }) vectors = obj.vectors() vectors = vectors['targetname', 'datetime_jd', 'x', 'y', 'z'] #run through each ephemeride coordinate/time (time as months) for coord in vectors: #converting coord to year t = Time(float(coord[1]), format='jd') t.format = 'jyear' t = t.value #cue formula for ra and dec at a given time. d_prime = d_0 + (mu_d * (t - t_0)) #converting d to rad d_prime_r = float(d_prime / 206265) a_prime = a_0 + (mu_a * (t - t_0) / (np.cos(d_prime_r))) #convert a to rad a_prime_r = float(a_prime / 206265) # actual equations a_t = a_prime + ((pi_trig * ( (coord[2] * np.sin(a_prime_r)) - (coord[3] * np.cos(a_prime_r))) / np.cos(d_prime_r))) d_t = d_prime + (pi_trig * ( (coord[2] * np.cos(a_prime_r) * np.sin(d_prime_r)) + (coord[3] * np.sin(a_prime_r) * np.sin(d_prime_r)) - (coord[4] * np.cos(d_prime_r)))) #convert a_t and d_t to degrees a_t = a_t / 3600 d_t = d_t / 3600 #add to the coord dataframe, but only if during or after when we want the start if t > t_start: coord_df = coord_df.append({ 'time': t, 'ra': a_t, 'dec': d_t }, ignore_index=True) return coord_df
DEFAULT_STEP = "1m" arg_parser = argparse.ArgumentParser() arg_parser.add_argument("--target") arg_parser.add_argument("--target-type") arg_parser.add_argument("--location") arg_parser.add_argument("--time") args = arg_parser.parse_args() start_time = datetime.strptime(args.time, utils.constants.DATETIME_FORMAT) end_time_str = (start_time + timedelta(0, 1)).strftime(utils.constants.DATETIME_FORMAT) target = Horizons( id=args.target, location=args.location, id_type=args.target_type, epochs={"start": args.time, "stop": end_time_str, "step": DEFAULT_STEP,}, ) eph = target.ephemerides(quantities="19,20,22") dumps = json.dumps( { "distance_from_the_sun": {"value": math.floor(eph["r"].data[0] * AU_IN_M), "unit": "m"}, "distance_from_location": { "value": math.floor(eph["delta"].data[0] * AU_IN_M), "unit": "m", }, "velocity_with_respect_to_the_sun": { "value": math.floor(eph["vel_sun"].data[0] * 1000),
def __init__(self, id, idType, relativeTo, time=Time.now()): self.obj = Horizons(id=id, id_type=idType, location=relativeTo, epochs=time.jd)
def get_horizons_ephemerides(name, pov, epoch_start, epoch_stop, step_size, type_elements): # step: step size, [10m, 1d, 1y] if pov.lower() == 'sun': loc = '500@10' # position relative to the sun elif pov.lower() == 'goldstone': loc = '257' # from goldstone elif pov.lower() == 'maunakea': loc = '568' # maunakea else: print('Not Valid Location Point Of View') # Process to get homogeneity from main script full name '2012QD8' to a valid name for Horizon call '2012 QD8' if len( re.findall('([0-9])', name) ) <= 4: # 4 is the min numbers in every name, the date year of discovery r = re.compile("([0-9]+)([a-zA-Z]+)").match(name) k1 = r.group(1) # the date of the name k2 = r.group(2) # the code of the date valid_name = k1 + " " + k2 else: r = re.compile("([0-9]+)([a-zA-Z]+)([0-9]+)").match(name) k1 = r.group(1) # the date of the name k2 = r.group(2) # the code of the date k3 = r.group(3) # id after the letters valid_name = k1 + " " + k2 + k3 obj = Horizons(id=valid_name, location=loc, epochs={ 'start': epoch_start, 'stop': epoch_stop, 'step': step_size }) if type_elements.lower() == 'vectors': data = obj.vectors() # vectorial elements len_rows = len(data) len_cols = 6 # 3 positions 'x','y','z', and 3 velocities 'vx', 'vy', 'vz' idx_x = 5 # 'x' is at position 5 in the table (starting from 0) adata = np.zeros([len_rows, len_cols]) for row in range(len_rows): for col in range(6): idx_col_in_table = idx_x + col # because the 'x' start at 6th col, going up till the 12th that is 'vz' adata[row, col] = data[row][idx_col_in_table] elif type_elements.lower() == 'elements': # refsystem = 'J2000', # Element reference system for geometric and astrometric quantities # refplane = 'ecliptic' #ecliptic and mean equinox of reference epoch data = obj.elements(refsystem='J2000', refplane='ecliptic') len_rows = len(data) len_cols = 6 # (a e i OM om theta) adata = np.zeros([len_rows, len_cols]) for row in range(len_rows): adata[row, 0] = data[row][14] # 15th column of data -> semimajor axis adata[row, 1] = data[row][5] # 6th column of data -> eccentricity adata[row, 2] = data[row][7] # 8th column of data -> inclination adata[row, 3] = data[row][8] # 9th column of data -> RAAN, (OMEGA) adata[row, 4] = data[row][ 9] # 10th column of data -> argument of perigee, (omega) adata[row, 5] = data[row][ 13] # 14th column of data -> True anomaly, (theta) return adata
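# Hedged usage sketch for get_horizons_ephemerides above: with
# type_elements='vectors', the returned array has one row per epoch and the six
# columns x, y, z, vx, vy, vz in the units JPL Horizons uses for vector tables
# (au and au/day). The epoch range below is illustrative.
states = get_horizons_ephemerides(name='2012QD8',
                                  pov='sun',
                                  epoch_start='2021-01-01',
                                  epoch_stop='2021-01-10',
                                  step_size='1d',
                                  type_elements='vectors')
print(states.shape)   # (N, 6)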
def from_horizons(cls, targetids, id_type='smallbody', epochs=None, location='500', **kwargs): """Load target ephemerides from `JPL Horizons <https://ssd.jpl.nasa.gov/horizons.cgi>`_ using `astroquery.jplhorizons.HorizonsClass.ephemerides` Parameters ---------- targetids : str or iterable of str Target identifier, i.e., a number, name, designation, or JPL Horizons record number, for one or more targets. id_type : str, optional The nature of ``targetids`` provided; possible values are ``'smallbody'`` (asteroid or comet), ``'majorbody'`` (planet or satellite), ``'designation'`` (asteroid or comet designation), ``'name'`` (asteroid or comet name), ``'asteroid_name'``, ``'comet_name'``, ``'id'`` (Horizons id). Default: ``'smallbody'`` epochs : `~astropy.time.Time` object, or dictionary, optional Epochs of elements to be queried; `~astropy.time.Time` objects support single and multiple epochs; a dictionary including keywords ``start`` and ``stop``, as well as either ``step`` or ``number``, can be used to generate a range of epochs. ``start`` and ``stop`` have to be `~astropy.time.Time` objects (see :ref:`epochs`). If ``step`` is provided, a range of epochs will be queries starting at ``start`` and ending at ``stop`` in steps of ``step``; ``step`` has to be provided as a `~astropy.units.Quantity` object with integer value and a unit of either minutes, hours, days, or years. If ``number`` is provided as an integer, the interval defined by ``start`` and ``stop`` is split into ``number`` equidistant intervals. If ``None`` is provided, current date and time are used. All epochs should be provided in UTC; if not, they will be converted to UTC and a `~sbpy.data.TimeScaleWarning` will be raised. Default: ``None`` location : str or `~astropy.coordinates.EarthLocation`, optional Location of the observer using IAU observatory codes (see `IAU observatory codes <https://www.minorplanetcenter.net/iau/lists/ObsCodesF.html>`__) or as `~astropy.coordinates.EarthLocation`. Default: ``'500'`` (geocentric) **kwargs : optional Arguments that will be provided to `astroquery.jplhorizons.HorizonsClass.ephemerides`. Notes ----- * For detailed explanations of the queried fields, refer to `astroquery.jplhorizons.HorizonsClass.ephemerides` and the `JPL Horizons documentation <https://ssd.jpl.nasa.gov/?horizons_doc>`_. * By default, all properties are provided in the J2000.0 reference system. Different settings can be chosen using additional keyword arguments as used by `astroquery.jplhorizons.HorizonsClass.ephemerides`. Returns ------- `~Ephem` object The resulting object will be populated with columns as defined in `~astroquery.jplhorizons.HorizonsClass.ephemerides`; refer to that document on information on how to modify the list of queried parameters. Examples -------- >>> from sbpy.data import Ephem >>> from astropy.time import Time >>> epoch = Time('2018-05-14', scale='utc') >>> eph = Ephem.from_horizons('ceres', epochs=epoch) # doctest: +SKIP """ # modify epoch input to make it work with astroquery.jplhorizons # maybe this stuff should really go into that module.... 
    _epochs = None  # avoid modifying epochs in-place
    if epochs is None:
        _epochs = [Time.now().utc.jd]
    elif isinstance(epochs, Time):
        if epochs.scale != 'utc':
            warn(('converting {} epochs to utc for use in '
                  'astroquery.jplhorizons').format(epochs.scale),
                 TimeScaleWarning)
        _epochs = epochs.utc.jd
    elif isinstance(epochs, dict):
        _epochs = epochs.copy()
        if 'start' in _epochs and 'stop' in _epochs and 'number' in epochs:
            _epochs['step'] = _epochs['number'] * u.dimensionless_unscaled
        # convert to utc and iso for astroquery.jplhorizons
        _epochs['start'] = _epochs['start'].utc.iso
        _epochs['stop'] = _epochs['stop'].utc.iso
        if 'step' in _epochs:
            if _epochs['step'].unit is not u.dimensionless_unscaled:
                _epochs['step'] = '{:d}{:s}'.format(
                    int(_epochs['step'].value),
                    {u.minute: 'm', u.hour: 'h', u.d: 'd',
                     u.year: 'y'}[_epochs['step'].unit])
            else:
                _epochs['step'] = '{:d}'.format(
                    int(_epochs['step'].value - 1))
    else:
        raise ValueError('Invalid `epochs` parameter')

    # if targetids is a list, run separate Horizons queries and append
    if not isinstance(targetids, (list, ndarray, tuple)):
        targetids = [targetids]

    # turn EarthLocation into dictionary of strings as used by
    # astroquery.jplhorizons
    if isinstance(location, EarthLocation):
        location = {'lon': location.lon.deg,
                    'lat': location.lat.deg,
                    'elevation': location.height.to('km')}

    # append ephemerides table for each targetid
    all_eph = None
    for targetid in targetids:

        # load ephemerides using astroquery.jplhorizons
        obj = Horizons(id=targetid, id_type=id_type,
                       location=location, epochs=_epochs)
        try:
            eph = obj.ephemerides(**kwargs)
        except ValueError as e:
            raise QueryError(
                ('Error raised by astroquery.jplhorizons: {:s}\n'
                 'The following query was attempted: {:s}').format(
                     str(e), obj.uri))

        # workaround for current version of astroquery to make
        # column units compatible with astropy.table.QTable
        # should really change '---' units to None in
        # astroquery.jplhorizons.__init__.py
        for column_name in eph.columns:
            if eph[column_name].unit == '---':
                eph[column_name].unit = None

        # workaround for astroquery 0.3.9.dev5056 and earlier,
        # Horizons column named RA_rate always includes the
        # cos(Dec) term:
        if 'RA_rate' in eph.colnames:
            eph['RA_rate'].name = 'RA*cos(Dec)_rate'

        if all_eph is None:
            all_eph = eph
        else:
            all_eph = vstack([all_eph, eph])

    # turn epochs into astropy.time.Time and apply timescale
    # convert ut1 epochs to utc
    # https://ssd.jpl.nasa.gov/?horizons_doc
    if any(all_eph['datetime_jd'] < 2437665.5):
        all_eph['datetime_jd'][all_eph['datetime_jd'] < 2437665.5] = Time(
            all_eph['datetime_jd'][all_eph['datetime_jd'] < 2437665.5],
            scale='ut1', format='jd').utc.jd
    all_eph['epoch'] = Time(all_eph['datetime_jd'], format='jd',
                            scale='utc')
    if 'siderealtime' in all_eph.colnames:
        all_eph['siderealtime'].unit = u.Unit('hour')

    all_eph.remove_column('datetime_jd')
    all_eph.remove_column('datetime_str')

    return cls.from_table(all_eph)
# ********************************************************************************************************** #
# *                       FIND THE NEARBY STARS AND THEIR PIXEL COORDINATES                                 * #
# ********************************************************************************************************** #
'''
Using the Gaia catalog, we find the nearby stars and convert their (ra,dec) coordinates to (x,y)
(i.e., pixel coordinates). Due to the non-sidereal tracking, the (x,y) of nearby stars at the exposure
start point and end point are different. So, we obtain the (x,y) of nearby stars for both the exposure
start point and end point. As a result, in one image, we have 4 pixel coordinates for one background
star, that is

(x_star_str_o, y_star_str_o) : Pixel coordinate of a nearby star in the ordinary component when the exposure starts
(x_star_str_e, y_star_str_e) : Pixel coordinate of a nearby star in the extra-ordinary component when the exposure starts
(x_end_str_o, y_end_str_o)   : Pixel coordinate of a nearby star in the ordinary component when the exposure ends
(x_end_str_e, y_end_str_e)   : Pixel coordinate of a nearby star in the extra-ordinary component when the exposure ends
'''

# Bring the (ra,dec) of 2005 UD when the exposure starts
obj = Horizons(id=155140, location='Z23', epochs=EXP_str)
eph = obj.ephemerides()
ra_str, dec_str = eph['RA'][0], eph['DEC'][0]

# Find the background stars near 2005 UD using Gaia
coord = SkyCoord(ra=ra_str, dec=dec_str,
                 unit=(u.degree, u.degree), frame='icrs')
width = u.Quantity(0.04, u.deg)   # Nearby stars within 0.04 deg of the target
height = u.Quantity(0.04, u.deg)
r = Gaia.query_object_async(coordinate=coord, width=width, height=height)

# (ra,dec) of nearby stars
RA_star = []
def autotarget(self):
    '''
    Query JPL Horizons for the position of the target and match to a source
    '''
    # Execute query and retrieve position information
    eph = Horizons(id=self.object, epochs=self.time).ephemerides()
    key = ['V', 'Tmag', 'Nmag']
    if 'V' in eph.keys():
        ra, dec, raerr, decerr, mag = (eph['RA'][0], eph['DEC'][0],
                                       eph['RA_3sigma'][0], eph['DEC_3sigma'][0],
                                       eph['V'][0])
    elif 'Tmag' in eph.keys():
        if np.isfinite(eph['Tmag'][0]):
            ra, dec, raerr, decerr, mag = (eph['RA'][0], eph['DEC'][0],
                                           eph['RA_3sigma'][0], eph['DEC_3sigma'][0],
                                           eph['Tmag'][0])
        elif np.isfinite(eph['Nmag'][0]):
            ra, dec, raerr, decerr, mag = (eph['RA'][0], eph['DEC'][0],
                                           eph['RA_3sigma'][0], eph['DEC_3sigma'][0],
                                           eph['Nmag'][0])
    else:
        print("No magnitude information found!")
        pdb.set_trace()

    # Find matching source
    self.error = 1
    sources = self.sources
    if self.flipxy:
        sourcera = self.calc_points[:, 1]
        sourcedec = self.calc_points[:, 0]
    else:
        sourcera = self.calc_points[:, 0]
        sourcedec = self.calc_points[:, 1]
    seeing = np.median(self.matches[:, 4])
    seeingvar = np.std(self.matches[:, 4])
    if self.zp == 0:
        self.found = False
        return
    else:
        sourcemag = -2.5*np.log10(sources.flux) + self.zp
    w = np.where((abs(sourcera - ra)/arcsectodeg < np.sqrt(raerr**2 + (self.error)**2)) &
                 (abs(sourcedec - dec)/arcsectodeg < np.sqrt(decerr**2 + (self.error)**2)) &
                 (abs(sourcemag - mag) < 3))[0]
    if len(w) == 1:
        print("Target found!")
        self.found = True
        self.x = sources.x[w[0]]
        self.y = sources.y[w[0]]
        self.flux = sources.flux[w[0]]
        self.fluxerr = sources.fluxerr[w[0]]
        self.fwhm = sources.fwhm[w[0]]
        self.mag = -2.5*np.log10(self.flux) + self.zp
        self.magerr = np.sqrt(self.zperr**2 + (2.5*self.fluxerr/self.flux/np.log(10))**2)
        print(self.x, self.y)
    elif len(w) > 1:
        print("WARNING: multiple possible matches found!")
        fwhm = sources.fwhm[w]
        w1 = np.where((fwhm > seeing - seeingvar) & (fwhm < seeing + seeingvar))[0]
        ## plt.scatter(sources.x[w[w1]], sources.y[w[w1]])
        ## plt.show()
        if len(w1) == 1:
            print("Target found!")
            self.found = True
            self.x = sources.x[w[w1[0]]]
            self.y = sources.y[w[w1[0]]]
            self.flux = sources.flux[w[w1[0]]]
            self.fluxerr = sources.fluxerr[w[w1[0]]]
            self.fwhm = sources.fwhm[w[w1[0]]]
            self.mag = -2.5*np.log10(self.flux) + self.zp
            self.magerr = np.sqrt(self.zperr**2 + (2.5*self.fluxerr/self.flux/np.log(10))**2)
            print(self.x, self.y)
        else:
            print("FAILURE: Possible matches rejected!")
            self.found = False
    else:
        self.found = False
        print("FAILURE: No possible matches found!")
def getObserverInterpolant(tmin, tmax, origin='SSB', observer_location='I11',
                           ephemeris_dt='1h', frame='ecliptic'):
    """Produce sun-observer state vectors at observation epochs.

    Parameters:
    -----------
    tmin, tmax        ... float, float, start and stop time for interpolant [MJD]
    origin            ... str, origin of the coordinate system (e.g. 'Sun' or
                          'SSB' = Solar System Barycenter)
    observer_location ... str, Horizons identifier of observer location, e.g. 'I11'
    ephemeris_dt      ... float, Time step for ephemeris query. Typically 1h since
                          the actual times will be interpolated later.
    frame             ... str, Coordinate system reference frame: 'ecliptic' or 'ICRF'

    Returns:
    --------
    ipos, ivel ... scipy interpolants [au], [au/day]
                   Heliocentric observer position and velocity interpolants
                   as a function of time [MJD].

    External Function Requirements:
    -------------------------------
    # Interpolation
    import scipy.interpolate as spi

    # time transform
    mjd2jd ... change modified Julian date to Julian date, timescale TDB

    # NASA JPL HORIZONS API call wrapper
    observerStatesFromHorizons ... Wrapper function for JPL Horizons state query via astropy
    """
    tminjd = tr.mjd2jd(tmin)
    tmaxjd = tr.mjd2jd(tmax)

    # Start and stop times of the survey
    tstart = 'JD' + str(tminjd - 1.)
    tstop = 'JD' + str(tmaxjd + 1.)

    try:
        # Get observer locations (caution: choose the right plane of reference
        # and direction of the vectors!)
        # Check the query by copy/pasting the output of print(observer_origin.uri)
        # into a web browser if there are problems.
        if (origin == 'SSB' or origin == '@0'):
            observer_origin = Horizons(id='Sun',
                                       location=observer_location,
                                       id_type='majorbody',
                                       epochs={'start': tstart,
                                               'stop': tstop,
                                               'step': ephemeris_dt})
            origin_barycenter = Horizons(id='Sun',
                                         location='@0',
                                         id_type='majorbody',
                                         epochs={'start': tstart,
                                                 'stop': tstop,
                                                 'step': ephemeris_dt})
            if (frame == 'ecliptic'):
                oo = observer_origin.vectors(refplane='ecliptic')
                ob = origin_barycenter.vectors(refplane='ecliptic')
            elif (frame == 'ICRF' or frame == 'J2000'
                  or frame == 'earth' or frame == 'icrf'):
                oo = observer_origin.vectors(refplane='earth')
                ob = origin_barycenter.vectors(refplane='earth')
            else:
                raise Exception('Error: requested frame unknown.')

            observer_xyz = (-1) * (
                np.array([oo['x'], oo['y'], oo['z']]).astype('float') +
                np.array([ob['x'], ob['y'], ob['z']]).astype('float'))
            observer_vxyz = (-1) * (
                np.array([oo['vx'], oo['vy'], oo['vz']]).astype('float') +
                np.array([ob['vx'], ob['vy'], ob['vz']]).astype('float'))
            observer_jd = np.array(oo['datetime_jd']).astype('float')
        else:
            observer_origin = Horizons(id=origin,
                                       location=observer_location,
                                       id_type='majorbody',
                                       epochs={'start': tstart,
                                               'stop': tstop,
                                               'step': ephemeris_dt})
            if (frame == 'ecliptic'):
                obs = observer_origin.vectors(refplane='ecliptic')
            elif (frame == 'ICRF' or frame == 'J2000'
                  or frame == 'earth' or frame == 'icrf'):
                obs = observer_origin.vectors(refplane='earth')
            else:
                raise Exception('Error: requested frame unknown.')

            # We need the sun-observer vector, not the observer-sun vector
            observer_xyz = (-1) * np.array([obs['x'], obs['y'],
                                            obs['z']]).astype('float')
            observer_vxyz = (-1) * np.array([obs['vx'], obs['vy'],
                                             obs['vz']]).astype('float')
            observer_jd = np.array(obs['datetime_jd']).astype('float')

    except Exception:
        print("Error: potential online ephemeris query failure. "
              "Make sure internet connectivity is available.")
        raise

    observer_mjd = tr.jd2mjd(observer_jd)

    # Interpolate heliocentric observer positions to the actual observation epochs
    ipos = spi.CubicHermiteSpline(observer_mjd, observer_xyz, observer_vxyz,
                                  axis=1, extrapolate=None)

    # Interpolate heliocentric observer velocities to the actual observation epochs
    ivel = spi.CubicSpline(observer_mjd, observer_vxyz, axis=1, extrapolate=None)

    return ipos, ivel
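# Hedged usage sketch for getObserverInterpolant above. As in the function body,
# it assumes the project-specific time-transform module 'tr', numpy as np, and
# scipy.interpolate as spi are importable, plus network access for Horizons.
ipos, ivel = getObserverInterpolant(59000., 59010.,
                                    origin='SSB',
                                    observer_location='I11',
                                    ephemeris_dt='1h',
                                    frame='ecliptic')
t_eval = np.linspace(59001., 59009., 5)         # observation epochs [MJD]
print(ipos(t_eval).shape, ivel(t_eval).shape)   # (3, 5), (3, 5)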
#print(sys.path) fp_cfg = '/'.join([args.cfg_path, args.cfg_file]) print(fp_cfg) if not os.path.isfile(fp_cfg) == True: print('ERROR: Invalid Configuration File: {:s}'.format(fp_cfg)) sys.exit() print('Importing configuration File: {:s}'.format(fp_cfg)) with open(fp_cfg, 'r') as json_data: cfg = json.load(json_data) json_data.close() print(cfg) #set up data path #create query object obj = Horizons(id=cfg['body']['name'], location=cfg['body']['origin_center'], id_type=cfg['body']['id_type'], epochs=cfg['body']['epoch']) if cfg['body']['type'] == "vectors": vec = obj.vectors() #print(vec) #print(vec.columns) keys = ['datetime_jd', 'x', 'y', 'z', 'vx', 'vy', 'vz'] pos_keys = ['x', 'y', 'z'] vel_keys = ['vx', 'vy', 'vz'] t_key = 'datetime_jd' #print(np.array(vec[keys])) df = pd.DataFrame(np.array(vec[keys]), columns=keys) #print(df) #Export Data
    system.ruth3(stepsize)
    focus_pos.append(np.copy(body_list[BodyID].pos))
    earth_data.append(np.copy(body_list[2].pos))
    xval.append(system.time / 365)

focus_data = np.stack(focus_pos, axis=0)
earth_data = np.stack(earth_data, axis=0)
#print(mars_data)

# get JPL Horizons reference data for body 301 (the Moon)
body = Horizons(id="301",
                location='@ssb',
                epochs={'start': '1970-1-1',
                        'stop': '2000-1-1',
                        'step': '1d'},
                id_type="majorbody")
vecs = body.vectors()
jpldat = vecs["x", "y", "z"]
jpl_pos = np.array(jpldat).view((float, len(jpldat.dtype.names)))

print("Dat{}, jpl:{}".format(focus_data.shape[0], jpl_pos.shape[0]))
min_len = min(jpl_pos.shape[0], focus_data.shape[0])
jpl_pos = jpl_pos[:min_len]
focus_data = focus_data[:min_len]
earth_data = earth_data[:min_len]
start = Time.now()
start.format = "jd"

res = options.stepsize.split()
# convert the step size to seconds
resolution_secs = float(res[0]) * {"second": 1, "minute": 60,
                                   "hour": 3600, "day": 86400}[res[1]]
dt = TimeDelta(resolution_secs, format="sec")
times = [start + dt*i for i in range(options.duration)]
jdtimes = [t.value for t in times]

print("Going out to Horizons...")
query = Horizons(id=options.targetid, location=options.loc,
                 epochs=jdtimes, id_type=options.targettype)
eph = query.ephemerides()
print("Finished.")

pointingdata = []
for i in eph:
    invrange = 0.0  # FIXME calc this -- what units?
    mjdtime = Time(i['datetime_jd'], format="jd")
    mjdtime.format = "mjd"
    pointingdata.append("{:.0f} {:.05f} {:.05f} {:e}".format(
        mjdtime.value*1e9, i['AZ'], i['EL'], invrange))

if len(args) > 0:
    filename = args[0]
    f = open(filename, 'w')
    for line in pointingdata:
    'Marte: 11 missioni', 'Parker Solar Probe', 'Solar Orbiter', 'Hayabusa 2',
    'BepiColombo', 'Osiris-REx', 'Stereo A', 'altro', 'altro', 'altro'
]]

names = [
    'Mercurio', 'Venere: Akatsuki', 'Luna,L1,L2: 12 missioni',
    'Marte: 11 missioni', 'Parker Solar Probe', 'Solar Orbiter', 'Hayabusa 2',
    'BepiColombo', 'Osiris-REx', 'Stereo A', 'altro', 'altro', 'altro'
]

# am I still using this?
#texty = [.47, .73, 1, 1.5]

#for i, nasaid in enumerate(
#        [1, 2, 3, 4, -96, -144, -37, -121, -64,
#         -234]):  # The 1st, 2nd, 3rd, 4th planet in solar system
for i, nasaid in enumerate(corpo[0]):
    obj = Horizons(id=nasaid, location="@sun", epochs=ss.time,
                   id_type='id').vectors()
    ss.add_planet(
        Object(corpo[1][i], 20 * sizes, colors[i],
               [np.double(obj[xi]) for xi in ['x', 'y', 'z']],
               [np.double(obj[vxi]) for vxi in ['vx', 'vy', 'vz']]))


def animate(i):
    return ss.evolve()


ani = animation.FuncAnimation(
    fig,
    animate,
    repeat=False,
    frames=sim_duration,
def get_horizons_ephemerides_elements(name, pov, epoch_start): # step: step size, [10m, 1d, 1y] if pov.lower() == 'sun': loc = '500@10' # position relative to the sun elif pov.lower() == 'goldstone': loc = '257' # from goldstone elif pov.lower() == 'maunakea': loc = '568' # maunakea else: print('Not Valid Location Point Of View') # Process to get homogeneity from main script full name '2012QD8' to a valid name for Horizon call '2012 QD8' if len( re.findall('([0-9])', name) ) <= 4: # 4 is the min numbers in every name, the date year of discovery r = re.compile("([0-9]+)([a-zA-Z]+)").match(name) k1 = r.group(1) # the date of the name k2 = r.group(2) # the code of the date valid_name = k1 + " " + k2 else: r = re.compile("([0-9]+)([a-zA-Z]+)([0-9]+)").match(name) k1 = r.group(1) # the date of the name k2 = r.group(2) # the code of the date k3 = r.group(3) # id after the letters valid_name = k1 + " " + k2 + k3 # always a day after the input, anyway you consider the moment of input, the first of the data output extracted epoch_start chunks = epoch_start.split('-') chunks2 = int(chunks[2]) + 1 # add 1 day list_string = [chunks[0], chunks[1], str(chunks2)] epoch_stop = '-'.join(list_string) step_size = '1d' obj = Horizons(id=valid_name, location=loc, epochs={ 'start': epoch_start, 'stop': epoch_stop, 'step': step_size }) # refsystem = 'J2000', # Element reference system for geometric and astrometric quantities # refplane = 'ecliptic' #ecliptic and mean equinox of reference epoch data = obj.elements(refsystem='J2000', refplane='ecliptic') len_cols = 7 # jd,ec,qr,tp,incl,OM,om adata = np.zeros([1, len_cols]) # always assign the first row of output data -> the first date required! #for row in range(len_rows): adata[0, 0] = data[0][5] # 6th column of data -> e, eccentricity (-) adata[0, 1] = data[0][6] # 7th column of data -> qr, periapsis distance (AU) adata[0, 2] = data[0][10] # 11th column of data -> tp, time of periapsis (JD) adata[0, 3] = data[0][7] # 8th column of data -> incl, inclination (deg) adata[0, 4] = data[0][ 8] # 10th column of data -> OM, longitude of Asc. Node (deg) adata[0, 5] = data[0][ 9] # 11th column of data -> om, argument of periapsis (deg) adata[0, 6] = data[0][ 1] # 2nd column of the data extracted -> jd of evaluation return adata
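# Hedged follow-up sketch: the single row returned above holds
# (ec, qr, tp, incl, OM, om, jd). If the semi-major axis is needed as well, it
# can be recovered from the periapsis distance and eccentricity of an elliptical
# orbit as a = qr / (1 - ec). The target name is illustrative.
row = get_horizons_ephemerides_elements('2012QD8', 'sun', '2021-01-01')
ec, qr = row[0, 0], row[0, 1]
a_au = qr / (1.0 - ec)   # semi-major axis in au (valid for ec < 1)
print(a_au)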
#!/usr/bin/env python
"""
Fetch planet positions and velocities from JPL Horizons and store them in a
JSON file.
"""
import numpy as np
import json
from astropy.time import Time
from astroquery.jplhorizons import Horizons

sim_start_date = "2018-01-01"  # simulate a solar system starting from this date
names = ['Mercury', 'Venus', 'Earth-Moon', 'Earth-Moon', 'Mars']
sizes = [0.38, 0.95, 1., 0.27, 0.53]
nasaids = [1, 2, 399, 301, 4]  # the 1st, 2nd, 3rd (Earth 399 and Moon 301), 4th bodies

data = dict(info="Solar planets database, including positions and velocities "
                 "at the given date",
            date=sim_start_date)
for i, nasaid in enumerate(nasaids):
    obj = Horizons(id=nasaid, location="@sun",
                   epochs=Time(sim_start_date).jd, id_type='id').vectors()
    data[str(nasaid)] = {
        "name": names[i],
        "size": sizes[i],
        "r": [np.double(obj[xi]) for xi in ['x', 'y', 'z']],
        "v": [np.double(obj[vxi]) for vxi in ['vx', 'vy', 'vz']]
    }

with open("planets.json", 'w') as f:
    json.dump(data, f, indent=4)
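A short sketch of reading the file back, assuming the JSON layout written by the script above; none of this is part of the original script:

import json
import numpy as np

with open("planets.json") as f:
    planets = json.load(f)

for key, body in planets.items():
    if key in ("info", "date"):          # metadata entries, not bodies
        continue
    r = np.array(body["r"])              # heliocentric position, AU
    v = np.array(body["v"])              # heliocentric velocity, AU/day
    print("{:<12s} |r| = {:.3f} AU".format(body["name"], np.linalg.norm(r)))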
# Read the header info ====================================================
iNR_STR = header['INR-STR']
iNR_END = header['INR-END']
INST_pa = header['INST-PA']
JD = np.mean([header['MJD-STR'], header['MJD-END']])
JD = JD + 2400000.5  # MJD -> JD
epoch = Time(JD, format='jd').isot
RN = header['RDNOISE']      # [electron]
gain = header['GAIN']       # [electron/DN]
Filter = header['FILTER']
exp = header['EXPTIME']     # in sec

# ************************************************************************* #
# *          Retrieve the observer quantities from JPL Horizons          * #
# ************************************************************************* #
obj = Horizons(id=OBJECT, location=Observatory, epochs=JD)
eph = obj.ephemerides()
psANG = eph['sunTargetPA'][0]  # [deg]
pA = eph['alpha'][0]           # [deg]

# ************************************************************************* #
# *              FIND THE CENTER COORDINATE OF 2005 UD                   * #
# ************************************************************************* #
# Here, we use the phot package in IRAF to find the center of the target.
# If you find the center of the target by other methods, please change this part.

# Read the center
mag1 = ascii.read(image_i + '.mag.1')
xo, yo = (mag1['XCENTER'][0] - 1, mag1['YCENTER'][0] - 1)
# pixel coordinate (x, y) of the target's center in the ordinary component
def combine(filenames, obsparam, comoving, targetname,
            manual_rates, combine_method, keep_files,
            backsub=False, display=True, diagnostics=True):
    """
    image combination wrapper
    output: diagnostic properties
    """

    # start logging
    logging.info('starting image combination with parameters: %s' %
                 (', '.join([('%s: %s' % (var, str(val))) for
                             var, val in list(locals().items())])))

    # check if images have been run through pp_prepare
    try:
        midtime_jd = fits.open(filenames[0], verify='silentfix',
                               ignore_missing_end=True)[0].header['MIDTIMJD']
    except KeyError:
        raise KeyError(('%s image header incomplete, have the data run '
                        'through pp_prepare?') % filenames[0])

    # adopt first frame as reference frame
    hdulist = fits.open(filenames[0])
    header = hdulist[0].header
    refdate = float(header['MIDTIMJD'])

    # read out ra and dec from header
    if obsparam['radec_separator'] == 'XXX':
        ref_ra_deg = float(header[obsparam['ra']])
        ref_dec_deg = float(header[obsparam['dec']])
        if obsparam['telescope_keyword'] == 'UKIRTWFCAM':
            ref_ra_deg = ref_ra_deg/24.*360. - 795/3600.
            ref_dec_deg -= 795/3600.
    else:
        ra_string = header[obsparam['ra']].split(
            obsparam['radec_separator'])
        dec_string = header[obsparam['dec']].split(
            obsparam['radec_separator'])
        ref_ra_deg = 15.*(float(ra_string[0]) +
                          old_div(float(ra_string[1]), 60.) +
                          old_div(float(ra_string[2]), 3600.))
        ref_dec_deg = (abs(float(dec_string[0])) +
                       old_div(float(dec_string[1]), 60.) +
                       old_div(float(dec_string[2]), 3600.))
        if dec_string[0].find('-') > -1:
            ref_dec_deg = -1 * ref_dec_deg
        if obsparam['telescope_keyword'] == 'UKIRTWFCAM':
            ref_ra_deg = ref_ra_deg/24.*360.

    if obsparam['telescope_keyword'] == "UKIRTWFCAM":
        ref_ra_deg -= float(header['TRAOFF'])/3600
        ref_dec_deg -= float(header['TDECOFF'])/3600

    hdulist.close()

    # modify individual frames if comoving == True
    if comoving:
        movingfilenames = []

        # sort filenames by MIDTIMJD
        mjds = []
        for filename in filenames:
            hdulist = fits.open(filename)
            mjds.append(float(hdulist[0].header['MIDTIMJD']))
        filenames = [filenames[i] for i in numpy.argsort(mjds)]

        for filename in filenames:
            movingfilename = filename[:filename.find('.fits')]+'_moving.fits'
            print('shifting %s -> %s' % (filename, movingfilename))
            logging.info('shifting %s -> %s' % (filename, movingfilename))

            # read out date and pointing information
            hdulist = fits.open(filename)
            header = hdulist[0].header
            date = hdulist[0].header['MIDTIMJD']
            data = hdulist[0].data
            hdulist.close()

            # use ephemerides from Horizons if no manual rates are provided
            if manual_rates is None:
                # call HORIZONS to get target coordinates
                obj = Horizons(targetname.replace('_', ' '), epochs=date,
                               location=str(obsparam['observatory_code']))
                try:
                    eph = obj.ephemerides()
                    n = len(eph)
                except ValueError:
                    print('Target (%s) not an asteroid' % targetname)
                    logging.warning('Target (%s) not an asteroid' % targetname)
                    n = None
                    time.sleep(0.5)
                if n is None or n == 0:
                    logging.warning('WARNING: No position from Horizons! '
                                    'Name (%s) correct?' % targetname)
                    logging.warning('HORIZONS call: %s' % obj.uri)
                    raise ValueError('no Horizons ephemerides available')
                else:
                    logging.info('ephemerides for %s pulled from Horizons' %
                                 targetname)
                    logging.info('Horizons call: %s' % obj.uri)
                    target_ra, target_dec = eph[0]['RA'], eph[0]['DEC']

                # get image pointing from header
                if obsparam['radec_separator'] == 'XXX':
                    ra_deg = float(header[obsparam['ra']])
                    dec_deg = float(header[obsparam['dec']])
                    if obsparam['telescope_keyword'] == 'UKIRTWFCAM':
                        ra_deg = ra_deg/24.*360. - 795/3600.
                        dec_deg -= 795/3600.
                else:
                    ra_string = header[obsparam['ra']].split(
                        obsparam['radec_separator'])
                    dec_string = header[obsparam['dec']].split(
                        obsparam['radec_separator'])
                    ra_deg = 15.*(float(ra_string[0]) +
                                  old_div(float(ra_string[1]), 60.) +
                                  old_div(float(ra_string[2]), 3600.))
                    dec_deg = (abs(float(dec_string[0])) +
                               old_div(float(dec_string[1]), 60.) +
                               old_div(float(dec_string[2]), 3600.))
                    if dec_string[0].find('-') > -1:
                        dec_deg = -1 * dec_deg

                if filename == filenames[0]:
                    ref_offset_ra = target_ra - ref_ra_deg
                    ref_offset_dec = target_dec - ref_dec_deg

                offset_ra = target_ra - ref_ra_deg - ref_offset_ra
                offset_dec = target_dec - ref_dec_deg - ref_offset_dec

            else:
                # use manual rates (since they are provided)
                offset_ra = ((float(header['MIDTIMJD'])-refdate)*86400 *
                             float(manual_rates[0]))/3600
                offset_dec = ((float(header['MIDTIMJD'])-refdate)*86400 *
                              float(manual_rates[1]))/3600

            logging.info('offsets in RA and Dec: %f, %f arcsec' %
                         (offset_ra*3600, offset_dec*3600))

            crval1 = float(header['CRVAL1'])
            crval2 = float(header['CRVAL2'])

            # write new CRVALi keywords in different file
            new_hdu = fits.PrimaryHDU(data)
            new_hdu.header = header
            new_hdu.header['CRVAL1'] = (crval1-offset_ra,
                                        'updated in the moving frame of the object')
            new_hdu.header['CRVAL2'] = (crval2-offset_dec,
                                        'updated in the moving frame of the object')
            movingfilenames.append(movingfilename)
            new_hdu.writeto(movingfilename, overwrite=True,
                            output_verify='silentfix')

    if comoving:
        outfile_name = 'comove.fits'
        fileline = " ".join(movingfilenames)
        n_frames = len(movingfilenames)
    else:
        outfile_name = 'skycoadd.fits'
        fileline = " ".join(filenames)
        n_frames = len(filenames)

    # run swarp on all image catalogs using different catalogs
    commandline = (('swarp -combine Y -combine_type %s -delete_tmpfiles '
                    'Y -imageout_name %s -interpolate Y -subtract_back %s '
                    '-weight_type NONE -copy_keywords %s -write_xml N '
                    '-CENTER_TYPE MOST %s') %
                   ({'median': 'MEDIAN', 'average': 'AVERAGE',
                     'clipped': 'CLIPPED -CLIP_AMPFRAC 0.2 -CLIP_SIGMA 0.1 '}
                    [combine_method], outfile_name,
                    {True: 'Y', False: 'N'}[backsub],
                    obsparam['copy_keywords'], fileline))

    logging.info('call SWARP as: %s' % commandline)
    print('running SWARP to combine {:d} frames...'.format(n_frames))

    try:
        swarp = subprocess.Popen(shlex.split(commandline),
                                 stdout=DEVNULL,
                                 stderr=DEVNULL,
                                 close_fds=True)
        # do not direct stdout to subprocess.PIPE:
        # for large FITS files, PIPE will clog, stalling
        # subprocess.Popen
    except Exception as e:
        print('SWARP call:', e)
        logging.error('SWARP call: %s' % e)
        return None

    swarp.wait()
    print('done!')

    # remove files that are not needed anymore
    if not keep_files:
        if comoving:
            for filename in movingfilenames:
                os.remove(filename)

    # update combined image header
    total_exptime = 0
    for filename in filenames:
        hdulist = fits.open(filename)
        total_exptime += float(hdulist[0].header[obsparam['exptime']])

    hdulist = fits.open(outfile_name, mode='update')
    hdulist[0].header[obsparam['exptime']] = (total_exptime, 'PP: cumulative')
    hdulist[0].header['COMBO_N'] = (len(filenames), 'PP: N files combo')
    hdulist[0].header['COMBO_M'] = (combine_method, 'PP: combo method')
    hdulist[0].header['COMOVE'] = (str(comoving), 'PP: comoving?')
    hdulist.flush()

    return n_frames
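A hedged usage sketch for the wrapper above. The `obsparam` dictionary is normally supplied by the surrounding pipeline; every key, file name, and value shown here is a placeholder, not something taken from the source:

# Illustrative call: co-moving stack of three prepared frames on an asteroid.
frames = ['frame001.fits', 'frame002.fits', 'frame003.fits']   # placeholder names
obsparam = {
    'radec_separator': 'XXX',       # RA/Dec stored as decimal degrees
    'ra': 'RA', 'dec': 'DEC',
    'telescope_keyword': 'GENERIC',
    'observatory_code': '500',      # geocenter, used for the Horizons query
    'copy_keywords': 'OBJECT',
    'exptime': 'EXPTIME',
}
n = combine(frames, obsparam, comoving=True, targetname='3552',
            manual_rates=None, combine_method='median', keep_files=False)
print('combined {} frames'.format(n))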
def from_horizons(cls, targetids, id_type='smallbody',
                  epochs=None, location='500', **kwargs):
    """Load target ephemerides from
    `JPL Horizons <https://ssd.jpl.nasa.gov/horizons.cgi>`_ using
    `astroquery.jplhorizons.HorizonsClass.ephemerides`

    Parameters
    ----------
    targetids : str or iterable of str
        Target identifier, i.e., a number, name, designation, or JPL
        Horizons record number, for one or more targets.
    id_type : str, optional
        The nature of ``targetids`` provided; possible values are
        ``'smallbody'`` (asteroid or comet), ``'majorbody'`` (planet or
        satellite), ``'designation'`` (asteroid or comet designation),
        ``'name'`` (asteroid or comet name), ``'asteroid_name'``,
        ``'comet_name'``, ``'id'`` (Horizons id).
        Default: ``'smallbody'``
    epochs : `~astropy.time.Time` object, or dictionary of
        `~astropy.time.Time` objects, optional
        Epochs of elements to be queried; `~astropy.time.Time` objects
        support iterables, so a single `~astropy.time.Time` object can
        hold a number of discrete epochs; a dictionary with the keywords
        ``start``, ``step``, and ``stop`` holding `~astropy.time.Time`
        objects can be used to generate a range of epochs (see
        `~astroquery.jplhorizons.HorizonsClass.ephemerides` for details);
        if ``None`` is provided, the current date and time are used.
        Default: ``None``
    location : str, optional, default ``'500'`` (geocentric)
        Location of the observer.
    **kwargs : optional
        Arguments that will be provided to
        `astroquery.jplhorizons.HorizonsClass.ephemerides`.

    Returns
    -------
    `~Ephem` object

    Examples
    --------
    >>> from sbpy.data import Ephem
    >>> from astropy.time import Time
    >>> epoch = Time('2018-05-14', scale='utc')
    >>> eph = Ephem.from_horizons('ceres', epochs=epoch)  # doctest: +SKIP
    """

    # modify epoch input to make it work with astroquery.jplhorizons
    # maybe this stuff should really go into that module....
    if epochs is None:
        epochs = [Time.now().jd]
    elif isinstance(epochs, Time):
        epochs = epochs.jd
        if isinstance(epochs, float):
            epochs = [epochs]
        else:
            # an array of Julian dates; turn it into a plain list
            epochs = list(epochs)
    elif isinstance(epochs, dict):
        for key, val in epochs.items():
            if isinstance(val, Time):
                val.format = 'iso'
                val.out_subfmt = 'date_hm'
                epochs[key] = val.value

    # if targetids is a list, run separate Horizons queries and append
    if not isinstance(targetids, (list, ndarray, tuple)):
        targetids = [targetids]

    # append ephemerides table for each targetid
    all_eph = None
    for targetid in targetids:

        # load ephemerides using astroquery.jplhorizons
        obj = Horizons(id=targetid, id_type=id_type,
                       location=location, epochs=epochs)
        try:
            eph = obj.ephemerides(**kwargs)
        except ValueError as e:
            raise RuntimeError(
                ('Error raised by astroquery.jplhorizons: {:s}\n'
                 'The following query was attempted: {:s}').format(
                     str(e), obj.uri))

        # workaround for current version of astroquery to make
        # column units compatible with astropy.table.QTable
        # should really change '---' units to None in
        # astroquery.jplhorizons.__init__.py
        for column_name in eph.columns:
            if eph[column_name].unit == '---':
                eph[column_name].unit = None

        # workaround for astroquery 0.3.9.dev5056 and earlier,
        # Horizons column named RA_rate always includes the
        # cos(Dec) term:
        if 'RA_rate' in eph.colnames:
            eph['RA_rate'].name = 'RA*cos(Dec)_rate'

        if all_eph is None:
            all_eph = eph
        else:
            all_eph = vstack([all_eph, eph])

    # identify time scales returned by the Horizons query,
    # according to the Horizons documentation
    timescales = array(['UTC'] * len(all_eph))
    timescales[all_eph['datetime_jd'] < 2437665.5] = 'UT1'
    all_eph.add_column(Column(timescales, name='timescale'))

    if bib.status() is None or bib.status():
        bib.register('sbpy.data.Ephem.from_horizons',
                     {'data service': '1996DPS....28.2504G'})

    return cls.from_table(all_eph)
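Since the docstring above also allows a ``start``/``stop``/``step`` dictionary for ``epochs``, here is a hedged sketch of that calling pattern; the target, dates, and observatory code are illustrative, and the query needs network access:

from astropy.time import Time
from sbpy.data import Ephem

# A range of epochs as a start/stop/step dictionary (values are placeholders)
epochs = {'start': Time('2018-05-01'),
          'stop': Time('2018-05-10'),
          'step': '1d'}
eph = Ephem.from_horizons('Ceres', epochs=epochs, location='568')  # Mauna Kea
print(eph['RA'][:3])   # columns follow the Horizons ephemerides naming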
def getPlots(PointingGroups, pgNum, KBOList, findObjects, fileType='DeepDiff',
             ccdList=None, LH_lim=0.,
             dataPath='/astro/store/epyc/users/smotherh/DECAM_Data_Reduction/pointing_groups_hyak/Pointing_Group_{0:03}',
             paperFormat=False):
    pgObjectData = {}
    dupObjectNum = 0
    allFindMotion = []
    all_obj = {}
    for KBONum, KBO in enumerate(KBOList):
        objectName = KBO['Name']
        df = PointingGroups

        # observation times (JD) for every visit in the pointing group
        allTimes = []
        for i in range(len(PointingGroups)):
            date_obs = df['date_obs'].iloc[i].decode()  # [2:-1]
            time_obj = Time(date_obs, format='iso', scale='utc')
            allTimes.append(time_obj.jd)
        allTimes = np.array(allTimes)
        times = allTimes

        try:
            obj = Horizons(id=objectName, location='W84', epochs=times)  # ccd 43
            orbits = obj.ephemerides(quantities='1, 9')
        except Exception:
            continue
        orbits['visit'] = [int(visit) for visit in df['visit_id']]
        orbits['x_pixel'] = -99
        orbits['y_pixel'] = -99
        orbits['ccd'] = -99
        orbits['times'] = times
        # visitMask = [np.logical_and(orbits['visit'] >= 845580,
        #                             orbits['visit'] <= 845682)]
        # orbits = orbits[visitMask]

        findObjects.dataPath = dataPath.format(pgNum)
        findObjects.cutDF = orbits
        findObjects.fileType = fileType
        nightVisits = np.array(orbits['visit'])
        findObjects.testVisit = nightVisits[-1]
        with mp.Pool(20) as pool:
            results = pool.map(findObjects.matchCcds, range(1, 63))

        if ccdList is None:
            ccdList = np.linspace(0, 61, 62).astype(int)
        for j in ccdList:
            foo = results[j]
            onCcd = foo[foo['ccd'] > 0]
            if len(onCcd) > 0:
                findObjects.ccdNum = onCcd['ccd'][0]
                print(objectName + ' is on ccd ' + str(findObjects.ccdNum))

        if findObjects.ccdNum is None:
            print(objectName + ' is not on any ccd')
        else:
            with mp.Pool(20) as pool:
                results = pool.map(findObjects.matchVisits, nightVisits)
            allResults = vstack(results)
            allResults = allResults[allResults['ccd'] > 0]
            if int(allResults[0]['visit']) == int(PointingGroups['visit_id'][0]):
                coaddData, objectData, findMotion, SNR = searchKnownObject(
                    allResults, findObjects.dataPath, stampSize=[21, 21],
                    numCols=5, fileType=fileType, paperFormat=paperFormat)
                if SNR > LH_lim:
                    pgKey = 'pg{:03}_ccd{:02}'.format(pgNum, findObjects.ccdNum)
                    allFindMotion.append(findMotion)
                    if pgKey not in pgObjectData:
                        pgObjectData[pgKey] = objectData
                    else:
                        dupObjectNum += 1
                        pgObjectData[pgKey + '_' + str(dupObjectNum)] = objectData
                    if paperFormat:
                        plt.savefig('known_objects/pg{:03}_ccd{:02}_{}.pdf'.format(
                            pgNum, findObjects.ccdNum,
                            objectName.replace(" ", "_")))
                    else:
                        plt.savefig('known_objects/pg{:03}_ccd{:02}_{}'.format(
                            pgNum, findObjects.ccdNum,
                            objectName.replace(" ", "_")))
                    all_obj[objectName] = obj
                    plt.close()
                else:
                    print('Object {} has a coadded likelihood less than {}'.format(
                        objectName, LH_lim))
                    plt.close()
            else:
                print('Object {} is not present in the first visit'.format(
                    objectName))
        findObjects.ccdNum = None
    return (pgObjectData, allFindMotion, all_obj)
def sunHorizons(start, stop, step):
    return Horizons(id='10', location='geo', id_type='majorbody',
                    epochs={'start': time2Str(start),
                            'stop': time2Str(stop),
                            'step': step})
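A brief usage sketch for the helper above, assuming `time2Str` formats a `datetime` the way Horizons expects; the stand-in `time2Str`, the dates, and the step are illustrative, not taken from the source:

import datetime
from astroquery.jplhorizons import Horizons

def time2Str(t):
    # illustrative stand-in for the project's own time2Str helper
    return t.strftime('%Y-%m-%d %H:%M')

start = datetime.datetime(2020, 3, 21)
stop = start + datetime.timedelta(hours=1)
sun = sunHorizons(start, stop, '10m')       # geocentric Sun ephemeris query
eph = sun.ephemerides()
print(eph['datetime_str', 'RA', 'DEC'])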