def logging_kernel_names(self):
    n_kernels = spice.ktotal('ALL')
    msg = 'After loading/unloading kernels in {:s}, {:d} kernel files are loaded.'.format(
        self.meta_kernel_path, n_kernels)
    self.logger.info(msg)
    for i in range(n_kernels):
        self.logger.debug(spice.kdata(i, 'ALL'))
def is_kernel_already_loaded(self):
    result = False
    n_kernels = spice.ktotal('ALL')
    for i in range(n_kernels):
        file, _, source, _ = spice.kdata(i, 'ALL')
        if file == self.meta_kernel_path:
            msg = 'Already loaded. file: {:s}, source: {:s}.'.format(file, source)
            self.logger.debug(msg)  # the original built msg but never emitted it
            result = True
            break
    return result
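# A minimal usage sketch (not part of the original class; the method name is
# hypothetical): furnish the meta-kernel only when it is not already in
# SPICE's global kernel pool, then log what ended up loaded.
def load_meta_kernel_once(self):
    if not self.is_kernel_already_loaded():
        spice.furnsh(self.meta_kernel_path)
    self.logging_kernel_names()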
def f_manage_kernels(kernel_meta):
    # Clear all loaded kernels (kclear unloads everything, so a separate
    # unload of the meta file is not needed)
    spiceypy.kclear()

    # Load the necessary kernels: the meta file plus the leapseconds kernel
    spiceypy.furnsh(kernel_meta)
    spiceypy.furnsh(r'E:\Data Science Projects\Space Science\SpaceScience-P2-SSBandSunWobbling\data\external\_kernels\lsk\naif0012.tls')

    # List the loaded kernels
    count = spiceypy.ktotal('ALL')
    for i in range(count):
        file, ktype, source, handle = spiceypy.kdata(i, 'ALL')
        print('File {0}'.format(file))
        print('Type {0}'.format(ktype))
        print('Source {0}\n'.format(source))
def show_loaded_kernels():
    """Print an overview of the loaded kernels."""
    count = spice.ktotal("all")
    if count == 0:
        print("No kernels loaded at this time.")
        return
    print("The loaded files are:\n(paths relative to kernels.KERNELROOT)\n")
    for which in range(count):
        # Current SpiceyPy returns (file, type, source, handle); older
        # versions appended an explicit `found` flag as a fifth element.
        file, ktype, source, handle = spice.kdata(which, "all", 100, 100, 100)
        print("Position:", which)
        p = Path(file)
        print("Path:", p.relative_to(KERNELROOT))
        print("Type:", ktype)
        print("Source:", source)
        print("Handle:", handle)
def describe_loaded_kernels(kind='all'):
    """Print a list of loaded SPICE kernels of :kind:."""
    all_kinds = ('spk', 'pck', 'ck', 'ek', 'text', 'meta')
    if kind == 'all':
        for k in all_kinds:
            describe_loaded_kernels(k)
        return
    n = spiceypy.ktotal(kind)
    if n == 0:
        print('No loaded %s kernels' % kind)
        return
    print("Loaded %s kernels:" % kind)
    for i in range(n):
        data = spiceypy.kdata(i, kind, 100, 10, 100)
        print("\t%d: %s" % (i, data[0]))
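# Hypothetical usage of the helper above, assuming some kernels have already
# been furnished: list a single kind, then every known kind in turn.
describe_loaded_kernels('spk')
describe_loaded_kernels()   # recurses over ('spk', 'pck', 'ck', 'ek', 'text', 'meta')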
def search_solar_objects(obsinfo):
    solar_objects = []
    count = spice.ktotal("spk")
    for which in range(count):
        filename, _filetype, _source, _handle = spice.kdata(which, "spk")
        ids = spice.spkobj(filename)
        for i in range(spice.card(ids)):
            obj = ids[i]
            target = spice.bodc2n(obj)
            if is_target_in_fov(
                    obsinfo.inst,
                    target,
                    obsinfo.et,
                    obsinfo.abcorr,
                    obsinfo.obsrvr,
            ):
                solar_objects.append(get_solar_object(obsinfo, obj, target))
    return solar_objects
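# Sketch of the spkobj/card pattern used above (the file name here is
# hypothetical): spkobj returns a SPICE integer cell holding the body IDs
# covered by the SPK, card() gives the number of valid entries, and bodc2n
# maps each ID back to a body name.
ids = spice.spkobj('de430.bsp')
for i in range(spice.card(ids)):
    print(ids[i], spice.bodc2n(ids[i]))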
def loadedKernels(kType=None, print_or_return="p"):
    """Print or return the file names of the loaded kernels of type kType."""
    if not kType:
        kType = "all"
    # Get number of kernels loaded
    ktot = spice.ktotal(kType)
    # Collect the file names
    kerns = []
    if ktot > 0:
        kerns = [spice.kdata(k, kType, 255, 255, 255)[0] for k in range(ktot)]
    if print_or_return == "p":
        print('{:d} {:s} kernels loaded:'.format(ktot, kType.upper()))
        for k in kerns:
            print(k)
    elif print_or_return == "r":
        return kerns
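# Hypothetical usage of loadedKernels: print everything, or get the SPK file
# names back as a list instead of printing.
loadedKernels()                        # prints e.g. "5 ALL kernels loaded:" plus paths
spk_files = loadedKernels('spk', 'r')  # returns the list of SPK paths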
with open('../js/ikuchi-pregenerated.js', 'w') as fh:
    fh.write('// All data and functions in this file are auto-generated. Do not modify.\n')
    fh.write('// Build date: {}\n'.format(datetime.datetime.now()))
    fh.write('// Build platform: {} {}\n'.format(platform.platform(), platform.processor()))
    fh.write('// Build Python version: {}\n'.format(platform.python_version()))
    fh.write('// Build NumPy version: {}\n'.format(np.__version__))
    fh.write('// Build SciPy version: {}\n'.format(scipy.__version__))
    fh.write('// Build Matplotlib version: {}\n'.format(matplotlib.__version__))
    fh.write('// Build SPICE version: {}\n'.format(spiceypy.tkvrsn('toolkit')))
    fh.write('// SPICE Kernels used:\n')
    for i in range(spiceypy.ktotal('ALL')):
        file, ktype, source, handle = spiceypy.kdata(i, 'ALL')
        fh.write('// {} ({})\n'.format(file, ktype))
    fh.write('\n\n\n')

    # Output default planet rotation angle functions.
    fh.write('//Fits for Default Planet\n')
    fh.write('function rotzDefault(t){\n\treturn(0.0);\n}\n\n')
    fh.write('function rotyDefault(t){\n\treturn(0.0);\n}\n\n\n')

# We have full time series of the two rotation angles we need, so now we
# do fits and write the results of the fits to javascript functions we
# need in the main web app.
print('[ikuchi-build] Fitting and generating Javascript functions')

# We work in unix timestamps measured in milliseconds, but the SPICE
# data is in ephemeris time in seconds.
def __init__(self, kernels_folder=None):
    """
    Parameters:
        kernels_folder (string): If not provided, the path stored in the
            environment variable ``TESSPHOT_SPICE_KERNELS`` is used, and if
            that is not set, the ``data/spice`` directory is used.
    """
    logger = logging.getLogger(__name__)

    # If no kernel folder is given, use the one stored in env.var. or the default location:
    if kernels_folder is None:
        kernels_folder = os.environ.get(
            'TESSPHOT_SPICE_KERNELS',
            os.path.join(os.path.dirname(__file__), 'data', 'spice'))

    # Make sure the kernel directory exists:
    kernels_folder = os.path.abspath(kernels_folder)
    os.makedirs(kernels_folder, exist_ok=True)

    # Automatically download kernels from TASOC, if they don't already exist:
    #urlbase = 'https://archive.stsci.edu/missions/tess/models/'
    urlbase = 'https://tasoc.dk/pipeline/spice/'
    downlist = []
    for fname in self.kernel_files:
        fpath = os.path.join(kernels_folder, fname)
        if not os.path.exists(fpath):
            downlist.append([urlbase + fname, fpath])
    if downlist:
        download_parallel(downlist)

    # Path where meta-kernel will be saved:
    hashkey = kernels_folder + ',' + ','.join(self.kernel_files)
    fileshash = hashlib.md5(hashkey.encode()).hexdigest()
    self.METAKERNEL = os.path.join(kernels_folder, 'metakernel-' + fileshash + '.txt')

    # Write meta-kernel to file:
    if not os.path.exists(self.METAKERNEL):
        with open(self.METAKERNEL, 'w') as fid:
            fid.write("KPL/MK\n")
            fid.write(r"\begindata" + "\n")
            fid.write("PATH_VALUES = ('" + kernels_folder + "')\n")
            fid.write("PATH_SYMBOLS = ('KERNELS')\n")
            fid.write("KERNELS_TO_LOAD = (\n")
            fid.write(",\n".join(["'$KERNELS/" + fname + "'" for fname in self.kernel_files]))
            fid.write(")\n")
            fid.write(r"\begintext" + "\n")
            fid.write("End of MK file.\n")

    # Because SpiceyPy loads kernels into a global memory scope (BAAAAADDDD SpiceyPy!!!),
    # we first check if we have already loaded this into the global scope.
    # This is an attempt to avoid loading the same kernels again and again when
    # running things in parallel.
    already_loaded = False
    for k in range(spiceypy.ktotal('META')):
        if os.path.abspath(spiceypy.kdata(k, 'META')[0]) == self.METAKERNEL:
            logger.debug("SPICE Meta-kernel already loaded.")
            already_loaded = True
            break

    # Define TESS object if it doesn't already exist:
    try:
        spiceypy.bodn2c('TESS')
    except SpiceyError:
        logger.debug("Defining TESS name in SPICE")
        spiceypy.boddef('TESS', -95)

    # Load kernels if needed:
    if not already_loaded:
        logger.debug("Loading SPICE Meta-kernel: %s", self.METAKERNEL)
        spiceypy.furnsh(self.METAKERNEL)

    # Let's make sure astropy is using the de430 kernels as well.
    # Default is to use the same as is being used by SPOC (de430).
    # If using astropy 4.0+, we can load the local one directly; before that,
    # it needs to be downloaded and cached.
    # NOTE: https://github.com/astropy/astropy/pull/8767
    if astropy_major_version >= 4:
        self.planetary_ephemeris = os.path.join(kernels_folder, 'tess2018338154429-41241_de430.bsp')
    else:
        self.planetary_ephemeris = urlbase + 'tess2018338154429-41241_de430.bsp'
    self._old_solar_system_ephemeris = coord.solar_system_ephemeris.get()
    coord.solar_system_ephemeris.set(self.planetary_ephemeris)
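# A hedged sketch of the matching teardown such a cache presumably needs
# (method name hypothetical, not from the original): unload the meta-kernel
# and restore the astropy ephemeris saved in __init__.
def close(self):
    spiceypy.unload(self.METAKERNEL)
    coord.solar_system_ephemeris.set(self._old_solar_system_ephemeris)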
def __init__(self, kernels_folder=None):
    """
    Parameters:
        kernels_folder (string): If not provided, the path stored in the
            environment variable ``TESSPHOT_SPICE_KERNELS`` is used, and if
            that is not set, the ``data/spice`` directory is used.
    """
    logger = logging.getLogger(__name__)

    # If no kernel folder is given, use the one stored in env.var. or the default location:
    if kernels_folder is None:
        kernels_folder = os.environ.get(
            'TESSPHOT_SPICE_KERNELS',
            os.path.join(os.path.dirname(__file__), 'data', 'spice'))

    # Create list of kernels that should be loaded:
    files = (
        # Planetary ephemeris and TESS clock kernels:
        'tess2018338154046-41240_naif0012.tls',
        'tess2018338154429-41241_de430.bsp',
        'tess2019113195500-41374_sclk.tsc',
        # Predictive kernels of TESS's expected position:
        #'TESS_EPH_PRE_2YEAR_2018171_01.bsp',
        'TESS_EPH_PRE_LONG_2018109_02.bsp',
        'TESS_EPH_PRE_LONG_2019045_01.bsp',
        # Definitive kernels of TESS's actual position:
        #'TESS_EPH_DEF_2018004_01.bsp', # Does not contain any information
        'TESS_EPH_DEF_2018080_01.bsp',
        #'TESS_EPH_DEF_2018108_01.bsp', # Superseded by newer version below
        'TESS_EPH_DEF_2018108_02.bsp',
        'TESS_EPH_DEF_2018115_01.bsp', 'TESS_EPH_DEF_2018124_01.bsp',
        'TESS_EPH_DEF_2018133_01.bsp', 'TESS_EPH_DEF_2018150_01.bsp',
        'TESS_EPH_DEF_2018183_01.bsp', 'TESS_EPH_DEF_2018186_01.bsp',
        'TESS_EPH_DEF_2018190_01.bsp', 'TESS_EPH_DEF_2018193_01.bsp',
        'TESS_EPH_DEF_2018197_01.bsp', 'TESS_EPH_DEF_2018200_01.bsp',
        'TESS_EPH_DEF_2018204_01.bsp', 'TESS_EPH_DEF_2018207_01.bsp',
        'TESS_EPH_DEF_2018211_01.bsp', 'TESS_EPH_DEF_2018214_01.bsp',
        'TESS_EPH_DEF_2018218_01.bsp', 'TESS_EPH_DEF_2018221_01.bsp',
        'TESS_EPH_DEF_2018225_01.bsp', 'TESS_EPH_DEF_2018228_01.bsp',
        'TESS_EPH_DEF_2018232_01.bsp', 'TESS_EPH_DEF_2018235_01.bsp',
        'TESS_EPH_DEF_2018239_01.bsp', 'TESS_EPH_DEF_2018242_01.bsp',
        'TESS_EPH_DEF_2018246_01.bsp', 'TESS_EPH_DEF_2018249_01.bsp',
        'TESS_EPH_DEF_2018253_01.bsp', 'TESS_EPH_DEF_2018256_01.bsp',
        'TESS_EPH_DEF_2018260_01.bsp', 'TESS_EPH_DEF_2018263_01.bsp',
        'TESS_EPH_DEF_2018268_01.bsp', 'TESS_EPH_DEF_2018270_01.bsp',
        'TESS_EPH_DEF_2018274_01.bsp', 'TESS_EPH_DEF_2018277_01.bsp',
        'TESS_EPH_DEF_2018282_01.bsp', 'TESS_EPH_DEF_2018285_01.bsp',
        'TESS_EPH_DEF_2018288_01.bsp', 'TESS_EPH_DEF_2018291_01.bsp',
        'TESS_EPH_DEF_2018295_01.bsp', 'TESS_EPH_DEF_2018298_01.bsp',
        'TESS_EPH_DEF_2018302_01.bsp', 'TESS_EPH_DEF_2018305_01.bsp',
        'TESS_EPH_DEF_2018309_01.bsp', 'TESS_EPH_DEF_2018312_01.bsp',
        'TESS_EPH_DEF_2018316_01.bsp', 'TESS_EPH_DEF_2018319_01.bsp',
        'TESS_EPH_DEF_2018323_01.bsp', 'TESS_EPH_DEF_2018327_01.bsp',
        'TESS_EPH_DEF_2018330_01.bsp', 'TESS_EPH_DEF_2018333_01.bsp',
        'TESS_EPH_DEF_2018337_01.bsp', 'TESS_EPH_DEF_2018340_01.bsp',
        'TESS_EPH_DEF_2018344_01.bsp', 'TESS_EPH_DEF_2018347_01.bsp',
        'TESS_EPH_DEF_2018351_01.bsp', 'TESS_EPH_DEF_2018354_01.bsp',
        'TESS_EPH_DEF_2018358_01.bsp', 'TESS_EPH_DEF_2018361_01.bsp',
        'TESS_EPH_DEF_2018365_01.bsp', 'TESS_EPH_DEF_2019003_01.bsp',
        'TESS_EPH_DEF_2019007_01.bsp', 'TESS_EPH_DEF_2019010_01.bsp',
        'TESS_EPH_DEF_2019014_01.bsp', 'TESS_EPH_DEF_2019017_01.bsp',
        'TESS_EPH_DEF_2019021_01.bsp', 'TESS_EPH_DEF_2019024_01.bsp',
        'TESS_EPH_DEF_2019028_01.bsp', 'TESS_EPH_DEF_2019031_01.bsp',
        'TESS_EPH_DEF_2019035_01.bsp', 'TESS_EPH_DEF_2019038_01.bsp',
        'TESS_EPH_DEF_2019042_01.bsp', 'TESS_EPH_DEF_2019045_01.bsp',
        'TESS_EPH_DEF_2019049_01.bsp', 'TESS_EPH_DEF_2019052_01.bsp',
        'TESS_EPH_DEF_2019056_01.bsp', 'TESS_EPH_DEF_2019059_01.bsp',
        'TESS_EPH_DEF_2019063_01.bsp', 'TESS_EPH_DEF_2019066_01.bsp',
        'TESS_EPH_DEF_2019070_01.bsp', 'TESS_EPH_DEF_2019073_01.bsp',
        'TESS_EPH_DEF_2019077_01.bsp', 'TESS_EPH_DEF_2019080_01.bsp',
        'TESS_EPH_DEF_2019084_01.bsp', 'TESS_EPH_DEF_2019087_01.bsp',
        'TESS_EPH_DEF_2019091_01.bsp', 'TESS_EPH_DEF_2019094_01.bsp',
        'TESS_EPH_DEF_2019098_01.bsp', 'TESS_EPH_DEF_2019102_01.bsp',
        'TESS_EPH_DEF_2019105_01.bsp', 'TESS_EPH_DEF_2019108_01.bsp',
        'TESS_EPH_DEF_2019112_01.bsp', 'TESS_EPH_DEF_2019115_01.bsp',
        'TESS_EPH_DEF_2019119_01.bsp', 'TESS_EPH_DEF_2019122_01.bsp',
        'TESS_EPH_DEF_2019126_01.bsp')

    # Make sure the kernel directory exists:
    if not os.path.exists(kernels_folder):
        os.makedirs(kernels_folder)

    # Automatically download kernels from TASOC, if they don't already exist:
    #urlbase = 'https://archive.stsci.edu/missions/tess/models/'
    urlbase = 'https://tasoc.dk/pipeline/spice/'
    for fname in files:
        fpath = os.path.join(kernels_folder, fname)
        if not os.path.exists(fpath):
            download_file(urlbase + fname, fpath)

    # Path where meta-kernel will be saved:
    fileshash = hashlib.md5(','.join(files).encode()).hexdigest()
    self.METAKERNEL = os.path.abspath(
        os.path.join(kernels_folder, 'metakernel-' + fileshash + '.txt'))

    # Write meta-kernel to file:
    if not os.path.exists(self.METAKERNEL):
        with open(self.METAKERNEL, 'w') as fid:
            fid.write("KPL/MK\n")
            fid.write(r"\begindata" + "\n")
            fid.write("PATH_VALUES = ('" + os.path.abspath(kernels_folder) + "')\n")
            fid.write("PATH_SYMBOLS = ('KERNELS')\n")
            fid.write("KERNELS_TO_LOAD = (\n")
            fid.write(",\n".join(["'$KERNELS/" + fname + "'" for fname in files]))
            fid.write(")\n")
            fid.write(r"\begintext" + "\n")
            fid.write("End of MK file.\n")

    # Because SpiceyPy loads kernels into a global memory scope (BAAAAADDDD SpiceyPy!!!),
    # we first check if we have already loaded this into the global scope.
    # This is an attempt to avoid loading the same kernels again and again when
    # running things in parallel.
    already_loaded = False
    for k in range(spiceypy.ktotal('META')):
        if os.path.abspath(spiceypy.kdata(k, 'META')[0]) == self.METAKERNEL:
            logger.debug("SPICE Meta-kernel already loaded.")
            already_loaded = True
            break

    # Define TESS object if it doesn't already exist:
    try:
        spiceypy.bodn2c('TESS')
    except SpiceyError:
        logger.debug("Defining TESS name in SPICE")
        spiceypy.boddef('TESS', -95)

    # Load kernels if needed:
    if not already_loaded:
        logger.debug("Loading SPICE Meta-kernel: %s", self.METAKERNEL)
        spiceypy.furnsh(self.METAKERNEL)

    # Let's make sure astropy is using the de430 kernels as well.
    # Default is to use the same as is being used by SPOC (de430).
    # If using astropy 4.0+, we can load the local one directly; before that,
    # it needs to be downloaded and cached.
    # NOTE: https://github.com/astropy/astropy/pull/8767
    #self.planetary_ephemeris = 'de430'
    if astropy_major_version >= 4:
        self.planetary_ephemeris = os.path.abspath(
            os.path.join(kernels_folder, 'tess2018338154429-41241_de430.bsp'))
    else:
        self.planetary_ephemeris = 'https://tasoc.dk/pipeline/spice/tess2018338154429-41241_de430.bsp'
    self._old_solar_system_ephemeris = coord.solar_system_ephemeris.get()
    coord.solar_system_ephemeris.set(self.planetary_ephemeris)
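# For reference, the meta-kernel generated above looks like this (a sketch;
# the actual path and hash depend on the local setup):
#
#   KPL/MK
#   \begindata
#   PATH_VALUES = ('/path/to/data/spice')
#   PATH_SYMBOLS = ('KERNELS')
#   KERNELS_TO_LOAD = (
#   '$KERNELS/tess2018338154046-41240_naif0012.tls',
#   '$KERNELS/tess2018338154429-41241_de430.bsp',
#   ...
#   )
#   \begintext
#   End of MK file.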
def isd_from_json(data, meta):
    instrument_names = {
        'IMAGING SCIENCE SUBSYSTEM NARROW ANGLE': 'CASSINI_ISS_NAC',
        'IMAGING SCIENCE SUBSYSTEM WIDE ANGLE': 'CASSINI_ISS_WAC',
        'IMAGING SCIENCE SUBSYSTEM - NARROW ANGLE': 'CASSINI_ISS_NAC',
        'MDIS-NAC': 'MSGR_MDIS_NAC',
        'MERCURY DUAL IMAGING SYSTEM NARROW ANGLE CAMERA': 'MSGR_MDIS_NAC',
        'MERCURY DUAL IMAGING SYSTEM WIDE ANGLE CAMERA': 'MSGR_MDIS_WAC'
    }
    spacecraft_names = {'CASSINI ORBITER': 'CASSINI', 'MESSENGER': 'MESSENGER'}

    # This is the return dict
    isd = {}

    # Meta kernels are keyed by body, spacecraft, and year - grab from the data
    spacecraft_name = spacecraft_names[data['spacecraft_id']]
    target_name = data['target_name']
    time = parser.parse(data['capture_date'])
    for k in meta:
        if k.year.year == time.year:
            obs_kernels = k.path

    # Load the meta kernel
    spice.furnsh(obs_kernels)
    # NOTE: older SpiceyPy versions returned an explicit `found` flag from
    # kdata; current versions return (file, type, source, handle) and raise
    # when nothing is found.
    path, tpe, handle, found = spice.kdata(0, 'TEXT')
    if not found:
        directory = os.path.dirname(path)
        directory = os.path.abspath(os.path.join(directory, '../iak'))
        additional_ik = glob.glob(directory + '/*.ti')
        spice.furnsh(additional_ik)

    # SPICE prefers ids over names, so grab the ids from the names
    instrument_name = instrument_names[data['instrument']]
    spacecraft_id = spice.bods2c(spacecraft_name)
    ikid = spice.bods2c(instrument_name)

    # Load the instrument and target metadata into the ISD
    isd['instrument_id'] = instrument_name
    isd['target_name'] = target_name
    isd['spacecraft_name'] = spacecraft_name

    # Prepend IAU to all instances of the body name
    reference_frame = 'IAU_{}'.format(target_name)

    # Load information from the IK kernel
    isd['focal_length'] = spice.gdpool('INS{}_FOCAL_LENGTH'.format(ikid), 0, 1)
    isd['focal_length_epsilon'] = spice.gdpool('INS{}_FL_UNCERTAINTY'.format(ikid), 0, 1)
    isd['nlines'] = spice.gipool('INS{}_PIXEL_LINES'.format(ikid), 0, 1)
    isd['nsamples'] = spice.gipool('INS{}_PIXEL_SAMPLES'.format(ikid), 0, 1)
    isd['original_half_lines'] = isd['nlines'] / 2.0
    isd['original_half_samples'] = isd['nsamples'] / 2.0
    isd['pixel_pitch'] = spice.gdpool('INS{}_PIXEL_PITCH'.format(ikid), 0, 1)
    isd['ccd_center'] = spice.gdpool('INS{}_CCD_CENTER'.format(ikid), 0, 2)
    isd['ifov'] = spice.gdpool('INS{}_IFOV'.format(ikid), 0, 1)
    isd['boresight'] = spice.gdpool('INS{}_BORESIGHT'.format(ikid), 0, 3)
    isd['transx'] = spice.gdpool('INS{}_TRANSX'.format(ikid), 0, 3)
    isd['transy'] = spice.gdpool('INS{}_TRANSY'.format(ikid), 0, 3)
    isd['itrans_sample'] = spice.gdpool('INS{}_ITRANSS'.format(ikid), 0, 3)
    isd['itrans_line'] = spice.gdpool('INS{}_ITRANSL'.format(ikid), 0, 3)

    # Optical distortion coefficients; fall back to identity if absent.
    # NOTE: ikid from bods2c is already negative, so 'INS{}' (not the
    # original's 'INS-{}', which produced a double minus) yields keys
    # like 'INS-236820_OD_T_X'.
    try:
        isd['odt_x'] = spice.gdpool('INS{}_OD_T_X'.format(ikid), 0, 10)
    except Exception:
        isd['odt_x'] = np.zeros(10)
        isd['odt_x'][1] = 1
    try:
        isd['odt_y'] = spice.gdpool('INS{}_OD_T_Y'.format(ikid), 0, 10)
    except Exception:
        isd['odt_y'] = np.zeros(10)
        isd['odt_y'][2] = 1
    try:
        isd['starting_detector_sample'] = spice.gdpool('INS{}_FPUBIN_START_SAMPLE'.format(ikid), 0, 1)
    except Exception:
        isd['starting_detector_sample'] = 0
    try:
        isd['starting_detector_line'] = spice.gdpool('INS{}_FPUBIN_START_LINE'.format(ikid), 0, 1)
    except Exception:
        isd['starting_detector_line'] = 0

    # Get temperature from SPICE and adjust focal length
    if 'focal_plane_temperature' in data:
        try:
            # TODO: Remove once WAC temperature dependence is working
            temp_coeffs = spice.gdpool('INS{}_FL_TEMP_COEFFS'.format(ikid), 0, 6)
            temp = data['focal_plane_temperature']
            isd['focal_length'] = distort_focal_length(temp_coeffs, temp)
        except Exception:
            isd['focal_length'] = spice.gdpool('INS{}_FOCAL_LENGTH'.format(ikid), 0, 1)
    else:
        isd['focal_length'] = spice.gdpool('INS{}_FOCAL_LENGTH'.format(ikid), 0, 1)

    # Get the radii from SPICE
    rad = spice.bodvrd(isd['target_name'], 'RADII', 3)
    radii = rad[1]
    isd['semi_major_axis'] = radii[0]
    isd['semi_minor_axis'] = radii[1]

    # Now time
    sclock = data['spacecraft_clock_count']
    exposure_duration = data['exposure_duration']
    exposure_duration = exposure_duration * 0.001  # Scale to seconds

    # Get the instrument id, and, since this is a framer, set the time to the middle of the exposure
    et = spice.scs2e(spacecraft_id, sclock)
    et += (exposure_duration / 2.0)
    isd['ephemeris_time'] = et

    # Get the sensor position: spkpos gives the target relative to the
    # spacecraft in km, so negate and scale to get the sensor origin in metres
    loc, _ = spice.spkpos(isd['target_name'], et, reference_frame, 'LT+S', spacecraft_name)
    loc *= -1000
    isd['x_sensor_origin'] = loc[0]
    isd['y_sensor_origin'] = loc[1]
    isd['z_sensor_origin'] = loc[2]

    # Get the rotation angles from MDIS NAC frame to Mercury body-fixed frame
    camera2bodyfixed = spice.pxform(instrument_name, reference_frame, et)
    opk = spice.m2eul(camera2bodyfixed, 3, 2, 1)
    isd['omega'] = opk[2]
    isd['phi'] = opk[1]
    isd['kappa'] = opk[0]

    # Get the sun position, converted to metres
    sun_state, lt = spice.spkezr("SUN", et, reference_frame, data['lighttime_correction'], target_name)
    isd['x_sun_position'] = sun_state[0] * 1000
    isd['y_sun_position'] = sun_state[1] * 1000
    isd['z_sun_position'] = sun_state[2] * 1000

    # Get the velocity
    v_state, lt = spice.spkezr(spacecraft_name, et, reference_frame, data['lighttime_correction'], target_name)
    isd['x_sensor_velocity'] = v_state[3] * 1000
    isd['y_sensor_velocity'] = v_state[4] * 1000
    isd['z_sensor_velocity'] = v_state[5] * 1000

    # Misc. insertion
    # A lookup here would be smart - similar to the meta kernels: what is the newest model, etc.
    if 'model_name' not in data:
        isd['model_name'] = 'ISIS_MDISNAC_USGSAstro_1_Linux64_csm30.so'

    isd['min_elevation'] = data['min_elevation']
    isd['max_elevation'] = data['max_elevation']

    spice.unload(obs_kernels)  # Also unload iak
    return isd
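# Hedged round-trip sketch of the Euler convention used in isd_from_json:
# m2eul with axes (3, 2, 1) factors R as [a3]_3 [a2]_2 [a1]_1, and eul2m
# rebuilds R from the three angles. The test rotation is arbitrary.
R = spice.rotate(0.3, 3)
a3, a2, a1 = spice.m2eul(R, 3, 2, 1)
assert np.allclose(spice.eul2m(a3, a2, a1, 3, 2, 1), R)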
def utc2et(Date, ut):
    '''
    Convert Date and ut to the ephemeris time used by SPICE.

    Inputs
    ======
    Date : int
        Date(s) in format yyyymmdd
    ut : float
        Time(s) in hours from the beginning of the day

    Returns
    =======
    et : ephemeris times
    '''
    # Split up the dates and times
    n = np.size(ut)
    if np.size(Date) == 1:
        Date = np.zeros(n, dtype='int32') + Date
    if np.size(ut) == 1:
        ut = np.zeros(n, dtype='float32') + ut
    yr = Date // 10000
    mn = (Date % 10000) // 100
    dy = Date % 100
    hh, mm, ss, ms = TT.DectoHHMM(ut)
    ss = np.float32(ss) + np.float32(ms) / 1000

    # Create an array of strings
    strfmt = '{:04d} {:02d} {:02d} {:02d} {:02d} {:06.3f}'
    utc_str = np.array([
        strfmt.format(int(yr[i]), int(mn[i]), int(dy[i]),
                      int(hh[i]), int(mm[i]), float(ss[i]))
        for i in range(n)
    ])

    # Check that the leapseconds kernel is loaded
    loaded = False
    for i in range(sp.ktotal('ALL')):
        k_name, k_type, k_src, k_handle = sp.kdata(i, 'ALL', 128, 32, 128)
        if k_name == lsk_path:
            loaded = True
            break
    if not loaded:
        sp.furnsh(lsk_path)

    # Create the output array
    et = np.zeros((n,), dtype='float64')
    for i in range(n):
        et[i] = sp.str2et(utc_str[i])

    # Unload the kernel again if we were the ones who loaded it
    if not loaded:
        sp.unload(lsk_path)

    if n == 1:
        return et[0]
    return et
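# Hypothetical usage: a scalar date/time pair gives back a scalar et,
# while array inputs are broadcast element-wise.
et = utc2et(20200101, 12.0)              # 2020-01-01 12:00 UT
ets = utc2et(20200101, np.arange(24.0))  # hourly values for the same day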
def fetch_kernels(self):
    count = spiceypy.ktotal('ALL')
    return [spiceypy.kdata(i, 'ALL') for i in range(count)]
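# Minimal usage sketch, assuming a SpiceyPy version where kdata returns a
# (file, type, source, handle) tuple and `cache` is an instance of the
# surrounding class (name hypothetical):
for file, ktype, source, handle in cache.fetch_kernels():
    print(file, ktype)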