def relative_position(obj, reference, ettemp, frame='J2000'):
    kernels = load_kernels()

    # Accept an astropy Time, a float ET, or a list mixing both
    if isinstance(ettemp, Time):
        et = spice.str2et(ettemp.isot)
    elif isinstance(ettemp, float):
        et = ettemp
    else:
        et = [spice.str2et(e.isot) if isinstance(e, Time) else e
              for e in ettemp]

    abcor = 'LT+S'

    if isinstance(et, float):
        posvel, lt = spice.spkezr(obj, et, frame, abcor, reference)
        pos, vel = posvel[:3], posvel[3:]
    else:
        pos = np.zeros((len(et), 3))
        vel = np.zeros((len(et), 3))
        for i, t in enumerate(et):
            posvel, lt = spice.spkezr(obj, t, frame, abcor, reference)
            pos[i, :] = np.array(posvel[:3])
            vel[i, :] = np.array(posvel[3:])

    # Attach units (spkezr returns km and km/s)
    pos = pos * u.km
    vel = vel * u.km / u.s

    for k in kernels:
        spice.unload(k)

    return pos, vel
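# Hedged usage sketch (not from the source): assumes the kernels returned by
# load_kernels() cover the requested epoch; the target/observer names below are
# illustrative only.
#
#   from astropy.time import Time
#   pos, vel = relative_position('MARS BARYCENTER', 'SUN', Time('2020-01-01T00:00:00'))
#   print(pos.to(u.au), vel)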
def __exit__(self, exc_type, exc_val, exc_tb):
    """
    Called when the context manager goes out of scope. At that point the
    kernels listed in the meta-kernel are no longer needed and can be
    unloaded.
    """
    spice.unload(self.metakernel)
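# Hedged sketch (not from the source): a minimal context manager that the
# __exit__ above could belong to. The class name, the 'metakernel' argument and
# the __enter__ body are assumptions for illustration only.
import spiceypy as spice


class KernelContext:
    def __init__(self, metakernel):
        self.metakernel = metakernel

    def __enter__(self):
        # Furnish the meta-kernel on entry; __exit__ unloads it again.
        spice.furnsh(self.metakernel)
        return self

# Usage:
#   with KernelContext('mission.tm'):
#       et = spice.str2et('2020-01-01T00:00:00')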
def cal2et(time, format='UTC', support_ker=False, unload=False):
    """
    Converts UTC or Calendar TDB (CAL) time to Ephemeris Time (ET). Accepts
    a single time or a list of times. This function assumes that the support
    kernel (meta-kernel or leapseconds kernel) has been loaded.

    :param time: Input UTC or CAL time
    :type time: Union[float, list]
    :param format: Input format; 'UTC' or 'CAL'
    :type format: str
    :param unload: If True it will unload the input meta-kernel
    :type unload: bool
    :return: Output ET
    :rtype: Union[float, list]
    """
    out_list = []

    if isinstance(time, str):
        time = [time]

    #
    # We need to specify that the input is Calendar format in TDB. If it is
    # UTC we need to load the support kernels.
    #
    if support_ker:
        spiceypy.furnsh(support_ker)

    if format == 'CAL':
        time[:] = [x.replace('T', ' ') for x in time]
        time[:] = [x + ' TDB' for x in time]

    for element in time:
        try:
            if format == 'UTC':
                out_elm = spiceypy.utc2et(element)
            elif format == 'CAL':
                out_elm = spiceypy.str2et(element)
            else:
                out_elm = element
        except Exception:
            out_elm = spiceypy.str2et(element)

        out_list.append(out_elm)

    if len(out_list) == 1:
        out_time = out_list[0]
    else:
        out_time = out_list

    if unload:
        spiceypy.unload(support_ker)

    return out_time
def simulate(self, starttime, stoptime, camera, threshold, obscode):
    """
    Simulate observations of self.asteroids between starttime and stoptime
    for the given camera and observatory code, printing an ephemeris table.
    """
    # Load all SPICE kernels required for the simulation
    sp.furnsh(camera.ikfile)
    sp.furnsh(obscode + ".bsp")
    sp.furnsh(camera.ckfile)
    sp.furnsh(camera.fkfile)
    sp.furnsh(camera.sclkfile)

    count = 0

    # Print header
    # head = "#AstID "
    head = ("ObjID FieldID FieldMJD AstRange(km) AstRangeRate(km/s) "
            "AstRA(deg) AstRARate(deg/day) AstDec(deg) AstDecRate(deg/day) "
            "Ast-Sun(J2000x)(km) Ast-Sun(J2000y)(km) Ast-Sun(J2000z)(km) "
            "Ast-Sun(J2000vx)(km/s) Ast-Sun(J2000vy)(km/s) Ast-Sun(J2000vz)(km/s) "
            "Obs-Sun(J2000x)(km) Obs-Sun(J2000y)(km) Obs-Sun(J2000z)(km) "
            "Obs-Sun(J2000vx)(km/s) Obs-Sun(J2000vy)(km/s) Obs-Sun(J2000vz)(km/s) "
            "Sun-Ast-Obs(deg) V V(H=0) ")
    print(head)

    while self.asteroids:
        i = self.asteroids[0]
        i.nightlystates([starttime, stoptime])
        [times, ids] = i.shortlist(camera, threshold)
        i.checkvisspice(obscode, camera, times, ids)
        del i
        del self.asteroids[0]
        count = count + 1

    # Unload all SPICE kernels required for the simulation
    sp.unload(camera.ikfile)
    sp.unload(camera.ckfile)
    sp.unload(camera.fkfile)
    sp.unload(camera.sclkfile)
    sp.unload(obscode + ".bsp")
def unload_kernels(self):
    self.sub_count()
    if self.get_count() <= 0:
        spice.unload(self.meta_kernel_path)
        msg = 'Kernels in the file {:s} are unloaded.'.format(
            self.meta_kernel_path)
        self.logger.info(msg)
        self.logging_kernel_names()
    else:
        msg = '{:s} is still used by {:d} objects.'.format(
            self.meta_kernel_path, self.get_count())
        self.logger.info(msg)
def getsta(target, TIME, mode="LT+S", observer="SOLAR SYSTEM BARYCENTER"):
    # https://spiceypy.readthedocs.io/en/master/remote_sensing.html
    #
    # Local parameters
    #
    METAKR = 'getsta.tm'

    #
    # Load the kernels that this program requires. We
    # will need a leapseconds kernel to convert input
    # UTC time strings into ET. We also will need the
    # necessary SPK files with coverage for the bodies
    # in which we are interested.
    #
    spiceypy.furnsh(METAKR)

    #
    # Parse the input time string.
    #
    utctim = parseDate(TIME)
    # print('Converting UTC Time: {:s}'.format(utctim))

    #
    # Convert utctim to ET.
    #
    et = spiceypy.str2et(utctim)
    # print('  ET seconds past J2000: {:16.3f}'.format(et))

    #
    # Compute the apparent state of target as seen from
    # observer in the J2000 frame. All of the ephemeris
    # readers return states in units of kilometers and
    # kilometers per second.
    #
    [state, ltime] = spiceypy.spkezr(target, et, 'J2000', mode, observer)

    # print('  Apparent state of MARS BARYCENTER (4) as seen '
    #       'from SSB (0) in the J2000\n'
    #       '  frame (km, km/s):')
    # print('     X = {:16.3f}'.format(state[0]))
    # print('     Y = {:16.3f}'.format(state[1]))
    # print('     Z = {:16.3f}'.format(state[2]))
    # print('    VX = {:16.3f}'.format(state[3]))
    # print('    VY = {:16.3f}'.format(state[4]))
    # print('    VZ = {:16.3f}'.format(state[5]))

    spiceypy.unload(METAKR)

    return state
def set_meta_kernel(self, kernel):
    if kernel == self.meta_kernel:
        return
    if self.meta_kernel is not None:
        spiceypy.unload(self.meta_kernel)
    if kernel is not None:
        s3manager.get_meta_kernel(kernel)
        spiceypy.furnsh(kernel)
    self.meta_kernel = kernel
def UTC2ET(time, target):
    # Convert from UTC time to ephemeris seconds past J2000.
    # Needs a leapseconds kernel, e.g. 'kernels/lsk/naif0008.tls'.

    # get mission name from target
    mission = getMissionFromTarget(target)

    # get metakernel
    metakernel = getKernels(mission, target, 'UTC2ET', time)

    # load kernels from metakernel
    spice.furnsh(metakernel)

    # convert time from UTC to ET
    ET = spice.str2et(time)

    # unload kernels
    spice.unload(metakernel)

    return ET
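# Hedged usage sketch (not from the source): getMissionFromTarget/getKernels
# are project-specific helpers defined elsewhere, so the target name below is
# illustrative only.
#
#   et = UTC2ET('2005-01-14T09:00:00', 'TITAN')
#   print(et)   # ephemeris seconds past J2000 (TDB)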
def CarringtonLongitude(Date, ut):
    '''
    Get MESSENGER's Carrington longitude
    '''
    n = np.size(ut)
    et = np.zeros((n, ), dtype='float64')
    lon = np.zeros(n, dtype='float64')

    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(spk_kernel2)
    sp.furnsh(pck_kernel)

    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    pos, lt = sp.spkpos('MESSENGER', et, 'IAU_SUN', 'NONE', 'SUN')
    pos = np.array(pos)
    lon = np.arctan2(pos.T[1], pos.T[0]) * 180 / np.pi

    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(spk_kernel2)
    sp.unload(pck_kernel)

    return lon
def _generate_solar_panel_kernel(self, observations, metakernel_file_path, ck_file_path,
                                 output_folder_path, extra_time_hours: float = None,
                                 step_size_s: float = None):
    if extra_time_hours is None:
        extra_time_hours = self.juice_config.get_solar_panel_ck_span_days() * 24.0 / 2
    if step_size_s is None:
        step_size_s = self.juice_config.get_solar_panel_ck_sampling_seconds()

    spp = SolarPanelProcessor("JUICE")
    spy.furnsh(metakernel_file_path)
    spy.furnsh(ck_file_path)

    td = timedelta(hours=extra_time_hours)
    start_time = self._find_first_start_time(observations) - td
    end_time = self._find_last_end_time(observations) + td

    spp.create_panel_ck(
        start_time, end_time, step_size_s,
        os.path.abspath(os.path.join(output_folder_path, 'spacecraft',
                                     'solar_panel_kernel.ck')))

    spy.unload(ck_file_path)
    spy.unload(metakernel_file_path)

    spacecraft_json_path = os.path.join(output_folder_path, "spacecraft", "spacecraft",
                                        "JUICE_panel_def.json")
    with open(spacecraft_json_path) as f:
        spacecraft_json = json.load(f)

    during = spacecraft_json["items"][0]
    before = spacecraft_json["items"][1]
    after = spacecraft_json["items"][2]

    def format_time_str(time: datetime) -> str:
        return time.strftime("%Y-%m-%d %H:%M:%S.000 UTC")

    during["startTime"] = format_time_str(start_time)
    during["endTime"] = format_time_str(end_time)
    before["endTime"] = format_time_str(start_time)
    after["startTime"] = format_time_str(end_time)

    with open(spacecraft_json_path, 'w') as json_file:
        json.dump(spacecraft_json, json_file, indent=2)
    return
def f_manage_kernels(kernel_meta):
    # Unload the meta-kernel and clear the kernel pool, then reload the
    # kernels needed for this analysis from the meta file plus the LSK
    spiceypy.unload(kernel_meta)
    spiceypy.kclear()
    spiceypy.furnsh(kernel_meta)
    spiceypy.furnsh(r'E:\Data Science Projects\Space Science'
                    r'\SpaceScience-P2-SSBandSunWobbling'
                    r'\data\external\_kernels\lsk\naif0012.tls')

    # List the loaded kernels
    count = spiceypy.ktotal('ALL')
    for i in range(0, count):
        [file, ktype, source, handle] = spiceypy.kdata(i, 'ALL')
        print('File   {0}'.format(file))
        print('Type   {0}'.format(ktype))
        print('Source {0}\n'.format(source))
def PosVSO(Date, ut):
    '''
    VEX position in VSO coordinates
    '''
    # create the output arrays
    n = np.size(ut)
    et = np.zeros((n, ), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    # load the relevant kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    VEXspk = ListVenusSPK(Date)
    for vk in VEXspk:
        sp.furnsh(vk)
    sp.furnsh(pck_kernel)
    sp.furnsh(vso_kernel)

    # do each unique date to speed things up
    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # get the positions for each date/time
    pos, lt = sp.spkpos('VEX', et, 'VENUSVSO', 'NONE', 'VENUS')
    x = pos.T[0]
    y = pos.T[1]
    z = pos.T[2]

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    for vk in VEXspk:
        sp.unload(vk)
    sp.unload(pck_kernel)
    sp.unload(vso_kernel)

    return (x, y, z)
def removeKernel(cls, kern, rmDupsOnly=False):
    # Determine the number of times the kernel is loaded
    nDups = cls.checkDuplicates(kern)

    if rmDupsOnly:
        # Only remove duplicate loads, leaving one copy furnished
        if nDups > 0:
            # Recursively remove.
            # NOTE: it is not guaranteed where in the full kernel list the
            # last remaining copy will sit.
            spice.unload(kern)
            cls.removeKernel(kern, rmDupsOnly=rmDupsOnly)
    else:
        if nDups >= 0:
            # Recursively remove every copy
            spice.unload(kern)
            cls.removeKernel(kern)
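# Hedged sketch (not from the source): one way checkDuplicates could count how
# many times a kernel appears in the pool, using spice.ktotal/kdata. Returning
# "loads minus one" matches how removeKernel above interprets the count; this
# is an assumption about the real helper, not its actual implementation.
import spiceypy as spice


def check_duplicates_sketch(kern):
    n_loaded = 0
    for i in range(spice.ktotal('ALL')):
        file, ktype, source, handle = spice.kdata(i, 'ALL')
        if file == kern:
            n_loaded += 1
    return n_loaded - 1   # -1 if not loaded, 0 if loaded once, >0 if duplicated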
def et2cal(time, format='UTC', support_ker=False, unload=False):
    """
    Converts Ephemeris Time (ET) into UTC or Calendar TDB (CAL) time. Accepts
    a single time or a list of times. This function assumes that the support
    kernel (meta-kernel or leapseconds kernel) has been loaded.

    :param time: Input ET time
    :type time: Union[float, list]
    :param format: Desired output format; 'UTC' or 'CAL'
    :type format: str
    :param unload: If True it will unload the input meta-kernel
    :type unload: bool
    :return: Output time in 'UTC', 'CAL' or 'TDB'
    :rtype: Union[str, list]
    """
    timlen = 62
    out_list = []

    if support_ker:
        spiceypy.furnsh(support_ker)

    if isinstance(time, float) or isinstance(time, str):
        time = [time]

    for element in time:
        if format == 'UTC':
            out_elm = spiceypy.et2utc(element, 'ISOC', 3)
        elif format == 'CAL':
            out_elm = spiceypy.timout(element, "YYYY-MM-DDTHR:MN:SC.###::TDB", timlen)
        else:
            out_elm = element

        out_list.append(out_elm)

    if len(out_list) == 1:
        out_time = out_list[0]
    else:
        out_time = out_list

    if unload:
        spiceypy.unload(support_ker)

    return out_time
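# Hedged usage sketch (not from the source): 'naif0012.tls' stands in for
# whatever leapseconds kernel is available locally.
#
#   utc = et2cal(0.0, format='UTC', support_ker='naif0012.tls', unload=True)
#   print(utc)   # ET 0.0 is the J2000 epoch, roughly '2000-01-01T11:58:55.816' UTC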
def computeTimeWidths(self, leapsecondspath=None):
    # Note: we need SPICE to compute time widths because we have to convert
    # the epoch strings to ET seconds past J2000.
    if leapsecondspath is not None:
        try:
            import spiceypy
        except ImportError:
            print('spiceypy not available')
            return
        spiceypy.furnsh(leapsecondspath)
        self.__data__[-1]['timeWidth'] = 0.0
        for recordIndex in range(0, self.getLength() - 1):
            self.__data__[recordIndex]['timeWidth'] = (
                spiceypy.str2et(self.getValue(recordIndex + 1, 'epoch'))
                - spiceypy.str2et(self.getValue(recordIndex, 'epoch')))
        spiceypy.unload(leapsecondspath)
def CarringtonLongitude(Date, ut):
    # create output arrays
    n = np.size(ut)
    et = np.zeros((n, ), dtype='float64')
    lon = np.zeros(n, dtype='float64')
    if n == 1:
        ut = np.array([ut])

    # load kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    VEXspk = ListVenusSPK(Date)
    for vk in VEXspk:
        sp.furnsh(vk)
    sp.furnsh(pck_kernel)
    sp.furnsh(hci_kernel)

    # do each unique date to speed things up
    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # get the longitudes
    pos, lt = sp.spkpos('VEX', et, 'IAU_SUN', 'NONE', 'SUN')
    pos = np.array(pos)
    lon = np.arctan2(pos.T[1], pos.T[0]) * 180 / np.pi

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    for vk in VEXspk:
        sp.unload(vk)
    sp.unload(pck_kernel)
    sp.unload(hci_kernel)

    return lon
def PosHAE(Date, ut):
    '''
    Messenger position in HAE coords (km)
    '''
    n = np.size(ut)
    if np.size(ut) == 1:
        ut = np.array([ut])
    et = np.zeros((n, ), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(spk_kernel2)
    sp.furnsh(pck_kernel)
    sp.furnsh(hci_kernel)

    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    pos, lt = sp.spkpos('MESSENGER', et, 'ECLIPDATE', 'NONE', 'SUN')
    x = pos.T[0]
    y = pos.T[1]
    z = pos.T[2]

    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(spk_kernel2)
    sp.unload(pck_kernel)
    sp.unload(hci_kernel)

    return (x, y, z)
def PosMSM(Date, ut):
    '''
    Messenger position in MSM coords (km)
    '''
    n = np.size(ut)
    et = np.zeros((n, ), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(spk_kernel2)
    sp.furnsh(pck_kernel)
    sp.furnsh(mso_kernel)

    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    for i in range(0, n):
        pos, lt = sp.spkpos('MESSENGER', et[i], 'MERCURYMSO', 'NONE', 'MERCURY')
        x[i] = pos[0]
        y[i] = pos[1]
        z[i] = pos[2] - 478.0   # shift the z origin northward from MSO to the MSM frame

    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(spk_kernel2)
    sp.unload(pck_kernel)
    sp.unload(mso_kernel)

    return (x, y, z)
def PosHCIDates(Date0, Date1):
    '''
    Get the position of Mercury in HCI coordinates between two dates

    Inputs
    ======
    Date0 : int
        Start date in format yyyymmdd
    Date1 : int
        End date

    Returns
    =======
    x : float64
        x-position in HCI coords (km)
    y : float64
        y-position in HCI coords (km)
    z : float64
        z-position in HCI coords (km)
    '''
    # list the dates
    dates = ListDates(Date0, Date1)
    n = dates.size

    # load kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(pck_kernel)
    sp.furnsh(hci_kernel)

    # get the ephemeris times
    et = utc2et(dates, np.zeros(n, dtype='float32'))

    # positions
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')
    for i in range(0, n):
        pos, lt = sp.spkpos('MERCURY', et[i], 'HCI', 'NONE', 'SUN')
        x[i] = pos[0]
        y[i] = pos[1]
        z[i] = pos[2]

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(pck_kernel)
    sp.unload(hci_kernel)

    return (x, y, z)
def HAEtoHCI(Date, ut, xi, yi, zi):
    '''
    Convert HAE to HCI coordinates
    '''
    # create some arrays
    n = np.size(xi)
    if np.size(ut) == 1:
        ut = np.zeros(n, dtype='float32') + ut
    if np.size(xi) == 1:
        xi = np.array([xi])
        yi = np.array([yi])
        zi = np.array([zi])
    et = np.zeros((n, ), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    # load kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(pck_kernel)
    sp.furnsh(hci_kernel)

    # do each unique date to speed things up
    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # rotate each vector from ecliptic-of-date (HAE) into HCI
    for i in range(0, n):
        rot = sp.pxform('ECLIPDATE', 'HCI', et[i])
        x[i], y[i], z[i] = np.sum(rot * np.array([xi[i], yi[i], zi[i]]), axis=1)

    # free kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(pck_kernel)
    sp.unload(hci_kernel)

    return (x, y, z)
def mjd20002et(mjd2000, support_ker=False, unload=False):
    """
    Given a date in MJD2000 (Modified Julian Date 2000) returns the Ephemeris
    Time (ET, which in SPICE is equivalent to TDB). Accepts a single time
    entry or a list of times.

    :param mjd2000: Date in MJD2000
    :type mjd2000: Union[float, list]
    :param support_ker: Support kernels required to run the function. At
       least it should be a leapseconds kernel (LSK) and optionally a
       meta-kernel (MK)
    :type support_ker: Union[str, list]
    :param unload: If True it will unload the input support kernel
    :type unload: bool
    :return: Date in ET/TDB
    :rtype: Union[float, list]
    """
    tdb = []

    if support_ker:
        spiceypy.furnsh(support_ker)

    if not isinstance(mjd2000, list):
        mjd2000 = [mjd2000]

    for time in mjd2000:
        # MJD2000 -> MJD -> JD, then let SPICE parse the Julian Date string
        mjd = float(time) + 51544
        jd = mjd + 2400000.5
        jd = str(jd) + ' JD'
        tdb.append(spiceypy.str2et(jd))

    if unload:
        spiceypy.unload(support_ker)

    if len(tdb) == 1:
        return tdb[0]
    else:
        return tdb
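# Hedged worked example (not from the source): MJD2000 = 0.0 maps to
# MJD 51544 and JD 2451544.5, i.e. 2000-01-01 00:00, so
#
#   mjd20002et(0.0, support_ker='naif0012.tls')
#
# should return roughly -43200 seconds (half a day before the J2000 epoch at
# 12:00). The LSK filename is a placeholder for a locally available kernel.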
def PosIAU_SUN(Date, ut):
    '''
    Get Venus' position in IAU_SUN coordinates, where Z is along the Sun's
    rotational axis and X and Y rotate with the Sun.
    '''
    # get output arrays
    n = np.size(ut)
    if np.size(ut) == 1:
        ut = np.array([ut])
    et = np.zeros((n,), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    # load kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(pck_kernel)
    sp.furnsh(hci_kernel)

    # do each unique date to speed things up
    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # get the positions
    pos, lt = sp.spkpos('VENUS', et, 'IAU_SUN', 'NONE', 'SUN')
    pos = np.array(pos)
    x = pos.T[0]
    y = pos.T[1]
    z = pos.T[2]

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(pck_kernel)
    sp.unload(hci_kernel)

    return (x, y, z)
def PosHCI(Date, ut):
    '''
    HCI position at set times
    '''
    # get output arrays
    n = np.size(ut)
    if np.size(ut) == 1:
        ut = np.array([ut])
    et = np.zeros((n,), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    # load kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(pck_kernel)
    sp.furnsh(hci_kernel)

    # do each unique date to speed things up
    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # get the positions
    pos, lt = sp.spkpos('VENUS', et, 'HCI', 'NONE', 'SUN')
    pos = np.array(pos)
    x = pos.T[0]
    y = pos.T[1]
    z = pos.T[2]

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(pck_kernel)
    sp.unload(hci_kernel)

    return (x, y, z)
# =============================================================================
# End of function
# =============================================================================

# =============================================================================
# Run the function if requested
# When run, this program regenerates all of the backplanes.
# It uses multiprocessing to run these in parallel.
# =============================================================================

#%%%

if (__name__ == '__main__'):

    file_tm = 'kernels_kem_prime.tm'

    sp.unload(file_tm)
    sp.furnsh(file_tm)

    # NB: This would be an ideal candidate for multi-processing
    # NB: Done!

    # Set parameters here

    do_tuna = False
    digit_filter = None

    # Run code here

    digit_filters = ['12', '34', '56', '78', '90']
    frames = ['2014_MU69_SUNFLOWER_ROT']  # , '2014_MU69_TUNACAN_ROT']
def unload(file_name):
    mk_path = path(file_name)
    spiceypy.unload(mk_path)
def OrientationSUN(Date, ut):
    '''
    Position of the Sun relative to MESSENGER in the MSGR_SPACECRAFT frame (km)
    '''
    n = np.size(ut)
    et = np.zeros((n, ), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    # find the ck kernels
    ck = 'msgr_{:04d}_v*.bc'
    yymm = (Date // 100) % 10000
    uyymm = np.unique(yymm)
    nck = np.size(uyymm)
    ck_kernel = []
    for i in range(0, nck):
        files = FileSearch(ck_path, ck.format(uyymm[i]))
        if files.size > 0:
            ck_kernel.append(ck_path + files[-1])
    nck = np.size(ck_kernel)

    # load kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(spk_kernel2)
    sp.furnsh(sclk_kernel)
    sp.furnsh(pck_kernel)
    sp.furnsh(mso_kernel)
    sp.furnsh(ik_kernel)
    sp.furnsh(fk_kernel)
    for i in range(0, nck):
        sp.furnsh(ck_kernel[i])

    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # m = sp.pxform('J2000', 'MSGR_SPACECRAFT', et[0])
    for i in range(0, n):
        pos, lt = sp.spkpos('SUN', et[i], 'MSGR_SPACECRAFT', 'NONE', 'MESSENGER')
        x[i] = pos[0]
        y[i] = pos[1]
        z[i] = pos[2]

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(spk_kernel2)
    sp.unload(sclk_kernel)
    sp.unload(pck_kernel)
    sp.unload(mso_kernel)
    sp.unload(ik_kernel)
    sp.unload(fk_kernel)
    for i in range(0, nck):
        sp.unload(ck_kernel[i])

    return (x, y, z)
def NSOrientationMSO(Date, ut):
    '''
    Get the orientation of MESSENGER NS?
    '''
    n = np.size(ut)
    et = np.zeros((n, ), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    # find the ck kernels
    ck = 'msgr_{:04d}_v*.bc'
    yymm = (Date // 100) % 10000
    uyymm = np.unique(yymm)
    nck = np.size(uyymm)
    ck_kernel = []
    for i in range(0, nck):
        files = FileSearch(ck_path, ck.format(uyymm[i]))
        if files.size > 0:
            ck_kernel.append(ck_path + files[-1])
    nck = np.size(ck_kernel)

    # load kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(spk_kernel2)
    sp.furnsh(sclk_kernel)
    sp.furnsh(pck_kernel)
    sp.furnsh(mso_kernel)
    sp.furnsh(ik_kernel)
    sp.furnsh(fk_kernel)
    for i in range(0, nck):
        sp.furnsh(ck_kernel[i])

    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # calculate the rotation matrix
    m = sp.pxform('MERCURYMSO', 'MSGR_GRNS_NS', et)

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(spk_kernel2)
    sp.unload(sclk_kernel)
    sp.unload(pck_kernel)
    sp.unload(mso_kernel)
    sp.unload(ik_kernel)
    sp.unload(fk_kernel)
    for i in range(0, nck):
        sp.unload(ck_kernel[i])

    return m
def unloadSpiceFiles(self):
    # unload spice files
    for spiceFile in self.loadedSpiceFiles:
        spice.unload(spiceFile)
    spice.kclear()
def MMOPosMSM(Date, ut):
    '''
    Position of MMO in MSM coords
    '''
    # create the output arrays
    if np.size(np.shape(ut)) == 0:
        ut = np.array([ut])
    n = np.size(ut)
    et = np.zeros((n, ), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    # load kernels
    sp.furnsh(lsk_path)
    sp.furnsh(sclk_kernel)
    sp.furnsh(de430_kernel)
    sp.furnsh(mpo_kernel)
    sp.furnsh(pck_kernel)
    sp.furnsh(mso_kernel)

    # do each unique date to speed things up
    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # get positions
    pos, lt = sp.spkpos('MMO', et, 'MERCURYMSO', 'NONE', 'MERCURY')
    x = pos.T[0]
    y = pos.T[1]
    z = pos.T[2] - 478.0   # shift the z origin northward from MSO to the MSM frame

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(sclk_kernel)
    sp.unload(de430_kernel)
    sp.unload(mpo_kernel)
    sp.unload(pck_kernel)
    sp.unload(mso_kernel)

    return (x, y, z)
def OrientationMSO(Date, ut, Verbose=False):
    '''
    This should return the direction in which MESSENGER is oriented;
    no idea if this is right.
    '''
    n = np.size(ut)
    et = np.zeros((n, ), dtype='float64')
    x = np.zeros(n, dtype='float64')
    y = np.zeros(n, dtype='float64')
    z = np.zeros(n, dtype='float64')

    # find the ck kernels
    ck = 'msgr_{:04d}_v*.bc'
    yymm = (Date // 100) % 10000
    uyymm = np.unique(yymm)
    nck = np.size(uyymm)
    ck_kernel = []
    for i in range(0, nck):
        files = FileSearch(ck_path, ck.format(uyymm[i]))
        if files.size > 0:
            ck_kernel.append(ck_path + files[-1])
    nck = np.size(ck_kernel)

    # load all the kernels
    sp.furnsh(lsk_path)
    sp.furnsh(spk_kernel)
    sp.furnsh(spk_kernel2)
    sp.furnsh(sclk_kernel)
    sp.furnsh(pck_kernel)
    sp.furnsh(mso_kernel)
    sp.furnsh(ik_kernel)
    sp.furnsh(fk_kernel)
    for i in range(0, nck):
        sp.furnsh(ck_kernel[i])

    if np.size(Date) == 1:
        et[0] = utc2et(Date, 0.0)
        et = et[0] + ut * 3600.0
    else:
        ud = np.unique(Date)
        for i in range(0, ud.size):
            use = np.where(Date == ud[i])[0]
            tmp = utc2et(ud[i], 0.0)
            et[use] = tmp + ut[use] * 3600.0

    # get the positions
    if Verbose:
        for i in range(0, n):
            print('\rVector {0} of {1}'.format(i + 1, n), end='')
            pos, lt = sp.spkpos('MERCURY', et[i], 'MSGR_SPACECRAFT', 'NONE', 'MESSENGER')
            x[i] = pos[0]
            y[i] = pos[1]
            z[i] = pos[2]
        print()
    else:
        for i in range(0, n):
            pos, lt = sp.spkpos('MERCURY', et[i], 'MSGR_SPACECRAFT', 'NONE', 'MESSENGER')
            x[i] = pos[0]
            y[i] = pos[1]
            z[i] = pos[2]

    # unload kernels
    sp.unload(lsk_path)
    sp.unload(spk_kernel)
    sp.unload(spk_kernel2)
    sp.unload(sclk_kernel)
    sp.unload(pck_kernel)
    sp.unload(mso_kernel)
    sp.unload(ik_kernel)
    sp.unload(fk_kernel)
    for i in range(0, nck):
        sp.unload(ck_kernel[i])

    return (x, y, z)
def nh_ort_track4_flyby(dir_in=None, dir_out=None, name_trajectory='prime'):

    #%%%

    # dir_in = '/Users/throop/
    # dir_in = '/Users/throop/data/ORT4/throop/ort4_bc3_10cbr2_dph/'

    stretch_percent = 99
    stretch = astropy.visualization.PercentileInterval(stretch_percent)

    # dir_data = os.path.expanduser('~/Data/')
    # dir_in git

    do_compress = False   # Do we use .gzip compression on the Track-4 input grids?
                          # If we used compression in the track4_calibrate routine,
                          # we must use it here too.

    # dir_track4 = os.path.join(dir_data, name_ort, 'throop', 'track4')

    if do_compress:
        files = glob.glob(os.path.join(dir_in, '*.grid4d.gz'))
    else:
        files = glob.glob(os.path.join(dir_in, '*.grid4d'))

    files = glob.glob(os.path.join(dir_in, '*.dust.pkl'))

    # Alphabetize file list
    files = sorted(files)

    plt.set_cmap('plasma')

    utc_ca = '2019 1 Jan 05:33:00'
    dt_before = 1*u.hour
    dt_after = 1*u.hour

    # area_sc = (1*u.m)**2

    frame = '2014_MU69_SUNFLOWER_ROT'
    name_target = 'MU69'
    origin = 'lower'   # Required plotting order for imshow

    name_observer = 'New Horizons'

    hbt.figsize((8, 6))
    hbt.set_fontsize(12)

    dt = 1*u.s         # Sampling time through the flyby. Astropy units.

    # Create an output table, Astropy format

    t = Table(names=['trajectory', 'speed', 'q_dust', 'albedo', 'rho',
                     'tau_max', 'tau_typical', 'iof_max', 'iof_typical'],
              dtype=['U30', float, float, float, float, float, float, float, float])

    # Start up SPICE if needed. Unload old kernels just as a safety precaution.

    sp.unload('kernels_kem_prime.tm')
    sp.unload('kernels_kem_alternate.tm')
    sp.furnsh(f'kernels_kem_{name_trajectory}.tm')

    do_short = False

    if do_short:
        files = files[0:4]

    i = 3
    file = files[i]

    num_files = len(files)
    name_run = dir_out.split('/')[-2]

    #%%%

    for i, file in enumerate(files):

        #%%%

        print(f'Starting file {i}/{len(files)}')

        grid = nh_ort_track4_grid(file)    # Load the grid from disk. Uses gzip, so it is quite slow (10 sec/file).

        print(f'Loading file {file}')

        # Load the trajectory parameters

        et_ca = int(sp.utc2et(utc_ca))  # Force this to be an integer, just to make output cleaner.

        et_start = et_ca - dt_before.to('s').value
        et_end = et_ca + dt_after.to('s').value

        grid.frame = frame
        grid.name_target = name_target
        grid.name_trajectory = name_trajectory

        # And call the method to fly through it!
        # The returned density values etc. are available within the instance variables, not returned explicitly.

        grid.fly_trajectory(name_observer, et_start, et_end, dt)

        # The first time through the loop, make a plot of our path through the system

        do_plots_geometry = True

        if (do_plots_geometry and (i == 0)):
            grid.plot_trajectory_geometry()

        # Make slice plots through the grid

        do_plot_slices_xyz = False

        if do_plot_slices_xyz:
            hbt.fontsize(8)
            hbt.figsize((20, 5))
            grid.plot(axis_sum=0)
            grid.plot(axis_sum=1)
            grid.plot(axis_sum=2)
            hbt.fontsize(10)

        # Make a plot of optical depth

        do_plot_tau = True
        if do_plot_tau:
            grid.plot_tau()

        # =============================================================================
        # Make some plots of count rate vs. time!
        # =============================================================================

        # Make a plot of the instantaneous count rate

        hbt.figsize((10, 15))

        # Make a plot of the actual density that we give to Doug Mehoke

        # Define a list of colors. This is so we can use the colors= argument to set
        # a marker to show grain size, rather than let plot() auto-assign.

        # colors = plt.rcParams['axes.prop_cycle'].by_key()['color']  # This is the default color iterator.

        colors = ['antiquewhite', 'tomato', 'blueviolet', 'skyblue', 'gold',
                  'darkcyan', 'thistle', 'olive', 'red', 'sienna',
                  'deepskyblue', 'lightsalmon', 'pink']  # 'aqua']
        # 'antiquewhite4', 'aqua', 'aquamarine4', 'black', 'blue', 'blueviolet',
        # 'brown1', 'chartreuse1', 'darkgreen', 'darkorange1', 'dodgerblue1', 'lightpink', 'magenta']

        # Make a plot of dust number density. This is straight out of the grid, and for comparison with MRS.

        vals_fiducial = [1e-10, 1e-8, 1e-6, 1e-4, 1e-2, 1e-0]

        plt.subplot(3, 1, 1)
        for j, s in enumerate(grid.s):
            plt.plot(grid.delta_et_t, grid.number_t[j],
                     label='s={:.2f} mm'.format(s),
                     color=colors[j])
        plt.legend()
        plt.title('Dust number density')
        plt.xlabel('ET from C/A')
        plt.yscale('log')
        plt.ylim((1e-10, 1e2))   # Match to MRS.
        plt.axvline(0, color='black', alpha=0.05)
        plt.ylabel(r'Dust, # km$^{-3}$')
        for val in vals_fiducial:
            plt.axhline(val, color='black', alpha=0.05)

        # Make a plot of impact rate. This assumes a s/c area.

        plt.subplot(3, 1, 2)
        for j, s in enumerate(grid.s):   # 's' is dust size
            plt.plot(grid.delta_et_t, grid.number_sc_t[j],
                     label=f's={s:.2f} mm',
                     color=colors[j])
        plt.title('Impact rate, A={}'.format(grid.area_sc))
        plt.yscale('log')
        plt.xlabel('ET from C/A')
        plt.legend()
        plt.ylabel(r'Dust, # Impacts sec$^{-1}$')

        # Make a plot of the cumulative count rate. Mark grain sizes here too.

        plt.subplot(3, 1, 3)
        for j, s in enumerate(grid.s):   # Loop over size
            plt.plot(grid.delta_et_t, grid.number_sc_cum_t[j],   # Main plot line
                     label='s={:.2f} mm'.format(s),
                     color=colors[j])
            plt.plot([grid.delta_et_t[-1]], [grid.number_sc_cum_t[j, -1].value],  # Circle to indicate grain size
                     markersize=(7-j)*2, marker='o',
                     color=colors[j])   # Use same color as prev line!
        hbt.figsize(5, 5)
        plt.legend()
        plt.title('Number of impacts (cumulative), A={}'.format(grid.area_sc))
        plt.xlabel('ET from C/A')
        plt.yscale('log')
        plt.ylabel('# of Impacts')
        plt.axhline(y=1, linestyle='--', alpha=0.1)
        plt.tight_layout()
        plt.show()

        # Make a plot of the size distribution.
        # Make two curves: one for n(r) for the entire grid, and one for n(r) that hits the s/c.

        # Now add an entry to the table. This is a table that lists all of the results --
        # e.g., max_tau, count rate etc. One line per grid.

        t.add_row(vals=[grid.name_trajectory, grid.speed, grid.q, grid.albedo, grid.rho,
                        grid.tau_max, grid.tau_typ,
                        grid.iof_max, grid.iof_typ])

        # Get size dist along path

        number_path = grid.number_sc_cum_t[:, -1].value

        # Take the full particle grid, and sum along all spatial axes, leaving just the size axis.

        number_grid = np.sum(np.sum(np.sum(grid.density, axis=1), axis=1), axis=1)

        # Normalize both size dists

        number_grid = hbt.normalize(number_grid)
        number_path = hbt.normalize(number_path)

        plt.plot(grid.s, number_path, label='Along s/c path')
        plt.plot(grid.s, number_grid, label='In grid, total')
        plt.yscale('log')
        plt.xscale('log')
        plt.ylim((hbt.minval(np.array([number_grid, number_path]))/2, 1))
        plt.xlabel('Radius [mm]')
        plt.ylabel('Particle number [arbitrary]')
        plt.legend(loc='lower right')
        plt.show()

        # Output the dust population for this run to a file. This is the file that Doug Mehoke will read.

        grid.output_trajectory(name_run=name_run, do_positions=False, dir_out=dir_out)

        print('---')

    #%%%

    # Print the table

    t.pprint(max_width=-1)

    # Save the table as output

    file_out = os.path.join(dir_out, f'nh_{name_trajectory}_track4_table.pkl')

    lun = open(file_out, 'wb')
    pickle.dump(t, lun)
    lun.close()
    print(f'Wrote: {file_out}')

    #%%%

    # Now that all files have been created, compress results into an archive (.tar.gz) for Doug Mehoke

    inits_track4 = 'hbt'

    # if 'hamilton' in files[0]:
    #     inits_track3 = 'dph'
    # if 'kauf' in files[0]:
    #     inits_track3 = 'dk'

    file_out = f'{name_trajectory}_{name_run}_{inits_track4}_n{num_files}.tgz'

    cmd = f'cd {dir_out}; tar -czf {file_out} *{name_trajectory}*.dust'
    _ = subprocess.Popen(cmd, shell=True)
    print(cmd)
    print(f'Wrote: {dir_out}/{file_out}')
def tearDown(self):
    spice.unload(self.updated_kernels)
    for kern in self.binary_kernels:
        os.remove(kern)
def nh_ort_find_ring_pole():

    file_superstack = '/Users/throop/Data/ORT4/superstack_ORT4_z4_mean_wcs_sm_hbt.fits'

    file_tm = 'kernels_kem_prime.tm'

    sp.unload(file_tm)
    sp.furnsh(file_tm)

    f = fits.open(file_superstack)
    img = f[0].data

    # plt.imshow(stretch(img))
    # plt.show()

    wcs = WCS(file_superstack)

    num_pts = 200

    ra_pole = 275 * hbt.d2r
    # dec_pole = -56 * hbt.d2r
    dec_pole = 13 * hbt.d2r
    radius_ring = 9000          # Radius in km

    vec_pole_j2k = sp.radrec(1, ra_pole, dec_pole)

    et = float(f[0].header['SPCSCET'])
    utc = sp.et2utc(et, 'C', 0)

    # Get position from NH to UT
    (st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'New Horizons')
    vec_nh_ut = st[0:3]

    # Get position from Sun to NH
    (st, lt) = sp.spkezr('New Horizons', et, 'J2000', 'LT', 'Sun')
    vec_sun_nh = st[0:3]

    vec_sun_ut = vec_sun_nh + vec_nh_ut

    # Define a 'ring plane', based on a pole vector and a point.
    # This ring plane should be in J2K space -- that is, centered on the Sun.

    plane_ring = sp.nvp2pl(vec_pole_j2k, vec_sun_ut)  # Pole position is variable. Point is UT in J2K.

    # Get the point and spanning vectors that define this plane

    # XXX for some reason, these values from pl2psv do not depend on the value of vec_pole_j2k

    (pt_pl, vec1_pl, vec2_pl) = sp.pl2psv(plane_ring)

    # Now take a bunch of linear combinations of these spanning vectors

    # Plot UT's position on the plot

    (st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'New Horizons')
    (_, ra, dec) = sp.recrad(vec_nh_ut)
    (x, y) = wcs.wcs_world2pix(ra*hbt.r2d, dec*hbt.r2d, 0)
    # plt.plot(x, y, marker = 'o', ms = 10, alpha=0.3, color='purple')

    # Set an offset to the WCS values, in case UT is not in the right position (ie, not centered properly)

    dy = 0   # Large value moves up
    dx = 0

    # Draw the ring image

    plt.imshow(stretch(img), origin='lower')

    # Calculate and draw all of the ring points

    for i in range(num_pts):

        angle_azimuth = 2*math.pi * (i / num_pts)   # Put in range 0 .. 2 pi
        vec_i = vec1_pl * math.sin(angle_azimuth) + vec2_pl * math.cos(angle_azimuth)
        vec_i = vec_i * radius_ring

        # Now get the point in space, J2K

        pt_ring_i_j2k = vec_i + vec_sun_ut

        vec_sun_ring_i = pt_ring_i_j2k
        vec_nh_ring_i = vec_sun_ring_i - vec_sun_nh

        (_, ra_i, dec_i) = sp.recrad(vec_nh_ring_i)
        (x, y) = wcs.wcs_world2pix(ra_i*hbt.r2d, dec_i*hbt.r2d, 0)
        plt.plot(x+dx, y+dy, marker='o', ms=1, color='red', alpha=0.15)
        # print(f'{i}, {ra_i*hbt.r2d}, {dec_i*hbt.r2d}, {x}, {y}')

    plt.title(f'ORT4 Superstack, Ring Pole = ({ra_pole*hbt.r2d},{dec_pole*hbt.r2d}) deg')
    plt.show()

    return