Example #1
    def __get_spice_range__(self, filename):
        """
        Function to calculate the range of coverage of a spice file given by filename
        :param filename: String, full path to spice file.
        :return dates_min: Astropy time object giving start of period spanned by spice file
        :return dates_max: Astropy time object giving end of period spanned by spice file
        :return craft_ids: List of craft ids represented by the spice file
        """

        # Ephemeris files:
        if filename.endswith('bsp'):
            # Get craft id's
            craft_ids = spice.spkobj(filename)
            times = []
            for s in craft_ids:
                cover = spice.utils.support_types.SPICEDOUBLE_CELL(2000)
                spice.spkcov(filename, s, cover)
                times.append([c for c in cover])
        # Pointing files
        elif filename.endswith('bc'):
            # Get craft id's
            craft_ids = spice.ckobj(filename)
            times = []
            for s in craft_ids:
                cover = spice.utils.support_types.SPICEDOUBLE_CELL(2000)
                try:
                    print('spice.ckcov: compute segment')
                    spice.ckcov(filename, s, False, 'segment', 0.0, 'TDB',
                                cover)
                except Exception:
                    print('spice.ckcov: compute interval')
                    spice.ckcov(filename, s, False, 'interval', 0.0, 'TDB',
                                cover)

                times.append([c for c in cover])
        else:
            print('Unrecognized file extension: ' + filename.split('.')[-1])
            dates_min = np.nan
            dates_max = np.nan
            craft_ids = np.nan
            return dates_min, dates_max, craft_ids

        # Format the dates.
        min_time = min([min(t) for t in times])
        dates_min = Time(spice.et2utc(min_time, 'ISOC', 3))
        max_time = max([max(t) for t in times])
        dates_max = Time(spice.et2utc(max_time, 'ISOC', 3))
        return dates_min, dates_max, craft_ids
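The ephemeris branch above can also be written as a small standalone check. The sketch below is only illustrative: the kernel paths are placeholders, and a leapseconds kernel is assumed so that et2utc can format the window endpoints.

import spiceypy as spice
from spiceypy.utils.support_types import SPICEDOUBLE_CELL

spice.furnsh('naif0012.tls')            # placeholder leapseconds kernel
spk = 'some_ephemeris.bsp'              # placeholder SPK file

for body in spice.spkobj(spk):          # NAIF IDs covered by the SPK
    cover = SPICEDOUBLE_CELL(2000)
    spice.spkcov(spk, body, cover)      # coverage window(s) for this body
    for i in range(spice.wncard(cover)):
        et0, et1 = spice.wnfetd(cover, i)
        print(body, spice.et2utc(et0, 'ISOC', 3), '->', spice.et2utc(et1, 'ISOC', 3))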
Example #2
def et2str(et):
    "Convert an ephemeris time (seconds after J2000) to a UTC string."
    # see https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/et2utc_c.html
    formatStr = "ISOC"
    prec = 0
    s = spice.et2utc(et, formatStr, prec, lenout=256)
    return s
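A possible usage sketch, assuming a leapseconds kernel has been furnished (the path below is a placeholder):

import spiceypy as spice

spice.furnsh('naif0012.tls')   # placeholder LSK; any current leapseconds kernel works
print(et2str(0.0))             # J2000 epoch, roughly '2000-01-01T11:58:56'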
Example #3
def et2datetime(et):
    """
    Convert an input time from ephemeris seconds past J2000 to a standard Python datetime.
    This is supposed to be included in the SpiceyPy package, but didn't show up in my installation,
    so I copied it here and modified it.

    Parameters
    ----------
    et : float
        Input epoch in ephemeris seconds past J2000.

    Returns
    -------
    Output datetime object in UTC.
    """

    # convert to UTC using ISO calendar format with 6 digits of fractional precision
    result = spice.et2utc(et, 'ISOC', 6)

    # define the ISO calendar format for datetime
    isoformat = '%Y-%m-%dT%H:%M:%S.%f'

    # return the datetime object version of the input ephemeris time
    return datetime.datetime.strptime(result,
                                      isoformat).replace(tzinfo=pytz.utc)
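A short usage sketch, assuming a leapseconds kernel is furnished (placeholder path below). Recent SpiceyPy releases also ship their own spiceypy.et2datetime helper, so check the installed version before copying this one.

import datetime
import pytz
import spiceypy as spice

spice.furnsh('naif0012.tls')    # placeholder LSK path
print(et2datetime(0.0))         # J2000 epoch, roughly 2000-01-01 11:58:55.816 UTC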
Example #4
 def obt2utc(self, obt_string):
     # Obt to Ephemeris time (seconds past J2000)
     ephemeris_time = spiceypy.scs2e(self.solar_orbiter_naif_id, obt_string)
     # Ephemeris time to Utc
     # Format of output epoch: ISOC (ISO Calendar format, UTC)
     # Digits of precision in fractional seconds: 3
     return spiceypy.et2utc(ephemeris_time, "ISOC", 3)
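The same conversion can be sketched without the class wrapper. Kernel names below are placeholders, and -144 is assumed to be the Solar Orbiter NAIF ID held in self.solar_orbiter_naif_id.

import spiceypy

spiceypy.furnsh('naif0012.tls')               # placeholder leapseconds kernel
spiceypy.furnsh('solo_sclk.tsc')              # placeholder Solar Orbiter SCLK kernel
et = spiceypy.scs2e(-144, '625237315:44104')  # sample OBT string, as in Example #23
print(spiceypy.et2utc(et, 'ISOC', 3))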
Example #5
def find_eclipses( ets, a, method = 'either', v = False, vv = False ):
	diff          = np.diff( a )
	idxs          = ECLIPSE_MAP[ method ]
	ecl_entrances = np.where( np.isin( diff, idxs[ 0 ] ) )[ 0 ]
	ecl_exits     = np.where( np.isin( diff, idxs[ 1 ] ) )[ 0 ]

	if len( ecl_entrances ) == 0:
		return {}

	if ecl_entrances[ 0 ] > ecl_exits[ 0 ]:
		ecl_entrances = np.insert( ecl_entrances, 0, 0 )

	if len( ecl_entrances ) > len( ecl_exits ):
		ecl_exits = np.append( ecl_exits, len( ets ) - 1 )

	ecls                = {}
	ecls[ 'idxs' ]      = []
	ecls[ 'ets'  ]      = []
	ecls[ 'durations' ] = []
	for pair in zip( ecl_entrances, ecl_exits ):
		_ets = [ ets[ pair[ 0 ] ], ets[ pair[ 1 ] ] ]
		ecls[ 'idxs'      ].append( pair )
		ecls[ 'ets'       ].append( _ets )
		ecls[ 'durations' ].append( _ets[ 1 ] - _ets[ 0 ] )

	ecls[ 'total_time' ] = sum( ecls[ 'durations' ] )
	ecls[ 'max_time'   ] = max( ecls[ 'durations' ] )
	ecls[ 'ratio'      ] = ecls[ 'total_time' ] / ( ets[ -1 ] - ets[ 0 ] )

	if v or vv:
		print( '\n******** ECLIPSE SUMMARY START ********' )
		print( f'Number of eclipses: {len(ecls["idxs"])}' )
		print( 'Eclipse durations (seconds): ', end = '' )
		print( [ float(f'{a:.2f}') for a in ecls[ "durations" ] ] )
		print( f'Max eclipse duration: {ecls["max_time"]:.2f} seconds' )
		print( f'Eclipse time ratio: {ecls["ratio"]:.3f}' )
		if vv:
			print( 'Eclipse entrances and exits:' )
			for n in range( len( ecls[ 'ets' ] ) ):
				print(
					spice.et2utc( ecls[ 'ets' ][ n ][ 0 ], 'C', 1 ),
					'-->',
					spice.et2utc( ecls[ 'ets' ][ n ][ 1 ], 'C', 1 )
				)
		print( '******** ECLIPSE SUMMARY END ********\n' )

	return ecls
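find_eclipses depends on a module-level ECLIPSE_MAP that is not shown in this snippet. Purely as an illustration, if the eclipse array a coded 0 for no eclipse, 1 for penumbra and 2 for umbra, the 'either' entry and a call might look like this:

# Hypothetical mapping: np.diff(a) values that mark eclipse entrances / exits,
# assuming a[i] is 0 (no eclipse), 1 (penumbra) or 2 (umbra).
ECLIPSE_MAP = {
    'either': [ [ 1, 2 ], [ -1, -2 ] ],
}

# ets and eclipse_codes would come from a propagated trajectory and an
# eclipse-classification routine evaluated at the same epochs.
ecls = find_eclipses( ets, eclipse_codes, method = 'either', v = True )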
Example #6
def Calculate_SCoords(run):

    #setup
    import spiceypy as spice
    import numpy as np
    import math
    spice.furnsh("./MetDat/MoonMetdat.txt")

    #import ~_setup.txt and SPK (~.bsp) file
    slines = []
    with open(run + '_setup.txt') as f:
        slines = f.read().splitlines()
    spice.furnsh(run + '.bsp')

    #get TLE_SPK_OBJ_ID and et (time, seconds past J2000) from TLE File
    obj_id = slines[5].split('=')[1]
    et = float(slines[28].split('=')[1])
    print('')
    #read out date as yyyy mmm dd hr:min:sec.millisecond
    print('\n', spice.et2utc(et, 'C', 3))

    #Calculate sub-observer point and distance
    state = spice.spkezr(obj_id, et, "MOON_PA", "LT+S", "Moon")
    s_obs = spice.reclat(state[0][0:3])
    print('\nSub-Observer Point:')
    print(' Sat-Moon distance: ', s_obs[0], 'km')
    print(' Satellite sub-long: ', s_obs[1] * 180 / math.pi, 'deg')
    print(' Satellite sub-lat:  ', s_obs[2] * 180 / math.pi, 'deg')

    #Calculate sub-Earth point and distance
    state = spice.spkezr("Earth", et, "MOON_PA", "LT+S", "Moon")
    s_eat = spice.reclat(state[0][0:3])
    print('\nSub-Earth Point:')
    print(' Earth-Moon distance: ', s_eat[0], 'km')
    print(' Earth sub-long: ', s_eat[1] * 180 / math.pi, 'deg')
    print(' Earth sub-lat:  ', s_eat[2] * 180 / math.pi, 'deg')

    #Calculate sub-Sun point and distance
    state = spice.spkezr("Sun", et, "MOON_PA", "LT+S", "Moon")
    s_sun = spice.reclat(state[0][0:3])
    print('\nSub-Sun Point:')
    print(' Sun-Moon distance: ', s_sun[0], 'km')
    print(' Sun sub-long: ', 90 - s_sun[1] * 180 / math.pi, 'deg')
    print(' Sun sub-lat:  ', s_sun[2] * 180 / math.pi, 'deg\n')

    #Writes selenographic coordinates to a file named 'run'+_spoints.txt
    with open(run + '_spoints.txt', 'w') as f:
        f.write('' + '\n#Sub-Observer Point:' + '\n\t Sat-Moon distance: ' +
                str(s_obs[0]) + '\n\t slong: ' +
                str(s_obs[1] * 180 / math.pi) + '\n\t slat: ' +
                str(s_obs[2] * 180 / math.pi) + '\n\n#Sub-Earth Point:' +
                '\n\t Earth-Moon distance: ' + str(s_eat[0]) + '\n\t slong: ' +
                str(s_eat[1] * 180 / math.pi) + '\n\t slat: ' +
                str(s_eat[2] * 180 / math.pi) + '\n\n#Sub-Sun Point:' +
                '\n\t Sun-Moon distance: ' + str(s_sun[0]) + '\n\t slong: ' +
                str(90 - s_sun[1] * 180 / math.pi) + '\n\t slat: ' +
                str(s_sun[2] * 180 / math.pi))

    return [slines[21].split('=')[1][1:], et, s_obs, s_eat, s_sun]
Example #7
def write_positions(
    utc_start,
    utc_end,
    steps,
):
    """
    Write positions of earth and moon in output file.
    Input:
        -utc_start              str (format YYYYmmdd)
        -utc_end                str (format YYYYmmdd)
        -steps                  int
    """

    # read kernel files paths and some constants from config
    dark_side_path = os.path.dirname(
        os.path.dirname(os.path.realpath(__file__)))
    config = configparser.ConfigParser()
    config.read(os.path.join(dark_side_path, "config", "config.ini"))
    spk_kernel = config["spice"]["spk_kernel"]
    lsk_kernel = config["spice"]["lsk_kernel"]
    reference_frame = config["spice"]["reference_frame"]
    aberration_correction = config["spice"]["aberration_correction"]

    # load kernels
    spice.furnsh(spk_kernel)
    spice.furnsh(lsk_kernel)

    # compute ET times
    et_start = spice.str2et(format_date_for_spice(utc_start))
    et_end = spice.str2et(format_date_for_spice(utc_end))
    times = [x * (et_end - et_start) / steps + et_start for x in range(steps)]

    # load positions
    earth_positions, _ = spice.spkpos("EARTH", times, reference_frame,
                                      aberration_correction, "SUN")
    moon_positions, _ = spice.spkpos("MOON", times, reference_frame,
                                     aberration_correction, "EARTH")

    # create output dir
    output_dir = os.path.join(
        dark_side_path,
        "data",
    )
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # write results in output files
    write_output_file(os.path.join(output_dir, "earth.txt"), [
        "{:15.8f}, {:15.8f}, {:15.8f}".format(pos[0], pos[1], pos[2])
        for pos in earth_positions
    ])
    write_output_file(os.path.join(output_dir, "moon.txt"), [
        "{:15.8f}, {:15.8f}, {:15.8f}".format(pos[0], pos[1], pos[2])
        for pos in moon_positions
    ])
    write_output_file(os.path.join(output_dir, "times.txt"), [
        "{}".format(format_date_from_spice(spice.et2utc(time, "C", 0)))
        for time in times
    ])
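write_positions calls two helpers, format_date_for_spice and format_date_from_spice, that are not shown here. The stand-ins below are guesses consistent with the call sites (YYYYmmdd strings in, et2utc 'C'-format calendar strings back out), not the project's actual implementations.

import datetime

def format_date_for_spice(yyyymmdd):
    # '20230115' -> '2023-01-15', which spice.str2et parses directly
    return '{}-{}-{}'.format(yyyymmdd[0:4], yyyymmdd[4:6], yyyymmdd[6:8])

def format_date_from_spice(utc_cal):
    # '2023 JAN 15 00:00:00' (et2utc 'C' format, precision 0) -> '20230115'
    return datetime.datetime.strptime(utc_cal, '%Y %b %d %H:%M:%S').strftime('%Y%m%d')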
Example #8
def run(mk, time_start='', time_finish='', step=60, target='MERCURY',
        frame='', sensor='MPO_MERTIS_TIR_PLANET', pixel_line='',
        pixel_sample='', observer='MPO'):

    spiceypy.furnsh(mk)

    target = target.upper()
    if not time_start: time_start = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
    if not frame: frame = f'IAU_{target}'
    if not time_finish: time_finish = time_start
    if not pixel_sample: pixel_sample = np.floor(ccd_center(sensor)[0])
    if not pixel_line: pixel_line = np.floor(ccd_center(sensor)[1])

    if pixel_sample == 'all':
        pixel_sample = np.arange(1, pixel_samples(sensor), 1)
    else:
        pixel_sample = [pixel_sample]
    if pixel_line == 'all':
        pixel_line = np.arange(1, pixel_lines(sensor), 1)
    else:
        pixel_line = [pixel_line]


    et_start = spiceypy.utc2et(time_start)
    et_finish = spiceypy.utc2et(time_finish)

    if et_start != et_finish:
        interval = np.arange(et_start, et_finish, step)
    else:
        interval = [et_start]

    # Time tag [UTC]
    # pixel id [(x,y)]
    # corner id [(x,y)]

    # Requested geometry

    # lat lon intersection (planetocentric)
    # lat lon subspacecraft
    # lat lon subsolar
    # target distance intersection
    # target angular diameter
    # local solar time intersection
    # phase angle intersection
    # emission angle intersection
    # incidence angle intersection

    with open('spice4mertis.csv', 'w') as o:
        o.write('utc,et,pixlin,pixsam,tarlon,tarlat,sublon,sublat,sunlon,sunlat,tardis,tarang,ltime,phase,emissn,incdnc\n')
        for et in interval:
            utc = spiceypy.et2utc(et, 'ISOC', 3)
            for line in pixel_line:
                for sample in pixel_sample:
                    pixelGeometry = pixel_geometry(et, sensor, line, sample, target, frame, observer=observer)
                    print(utc,line,sample,str(pixelGeometry)[1:-1].replace(',',' '))
                    o.write(f'{utc},{et},{line},{sample},{str(pixelGeometry)[1:-1].replace(" ","")}\n')
    return
Example #9
0
 def toUTC(self, sclk, fmt='ISOC', prec=5):
     '''Convert Spacecraft clock time to UTC'''
     if not self.loaded:
         self.load()
     if isinstance(sclk, (int, float)):
         sclk = '1/%f' % sclk
     elif '_' in sclk:
         sclk = '1/' + sclk.split('_')[0]
     return spice.et2utc(spice.scs2e(self.sc, sclk), fmt, prec)
Example #10
def et_to_utc(et):
    """Summary
    convert SPICE epoch in Ephemerides seconds (TDB) to a
    UTC time string.

    Args:
        et (float): SPICE epoch in Ephemerides seconds (TDB)

    Returns:
        string: UTC time
    """
    return spice.et2utc(et, 'ISOC', 3, 30)
Example #11
    def CA(self, utc_0, utc_1, step=120, abcorr='CN+S'):
        '''Search close approach time and distance'''
        et_0 = spice.str2et(utc_0)
        et_1 = spice.str2et(utc_1)
        times = [x * (et_1-et_0)/step + et_0 for x in range(step)]
        pos, _ = spice.spkpos(self.targ, times, self.ref, abcorr, self.obs)
        dists = np.sqrt(np.sum(np.power(pos, 2), 1))

        # Find the smallest distance
        ca_i = np.argmin(dists)
        ca_et = times[ca_i]
        ca_dist = dists[ca_i]
        return spice.et2utc(ca_et, 'ISOC', 5), ca_dist
Example #12
def position_spice(xfunc, tais, **kwargs):
    res = {}
    try:
        for tai in tais:
            et = sp.unitim(tai, 'tai', 'et')
            pos = sp.spkezp(kwargs['target'], et, kwargs['ref'], kwargs['abcorr'], kwargs['observer'])[0]
            utc = sp.et2utc(et, 'isoc', 3)

            res[utc] = xfunc(pos)
    except sp.stypes.SpiceyError as ex:
        return ex.value
    else:
        return res
Example #13
def spiceet_to_utcstr(time, fmt='ISOD', precision=5):
    """Time conversion function"""

    length = 22
    if fmt == 'C':
        length = 22
    if not np.isfinite(time):
        raise ValueError("Supplied ET is not finite")

    if fmt == 'CAL':
        raise RuntimeError("Don't use spiceypy.etcal - this is a bad idea.")
        # return spiceypy.etcal(time)
    return spiceypy.et2utc(time, fmt, precision, length + precision)
Example #14
def state_spice(xfunc, tais, **kwargs):
    res = {}
    try:
        for tai in tais:
            et = sp.unitim(tai, 'tai', 'et')
            sta = sp.spkez(kwargs['target'], et, kwargs['ref'], kwargs['abcorr'], kwargs['observer'])[0]
            utc = sp.et2utc(et, 'isoc', 3)

            res[utc] = [xfunc(sta[0:3]), sta[3:6]]
    except sp.stypes.SpiceyError as ex:
        return ex.value
    else:
        return res
Example #15
def xform_spice(xfunc, tais, **kwargs):
    res = {}
    try:
        for tai in tais:
            et = sp.unitim(tai, 'tai', 'et')
            mat = sp.pxform(kwargs['from_ref'], kwargs['to_ref'], et)
            utc = sp.et2utc(et, 'isoc', 3)

            res[utc] = xfunc(mat)
    except sp.stypes.SpiceyError as ex:
        return ex.value
    else:
        return res
Example #16
def utc2scs_spice(tais, sc):
    res = {}
    try:
        scid = sp.bods2c(sc)
        for tai in tais:
            et = sp.unitim(tai, 'tai', 'et')
            obt = sp.sce2s(scid, et)
            utc = sp.et2utc(et, 'isoc', 3)

            res[utc] = obt
    except sp.stypes.SpiceyError as ex:
        raise GeometrySpiceError(ex.value)
    else:
        return res
Example #17
def delambert(img_data, start_date, interframe_delay, verbose=False):
    height = img_data.shape[0]

    camera_red = junocam.Camera.get_camera(junocam.JUNO_JUNOCAM_RED)
    camera_green = junocam.Camera.get_camera(junocam.JUNO_JUNOCAM_GREEN)
    camera_blue = junocam.Camera.get_camera(junocam.JUNO_JUNOCAM_BLUE)
    et_start = spice.str2et(start_date)

    light_data = np.ones(img_data.shape)

    num_exposures = height // junocam.JUNOCAM_STRIP_HEIGHT // 3  # integer division so range() below accepts it
    if verbose:
        print_r("Number of triplets (exposures):", num_exposures)

    start_time_bias = 0.06188
    interframe_delay_bias = 0.0010347187388880883

    for exposure in range(0, num_exposures):
        #for exposure in range(8, 12):
        #if True:
        #    exposure = 7
        frame_0 = exposure * 3
        frame_1 = exposure * 3 + 1
        frame_2 = exposure * 3 + 2

        et = et_start + start_time_bias + (exposure) * (interframe_delay +
                                                        interframe_delay_bias)

        if verbose:
            utcstr = spice.et2utc(et, "C", 3)
            print_r("Processing triplet", (exposure + 1), "of", num_exposures,
                    "on date/time:", utcstr)

        if verbose:
            print_r("   Exposure #", (exposure + 1), ", Blue...")
        calc_light_for_band(light_data, et, frame_0, camera_blue)

        if verbose:
            print_r("   Exposure #", (exposure + 1), ", Green...")
        calc_light_for_band(light_data, et, frame_1, camera_green)

        if verbose:
            print_r("   Exposure #", (exposure + 1), ", Red...")
        calc_light_for_band(light_data, et, frame_2, camera_red)

    #light_data = 1.0 - light_data
    img_data[:] = light_data
    #img_data *= light_data
    return img_data
Example #18
def et2jd(et):
    """Ephemeris time to Julian date, UTC.

    Parameters
    ----------
    et : float
      Ephemeris time.

    Returns
    -------
    jd : string
      The Julian date, UTC.

    """

    return spice.et2utc(et, "J", 14)[3:]
Example #19
def et2cal(time, format='UTC', support_ker=False, unload=False):
    """
    Converts Ephemeris Time (ET) into UTC or Calendar TDB (CAL) time. Accepts
    a single time or a list of times. This function assumes that the support
    kernels (meta-kernel or leapseconds kernel) have been loaded.

    :param time: Input ET time
    :type time: Union[float, list]
    :param format: Desired output format; 'UTC' or 'CAL'
    :type format: str
    :param support_ker: Optional meta-kernel or leapseconds kernel to load
    :type support_ker: str
    :param unload: If True it will unload the input meta-kernel
    :type unload: bool
    :return: Output time in 'UTC', 'CAL' or 'TDB'
    :rtype: Union[str, list]
    """
    timlen = 62
    out_list = []

    if support_ker:
        spiceypy.furnsh(support_ker)

    if isinstance(time, float) or isinstance(time, str):
        time = [time]

    for element in time:

        if format == 'UTC':
            out_elm = spiceypy.et2utc(element, 'ISOC', 3)

        elif format == 'CAL':
            out_elm = spiceypy.timout(element, "YYYY-MM-DDTHR:MN:SC.###::TDB",
                                      timlen)
        else:
            out_elm = element

        out_list.append(out_elm)

    if len(out_list) == 1:
        out_time = out_list[0]
    else:
        out_time = out_list

    if unload:
        spiceypy.unload(support_ker)

    return out_time
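A possible usage sketch. It assumes a leapseconds kernel is already furnished (or passed via support_ker); the path is a placeholder.

import spiceypy
spiceypy.furnsh('naif0012.tls')               # placeholder LSK, or pass it as support_ker
print(et2cal(0.0))                            # roughly '2000-01-01T11:58:55.816' (UTC)
print(et2cal([0.0, 86400.0], format='CAL'))   # TDB calendar strings, e.g. '2000-01-01T12:00:00.000'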
Example #20
    def __ClockDrift(self, enddate=False):

        if self.name != 'MPO':
            sclk_start = 0.0
            sclk_end = 500000000
        else:
            sclk_start = 3.9631239807361E+13/65536
            sclk_end = 700000000

        #sclk_end = spiceypy.gdpool('SCLK_PARTITION_END_{}'.format(str(-1*self.id)),0,1000)[0]/65536


        step = 10000.0

        if not enddate:
            et_end = self.time.getTime('finish','utc')
        else:
            et_end = spiceypy.utc2et(enddate)

        sclk = []
        ephtime = []
        utctime = []

        for i in np.arange(sclk_start, sclk_end, step):
            sclk.append(i)

            sclkdp = i*65536
            et = spiceypy.sct2e(self.id, sclkdp)
            ephtime.append(et)

            utcstr = spiceypy.et2utc(et, 'C', 3)
            utctime.append(utcstr)


        dates = []
        drift = []
        for j in range(0,len(ephtime),1):
            if ephtime[j] >= et_end:
                break
            drift.append((sclk[j]-sclk_start) - (ephtime[j] - ephtime[0]))
            dates.append(ephtime[j])

        self.clock_dates = dates
        self.clock_drift = drift

        return
Example #21
def scs2utc_spice(scs, sc, deltat):
    res = {}
    try:
        scid = sp.bods2c(sc)
        et = sp.scs2e(scid, scs)

        if deltat is not None:
            tai = sp.unitim(et, 'et', 'tai')
            tai += deltat
            et = sp.unitim(tai, 'tai', 'et')

        utc = sp.et2utc(et, 'isoc', 3)

        res[scs] = utc
    except sp.stypes.SpiceyError as ex:
        raise GeometrySpiceError(ex.value)
    else:
        return res
Example #22
def met2utc(met_in, name_observer = 'NEW HORIZONS'):

#     met_in = 299348928.9358144
#     name_observer = 'New Horizons'

  if (name_observer.upper().find('NEW HORIZONS') == -1):
    print('MET can be used only for New Horizons')
    return

# Convert input to an array, even if it is not

  if hbt.is_array(met_in):
    met = np.array(met_in)
  else:
    met = np.array([met_in])

# If there are any commas in the MET, then remove them

  if (type(met[0]) == str):
    met = np.zeros(np.shape(met_in))
    for i,met_i in enumerate(met_in):
      met[i] = float(met_in[i].replace(',', ''))

  sclk_ticks = np.array(met * 5e4)  # Have to include np.array() -- not sure why. I guess a 1x1 np-array is demoted??
  ntime = np.size(met_in)     # Number of elements
  et  = np.zeros(ntime) 
  utc = np.zeros(ntime, dtype = 'U30')

  for i in range(ntime):   # iterate over every element (np.size(ntime) would always be 1)
     et[i] = sp.sct2e(-98, sclk_ticks[i])
     utc[i] = sp.et2utc(et[i], 'C', 3)
#        utc[i] = sp_et2utc, et_i, 'ISOD', 3, utc_i
  
  if (ntime == 1):
    utc = utc[0]

  return utc
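A usage sketch; it assumes the project's hbt helper module is importable and that a leapseconds kernel and the New Horizons SCLK kernel have been furnished (kernel names below are placeholders).

import spiceypy as sp

sp.furnsh('naif0012.tls')           # placeholder LSK
sp.furnsh('new_horizons.tsc')       # placeholder NH spacecraft clock kernel
print(met2utc(299348928.9358144))   # the sample MET from the commented-out test values above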
Example #23
    def scet_to_utc(self, scet):
        """
        Convert SCET to UTC time string in ISO format.

        Parameters
        ----------
        scet : `str`, `int`, `float`
            SCET time as a number or spacecraft clock string e.g. `1.0` or `625237315:44104`

        Returns
        -------
        `str`
            UTC time string in ISO format
        """
        # Obt to Ephemeris time (seconds past J2000)
        ephemeris_time = None
        if isinstance(scet, (float, int)):
            ephemeris_time = spiceypy.sct2e(SOLAR_ORBITER_ID, scet)
        elif isinstance(scet, str):
            ephemeris_time = spiceypy.scs2e(SOLAR_ORBITER_ID, scet)
        # Ephemeris time to Utc
        # Format of output epoch: ISOC (ISO Calendar format, UTC)
        # Digits of precision in fractional seconds: 3
        return spiceypy.et2utc(ephemeris_time, "ISOC", 3)
Example #24
    def __init__(self, inst, et, abcorr, obsrvr, width, height, mag_limit):
        # input parameter
        self.inst = inst
        self.et = et
        self.abcorr = abcorr
        self.obsrvr = obsrvr
        self.width = width
        self.height = height

        # parameters equivalent to input parameter
        self.date = spice.et2utc(et, "ISOC", 3)
        self.inst_id = spice.bodn2c(inst)

        # Instrument FOV
        self.fov = Fov(self.inst_id)
        self.fov_in_degrees = self.fov.fovmax * 2.0 * spice.dpr()

        # geometry information
        self.pos, _ = spice.spkpos(obsrvr, et, self.fov.frame, abcorr, "SUN")
        self.obs2refmtx = spice.pxform(self.fov.frame, "J2000", et)
        self.ref2obsmtx = spice.pxform("J2000", self.fov.frame, et)

        # screen information
        pos_angle, angle_res, ra, dec = get_geometry_info(
            self.obs2refmtx, self.fov, width, height
        )

        self.center = self.fov.bounds_rect.center_vec
        self.pos_angle = pos_angle
        self.angle_res = angle_res
        self.ra = ra
        self.dec = dec

        # searched objects
        self.solar_objects = search_solar_objects(self)
        self.stars = search_stars(self, mag_limit)
Example #25
      else:
          dt_s = float(file['ET']) - t0
      
      m, s = divmod(dt_s, 60)
      h, m = divmod(m, 60)
      dt_str = "{:3d}h {:2d}m {:2d}s".format(int(h), int(m), int(s))
      if (dt_s == 0): dt_str = '--'
      dt_str = dt_str.replace(' 0h', '').replace(' 0m', '')
      t0 = float(file['ET'])
      
      # Create a super-short version of the filename (cut out the ApID)
      
      file_trunc = file['Shortname'].replace('lor_', '').replace('_0x630_sci', '').\
        replace('_0x633_sci', '').replace('_opnav', '').replace('.fit', '')
      
      utc_trunc = sp.et2utc(sp.utc2et(file['UTC']),'C', 0)
      
      # Print a line of the table, to the screen and the file
      
      line =  "{:>3}/{:<3}: {:>3}, {:>3},  {},   {},   {},  {:6.3f},{:>12s},   {:.1f} deg, {:<9}".format(int(i), 
                                                     int(num_files), int(i_group), 
                                                     int(i_file), file_trunc, file['Format'], utc_trunc, 
                                                     file['Exptime'], (dt_str), file['Phase']*hbt.r2d, file['Target'])
      print(line)
      lines_out.append(line)
      
      arr = hbt.read_lorri(file['Filename'], bg_method = 'Polynomial', bg_argument = 4, frac_clip = 1)
      
      arr = hbt.remove_brightest(arr, 0.99)
      arr = -hbt.remove_brightest(-arr, 0.99)
Example #26
from mpl_toolkits.mplot3d import Axes3D

method = "Ellipsoid"
target = "MARS"
fixref = "IAU_MARS"
abcorr = "None"
obsrvr = "SUN"

spoint = sp.latrec(3390, -24.6 / sp.dpr(), 18.3 / sp.dpr())
et = sp.utc2et("2003 OCT 28, 00:00:00")

step = 60

for loop in range(100):
    time = et + step * loop
    time_string = sp.et2utc(time, "C", 0)
    sza = sp.ilumin(method, target, time, fixref, abcorr, obsrvr,
                    spoint)[3] * sp.dpr()


def writeLog(file_name, lines_to_write):
    """function to append log file"""
    #    global LOG_PATHS
    logFile = open(file_name + ".csv", 'w')
    for line_to_write in lines_to_write:
        logFile.write(line_to_write + '\n')
    logFile.close()


#    print(line_to_write)
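The loop above computes sza but never stores it. A hedged sketch of how the values might be collected and written out with the writeLog helper defined just above, reusing the same (already set up) variables and kernels:

lines_to_write = []
for loop in range(100):
    time = et + step * loop
    time_string = sp.et2utc(time, "C", 0)
    sza = sp.ilumin(method, target, time, fixref, abcorr, obsrvr,
                    spoint)[3] * sp.dpr()      # solar incidence angle at spoint, in degrees
    lines_to_write.append("{},{:.3f}".format(time_string, sza))

writeLog("sza_log", lines_to_write)            # writes sza_log.csv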
Example #27
column_names = [
    'Date', 'a[au]', 'e[]', 'i[rad]', 'node[rad]', 'omega[rad]', 'mA[rad]'
]

utc = ['Jan 1, 2025', 'Dec 31,2025']
FirstDay = sp.str2et(utc[0])
LastDay = sp.str2et(utc[1])
with open('new_earth_impactors.csv', 'w', newline='') as file:
    writer = csv.writer(file)
    writer.writerow(column_names)

for i in range(100):

    time = getEarthTime(rad(gfd(i, 'trueAnomaly1[deg]', df)))
    T0 = time + FirstDay
    JD_Time_earth = sp.et2utc(time + FirstDay, 'J',
                              14)  #Julian Date at this particular true anomaly
    C_Time_earth = sp.et2utc(time + FirstDay, 'C', 14)
    #print(JD_Time) #Time where the earth is at the specifie
    pos_vel_earth = convertEarth(
        sp.spkezr('EARTH', time + FirstDay, 'J2000', 'LT+S', 'SUN')
    )  #will find the position and velocity of earth at specific ephermeris w/ respect to the sun
    pos_vel_km = sp.spkezr('EARTH', time + FirstDay, 'J2000', 'LT+S', 'SUN')

    et = 0
    a = gfd(i, ' a[au]', df) * 1.496e+8
    e = gfd(i, 'e[]', df)
    inc = rad(gfd(i, 'i[deg]', df))
    node = rad(gfd(i, 'node[deg]', df))
    omega = rad(gfd(i, 'w[deg]', df))
    M_A = toMean(rad(gfd(i, 'trueAnomaly2[deg]', df)), e)
    elts_ast = np.array([a, e, inc, node, omega, M_A, et, Gm])
Example #28
def get_object_positions(ref_frame, targets, curVizJd, curVizJdDelta,
                         tailLenJd, validSeconds):
    """
    aether-rest-server.py -- get_object_positions
        This function serves position and time data to the frontend for visualization. It is called when the frontend
        makes a GET request to the above URL using specified params. This function can return data for either a single
        body/target or multiple. In the case of multiple targets, they should be passed as either names or NAIF IDs
        separated by a '+' (e.g. "sun+earth+499+jupiter+europa+pluto+904"). Case does not matter since each target is
        converted to lower case within this function.

        Main idea: The frontend needs position data both before and after the current simulation time. It needs data
            before the current sim time because trajectory tails must be drawn. It needs data after the current sim time
            so that the frontend has some data for the future and does not need to make API calls constantly. curVizJd
            represents the initial time, curVizJdDelta specifies the granularity in JD between each position coordinate,
            tailLenJd specifies the amount of data to gather before curVizJd (also in JD), validSeconds specifies the
            amount of data to gather past curVizJd. validSeconds assumes a framerate of 60 fps, and uses that, along
            with curVizJdDelta to determine how many positions past curVizJd to obtain.

    Params: ref_frame <str> -- the name or NAIF ID of the observing body for which target positions reference. This
                basically represents the origin of the coordinate system on the frontend (e.g. solar system barycenter,
                mars barycenter, 0, earth, etc.)
            targets <str> -- names or NAIF IDs of the desired targets for which position data will be obtained. This may
                be either a single target, or multiple separated by '+' signs. See above for an example.
            curVizJd <str> -- a time in Julian days (often a float) representing an initial time. This is often the
                current time in the frontend visualization. The beginning and ending times are calculated based on this
                value as well as curVizJdDelta, tailLenJd and validSeconds.
            curVizJdDelta <str> -- a time delta in Julian days (often a float) representing the granularity between each
                position coordinate. Initially this was meant to be the rate of time on the frontend (the time between
                ticks, which usually occur 60 times per second, or whatever the fps of the viz is). However, to avoid
                making a new API call every time a user changes the rate of time, the frontend simply changes the rate
                at which the positions list is traversed.
            tailLenJd <str> -- the length of the trajectory tail (amount of JD prior to curVizJd). This determines the
                beginning positions/times in the returned data. See Main idea above for more info.
            validSeconds <int> -- the amount of position data to gather past curVizJd. Higher values provide more future
                data, so the frontend won't need to update as much, and vice versa. See Main idea above for more detail.

    Returns: Flask Response object with a dictionary containing position lists (x, y, z) w.r.t. the specified observer,
        times corresponding to each position, and the current index (index of lists for curVizJd) for each target.
    """

    # convert string of targets into a list -- ensure lower case for consistency
    targets_list = [target.lower() for target in targets.split('+')]

    # convert ref frame to lower case for consistency
    ref_frame = ref_frame.lower()

    # check to make sure the reference frame is valid
    if not aether_bodies.isValidRefFrame(
            ref_frame):  #ref_frame not in valid_targets:
        return returnResponse(
            {'error': '{} is not a valid reference frame.'.format(ref_frame)},
            400)

    # check to make sure all targets are valid
    for target in targets_list:
        if (not aether_bodies.isValidID(target)) and (
                not aether_bodies.isValidName(target)):
            return returnResponse(
                {'error': '{} is not a known target.'.format(target)}, 401)

    # convert all JD string arguments into floats... maybe they could be specified as floats instead...
    try:
        curVizJd = float(curVizJd)
        curVizJdDelta = float(curVizJdDelta)
        tailLenJd = float(tailLenJd)
    except ValueError:
        return returnResponse(
            {
                'error':
                'curVizJd, curVizJdDelta, tailLenJd must all be floats.'
            }, 402)

    # assume 60 fps as an upper bound -- this ensures that the data is valid for at least validSeconds
    # validSeconds specifies the amount of real time the returned data will be valid for in the frontend.
    # This has changed slightly, see the docstring for details...
    jd_end = curVizJd + (curVizJdDelta * 60 * validSeconds)

    # subtract tail len from cur JD to get the start time in JD
    jd_start = curVizJd - tailLenJd

    # ensure jd_end is a multiple of the current JD delta
    if (
        (tailLenJd / curVizJdDelta) % 1
    ) > 0.0001:  # value is small enough to account for round off error in most cases
        return returnResponse(
            {'error': 'tailLenJd must be evenly divisible by curVizJdDelta.'},
            403)

    # Convert back to string and add 'jd ' to the front for SPICE
    startDate = 'jd ' + str(jd_start)
    # endDate = 'jd ' + str(jd_end)

    # ----- REMEMBER: ET (ephemeris time) is simply seconds past J2000 epoch. J2000 epoch is JD 2451545.0 -----

    # calculate ET times from date/time strings
    try:
        etStart = spice.str2et(startDate)
        etDelta = curVizJdDelta * 86400  # Since JD is in days and ET is in seconds, simply multiply by seconds in a day
    except Exception as error:

        return returnResponse({'error': error}, 405)

    # calculate the number of necessary steps...
    total_steps = round((jd_end - jd_start) / curVizJdDelta)
    cur_idx = round((curVizJd - jd_start) / curVizJdDelta)
    times = [etStart + (etDelta * x) for x in range(total_steps + 1)]

    # DEBUGGING
    # if etStart not in times:
    #     print("Times list does not contain etStart")
    # if spice.str2et('jd ' + str(curVizJd)) not in times:
    #     print("Times list does not contain curVizJd")
    # if etEnd not in times:
    #     print("Times list does not contain etEnd")

    # empty dictionary to hold return data
    response_data = dict()

    # gather data for each target
    for i, target in enumerate(targets_list):

        # second variable returned is light times, which we may disregard for this purpose
        target_positions, _ = spice.spkpos(target, times, 'J2000', 'NONE',
                                           ref_frame)

        response_data[target] = {
            'info':
            'Positions (x,y,z) and times (JD) of {} w.r.t. {}'.format(
                target.capitalize(), ref_frame.capitalize()),
            'positions': [coord.tolist() for coord in target_positions
                          ],  # target_positions is a numpy.ndarray
            'times':
            [float(spice.et2utc(etTime, "J", 8)[3:]) for etTime in times],
            # convert times to JD, slice off "JD " and convert to float
            'cur_time_idx':
            cur_idx
        }

    # return the response to the frontend -- 200 code for success
    return returnResponse(response_data, 200)
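As a sanity check of the JD/ET relationship noted in the comments above (the J2000 epoch is JD 2451545.0 TDB and ET counts seconds from it), assuming a leapseconds kernel is furnished (placeholder path):

import spiceypy as spice

spice.furnsh('naif0012.tls')             # placeholder LSK
print(spice.str2et('jd 2451545.0 tdb'))  # 0.0 -- the J2000 epoch itself
print(spice.str2et('jd 2451545.0'))      # about 64.184 -- the same JD interpreted as UTC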
Example #29
def create_backplanes_fits(file_in, name_target, frame, name_observer, file_out,
                              do_clobber = False,
                              do_verbose = False,
                              do_plot    = False):    
    """
    Function to create backplanes and add them to a FITS file.
    
    Idea is that this should be runnable as a commandline script.
    
    SPICE must already be running.
    
    Parameters
    ----
    
    file_in:
        Input FITS file. Should be fully navigated and have good header info (correct WCS, etc)
    name_target:
        String. Name of central body of the backplanes.
    frame:
        The SPICE frame to be used for the backplane. Usually IAU_JUPITER, IAU_PLUTO, etc.
    name_observer:
        String. Name of observer. Usually 'New Horizons'
    file_out:
        Output FITS file to be written. If None, then no file is written.
    do_clobber:
        Boolean. Overwrite the output file?
    do_verbose:
        Boolean. 
    
    Output
    ----    
    
    All output is written to the specified file. Nothing is output to the user.
    
    Return status
    ----
        0 or 1. 0 = no problems.
        
    """


# Initialize the output file, and check if it exists
    
    if not(file_out):
        file_out = file_in.replace('.fit', '_backplaned.fit')   # Works for both .fit and .fits
    
    # Check the mod times. If there's a new input file, then we want to regenerate the output
    
    if os.path.exists(file_out):
        time_file_out = os.path.getmtime(file_out)
    else:
        time_file_out = -1
    if os.path.exists(file_in):
        time_file_in  = os.path.getmtime(file_in)
    else:
        time_file_in = 0
    
    
    if os.path.exists(file_out) and not(do_clobber) and (time_file_out > time_file_in):
        raise(FileExistsError)
        
# Load the input image
        
    hdu    = fits.open(file_in) 
    header = hdu['PRIMARY'].header

# Grab info from header

    et      = header['SPCSCET']
    utc     = sp.et2utc(et, 'C', 0)
    w       = WCS(header)

# =============================================================================
# Call a routine to actually create the backplanes, which returns them as a tuple.
# =============================================================================

    (planes, descs) = compute_backplanes(file_in, name_target, frame, name_observer)
      
# =============================================================================
# Now write everything to a new FITS file. 
# =============================================================================
    
    # Open the existing file
    
    hdu = fits.open(file_in)
    
    if (do_verbose):
        print("Read: {}".format(file_in))
        
    # Go thru all of the new backplanes, and add them one by one. For each, create an ImageHDU, and then add it.
    
    for key in planes.keys():
        hdu_new = fits.ImageHDU(data=planes[key].astype(np.float32), name=key, header=None)
        hdu.append(hdu_new)
    
    # Add relevant header info
    
    keys = list(planes.keys())
    for i,desc in enumerate(descs):
        hdu[0].header['BKPLN_{}'.format(i)] = "{}: {}".format(keys[i], desc)
    
    hdu[0].header['BKPLNFRM'] = (frame,       'Name of SPICE frame used for backplanes')
    hdu[0].header['BKPLNTRG'] = (name_target, 'Name of SPICE target used for backplanes')

    # Add a comment / divider. Not working, not sure why.
    
#    hdu[0].header['COMMENT'] = '*********************************************************'
#    hdu[0].header['COMMENT'] = '*** BACKPLANE INFO                                    ***'
#    hdu[0].header['COMMENT'] = '*********************************************************'
#    
    # Create the new directory, if it doesn't exist
    
    dir_out = os.path.dirname(file_out)
    
    if not(os.path.isdir(dir_out)):
        os.makedirs(dir_out)
        print(f'Created directory: {dir_out}')
        
    # Write to a new file
    
    hdu.writeto(file_out, overwrite=True)

    if (do_verbose):
        print("Wrote: {}; {} planes; {:.1f} MB".format(file_out, 
                                                   len(hdu), 
                                                   os.path.getsize(file_out)/1e6))

    hdu.close()

# =============================================================================
# Make a plot if requested
# =============================================================================

    if do_plot:
        plot_backplanes(file_out, name_target, name_observer)
 
    return(0)
Example #30
mode = '4X4'
#mode = '1X1'
pos = (None, None)
#pos = (300, 700)
pos = (100, 200)  # y, x in normal imshow() coordinates.
#dist_target = 0.01*u.au
dist_solar  = 43.2*u.au  # MU69 dist at encounter: 43.2 AU, from KEM Wiki page 
do_psf = True            # Flag: Do we convolve result with NH LORRI PSF?

dt_obs = -22*u.day        # Time relative to MU69 C/A

utc_ca = '2019 1 Jan 05:33:00'
et_ca  = sp.utc2et(utc_ca)
et_obs = et_ca + dt_obs.to('s').value
 
utc_obs = sp.et2utc(et_obs, 'C', 0)
utc_obs_human = 'K{:+}d'.format(dt_obs.to('day').value)

vec,lt = sp.spkezr('2014 MU69', et_obs, 'J2000', 'LT', 'New Horizons')
vec_sc_targ = vec[0:3]
dist_target = np.sqrt(np.sum(vec_sc_targ**2))*u.km.to('AU')*u.au
            
arr = nh_make_simulated_image_lorri(do_ring=True, 
                                    dist_ring_smoothing = 1000*u.km, 
                                    iof_ring = iof_ring,
                                    a_ring = (5000*u.km, 10000*u.km), 
                                    exptime = exptime, 
                                    mode = mode, 
                                    pos = pos,
                                    dist_solar = dist_solar, 
                                    dist_target = dist_target,
Example #31
def plot_backplanes_fits(file):

    """
    This file takes an image, and plots all of the backplanes for it.
    """

    # Start up SPICE
    
    file_kernel = '/Users/throop/git/NH_rings/kernels_kem.tm'
    sp.furnsh(file_kernel)

    hdulist = fits.open(file)

    # Loop over all of the planes, and plot each one
    
    i=1
    fig = plt.subplots()
    for hdu in hdulist:
        plt.subplot(3,4,i)
        plt.imshow(hdu.data)
        plt.title("{} / {}".format(i-1, hdu.name))
        i+=1

    plt.show()

    stretch_percent = 90    
    stretch = astropy.visualization.PercentileInterval(stretch_percent)
        
    # Look up position of MU69 in pixels.

    et = hdulist[0].header['SPCSCET']
    utc = sp.et2utc(et, 'C', 0)
    abcorr = 'LT'
    frame = 'J2000'
    name_target = 'MU69'
    name_observer = 'New Horizons'
    w = WCS(file)
    
    (st,lt) = sp.spkezr(name_target, et, frame, abcorr, name_observer)
    vec_obs_mu69 = st[0:3]
    (_, ra, dec) = sp.recrad(vec_obs_mu69)
    (pos_pix_x, pos_pix_y) = w.wcs_world2pix(ra*hbt.r2d, dec*hbt.r2d, 0)
    
#    Plot the image itself
 
    hbt.figsize((10,10)) 
    plt.imshow(stretch(hdulist[0].data))

    # Plot one of the planes

    plt.imshow(stretch(hdulist['Longitude_eq'].data), alpha=0.5, cmap=plt.cm.Reds_r)

    # Plot the ring
 
    radius_ring = 100_000  # This needs to be adjusted for different distances.
    radius_arr = hdulist['Radius_eq'].data
    radius_good = np.logical_and(radius_arr > radius_ring*0.95, radius_arr < radius_ring*1.05)
    plt.imshow(radius_good, alpha=0.3)
    
    # Plot MU69
    
    plt.plot(pos_pix_x, pos_pix_y, ms=10, marker = 'o', color='green')    
    plt.title("{}, {}".format(os.path.basename(file_new), utc))
    plt.show()  

    # Close the file
    
    hdulist.close()
Example #32
def nh_ort1_find_rings():
    
    plt.set_cmap('Greys_r')

    stretch_percent = 90    
    stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.

    dir = '/Users/throop/Data/ORT1/throop/backplaned'
    files = glob.glob(os.path.join(dir, '*', '*fits'))
    
    hbt.figsize((15,8))
    
    # Set up output arrays
    
    ra_arr    = []
    dec_arr   = []
    reqid_arr = []
    exptime_arr= []
    et_arr    = []
    utc_arr   = [] 

    # Start up SPICE
    
    if (sp.ktotal('ALL') == 0):
        sp.furnsh('kernels_kem.tm')
    
    for ii,file in enumerate(files):
        
        hdu = fits.open(file)
        print('Reading {}/{}: {}'.format(ii, len(files), os.path.basename(file)))
        img = hdu[0].data
        
        header = hdu[0].header
        
        ra_arr.append(header['CRVAL1'])
        dec_arr.append(header['CRVAL2'])
        exptime_arr.append(header['EXPTIME'])
        reqid_arr.append(header['REQID'])
        et_arr.append(header['SPCSCET'])
        utc_arr.append(sp.et2utc(header['SPCSCET'], 'C', 0))
        
        radius_eq = hdu['RADIUS_EQ'].data
        
        dradius = 1000
        num_bins_radius = 100
        
        bins_radius = hbt.frange(0, np.amax(radius_eq), num_bins_radius)
        dn_median_arr = np.zeros(num_bins_radius)
        dn_mean_arr   = np.zeros(num_bins_radius)
        
        for i in range(num_bins_radius-1):
            is_good = np.logical_and(radius_eq > bins_radius[i], radius_eq < bins_radius[i+1])
            dn_median_arr[i] = np.nanmedian(img[is_good])
            dn_mean_arr[i]   = np.nanmean(img[is_good])

        do_plot = False

        if do_plot:
            
            plt.subplot(1,2,1)
            plt.plot(bins_radius, dn_median_arr, label = 'median')
            plt.plot(bins_radius, dn_mean_arr,   label = 'mean')
            plt.legend(loc = 'upper right')
            plt.title("{}/{}  {}".format(ii,len(files), os.path.basename(file)))
           
            
            plt.subplot(1,2,2)
            plt.imshow(stretch(img))
            plt.show()
        
        hdu.close()
        
# =============================================================================
# Read the values into NumPy arrays
# =============================================================================

    ra   = np.array(ra_arr)
    dec  = np.array(dec_arr)
    reqid = np.array(reqid_arr)
    et = np.array(et_arr)
    exptime = np.array(exptime_arr)
    utc  = np.array(utc_arr)
    
    plt.plot(ra, dec, ls='none', marker = 'o', ms=2)
    
    # Put them all into a table
    
    t = Table(          [ra, dec, et, utc, exptime, reqid], 
              names = ('RA', 'Dec', 'ET', 'UTC', 'EXPTIME', 'ReqID'))
    
    
    w_haz0 = (t['ReqID'] == 'K1LR_HAZ00')
    w_haz1 = (t['ReqID'] == 'K1LR_HAZ01')
    w_haz2 = (t['ReqID'] == 'K1LR_HAZ02')
    w_haz3 = (t['ReqID'] == 'K1LR_HAZ03')
    w_haz4 = (t['ReqID'] == 'K1LR_HAZ04')
    
    plt.plot(ra[w_haz0], dec[w_haz0], marker='o', ls='none')
    plt.plot(ra[w_haz1], dec[w_haz1], marker='o', ls='none')
    plt.plot(ra[w_haz2], dec[w_haz2], marker='o', ls='none')
    plt.plot(ra[w_haz3], dec[w_haz3], marker='o', ls='none')
    plt.plot(ra[w_haz4], dec[w_haz4], marker='o', ls='none')
    plt.show()
    
    plt.plot(et[w_haz0], marker='o', ls='none')
    plt.plot(et[w_haz1], marker='o', ls='none')
    plt.plot(et[w_haz2], marker='o', ls='none')
    plt.plot(et[w_haz3], marker='o', ls='none')
    plt.plot(et[w_haz4], marker='o', ls='none')
Example #33
 def round_up_day(self, t):
     # Take a timestamp and round it up to the nearest day and return
     # a new timestamp for the start of the next day.
     jd_str = spiceypy.et2utc(t, 'j', 0)[3:] + '5'
     return spiceypy.str2et('JD ' + jd_str) + 0.1
Example #34
 def getDate(et):
     return sp.et2utc(et, "C", 0)[0:11]
Example #35
    #        print("nadir_end %i" %nadir_end_indices[index]
    #        print("occ_pc_start %i" %occ_pc_start_indices[index]
    #        print("occ_pc_end %i" %occ_pc_end_indices[index]
    #        print("occ_start %i" %occ_start_indices[index+1]
    #        print("occ_end %i" %occ_end_indices[index+1]
        """may need to shift list elements by one if start time occurs within an occultation!"""
        print("Occultations:")
        for occ_start, occ_end, occ_start_index, occ_end_index in zip(
                occ_starts, occ_ends, occ_start_indices, occ_end_indices):
            occ_length = occ_end - occ_start
            occ_min_altitude = np.min(valid_altitudes[occ_start_index +
                                                      1:occ_end_index + 1])
            occ_max_altitude = np.max(valid_altitudes[occ_start_index +
                                                      1:occ_end_index + 1])
            print("%s - %s (%0.1fs duration, %0.1f-%0.1fkm altitude)" %
                  (sp.et2utc(occ_start, formatstr,
                             prec), sp.et2utc(occ_end, formatstr, prec),
                   occ_length, occ_min_altitude, occ_max_altitude))

        print("Day Nadirs:")
        for nadir_start, nadir_end, nadir_start_index, nadir_end_index in zip(
                daynadir_starts, daynadir_ends, daynadir_start_indices,
                daynadir_end_indices):
            nadir_length = nadir_end - nadir_start
            print("%s - %s (%0.1fs duration)" %
                  (sp.et2utc(nadir_start, formatstr, prec),
                   sp.et2utc(nadir_end, formatstr, prec), nadir_length))

        print("Night Nadirs:")
        for nadir_start, nadir_end, nadir_start_index, nadir_end_index in zip(
                nightnadir_starts, nightnadir_ends, nightnadir_start_indices,
                nightnadir_end_indices):
Example #36
phase_arr = []
utc_arr   = []

for et in et_arr:
    (st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT+S', 'New Horizons')
    vec_sc_mu69 = st[0:3]

    (st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT+S', 'Sun')
    vec_sun_mu69 = st[0:3]
    
    ang_phase = sp.vsep(-vec_sc_mu69, -vec_sun_mu69)
    
    phase_arr.append(ang_phase)
    
    utc_arr.append(sp.et2utc(et, 'C', 0))
    
    print(f'Phase angle = {ang_phase*hbt.r2d} deg at {utc_arr[-1]}')

phase_arr = np.array(phase_arr)
et_arr = np.array(et_arr)

d_et = (et_arr - np.amin(et_arr))/86400

hbt.figsize(8,8)
hbt.fontsize(12)
plt.plot(d_et, np.array(phase_arr)*hbt.r2d)
plt.xlabel(f'Days past {utc_limits_arr[0]}')
plt.ylabel('Phase [deg]')
plt.title('NH-Sun Phase Angle')
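The et_arr and utc_limits_arr used above are defined elsewhere in the script. A plausible construction, purely as a sketch (the dates are placeholders and the usual leapseconds kernel is assumed to be furnished):

utc_limits_arr = ['2018 Oct 01 00:00:00', '2019 Jan 01 00:00:00']   # placeholder window
et_limits = [sp.utc2et(utc) for utc in utc_limits_arr]
et_arr = np.linspace(et_limits[0], et_limits[1], 200)               # 200 evenly spaced ETs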
Example #37
    def __init__(self, dir, do_force=False, do_verbose=False, nmax=None, prefix='lor', do_save=False) :   

        """
        Init method: load the index and all files.
        
        This does not align, register, stack, or anything like that. It just loads the images.
        
        If a saved .pkl file is available, then it will be used instead of reading individual images.
        
        Parameters
        ----
        dir:
            The directory with all the files
            
        Optional keyword parameters
        ----
        do_force:
            Force loading stack from original files, rather than from a .pkl file.
        do_verbose:
            List each file explicitly when reading
        prefix:
            A text prefix which each filename must match (e.g., 'lor' to match only LORRI files).
        do_save:
            If set, save the results of this stacking as a .pkl file
            
        """
        
        # If we are passed a pickle file, then restore the file from pickle, rather than by reading in explicitly.
        # I haven't tested this -- not sure if it works.
        
        if 'pkl' in dir:
            self.file_save = dir
            self.load()
            return
        
        name_target = 'MU69'
            
        do_lorri_destripe = True  # I didn't use this at first, but it is a clear improvement.
        
        do_lorri_dedark   = True  # Remove dark current from LORRI?
        
        files1 = glob.glob(os.path.join(dir,      prefix + '*.fit*'))     # Look in dir
        files2 = glob.glob(os.path.join(dir, '*', prefix + '*.fit*'))     # Look in subdirs
        
        files = files1 + files2
        
        # Truncate the list, if requested
        
        if (nmax):
            files = files[0:nmax]
            
        num_files = len(files)

        self.file_save      = os.path.join(dir, 'image_stack_n{}.pkl'.format(num_files))
        
        # Initialize the center of this image. The shifts of each image are taken to be relative to this.
        # It could be that we just keep this at zero. Time will tell.
        
        self.shift_x_pix_center = 0
        self.shift_y_pix_center = 0
        
        # Set the internal zoom level, which will be used when flattening
        
        self.zoom = 1
        
        # Set a flag to indicate if flattened or not
        
        self.flattened = False
        
        # If a save file exists, then load it, and immediately return
        
        if (os.path.isfile(self.file_save)) and not(do_force):
            self.load()
            return
        
        mode     = []
        exptime  = []
        filename_short = []
        exptime  = []
        visitnam = []
        sapname  = []
        sapdesc  = []
        reqid    = []
        et       = []
        utc      = []
        target   = []
                
        # Set up the table 't'. This is an astropy table within the stack, that has a list of all of the 
        # useful image parameters taken from the FITS header.
        
        # Fields in the table are:
        #   filename_short
        #   exptime
        #   visitname
        #   sapname
        #   sapdesc
        #   target
        #   reqid
        #   et
        #   utc
        #   shift_x_pix -- the shift of this image, relative to the zero point (tbd)
        #   shift_y_pix -- the shift of this image, relative to the zero point (tbd)
        #   ra    -- 
        #   dec
        #   angle
        #   dx_pix -- x dimension
        #   dy_pix -- y dimension
        
        
        self.t = Table(  [[],  [],            [],          [],         [],        [],       [],       [],      [],   [], 
                                                    [], [], [], [], [], [], [], [], [], [], [], [] ],
            names=('filename', 'filename_short', 'exptime', 'visitname', 'sapname', 'sapdesc', 'target', 'reqid', 'et',  'utc', 
                  'shift_x_pix', 'shift_y_pix', 'ra_center', 'dec_center', 'angle', 'dx_pix', 'dy_pix', 
                  'pixscale_x_km', 'pixscale_y_km', 'dist_target_km', 'wcs', 'data'),
            dtype = ('U150', 'U50',           'float64', 'U50',      'U50',     'U50',     'U50',    'U50',   'float64', 'U50', 
                    'float64', 'float64', 'float64', 'float64', 'float64', 'float64', 
                    'float64', 'float64', 'float64', 'float64', 'object', 'object'  ))
        
        # Read the LORRI dark frame. This is a one-off frame for MU69 approach, made by Marc Buie.
        # It is only valid for 4x4 LORRI.
        # Units are DN/sec of dark current.
        
        if do_lorri_dedark:
            file_dark = '/Users/throop/Data/MU69_Approach/2018dark_mwb_v2.fits'
            hdu = fits.open(file_dark)
            arr_lorri_dark = hdu['PRIMARY'].data
            hdu.close()
            
        if (len(files)):
            print("Reading {} files from {}".format(len(files), dir))
            
            for i,file in enumerate(files):
            
                # Read the data
            
                hdulist        = fits.open(file)
                arr            = hdulist[0].data
                err            = hdulist[1].data
                quality        = hdulist[2].data
                backplane_radius = hdulist['RADIUS_EQ'].data
                
                dx_pix         = hbt.sizex(arr)   # Usually 256 or 1024
                dy_pix         = hbt.sizey(arr)
                
                filename_short = os.path.basename(file).replace('.fits', '').replace('lor_', '')\
                         .replace('_0x633_pwcs','')\
                         .replace('_0x630_pwcs','')
                exptime = hdulist[0].header['EXPTIME']
                visitnam= hdulist[0].header['VISITNAM']
                sapname = hdulist[0].header['SAPNAME']
                sapdesc = hdulist[0].header['SAPDESC']
                target  = hdulist[0].header['TARGET']
                reqid   = hdulist[0].header['REQID']
                et      = hdulist[0].header['SPCSCET']
                angle   = hdulist[0].header['SPCEMEN']*hbt.d2r # Boresight roll angle
                utc     = sp.et2utc(et, 'C', 1)
            
                if do_verbose:
                    print("Read {}/{} {}".format(i, len(files), filename_short))
                else:
                    print(".", end="")
    
                hdulist.close()
            
                # Destripe if requested (aka remove jailbars)
                
                if do_lorri_destripe:
                    arr = hbt.lorri_destripe(arr)
                
                # Calibrate out dark current, if requested, and if a 4X4
                
                if do_lorri_dedark and (hbt.sizex(arr) == 256):
                    arr = arr - exptime * arr_lorri_dark
                
                # Read the WCS coords of this file.
                # XXX Suppress warnings about WCS SIP coords which Buie's files get.
                # However, looks like AstroPy doesn't use proper warning mechanism??
                
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore")
                    w = WCS(file)
                
                # Get the RA/Dec location of the central pixel, in radians
                
                ra_center  = hdulist[0].header['CRVAL1']*hbt.d2r
                dec_center = hdulist[0].header['CRVAL2']*hbt.d2r
                
                pixscale_x = abs(hdulist[0].header['CD1_1']*hbt.d2r)  # radians per 4x4 pixel
                pixscale_y = abs(hdulist[0].header['CD2_2']*hbt.d2r)  # Sometimes this is negative?
                
                # Initialize the shift amount for this image, in pixels
                
                shift_x_pix = 0
                shift_y_pix = 0
                
                # Calc the distance to MU69, and the pixel scale (non-zoomed)
                
                (st, lt) = sp.spkezr(name_target, et, 'J2000', 'LT', 'New Horizons')
                dist_target_km = sp.vnorm(st[0:3])   # Distance from NH to MU69, km
                pixscale_x_km = dist_target_km * pixscale_x
                pixscale_y_km = dist_target_km * pixscale_y
                
                # Load the values for this image into a row of the astropy table
                # *** Need to add backplane data here, as planes[] or backplanes[] ***
            
                self.t.add_row(
                          [file, filename_short, exptime, visitnam, sapname, sapdesc, target, reqid, 
                           et, utc, 
                           shift_x_pix, shift_y_pix,
                           ra_center,   
                           dec_center,  
                           angle,
                           dx_pix, dy_pix, # X and Y dimensions
                           pixscale_x_km, pixscale_y_km,
                           dist_target_km,
                           w,              # WCS object 
                           arr])           # Actual imaging data 
            
            # End of loop over files
        else:
            print(f"No files found in {dir}!")
            return
        
        print("\n")     # Terminate the "...." progress line with a newline
            
        # Sort by ET.
            
        self.t.sort('et')
        
        # Save the pixel scale, from most recent image. We assume that the pixel scale of all frames is identical.
        
        self.pixscale_x = pixscale_x
        self.pixscale_y = pixscale_y
        
        self.pixscale_x_km = pixscale_x_km
        self.pixscale_y_km = pixscale_y_km
        
        self.dist_target_km   = dist_target_km
        
        self.et               = et 
        
        # Save the image size, from most recent image
        
        self.dx_pix     = dx_pix
        self.dy_pix     = dy_pix
        
        # Save the image stack size so it can be easily retrieved
        
        self.size       = (len(files), dx_pix, dy_pix) 
        
        # Finally, remove a few columns that we don't need, or that are wrong.
        
        self.t.remove_column('sapdesc')
        self.t.remove_column('sapname')
        self.t.remove_column('target')
        self.t.remove_column('visitname')

        # Initialize the 'indices' vector, which indicates which planes we use for flattening
    
        self.indices = np.ones(len(self.t), dtype=bool)
        
        # Initialize the num_planes vector to set the size
        
        self.num_planes = len(files)
    
        # If we generated the files manually (not by reloading), and flag is not set, then offer to save them
        
        # if not(do_save):
        #     # print(f'File to save: {self.file_save}')
        #     answer = input(f'Save to pickle file {self.file_save.split("/")[-1]}? ')
        #     if ('y' in answer):
        #         do_save = True
                
        if do_save:
            self.save()            
            
        # No explicit return value: an __init__ method should not return anything.
    
        print()
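# A minimal, standalone sketch of the geometry used in the loop above: km-per-pixel is the
# angular plate scale (the CD-matrix element, converted to radians) multiplied by the
# spacecraft-to-target distance from SPICE. The epoch and the CD1_1 value are assumed
# placeholders, not values read from the stack; the meta-kernel is the one loaded elsewhere here.

import numpy as np
import spiceypy as sp

sp.furnsh('kernels_kem_prime.tm')                 # assumed meta-kernel, as used elsewhere in this file
et = sp.str2et('2018 DEC 25 00:00:00')            # arbitrary MU69-approach epoch
(st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'New Horizons')
dist_target_km = sp.vnorm(st[0:3])                # NH-MU69 distance, km

cd1_1 = 1.13e-3                                   # assumed CD1_1 (~4 arcsec/pix, 4x4 LORRI), deg/pix
pixscale_rad = abs(cd1_1) * np.pi / 180           # radians per pixel
pixscale_km  = dist_target_km * pixscale_rad      # km per pixel at the target distance
print(f'{pixscale_km:.1f} km/pix at {dist_target_km:.3e} km')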
Example #38
0
    spice.furnsh(sd.pck00010)

    sc = SC({
        'date0': '2021-03-03 22:10:35 TDB',
        'coes': [7480.0, 0.09, 5.5, 6.26, 5.95, 0.2],
        'tspan': '40',
    })

    st.write_bsp(sc.ets, sc.states[:, :6], {'bsp_fn': 'leo.bsp'})
    spice.furnsh('leo.bsp')

    et0 = spice.str2et('2021-03-03 22:10:40 TDB')
    etf = spice.str2et('2021-03-04 TDB')

    timecell = spice.utils.support_types.SPICEDOUBLE_CELL(2)
    spice.appndd(et0, timecell)
    spice.appndd(etf, timecell)

    cell = spice.gfoclt('ANY', '399', 'ELLIPSOID', 'IAU_EARTH', '10',
                        'ELLIPSOID', 'IAU_SUN', 'LT', '-999', 120.0, timecell)
    ets_SPICE = spice.wnfetd(cell, 0)
    cal0 = spice.et2utc(ets_SPICE[0], 'C', 1)
    cal1 = spice.et2utc(ets_SPICE[1], 'C', 1)
    print('\n*** SPICE RESULTS ***')
    print(f'{cal0} --> {cal1}')

    sc.plot_3d()
    sc.calc_eclipses(vv=True)

    sc.plot_eclipse_array({'time_unit': 'seconds', 'show': True})
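# A minimal sketch of iterating over every interval in a SPICE window; the example above
# fetches only interval 0 from the gfoclt() result, but wncard()/wnfetd() will walk all of
# them. The window here is built by hand so the snippet stands alone, and the leapseconds
# kernel path is an assumed placeholder.

import spiceypy as spice

spice.furnsh('naif0012.tls')                      # assumed leapseconds kernel, needed by str2et/et2utc
window = spice.utils.support_types.SPICEDOUBLE_CELL(10)
spice.wninsd(spice.str2et('2021-03-03 23:00:00'), spice.str2et('2021-03-03 23:35:00'), window)
spice.wninsd(spice.str2et('2021-03-04 00:30:00'), spice.str2et('2021-03-04 01:05:00'), window)

for i in range(spice.wncard(window)):             # number of intervals in the window
    t0, t1 = spice.wnfetd(window, i)              # endpoints of interval i, ephemeris seconds
    print(spice.et2utc(t0, 'C', 1), '-->', spice.et2utc(t1, 'C', 1))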
def nh_ort_find_ring_pole():
    
    file_superstack = '/Users/throop/Data/ORT4/superstack_ORT4_z4_mean_wcs_sm_hbt.fits'
    
    file_tm = 'kernels_kem_prime.tm'
    sp.unload(file_tm)
    sp.furnsh(file_tm)
    
    f = fits.open(file_superstack)
    
    img = f[0].data
    
#    plt.imshow(stretch(img))
#    plt.show()
    
    wcs = WCS(file_superstack)
    
    num_pts = 200
    
    ra_pole   = 275 * hbt.d2r
#    dec_pole  = -56 * hbt.d2r
    dec_pole  = 13 * hbt.d2r

    radius_ring = 9000  # Radius in km

    vec_pole_j2k = sp.radrec(1, ra_pole, dec_pole)
    
    et = float(f[0].header['SPCSCET'])
    utc = sp.et2utc(et, 'C', 0)
    
    # Get position from NH to UT
    
    (st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'New Horizons')

    vec_nh_ut = st[0:3]
 
    # Get position from Sun to NH
    
    (st, lt) = sp.spkezr('New Horizons', et, 'J2000', 'LT', 'Sun')
    
    vec_sun_nh = st[0:3]

    vec_sun_ut = vec_sun_nh + vec_nh_ut
    
    # Define a 'ring plane', based on a pole vector, and a point
    # This ring plane should be in J2K space -- that is, centered on Sun.
    
    plane_ring = sp.nvp2pl(vec_pole_j2k, vec_sun_ut)  # Pole position is variable. Point is UT in J2K.
 
    # Get the point and spanning vectors that define this plane
    
    # XXX For some reason, these values from pl2psv do not depend on the value of vec_pole_j2k
    
    (pt_pl, vec1_pl, vec2_pl) = sp.pl2psv(plane_ring)

    # Now take a bunch of linear combinations of these spanning vectors
    
    # Plot UT's position on the plot
    
    (st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'New Horizons')

    (_, ra, dec) = sp.recrad(vec_nh_ut)
        
    (x, y) = wcs.wcs_world2pix(ra*hbt.r2d, dec*hbt.r2d, 0)
       
#    plt.plot(x, y, marker = 'o', ms = 10, alpha=0.3, color='purple')
    
    # Set an offset to the WCS values, in case UT is not in the right position (ie, not centered properly)
    
    dy = 0 # Large value moves up
    dx = 0

    # Draw the ring image
    
    plt.imshow(stretch(img), origin='lower')
    
    # Calculate and draw all of the ring points
        
    for i in range(num_pts):
        
        angle_azimuth = 2*math.pi * (i / num_pts)   # Put in range 0 .. 2 pi
        vec_i = vec1_pl * math.sin(angle_azimuth) + vec2_pl * math.cos(angle_azimuth)
        vec_i = vec_i * radius_ring
        
        # Now get the point in space, J2K
        
        pt_ring_i_j2k = vec_i + vec_sun_ut
        
        vec_sun_ring_i = pt_ring_i_j2k
        
        vec_nh_ring_i = vec_sun_ring_i - vec_sun_nh
        
        # Convert the NH-to-ring-point vector to RA/Dec, then project into pixel coordinates
        (_, ra_i, dec_i) = sp.recrad(vec_nh_ring_i)
        
        (x, y) = wcs.wcs_world2pix(ra_i*hbt.r2d, dec_i*hbt.r2d, 0)
        
        plt.plot(x+dx, y+dy, marker = 'o', ms = 1, color='red', alpha = 0.15)
#        print(f'{i}, {ra_i*hbt.r2d}, {dec_i*hbt.r2d}, {x}, {y}')
    
    plt.title(f'ORT4 Superstack, Ring Pole = ({ra_pole*hbt.r2d},{dec_pole*hbt.r2d}) deg')    
    plt.show()
    
    return
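# A minimal sketch of the plane machinery used above: nvp2pl() builds a plane from a normal
# (the candidate ring pole) and a point in the plane, and pl2psv() returns a point plus two
# orthogonal spanning vectors. The numbers are arbitrary placeholders; no kernels are needed.

import numpy as np
import spiceypy as sp

ra_pole, dec_pole = np.radians(275.0), np.radians(13.0)
vec_pole = sp.radrec(1.0, ra_pole, dec_pole)            # unit pole vector in J2000
pt_in_plane = np.array([1.0e6, -2.0e6, 5.0e5])          # any point in the ring plane, km

plane = sp.nvp2pl(vec_pole, pt_in_plane)
(pt_pl, vec1_pl, vec2_pl) = sp.pl2psv(plane)

# A ring point at azimuth az and radius r is pt_in_plane + r*(vec1_pl*sin(az) + vec2_pl*cos(az)),
# which is the combination the loop above projects into pixel coordinates.
az, r = 0.7, 9000.0
pt_ring = pt_in_plane + r * (vec1_pl * np.sin(az) + vec2_pl * np.cos(az))
print(pt_ring)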
Example #40
0
 def get_utc_from_et(self, et):
     return spice.et2utc(et, "ISOC", 14)
# =============================================================================

# Start up SPICE if needed

hbt.figsize((10,10))
if (sp.ktotal('ALL') == 0):
    sp.furnsh('kernels_kem_prime.tm')
        
stretch_percent = 90    
stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.

dir_images = '/Users/throop/Data/ORT_Sep18/day2/lor/'

files = glob.glob(os.path.join(dir_images, '*'))

do_transpose = False

files = ['/Users/throop/Data/NH_Jring/data/jupiter/level2/lor/all/lor_0034715072_0x630_sci_1.fit']

for file in files:
    lun = fits.open(file)
    im = lun['PRIMARY'].data
    et = lun['PRIMARY'].header['SPCSCET']
    utc = sp.et2utc(et, 'C', 0)
    if do_transpose:
        im = np.transpose(im)
    plt.imshow(stretch(im), origin='lower')
    file_short = file.split('/')[-1]
    plt.title(f'{file_short} {utc}')
    plt.show()
    
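# A minimal sketch of the stretch used in these quick-look loops: PercentileInterval(90)
# clips an image to its 5th..95th percentile range and rescales it to 0..1. The image here
# is synthetic, purely for illustration.

import numpy as np
import astropy.visualization

stretch = astropy.visualization.PercentileInterval(90)
img = np.random.poisson(lam=50.0, size=(256, 256)).astype(float)
img[128, 128] = 1.0e5                       # a hot pixel, which the stretch clips away
img_scaled = stretch(img)                   # values now scaled to [0, 1]
print(img_scaled.min(), img_scaled.max())   # 0.0 1.0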
Example #42
0
def et2utc(et):
    return sp.et2utc(et, formatstr, 0)
def get_fits_info_from_files_lorri(
        path,
        file_tm="/Users/throop/gv/dev/gv_kernels_new_horizons.txt",
        pattern=''):
    "Populate an astropy table with info from the headers of a list of LORRI files."
    import numpy as np
    import spiceypy as sp
    import glob
    import astropy
    from astropy.io import fits
    from astropy.table import Table
    import astropy.table
    import math
    import hbt

    # For testing:
    # file = '/Users/throop/Data/NH_Jring/data/jupiter/level2/lor/all/lor_0035020322_0x630_sci_1.fit' # 119 deg phase as per gv
    # file = '/Users/throop/Data/NH_Jring/data/jupiter/level2/lor/all/lor_0034599122_0x630_sci_1.fit' # 7 deg phase, inbound

    # t = hbt.get_fits_info_from_files_lorri(file)

    # Flags: Do we do all of the files? Or just a truncated subset of them, for testing purposes?

    DO_TRUNCATED = False
    NUM_TRUNC = 100

    # We should work to standardize this, perhaps allowing different versions of this function
    # for different instruments.

    d2r = np.pi / 180.
    r2d = 1. / d2r

    sp.furnsh(file_tm)

    # *** If path ends with .fit or .fits, then it is a file not a path. Don't expand it, but read it as a single file.

    if (('.fits' in path) or ('.fit' in path)):
        file_list = path
        files = [file_list]

    else:

        dir_data = path
        #dir_data = '/Users/throop/data/NH_Jring/data/jupiter/level2/lor/all'
        # Start up SPICE

        # Get the full list of files
        # List only the files that match an (optional) user-supplied pattern, such as '_opnav'

        file_list = glob.glob(dir_data + '/*' + pattern + '.fit')
        files = np.array(file_list)
        indices = np.argsort(file_list)
        files = files[indices]

# Read the JD from each file. Then sort the files based on JD.

    jd = []
    for file in files:
        hdulist = fits.open(file)
        jd.append(hdulist[0].header['MET'])
        hdulist.close()

    fits_met = []  # new list (same as array)
    fits_startmet = []
    fits_stopmet = []
    fits_exptime = []  # starting time of exposure
    fits_target = []
    fits_reqdesc = []
    fits_reqcomm = []  # New 9-Oct-2018
    fits_reqid = []  # New 9-Oct-2018
    fits_spcinst0 = []
    fits_spcutcjd = []
    fits_naxis1 = []
    fits_naxis2 = []
    fits_sformat = []  # Data format -- '1x1' or '4x4'
    fits_spctscx = []  # sc - target, dx
    fits_spctscy = []  # dy
    fits_spctscz = []  # dz
    fits_spctcb = []  # target name
    fits_spctnaz = [
    ]  # Pole angle between target and instrument (i.e., boresight rotation angle)
    fits_rsolar = [
    ]  # (DN/s)/(erg/cm^2/s/Ang/sr), Solar spectrum. Use for resolved sources.

    if (DO_TRUNCATED):
        files = files[0:NUM_TRUNC]

#files_short = np.array(files)
#for i in range(files.size):
#    files_short = files[i].split('/')[-1]  # Get just the filename itself

# Set up one iteration variable so we don't need to create it over and over
    num_obs = np.size(files)
    i_obs = np.arange(num_obs)

    print("Read " + repr(np.size(files)) + " files.")

    for file in files:
        print("Reading file " + file)

        hdulist = fits.open(file)
        header = hdulist[0].header

        keys = header.keys()

        fits_met.append(header['MET'])
        fits_exptime.append(header['EXPTIME'])
        fits_startmet.append(header['STARTMET'])
        fits_stopmet.append(header['STOPMET'])
        fits_target.append(header['TARGET'])
        fits_reqdesc.append(header['REQDESC'])
        fits_reqcomm.append(header['REQCOMM'])
        fits_reqid.append(header['REQID'])
        fits_spcinst0.append(header['SPCINST0'])
        fits_spcutcjd.append(
            (header['SPCUTCJD'])[3:])  # Remove the 'JD ' from before number
        fits_naxis1.append(header['NAXIS1'])
        fits_naxis2.append(header['NAXIS2'])
        fits_spctscx.append(header['SPCTSCX'])
        fits_spctscy.append(header['SPCTSCY'])
        fits_spctscz.append(header['SPCTSCZ'])
        fits_spctnaz.append(header['SPCTNAZ'])
        fits_sformat.append(header['SFORMAT'])
        fits_rsolar.append(
            header['RSOLAR']
        )  # NB: This will be in the level-2 FITS, but not level 1

        hdulist.close()  # Close the FITS file

#print object
#print "done"

# Calculate distance to Jupiter in each of these
# Calc phase angle (to Jupiter)
# Eventually build backplanes: phase, RA/Dec, etc.
# Eventually Superimpose a ring on top of these
#  ** Not too hard. I already have a routine to create RA/Dec of ring borders.
# Eventually overlay stars
#   Q: Will there be enough there?
# Eventually repoint based on stars
#  ** Before I allow repointing, I should search a star catalog and plot them.

# Convert some things to numpy arrays. Is there any disadvantage to this?

    met = np.array(fits_met)
    jd = np.array(fits_spcutcjd,
                  dtype='d')  # 'f' was rounding to one decimal place...
    naxis1 = np.array(fits_naxis1)
    naxis2 = np.array(fits_naxis2)
    target = np.array(
        fits_target
    )  # np.array can use string arrays as easily as float arrays
    instrument = np.array(fits_spcinst0)
    dx_targ = np.array(fits_spctscx)
    dy_targ = np.array(fits_spctscy)
    dz_targ = np.array(fits_spctscz)
    desc = np.array(fits_reqdesc)
    reqid = np.array(fits_reqid)
    reqcomm = np.array(fits_reqcomm)
    met0 = np.array(fits_startmet)
    met1 = np.array(fits_stopmet)
    exptime = np.array(fits_exptime)
    rotation = np.array(fits_spctnaz)
    sformat = np.array(fits_sformat)
    rotation = np.rint(rotation).astype(
        int
    )  # Turn rotation into integer. I only want this to be 0, 90, 180, 270...
    rsolar = np.array(fits_rsolar)

    files_short = np.zeros(num_obs, dtype='U60')

    # Now do some geometric calculations and create new values for a few fields

    dist_targ = np.sqrt(dx_targ**2 + dy_targ**2 + dz_targ**2)

    phase = np.zeros(num_obs)
    utc = np.zeros(num_obs, dtype='U30')
    et = np.zeros(num_obs)
    subsclat = np.zeros(num_obs)  # Sub-sc latitude
    subsclon = np.zeros(num_obs)  # Sub-sc longitude

    name_observer = 'New Horizons'
    frame = 'J2000'
    abcorr = 'LT+S'
    #         Note that using light time corrections alone ("LT") is
    #         generally not a good way to obtain an approximation to an
    #         apparent target vector:  since light time and stellar
    #         aberration corrections often partially cancel each other,
    #         it may be more accurate to use no correction at all than to
    #         use light time alone.

    # Fix the MET. The 'MET' field in fits header is actually not the midtime, but the time of the first packet.
    # I am going to replace it with the midtime.
    # *** No, don't do that. The actual MET field is used for timestamping -- keep it as integer.

    #    met = (met0 + met1) / 2.

    # Loop over all images

    for i in i_obs:

        # Get the ET and UTC, from the JD. These are all times *on s/c*, which is what we want

        et[i] = sp.utc2et('JD ' + repr(jd[i]))
        utc[i] = sp.et2utc(et[i], 'C', 2)

        # Calculate Sun-Jupiter-NH phase angle for each image

        (st_jup_sc, ltime) = sp.spkezr('Jupiter', et[i], frame, abcorr,
                                       'New Horizons')  #obs, targ
        (st_sun_jup, ltime) = sp.spkezr('Sun', et[i], frame, abcorr, 'Jupiter')
        ang_scat = sp.vsep(st_sun_jup[0:3], st_jup_sc[0:3])
        phase[i] = math.pi - ang_scat
        #      phase[i] = ang_scat
        files_short[i] = files[i].split('/')[-1]
        # Calc sub-sc lon/lat

        mx = sp.pxform(frame, 'IAU_JUPITER', et[i])
        st_jup_sc_iau_jup = sp.mxv(mx, st_jup_sc[0:3])

        (radius, subsclon[i],
         subsclat[i]) = sp.reclat(st_jup_sc[0:3])  # Radians
        (radius, subsclon[i],
         subsclat[i]) = sp.reclat(st_jup_sc_iau_jup)  # Radians


# Stuff all of these into a Table

    t = Table([
        i_obs, met, utc, et, jd, files, files_short, naxis1, naxis2, target,
        instrument, dx_targ, dy_targ, dz_targ, reqid, met0, met1, exptime,
        phase, subsclat, subsclon, naxis1, naxis2, rotation, sformat, rsolar,
        desc, reqcomm
    ],
              names=('#', 'MET', 'UTC', 'ET', 'JD', 'Filename', 'Shortname',
                     'N1', 'N2', 'Target', 'Inst', 'dx', 'dy', 'dz', 'ReqID',
                     'MET Start', 'MET End', 'Exptime', 'Phase', 'Sub-SC Lat',
                     'Sub-SC Lon', 'dx_pix', 'dy_pix', 'Rotation', 'Format',
                     'RSolar', 'Desc', 'Comment'))

    # Define units for a few of the columns

    t['Exptime'].unit = 's'
    t['Sub-SC Lat'].unit = 'degrees'

    # Create a dxyz_targ column, from dx dy dz. Easy!

    t['dxyz'] = np.sqrt(t['dx']**2 + t['dy']**2 +
                        t['dz']**2)  # Distance, in km

    return t
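# A minimal sketch of the phase-angle step above: the Sun-Jupiter-spacecraft phase angle is
# pi minus the separation between the Sun->Jupiter and Jupiter->spacecraft vectors. The
# meta-kernel is the same file used by this function and is assumed to supply SPKs for
# Jupiter, the Sun, and New Horizons plus a leapseconds kernel.

import math
import spiceypy as sp

sp.furnsh("/Users/throop/gv/dev/gv_kernels_new_horizons.txt")
et = sp.str2et('2007 FEB 28 00:00:00')                  # an epoch near the NH Jupiter flyby
(st_jup_sc, lt)  = sp.spkezr('Jupiter', et, 'J2000', 'LT+S', 'New Horizons')
(st_sun_jup, lt) = sp.spkezr('Sun',     et, 'J2000', 'LT+S', 'Jupiter')
phase = math.pi - sp.vsep(st_sun_jup[0:3], st_jup_sc[0:3])
print(f'Phase angle: {math.degrees(phase):.1f} deg')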
def nh_jring_create_objectlist(file_in, do_stars=True, bodies = [], num_stars_max = 100):
    
    ''' 
    Creates a text file which lists all the stars in a file, with lines like
    
        'star', <xpos>, <ypos>, mag [optional]

    <xpos> and <ypos> are the x and y coordinate centers, in pixels.
    The output is sorted by magnitude, if it is available. 
    
    <bodies> is a list, like ['Adrastea', 'Amalthea'], etc.
    
    It is OK to have xpos and ypos be outside the screen. We might want to know that sometimes, for satellites.
    '''

#    file_in    = 'lor_0034962025_0x630_sci_1_opnav.fit'
    dir_images = '/Users/throop/data/NH_Jring/data/jupiter/level2/lor/all/'
    dir_out    = '/Users/throop/data/NH_Jring/out/'
    
    file_in_base = file_in.split('/')[-1]   # Strip the pathname
    file_out_base = file_in_base.replace('.fit', '_objects.txt')
    file_out   = dir_out + file_out_base

# If we were passed a full path, it has already been stripped to the bare filename above;
# rebuild the path from the standard image directory.

    file = dir_images + file_in_base
   
    header = hbt.get_image_header(dir_images + file_in_base)

    dx_pix = header['NAXIS1']
    dy_pix = header['NAXIS2']
    
    radius_search = 0.2 * u.deg
    
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        w = WCS(file)
        
    name_cat = u'Guide Star Catalog v2 1'# Works on gobi only (no tomato laptop)
    name_cat = 'The HST Guide Star Catalog, Version GSC-ACT (Lasker+ 1996-99) 1'
    url_cat = 'http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23&' # Works always

    with data.conf.set_temp('remote_timeout', 30): # This is the very strange syntax to set a timeout delay.
                                                   # The default is 3 seconds, and that times out often.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            pos = (w.wcs.crval[0], w.wcs.crval[1])  # Get errors about invalid coords. Convert from np arr → tuple, ugh.
            stars = conesearch.conesearch(pos, radius_search, cache=True, catalog_db = url_cat)

    # NB: the returned value is a Table, but I have to access via .array[] -- not sure why.
    
    ra_stars  = np.array(stars.array['ra'])*hbt.d2r # Convert to radians
    dec_stars = np.array(stars.array['dec'])*hbt.d2r # Convert to radians

    mag_stars = np.array(stars.array['Mag'])
    
    print("Stars downloaded: {}; mag = {} .. {}".format(np.size(mag_stars), np.nanmin(mag_stars), np.nanmax(mag_stars)))
    print("RA = {} .. {}".format(np.nanmin(ra_stars)*hbt.r2d, np.nanmax(ra_stars)*hbt.r2d))
    
    # Now sort by magnitude, and keep the 100 brightest

    order = np.argsort(mag_stars)
    order = np.array(order)[0:num_stars_max]

    ra_stars        = ra_stars[order]
    dec_stars       = dec_stars[order]
    mag_stars       = mag_stars[order]
    
    radec_stars        = np.transpose(np.array((ra_stars,dec_stars)))
    x_stars, y_stars   = w.wcs_world2pix(radec_stars[:,0]*hbt.r2d, radec_stars[:,1]*hbt.r2d, 0)
  
    is_good = np.logical_and( np.logical_and(x_stars >=0, x_stars <= dx_pix),
                              np.logical_and(y_stars >=0, y_stars <= dy_pix) )
    
# Now make a table
    
    t_stars          = Table()
    t_stars['name']  = np.zeros(np.shape(mag_stars[is_good]), dtype='U30')
    t_stars['name'][:] = u'star'    

    t_stars['x_pix'] = x_stars[is_good]
    t_stars['y_pix'] = y_stars[is_good]
    t_stars['mag']   = mag_stars[is_good]


#==============================================================================
# Now find the satellite locations
#==============================================================================

    if np.size(bodies) > 0:
        
# Look up satellite positions

        et = header['SPCSCET']
        utc = sp.et2utc(et, 'C', 0)
        x_bodies, y_bodies = hbt.get_pos_bodies(et, bodies, units='pixels', wcs=w)
        t_sats = Table()
        t_sats['x_pix'] = x_bodies
        t_sats['y_pix'] = y_bodies
        t_sats['name']  = np.array(bodies).astype('U30')

#==============================================================================
# Merge the stars and sats into one table
#==============================================================================
                 
        t_merged = vstack([t_stars, t_sats])

    else:
        t_merged = t_stars
              
#==============================================================================
# And write the table to disk
#==============================================================================

    t_merged.write(file_out, format = 'csv', overwrite = True)
    print("Wrote: {} ({} objects)".format(file_out, np.shape(t_merged)[0]))
#    
    return t_merged
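# A minimal sketch of the star-projection step above: WCS.wcs_world2pix() expects degrees,
# so RA/Dec held in radians must be converted first, and only objects that land on the
# detector are kept. The FITS path below is assembled from the example paths in this
# function and is an assumed placeholder; the frame size is assumed to be 1024x1024.

import numpy as np
from astropy.io import fits
from astropy.wcs import WCS

file = '/Users/throop/data/NH_Jring/data/jupiter/level2/lor/all/lor_0034962025_0x630_sci_1_opnav.fit'
w = WCS(fits.open(file)[0].header)

dx_pix = dy_pix = 1024                                   # assumed 1x1 LORRI frame size
ra_rad  = np.radians(np.array([248.0, 248.1, 260.0]))    # placeholder star positions
dec_rad = np.radians(np.array([-21.0, -21.1, -30.0]))

x, y = w.wcs_world2pix(np.degrees(ra_rad), np.degrees(dec_rad), 0)
is_good = (x >= 0) & (x <= dx_pix) & (y >= 0) & (y <= dy_pix)
print(np.sum(is_good), 'of', len(x), 'objects fall on the detector')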
Example #46
0
def et2utc(et):
    return sp.et2utc(et, "C", 0)
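# A quick sketch of the et2utc() format codes used across these examples. A leapseconds
# kernel must be loaded first; the path is an assumed example.

import spiceypy as sp

sp.furnsh('naif0012.tls')
et = sp.str2et('2019-01-01T05:33:00')
for fmt in ('C', 'D', 'J', 'ISOC', 'ISOD'):
    print(fmt, '->', sp.et2utc(et, fmt, 3))

# 'C'    -> calendar:        2019 JAN 01 05:33:00.000
# 'D'    -> day of year:     2019-001 // 05:33:00.000
# 'J'    -> Julian date:     JD 2458484.731
# 'ISOC' -> ISO calendar:    2019-01-01T05:33:00.000
# 'ISOD' -> ISO day of year: 2019-001T05:33:00.000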