Example #1
def run(mk, time_start='', time_finish='', step=60, target='MERCURY',
        frame='', sensor='MPO_MERTIS_TIR_PLANET', pixel_line='',
        pixel_sample='', observer='MPO'):

    spiceypy.furnsh(mk)

    target = target.upper()
    if not time_start: time_start = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
    if not frame: frame = f'IAU_{target}'
    if not time_finish: time_finish = time_start
    if not pixel_sample: pixel_sample = np.floor(ccd_center(sensor)[0])
    if not pixel_line: pixel_line = np.floor(ccd_center(sensor)[1])

    if pixel_sample == 'all':
        pixel_sample = np.arange(1, pixel_samples(sensor), 1)
    else:
        pixel_sample = [pixel_sample]
    if pixel_line == 'all':
        pixel_line = np.arange(1, pixel_lines(sensor), 1)
    else:
        pixel_line = [pixel_line]


    et_start = spiceypy.utc2et(time_start)
    et_finish = spiceypy.utc2et(time_finish)

    if et_start != et_finish:
        interval = np.arange(et_start, et_finish, step)
    else:
        interval = [et_start]

    # Time tag [UTC]
    # pixel id [(x,y)]
    # corner id [(x,y)]

    # Requested geometry

    # lat lon intersection (planetocentric)
    # lat lon subspacecraft
    # lat lon subsolar
    # target distance intersection
    # target angular diameter
    # local solar time intersection
    # phase angle intersection
    # emission angle intersection
    # incidence angle intersection

    with open('spice4mertis.csv', 'w') as o:
        o.write('utc,et,pixlin,pixsam,tarlon,tarlat,sublon,sublat,sunlon,sunlat,tardis,tarang,ltime,phase,emissn,incdnc\n')
        for et in interval:
            utc = spiceypy.et2utc(et, 'ISOC', 3)
            for line in pixel_line:
                for sample in pixel_sample:
                    pixelGeometry = pixel_geometry(et, sensor, line, sample, target, frame, observer=observer)
                    print(utc,line,sample,str(pixelGeometry)[1:-1].replace(',',' '))
                    o.write(f'{utc},{et},{line},{sample},{str(pixelGeometry)[1:-1].replace(" ","")}\n')
    return
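A minimal invocation sketch for run() above. Hedged: the meta-kernel path is a placeholder, and the helpers the function calls (ccd_center, pixel_samples, pixel_lines, pixel_geometry) must be importable from the same module.

run('bc_ops.tm',                        # hypothetical BepiColombo meta-kernel
    time_start='2026-01-15T00:00:00',
    time_finish='2026-01-15T01:00:00',
    step=300)                           # sample the default centre pixel every 5 minutes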
Example #2
def analyseLineScan2(hdf5Files, hdf5Filenames):

    #    referenceFrame = "TGO_NOMAD_UVIS_OCC"
    referenceFrame = "TGO_NOMAD_SO"
    #    referenceFrame = "TGO_SPACECRAFT"
    fig, ax = plt.subplots(figsize=(FIG_X, FIG_Y))

    for fileIndex, hdf5File in enumerate(hdf5Files):
        print("Reading in file %i: %s" % (fileIndex + 1, hdf5Filenames[fileIndex]))

        detectorDataAll = hdf5File["Science/Y"][:, :]
        #        binsAll = get_dataset_contents(hdf5File, "Bins")[0]
        observationTimeStringsAll = hdf5File[
            "Geometry/ObservationDateTime"][:, :]

        centrePixelIndex = 200
        detectorDataBin1 = detectorDataAll[:, 1, centrePixelIndex].flatten()
        detectorDataBin2 = detectorDataAll[:, 2, centrePixelIndex].flatten()
        detectorDataCentrePixel = np.mean((detectorDataBin1, detectorDataBin2),
                                          axis=0)

        print(
            "max value = %0.0f, min value = %0.0f" %
            (np.max(detectorDataCentrePixel), np.min(detectorDataCentrePixel)))
        observationTimes = np.asfarray([
            np.mean([sp.utc2et(time[0]),
                     sp.utc2et(time[1])])
            for time in list(observationTimeStringsAll)
        ])

        unitVectors = np.asfarray([
            get_vector(datetime, referenceFrame)
            for datetime in observationTimes
        ])
        #        marker_colour = np.log(detectorDataCentrePixel+1000).flatten()
        marker_colour = np.log(detectorDataCentrePixel.flatten())
        ax.set_xlim([-0.004, 0.004])
        ax.set_ylim([-0.004, 0.004])
        ax.set_xlabel("%s FRAME X" % referenceFrame)
        ax.set_ylabel("%s FRAME Y" % referenceFrame)
        ax.scatter(unitVectors[:, 0],
                   unitVectors[:, 1],
                   c=marker_colour,
                   vmin=np.log(200000),
                   alpha=0.5,
                   cmap="jet",
                   linewidths=0)
        ax.set_aspect("equal")

        circle1 = plt.Circle((0, 0), 0.0016, color='yellow', alpha=0.1)
        ax.add_artist(circle1)
Example #3
def plotBinStrengths(hdf5Files, hdf5Filenames, obspaths):
    et_string_all = []
    max_counts_all = []
    relative_counts_all = []
    for fileIndex, (hdf5File,
                    hdf5Filename) in enumerate(zip(hdf5Files, hdf5Filenames)):
        print("%i/%i: Reading in file %s" %
              (fileIndex, len(hdf5Filenames), hdf5Filename))

        #    et, et_string, max_counts, relative_counts = getOccultationReferenceCounts(hdf5File, hdf5Filename, 17859.0)
        et_string, max_counts, relative_counts = getOccultationReferenceCounts(
            hdf5File)

        if len(relative_counts) == 4:  #just take nominal 6 order data
            et_string_all.append(et_string)
            max_counts_all.append(max_counts)
            relative_counts_all.append(relative_counts)

            if np.min(relative_counts) < 0.9:
                print(
                    "File %s has minimum relative counts of %0.2f (max counts = %0.0f)"
                    % (hdf5Filename, np.min(relative_counts), max_counts))

    relative_counts_array = np.asfarray(relative_counts_all)
    et_array = np.asfarray([sp.utc2et(string) for string in et_string_all])

    fig, ax = plt.subplots(figsize=(15, 5))
    plt.title("SO channel relative counts for each bin\nSearch string: %s" %
              obspaths[0])
    for bin_index in range(4):
        plt.scatter(et_array,
                    relative_counts_array[:, bin_index],
                    label="Bin %i" % bin_index,
                    marker=".")
    plt.xlabel("Ephemeris Time (s)")
    plt.ylabel("Relative counts for each bin")
    plt.legend()
    plt.grid(True)

    months = np.arange(4, 13, 1)
    monthStarts = [
        sp.utc2et(datetime(2018, month, 1).strftime("%Y-%m-%d"))
        for month in months
    ]
    monthNames = [datetime(2018, month, 1).strftime("%B") for month in months]
    for monthStart, monthName in zip(monthStarts, monthNames):
        plt.axvline(x=monthStart, color='k', linestyle='--')
        plt.text(monthStart + 100000, 0.7, monthName)
Example #4
 def ephemeris_start_time(self):
     initial_time = spice.utc2et(self.utc_start_time.isoformat())
     # To get shutter end (close) time, subtract 2 seconds from the start time
     updated_time = initial_time - 2
     # To get shutter start (open) time, take off the exposure duration from the end time.
     start_time = updated_time - self.exposure_duration
     return start_time
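A worked sketch of the two comments above, with hypothetical numbers: if spice.utc2et() returned 100.0 s and self.exposure_duration were 0.5 s, the chain would be

initial_time = 100.0                    # hypothetical spice.utc2et(...) result
updated_time = initial_time - 2         # shutter close
start_time = updated_time - 0.5         # shutter open (exposure_duration = 0.5 s)
assert start_time == 97.5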
Example #5
def test_hga_angles():

    spiops.load('/Users/mcosta/SPICE/BEPICOLOMBO/kernels/mk/bc_ops_local.tm')
    (hga_az_el,
     hga_earth) = spiops.hga_angles('MPO',
                                    spiceypy.utc2et('2020-05-07T06:00:00'))
    print(hga_az_el)
Example #6
File: core.py Project: mkelley/mskpy
def jd2et(jd):
    """Convert Julian date to SPICE ephemeris time.

    Ephemeris time is seconds past 2000.0.

    UTC is assumed.

    Parameters
    ----------
    jd : string, float, or array
      The Julian date.  Strings will have higher precision than
      floats.

    Returns
    -------
    et : float or array
      Ephemeris time.

    """

    global _spice_setup

    if not _spice_setup:
        _setup_spice()

    if isinstance(jd, (list, tuple, np.ndarray)):
        return [jd2et(x) for x in jd]

    if isinstance(jd, float):
        jd = "{:17.9f} JD".format(jd)

    return spice.utc2et(jd)
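A short usage sketch for jd2et(). Hedged: 'naif0012.tls' stands in for whatever leapseconds kernel _setup_spice() would normally furnish, and string input must carry the 'JD' label itself.

import spiceypy as spice

spice.furnsh('naif0012.tls')            # placeholder leapseconds kernel
et1 = jd2et(2451545.0)                  # float path: formatted as '2451545.000000000 JD'
et2 = jd2et('2451545.0 JD')             # string path: passed through unchanged
print(et1, et2)                         # both ~64.184 s past J2000 (the TDB-UTC offset)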
Example #7
File: core.py Project: mkelley/mskpy
def time2et(t):
    """Convert astropy `Time` to SPICE ephemeris time.

    Ephemeris time is seconds past 2000.0.

    Parameters
    ----------
    t : astropy Time
      The time.  Must be convertible to the UTC scale.

    Returns
    -------
    et : float or array
      Ephemeris time.

    """

    global _spice_setup
    from .. import util

    if not _spice_setup:
        _setup_spice()

    if util.date_len(t) > 0:
        return [time2et(x) for x in t]

    return spice.utc2et(t.utc.iso)
Example #8
    def __init__(self, file):
        
        # Get the speed relative to MU69

        file_tm = 'kernels_kem_prime.tm'
        
        # Start up SPICE if needed
        
        if (sp.ktotal('ALL') == 0):
            sp.furnsh(file_tm)    
            
        utc_ca = '2019 1 Jan 05:33:00'
        et_ca  = sp.utc2et(utc_ca) 
        (st,lt) = sp.spkezr('New Horizons', et_ca, 'J2000', 'LT', 'MU69')
        
        velocity = sp.vnorm(st[3:6])*u.km/u.s
    
        # Save the velocity (relative to MU69)

        self.velocity = velocity

        # Save the name of file to read
        
        self.file = file

        # Save the area of the s/c
        
        self.area_sc = (1*u.m)**2
        
        return
Example #9
def f_convert_utc_to_et(time_utc):

    #Convert datetime object to str
    time_utc_str = time_utc.strftime('%Y-%m-%dT%H:%M:%S')
    #Convert UTC to ET
    time_et = spiceypy.utc2et(time_utc_str)
    return (time_et, time_utc_str)
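Note that the strftime pattern above drops sub-second precision. A hedged variant that keeps microseconds (assuming a leapseconds kernel is already furnished), since spiceypy.utc2et accepts ISO strings with fractional seconds:

import datetime
import spiceypy

t = datetime.datetime(2020, 5, 7, 6, 0, 0, 250000)
time_et = spiceypy.utc2et(t.isoformat())   # parses '2020-05-07T06:00:00.250000'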
Example #10
def check(mk, time=False, report=False):

    frames_to_check = False
    if not time:
        today = datetime.datetime.now()
        time = today.strftime("%Y-%m-%dT%H:%M:%S")

    frame_dict = gen_frame_dict(mk, report=report)

    spiceypy.furnsh(mk)
    et = spiceypy.utc2et(time)

    for key in frame_dict.keys():
        if frame_dict[key]['class'] == 4:
            fname = frame_dict[key]['name']
            try:
                spiceypy.pxform(fname, 'J2000', et)
            except SpiceyError:
                frames_to_check = True
                print( f'Frame {fname} not properly defined at {time}\n' +
                       f'   NAME:     {frame_dict[key]["name"]}\n' +
                       f'   CLASS:    {frame_dict[key]["class"]}\n' +
                       f'   ID:       {frame_dict[key]["id"]}\n' +
                       f'   CENTER:   {frame_dict[key]["center"]}\n' +
                       f'   RELATIVE: {frame_dict[key]["relative"]}\n'
                     )

    spiceypy.kclear()

    if not frames_to_check:
        print(f'All {len(frame_dict)} frames are correct @ {time}')

    return
Example #11
File: core.py Project: mkelley/mskpy
def cal2et(date):
    """Convert calendar date to SPICE ephemeris time.

    Ephemeris time is seconds past 2000.0.

    UTC is assumed.

    Parameters
    ----------
    date : string or array
      The date.

    Returns
    -------
    et : float or array
      Ephemeris time.

    """

    from ..util import cal2iso
    global _spice_setup

    if not _spice_setup:
        _setup_spice()

    if isinstance(date, (list, tuple, np.ndarray)):
        return [cal2et(x) for x in date]

    return spice.utc2et(cal2iso(date))
Example #12
def dataToPickle():
    orbits_begin = {1:'2016-07-31T19:46:02',
                            2:'2016-09-23T03:44:48',
                            3:'2016-11-15T05:36:45',
                            4:'2017-01-07T03:11:30',
                            5:'2017-02-28T22:55:48',
                            6:'2017-04-22T19:14:57'}
    
    file_dict = {}
    metaKernel = 'juno_2019_v03.tm'
    spice.furnsh(metaKernel)

    start_time = datetime.datetime.strptime(orbits_begin[1],'%Y-%m-%dT%H:%M:%S')
    
    end_time = datetime.datetime.strptime(orbits_begin[2],'%Y-%m-%dT%H:%M:%S')
    
    data_folder = pathlib.Path(r'..\data\fgm')
    p = re.compile(r'\d{7}')
    
    for parent,child,files in os.walk(data_folder):
        for name in files:
            if name.endswith('.csv'):
                file_path = os.path.join(data_folder,name)
                
                search = p.search(name).group()
                date = datetime.datetime.strptime(search,'%Y%j')
                
                if date.date() >= start_time.date() and date.date() <= end_time.date():
                    iso_date = date.strftime('%Y-%m-%d')
                    if iso_date not in file_dict.keys():
                        file_dict[iso_date] = [file_path]
                    elif iso_date in file_dict.keys() and file_dict[iso_date] != file_path: 
                        file_dict[iso_date].append(file_path)
    
    for date in file_dict.keys():
        fgmdf = pd.DataFrame(data={'TIME':[],'BX':[],'BY':[],'BZ':[],'LAT':[]})
        save_date = datetime.datetime.strptime(date,'%Y-%m-%d')
        file_list = file_dict[date]
        for file in file_list:
            
            temp = pd.read_csv(file)
            datetime_list = temp['SAMPLE UTC']
            time_list = [datetime.datetime.fromisoformat(i).strftime('%H:%M:%S') for i in datetime_list]
            
            for index,time in enumerate(datetime_list):
                
                position, lighttime = spice.spkpos('JUNO',spice.utc2et(time),'IAU_JUPITER','NONE','JUPITER')
            
                vectorPos = spice.vpack(position[0],position[1],position[2])
                radii,longitude,latitude = spice.reclat(vectorPos)
                lat = latitude*spice.dpr()
                
                if lat >= -10 and lat <= 10:
                    fgmdf = fgmdf.append({'TIME':time,'BX':temp['BX PLANETOCENTRIC'][index],'BY':temp['BY PLANETOCENTRIC'][index],'BZ':temp['BZ PLANETOCENTRIC'][index],'LAT':lat},ignore_index=True)
        fgmdf = fgmdf.sort_values(by=['TIME'])
        save_name = f'{save_date.strftime("%Y%m%d")}'
        save_path = pathlib.Path(rf'..\data\pickledfgm\jno_fgm_{save_name}.pkl')
        pickledf = fgmdf.to_pickle(save_path)
        print(f'Saved pickle {date}')                                     
Example #13
def cspice_utc2et(kernelfile, ctime):
    spice_ver = spice.tkvrsn('TOOLKIT')
    spice.furnsh(kernelfile)
    kernels_loaded = spice.ktotal("ALL")
    #print(kernels_loaded)
    ntime = spice.utc2et(ctime.value)
    #print(spice_ver)
    return ntime
Example #14
def cspice_utc2et(kernelfile, stimes, ttimes):
    global ets, etx
    nt = len(stimes)
    ets = np.zeros(nt)
    etx = np.zeros(nt)
    spice_ver = spice.tkvrsn('TOOLKIT')
    spice.furnsh(kernelfile)
    kernels_loaded = spice.ktotal("ALL")
    #print(kernels_loaded)

    for i in range(0, nt):
        sstr = stimes[i][:23]
        tstr = ttimes[i][:23]
        et1 = spice.utc2et(sstr)
        et2 = spice.utc2et(tstr)
        ets[i] = 0.5 * (et1 + et2)

    for i in range(0, nt):
        etx[i] = ets[i] - ets[0]
Example #15
def cal2et(time, format='UTC', support_ker=False, unload=False):
    """
    Converts UTC or Calendar TDB (CAL) time to Ephemeris Time (ET). Accepts
    a single time or a list of times. Unless support_ker is provided, this
    function assumes that the required support kernels (meta-kernel or
    leapseconds kernel) have already been loaded.

    :param time: Input UTC or CAL time
    :type time: Union[str, list]
    :param format: Input format; 'UTC' or 'CAL'
    :type format: str
    :param support_ker: Support kernel (meta-kernel or leapseconds) to load
    :type support_ker: str
    :param unload: If True it will unload the input support kernel
    :type unload: bool
    :return: Output ET
    :rtype: Union[float, list]
    """
    out_list = []

    if isinstance(time, str):
        time = [time]

    #
    # We need to specify that the input is calendar format in TDB. If it is
    # UTC we need to load the support kernels.
    #
    if support_ker:
        spiceypy.furnsh(support_ker)

    if format == 'CAL':
        time[:] = [x.replace('T', ' ') for x in time]
        time[:] = [x + ' TDB' for x in time]

    for element in time:

        try:
            if format == 'UTC':
                out_elm = spiceypy.utc2et(element)

            elif format == 'CAL':
                out_elm = spiceypy.str2et(element)
            else:
                out_elm = element
        except:
            out_elm = spiceypy.str2et(element)

        out_list.append(out_elm)

    if len(out_list) == 1:
        out_time = out_list[0]
    else:
        out_time = out_list

    if unload:
        spiceypy.unload(support_ker)

    return out_time
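A hedged usage sketch for cal2et(); 'naif0012.tls' is a placeholder leapseconds kernel:

et_utc = cal2et('2020-05-07T06:00:00', format='UTC',
                support_ker='naif0012.tls', unload=True)

# 'CAL' input is rewritten as '... TDB' and routed through str2et, so it is
# interpreted on the TDB scale rather than UTC.
et_tdb = cal2et('2020-05-07T06:00:00', format='CAL',
                support_ker='naif0012.tls', unload=True)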
Example #16
def test_Satellite_B():
    """Test that a Satellite-object correctly calculates internal epochs (consistent with sp.utc2et() )"""
    # Make a Satellite-object and get the epochs
    time = Time([2458337.8283571, 2458337.9], format='jd', scale='tdb')
    obscode = '-95'
    returnedEpochs = wis.Satellite(obscode, time).epochs

    # Check epoch calcs are correct
    for i, jd in enumerate(time.utc.jd):
        assert returnedEpochs[i] == sp.utc2et('JD'+str(jd)), \
            'Returned epoch [%r] does not match calculation from spiceypy [%r]' % (returnedEpochs[i] , jd)
Example #17
def get_psp_positions(from_days, to_days, step=24):
    now = datetime.now()
    coords = list()
    for i in range(from_days * 24, to_days * 24, step):
        tspy = (now + timedelta(hours=i)).strftime('%Y-%m-%dT%H:%M:%S')
        ts = spice.utc2et(tspy)
        # -96 is the ID of PSP
        pos, lt = spice.spkezp(-96, ts, 'ECLIPJ2000', 'NONE', 0)
        x, y, z = pos
        coords.append([x, y, z])
    return f2i(coords)
Example #18
def generateEphemeris(datesFile, bsp, dexxx, leapSec, nameFile):
    #Load the asteroid and planetary ephemeris and the leap second (in order)
    spice.furnsh(dexxx)
    spice.furnsh(leapSec)
    spice.furnsh(bsp)


    #Values used to extract the target SPK ID from the bsp header comments (JPL, NIMA)
    source = {'NIMA':(45, 'ASTEROID_SPK_ID ='), 'JPL':(74, 'Target SPK ID   :')}
    n, key = source['NIMA']
    idspk = findIDSPK(n, key)
    if idspk == '':
        n, key = source['JPL']
        idspk = findIDSPK(n, key)

    #Read the file with dates
    with open(datesFile, 'r') as inFile:
        dates = inFile.read().splitlines()

    n = len(dates)

    #Convert dates from utc to et format
    datesET = [spice.utc2et(utc) for utc in dates]

    #Compute geocentric positions (x,y,z) for each date with light time correction
    rAst, ltAst = spice.spkpos(idspk, datesET, 'J2000', 'LT', 'EARTH')
    rSun, ltSun = spice.spkpos('SUN', datesET, 'J2000', 'NONE', 'EARTH')
    
    elongation = [angle(rAst[i], rSun[i]) for i in range(n)]

    data = [spice.recrad(xyz) for xyz in rAst]
    distance, rarad, decrad = zip(*data)

    #================= for graphics =================
    tempFile = open('radec.txt', 'w')
    for row in data:
        tempFile.write(str(row[1]) + '  ' + str(row[2]) + '\n')
    tempFile.close()
    #================================================

    ra = [ra2HMS(alpha) for alpha in rarad]
    dec = [dec2DMS(delta) for delta in decrad]

    #Convert cartesian to angular coordinates and save them in an ASCII file
    outFile = open(nameFile,'w')
    outFile.write('\n\n     Data Cal. UTC' + ' '.ljust(51) + 'R.A.__(ICRF//J2000.0)__DEC')
    outFile.write(' '.ljust(43) + 'DIST (km)' + ' '.ljust(24) + 'S-O-A\n')
    for i in range(n):
        outFile.write(dates[i] + ' '.ljust(44) + ra[i] + '  ' + dec[i] + ' '.ljust(35)) 
        outFile.write('{:.16E}'.format(distance[i]) + ' '.ljust(17))
        outFile.write('{:.4f}'.format(elongation[i]) + '\n')
    outFile.close()
Example #19
 def utc2obt(self, utc_string):
     # Utc to Ephemeris time (seconds past J2000)
     ephemeris_time = spiceypy.utc2et(utc_string)
     # Ephemeris time to Obt
     #return ephemeris_time
     obt_string = spiceypy.sce2s(self.solar_orbiter_naif_id, ephemeris_time)
     time_fields = re.search(r'\/(.*?):(\d*)', obt_string)
     group = time_fields.groups()
     try:
         return int(group[0]) + int(group[1]) / 65536.
     except Exception as e:
         #logger.warning(str(e))
         return 0
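The sce2s output has the form 'partition/coarse:fine', where the fine field counts ticks of 1/65536 s. A standalone sketch of the decode above (the OBT string is an invented value, not from a real kernel):

import re

obt_string = '1/0123456789:45678'
coarse, fine = re.search(r'\/(.*?):(\d*)', obt_string).groups()
print(int(coarse) + int(fine) / 65536.)    # 123456789.69699...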
Example #20
File: DataGraph.py Project: aaschok/Juno
def getPosLabels(dataDict, num):
    stamp = str(dataDict['DATETIME_ARRAY'][min(
        range(len(dataDict['TIME_ARRAY'])),
        key=lambda j: abs(dataDict['TIME_ARRAY'][j] - num))])

    position, lighttime = spice.spkpos('JUNO', spice.utc2et(stamp),
                                       'IAU_JUPITER', 'NONE', 'JUPITER')

    vectorPos = spice.vpack(position[0], position[1], position[2])
    radii, longitude, latitude = spice.reclat(vectorPos)
    lat = f'{round(latitude*spice.dpr(),2)}$^o$ Lat'
    dist = f'{round(radii/69911,3)} $R_j$'
    return lat, dist
Example #21
def create_targets_file(name, dates, bsp_object, bsp_planets, leap_Sec,
                        location):
    # Load the asteroid and planetary ephemeris and the leap second (in order)
    spice.furnsh(bsp_planets)
    spice.furnsh(leap_Sec)
    spice.furnsh(bsp_object)

    # Values used to extract the target SPK ID from the bsp header comments (JPL, NIMA)
    source = {
        'NIMA': (45, 'ASTEROID_SPK_ID ='),
        'JPL': (74, 'Target SPK ID   :')
    }
    n, key = source['NIMA']
    idspk = findIDSPK(n, key)
    if idspk == '':
        n, key = source['JPL']
        idspk = findIDSPK(n, key)

    # Convert dates from JD to et format. "JD" is added due to spice requirement
    datesET = [spice.utc2et(jd + " JD") for jd in dates]

    # Compute geocentric positions (x,y,z) for each date with light time correction
    rAst, ltAst = spice.spkpos(idspk, datesET, 'J2000', 'LT', 'EARTH')

    # Location
    lon, lat, ele = location[0], location[1], location[2]

    # Create targets file
    output = os.path.join(os.getenv("DATA_DIR"), targets_file)
    with open(output, 'w') as outFile:
        for i, r_geo in enumerate(rAst):
            # Convert from longitude, latitude, elevation to r(x,y,z)
            r = geoTopoVector(lon, lat, ele, float(dates[i]))

            #r_topo = r_geo - r
            r_topo = [r_geo[0] - r[0], r_geo[1] - r[1], r_geo[2] - r[2]]

            # Convert rectangular coordinates (x,y,z) to range, right ascension, and declination.
            d, rarad, decrad = spice.recrad(r_topo)

            # Transform RA and Decl. from radians to degrees and then to hexadecimal format.
            ra = ra2HMS(np.degrees(rarad) / 15.0, 6)
            dec = dec2DMS(np.degrees(decrad), 5)

            # Save parameters in specific format
            outFile.write(" " + ra + " " + dec + " " + dates[i] + " " + name +
                          "\n")


    return output
Example #22
def utc2tai(utc, utc_end, deltat):
    if utc_end is None:
        utc_end = utc

    if deltat is None:
        deltat = 86400

    if deltat <= 0:
        utc_end = utc
        deltat = 1

    try:
        tai_beg = sp.unitim(sp.utc2et(utc), 'et', 'tai')
        tai_end = sp.unitim(sp.utc2et(utc_end), 'et', 'tai')
    except sp.stypes.SpiceyError as ex:
        raise GeometrySpiceError(ex.value)

    tais = []
    while tai_beg <= tai_end and len(tais) < 1e6:
        tais.append(tai_beg)
        tai_beg += deltat

    return tais
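A usage sketch, assuming a leapseconds kernel is furnished and GeometrySpiceError is defined in the surrounding module:

tais = utc2tai('2020-05-07T00:00:00', '2020-05-07T06:00:00', 3600)
print(len(tais))                        # 7 hourly TAI samples, endpoints inclusive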
Example #23
    def positionData(self):
        # spice.furnsh(self.meta) #loads the meta kernel that will load all kernels needed

        time = spice.utc2et(self.time)

        self.position, lighttime = spice.spkpos(
            'JUNO', time, 'IAU_JUPITER', 'NONE', 'JUPITER'
        )  #Finds the position in cartesian coords relative to jupiter

        pos = spice.vpack(self.position[0], self.position[1],
                          self.position[2])  #Packs the position into a vector
        self.distance, self.longitude, self.latitude = spice.reclat(
            pos)  #Finds radial dist, latitide and longitude
        self.latitude *= spice.dpr()
        self.distance /= 69911
Example #24
    def ephemeris_start_time(self):
        """
        Returns the ephemeris_start_time of the image.
        Expects spacecraft_clock_start_count to be defined. This should be a float
        containing the start clock count of the spacecraft.
        Expects spacecraft_id to be defined. This should be the integer Naif ID code
        for the spacecraft.

        Returns
        -------
        : float
          ephemeris start time of the image.
        """
        return spice.utc2et(
            self.utc_start_time.strftime("%Y-%m-%d %H:%M:%S.%f"))
Example #25
    def ephemeris_start_time(self):
        """
        Returns the ephemeris_start_time of the image.
        Expects spacecraft_clock_start_count to be defined. This should be a float
        containing the start clock count of the spacecraft.
        Expects spacecraft_id to be defined. This should be the integer Naif ID code
        for the spacecraft.

        Returns
        -------
        : float
          ephemeris start time of the image.
        """
        return spice.utc2et(
            str(self.label['IsisCube']['Instrument']['StartTime']))
Example #26
def printBoresights(angleSeparationA, angleSeparationB):
    """input manual rotation angles from SPICE kernels to calculate new and old boresight"""
    oldSoBoresight = [0.0, 0.0, 1.0]
    oldUVISBoresight = [0.0, 0.0, 1.0]
    rotationMatrixSoUVIS = sp.pxform("TGO_NOMAD_SO", "TGO_NOMAD_UVIS_OCC",
                                     sp.utc2et("2018 APR 01 00:00:00 UTC"))
    oldSoBoresightUVIS = np.dot(oldSoBoresight, rotationMatrixSoUVIS.T)
    oldBoresightSeparation = sp.vsep(oldUVISBoresight,
                                     oldSoBoresightUVIS) * sp.dpr() * 60.0
    print("oldBoresightSeparation")
    print(oldBoresightSeparation)

    print("angleSeparationB")
    print(angleSeparationB)
    #####SAVE THIS IT WORKS!!!######
    newSoBoresightTGO = np.asfarray([
            -1.0 * np.sin(angleSeparationB / sp.dpr()), \
            np.sin(angleSeparationA / sp.dpr()) * np.cos(angleSeparationB / sp.dpr()), \
            np.cos(angleSeparationA / sp.dpr()) * np.cos(angleSeparationB / sp.dpr())])

    print("newSoBoresightTGO, vnorm = %0.6f" % sp.vnorm(newSoBoresightTGO))
    print(newSoBoresightTGO)

    newUVISBoresightTGO = np.asfarray(
        [-0.922221097920913, -0.386613383297695, 0.006207330031467])
    oldSoBoresightTGO = np.asfarray([-0.92156, -0.38819, 0.00618])
    oldUVISBoresightTGO = np.asfarray(
        [-0.92207347097, -0.3869614566418, 0.0064300242046])

    oldNewSoBoresightSeparation = sp.vsep(newSoBoresightTGO,
                                          oldSoBoresightTGO) * sp.dpr() * 60.0
    print("oldNewSoBoresightSeparation")
    print(oldNewSoBoresightSeparation)

    oldNewUVISBoresightSeparation = sp.vsep(
        newUVISBoresightTGO, oldUVISBoresightTGO) * sp.dpr() * 60.0
    print("oldNewUVISBoresightSeparation")
    print(oldNewUVISBoresightSeparation)

    newSoUVISBoresightSeparation = sp.vsep(
        newSoBoresightTGO, newUVISBoresightTGO) * sp.dpr() * 60.0
    print("newSoUVISBoresightSeparation")
    print(newSoUVISBoresightSeparation)

    oldSoUVISBoresightSeparation = sp.vsep(
        oldSoBoresightTGO, oldUVISBoresightTGO) * sp.dpr() * 60.0
    print("oldSoUVISBoresightSeparation")
    print(oldSoUVISBoresightSeparation)
Example #27
def exact_leap_tests():
    ''' Tests time tick conversion for leap second values '''
    epoch_lst = ['Y1966', 'Y1970', 'Y2000', 'J2000']

    leap_dates = [datetime(1974, 1, 1), datetime(2006, 1, 1), datetime(2009, 1, 1)]

    # Y1966 should not treat leap seconds differently
    leap_tick = ff_time.date_to_tick(leap_dates[0], 'Y1966')
    dates, leaps = ff_time.ticks_to_dates([leap_tick], 'Y1966')
    assert(dates[0] == leap_dates[0])
    assert(len(leaps) == 0)

    # Test Y1970 epoch against exact leap seconds
    for leap_date in leap_dates:
        leap_tick = ff_time.date_to_tick(leap_date, 'Y1970')
        dates, leaps = ff_time.ticks_to_dates([leap_tick-1, leap_tick], 'Y1970')
        assert(len(leaps) == 1)
        assert(dates[1] == leap_date)

    # Test leap second values for 2000 epochs
    # for epoch in ['Y2000', 'J2000']:
    #     for leap_date in leap_dates:
    #         leap_tick = ff_time.date_to_tick(leap_date, epoch)
            # dates, leaps = ff_time.ticks_to_dates([leap_tick - 1, leap_tick], epoch)
            # print (dates)
            # # Only one leap second found, leap date = previous second, 
            # # and first date = last date (expected behavior)
            # assert(len(leaps) == 1)
            # assert(dates[0] == leap_date - timedelta(seconds=1))
            # assert(dates[1] == leap_date)

    # Test J2000 leap second dates against CSPICE epoch values
    epoch = 'J2000'
    for leap_date in leap_dates:
        leap_tick = ff_time.date_to_tick(leap_date, epoch)

        # Get fftime and CSPICE conversions
        ts = leap_date.strftime('%Y %j %b %d %H:%M:%S.%f')
        fftime_tick = FFTIME(ts, Epoch=epoch)._tick

        spice_tick = spice.utc2et(leap_date.isoformat())

        # Check that spice tick and fftime_tick are close
        # (subtracting ofst_delta beforehand since epochs
        #  are defined differently)
        comp = fftime_tick - ofst_delta
        assert(np.isclose(comp, spice_tick, 0.1))
Example #28
def getHdf5DetectorTemperature(hdf5File, obspath, channel):
    print("Reading in file %s" %(obspath))

    if channel == "so":
        temperature_field = "FPA1_FULL_SCALE_TEMP_SO"
        start_index = 1
        end_index = 590
    elif channel == "lno":
        temperature_field = "FPA1_FULL_SCALE_TEMP_LNO"
        start_index = 1
        end_index = 40

    hsk_time_strings = hdf5File["Housekeeping/DateTime"][start_index:end_index]
    variable = hdf5File["Housekeeping/%s" %temperature_field][start_index:end_index]
    hsk_time = [sp.utc2et(hsk_time_string) for hsk_time_string in hsk_time_strings]

    return hsk_time, variable 
Example #29
def print_state(epoch):
    et = sp.utc2et(epoch)
    print()
    print(epoch, et)
    sc_from_moon = sp.spkez(-10003001, et, 'j2000', 'none', 301)[0]
    print("Moon J2000", sc_from_moon)
    sc_from_earth = sp.spkez(-10003001, et, 'j2000', 'none', 399)[0]
    print("EME2000", sc_from_earth)
    moon_from_earth = sp.spkez(301, et, 'j2000', 'none', 399)[0]
    print("Moon from Earth:", moon_from_earth)
    moon_from_emb = sp.spkez(301, et, 'j2000', 'none', 3)[0]
    print("Moon from EMB:", moon_from_emb)
    earth_from_emb = sp.spkez(399, et, 'j2000', 'none', 3)[0]
    print("Earth from EMB:", earth_from_emb)
    sum = -earth_from_emb + moon_from_emb + sc_from_moon
    print("Sum: -earth_from_emb + moon_from_emb + sc_from_moon\n\t", sum)
    print("Delta:", sum - sc_from_earth)
Example #30
    def __ClockDrift(self, enddate=False):

        if self.name != 'MPO':
            sclk_start = 0.0
            sclk_end = 500000000
        else:
            sclk_start = 3.9631239807361E+13/65536
            sclk_end = 700000000

        #sclk_end = spiceypy.gdpool('SCLK_PARTITION_END_{}'.format(str(-1*self.id)),0,1000)[0]/65536


        step = 10000.0

        if not enddate:
            et_end = self.time.getTime('finish','utc')
        else:
            et_end = spiceypy.utc2et(enddate)

        sclk = []
        ephtime = []
        utctime = []

        for i in np.arange(sclk_start, sclk_end, step):
            sclk.append(i)

            sclkdp = i*65536
            et = spiceypy.sct2e(self.id, sclkdp)
            ephtime.append(et)

            utcstr = spiceypy.et2utc(et, 'C', 3)
            utctime.append(utcstr)


        dates = []
        drift = []
        for j in range(0,len(ephtime),1):
            if ephtime[j] >= et_end:
                break
            drift.append((sclk[j]-sclk_start) - (ephtime[j] - ephtime[0]))
            dates.append(ephtime[j])

        self.clock_dates = dates
        self.clock_drift = drift

        return
Example #31
File: manager.py Project: i4Ds/STIXCore
    def utc_to_scet(self, utc):
        """
        Convert UTC ISO format to SCET time strings.

        Parameters
        ----------
        utc : `str`
            UTC time string in ISO format e.g. '2019-10-24T13:06:46.682758'

        Returns
        -------
        `str`
            SCET time string
        """
        # Utc to Ephemeris time (seconds past J2000)
        ephemeris_time = spiceypy.utc2et(utc)
        # Ephemeris time to Obt
        return spiceypy.sce2s(SOLAR_ORBITER_ID, ephemeris_time)
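A hedged usage sketch; 'converter' stands in for an instance of the enclosing class, with the Solar Orbiter meta-kernel furnished so that SOLAR_ORBITER_ID and its SCLK kernel resolve:

scet = converter.utc_to_scet('2019-10-24T13:06:46.682758')
# sce2s returns a string of the form 'partition/coarse:fine', where the fine
# field counts ticks of 1/65536 s (compare Example #19).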
Example #32
def writeTimeLsToFile():
    """make list of time vs ls"""
    SPICE_TARGET = "MARS"
    SPICE_ABERRATION_CORRECTION = "None"
    
    DATETIME_FORMAT = "%d/%m/%Y %H:%M"
    
    
    from datetime import datetime, timedelta
    
    linesToWrite = []
    datetimeStart = datetime(2018, 3, 1, 0, 0, 0, 0)
    for hoursToAdd in range(0, 24*31*12*3, 6): #3 years
        newDatetime = (datetimeStart + timedelta(hours=hoursToAdd)).strftime(DATETIME_FORMAT)
        ls = sp.lspcn(SPICE_TARGET, sp.utc2et(str(datetimeStart + timedelta(hours=hoursToAdd))), SPICE_ABERRATION_CORRECTION) * sp.dpr()
        linesToWrite.append("%s\t%0.1f" %(newDatetime, ls))
    
    writeOutput("Time_vs_Ls.txt", linesToWrite)
Example #33
    def get_posns(self,
                  obscode,
                  times,
                  center="Sun",
                  frame="J2000",
                  abcorr="NONE"):
        """ """

        # Convert the supplied time to the required format for spiceypy
        # -----------------------------------------------
        self.epochs = np.array(
            [sp.utc2et('JD' + str(jdutc)) for jdutc in self.time.utc.jd])

        # Evaluate the position of the satellite using the loaded kernels
        # -----------------------------------------------
        self.posns, self.ltts = sp.spkpos(obscode, self.epochs, frame, abcorr,
                                          center)  # [km, s]
        self.posns = self.convert(posns=self.posns)  # AU
        self.ltts = self.convert(ltts=self.ltts)  # Day
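This example (and Example #16) relies on utc2et accepting a 'JD'-prefixed string. A minimal standalone sketch; 'naif0012.tls' is a placeholder leapseconds kernel:

import spiceypy as sp

sp.furnsh('naif0012.tls')               # placeholder leapseconds kernel
et = sp.utc2et('JD' + str(2451545.0))   # Julian date, interpreted as UTC
print(et)                               # ~64.184 s past J2000 (TDB)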
Example #34
    def get_posns(self,
                  obscode,
                  times,
                  center="Sun",
                  frame="J2000",
                  abcorr="NONE"):
        """
        """

        # Convert the supplied time to the required format for spiceypy
        # -----------------------------------------------
        self.epochs = np.array(
            [sp.utc2et('JD' + str(jdutc)) for jdutc in times.utc.jd])

        # Get observatory posn for specific obs-code supplied
        # NB: this is in fractions of an earth-radius
        # So will probably need multiplying by 6378.1363/149597870.700 to get to AU
        # -----------------------------------------------
        self.obs_vec = ground_obscode_dict[obscode] * Rearth_km

        # Use pxform to return the matrix that transforms position vectors from ITRF93 (not IAU_EARTH) frame to J2000 frame at specified epoch.
        # Rotate the observatory posn vec to the required frame ( the J2000 default means this would be EQUATORIAL)
        #https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/FORTRAN/spicelib/pxform.html
        #https://spiceypy.readthedocs.io/en/v2.3.1/documentation.html#spiceypy.spiceypy.pxform
        # -----------------------------------------------
        self.obs_vec_rot = np.array([
            np.dot(sp.pxform('ITRF93', frame, epoch), self.obs_vec)
            for epoch in self.epochs
        ])
        self.obs_vec_rot_AU = self.obs_vec_rot / au_km

        # Get the position of the geocenter
        # ( the default frame=J2000 & center=SUN means this would be HELIOCENTRIC EQUATORIAL)
        # -----------------------------------------------
        self.posns, self.ltts = sp.spkpos('399', self.epochs, frame, abcorr,
                                          center)
        self.posns = self.convert(posns=self.posns)  # AU
        self.ltts = self.convert(ltts=self.ltts)  # Day

        # Combine vectors to get the posn vec of the observatory
        # ( the default frame=J2000 & center=SUN means this would be HELIOCENTRIC EQUATORIAL)
        # -----------------------------------------------
        self.hXYZ = self.obs_vec_rot_AU + self.posns
Example #35
File: timing.py Project: apodemus/pySHOC
def get_Earth_Sun_coords(t, ABCORR):
    """
    Get Earth (Geocentric) and solar (Heliocentric) ephemeris (km) relative to solar
    system Barycenter at times t
    """
    FRAME = 'J2000'
    OBSERVER = '0'  # Solar System Barycenter (SSB)

    N = len(t)
    # TODO: vectorize??
    xyzEarth = np.empty((N, 3), np.float128)
    xyzSun = np.empty((N, 3), np.float128)
    ltEarth = np.empty(N)
    ltSun = np.empty(N)
    for i, t in enumerate(t):
        # Ephemeris time (seconds since J2000 TDB)
        et = spice.utc2et(str(t))
        # Earth geocenter wrt SSB in J2000 coordinates
        xyzEarth[i], ltEarth[i] = spice.spkpos(OBSERVER, et, FRAME, ABCORR,
                                               'earth')
        # Sun heliocenter wrt SSB in J2000 coordinates
        xyzSun[i], ltSun[i] = spice.spkpos(OBSERVER, et, FRAME, ABCORR, '10')

    return xyzSun, xyzEarth
Example #36
import matplotlib.pyplot as plt # pyplot
from   matplotlib.figure import Figure
import numpy as np

import spiceypy as sp

# HBT imports

import hbt

sp.furnsh('kernels_kem_ats.tm')

utc_start = "2019 1 Jan 06:40:00"
utc_end   = "2019 1 Jan 07:20:00"

et_start = sp.utc2et(utc_start)
et_end   = sp.utc2et(utc_end)

et_mid = (et_start + et_end)/2

num_dt = 500

et = hbt.frange(et_start, et_end, num_dt)

name_target = 'MU69'
name_observer = 'New Horizons'

dist   = np.zeros(num_dt)
phase  = np.zeros(num_dt)

for i,et_i in enumerate(et):
    file_tm = file_tm_nightside  # I have not really validated that nightside works. Results look different than expected.
    side_str = 'nightside'
    
Example #37
sp.furnsh(file_tm) # Start up SPICE

hour      = 3600
day       = 24 * hour

fs        = 15   # General font size

if ('tcm22' in file_tm):
    utc_ca    = '2019 1 jan 07:00:00'
else:
    utc_ca    = '2019 1 jan 03:09:00' # MU69 C/A time. I got this from GV.
    
et_ca     = sp.utc2et(utc_ca)
dt_tof    = 180                   # Time-of-flight uncertainty (halfwidth, seconds)

# Define the dates of the OpNavs. I have no time for these, but it probably doesn't matter.
# Taken from MOL = Master Obs List spreadsheet 27-Mar-2017

utc_opnav = np.array(
            [
#             '16 Aug 2018',

             '15 Sep 2018', '16 Sep 2018', '17 Sep 2018',
             '24 Sep 2018', '25 Sep 2018', '26 Sep 2018',
             '05 Oct 2018', '06 Oct 2018', '07 Oct 2018',             
             '15 Oct 2018', '16 Oct 2018', '17 Oct 2018',                
             '25 Oct 2018', '26 Oct 2018', '27 Oct 2018',                
             '04 Nov 2018', '05 Nov 2018', '06 Nov 2018',
Example #38
def utcstr_to_spiceet(time):
    """passthrough to spiceypy.utc2et"""
    if isinstance(time, bytes):
        return spiceypy.utc2et(time.decode('utf-8'))
    return spiceypy.utc2et(time)
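A quick sketch of why the bytes branch exists: HDF5 string datasets frequently yield bytes, which utc2et does not take directly. Assumes a leapseconds kernel is furnished.

et_a = utcstr_to_spiceet(b'2018-04-01T00:00:00')   # bytes, e.g. from h5py
et_b = utcstr_to_spiceet('2018-04-01T00:00:00')
assert et_a == et_b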
Example #39
 def _get_et(self):
     return spice.utc2et(self.utc)
Example #40
def test_frame_mu69_sunflower():
    
    """
    This is just a quick test routine to check that MU69 Sunflower frame is more-or-less working.
    
    This program prints the RA/Dec of MU69's Y and Z axes, under a variety of different conditions.
    
    No options.
    
    Things to verify here:
        - That the Z RA/Dec point toward the orbit pole RA/Dec in all cases. 
              Small deviations << 1 deg allowed
              
        - That the Y RA/Dec point toward the Sun always for _ROT Frames.
              (ie, Y RA/Dec changes with time.)
              
        - That the Y RA/Dec point toward the Sun for the _INERT frames on 1 Jan 2015,
              and slowly move after / before that, at roughly 1 deg/year.
              (ie, Y RA/Dec is pretty much fixed)
              
    16-Jan-2018. HBT verified that output values look pretty good.

    """
    
    tms = ['kernels_sunflower.tm']  # Define the metakernel, which in turn calls the sunflower .tf frame

    frames = ['2014_MU69_SUNFLOWER_ROT', '2014_MU69_SUNFLOWER_INERT']
        
    utcs = ['1 Jan 2005 00:00:00', '1 Jan 2015 00:00:00']
    
    frame_j2k = 'J2000'
    
    # Get values from Simon Porter. See email from MRS ~16-Jan-2018.
    # These RA/Dec values are also put into the .tf file which I have made.
    
    ra_mu69_sp = 272.426110231801*hbt.d2r
    dec_mu69_sp = 68.831520928192*hbt.d2r
    
    # Define the MU69 Z axis. We will rotate this, and it should point in specified direction.
    
    z_mu69 = [0, 0, 1] # MU69 +Z axis. It should point to the specified RA/Dec
    y_mu69 = [0, 1, 0] # MU69 +Y axis. It should point away from the Sun
    
    print("Simon Porter pole position:")
    print("RA = {}, Dec = {}".format(ra_mu69_sp*hbt.r2d, dec_mu69_sp*hbt.r2d))
    print("---")
    
    # Loop over input parameters. For each combination, do a calculation, and print the output.
    
    for tm in tms:    
        for frame in frames: 
            for utc in utcs:
                sp.furnsh(tm)
                et = sp.utc2et(utc)
                mx = sp.pxform(frame, frame_j2k, et)
            
                z_mu69_j2k = sp.mxv(mx, z_mu69)                
                (_, ra_z, dec_z) = sp.recrad(z_mu69_j2k)

                y_mu69_j2k = sp.mxv(mx, y_mu69)                
                (_, ra_y, dec_y) = sp.recrad(y_mu69_j2k)
                
                print("Metakernel: {}".format(tm))
                print("UTC:        {}".format(utc))
                print("Frame:      {}".format(frame))
                print("Matrix:     \n{}".format(mx))
                print("Y RA = {}, Dec = {}".format(ra_y*hbt.r2d, dec_y*hbt.r2d))
                print("Z RA = {}, Dec = {}".format(ra_z*hbt.r2d, dec_z*hbt.r2d))
                print("\n---\n")
Example #41
iof_ring = 1e-7
exptime = 30
mode = '4X4'
#mode = '1X1'
pos = (None, None)
#pos = (300, 700)
pos = (100, 200)  # y, x in normal imshow() coordinates.
#dist_target = 0.01*u.au
dist_solar  = 43.2*u.au  # MU69 dist at encounter: 43.2 AU, from KEM Wiki page 
do_psf = True            # Flag: Do we convolve result with NH LORRI PSF?

dt_obs = -22*u.day        # Time relative to MU69 C/A

utc_ca = '2019 1 Jan 05:33:00'
et_ca  = sp.utc2et(utc_ca)
et_obs = et_ca + dt_obs.to('s').value
 
utc_obs = sp.et2utc(et_obs, 'C', 0)
utc_obs_human = 'K{:+}d'.format(dt_obs.to('day').value)

vec,lt = sp.spkezr('2014 MU69', et_obs, 'J2000', 'LT', 'New Horizons')
vec_sc_targ = vec[0:3]
dist_target = np.sqrt(np.sum(vec_sc_targ**2))*u.km.to('AU')*u.au
            
arr = nh_make_simulated_image_lorri(do_ring=True, 
                                    dist_ring_smoothing = 1000*u.km, 
                                    iof_ring = iof_ring,
                                    a_ring = (5000*u.km, 10000*u.km), 
                                    exptime = exptime, 
                                    mode = mode, 
Example #42
width_fov_rad = 5.7 * hbt.d2r
height_fov_rad = 0.15 * hbt.d2r

dt_slew   = 30 # Time between slews
exptime   = 10  # Exposure time, MVIC.
exp_per_footprint = 3   # Number of total exposures per pointing. ('3' for 2+1.)
 
frac_overlap = 0.92   # Fractional overlap between footprints

fs        = 15   # General font size

utc_ca    = '2019 1 Jan 07:00:00'

n_footprints = 27

et_ca     = sp.utc2et(utc_ca)

et_start  = et_ca    + 30*minute
et_end    = et_start + 30*minute

radius_image = 2500*u.km
radius_kbo   = 20*u.km 
radius_ring  = 1000*u.km

radius_ring_km = radius_ring.to('km').value
radius_ring_km = np.array([150, 500, 1250])

#==============================================================================
# Set up the times for each exposure
#==============================================================================
Example #43
# HBT imports

import hbt
file_tm = 'kernels_kem_prime.tm'

sp.furnsh(file_tm)


utc_limits_arr = ["2018 1 Dec 5:00", "2018 1 Dec 12:00"]
utc_limits_arr = ["2019 1 Jan 5:00", "2019 1 Jan 6:00"]

et_limits_arr = []

for utc in utc_limits_arr:
    et_limits_arr.append(sp.utc2et(utc))

num_et = 100


et_arr = hbt.frange(et_limits_arr[0], et_limits_arr[1], num_et)

phase_arr = []
utc_arr   = []

for et in et_arr:
    (st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT+S', 'New Horizons')
    vec_sc_mu69 = st[0:3]

    (st, lt) = sp.spkezr('MU69', et, 'J2000', 'LT+S', 'Sun')
    vec_sun_mu69 = st[0:3]
Example #44
      else:
          dt_s = float(file['ET']) - t0
      
      m, s = divmod(dt_s, 60)
      h, m = divmod(m, 60)
      dt_str = "{:3d}h {:2d}m {:2d}s".format(int(h), int(m), int(s))
      if (dt_s == 0): dt_str = '--'
      dt_str = dt_str.replace(' 0h', '').replace(' 0m', '')
      t0 = float(file['ET'])
      
      # Create a super-short version of the filename (cut out the ApID)
      
      file_trunc = file['Shortname'].replace('lor_', '').replace('_0x630_sci', '').\
        replace('_0x633_sci', '').replace('_opnav', '').replace('.fit', '')
      
      utc_trunc = sp.et2utc(sp.utc2et(file['UTC']),'C', 0)
      
      # Print a line of the table, to the screen and the file
      
      line =  "{:>3}/{:<3}: {:>3}, {:>3},  {},   {},   {},  {:6.3f},{:>12s},   {:.1f} deg, {:<9}".format(int(i), 
                                                     int(num_files), int(i_group), 
                                                     int(i_file), file_trunc, file['Format'], utc_trunc, 
                                                     file['Exptime'], (dt_str), file['Phase']*hbt.r2d, file['Target'])
      print(line)
      lines_out.append(line)
      
      arr = hbt.read_lorri(file['Filename'], bg_method = 'Polynomial', bg_argument = 4, frac_clip = 1)
      
      arr = hbt.remove_brightest(arr, 0.99)
      arr = -hbt.remove_brightest(-arr, 0.99)
Example #45
def get_fits_info_from_files_lorri(path,
                            file_tm = "/Users/throop/gv/dev/gv_kernels_new_horizons.txt", pattern=''):
    "Populate an astropy table with info from the headers of a list of LORRI files."
    import numpy as np
    import spiceypy as sp
    import glob
    import astropy
    from astropy.io import fits
    from astropy.table import Table
    import astropy.table
    import math
    import hbt
    

# For testing:
# file = '/Users/throop/Data/NH_Jring/data/jupiter/level2/lor/all/lor_0035020322_0x630_sci_1.fit' # 119 deg phase as per gv
# file = '/Users/throop/Data/NH_Jring/data/jupiter/level2/lor/all/lor_0034599122_0x630_sci_1.fit' # 7 deg phase, inbound

# t = hbt.get_fits_info_from_files_lorri(file)

# Flags: Do we do all of the files? Or just a truncated subset of them, for testing purposes?
    
    DO_TRUNCATED = False
    NUM_TRUNC = 100

# We should work to standardize this, perhaps allowing different versions of this function 
# for different instruments.

    d2r = np.pi /180.
    r2d = 1. / d2r

    sp.furnsh(file_tm)

# *** If path ends with .fit or .fits, then it is a file not a path. Don't expand it, but read it as a single file.

    if (('.fits' in path) or ('.fit' in path)):
        file_list = path
        files = [file_list]

    else:
        
        dir_data = path          
    #dir_data = '/Users/throop/data/NH_Jring/data/jupiter/level2/lor/all'
    # Start up SPICE
    
    
    # Get the full list of files
    # List only the files that match an (optional) user-supplied pattern, such as '_opnav'
    
        file_list = glob.glob(dir_data + '/*' + pattern + '.fit')
        files = np.array(file_list)
        indices = np.argsort(file_list)
        files = files[indices]

# Read the JD from each file. Then sort the files based on JD.

    jd = []
    for file in files:
        hdulist = fits.open(file)
        jd.append(hdulist[0].header['MET'])
        hdulist.close()
         
    fits_met     = [] # new list (same as array) 
    fits_startmet= [] 
    fits_stopmet = []
    fits_exptime = [] # starting time of exposure
    fits_target  = [] 
    fits_reqdesc = []     
    fits_reqcomm = [] # New 9-Oct-2018
    fits_reqid   = [] # New 9-Oct-2018
    fits_spcinst0= [] 
    fits_spcutcjd= []   
    fits_naxis1=   [] 
    fits_naxis2 =  []
    fits_sformat = [] # Data format -- '1x1' or '4x4'
    fits_spctscx = [] # sc - target, dx 
    fits_spctscy = [] # dy
    fits_spctscz = [] # dz
    fits_spctcb  = [] # target name
    fits_spctnaz = [] # Pole angle between target and instrument (i.e., boresight rotation angle)
    fits_rsolar  = [] # (DN/s)/(erg/cm^2/s/Ang/sr), Solar spectrum. Use for resolved sources.
    
    if (DO_TRUNCATED):
        files = files[0:NUM_TRUNC]
        
#files_short = np.array(files)
#for i in range(files.size):
#    files_short = files[i].split('/')[-1]  # Get just the filename itself

# Set up one iteration variable so we don't need to create it over and over
    num_obs = np.size(files)
    i_obs = np.arange(num_obs)
    
    print("Read " + repr(np.size(files)) + " files.")
    
    for file in files:
        print("Reading file " + file)
    
        hdulist = fits.open(file)
        header = hdulist[0].header
        
        keys = header.keys()
    
        fits_met.append(header['MET'])
        fits_exptime.append(header['EXPTIME'])
        fits_startmet.append(header['STARTMET'])
        fits_stopmet.append(header['STOPMET'])
        fits_target.append(header['TARGET'])
        fits_reqdesc.append(header['REQDESC'])
        fits_reqcomm.append(header['REQCOMM'])
        fits_reqid.append(header['REQID'])
        fits_spcinst0.append(header['SPCINST0'])
        fits_spcutcjd.append( (header['SPCUTCJD'])[3:]) # Remove the 'JD ' from before number
        fits_naxis1.append(header['NAXIS1'])
        fits_naxis2.append(header['NAXIS2'])
        fits_spctscx.append(header['SPCTSCX'])
        fits_spctscy.append(header['SPCTSCY'])
        fits_spctscz.append(header['SPCTSCZ'])    
        fits_spctnaz.append(header['SPCTNAZ'])    
        fits_sformat.append(header['SFORMAT'])
        fits_rsolar.append(header['RSOLAR'])   # NB: This will be in the level-2 FITS, but not level 1
                                             
        hdulist.close() # Close the FITS file

#print object
#print "done"

# Calculate distance to Jupiter in each of these
# Calc phase angle (to Jupiter)
# Eventually build backplanes: phase, RA/Dec, etc.
# Eventually Superimpose a ring on top of these
#  ** Not too hard. I already have a routine to create RA/Dec of ring borders.
# Eventually overlay stars 
#   Q: Will there be enough there?
# Eventually repoint based on stars
#  ** Before I allow repointing, I should search a star catalog and plot them.

# Convert some things to numpy arrays. Is there any disadvantage to this?

    met        = np.array(fits_met)
    jd         = np.array(fits_spcutcjd, dtype='d') # 'f' was rounding to one decimal place...
    naxis1     = np.array(fits_naxis1)
    naxis2     = np.array(fits_naxis2)
    target     = np.array(fits_target) # np.array can use string arrays as easily as float arrays
    instrument = np.array(fits_spcinst0)
    dx_targ    = np.array(fits_spctscx)
    dy_targ    = np.array(fits_spctscy)
    dz_targ    = np.array(fits_spctscz)
    desc       = np.array(fits_reqdesc)
    reqid      = np.array(fits_reqid)
    reqcomm    = np.array(fits_reqcomm)
    met0       = np.array(fits_startmet)
    met1       = np.array(fits_stopmet)
    exptime    = np.array(fits_exptime)
    rotation   = np.array(fits_spctnaz)
    sformat    = np.array(fits_sformat)
    rotation   = np.rint(rotation).astype(int)  # Turn rotation into integer. I only want this to be 0, 90, 180, 270... 
    rsolar     = np.array(fits_rsolar)
    
    files_short = np.zeros(num_obs, dtype = 'U60')

# Now do some geometric calculations and create new values for a few fields

    dist_targ = np.sqrt(dx_targ**2 + dy_targ**2 + dz_targ**2)

    phase = np.zeros(num_obs)
    utc = np.zeros(num_obs, dtype = 'U30')
    et = np.zeros(num_obs)
    subsclat = np.zeros(num_obs) # Sub-sc latitude
    subsclon = np.zeros(num_obs) # Sub-sc longitude
    
    name_observer = 'New Horizons'
    frame = 'J2000'
    abcorr = 'LT+S'
#         Note that using light time corrections alone ("LT") is 
#         generally not a good way to obtain an approximation to an 
#         apparent target vector:  since light time and stellar 
#         aberration corrections often partially cancel each other, 
#         it may be more accurate to use no correction at all than to 
#         use light time alone. 

# Fix the MET. The 'MET' field in fits header is actually not the midtime, but the time of the first packet.
# I am going to replace it with the midtime.
# *** No, don't do that. The actual MET field is used for timestamping -- keep it as integer.

#    met = (met0 + met1) / 2.

# Loop over all images

    for i in i_obs:
    
# Get the ET and UTC, from the JD. These are all times *on s/c*, which is what we want
    
      et[i] = sp.utc2et('JD ' + repr(jd[i]))
      utc[i] = sp.et2utc(et[i], 'C', 2)
    
# Calculate Sun-Jupiter-NH phase angle for each image 
    
      (st_jup_sc, ltime) = sp.spkezr('Jupiter', et[i], frame, abcorr, 'New Horizons') #obs, targ
      (st_sun_jup, ltime) = sp.spkezr('Sun', et[i], frame, abcorr, 'Jupiter')
      ang_scat = sp.vsep(st_sun_jup[0:3], st_jup_sc[0:3])
      phase[i] = math.pi - ang_scat
#      phase[i] = ang_scat
      files_short[i] = files[i].split('/')[-1]
# Calc sub-sc lon/lat
      
      mx = sp.pxform(frame,'IAU_JUPITER', et[i])
      st_jup_sc_iau_jup = sp.mxv(mx, st_jup_sc[0:3])
      
      (radius,subsclon[i],subsclat[i]) = sp.reclat(st_jup_sc[0:3])  # Radians
      (radius,subsclon[i],subsclat[i]) = sp.reclat(st_jup_sc_iau_jup)  # Radians

# Stuff all of these into a Table

    t = Table([i_obs, met, utc, et, jd, files, files_short, naxis1, naxis2, target, instrument, 
               dx_targ, dy_targ, dz_targ, reqid, 
               met0, met1, exptime, phase, subsclat, subsclon, naxis1, 
               naxis2, rotation, sformat, rsolar, desc, reqcomm], 
               
               names = ('#', 'MET', 'UTC', 'ET', 'JD', 'Filename', 'Shortname', 'N1', 'N2', 'Target', 'Inst', 
                        'dx', 'dy', 'dz', 'ReqID',
                        'MET Start', 'MET End', 'Exptime', 'Phase', 'Sub-SC Lat', 'Sub-SC Lon', 'dx_pix', 
                        'dy_pix', 'Rotation', 'Format', 'RSolar', 'Desc', 'Comment'))
    
# Define units for a few of the columns
                        
    t['Exptime'].unit = 's'
    t['Sub-SC Lat'].unit = 'degrees'

# Create a dxyz_targ column, from dx dy dz. Easy!

    t['dxyz'] = np.sqrt(t['dx']**2 + t['dy']**2 + t['dz']**2)  # Distance, in km

    return t
Example #46
def nh_find_simulated_rings_lorri():

# =============================================================================
# Now go thru the synthetic ring images. 
#    Load and stack the synthetic implanted images.
#    Load and stack the original 'raw' frames
#    Difference them, and see if we can find a ring in there.
# =============================================================================

    dir_porter = '/Users/throop/Dropbox/Data/NH_KEM_Hazard/Porter_Sep17/'
    dir_synthetic = '/Users/throop/Dropbox/Data/NH_KEM_Hazard/synthetic/'
    
    do_subpixel = False  # Flag: Do we use sub-pixel shifting when doing the flattening? 
                         # It is slower and in theory better, but in reality makes a trivial difference.

    # Start up SPICE
    
    file_kernel = 'kernels_kem.tm'
    sp.furnsh(file_kernel)
    
    # Load the images into a table
    
    images_raw = image_stack(dir_porter)
    images_syn = image_stack(dir_synthetic, do_force=False)
    
    stretch = astropy.visualization.PercentileInterval(95)
    plt.set_cmap('Greys_r')

    # =============================================================================
    # If desired, do a one-time routine for the synthetic images:
    #  extract the I/F and ring size from the filenames, and append that to the table.
    # This routine should be run after creating new synthetic images (e.g., adding an I/F value) 
    # =============================================================================
    
    DO_APPEND = False
    if (DO_APPEND):

        t_syn = images_syn.t
        num_images_syn = (np.shape(t_syn))[0]

        iof_ring  = np.zeros(num_images_syn, dtype=float)
        size_ring = np.zeros(num_images_syn, dtype='U30')
    
        for i in range(num_images_syn):
            f = t_syn['filename_short'][i]
            m = re.search('ring_(.*)_iof(.*)_K', f)  # Call regexp to parse it.
            iof_ring[i] = eval(m.group(2))
            size_ring[i] = m.group(1)
            
        t_syn['size_ring'] = size_ring
        t_syn['iof_ring']  = iof_ring
        images_syn.t = t_syn
        images_syn.save()           # Save the whole pickle archive (including images and table) back to disk
    
    data_raw = images_raw.data
    data_syn = images_syn.data
    
    t_raw = images_raw.t
    t_syn = images_syn.t
    
    num_images_raw = (np.shape(t_raw))[0]
    num_images_syn = (np.shape(t_syn))[0]

    # Look up the time offset, from the image title. (Would be better to have it stored in table, but this will do.)

    match = re.search('_K(.*)d', t_syn['filename_short'][0])
    
    dt_ca = ((match.group(1)*u.day).to('s'))  # Weird: we don't need .value here. I can't explain it.

    utc_ca = '2019 1 Jan 05:33'
    et_ca  = sp.utc2et(utc_ca)
    et_obs = et_ca + dt_ca
    
    # Set the pixel scale
    
    vec,lt = sp.spkezr('2014 MU69', et_obs, 'J2000', 'LT', 'New Horizons')
    vec_sc_targ = vec[0:3]
    dist_target_km = (sp.vnorm(vec_sc_targ)*u.km).value    
    scale_pix_lorri_1x1_rad = 0.3*hbt.d2r / 1024   # LORRI FOV is ~0.3 deg across 1024 pixels
    scale_pix_lorri_4x4_rad = scale_pix_lorri_1x1_rad * 4
    scale_pix_km_dict = {'1X1' : scale_pix_lorri_1x1_rad * dist_target_km,
                         '4X4' : scale_pix_lorri_4x4_rad * dist_target_km}  # Pixel scale at the target distance, in km/pix
    
    # Create a bunch of possible image sets, based on various parameters
    
    # Indices for 'raw' images
    
    indices_sep17_raw = t_raw['et'] > sp.utc2et('15 sep 2017')  # The position of MU69 has changed by a few pixels,
                                                                # so we can't blindly co-add sep and pre-sep frames.
    indices_jan17_raw = t_raw['et'] < sp.utc2et('1 sep 2017')
                                                        
    indices_rot0_raw  = t_raw['angle'] < 180   # One rotation angle
    indices_rot90_raw = t_raw['angle'] > 180   # The other rotation angle
    indices_10sec_raw = np.logical_and( t_raw['exptime'] < 10, t_raw['exptime'] > 5  )
    indices_20sec_raw = np.logical_and( t_raw['exptime'] < 20, t_raw['exptime'] > 10 )
    
    indices_30sec_raw = np.logical_and( t_raw['exptime'] < 30, t_raw['exptime'] > 20 )
    
    indices_1x1_raw = t_raw['naxis1'] == 1024
    indices_4x4_raw = t_raw['naxis1'] == 256
    
    indices_30sec_4x4_raw = np.logical_and(indices_4x4_raw, indices_30sec_raw) # 94
    
    # Indices for synthetic images
    
    indices_ring_small_syn = t_syn['size_ring'] == 'small'
    indices_ring_large_syn = t_syn['size_ring'] == 'large'
    
    indices_iof_1em7_syn = t_syn['iof_ring'] == 1e-7
    indices_iof_3em7_syn = t_syn['iof_ring'] == 3e-7
    indices_iof_1em6_syn = t_syn['iof_ring'] == 1e-6
    indices_iof_1em5_syn = t_syn['iof_ring'] == 1e-5
    indices_iof_1em4_syn = t_syn['iof_ring'] == 1e-4
    
    indices_small_1em7_syn = np.logical_and(indices_iof_1em7_syn, indices_ring_small_syn)
    indices_small_3em7_syn = np.logical_and(indices_iof_3em7_syn, indices_ring_small_syn)
    indices_small_1em6_syn = np.logical_and(indices_iof_1em6_syn, indices_ring_small_syn)
    indices_small_1em5_syn = np.logical_and(indices_iof_1em5_syn, indices_ring_small_syn)
    indices_small_1em4_syn = np.logical_and(indices_iof_1em4_syn, indices_ring_small_syn)
    indices_large_1em7_syn = np.logical_and(indices_iof_1em7_syn, indices_ring_large_syn)
    indices_large_3em7_syn = np.logical_and(indices_iof_3em7_syn, indices_ring_large_syn)
    indices_large_1em6_syn = np.logical_and(indices_iof_1em6_syn, indices_ring_large_syn)
    indices_large_1em5_syn = np.logical_and(indices_iof_1em5_syn, indices_ring_large_syn)
    indices_large_1em4_syn = np.logical_and(indices_iof_1em4_syn, indices_ring_large_syn)

    # Choose which indices to use. ** THIS IS WHERE WE SET THE RING TO USE!!
    
    indices_raw = indices_30sec_4x4_raw.copy()   # 94 of 344
    indices_syn = indices_small_1em6_syn.copy()  # 94 of 752

    # Now take the first half of the raw indices, and the second half of the synthetic ones.
    # This ensures that the two stacks use different images; otherwise, the difference would be trivially zero.
    
    frames_max = int(np.sum(indices_raw) / 2)    # Number of frames to keep in each stack (half of those selected)
    
    w = np.where(indices_raw)[0]
    indices_raw[w[frames_max]:] = False          # De-activate the second half of the raw frames
    
    w = np.where(indices_syn)[0]
    indices_syn[:w[frames_max]] = False          # De-activate the first half of the synthetic frames
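    # (E.g., with 94 frames selected in each stack, frames_max = 47: the raw stack keeps
    #  its first 47 selected frames, and the synthetic stack keeps its last 47.)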
    
    # Set the indices
    
    images_raw.set_indices(indices_raw)
    images_syn.set_indices(indices_syn)
    
    # Do the flattening
        
    arr_raw = images_raw.flatten(do_subpixel=do_subpixel)
    arr_syn = images_syn.flatten(do_subpixel=do_subpixel)
    
#    arr_raw_sub = images_raw.flatten(do_subpixel=True)    
#    arr_syn_sub = images_syn.flatten(do_subpixel=True)
    
    # Extract various fields from the data table. We can look these up in any of the images -- they should all be the same.
    
    t_syn       = images_syn.t  # Get the data table
    
    iof_ring    = t_syn[indices_syn]['iof_ring'][0]
    size_ring   = t_syn[indices_syn]['size_ring'][0]
    exptime     = t_syn[indices_syn]['exptime'][0]
    
    # The two flattened images need some offsetting. Do that.
    
    shift = ird.translation(arr_raw, arr_syn)['tvec']   # imreg_dft returns a dict; 'tvec' is the (y, x) shift
#    shift = np.round(shift).astype('int')
    
#    arr_syn_shift = np.roll(np.roll(arr_syn, int(round(shift[0])), axis=0), int(round(shift[1])), axis=1)
    arr_syn_shift = scipy.ndimage.shift(arr_syn, shift, order=5)  # Spline-interpolated shift. Nominally sub-pixel,
                                                                  # but the tests below suggest it rounds to whole pixels.

#    a = arr_syn.copy()
#    a_05_05 = scipy.ndimage.shift(arr_syn, (0.5, 0.5), order=5)  # Ugh. 0.5, 0.5 and 1, 1 are *exactly* the same.
#    a_1_05 = scipy.ndimage.shift(arr_syn, (1, 0.5), order=5)
#    a_1_1 = scipy.ndimage.shift(arr_syn, (1, 1), order=5)
#    a_1_15 = scipy.ndimage.shift(arr_syn, (1, 1.5), order=5)
#    a_1_0 = scipy.ndimage.shift(arr_syn, (1, 0), order=5)
#    a_05_0 = scipy.ndimage.shift(arr_syn, (0.5, 0), order=5)
    
    arr_diff  = arr_syn_shift  - arr_raw
    
    pos = (images_raw.y_pix_mean*4, images_raw.x_pix_mean*4)
    
    # Set the binning width of the radial profiles

    binning_pix = 5
    
    # Extract the radial profiles
    
    (dist_pix_1d, profile_1d_median) = get_radial_profile_circular(arr_diff, pos, method='median', width=binning_pix)
    (dist_pix_1d, profile_1d_mean)   = get_radial_profile_circular(arr_diff, pos, method='mean', width=binning_pix)
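    # For reference, a minimal sketch of what a circular radial-profile routine like
    # get_radial_profile_circular presumably does (the real one is defined elsewhere):
    #
    #   def radial_profile_sketch(arr, pos, method='median', width=1):
    #       dy, dx = np.indices(arr.shape)
    #       r = np.hypot(dy - pos[0], dx - pos[1])       # Distance of each pixel from 'pos'
    #       bin = (r / width).astype(int)                # Annulus index of each pixel
    #       func = np.median if (method == 'median') else np.mean
    #       profile = np.array([func(arr[bin == i]) for i in range(np.amax(bin) + 1)])
    #       dist = (np.arange(np.amax(bin) + 1) + 0.5) * width   # Annulus centers, in pixels
    #       return (dist, profile)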

    str_title = ('Synthetic ring - raw, I/F = {:.0e}, {}, {} x {:.1f}s'.format(
            iof_ring, size_ring, frames_max, exptime))
    
    plt.imshow(stretch(arr_diff))
    plt.title(str_title)
    plt.plot(pos[1], pos[0], marker='.', color='red')
    plt.show()
    
    # Set the scale for the effective mode of these observations. Many are taken as 4x4, but we've rebinned to 1x1
    
    if (np.shape(arr_raw)[0] == 1024):
        scale_mode = '1X1'
    else:
        scale_mode = '4X4'
    scale_pix_km = scale_pix_km_dict[scale_mode]
    
    # Make a plot of the radial profile. Don't plot the innermost bin. It is useless, since it has so few pixels in it.
    
    hbt.figsize((12,8))
    
    plt.plot(dist_pix_1d[1:] * scale_pix_km, profile_1d_median[1:], label = 'Annulus median', alpha = 0.7)
#    plt.plot(dist_pix_1d[1:] * scale_pix_km, profile_1d_mean[1:],   label = 'Mean',   alpha = 0.2)
    plt.xlabel('Distance [km]')
    plt.ylabel('DN per pixel')
    plt.title(str_title + ', binning = {}'.format(binning_pix))
    plt.xlim((0,30000))
    
    # Set the y axis range by hand, based only on the data within the x-limits. (Autoscaling would use the full radial range.)
    
    ax = plt.gca()
    lims = ax.get_xlim()
    i = np.where( (dist_pix_1d * scale_pix_km > lims[0]) &  (dist_pix_1d*scale_pix_km < lims[1]) )[0]
    ax.set_ylim( profile_1d_median[i].min(), profile_1d_median[i].max() ) 
    
    plt.legend()
    plt.savefig('profile_radial_syn_vs_raw.png')   # Placeholder filename -- savefig() requires a path, and must come before show()
    plt.show()
# These data files were obtained from
# http://naif.jpl.nasa.gov/pub/naif/generic_kernels
LEAP_SECOND_FILE = "data/naif0011.tls"
EPHEMERIDES_FILE = "data/de432s.bsp"


if __name__ == "__main__":
    # Script expects first argument to be a UTC timestamp
    if len(sys.argv) > 1:
        input_utc = sys.argv[1]
    else:
        input_utc = "2000-01-01T00:00:00"  # default for testing
    # Load the necessary data files
    sp.furnsh([LEAP_SECOND_FILE, EPHEMERIDES_FILE])
    # Convert UTC to ephemeris time
    epoch = sp.utc2et(input_utc)
    # State (position and velocity in cartesian coordinates)
    # of EARTH as seen from SUN in the ecliptic J2000 coordinate frame.
    state, lt = sp.spkezr("EARTH", epoch, "ECLIPJ2000", "NONE", "SUN")
    # Show the output
    print("Input time = {}".format(input_utc))
    print("")
    print("# Position of the Earth in the heliocentric ecliptic (J2000) frame")
    print("X = {} km".format(state[0]))
    print("Y = {} km".format(state[1]))
    print("Z = {} km".format(state[2]))
    print("")
    print("# Velocity of the Earth in the same frame")
    print("dX = {} km/s".format(state[3]))
    print("dY = {} km/s".format(state[4]))
    print("dZ = {} km/s".format(state[5]))
def nh_ort_track4_flyby(dir_in=None, dir_out=None, name_trajectory = 'prime'):

    #%%%
    

#    dir_in = '/Users/throop/
#    dir_in = '/Users/throop/data/ORT4/throop/ort4_bc3_10cbr2_dph/'
    
    stretch_percent = 99
    stretch = astropy.visualization.PercentileInterval(stretch_percent)

#    dir_data = os.path.expanduser('~/Data/')
    
    do_compress = False   # Do we use .gzip compression on the Track-4 input grids?
                          # If we used compression on the track4_calibrate routine, we must use it here too.
    
#    dir_track4 = os.path.join(dir_data, name_ort, 'throop', 'track4')
    
    if do_compress:
        files = glob.glob(os.path.join(dir_in, '*.grid4d.gz'))
    
    else:
        files = glob.glob(os.path.join(dir_in, '*.dust.pkl'))   # NB: the *.dust.pkl files, not the raw *.grid4d ones, are read here

    # Alphabetize file list
    
    files = sorted(files)

    plt.set_cmap('plasma')

    utc_ca = '2019 1 Jan 05:33:00'
    dt_before = 1*u.hour
    dt_after  = 1*u.hour
    
#    area_sc = (1*u.m)**2

    frame = '2014_MU69_SUNFLOWER_ROT'
    
    name_target = 'MU69'
    origin = 'lower'   # Required plotting order for imshow
    
    name_observer = 'New Horizons'
    
    hbt.figsize((8,6))
    hbt.set_fontsize(12)

    dt = 1*u.s         # Sampling time through the flyby. Astropy units.

    # Create an output table, Astropy format
    
    t = Table(names = ['trajectory', 'speed', 'q_dust', 'albedo', 'rho',
                       'tau_max', 'tau_typical', 'iof_max', 'iof_typical'],
              dtype = ['U30', float, float, float, float, 
                       float, float, float, float]  )
    
    # Start up SPICE if needed. Unload old kernels just as a safety precaution.
    
    sp.unload('kernels_kem_prime.tm')
    sp.unload('kernels_kem_alternate.tm')
    
    sp.furnsh(f'kernels_kem_{name_trajectory}.tm')
    
    do_short = False
    
    if do_short:
        files = files[0:4]
             
        i=3
        file = files[i]

    num_files = len(files)
    name_run = dir_out.split('/')[-2]
        
#%%%    
    for i,file in enumerate(files):
        
#%%%       
        print(f'Starting file {i}/{len(files)}')
              
        print(f'Loading file {file}')
        grid = nh_ort_track4_grid(file)    # Load the grid from disk. Uses gzip, so it is quite slow (10 sec/file)
         
    # Load the trajectory parameters

        et_ca = int( sp.utc2et(utc_ca) )  # Force this to be an integer, just to make output cleaner.
        
        et_start = et_ca - dt_before.to('s').value
        et_end   = et_ca + dt_after.to('s').value
                        
        grid.frame       = frame
        grid.name_target = name_target
        grid.name_trajectory = name_trajectory
        
        # And call the method to fly through it!
        # The returned density values etc are available within the instance variables, not returned explicitly.

        grid.fly_trajectory(name_observer, et_start, et_end, dt)
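        # (Judging from the plots below, fly_trajectory() populates instance variables
        #  including grid.delta_et_t, grid.number_t, grid.number_sc_t,
        #  grid.number_sc_cum_t, grid.s, and grid.area_sc.)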
 
        # If the first time thru loop, make plot of our path through the system
     
        do_plots_geometry = True
        
        if (do_plots_geometry and (i==0)):
            
            grid.plot_trajectory_geometry()

        # Make slice plots thru the grid
        
        do_plot_slices_xyz = False

        if do_plot_slices_xyz:
            hbt.fontsize(8)
            hbt.figsize((20,5))
            grid.plot(axis_sum=0)
            grid.plot(axis_sum=1)
            grid.plot(axis_sum=2)
            
            hbt.fontsize(10)
        
        # Make a plot of optical depth
        
        do_plot_tau = True
        if do_plot_tau:
            grid.plot_tau()

# =============================================================================
# Make some plots of count rate vs. time!
# =============================================================================
                    
        # Make a plot of the instantaneous count rate

        hbt.figsize((10,15))

        # Make a plot of the actual density that we give to Doug Mehoke
        
        # Define a list of colors. This is so we can use colors= argument to set
        # a marker to show grain size, rather than let plot() auto-assign.
        
#        colors = plt.rcParams['axes.prop_cycle'].by_key()['color']  # This is the default color iterator.
        colors = ['antiquewhite',
                  'tomato', 
                  'blueviolet',
                  'skyblue',
                  'gold',
                  'darkcyan',
                  'thistle',
                  'olive',
                  'red',
                  'sienna',
                  'deepskyblue',
                  'lightsalmon',
                  'pink',
                  ]
                  # 'aqua']
                  
#                  'antiquewhite4', 'aqua', 'aquamarine4', 'black', 'blue', 'blueviolet', 
#                  'brown1', 'chartreuse1', 'darkgreen', 'darkorange1', 'dodgerblue1', 'lightpink', 'magenta']
        
        # Make a plot of dust number density. This is straight out of the grid, and for comparison with MRS.
        
        vals_fiducial = [1e-10, 1e-8, 1e-6, 1e-4, 1e-2, 1e-0]
        
        plt.subplot(3,1,1)
        for j,s in enumerate(grid.s):
            plt.plot(grid.delta_et_t, grid.number_t[j],
                     label = 's={:.2f} mm'.format(s),
                     color=colors[j])
        plt.legend()
        plt.title('Dust number density')
        plt.xlabel('ET from C/A')
        plt.yscale('log')
        plt.ylim((1e-10, 1e2))  # Match to MRS.
        plt.axvline(0, color='black', alpha=0.05)
        plt.ylabel(r'Dust, # km$^{-3}$')
                   
        for val in vals_fiducial:
            plt.axhline(val, color='black', alpha=0.05)

        # Make a plot of impact rate. This assumes a s/c area.
        
        plt.subplot(3,1,2)
        for j,s in enumerate(grid.s):   # 's' is dust size
            plt.plot(grid.delta_et_t, grid.number_sc_t[j],
                     label = f's={s:.2f} mm',
                     color=colors[j])
        plt.title('Impact rate, A={}'.format(grid.area_sc))
        plt.yscale('log')    
        plt.xlabel('ET from C/A')
        plt.legend()
        plt.ylabel(r'Dust, # Impacts sec$^{-1}$')

        # Make a plot of the cumulative count rate. Mark grain sizes here too.
        
        plt.subplot(3,1,3)
        for j,s in enumerate(grid.s):                                             # Loop over size
            plt.plot(grid.delta_et_t, grid.number_sc_cum_t[j],                    # Main plot line
                     label = 's={:.2f} mm'.format(s), color=colors[j])
            plt.plot([grid.delta_et_t[-1]], [grid.number_sc_cum_t[j,-1].value],   # Circle to indicate grain size
                     markersize=(7-j)*2, marker = 'o',                            # Use same color as prev line! 
                     color=colors[j])


        hbt.figsize((5,5))
        plt.legend()
        plt.title('Number of impacts (cumulative), A={}'.format(grid.area_sc))
        plt.xlabel('ET from C/A')
        plt.yscale('log')
        plt.ylabel('# of Impacts')
        plt.axhline(y = 1, linestyle = '--', alpha = 0.1)    

        plt.tight_layout()
        
        plt.show()
        
        # Make a plot of the size distribution. 
        # Make two curves: one for n(r) for the entire grid, and one for n(r) that hits the s/c
        
        # Now add an entry to the table. This is a table that lists all of the results --
        #     e.g., max_tau, count rate etc
        # One line per grid.

        t.add_row(vals=[grid.name_trajectory, grid.speed, grid.q, grid.albedo, grid.rho, 
                        grid.tau_max, grid.tau_typ,
                        grid.iof_max, grid.iof_typ])
                                        
        # Get size dist along path
         
        number_path = grid.number_sc_cum_t[:,-1].value
        
        # Take the full particle grid, and sum along all spatial axes, leaving just the size axis left.
        
        number_grid = np.sum(grid.density, axis=(1, 2, 3))
        
        # Normalize both size distributions
        number_grid = hbt.normalize(number_grid)
        number_path = hbt.normalize(number_path)
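        # (hbt.normalize presumably rescales each array to a peak of 1 -- i.e., arr / np.amax(arr) --
        #  so that the two size distributions can share one log-scaled axis.)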
        
        plt.plot(grid.s, number_path, label = 'Along s/c path')
        plt.plot(grid.s, number_grid, label = 'In grid, total')
        plt.yscale('log')
        plt.xscale('log')
        plt.ylim( (hbt.minval(np.array([number_grid, number_path]))/2, 1) )
        plt.xlabel('Radius [mm]')
        plt.ylabel('Particle number [arbitrary]')
        plt.legend(loc = 'lower right')
        plt.show()

        # Output the dust population for this run to a file. This is the file that Doug Mehoke will read.
        
        grid.output_trajectory(name_run=name_run, do_positions=False, dir_out=dir_out)
    
        print('---')
                        

#%%%        
        
    # Print the table
    
    t.pprint(max_width=-1)
    
    # Save the table as output
    
    file_out = os.path.join(dir_out, f'nh_{name_trajectory}_track4_table.pkl')
    
    lun = open(file_out, 'wb')
    pickle.dump(t,lun)
    lun.close()
    print(f'Wrote: {file_out}')
    
#%%%    

    # Now that all files have been created, compress results into an archive (.tar.gz) for Doug Mehoke
    
    inits_track4    = 'hbt'
    
#    if 'hamilton' in files[0]:
#        inits_track3 = 'dph'
#    if 'kauf' in files[0]:
#        inits_track3 = 'dk'
        
    file_out = f'{name_trajectory}_{name_run}_{inits_track4}_n{num_files}.tgz'
    
    cmd = f'cd {dir_out}; tar -czf {file_out} *{name_trajectory}*.dust'
    
    _ = subprocess.Popen(cmd, shell=True)   # NB: Popen() returns immediately; the archive is written in the background
    
    print(cmd)
    print(f'Wrote: {dir_out}/{file_out}')
    def __init__(self, master, size_window):

        self.master = master   # This is the handle to the main Tk widget. I have to use it occasionally to 
                               # set up event handlers, so grab it and save it.

        self.size_window = size_window # Save the size of the whole Tk window, in pixels.
        
        # Open the image stack
        
#        self.stretch_percent = 90    
#        self.stretch = astropy.visualization.PercentileInterval(self.stretch_percent) # PI(90) scales to 5th..95th %ile.
#        
        name_ort = 'ORT4'
#        name_ort = 'ORT2_OPNAV'
        
        if (name_ort == 'ORT1'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
            #        self.reqids_haz  = ['K1LR_HAZ03', 'K1LR_HAZ01', 'K1LR_HAZ02']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT1/throop/backplaned/'

        if (name_ort == 'ORT2'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
#            self.reqids_haz  = ['K1LR_HAZ03', 'K1LR_HAZ01', 'K1LR_HAZ02']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT2/throop/backplaned/'

        if (name_ort == 'ORT3'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT3/throop/backplaned/'

        if (name_ort == 'ORT4'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT4/throop/backplaned/'
            
        if (name_ort == 'ORT2_OPNAV'):
            self.dir_data    = '/Users/throop/Data/ORT2/throop/backplaned/'
            dirs = glob.glob(self.dir_data + '/*LR_OPNAV*')         # Manually construct a list of all the OPNAV dirs
            self.reqids_haz = []
            for dir_i in dirs:
                self.reqids_haz.append(os.path.basename(dir_i))
            self.reqids_haz = sorted(self.reqids_haz)    
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'    
            
        # Set the edge padding large enough s.t. all output stacks will be the same size.
        # This value is easy to compute: loop over all stacks, and take the max of stack.calc_padding()[0] (see sketch below).
        
        self.padding     = 61 # Amount to pad the images by. This is the same as the max drift btwn all images in stacks
        self.zoom        = 4  # Sub-pixel zoom to apply when shifting images. 1 for testing; 4 for production.
        self.num_image   = 0  # Which stack number to start on.
        self.zoom_screen = 1  # 'Screen zoom' amount to apply. This can be changed interactively.
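        # A sketch of that padding computation, assuming each stack exposes calc_padding()
        # as described above:
        #
        #   self.padding = max(image_stack(os.path.join(self.dir_data, reqid)).calc_padding()[0]
        #                      for reqid in self.reqids_haz)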
        
        self.is_blink    = False  # Blinking mode is turned off by default
        self.dt_blink    = 300    # Blink time in ms
        
        # Start up SPICE if needed
        
        if (sp.ktotal('ALL') == 0):
            sp.furnsh('kernels_kem_prime.tm')
            
        # Set the RA/Dec of MU69. We could look this up from SPICE but it changes slowly, so just keep it fixed for now.
        
        self.radec_mu69 = (4.794979838984583, -0.3641418801015417)
        
        # Set the CA time. Roughly doing this is fine.
        
        self.et_ca = sp.utc2et('2019 1 Jan 05:33:00')
        
        # Boolean. For the current image, do we subtract the field frame, or not?
        
        self.do_subtract = True

        hbt.set_fontsize(20)

        # Set the stretch range, for imshow. These values are mapped to black and white, respectively.
        
        self.vmin_diff = -1   # Range for subtracted images
        self.vmax_diff =  2
        
        self.vmin_raw = -1    # Range for raw images (non-subtracted)
        self.vmax_raw = 1000
        
# Restore the stacks directly from archived pickle file, if it exists
        
        self.file_save = os.path.join(self.dir_data, 
                                      f'stacks_blink_{name_ort}_n{len(self.reqids_haz)}_z{self.zoom}.pkl')
        
        if os.path.isfile(self.file_save):
            self.restore()
        else:

# If no pickle file, load the stacks from raw images and re-align them
            
            # Load and stack the field images
    
            print("Stacking field images")        
            self.stack_field = image_stack(os.path.join(self.dir_data, self.reqid_field))    # The individual stack
            self.stack_field.align(method = 'wcs', center = self.radec_mu69)
            (self.img_field, self.wcs_field)  =\
                self.stack_field.flatten(zoom=self.zoom, padding=self.padding) # Save the stacked image and WCS
        
            # Load and stack the Hazard images
            
            self.img_haz   = {} # Output dictionary for the stacked images
            self.stack_haz = {} # Output dictionary for the stacks themselves
            self.wcs_haz   = {} # Output dictionary for WCS for the stacks
            
            for reqid in self.reqids_haz:
                self.stack_haz[reqid] = image_stack(os.path.join(self.dir_data, reqid))    # The individual stack
                self.stack_haz[reqid].align(method = 'wcs', center = self.radec_mu69)
                (self.img_haz[reqid], self.wcs_haz[reqid])  =\
                    self.stack_haz[reqid].flatten(zoom=self.zoom, padding=self.padding) 
                # Put them in a dictionary

            # Save the stacks to a pickle file, if requested
            
            yn = input("Save stacks to a pickle file? ")
            if ('y' in yn):
                self.save()
                
# Set the sizes of the plots -- e.g., (15,15) = large square
        
        figsize_image = (15,15)
        
        self.fig1 = Figure(figsize = figsize_image)    # <- this is in dx, dy... which is opposite from array order!

        self.ax1 = self.fig1.add_subplot(1,1,1, 
                                    label = 'Image') # Return the axes
        plt.set_cmap('Greys_r')
        
        self.canvas1 = FigureCanvasTkAgg(self.fig1,master=master)
        self.canvas1.draw()   # (FigureCanvasTkAgg.show() is deprecated; draw() renders the canvas)
        
# Put objects into appropriate grid positions

        self.canvas1.get_tk_widget().grid(row=1, column=1, rowspan = 1)
        
# Define some keyboard shortcuts for the GUI
# These functions must be defined as event handlers, meaning they take two arguments (self and event), not just one.

        master.bind('q',       self.quit_e)
        master.bind('<space>', self.toggle_subtract_e)
        master.bind('=',       self.prev_e)
        master.bind('-',       self.next_e)
        master.bind('h',       self.help_e)
        master.bind('?',       self.help_e)
        master.bind('<Left>',  self.prev_e)
        master.bind('<Right>', self.next_e)
        master.bind('s',       self.stretch_e)
        master.bind('b',       self.blink_e)
        master.bind('t',       self.blink_set_time_e)
        master.bind('#',       self.blink_set_sequence_e)
        master.bind('z',       self.zoom_screen_up_e)
        master.bind('Z',       self.zoom_screen_down_e)
        master.bind('x',       self.clear_current_objects_e)
        master.bind('X',       self.clear_all_objects_e)
        
        master.bind('=',       self.scale_max_up_e)    # NB: these rebind '=' and '-', overriding the
        master.bind('+',       self.scale_max_down_e)  # prev/next bindings assigned above.
        master.bind('-',       self.scale_min_up_e)
        master.bind('_',       self.scale_min_down_e)
        
        master.bind('S',       self.save_output_e)
        
        self.canvas1.get_tk_widget().bind("<Button 1>", self.click_e)        
        
# Set the initial image index
        
        self.reqid_haz = self.reqids_haz[self.num_image]  # Set it to 'K1LR_HAZ00', for instance.

# Initialize the list of found objects for each stack
# There is a list of objects for each individual stack (ie, for each frame in the blink)

        self.list_objects = {}
        
        for reqid_i in self.reqids_haz:
            self.list_objects[reqid_i] = []  # Each entry here will be something like [(x, y, dn), (x, y, dn)] 

# Initialize a set of matplotlib 'line' objects for the image.
# These correspond to the 'objects' above, which are really just points            
            
        self.list_lines = {}

        for reqid_i in self.reqids_haz:
            self.list_lines[reqid_i] = []  # Each entry here will be a list of plot objects, of type 'line' 
                    
# Set a list of frame numbers to animate. For default, do them all.

        self.list_index_blink = hbt.frange(0, len(self.reqids_haz)-1) # List of indices ( [1, 2, 3] )
        self.list_index_blink_str = ' '.join(np.array(self.list_index_blink).astype(str)) # Make into string ('1 2 3')
        self.index_blink = 0      # where in the list of indices do we start? Current index.     
        
# Plot the image
        
        self.plot()
def make_table_grid_positions():
    
    """
    This is a one-off utility function for MRS. 
    In it, I just do a flyby of MU69, and output the X Y Z grid indices (as well as positions and timestamps).
    I don't output density at all -- just the s/c positions.
    
    I do this for both prime and alternate trajectories.
    
    This is just because he hasn't integrated SPICE into his grid code.
    
    This function is stand-alone. It doesn't rely on the grid class.
    It is included in this file because it directly relates to the grids.
    
    """

#%%%
     
    name_trajectory = 'alternate'  # ← Set this to 'prime' or 'alternate'
    # name_trajectory = 'prime'  # ← Set this to 'prime' or 'alternate'
   
    hbt.unload_kernels_all() 
   
    frame = '2014_MU69_SUNFLOWER_ROT'
    
    name_observer = 'New Horizons'

    name_target = 'MU69'
 
    sp.furnsh(f'kernels_kem_{name_trajectory}.tm')
    
    # Get the OD version. This might not work.
    
    # files = hbt.list_kernels_loaded()
    # for file in files:
        # if 
    
#    file_in = '/Users/throop/Data/ORT2/throop/track4/ort2-ring_v2.2_q2.0_pv0.10_rho0.22.grid4d.gz'
#    file_in = '/Users/throop/Data/ORT5/throop/deliveries/tuna9k/ort5_None_y3.0_q3.5_pv0.70_rho1.00.dust.txt'
    file_in = '/Users/throop/Data/ORT5/kaufmann/deliveries/chr3-tunacan10k/chr3-0003' + \
              '/y3.0/beta1.0e+00/subset00/model.array2'
    
    # file_in = '/Users/throop/Data/ORT5/throop/deliveries/tuna9k/ort5_None_y3.0_q3.5_pv0.70_rho1.00.dust.pkl' #250km
    # file_in = '/Users/throop/Data/ORT5/throop/deliveries/dph-sunflower10k/ort5_None_y2.2_q2.5_pv0.05_rho0.46.dust.pkl' #500km
    file_in = '/Users/throop/Data/ORT5/throop/deliveries/dph-tunacan3.5kinc55/ort5_None_y2.2_q2.5_pv0.05_rho0.46.dust.pkl' #500km. NB: this final assignment is the one used.
    
    grid = nh_ort_track4_grid(file_in)    # Load the grid from disk. Uses gzip, so it is quite slow (10 sec/file)

    resolution_km = int(grid.resolution_km[0])
    
    utc_ca = '2019 1 Jan 05:33:00'
    dt_before = 1*u.hour
    dt_after  = 1*u.hour
    dt = 1*u.s         # Sampling time through the flyby. Astropy units.
    
    et_ca = int( sp.utc2et(utc_ca) )  # Force this to be an integer, just to make output cleaner.
        
    et_start = et_ca - dt_before.to('s').value
    et_end   = et_ca + dt_after.to('s').value
                    
    grid.frame       = frame
    grid.name_target = name_target
    grid.name_trajectory = name_trajectory
    
    # And call the method to fly through it!
    # The returned density values etc are available within the instance variables, not returned explicitly.

    grid.fly_trajectory(name_observer, et_start, et_end, dt)
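    # (The plots below rely on the instance variables that fly_trajectory() evidently
    #  populates: grid.et_t, grid.x_t / y_t / z_t, and grid.bin_x_t / bin_y_t / bin_z_t.)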

    # Make plots
    
    hbt.figsize((9,9))
    hbt.fontsize(12)

    plt.subplot(3,2,1)
    plt.plot(grid.bin_x_t)
    plt.ylabel('X Bin #')
    plt.title(f'MU69, Trajectory = {name_trajectory}, frame = {frame}')
    
    plt.subplot(3,2,3)
    plt.plot(grid.bin_y_t)
    plt.ylabel('Y Bin #')

    plt.subplot(3,2,5)
    plt.plot(grid.bin_z_t)
    plt.ylabel('Z Bin #')
    plt.xlabel('Timestep #')

    t_t = grid.et_t - np.mean(grid.et_t)
    bin_t = range(len(t_t))
    plt.subplot(3,2,2)
    plt.axhline(0, color='pink')
    plt.axvline(0, color='pink')
    plt.plot(t_t, grid.x_t)

    plt.ylabel('X [km]')

    plt.xlabel('t [sec]')
    
    plt.subplot(3,2,4)
    plt.axhline(0, color='pink')
    plt.axvline(0, color='pink')
    plt.plot(t_t, grid.y_t)
    plt.ylabel('Y [km]')

    plt.subplot(3,2,6)
    plt.axhline(0, color='pink')
    plt.axvline(0, color='pink')
    plt.plot(t_t, grid.z_t)
    plt.ylabel('Z [km]')
    plt.xlabel('Time from C/A [sec]')
    plt.tight_layout()

    # Save the plot to a file
    
    file_out = f'positions_trajectory_{name_trajectory}.png'
    dir_out = '/Users/throop/Data/ORT5'   # NB: dir_out was undefined here; assuming the same output dir as the table below
    path_out = os.path.join(dir_out, file_out)
     
    plt.savefig(path_out)
    print(f'Wrote: {path_out}')
    plt.show()
    
    # Make a table
    
    arr = {'bin' : bin_t, 
           'delta_et' : t_t,
           'X_km' : grid.x_t,
           'Y_km' : grid.y_t,
           'Z_km' : grid.z_t,
           'Bin_X' : grid.bin_x_t,
           'Bin_Y' : grid.bin_y_t,
           'Bin_Z' : grid.bin_z_t}
   
    t = Table(arr, names=['bin', 'delta_et', 'X_km', 'Y_km', 'Z_km', 'Bin_X', 'Bin_Y', 'Bin_Z'],
              dtype=['int', 'int', 'float', 'float', 'float', 'int', 'int', 'int'])
  
    # Save the table to a file
    
    file_out = f'positions_trajectory_{name_trajectory}_res{resolution_km}.txt'
    path_out = os.path.join('/Users/throop/Data/ORT5', file_out)
    
    t.write(path_out, format = 'ascii.csv', overwrite=True)
    print(f'Wrote: {path_out}')
Example #51
0
vmax = 100 # For vertical scaling
dpi  = 50  # Size of the output picture. 200 for cathy. 50 for web. 100 for high-res web.

file_out_base = os.path.join(dir_frames, f'movie_w{width}_d{dpi}_{cmap}')

file_out_gif = f'{file_out_base}_n{len(reqids)}_f{fps}.gif'

for i,reqid_i in enumerate(reqids):

    # Convert from DOY to calendar day

    file_out_frame = f'{file_out_base}_{i:03}.png'
    
    doy = reqid_i.split('_')[-1][-3:]
    
    et = sp.utc2et(f'2018::{doy} 12:00:00')
    utc = sp.timout(et, "Month DD, YYYY", 20)
    utc = utc.replace(' 0', ' ')   # Strip the leading zero from the day of month ('July 05' -> 'July 5')
    
    # Draw the frame

    f = plt.figure(frameon=False, figsize=(10, 5), dpi=dpi)  # Change dpi to change output size
    # f.patch.set_facecolor('pink')
    canvas_width, canvas_height = f.canvas.get_width_height()
    ax = f.add_axes([0, 0, 1, 1])
    ax.axis('off')
    # ax.set_facecolor('pink')

    plt.subplot(1,2,1)
    ax = plt.gca()
    # ax.set_facecolor('pink')