def __init__(self, file):
        
        # Get the speed relative to MU69

        file_tm = 'kernels_kem_prime.tm'
        
        # Start up SPICE if needed
        
        if (sp.ktotal('ALL') == 0):
            sp.furnsh(file_tm)    
            
        utc_ca = '2019 1 Jan 05:33:00'
        et_ca  = sp.utc2et(utc_ca) 
        (st,lt) = sp.spkezr('New Horizons', et_ca, 'J2000', 'LT', 'MU69')
        
        velocity = sp.vnorm(st[3:6])*u.km/u.s
    
        # Save the velocity (relative to MU69)

        self.velocity = velocity

        # Save the name of file to read
        
        self.file = file

        # Save the area of the s/c
        
        self.area_sc = (1*u.m)**2
        
        return
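For reference, the velocity lookup above can be written as a standalone sketch under the same assumptions (the kernels_kem_prime.tm metakernel on the working path, spiceypy imported as sp, astropy.units as u); the function name is illustrative.

import astropy.units as u
import spiceypy as sp

def relative_speed(utc='2019 1 Jan 05:33:00', metakernel='kernels_kem_prime.tm'):
    """Speed of New Horizons relative to MU69 at the given UTC, as an astropy Quantity."""
    if sp.ktotal('ALL') == 0:          # Furnish the kernels only once per session
        sp.furnsh(metakernel)
    et = sp.utc2et(utc)
    (state, _lt) = sp.spkezr('New Horizons', et, 'J2000', 'LT', 'MU69')
    return sp.vnorm(state[3:6]) * u.km / u.s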
Example #2
def logging_kernel_names(self):
    n_kernels = spice.ktotal('ALL')
    msg = 'After loading/unloading kernels in {:s}, {:d} kernel files are loaded.'.format(
        self.meta_kernel_path, n_kernels)
    self.logger.info(msg)
    for i in range(n_kernels):
        self.logger.debug(spice.kdata(i, 'ALL'))
Example #3
def is_kernel_already_loaded(self):
    result = False
    n_kernels = spice.ktotal('ALL')
    for i in range(n_kernels):
        file, _, source, _ = spice.kdata(i, 'ALL')
        if file == self.meta_kernel_path:
            msg = 'Already loaded. file: {:s}, source: {:s}.'.format(
                file, source)
            self.logger.debug(msg)
            result = True
            break
    return result
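Taken together with Example #2, a hypothetical usage sketch might furnish the meta-kernel only when it is not already loaded (loader here is an illustrative instance of the same class, with spiceypy imported as spice):

if not loader.is_kernel_already_loaded():
    spice.furnsh(loader.meta_kernel_path)
loader.logging_kernel_names()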
Example #4
def describe_loaded_kernels(kind='all'):
    """Print a list of loaded spice kernels of :kind:"""

    all_kinds = ('spk', 'pck', 'ck', 'ek', 'text', 'meta')
    if kind == 'all':
        for k in all_kinds:
            describe_loaded_kernels(k)
        return

    n = spiceypy.ktotal(kind)
    if n == 0:
        print('No loaded %s kernels' % kind)
        return

    print("Loaded %s kernels:" % kind)
    for i in range(n):
        data = spiceypy.kdata(i, kind, 100, 10, 100)
        print("\t%d: %s" % (i, data[0]))
Example #5
def search_solar_objects(obsinfo):
    solar_objects = []
    count = spice.ktotal("spk")
    for which in range(count):
        filename, _filetype, _source, _handle = spice.kdata(which, "spk")
        ids = spice.spkobj(filename)
        for i in range(spice.card(ids)):
            obj = ids[i]
            target = spice.bodc2n(obj)
            if is_target_in_fov(
                    obsinfo.inst,
                    target,
                    obsinfo.et,
                    obsinfo.abcorr,
                    obsinfo.obsrvr,
            ):
                solar_objects.append(get_solar_object(obsinfo, obj, target))
    return solar_objects
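search_solar_objects() only reads four attributes from obsinfo (inst, et, abcorr, obsrvr), and it relies on the external helpers is_target_in_fov() and get_solar_object(). A hypothetical call, with illustrative instrument and observer names, might look like this:

from collections import namedtuple

ObsInfo = namedtuple('ObsInfo', 'inst et abcorr obsrvr')
obsinfo = ObsInfo(inst='NH_LORRI',
                  et=spice.utc2et('2019 JAN 01 05:33:00'),
                  abcorr='LT+S',
                  obsrvr='NEW HORIZONS')
solar_objects = search_solar_objects(obsinfo)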
Example #6

# General imports needed by the snippet below

import glob
import os

import numpy as np
import matplotlib.pyplot as plt

import astropy.visualization
from astropy.io import fits

import spiceypy as sp

from astropy.stats import sigma_clip

# HBT imports

import hbt

# =============================================================================
# This is just a Q&D short routine to read in a directory of images, and plot them.
# I wrote this for the MU69 NYT ORT, but it could be used for anything.
# =============================================================================

# Start up SPICE if needed

hbt.figsize((10,10))
if (sp.ktotal('ALL') == 0):
    sp.furnsh('kernels_kem_prime.tm')
        
stretch_percent = 90    
stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.

dir_images = '/Users/throop/Data/ORT_Sep18/day2/lor/'

files = glob.glob(os.path.join(dir_images, '*'))

do_transpose = False

files = ['/Users/throop/Data/NH_Jring/data/jupiter/level2/lor/all/lor_0034715072_0x630_sci_1.fit']  # NB: overrides the glob above with a single test file

for file in files:
    lun = fits.open(file)
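    # A plausible continuation, sketch only: the snippet is truncated here, but the stretch and
    # do_transpose flags defined above suggest displaying each frame and then closing the file.
    img = lun[0].data
    if do_transpose:
        img = np.transpose(img)
    plt.imshow(stretch(img))
    plt.title(os.path.basename(file))
    plt.show()
    lun.close()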
Example #7
def nh_ort1_make_stacks():
    
    """
    This program takes a directory full of individual NH KEM Hazard frames, stacks them, and subtracts
    a stack of background field. This reveals rings, etc. in the area.
    
    Written for NH MU69 ORT1, Jan-2018.  
    """
    
    do_force = False  # Boolean: Do we force reloading of all of the images from FITS, or just restore from pkl?
                      # Pkl (aka False) is faster. But if we have made changes to the core algorithms, must
                      # reload from disk (aka True).
    
    stretch_percent = 90    
    stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.
    
    reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
    reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'
    
    dir_data    = '/Users/throop/Data/ORT1/throop/backplaned/'

    zoom = 4
    
    # Set the edge padding large enough s.t. all output stacks will be the same size.
    # This value is easy to compute: loop over all stacks, and take max of stack.calc_padding()[0]
    
    padding = 61   
    # Start up SPICE if needed
    
    if (sp.ktotal('ALL') == 0):
        sp.furnsh('kernels_kem_prime.tm')
        
    # Set the RA/Dec of MU69. We could look this up from SPICE but it changes slowly, so just keep it fixed for now.
    
    radec_mu69 = (4.794979838984583, -0.3641418801015417)
    
    # Load and stack the field images
    
    stack_field = image_stack(os.path.join(dir_data, reqid_field), do_force=do_force)
    stack_field.align(method = 'wcs', center = radec_mu69)
    img_field  = stack_field.flatten(zoom=zoom, padding=padding)
    
    if do_force:
        stack_field.save()

    hbt.figsize((12,12))
    hbt.set_fontsize(15)
    
    for reqid in reqids_haz:
        stack_haz = image_stack(os.path.join(dir_data, reqid), do_force=do_force)
        stack_haz.align(method = 'wcs', center = radec_mu69)
        img_haz  = stack_haz.flatten(zoom=zoom, padding=padding)

        if do_force:
            stack_haz.save()
            
        # Make the plot
        
        diff = img_haz - img_field
        diff_trim = hbt.trim_image(diff)
        plt.imshow(stretch(diff_trim))
        plt.title(f"{reqid} - field, zoom = {zoom}")

        # Save the stacked image as a FITS file
        
        file_out = os.path.join(dir_data, reqid, "stack_n{}_z{}.fits".format(stack_haz.size[0], zoom))
        hdu = fits.PrimaryHDU(stretch(diff_trim))
        hdu.writeto(file_out, overwrite=True)
        print(f'Wrote: {file_out}')    
        
        # Save the stack as a PNG
        
        file_out_plot_stack = file_out.replace('.fits', '.png')
        plt.savefig(file_out_plot_stack, bbox_inches='tight')
        print("Wrote: {}".format(file_out_plot_stack))

        # Display it 
        # This must be done *after* the plt.savefig()
        
        plt.show()
        
        # Make a radial profile
        
        pos =  np.array(np.shape(diff))/2
        (radius, profile) = get_radial_profile_circular(diff, pos=pos, width=1)
    
        hbt.figsize((10,8))
        hbt.set_fontsize(15)
        plt.plot(radius, profile)
        plt.xlim((0, 50*zoom))
        plt.ylim((-1,np.amax(profile)))
        plt.xlabel('Radius [pixels]')
        plt.title(f'Ring Radial Profile, {reqid}, zoom={zoom}')
        plt.ylabel('Median DN')
        plt.show()

# =============================================================================
# Calculate how many DN MU69 should be at encounter (K-20d, etc.)
# Or alternatively, convert all of my DN values, to I/F values
# =============================================================================

        # Convert DN values in array, to I/F values
            
        RSOLAR_LORRI_1X1 = 221999.98  # Diffuse sensitivity, LORRI 1X1. Units are (DN/s/pixel)/(erg/cm^2/s/A/sr)
        RSOLAR_LORRI_4X4 = 3800640.0  # Diffuse sensitivity, LORRI 4X4. Units are (DN/s/pixel)/(erg/cm^2/s/A/sr)
        
        C = profile # Get the DN values of the ring. Typical value is 1 DN.
        
        # Define the solar flux, from Hal's paper.
        
        FSOLAR_LORRI  = 176.          # We want to be sure to use the LORRI value, not the MVIC value!
        F_solar = FSOLAR_LORRI # Flux from Hal's paper
        
        RSOLAR = RSOLAR_LORRI_4X4
        
        # Calculate the MU69-Sun distance, in AU (or look it up). 
        
        km2au = 1 / (u.au/u.km).to('1')
        
        et = stack_haz.t['et'][0]
        (st,lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'New Horizons')
        r_nh_mu69 = sp.vnorm(st[0:3]) * km2au # NH distance, in AU
        
        (st,lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'Sun')
        r_sun_mu69 = sp.vnorm(st[0:3]) * km2au # MU69-Sun distance, in AU
        
        pixscale_km =  (r_nh_mu69/km2au * (0.3*hbt.d2r / 256)) / zoom # km per pix (assuming 4x4)
        
        TEXP = stack_haz.t['exptime'][0]
        
        I = C / TEXP / RSOLAR   # Could use RSOLAR, RJUPITER, or RPLUTO. All v similar, except for spectrum assumed.
        
        # Apply Hal's conversion formula from p. 7, to compute I/F and print it.
        
        IoF = math.pi * I * r_sun_mu69**2 / F_solar # Equation from Hal's paper
        
        plt.plot(radius * pixscale_km, IoF)
        plt.xlim((0, 50000))
        plt.ylim((-1e-7, 4e-7))
#        plt.ylim((0,np.amax(IoF)))
#        plt.yscale('log')
        plt.xlabel('Radius [km]')
        plt.title(f'Ring Radial Profile, {reqid}, zoom={zoom}')
        plt.ylabel('Median I/F')
        file_out_plot_profile = file_out.replace('.fits', '_profile.png')
        plt.savefig(file_out_plot_profile, bbox_inches='tight')
        plt.show()
        print(f'Wrote: {file_out_plot_profile}')
        
        # Write it to a table
        t = Table([radius, radius * pixscale_km, profile, IoF], names = ['RadiusPixels', 'RadiusKM', 'DN/pix', 'I/F'])
        file_out_table = file_out.replace('.fits', '_profile.txt')
        t.write(file_out_table, format='ascii', overwrite=True)
        print("Wrote: {}".format(file_out_table))
Example #8
    def __init__(self, master, size_window):

        self.master = master   # This is the handle to the main Tk widget. I have to use it occasionally to 
                               # set up event handlers, so grab it and save it.

        self.size_window = size_window # Save the size of the whole Tk window, in pixels.
        
        # Open the image stack
        
#        self.stretch_percent = 90    
#        self.stretch = astropy.visualization.PercentileInterval(self.stretch_percent) # PI(90) scales to 5th..95th %ile.
#        
        name_ort = 'ORT4'
#        name_ort = 'ORT2_OPNAV'
        
        if (name_ort == 'ORT1'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
            #        self.reqids_haz  = ['K1LR_HAZ03', 'K1LR_HAZ01', 'K1LR_HAZ02']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT1/throop/backplaned/'

        if (name_ort == 'ORT2'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
#            self.reqids_haz  = ['K1LR_HAZ03', 'K1LR_HAZ01', 'K1LR_HAZ02']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT2/throop/backplaned/'

        if (name_ort == 'ORT3'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT3/throop/backplaned/'

        if (name_ort == 'ORT4'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT4/throop/backplaned/'
            
        if (name_ort == 'ORT2_OPNAV'):
            self.dir_data    = '/Users/throop/Data/ORT2/throop/backplaned/'
            dirs = glob.glob(self.dir_data + '/*LR_OPNAV*')         # Manually construct a list of all the OPNAV dirs
            self.reqids_haz = []
            for dir_i in dirs:
                self.reqids_haz.append(os.path.basename(dir_i))
            self.reqids_haz = sorted(self.reqids_haz)    
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'    
            
        # Set the edge padding large enough s.t. all output stacks will be the same size.
        # This value is easy to compute: loop over all stacks, and take max of stack.calc_padding()[0]
        
        self.padding     = 61 # Amount to pad the images by. This is the same as the max drift btwn all images in stacks
        self.zoom        = 4  # Sub-pixel zoom to apply when shifting images. 1 for testing; 4 for production.
        self.num_image   = 0  # Which stack number to start on.
        self.zoom_screen = 1  # 'Screen zoom' amount to apply. This can be changed interactively.
        
        self.is_blink    = False  # Blinking mode is turned off by default
        self.dt_blink    = 300    # Blink time in ms
        
        # Start up SPICE if needed
        
        if (sp.ktotal('ALL') == 0):
            sp.furnsh('kernels_kem_prime.tm')
            
        # Set the RA/Dec of MU69. We could look this up from SPICE but it changes slowly, so just keep it fixed for now.
        
        self.radec_mu69 = (4.794979838984583, -0.3641418801015417)
        
        # Set the CA time. Roughly doing this is fine.
        
        self.et_ca = sp.utc2et('2019 1 Jan 05:33:00')
        
        # Boolean. For the current image, do we subtract the field frame, or not?
        
        self.do_subtract = True

        hbt.set_fontsize(20)

        # Set the stretch range, for imshow. These values are mapped to black and white, respectively.
        
        self.vmin_diff = -1   # Range for subtracted images
        self.vmax_diff =  2
        
        self.vmin_raw = -1    # Range for raw images (non-subtracted)
        self.vmax_raw = 1000
        
# Restore the stacks directly from archived pickle file, if it exists
        
        self.file_save = os.path.join(self.dir_data, 
                                      f'stacks_blink_{name_ort}_n{len(self.reqids_haz)}_z{self.zoom}.pkl')
        
        if os.path.isfile(self.file_save):
            self.restore()
        else:

# If no pickle file, load the stacks from raw images and re-align them
            
            # Load and stack the field images
    
            print("Stacking field images")        
            self.stack_field = image_stack(os.path.join(self.dir_data, self.reqid_field))    # The individual stack
            self.stack_field.align(method = 'wcs', center = self.radec_mu69)
            (self.img_field, self.wcs_field)  =\
                self.stack_field.flatten(zoom=self.zoom, padding=self.padding) # Save the stacked image and WCS
        
            # Load and stack the Hazard images
            
            self.img_haz   = {} # Output dictionary for the stacked images
            self.stack_haz = {} # Output dictionary for the stacks themselves
            self.wcs_haz   = {} # Output dictionary for WCS for the stacks
            
            for reqid in self.reqids_haz:
                self.stack_haz[reqid] = image_stack(os.path.join(self.dir_data, reqid))    # The individual stack
                self.stack_haz[reqid].align(method = 'wcs', center = self.radec_mu69)
                (self.img_haz[reqid], self.wcs_haz[reqid])  =\
                    self.stack_haz[reqid].flatten(zoom=self.zoom, padding=self.padding) 
                # Put them in a dictionary

            # Save the stacks to a pickle file, if requested
            
            yn = input("Save stacks to a pickle file? ")
            if ('y' in yn):
                self.save()
                
# Set the sizes of the plots -- e.g., (15,15) = large square
        
        figsize_image = (15,15)
        
        self.fig1 = Figure(figsize = figsize_image)    # <- this is in dx, dy... which is opposite from array order!

        self.ax1 = self.fig1.add_subplot(1,1,1, 
                                    label = 'Image') # Return the axes
        plt.set_cmap('Greys_r')
        
        self.canvas1 = FigureCanvasTkAgg(self.fig1,master=master)
        self.canvas1.draw()   # FigureCanvasTkAgg.show() was removed in newer matplotlib; draw() is the equivalent
        
# Put objects into appropriate grid positions

        self.canvas1.get_tk_widget().grid(row=1, column=1, rowspan = 1)
        
# Define some keyboard shortcuts for the GUI
# These functions must be defined as event handlers, meaning they take two arguments (self and event), not just one.

        master.bind('q',       self.quit_e)
        master.bind('<space>', self.toggle_subtract_e)
        master.bind('=',       self.prev_e)
        master.bind('-',       self.next_e)
        master.bind('h',       self.help_e)
        master.bind('?',       self.help_e)
        master.bind('<Left>',  self.prev_e)
        master.bind('<Right>', self.next_e)
        master.bind('s',       self.stretch_e)
        master.bind('b',       self.blink_e)
        master.bind('t',       self.blink_set_time_e)
        master.bind('#',       self.blink_set_sequence_e)
        master.bind('z',       self.zoom_screen_up_e)
        master.bind('Z',       self.zoom_screen_down_e)
        master.bind('x',       self.clear_current_objects_e)
        master.bind('X',       self.clear_all_objects_e)
        
        master.bind('=',       self.scale_max_up_e)   # NB: these rebind '=' and '-', overriding the prev/next bindings above
        master.bind('+',       self.scale_max_down_e)
        master.bind('-',       self.scale_min_up_e)
        master.bind('_',       self.scale_min_down_e)
        
        master.bind('S',       self.save_output_e)
        
        self.canvas1.get_tk_widget().bind("<Button 1>", self.click_e)        
        
# Set the initial image index
        
        self.reqid_haz = self.reqids_haz[self.num_image]  # Set it to 'K1LR_HAZ00', for instance.

# Initialize the list of found objects for each stack
# There is a list of objects for each individual stack (ie, for each frame in the blink)

        self.list_objects = {}
        
        for reqid_i in self.reqids_haz:
            self.list_objects[reqid_i] = []  # Each entry here will be something like [(x, y, dn), (x, y, dn)] 

# Initialize a set of matplotlib 'line' objects for the image.
# These correspond to the 'objects' above, which are really just points            
            
        self.list_lines = {}

        for reqid_i in self.reqids_haz:
            self.list_lines[reqid_i] = []  # Each entry here will be a list of plot objects, of type 'line' 
                    
# Set a list of frame numbers to animate. For default, do them all.

        self.list_index_blink = hbt.frange(0, len(self.reqids_haz)-1) # List of indices (e.g., [0, 1, 2, ...])
        self.list_index_blink_str = ' '.join(np.array(self.list_index_blink).astype(str)) # Make into string ('1 2 3')
        self.index_blink = 0      # where in the list of indices do we start? Current index.     
        
# Plot the image
        
        self.plot()
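As the comment above the key bindings notes, each handler must take two arguments (self and event). Below is a minimal sketch of one such handler; the name quit_e comes from the bindings above, while the body is an assumption about what it does.

    def quit_e(self, event):
        """Handler for the 'q' key: shut down the GUI (sketch only)."""
        self.master.destroy()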
Example #9
def nh_ort1_find_rings():
    
    plt.set_cmap('Greys_r')

    stretch_percent = 90    
    stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.

    dir = '/Users/throop/Data/ORT1/throop/backplaned'
    files = glob.glob(os.path.join(dir, '*', '*fits'))
    
    hbt.figsize((15,8))
    
    # Set up output arrays
    
    ra_arr    = []
    dec_arr   = []
    reqid_arr = []
    exptime_arr= []
    et_arr    = []
    utc_arr   = [] 

    # Start up SPICE
    
    if (sp.ktotal('ALL') == 0):
        sp.furnsh('kernels_kem.tm')
    
    for ii,file in enumerate(files):
        
        hdu = fits.open(file)
        print('Reading {}/{}: {}'.format(ii, len(files), os.path.basename(file)))
        img = hdu[0].data
        
        header = hdu[0].header
        
        ra_arr.append(header['CRVAL1'])
        dec_arr.append(header['CRVAL2'])
        exptime_arr.append(header['EXPTIME'])
        reqid_arr.append(header['REQID'])
        et_arr.append(header['SPCSCET'])
        utc_arr.append(sp.et2utc(header['SPCSCET'], 'C', 0))
        
        radius_eq = hdu['RADIUS_EQ'].data
        
        dradius = 1000
        num_bins_radius = 100
        
        bins_radius = hbt.frange(0, np.amax(radius_eq), num_bins_radius)   # Bin by the RADIUS_EQ backplane
        dn_median_arr = np.zeros(num_bins_radius)
        dn_mean_arr   = np.zeros(num_bins_radius)
        
        for i in range(num_bins_radius-1):
            is_good = np.logical_and(radius_eq > bins_radius[i], radius_eq < bins_radius[i+1])
            dn_median_arr[i] = np.nanmedian(img[is_good])
            dn_mean_arr[i]   = np.nanmean(img[is_good])

        do_plot = False

        if do_plot:
            
            plt.subplot(1,2,1)
            plt.plot(bins_radius, dn_median_arr, label = 'median')
            plt.plot(bins_radius, dn_mean_arr,   label = 'mean')
            plt.legend(loc = 'upper right')
            plt.title("{}/{}  {}".format(ii,len(files), os.path.basename(file)))
           
            
            plt.subplot(1,2,2)
            plt.imshow(stretch(img))
            plt.show()
        
        hdu.close()
        
# =============================================================================
# Read the values into NumPy arrays
# =============================================================================

    ra   = np.array(ra_arr)
    dec  = np.array(dec_arr)
    reqid = np.array(reqid_arr)
    et = np.array(et_arr)
    exptime = np.array(exptime_arr)
    utc  = np.array(utc_arr)
    
    plt.plot(ra, dec, ls='none', marker = 'o', ms=2)
    
    # Put them all into a table
    
    t = Table(          [ra, dec, et, utc, exptime, reqid], 
              names = ('RA', 'Dec', 'ET', 'UTC', 'EXPTIME', 'ReqID'))
    
    
    w_haz0 = (t['ReqID'] == 'K1LR_HAZ00')
    w_haz1 = (t['ReqID'] == 'K1LR_HAZ01')
    w_haz2 = (t['ReqID'] == 'K1LR_HAZ02')
    w_haz3 = (t['ReqID'] == 'K1LR_HAZ03')
    w_haz4 = (t['ReqID'] == 'K1LR_HAZ04')
    
    plt.plot(ra[w_haz0], dec[w_haz0], marker='o', ls='none')
    plt.plot(ra[w_haz1], dec[w_haz1], marker='o', ls='none')
    plt.plot(ra[w_haz2], dec[w_haz2], marker='o', ls='none')
    plt.plot(ra[w_haz3], dec[w_haz3], marker='o', ls='none')
    plt.plot(ra[w_haz4], dec[w_haz4], marker='o', ls='none')
    plt.show()
    
    plt.plot(et[w_haz0], marker='o', ls='none')
    plt.plot(et[w_haz1], marker='o', ls='none')
    plt.plot(et[w_haz2], marker='o', ls='none')
    plt.plot(et[w_haz3], marker='o', ls='none')
    plt.plot(et[w_haz4], marker='o', ls='none')
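# The radius-binning loop in nh_ort1_find_rings() above generalizes to any image plus a matching
# radius backplane. A minimal reusable sketch (function name illustrative; assumes numpy as np,
# as elsewhere in this file):

def radial_median_profile(img, radius_eq, num_bins=100):
    """Median DN in equal-width radial bins; returns (bin_centers, medians)."""
    edges = np.linspace(0, np.nanmax(radius_eq), num_bins + 1)
    medians = np.full(num_bins, np.nan)
    for i in range(num_bins):
        in_bin = np.logical_and(radius_eq >= edges[i], radius_eq < edges[i + 1])
        if np.any(in_bin):
            medians[i] = np.nanmedian(img[in_bin])
    centers = 0.5 * (edges[:-1] + edges[1:])
    return centers, medians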
def nh_ort_make_backplanes(digit_filter, frame, q):
    
    """
    Process all of the MU69 ORT files. 
    
    Takes Simon's WCS'd FITS files as inputs, and creates backplaned FITS as output.
    
    Call this function in order to generate the backplanes from Simon's WCS files.

    This function is part of HBT's pipeline, and not a general-purpose routine.
    
    Arguments
    -----

    frame:
        The SPICE frame to use
    
    digit_filter:
        '12', '34', etc -- something to filter the FITS files by.
    q:
        The multiprocessor queue object.
    
    NB: I could not figure out how to pass keyword arguments to the Process() function,
    which is why this function uses positional arguments only.
    """

# =============================================================================
# Initialize
# =============================================================================

    do_print_diag = False

    do_plot    = True
    do_clobber = True
    
    name_target   = 'MU69'
    name_observer = 'New Horizons'
    # frame         = '2014_MU69_SUNFLOWER_ROT'  # Change this to tuna can if needed, I think??
    # frame         = '2014_MU69_TUNACAN_ROT'
    # frame         = '2014_MU69_ORT4_1'  # Change this to tuna can if needed, I think??
    
# =============================================================================
#     Get a proper list of all the input files
# =============================================================================
    
    do_ORT1 = False
    do_ORT3 = False
    do_ORT2 = False
    do_ORT4 = False
    do_ACTUAL = True  # Run this on actual OpNav data!
    
    do_force = True
    
#    dir_data_ort = '/Users/throop/Data/ORT1'
#    dir_in  = os.path.join(dir_data_ort, 'porter', 'pwcs_ort1')
#    dir_out = os.path.join(dir_data_ort, 'throop', 'backplaned')

    if do_ORT2:
        dir_data_ort = '/Users/throop/Data/ORT2'
        dir_in  = os.path.join(dir_data_ort, 'porter', 'pwcs_ort2')
        dir_out = os.path.join(dir_data_ort, 'throop', 'backplaned')
        files = glob.glob(os.path.join(dir_in, '*','*_ort2.fit'))

    if do_ORT3:    
        dir_data_ort = '/Users/throop/Data/ORT3'
        dir_in  = os.path.join(dir_data_ort, 'buie') # Using Buie backplanes, not Simon's.
        dir_out = os.path.join(dir_data_ort, 'throop', 'backplaned')
        files = glob.glob(os.path.join(dir_in, '*','*_ort3.fit'))
        frame         = '2014_MU69_TUNACAN_ROT'
        
    if do_ORT4:
        dir_data_ort = '/Users/throop/Data/ORT4'
        dir_in  = os.path.join(dir_data_ort, 'porter', 'pwcs_ort4')
        dir_out = os.path.join(dir_data_ort, 'throop', 'backplaned')
        files = glob.glob(os.path.join(dir_in, '*','*_pwcs.fits'))

    if do_ACTUAL:
        dir_data_ort = '/Users/throop/Data/MU69_Approach'
        dir_in  = os.path.join(dir_data_ort, 'porter')
        dir_out = os.path.join(dir_data_ort, 'throop', 'backplaned')

        # files = glob.glob(os.path.join(dir_in,'*', '*_pwcs.fits'))  # OpNav field data (older)        
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018267', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018284', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018287', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018298', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018301', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018304', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018311', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018314', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018315', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018316', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018317', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018325', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018326', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018327', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_OpNav_L4_2018328', '*_pwcs2.fits')) # OpNav data
        # files = glob.glob(os.path.join(dir_in, 'KALR_MU69_Hazard_L4_2018325', '*_pwcs2.fits')) # OpNav data

        files = glob.glob(os.path.join(dir_in, '*', '*_pwcs2.fits')) # OpNav data
        
        do_force = False
        do_clobber = False

# =============================================================================
# Check what FRAME we are using, and change output directory appropriately
# =============================================================================

    if 'TUNACAN' in frame.upper():
        dir_out = dir_out.replace('backplaned', 'backplaned_tunacan')
        
# =============================================================================
#     Filter files if needed
# =============================================================================
    
    # If desired, do a 'digit filter.' This filters the files down into a smaller number.
    # This is useful to do processing in parallel. Python global interpreter lock means
    # that only one CPU at a time can be used. To get around this, filter the files down,
    # and put each filter in its own Spyder tab.
    
    if digit_filter:

        # Typical digit_filter values: '12', '34', '56', '78', '90'

        files_filtered = []
        for file in files:
            base = os.path.basename(file)
            digit = base[12]  # Match the penultimate digit in LORRI filename (e.g., lor_0405348852_pwcs ← matches '5')
                              # This is the digit that changes the most in the LORRI files, so it's a good choice.
            if (digit in digit_filter):
                files_filtered.append(file)
        if do_print_diag:
            print("Filtered on '{}': {} files → {}".format(digit_filter, len(files), len(files_filtered)))
        
        files = files_filtered            

# =============================================================================
# Start SPICE, if necessary
# =============================================================================
    
    if (sp.ktotal('ALL') == 0):
        sp.furnsh('kernels_kem_prime.tm')
        
# =============================================================================
# Loop and create each backplane
# =============================================================================
        
    count_run = 0
    count_skipped = 0
    
    
    for i,file_in in enumerate(files):
        if do_print_diag:
            print("{}/{}".format(i,len(files))) 
        file_out = file_in.replace(dir_in, dir_out)
        file_out = file_out.replace('_pwcs.fit', '_pwcs_backplaned.fit') # Works for both .fit and .fits
        file_out = file_out.replace('_pwcs2.fit', '_pwcs2_backplaned.fit') # Works for both .fit and .fits
    
        # Call the backplane function. Depending on the settings, this will re-run when a newer input file
        # has arrived, and regenerate the output backplane.
        
        try:
            create_backplanes_fits(file_in, 
                                      name_target,
                                      frame,
                                      name_observer,
                                      file_out,
                                      do_plot=False, 
                                      do_clobber=do_clobber,
                                      do_verbose=True)
            count_run += 1
            if (do_plot):
                plot_backplanes(file_out, name_observer = name_observer, name_target = name_target)
     
        except FileExistsError:
            if do_print_diag:
                print('File exists -- skipping. {}'.format(os.path.basename(file_out)))
            count_skipped +=1
            
    q.put(f'Digit filter {digit_filter}, {frame:25} {len(files)} files examined; ' +
          f'{count_run} run; {count_skipped} skipped')
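Because the docstring explains that the function takes positional arguments so it can be handed to multiprocessing's Process(), a driver sketch might fan the digit filters out across worker processes. The filter values and frame name below are taken from the comments above; the driver itself is illustrative.

from multiprocessing import Process, Queue

if __name__ == '__main__':
    q = Queue()
    frame = '2014_MU69_SUNFLOWER_ROT'
    workers = [Process(target=nh_ort_make_backplanes, args=(digits, frame, q))
               for digits in ('12', '34', '56', '78', '90')]
    for w in workers:
        w.start()
    for _ in workers:       # one summary line per worker
        print(q.get())
    for w in workers:
        w.join()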