Code example #1
import math
from math import pi

import numpy as np
import matplotlib.pyplot as plt
import astropy.units as u
from matplotlib.patches import Rectangle

import hbt
from pymiecoated import Mie    # Assumed source of the Mie class, based on the x=, m=, qext/qsca/qb/S12 usage below


def test_p11():
    '''
    Test to plot a phase function, and make sure it is normalized properly
    '''
    
    alam = 500*u.nm
    r = 10*u.micron
    x = (2*math.pi * r / alam).to('1').value
    

    num_theta = 1000
    theta = hbt.frange(0,math.pi, num_theta)
    p11 = np.zeros(num_theta)
    
    phase = math.pi - theta      # Theta is scattering angle
    
    nm_refract = complex(1.5, 0.1)
    mie = Mie(x=x, m=nm_refract)  # This is only for one x value, not an ensemble

    qext = mie.qext()
    qsca = mie.qsca()
    qbak = mie.qb()

    for i,theta_i in enumerate(theta):
        (S1, S2)  = mie.S12(np.cos(theta_i)) # Looking at code, S12 returns tuple (S1, S2). S3, S4 are zero for sphere.
        k = 2*pi / alam
        sigma = pi * r**2 * qsca             # For (S1,S2) -> P11: p. 2 of http://nit.colorado.edu/atoc5560/week8.pdf
                                             # qsca = scattering efficiency. sigma = scattering cross-section 
        p11[i]  = 4 * pi / (k**2 * sigma) * ( (np.abs(S1))**2 + (np.abs(S2))**2) / 2
                   
    # Check the normalization of the resulting phase function.

    dtheta = theta[1] - theta[0]
    
    norm = np.sum(p11 * np.sin(theta) * dtheta)  # This should be 2, as per TPW04 eq. 4
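    # Why 2: the normalization is (1/4 pi) * Integral(P11 dOmega) = 1, and dOmega = 2 pi sin(theta) dtheta,
    # so Integral(P11 sin(theta) dtheta) over [0, pi] should equal 2.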
    
    print('Normalized integral = {:.2f}'.format(norm))
    
    plt.plot(phase*hbt.r2d, p11)
    plt.yscale('log')
    plt.title('X = {:.1f}'.format(x))
    plt.xlabel('Phase angle')
    plt.ylabel('$P_{11}$')
    plt.show()
    
    p = plt.plot([0,10], [0,10])
    currentAxis = plt.gca()
    currentAxis.add_patch(Rectangle((0.5, 0.5), 0.7, 0.7,alpha=0.1, color='red'))
    plt.text(1, 1, 'Danger')
    plt.show()
    
Code example #2
from math import pi

import numpy as np

import hbt


def other():

    theta_0 = 0
    theta_1 = pi
    num_theta = 100
    
    theta = hbt.frange(theta_0, theta_1, num_theta)
    dtheta = theta[1] - theta[0]
    
    # Three candidate phase functions are listed below; each assignment overwrites the previous one,
    # so only the last expression (the Lambert sphere) is actually integrated.

    p11 = np.cos(theta)     # Lambertian phase function = cos(theta). This is the *surface* phase function --
                            # not the phase function for the entire sphere, which needs to be convolved with the
                            # crescent shape!

    p11 = 1                 # Integral = 2 for this value (isotropic scattering)

    p11 = 8/(3 * pi) * (np.sin(theta) + (pi - theta) * np.cos(theta))
                            # Integral = 2 for this expression (disk-integrated scattering from a Lambert sphere)
                            # Madhusudhan & Burrows, 2012, http://arxiv.org/pdf/1112.4476v1.pdf, eq. 33
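    # Analytic check: over [0, pi], Integral(sin^2 theta dtheta) = pi/2 and
    # Integral((pi - theta) cos(theta) sin(theta) dtheta) = pi/4, so 8/(3 pi) * (3 pi/4) = 2.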
    
    integral = np.sum(p11 * np.sin(theta) * dtheta)     # This integral should be 2, as per TE98 eq 16.
 
    print("Total = {}".format(integral))
Code example #3
t = pickle.load(lun)
lun.close()
	
groupmask = (t['Desc'] == 'Jupiter ring - search for embedded moons')
t_group = t[groupmask]	

segment = 1  # 1, 2, or 3

fs = 15 # Set the font size

if (segment == 1): # 50 frames, upright
    frames_med = np.array([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19])
#    frames_med = np.array([1,2,3,4,5])
    num_frames_med = np.size(frames_med)     

    frames_data = hbt.frange(0,48,49) # 0 .. 48 (49 frames)

if (segment == 2): # 5 frames, tilted sideways
    frames_med = np.array([49,51,52,53,53])
    num_frames_med = np.size(frames_med) 
    frames_data = frames_med

if (segment == 3): # 58 frames, tilted sideways
    frames_med = hbt.frange(54,111,58)
    num_frames_med = np.size(frames_med) 
    frames_data = frames_med

frame_arr      = np.zeros((num_frames_med, 1024, 1024))
frame_sfit_arr = np.zeros((num_frames_med, 1024, 1024))
frame_ffit_arr = np.zeros((num_frames_med, 1024, 1024))
Code example #4
    t['groupnum'] = np.zeros(len(t)).astype(int)
    t['imagenum'] = np.zeros(len(t)).astype(int)

    # Standardize the reqid field, so it is sortable

    t['ReqID_fixed'] = t['ReqID']

    for i in range(len(t)):
        t['ReqID_fixed'][i] = fix_reqid(t['ReqID'][i])

    # Loop over all the groups, and assign a group and image number to each file

    for groupnum, group in enumerate(groups_all):
        is_match = t['Desc'] == group
        t['groupnum'][is_match] = groupnum
        t['imagenum'][is_match] = hbt.frange(0,
                                             np.sum(is_match) - 1).astype(int)

    # Now get a list of all reqid's

    reqids_all = astropy.table.unique(t, keys=(['ReqID_fixed']))['ReqID']

    # Now get a list of reqid's, for rings only!

    groups_rings = groups_all[5:]
    groupmask = np.logical_or((t['Desc'] == groups_rings[0]),
                              (t['Desc'] == groups_rings[1]))
    groupmask = np.logical_or(groupmask, t['Desc'] == groups_rings[2])
    groupmask = np.logical_or(groupmask, t['Desc'] == groups_rings[3])
    groupmask_rings = groupmask

    t_rings = t[groupmask_rings]
Code example #5
index_group = 7  # Raw image, group
method = 'String'   # Next, Previous, String, etc.
argument = '8-15'  # 33, or 8/23, or 10-20, etc.
    
groupmask = (t['Desc'] == groups[index_group])
t_group = t[groupmask]	

file = t_group[index_image]['Filename'] 

image = hbt.read_lorri(file)

image_proc = hbt.nh_jring_process_image(image, method, argument, index_group, index_image)

#stop

image_stray = hbt.nh_get_straylight_median(index_group, hbt.frange(8,15))

nx = np.shape(image)[0]
ny = np.shape(image)[1]

#image_stray = '/Users/throop/data/NH_Jring/out/straylight_median_g7_n8..15_sfit5,5.pkl'

xvals = range(nx)
yvals = range(ny)
(x,y) = np.meshgrid(xvals,yvals)

stretch = astropy.visualization.PercentileInterval(90)  # PI(90) scales array to 5th .. 95th %ile. 

image_s5 = hbt.remove_sfit(image,5)

plt.rcParams['figure.figsize'] = 5,5
Code example #6
    plt.title('frame_ffit_med, n=' + repr(num_frames_med))
    plt.show()    
    
#stop




#####

#def f():
#    nh_get_straylight_median(8,[50,51,52,52], do_sfit=True, power1=5, power2=5) # long movie, tilted portion
#    nh_get_straylight_median(8,hbt.frange(0,49,50), do_sfit=True, power1=5, power2=5) # long movie, first 50 frames
#nh_get_straylight_median(5,hbt.frange(1,6,6), do_sfit=True, power1=5, power2=5) # best portrait, ansa

s = nh_get_straylight_median(6,hbt.frange(229,232), do_sfit=True, power1=5, power2=5) # 4x4 gossamer 
a = hbt.read_lorri(35117774)
a = hbt.read_lorri(35120054)
a = a - hbt.sfit(a,5)
a = hbt.remove_brightest(a,0.99,symmetric=True)  # Careful here -- don't crop the ring!
(s_norm,coeffs) = hbt.normalize_images(s,a)
plt.imshow(a - s_norm)

#plt.subplot(3,2,1) # Reversed! num cols, num rows ** Good reduction. Largest az extent.
#im = hbt.remove_brightest(image - frame_sfit_med, 0.97, symmetric=True)
#plt.imshow(hbt.remove_brightest(im - hbt.sfit(im,8), 0.97, symmetric=True), vmin=-20, vmax=20)
#plt.title('image - multi_median_sfit, sfit', fontsize=fs)
##plt.show()
#
#plt.subplot(3,2,2) # OK reduction. Bit lumpy though.
#im = hbt.remove_brightest(image - frame_ffit_med, 0.97, symmetric=True)
Code example #7
# self.is_flattened = False

lun = open(dir_out + file_pickle, 'rb')
t = pickle.load(lun)                        # Self.t is the *entire* table for all J-ring obs, not subset.
lun.close()

# Process the group names. Some of this is duplicated logic -- depends on how we want to use it.

groups = astropy.table.unique(t, keys=(['Desc']))['Desc']

stretch_percent = 90    
stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.

index_group = 5
index_images = hbt.frange(1,6)

# Extract the one group we are looking at 
# NB: This creates an entirely new table -- *not* a view into the original table. 
# If we modify values here, we need to explicitly write the changes back to the original.

groupmask = t['Desc'] == groups[index_group]
t_group = t[groupmask]  # 
    
num_images_group = np.size(t_group)

num_bins_radius  = 300
num_bins_azimuth = 300

bins_radius = hbt.frange(126_000, 131_000, num_bins_radius)
bins_azimuth = hbt.frange(0, 2*math.pi, num_bins_azimuth)
Code example #8
File: gossamer.py  Project: henrythroop/NH_Jring2
t = pickle.load(lun)
lun.close()

groups = astropy.table.unique(t, keys=(['Desc']))['Desc']

groupmask = (t['Desc'] == groups[index_group])
t_group = t[groupmask]

####

# Read and plot the LHS image of a 1x3 mosaic of Gossamer ring images.
# This is a lot of lines of code, but most of it is just stacking images and scaling, which should be rewritten 
# into a function.

index_group = 6
index_image = hbt.frange(181,184).astype(int) # Which frame from the group do we extract?

image_stray = hbt.nh_get_straylight_median(6, hbt.frange(185,188)) # 

image_arr = np.zeros((1024, 1024, 4))
for i in range(4):
    image = hbt.read_lorri(t_group[index_image[i]]['Filename'], autozoom=True)
    image_arr[:,:,i] = image

image_sum = np.sum(image_arr,axis=2)
image_sum_sfit = hbt.remove_sfit(image_sum, 5)

(image_stray_scale,junk) = hbt.normalize_images(image_stray, image_sum_sfit)

final = image_sum_sfit - image_stray_scale
final = hbt.remove_brightest(final, 0.95, symmetric=True)
Code example #9
    
    file_short = files[i].split('/')[-1]
    
    if DO_INTERACTIVE:
        k = input("File {}-{} ({} = {}): ".format(0, np.size(files)-1, i, file_short))
    else:
        k = repr(list_batch[ii])         # Load the next element, as a string
        if (ii == np.size(list_batch)-1):  # If we've hit the last element
            DO_INTERACTIVE = True    # And go back to interactive mode

    if (k in ['x', 'q']):            # QUIT
        sys.exit(0)

    if ('-' in k):
       (i1, i2) = np.array(k.split('-')).astype('int')
       list_batch = hbt.frange(int(i1), int(i2)).astype('int')
       ii = 0                        # Current element number, within our list
       k = repr(list_batch[ii])      # Extract one element from it
       print ("Running from {} to {}...".format(i1, i2))
       DO_INTERACTIVE = False

    if ('*' in k):                   # Wildcard search
        searchstr = k.replace('*', '')
        for ii,file in enumerate(files):
            if (searchstr in file):
                print("{}. {}".format(ii, file.split('/')[-1]))
                
    if (k == 'l'):                  # List all files
        for ii,file in enumerate(files):
            print("{}. {}".format(ii, file.split('/')[-1]))
    
Code example #10
def nh_jring_process_image(image_raw,
                           method,
                           vars,
                           index_group=-1,
                           index_image=-1,
                           mask_sfit=None):
    """
    Return image with stray light removed. Flux is preserved and no clipping is done.

    Parameters
    -----
    
    image_raw: 
        NumPy array with the data image.

    method:    
        Method of background subtraction, which can be 'Next', 'Prev', 'Polynomial', 'String', 'None', 
        'Grp Num Frac Pow', etc.
        
        In general 'String' is the most flexible, and recommended.
        It can be things like "5/0-10 r3 *2 mask_7_10":
            
        - Make a median of Group 5, Images 0-10
        - Rotate them all by 270 degrees
        - Scale it to the data image
        - Multiply background image by 2
        - Subtract data - background
        - Remove a 5th degree polynomial from the result [always done, regardless]
        - Load the Photoshop-created mask file "mask_7_10" and incorporate via a tuple
        - Return final result

    vars:
        The argument to the 'method'. Can be an exponent, a file number, a string, etc -- arbitrary, as needed. 
             
    index_group:
        Index of current group. Not used except for Next/Prev.

    index_image:
        Index of current image. Not used except for Next/Prev.
        
    mask_sfit:
        An optional mask to be applied when doing the sfit. Only pixels with True will be used.
        This is to mask out satellites, stars, CRs, etc. so they don't affect the sfit().
             
    """

    stretch_percent = 90
    stretch = astropy.visualization.PercentileInterval(
        stretch_percent)  # PI(90) scales to 5th..95th %ile.

    # Load the arrays with all of the filenames

    dir_out = '/Users/throop/Data/NH_Jring/out/'

    file_pickle = dir_out + 'nh_jring_read_params_571.pkl'  # Filename to get filenames, etc.

    lun = open(file_pickle, 'rb')
    t = pickle.load(lun)
    lun.close()

    # Initialize variables

    DO_MASK = False  # We set this based on whether a mask is passed in or not

    dir_mask_stray = dir_out.replace(
        'out', 'masks')  # Directory where the mask files are

    # Process the group names. Some of this is duplicated logic -- depends on how we want to use it.

    groups = astropy.table.unique(t, keys=(['Desc']))['Desc']

    if (index_group != -1):  # Only do this if we actually passed a group in
        groupmask = (t['Desc'] == groups[index_group])
        t_group = t[groupmask]

        # Look up the filename, in case we need it.

        file_image = t_group['Filename'][index_image]

    if (method == 'Previous'):
        file_prev = t_group['Filename'][index_image - 1]
        #            print "file =      " + filename
        print("file_prev = " + file_prev)
        image_bg = hbt.read_lorri(file_prev, frac_clip=1.0, bg_method='None')
        image_fg = image_raw
        image = image_fg - image_bg
        image_processed = image

    if (method == 'Next'):
        file_next = t_group['Filename'][index_image + 1]
        image_bg = hbt.read_lorri(file_next,
                                  frac_clip=1.0,
                                  bg_method='None',
                                  autozoom=True)
        image_fg = image_raw
        image = image_fg - image_bg
        image_processed = image

    if (method == 'Median'):  # XXX not working yet
        file_prev = t_group['Filename'][index_image - 1]
        image_bg = hbt.read_lorri(file_prev, frac_clip=1.0, bg_method='None')
        image_fg = image_raw
        image = image_fg - image_bg
        image_processed = image

    if (method == 'Polynomial'):

        power = vars
        image = image_raw - hbt.sfit(
            image_raw, power)  # Look up the exponent and apply it
        image_processed = image

    if (method == 'Grp Num Frac Pow'):  # Specify to subtract a specified group#/image#, mult factor, and sfit power.
        # I thought this would be useful, but it turns out we usually need to subtract
        # a median of multiple images -- not just one -- so this is not very useful.
        # Plus, the best power is usually 5, and the best frac can be calc'd
        # with a linfit.

        if (np.size(vars) == 0):  # If no args passed, just plot the image
            power = 0
            frac = 0
            image = image_raw

        if (np.size(vars) == 1):  # One variable: interpret as exponent
            power = float(vars[0])
            frac = 0
            image = image_raw
            image_bg = hbt.sfit(image, power, mask=mask_sfit)
            image = image - image_bg

        if (np.size(vars) == 2):  # Two variables: interpret as group num and file num
            (grp, num) = vars
            frac = 1
            power = 0

        if (np.size(vars) == 3):  # Three variables: interpret as group num, file num, fraction
            (grp, num, frac) = vars
            power = 0

        if (np.size(vars) == 4):  # Four variables: Group num, File num, Fraction, Exponent
            (grp, num, frac, power) = vars

        if int(np.size(vars)) in [2, 3, 4]:

            grp = int(grp)
            num = int(num)
            frac = float(frac)
            power = int(power)

            print("group={}, num={}, frac={}".format(grp, num, frac))
            #            print "Group = {}, num{}, Name = {}".format(name_group, num, name)

            name_group = groups[grp]
            groupmask = t['Desc'] == name_group
            group_tmp = t[groupmask]
            filename_bg = group_tmp['Filename'][num]

            image_fg = image_raw
            image_bg = hbt.read_lorri(filename_bg,
                                      frac_clip=1,
                                      bg_method='None')

            image = image_fg - float(frac) * image_bg
            image = image - hbt.sfit(image, power, mask=mask_sfit)

        image_processed = image

# =============================================================================
# Do method 'None' (trivial)
# =============================================================================

    if (method == 'None'):

        image_processed = image = image_raw

#==============================================================================
# Do method 'String'. Complicated, but most useful.
#
# Parse a string like "6/112-6/129", or "129", or "6/114", or "124-129"
#        or "6/123 - 129" or "6/123-129 r1 *0.5 p4 mask_7_12"
#                  or "".
#
# Except for the group and image number, the order of these does not matter.
#==============================================================================
####
# As of 8-July-2017, this is the one I will generally use for most purposes.
#
# 'String' does this:
#   o Subtract the bg image made by combining the named frames, and rotating and scaling as requested (optional)
#   o Apply a mask file (optional)
#   o Subtract a polynomial (optional). ** As of 17-Nov-2017, sfit is applied only to masked pixels, not full image.
#
####

    if (method == 'String'):

        str = vars

        # =============================================================================
        #          Parse any rotation angle -- written as "r90" -- and remove from the string
        # =============================================================================

        angle_rotate_deg = 0

        match = re.search('(r[0-9]+)', str)
        if match:
            angle_rotate_deg = int(match.group(0).replace(
                'r', ''))  # Extract the rotation angle
            str = str.replace(match.group(0),
                              '')  # Remove the whole phrase from string

            if (np.abs(angle_rotate_deg) <= 10):  # Allow value to be passed as (1,2,3) or (90, 180, 270)
                angle_rotate_deg *= 90

        # Determine how much the bg frame should be scaled, to match the data frame. This is just a multiplicative
        # factor that very crudely accommodates differences in phase angle, exptime, etc.

# =============================================================================
#          Parse any stray multiplication factor -- written as "*3" -- and remove from the string
#          Multiplicative factor is used to scale the stray light image to be removed, up and down (e.g., up by 3x)
# =============================================================================

        factor_stray_default = 1  # Define the default multiplicative factor

        factor_stray = factor_stray_default

        match = re.search('(\*[0-9.]+)', str)  # Match   *3   *0.4   etc  [where '*' is literal, not wildcard]
        if match:
            factor_stray = float(match.group(0).replace(
                '*', ''))  # Extract the multiplicative factor
            str = str.replace(match.group(0),
                              '').strip()  # Remove phrase from the string

# =============================================================================
#          Parse any mask file -- written as "mask_7_0" -- and remove from the string
# =============================================================================
#
# This mask is a fixed pattern, read from a file, for stray light etc.
# It is *not* for stars or satellites, which are calculated separately.
#
# To make these mask files, the steps are...

# The mask file uses the same name structure as the summed bg straylight images -- e.g., 8/0-48.
#
# True = good pixel. False = bad.

        file_mask_stray = None

        match = re.search('(mask[0-9a-z._\-]+)', str)

        print("Str = {}, dir_mask_stray = {}".format(str, dir_mask_stray))

        if match:
            file_mask_stray = dir_mask_stray + match.group(
                0) + '.png'  # Create the filename
            DO_MASK = True

            str = str.replace(match.group(0),
                              '').strip()  # Remove the phrase from the string

# =============================================================================
#          Parse any polynomial exponent -- written as 'p5'
#          This is the polynomial removed *after* subtracting Image - Stray
# =============================================================================

        poly_after_default = 0  # Define the default polynomial to subtract.
        # I could do 5, or I could do 0.

        poly_after = poly_after_default

        match = re.search('(p[0-9]+)', str)
        if match:
            poly_after = int(match.group(0).replace(
                'p', ''))  # Extract the polynomial exponent
            str = str.replace(match.group(0),
                              '').strip()  # Remove the phrase from the string

# =============================================================================
#          Now parse the rest of the string
# =============================================================================

# The only part that is left is 0, 1, or 2 integers, which specify the stray light file to extract
# They must be in the form "7/12-15", or "7/12" or "12"

        str2 = str.replace('-', ' ').replace('/', ' ').replace(
            'None', '')  # Get rid of any punctuation

        vars = np.array(
            str2.split(), dtype=int
        )  # With no arguments, split() breaks at any set of >0 whitespace chars.
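        # e.g. '7/12-15'  ->  str2 = '7 12 15'  ->  vars = array([7, 12, 15])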

        # =============================================================================
        # Now load the appropriate stray light image, based on the number of arguments passed
        # =============================================================================

        do_sfit_stray = False  # Flag: When constructing the straylight median file, do we subtract polynomial, or not?
        #
        # Usually we want False. ie, want to do:
        #  out = remove_sfit(raw - stray)
        #      not
        #  out = remove_sfit(raw) - remove_sfit(stray)

        if (np.size(vars) == 0):  #  "<no arguments>"
            image = image_raw
            image_processed = image
            image_stray = 0 * image

        if (np.size(vars) == 1):  #  "12" -- image number
            image_stray = hbt.nh_get_straylight_median(
                index_group, [int(vars[0])],
                do_sfit=do_sfit_stray)  # "122" -- assume current group

        if (np.size(vars) == 2):  #  "7-12" -- image range
            image_stray = hbt.nh_get_straylight_median(
                index_group,
                hbt.frange(int(vars[0]), int(vars[1])).astype('int'),
                do_sfit=do_sfit_stray)  # "122-129"
            # -- assume current group

        if (np.size(vars) == 3):  #  "7/12-20" -- group, plus image range
            image_stray = hbt.nh_get_straylight_median(
                int(vars[0]),
                hbt.frange(vars[1], vars[2]).astype('int'),
                do_sfit=do_sfit_stray)  # "5/122 - 129"

        if (np.size(vars) == 4):  #  "7/12 - 7/20"  (very wordy -- don't use this)
            image_stray = hbt.nh_get_straylight_median(
                int(vars[0]),
                hbt.frange(vars[1], vars[3]).astype('int'),
                do_sfit=do_sfit_stray)  # "6/122 - 6/129"

# Adjust the stray image to be same size as original.
# Sometimes we'll have a 4x4 we want to use as stray model for 1x1 image -- this allows that.
# When we resize it, also adjust the flux (e.g., by factor of 16).

        dx_stray = hbt.sizex(image_stray)
        dx_im = hbt.sizex(image_raw)
        ratio = dx_im / dx_stray

        if (dx_stray < dx_im):
            image_stray = scipy.ndimage.zoom(image_stray, ratio) / (
                ratio**2)  # Enlarge the stray image

        if (dx_stray > dx_im):
            image_stray = scipy.ndimage.zoom(image_stray, ratio) / (
                ratio**2)  # Shrink the stray image

#=============================================================================
# Now that we have parsed the string, do the image processing
#=============================================================================

# Load the Photoshop stray mask file, if it exists. Otherwise, make a blank mask of True.

        if file_mask_stray:

            try:
                mask_stray = imread(
                    file_mask_stray
                ) > 128  # Read file. Mask PNG file is 0-255. Convert to boolean.

                print("Reading mask file {}".format(file_mask_stray))

                if (len(np.shape(mask_stray)) > 2):  # If Photoshop saved multiple planes, then just take first
                    mask_stray = mask_stray[:, :, 0]

            except IOError:  # If mask file is missing
                print("Stray light mask file {} not found".format(
                    file_mask_stray))

        else:
            mask_stray = np.ones(np.shape(image_raw), dtype=bool)

# Load the object mask. This masks out stars and satellites, which should not have sfit applied to them.

        file_objects = os.path.basename(file_image).replace(
            '.fit', '_objects.txt')
        mask_objects = nh_jring_mask_from_objectlist(file_objects)
        mask_objects = np.logical_not(
            mask_objects)  # Make so True = good pixel

        # Merge the two masks together

        mask = np.logical_and(
            mask_objects, mask_stray)  # Output good if inputs are both good

        # Rotate the stray light image, if that has been requested
        # [this probably doesn't work, but that's fine -- I didn't end up using this.]

        image_stray = np.rot90(
            image_stray, angle_rotate_deg /
            90)  # np.rot90() takes 1, 2, 3, 4 = 90, 180, 270, 360.

        # Subract the final background image from the data image

        image_processed = image_raw - factor_stray * image_stray

        #        print("Removing bg. factor = {}, angle = {}".format(factor_stray, angle_rotate_deg))

        # Apply the mask: convert any False pixels to NaN in prep for the sfit

        image_masked = image_processed.copy()
        image_masked[mask == False] = math.nan

        frac_good = np.sum(mask) / (np.prod(np.shape(mask)))

        print("Applying mask, fraction good = {}".format(frac_good))

        # Remove a polynomial from the result. This is where the mask comes into play.
        # XXX NB: I think the logic here could be cleaned up. sfit() now allows a mask= argument ,
        #         but it must not have when I wrote this code.

        sfit_masked = hbt.sfit(image_masked, poly_after)

        image_processed = image_processed - sfit_masked

        print("Removing sfit {}".format(poly_after))

        # Plot the masks and sfits, for diagnostics

        do_plot_masks = False
        if do_plot_masks:

            plt.subplot(1, 3, 1)
            plt.imshow(stretch(mask))
            plt.title('mask')
            plt.subplot(1, 3, 2)
            plt.imshow(stretch(image_masked))
            plt.title('image_masked')
            plt.subplot(1, 3, 3)
            plt.imshow(sfit_masked)
            plt.title('sfit_masked')
            plt.show()

# =============================================================================
# END OF CASE STATEMENT FOR METHODS
# =============================================================================

# Remove a small bias offset between odd and even rows ('jailbars')
# This might be better done before the sfit(), but in reality probably doesn't make a difference.

    image_processed = hbt.lorri_destripe(image_processed)

    # If requested: plot the image, and the background that I remove.
    # Plot to Python console, not the GUI.

    # Test stretching here
    # We use astropy's stretching here, rather than matplotlib's norm= keyword. The basic idea of both of these
    # is the same, but I know that astropy has a percentile stretch available.

    DO_DIAGNOSTIC = False

    if (DO_DIAGNOSTIC):

        stretch = astropy.visualization.PercentileInterval(
            90)  # PI(90) scales array to 5th .. 95th %ile

        plt.rcParams['figure.figsize'] = 16, 6

        # Column 1: raw image

        im = image_raw

        im = hbt.remove_sfit(im, degree=5)
        plt.subplot(1, 3, 1)  # vertical, horizontal, index
        plt.imshow(stretch(hbt.remove_sfit(im, degree=5)))
        plt.title('remove_sfit(image_raw, degree=5), mean=' +
                  hbt.trunc(np.mean(im), 3))
        plt.colorbar()

        #       Column 2: Stray only. This will throw an error if we haven't read in a stray light file -- just ignore it.

        plt.subplot(1, 3, 2)
        try:
            plt.imshow(stretch(hbt.remove_sfit(
                image_stray,
                degree=5)))  # This won't do much since it is already applied
        except UnboundLocalError:
            print("No stray light to subtract")

        plt.title('remove_sfit(stray_norm, degree=5), mean=' +
                  hbt.trunc(np.mean(im), 3))

        # Column 3: raw - stray

        plt.subplot(1, 3, 3)

        try:
            im = hbt.remove_sfit(image_raw - image_stray, degree=5)
            plt.imshow(stretch(im))
        except UnboundLocalError:
            print("No stray light to subtract")

        plt.title('remove_sfit(image_raw - image_stray, degree=5), med ' +
                  hbt.trunc(np.median(im), 3))

        plt.show()

# Now return the array. If we have a mask, then we return it too, as a tuple

    if (DO_MASK):  # If we loaded a mask
        return (image_processed, mask)
    else:
        return image_processed
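
# A minimal usage sketch of the 'String' grammar documented in the docstring above, following the calling
# pattern of Code examples #5 and #18. It assumes the NH_Jring pickle table, t_group, and the hbt library are
# already set up; the group/image numbers and the mask name are illustrative, not definitive.

image_raw = hbt.read_lorri(t_group[15]['Filename'])          # Any LORRI frame from the current group
out = nh_jring_process_image(image_raw, 'String', '8-15 *2 p5 mask_7_8-15',
                             index_group=7, index_image=15)
(image_processed, mask) = out     # When a mask file is named in the string, the return value is a tuple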
Code example #11
dir_out     = '/Users/throop/data/NH_Jring/out/' # Directory for saving of parameters, backplanes, etc.

dir_masks = dir_out.replace('out', 'masks')

lun = open(dir_out + file_pickle, 'rb')
t = pickle.load(lun)
lun.close()

# Process the group names. Some of this is duplicated logic -- depends on how we want to use it.

groups = astropy.table.unique(t, keys=(['Desc']))['Desc']

stretch_percent = 90    
stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.

index_imagesets = [hbt.frange(0,7), # First set: take all images 7/0 .. 7/7. Sum them.
                   hbt.frange(8,15),
                   hbt.frange(16,23),
                   hbt.frange(24,31),
                   hbt.frange(32,35),
                   hbt.frange(36,39),
                   hbt.frange(40,42),
                   hbt.frange(52,57),
                   hbt.frange(61,63),
                   hbt.frange(91,93),
                   hbt.frange(94,96)
                   ]

index_imagesets = [hbt.frange(58,60)]
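# NB: the assignment above overrides the full list of image sets; only the 58..60 set is processed in this run.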
index_group = 7
Code example #12
    def __init__(self, master, size_window):

        self.master = master   # This is the handle to the main Tk widget. I have to use it occasionally to 
                               # set up event handlers, so grab it and save it.

        self.size_window = size_window # Save the size of the whole Tk window, in pixels.
        
        # Open the image stack
        
#        self.stretch_percent = 90    
#        self.stretch = astropy.visualization.PercentileInterval(self.stretch_percent) # PI(90) scales to 5th..95th %ile.
#        
        name_ort = 'ORT4'
#        name_ort = 'ORT2_OPNAV'
        
        if (name_ort == 'ORT1'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
            #        self.reqids_haz  = ['K1LR_HAZ03', 'K1LR_HAZ01', 'K1LR_HAZ02']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT1/throop/backplaned/'

        if (name_ort == 'ORT2'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
#            self.reqids_haz  = ['K1LR_HAZ03', 'K1LR_HAZ01', 'K1LR_HAZ02']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT2/throop/backplaned/'

        if (name_ort == 'ORT3'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT3/throop/backplaned/'

        if (name_ort == 'ORT4'):
            self.reqids_haz  = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03']
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'        
            self.dir_data    = '/Users/throop/Data/ORT4/throop/backplaned/'
            
        if (name_ort == 'ORT2_OPNAV'):
            self.dir_data    = '/Users/throop/Data/ORT2/throop/backplaned/'
            dirs = glob.glob(self.dir_data + '/*LR_OPNAV*')         # Manually construct a list of all the OPNAV dirs
            self.reqids_haz = []
            for dir_i in dirs:
                self.reqids_haz.append(os.path.basename(dir_i))
            self.reqids_haz = sorted(self.reqids_haz)    
            self.reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'    
            
        # Set the edge padding large enough s.t. all output stacks will be the same size.
        # This value is easy to compute: loop over all stacks, and take max of stack.calc_padding()[0]
        
        self.padding     = 61 # Amount to pad the images by. This is the same as the max drift btwn all images in stacks
        self.zoom        = 4  # Sub-pixel zoom to apply when shifting images. 1 for testing; 4 for production.
        self.num_image   = 0  # Which stack number to start on.
        self.zoom_screen = 1  # 'Screen zoom' amount to apply. This can be changed interactively.
        
        self.is_blink    = False  # Blinking mode is turned off by default
        self.dt_blink    = 300    # Blink time in ms
        
        # Start up SPICE if needed
        
        if (sp.ktotal('ALL') == 0):
            sp.furnsh('kernels_kem_prime.tm')
            
        # Set the RA/Dec of MU69. We could look this up from SPICE but it changes slowly, so just keep it fixed for now.
        
        self.radec_mu69 = (4.794979838984583, -0.3641418801015417)
        
        # Set the CA time. Roughly doing this is fine.
        
        self.et_ca = sp.utc2et('2019 1 Jan 05:33:00')
        
        # Boolean. For the current image, do we subtract the field frame, or not?
        
        self.do_subtract = True

        hbt.set_fontsize(20)

        # Set the stretch range, for imshow. These values are mapped to black and white, respectively.
        
        self.vmin_diff = -1   # Range for subtracted images
        self.vmax_diff =  2
        
        self.vmin_raw = -1    # Range for raw images (non-subtracted)
        self.vmax_raw = 1000
        
# Restore the stacks directly from archived pickle file, if it exists
        
        self.file_save = os.path.join(self.dir_data, 
                                      f'stacks_blink_{name_ort}_n{len(self.reqids_haz)}_z{self.zoom}.pkl')
        
        if os.path.isfile(self.file_save):
            self.restore()
        else:

# If no pickle file, load the stacks from raw images and re-align them
            
            # Load and stack the field images
    
            print("Stacking field images")        
            self.stack_field = image_stack(os.path.join(self.dir_data, self.reqid_field))    # The individual stack
            self.stack_field.align(method = 'wcs', center = self.radec_mu69)
            (self.img_field, self.wcs_field)  =\
                self.stack_field.flatten(zoom=self.zoom, padding=self.padding) # Save the stacked image and WCS
        
            # Load and stack the Hazard images
            
            self.img_haz   = {} # Output dictionary for the stacked images
            self.stack_haz = {} # Output dictionary for the stacks themselves
            self.wcs_haz   = {} # Output dictionary for WCS for the stacks
            
            for reqid in self.reqids_haz:
                self.stack_haz[reqid] = image_stack(os.path.join(self.dir_data, reqid))    # The individual stack
                self.stack_haz[reqid].align(method = 'wcs', center = self.radec_mu69)
                (self.img_haz[reqid], self.wcs_haz[reqid])  =\
                    self.stack_haz[reqid].flatten(zoom=self.zoom, padding=self.padding) 
                # Put them in a dictionary

            # Save the stacks to a pickle file, if requested
            
            yn = input("Save stacks to a pickle file? ")
            if ('y' in yn):
                self.save()
                
# Set the sizes of the plots -- e.g., (15,15) = large square
        
        figsize_image = (15,15)
        
        self.fig1 = Figure(figsize = figsize_image)    # <- this is in dx, dy... which is opposite from array order!

        self.ax1 = self.fig1.add_subplot(1,1,1, 
                                    label = 'Image') # Return the axes
        plt.set_cmap('Greys_r')
        
        self.canvas1 = FigureCanvasTkAgg(self.fig1,master=master)
        self.canvas1.show()
        
# Put objects into appropriate grid positions

        self.canvas1.get_tk_widget().grid(row=1, column=1, rowspan = 1)
        
# Define some keyboard shortcuts for the GUI
# These functions must be defined as event handlers, meaning they take two arguments (self and event), not just one.

        master.bind('q',       self.quit_e)
        master.bind('<space>', self.toggle_subtract_e)
        master.bind('=',       self.prev_e)
        master.bind('-',       self.next_e)
        master.bind('h',       self.help_e)
        master.bind('?',       self.help_e)
        master.bind('<Left>',  self.prev_e)
        master.bind('<Right>', self.next_e)
        master.bind('s',       self.stretch_e)
        master.bind('b',       self.blink_e)
        master.bind('t',       self.blink_set_time_e)
        master.bind('#',       self.blink_set_sequence_e)
        master.bind('z',       self.zoom_screen_up_e)
        master.bind('Z',       self.zoom_screen_down_e)
        master.bind('x',       self.clear_current_objects_e)
        master.bind('X',       self.clear_all_objects_e)
        
        master.bind('=',       self.scale_max_up_e)
        master.bind('+',       self.scale_max_down_e)
        master.bind('-',       self.scale_min_up_e)
        master.bind('_',       self.scale_min_down_e)
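        # NB: '=' and '-' were bound to prev_e / next_e above; Tk's bind() replaces an existing binding for
        # the same key sequence, so the scale_* handlers here are the ones that take effect.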
        
        master.bind('S',       self.save_output_e)
        
        self.canvas1.get_tk_widget().bind("<Button 1>", self.click_e)        
        
# Set the initial image index
        
        self.reqid_haz = self.reqids_haz[self.num_image]  # Set it to 'K1LR_HAZ00', for instance.

# Initialize the list of found objects for each stack
# There is a list of objects for each individual stack (ie, for each frame in the blink)

        self.list_objects = {}
        
        for reqid_i in self.reqids_haz:
            self.list_objects[reqid_i] = []  # Each entry here will be something like [(x, y, dn), (x, y, dn)] 

# Initialize a set of matplotlib 'line' objects for the image.
# These correspond to the 'objects' above, which are really just points            
            
        self.list_lines = {}

        for reqid_i in self.reqids_haz:
            self.list_lines[reqid_i] = []  # Each entry here will be a list of plot objects, of type 'line' 
                    
# Set a list of frame numbers to animate. For default, do them all.

        self.list_index_blink = hbt.frange(0, len(self.reqids_haz)-1) # List of indices ( [0, 1, 2, ...] )
        self.list_index_blink_str = ' '.join(np.array(self.list_index_blink).astype(str)) # Make into string ('1 2 3')
        self.index_blink = 0      # where in the list of indices do we start? Current index.     
        
# Plot the image
        
        self.plot()
Code example #13
import numpy as np
import matplotlib.pyplot as plt
import spiceypy as sp

import hbt

sp.furnsh('kernels_kem_ats.tm')

utc_start = "2019 1 Jan 06:40:00"
utc_end   = "2019 1 Jan 07:20:00"

et_start = sp.utc2et(utc_start)
et_end   = sp.utc2et(utc_end)

et_mid = (et_start + et_end)/2

num_dt = 500

et = hbt.frange(et_start, et_end, num_dt)

name_target = 'MU69'
name_observer = 'New Horizons'

dist   = np.zeros(num_dt)
phase  = np.zeros(num_dt)

for i,et_i in enumerate(et):
    (state,_) = sp.spkezr(name_target, et_i, 'J2000', 'LT+S', name_observer)
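    # spkezr() returns (state, light_time); state[0:3] is the position vector [km], state[3:6] the velocity [km/s].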
    dist[i] = np.sqrt(np.sum(state[0:3]**2))
    
plt.plot(et - et_mid, dist)
plt.xlabel('Seconds from c/a')
plt.ylabel('Dist [km]')
plt.show()    
Code example #14
import glob
import os

import numpy as np
import matplotlib.pyplot as plt
import spiceypy as sp

import astropy.visualization
from astropy.io import fits
from astropy.table import Table

import hbt


def nh_ort1_find_rings():
    
    plt.set_cmap('Greys_r')

    stretch_percent = 90    
    stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.

    dir = '/Users/throop/Data/ORT1/throop/backplaned'
    files = glob.glob(os.path.join(dir, '*', '*fits'))
    
    hbt.figsize((15,8))
    
    # Set up output arrays
    
    ra_arr    = []
    dec_arr   = []
    reqid_arr = []
    exptime_arr= []
    et_arr    = []
    utc_arr   = [] 

    # Start up SPICE
    
    if (sp.ktotal('ALL') == 0):
        sp.furnsh('kernels_kem.tm')
    
    for ii,file in enumerate(files):
        
        hdu = fits.open(file)
        print('Reading {}/{}: {}'.format(ii, len(files), os.path.basename(file)))
        img = hdu[0].data
        
        header = hdu[0].header
        
        ra_arr.append(header['CRVAL1'])
        dec_arr.append(header['CRVAL2'])
        exptime_arr.append(header['EXPTIME'])
        reqid_arr.append(header['REQID'])
        et_arr.append(header['SPCSCET'])
        utc_arr.append(sp.et2utc(header['SPCSCET'], 'C', 0))
        
        radius_eq = hdu['RADIUS_EQ'].data
        
        dradius = 1000
        num_bins_radius = 100
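        # Build a radial profile: median and mean DN in each bin of equatorial radius, to look for a ring
        # signature in this frame.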
        
        bins_radius = hbt.frange(0, np.amax(radius_eq), num_bins_radius)
        dn_median_arr = np.zeros(num_bins_radius)
        dn_mean_arr   = np.zeros(num_bins_radius)
        
        for i in range(num_bins_radius-1):
            is_good = np.logical_and(radius_eq > bins_radius[i], radius_eq < bins_radius[i+1])
            dn_median_arr[i] = np.nanmedian(img[is_good])
            dn_mean_arr[i]   = np.nanmean(img[is_good])

        do_plot = False

        if do_plot:
            
            plt.subplot(1,2,1)
            plt.plot(bins_radius, dn_median_arr, label = 'median')
            plt.plot(bins_radius, dn_mean_arr,   label = 'mean')
            plt.legend(loc = 'upper right')
            plt.title("{}/{}  {}".format(ii,len(files), os.path.basename(file)))
           
            
            plt.subplot(1,2,2)
            plt.imshow(stretch(img))
            plt.show()
        
        hdu.close()
        
# =============================================================================
# Read the values into NumPy arrays
# =============================================================================

    ra   = np.array(ra_arr)
    dec  = np.array(dec_arr)
    reqid = np.array(reqid_arr)
    et = np.array(et_arr)
    exptime = np.array(exptime_arr)
    utc  = np.array(utc_arr)
    
    plt.plot(ra, dec, ls='none', marker = 'o', ms=2)
    
    # Put them all into a table
    
    t = Table(          [ra, dec, et, utc, exptime, reqid], 
              names = ('RA', 'Dec', 'ET', 'UTC', 'EXPTIME', 'ReqID'))
    
    
    w_haz0 = (t['ReqID'] == 'K1LR_HAZ00')
    w_haz1 = (t['ReqID'] == 'K1LR_HAZ01')
    w_haz2 = (t['ReqID'] == 'K1LR_HAZ02')
    w_haz3 = (t['ReqID'] == 'K1LR_HAZ03')
    w_haz4 = (t['ReqID'] == 'K1LR_HAZ04')
    
    plt.plot(ra[w_haz0], dec[w_haz0], marker='o', ls='none')
    plt.plot(ra[w_haz1], dec[w_haz1], marker='o', ls='none')
    plt.plot(ra[w_haz2], dec[w_haz2], marker='o', ls='none')
    plt.plot(ra[w_haz3], dec[w_haz3], marker='o', ls='none')
    plt.plot(ra[w_haz4], dec[w_haz4], marker='o', ls='none')
    plt.show()
    
    plt.plot(et[w_haz0], marker='o', ls='none')
    plt.plot(et[w_haz1], marker='o', ls='none')
    plt.plot(et[w_haz2], marker='o', ls='none')
    plt.plot(et[w_haz3], marker='o', ls='none')
    plt.plot(et[w_haz4], marker='o', ls='none')
Code example #15
    if (do_fft): 
        raise ValueError('Sorry, do_fft not implemented')
        return -1
    
    if (do_sfit):
        return (frame_med_sfit, file_base)
    
    else:
        return (frame_med, file_base)   

# =============================================================================
# End of Function
# =============================================================================
    
if (__name__ == '__main__'):
    
    stretch = astropy.visualization.PercentileInterval(90)  # PI(90) scales array to 5th .. 95th %ile

    print("Testing...")
    
    index_group = 8
    index_files = hbt.frange(0,47)
    do_fft      = False
    power       = 5
    do_sfit     = False
    
    (arr,mask) = nh_create_straylight_median(index_group, index_files, do_fft=do_fft, do_sfit=do_sfit, power=power)
 
    plt.set_cmap('plasma')
    plt.imshow(stretch(hbt.remove_sfit(arr, degree=5)))
Code example #16
    def plot(self, scale='log', percent = 98):
        """
        Plot the grid. Do it in a good way, with labels, and with proper orientation.
        
        Optional parameters
        -----
        
        scale:
            The scaling method to use. Can be `log` or `linear`.

        percent:
            Percentile interval used for the stretch when `scale` is `linear`.
            
        """
                
        origin = 'lower'                               # Set imshow() to start w/ (0,0) at lower-left
    
        do_stretch_log = ('log' in scale)
        
        # Define the axes. DK says that the array is delivered in order [x, y, z], which are same as Mark's coord frame.
        # That means that if I sum in the '0' direction, I will have a plot in Y and Z.
        
        axes           = ['X',     'Y',     'Z']    
        num_axis       = {'X' : 0, 'Y' : 1, 'Z' : 2}
        
        # Now, make a dictionary to show us what the axes will be of the output image after doing np.sum().
        # The dictionary here means: 
        #     If we sum along the X axis (0), and we plot the result, what will be on vert axis of the imshow() plot.
        #     In this case, first remaining axis (Y) is on vertical, and second (Z) is on horizontal.
        #     That is how imshow() works.
        
        axes_vertical  = {'X':'Y', 'Y':'X', 'Z':'X'}   
        axes_horizontal= {'X':'Z', 'Y':'Z', 'Z':'Y'}
        axes_transpose = {'X':True,'Y':True,'Z':True}
        view           = {'X':'Side', 'Y':'Front', 'Z':'Top'}  # "If summed along Z axis, this is a Top view", etc.
        
        # Set the font size
        
        hbt.fontsize(10)
        fontsize_axes = 15
        width_colorbar = 10
                
        halfwidth_km = self.km_per_cell_x * hbt.sizex(self.density) / 2
        extent = [-halfwidth_km, halfwidth_km, -halfwidth_km, halfwidth_km]  # Make calibrated labels for X and Y axes
    
        i = 1  # Index of which plot we are on
        
        for axis in axes:

            plt.subplot(1,3,i)

            # Plot the individual image. Start it with origin at lower-left corner.

            img = np.sum(self.density, axis=num_axis[axis])  # Make the flattened image

            if do_stretch_log:
                img_stretch = stretch_hbt(img)
            else:
                img_stretch = astropy.visualization.PercentileInterval(percent)(img)
            
            # Create the colorbar, and superimpose it on the image
            
            colorbar = hbt.frange(np.amin(img_stretch), np.amax(img_stretch), hbt.sizey(img_stretch))
            
            # Manually draw the colorbar onto the plot
            
            if axes_transpose[axis]:
                for j in range(width_colorbar):
                    img_stretch[-j,:] = colorbar  # There is probably a better way to do this?
        
            else:
                for j in range(width_colorbar):
                    img_stretch[:,-j] = colorbar  # There is probably a better way to do this?
                
            # Display the image.
            # The image is displayed in exactly the same orientation as if I print it, with the exception
            # that the origin={lower | upper} keyword can flip it vertically.
            # When accessing the array elements, they are in order img[y, x] -- which is opposite IDL.
            
            if axes_transpose[axis]:
                plt.imshow(np.transpose(img_stretch), extent=extent, origin=origin)
            else:
                plt.imshow(             img_stretch,  extent=extent, origin=origin)
            
            # Create the labels for the colorbar, and individually place them
            
            num_ticks_colorbar = 5 # Number of tick values to put on our colorbar
            
            for j in range(num_ticks_colorbar):
                val = stretch_hbt_invert(hbt.frange(np.amin(img_stretch), np.amax(img_stretch), num_ticks_colorbar)[j])
                val = round(val)  # Convert to zero if it is very close
                xval = 0.65*np.max(extent)
                yrange = np.max(extent)-np.min(extent)
                yval = np.min(extent) + 0.02*yrange + (j/(num_ticks_colorbar-1) * 0.92*yrange)
                if do_stretch_log:
                    plt.text(xval, yval, f'{val:.1e}', color = 'white') # Label the colorbar, if log stretch only.
            
            # Label the axes and the plot
            
            if axes_transpose[axis]:
                plt.title(f'Summed along {axis} ({view[axis]})', fontsize=fontsize_axes)
                plt.xlabel(axes_vertical[axis] + ' [km]', fontsize=fontsize_axes)
                plt.ylabel(axes_horizontal[axis] + ' [km]', fontsize=fontsize_axes)
                plt.tight_layout()

            else:
                plt.title(f'Summed along {axis}', fontsize=fontsize_axes)
                plt.ylabel(axes_vertical[axis] + ' [km]', fontsize=fontsize_axes)
                plt.xlabel(axes_horizontal[axis] + ' [km]', fontsize=fontsize_axes)
                plt.tight_layout()
            
            i+=1
            
        plt.show()
Code example #17
def test_scatter_mie_ensemble():
    
    import math

    import hbt
    import numpy as np
    import astropy.units as u
    import matplotlib.pyplot as plt
    
    from scatter_mie_ensemble import scatter_mie_ensemble
    
    pi = math.pi
    
    # Set up the scattering properties
    
    n_refract = 1.33
    m_refract = -0.001
    nm_refract = complex(n_refract,m_refract)
    
    # Define the wavelength 
    
    alam = 500*u.nm
    
    # Set up the size distribution
    alam   = 500  * u.nm
    rmin   = 0.01 * u.micron
    rmax   = 50   * u.micron     
    num_r  = 50
    
    pi     = math.pi
    
    # Define the exponent of the size distribution
    
    q_1    = 5
    q_2    = 5
    r_break=0.5*u.micron  
    
    r      = hbt.frange(rmin, rmax, num_r, log=True)*u.micron  # Astropy bug? When I run frange(), it drops the units.
    n      = hbt.powerdist_broken(r, r_break, q_1, q_2)        # Power law  
    n      = r**(-5)
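    # NB: this line overrides the broken power law above; only the simple r**-5 distribution is used below.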
    
#    r_min = 0.1
#    r_max = 20
#    num_r = 30
    
#    q    = 3.5
#    r    = hbt.frange(r_min, r_max, num_r, log=True)*u.micron
#    n    = r.value**-q
#    
    # Set up the angular distribution
    
    num_ang = 91
    
    ang_phase = hbt.frange(0, pi, num_ang)*u.rad # Phase angle.
    
# =============================================================================
#   Call the function
# =============================================================================
    
    (phase, p11_out, qsca) = scatter_mie_ensemble(nm_refract, n, r, ang_phase, alam, do_plot=True)
    
    # Now make a plot pair. One plot with a phase curve, and one with an n(r) distribution.
    
    plt.subplot(1,2,1)
    plt.plot(ang_phase.to('deg'), phase)
    plt.xlabel('Phase Angle [deg]')
    plt.title('q = {}'.format((q_1, q_2)))
    plt.yscale('log')
    
    plt.subplot(1,2,2)
    plt.plot(r.to('micron'), n * r**2)
    plt.xlabel('Radius [micron]')
    plt.ylabel('Number')
    plt.yscale('log')
    plt.title('n(r) * r^2, q={}'.format((q_1, q_2)))
    plt.xscale('log')
    plt.show()
Code example #18
    lun.close()

    # Process the group names. Some of this is duplicated logic -- depends on how we want to use it.

    groups = astropy.table.unique(t, keys=(['Desc']))['Desc']

    index_group = 7
    index_images = [15, 16]

    method = 'String'
    vars = 'mask_7_8-15 p5'
    #    vars = '64-66 p10 *1.5 mask_7_61-63'

    groupmask = (t['Desc'] == groups[index_group])
    t_group = t[groupmask]
    index_images_stray = hbt.frange(8, 15)

    # Set up the stray light mask file (created w Photoshop)

    file_mask_stray = '/Users/throop/Data/NH_Jring/masks/mask_{}_{}-{}.png'.format(
        index_group, np.min(index_images_stray), np.max(index_images_stray))

    dir_backplanes = '/Users/throop/data/NH_Jring/out/'

    # Now loop over the images

    for index_image in index_images:
        file_image = t_group[index_image]['Filename']

        # Load the object mask
Code example #19
data_df99 = ascii.read(file_df99)
data_hbt = ascii.read(file_hbt)

# Clip the data so that when we have N=1, the errorbars don't go to infinity

data_hbt['Error'] = np.clip(np.sqrt(data_hbt['Number']), 0.5, 1000)
data_hbt['Error'][0] = 0.7
data_hbt['Error'][1] = 0.7

# Make a distribution n(r)

q = np.array([2, 3, 4, 5, 5, 6])

numbins = 20
r = hbt.frange(0.001, 1000, numbins, log=True)  # Go from small to big

#==============================================================================
# Make a plot showing my n(r) vs DF99
#==============================================================================

#n = r**(-q)  # This is n(r) in the bin
#ngtr = n.copy()
#for i in range(numbins):
#    ngtr[i] = np.sum(n[i:-1])       # Num > r: sum from here to big

DO_PLOT_ALL_Q = False

if (DO_PLOT_ALL_Q):
    for q_i in q:
        n = r**(-q_i)
Code example #20
    t['groupnum'] = np.zeros(len(t)).astype(int)
    t['imagenum'] = np.zeros(len(t)).astype(int)

    # Standardize the reqid field, so it is sortable
    
    t['ReqID_fixed'] = t['ReqID']
    
    for i in range(len(t)):
        t['ReqID_fixed'][i] = fix_reqid(t['ReqID'][i])
        
    # Loop over all the groups, and assign a group and image number to each file
    
    for groupnum,group in enumerate(groups_all):
        is_match = t['Desc'] == group
        t['groupnum'][is_match] = groupnum
        t['imagenum'][is_match] = hbt.frange(0,np.sum(is_match)-1).astype(int)

    # Now get a list of all reqid's
    
    reqids_all = astropy.table.unique(t, keys=(['ReqID_fixed']))['ReqID']
    
    # Now get a list of reqid's, for rings only!
    
    groups_rings = groups_all[5:]
    groupmask = np.logical_or( (t['Desc'] == groups_rings[0]),
                               (t['Desc'] == groups_rings[1]))
    groupmask = np.logical_or(groupmask, t['Desc'] == groups_rings[2])
    groupmask = np.logical_or(groupmask, t['Desc'] == groups_rings[3])
    groupmask_rings = groupmask
    
    t_rings = t[groupmask_rings]
Code example #21
im_clean = im.copy()
is_outlier = im > (np.median(im) + 3*std)
im_clean[is_outlier] = np.median(im)

# Do it again

std = np.std(im_clean)
im_clean2 = im_clean.copy()
is_outlier = im_clean > (np.median(im_clean) + 3*std)
im_clean2[is_outlier] = np.median(im_clean)

#==============================================================================
# Measure the radial profile, from Pluto
#==============================================================================

bins_radius = hbt.frange(np.amin(radius), np.amax(radius), nbins_radius) # Set up radial bins, in km

dradius = bins_radius[1] - bins_radius[0]

bin_number_2d = np.copy(im)*0

dn_mean_arr   = np.zeros(nbins_radius)
dn_median_arr = np.zeros(nbins_radius)
dn_std_arr    = np.zeros(nbins_radius)
npix_arr        = np.zeros(nbins_radius)

dn_mean_clean_arr   = np.zeros(nbins_radius)
dn_median_clean_arr = np.zeros(nbins_radius)

for i in range(nbins_radius-1):
    is_good = np.array(radius > bins_radius[i]) \
Code example #22
def nh_jring_unwrap_ring_image(im, 
                               num_bins_radius, limits_radius, 
                               binsize_azimuth, 
                               planes, dx=0, dy=0, mask_stray=None, mask_objects=None):
        
    """
    Unwrap a 2D ring image from (RA, Dec) into (radius, azimuth).
    
    Parameters:
    -----    
    
    The first five parameters are mandatory; dx, dy, mask_stray, and mask_objects are optional.
    
    im:      
        Image array
        
    num_bins_radius:
        Number of radial bins. **If this is too large, then we get a 'stretching' artifact**.
        
    limits_radius:
        Extent of the inner and outer radius, in km.
        
    binsize_azimuth:
        size of azimuthal bins, in radians.
        
    planes:  
        The table of backplanes
        
    mask_stray:    
        2D array of pixel flags. True = good. 
        
    mask_objects:
        2D array of pixel flags. True = good. 
  
    dx, dy: 
        Pixel values to roll the image by -- that is, an additional offset to be added to nav info in WCS header. 
          Both the image and the mask are rolled by this amount. Integer.
  
    Output
    ------
    (im_unwrapped, bins_radius, bins_azimuth)
  
    """  
        
    import hbt
    import numpy as np
    from   scipy.interpolate import griddata
    import math
    import matplotlib.pyplot as plt

# Process input

    if (mask_objects is None):    # NB: We need to use 'is' here. '==' will do an element-by-element comparison.
        DO_MASK_OBJECTS = False   # Alternatively, could also do 'if type(mask) is np.ndarray'
  
    else:
        DO_MASK_OBJECTS = True

    if (mask_stray is None):    # NB: We need to use 'is' here. '==' will do an element-by-element comparison.
        DO_MASK_STRAY = False   # Alternatively, could also do 'if type(mask) is np.ndarray'
  
    else:
        DO_MASK_STRAY = True
    
        
    # https://stackoverflow.com/questions/36783921/check-if-variable-is-none-or-numpy-array-in-python
    # https://stackoverflow.com/questions/15008380/double-equals-vs-is-in-python
    
#    DO_MASK = (mask != None)  # If we are passed the 'mask' value, then 
        
# Extract fields from the backplane array
    
    radius  = planes['Radius_eq']           # These are properly 4x4 or 1x1, as desired.
    azimuth = planes['Longitude_eq']
    phase   = planes['Phase']
    
#==============================================================================
# Examine backplane to figure out azimuthal limits of the ring image
#==============================================================================

    bins_radius = hbt.frange(limits_radius[0], limits_radius[1], num_bins_radius)
        
    # Select the ring points -- that is, everything inside the mask
    
    is_ring_all = ( np.array(radius > limits_radius[0]) & np.array(radius < limits_radius[1]))
    
# Make sure there are some valid data points. If not, return an error
    
    if (np.sum(is_ring_all) == 0):
        print("Error: No valid ring points between radius {:,.0f} .. {:,.0f} km".format(limits_radius[0], 
                                                                                        limits_radius[1]))
        print("Image radius limits = {:,.0f} .. {:,.0f} km".format(np.amin(radius), np.amax(radius)))
        raise ValueError('NoValidRingPoints')
        
    radius_all  = radius[is_ring_all]     # Make a list of all of the radius values
    azimuth_all = azimuth[is_ring_all]  # Make a list of all of the azimuth points for all pixels
    phase_all   = phase[is_ring_all]

    dn_all     = np.roll(np.roll(im, int(round(dy)), 0), int(round(dx)), 1)[is_ring_all]   
                                                                # Axis 0 is y. 1 is x.
                                                                # If a float is passed, round to the closest int
        
#    phase_mean  = np.mean(phase_all)     # Get the mean phase angle across the ring.
#    
    # Now take these raw data, and rearrange them so that we can take the longest continuous segment
    # We do this by appending the timeseries to itself, looking for the largest gap (of no az data), 
    # and then the data will start immediately after that.
    
    # _2 indicates a double-length array (ie, with [azimuth, azimuth + 2pi])
    # _s indicates sorted
    # _d indicates delta
    
    azimuth_all_3 = np.concatenate((azimuth_all, azimuth_all + 2*math.pi, azimuth_all + 4*math.pi))
    dn_all_3      = np.concatenate((dn_all,      dn_all,                  dn_all))
    radius_all_3  = np.concatenate((radius_all, radius_all,               radius_all))
    
    azimuth_all_3_s = np.sort(azimuth_all_3, kind = 'heapsort')
    azimuth_all_3_s_d = azimuth_all_3_s - np.roll(azimuth_all_3_s, 1)
    
    # Look for the indices where the largest gaps (in azimuth) start
    
    # XXX we get an error here on images if there are no valid ring points.
    
    index_seg_start_3_s = (np.where(azimuth_all_3_s_d > 0.999* np.max(azimuth_all_3_s_d)))[0][0]
    index_seg_end_3_s   = (np.where(azimuth_all_3_s_d > 0.999* np.max(azimuth_all_3_s_d)))[0][1]-1
    
    # Get proper azimithal limits. We want them to be a single clump of monotonic points.
    # Initial point is in [0, 2pi) and values increase from there.
                                                       
    azimuth_seg_start = azimuth_all_3_s[index_seg_start_3_s] # Azimuth value at the segment start
    azimuth_seg_end   = azimuth_all_3_s[index_seg_end_3_s]   # Azimuth value at the segment end
    
    # Quantize these values to the next-lower and next-upper bins (e.g., 0.001543 → 0.001), to use a common base.
    
    azimuth_seg_start = azimuth_seg_start - np.mod(azimuth_seg_start, binsize_azimuth) - binsize_azimuth
    azimuth_seg_end   = azimuth_seg_end   - np.mod(azimuth_seg_end,   binsize_azimuth) + binsize_azimuth

    # Calculate number of bins
    
    # NB: Must use round() not int() here to avoid 15.999999 → 15. And then int for benefit of numpy.
    
    num_bins_azimuth = int( round( (azimuth_seg_end - azimuth_seg_start) / binsize_azimuth) + 1 )
        
    indices_3_good = (azimuth_all_3 >= azimuth_seg_start) & (azimuth_all_3 < azimuth_seg_end)
    
    azimuth_all_good = azimuth_all_3[indices_3_good]
    radius_all_good  = radius_all_3[indices_3_good]
    dn_all_good      = dn_all_3[indices_3_good]
    
    # Extract arrays with the proper pixel values, and proper azimuthal values
    
    azimuth_all = azimuth_all_good
    radius_all  = radius_all_good
    dn_all      = dn_all_good
    
    if (DO_MASK_OBJECTS):
        mask_objects_roll = np.roll(np.roll(mask_objects, int(round(dy)), 0), int(round(dx)), 1)
        mask_objects_all = mask_objects_roll[is_ring_all]
        mask_objects_all_3 = np.concatenate((mask_objects_all, mask_objects_all, mask_objects_all))
        mask_objects_all_good    = mask_objects_all_3[indices_3_good]
        mask_objects_all    = mask_objects_all_good
        
    if (DO_MASK_STRAY):

        mask_stray_roll = np.roll(np.roll(mask_stray, int(round(dy)), 0), int(round(dx)), 1)     
        mask_stray_all = mask_stray_roll[is_ring_all]
        mask_stray_all_3 = np.concatenate((mask_stray_all, mask_stray_all, mask_stray_all))
        mask_stray_all_good    = mask_stray_all_3[indices_3_good]
        mask_stray_all    = mask_stray_all_good
    
#==============================================================================
#  Now regrid the data from xy position, to an unrolled map in (azimuth, radius)
#==============================================================================

# Construct the gridded image line-by-line

    dn_grid         = np.zeros((num_bins_radius, num_bins_azimuth))  # Row, column
    
    bins_azimuth    = hbt.frange(azimuth_seg_start, azimuth_seg_end, num_bins_azimuth)
    bins_radius     = hbt.frange(limits_radius[0], limits_radius[1], num_bins_radius)        

    if (DO_MASK_OBJECTS):
        mask_objects_grid     = dn_grid.copy() 
    
    if (DO_MASK_STRAY):
        mask_stray_grid       = dn_grid.copy() 
    
    for i in range(num_bins_radius-1):  # Loop over radius -- inner to outer. Do one radial output bin at a time.
        
        # Select only bins with right radius and azimuth
        
        is_ring_i = np.array(radius_all > bins_radius[i]) & np.array(radius_all < bins_radius[i+1]) & \
                    np.array(azimuth_all > azimuth_seg_start) & np.array(azimuth_all < azimuth_seg_end) 
        
        if np.sum(is_ring_i) > 0:
            dn_i         = dn_all[is_ring_i]  # Get the DN values from the image (adjusted by nav pos error)
            radius_i     = radius_all[is_ring_i]
            azimuth_i    = azimuth_all[is_ring_i]
            
            
            # Now make sure there is not a large gap in azimuth in the data. If there is, then mask it out with NaNs.
            # This happens when the ring ansae gets near the image edge. griddata() will blindly interpolate,
            # but we want to fill gap w/ NaN so that it will not do so.

            do_plug_gaps = True
            
            if do_plug_gaps:
                
                # Put the azimuth points in monotonic order. They are not otherwise (and griddata does not require).
                # But, to look for gaps, we need to sort
               
                d_azimuth_gap_max = 0.05     # Max width of a gap, in radians. Larger than this will be NaN-plugged.
                
                indices   = np.argsort(azimuth_i)
                azimuth_i_sort = azimuth_i[indices]
                dn_i_sort      = dn_i[indices]
                
                # Check the point-by-point increase in azimuth
                
                d_azimuth_i_sort = azimuth_i_sort - np.roll(azimuth_i_sort,1)   
                
                # Flag it if it exceeds some pre-set size
                
                is_gap = np.abs(d_azimuth_i_sort) > d_azimuth_gap_max
                
                # And then put NaNs at the start and end of the gap
                
                is_gap[0] = False
                if np.sum(np.where(is_gap)):
                    w = np.where(is_gap)[0][0]
                    dn_i_sort[w-1:w+2] = np.nan

                grid_lin_i   = griddata(azimuth_i_sort, dn_i_sort, bins_azimuth, method='linear')
                dn_grid[i,:] = grid_lin_i         # Write a row of the output array   
            
            # Now do the interpolation. If there are NaNs it will skip that region.
            else:
                grid_lin_i   = griddata(azimuth_i, dn_i, bins_azimuth, method='linear')
                dn_grid[i,:] = grid_lin_i         # Write a row of the output array   

            if DO_MASK_STRAY:
                mask_stray_i         = mask_stray_all[is_ring_i]  # Get the DN values from the image 
                                                                  # (adj by nav pos error)
                grid_lin_i           = griddata(azimuth_i, mask_stray_i, bins_azimuth, method='linear')
                mask_stray_grid[i,:] = grid_lin_i

            if DO_MASK_OBJECTS:
                mask_objects_i         = mask_objects_all[is_ring_i]
                grid_lin_i             = griddata(azimuth_i, mask_objects_i, bins_azimuth, method='linear')
                mask_objects_grid[i,:] = grid_lin_i

# The way I've done the regridding, each output bin goes from n:n+1. So, the final bin is empty. Just duplicate it.
# There is probably a better way to do this.
# This kind of means that my output array is shifted 1/2-bin from my input array. Ugh.
                
# Also, this routine is quite slow, and I really think it introduces some artifacts. If I was a CS major, 
# I could write a much better version of this algorithm.               
#
# Algorithm uses griddata(), which is just interpolation. So, while it does not preserve 'area*flux', it does preserve
# the actual DN values ('DN per pixel') -- and it creates new pixels, with the same DN-per-pixel values.
# So, a radial profile or azimuthal profile, taken as the mean along one axis, should work as intended, and give
# a value in DN-per-pixel (see the usage sketch just after this function).
                
    n                        = num_bins_radius
    dn_grid[n-1,:]           = dn_grid[n-2,:]
    if DO_MASK_STRAY:
        mask_stray_grid[n-1,:]   = mask_stray_grid[n-2,:]
    if DO_MASK_OBJECTS:
        mask_objects_grid[n-1,:] = mask_objects_grid[n-2,:]
                
# Save the variables, and return

    image_unwrapped    = dn_grid     # NB: This has a lot of NaNs in it.
    
    if (DO_MASK_OBJECTS and DO_MASK_STRAY):
        mask_stray_unwrapped   = mask_stray_grid
        mask_objects_unwrapped = mask_objects_grid
        
        # Convert these to booleans, and set any NaN to be False (since they are probably off-edge)
        
        
        return (image_unwrapped, mask_stray_unwrapped, mask_objects_unwrapped, bins_radius, bins_azimuth)
    
    else:
        return (image_unwrapped, bins_radius, bins_azimuth)
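# A minimal usage sketch (not part of the original routine), illustrating the DN-per-pixel point made
# in the comments above: profiles can be taken as a simple mean along one axis of the unwrapped image.
# The 'planes' backplane dict, the radial limits, and the bin sizes below are assumed inputs, not
# values taken from the original file.
#
# (im_unwrapped, bins_radius, bins_azimuth) = nh_jring_unwrap_ring_image(
#                                   im, 100, (120000, 135000), 0.001, planes)
# profile_radius  = np.nanmean(im_unwrapped, axis=1)   # DN vs. radius (mean over azimuth)
# profile_azimuth = np.nanmean(im_unwrapped, axis=0)   # DN vs. azimuth (mean over radius)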
Code example #23
if IS_4X4:
    ang_pix = ang_fov / 256     # Radians per pixel, 4X4
else:
    ang_pix = ang_fov / 1024    # Radians per pixel, 1X1    

ang_deadband = ang_deadband_deg * hbt.d2r  # Deadband size


drift_per_sec_pix = rate_drift / ang_pix

num_sum = num_steps   # correct?? XXX I had to add this line; not sure if it is right or not.

dt = length_sec / num_sum  # Timestep

t = hbt.frange(0, length_sec, num_sum)

# Need to make a function which 

ang_x = np.zeros(num_steps)     # Angle, in radians, at each timestep
ang_y = np.zeros(num_steps)
ang_rate_x = np.zeros(num_steps) # Drift rate, in radians/second, at each timestep
ang_rate_y = np.zeros(num_steps)

# Set the position and drift rate at initial timestep

ang_x[0] = 0
ang_y[0] = 0
ang_rate_x[:] = ang_rate_nominal
ang_rate_y[:] = ang_rate_nominal/2
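# What follows is a hedged sketch (my own addition, not the original author's code) of the propagation
# step that the arrays above appear to be set up for: step the pointing forward in time, and reverse
# the drift rate whenever the angle crosses the deadband edge. The bang-bang deadband logic is an
# assumption about the intended control behavior.

for i in range(num_steps - 1):
    ang_x[i+1] = ang_x[i] + ang_rate_x[i] * dt
    ang_y[i+1] = ang_y[i] + ang_rate_y[i] * dt

    if np.abs(ang_x[i+1]) > ang_deadband:   # Crossed the deadband in x: flip the x drift rate
        ang_rate_x[i+1:] = -ang_rate_x[i]
    if np.abs(ang_y[i+1]) > ang_deadband:   # Crossed the deadband in y: flip the y drift rate
        ang_rate_y[i+1:] = -ang_rate_y[i]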
Code example #24
et_end    = et_start + 30*minute

radius_image = 2500*u.km
radius_kbo   = 20*u.km 
radius_ring  = 1000*u.km

radius_ring_km = radius_ring.to('km').value
radius_ring_km = np.array([150, 500, 1250])

#==============================================================================
# Set up the times for each exposure
#==============================================================================

n_exp = n_footprints * exp_per_footprint # Total number of exposures

index_exp = hbt.frange(0,n_exp-1)        
index_footprint = np.trunc(index_exp/3).astype('int')

# Define 'et' as an array, one per exposure, of the time that exposure is taken

et = et_start + (index_exp * exptime) + (index_footprint * dt_slew)

dist_kbo = np.zeros(n_exp)

for i,et_i in enumerate(et):
    (st, junk) = sp.spkezr(name_target, et_i, 'J2000', 'LT+S', name_observer)
    dist_kbo[i] = sp.vnorm(st[0:3])   # Position is the first three elements of the state vector

dist_kbo = dist_kbo * u.km

width_fov_km = (dist_kbo * width_fov_rad).to('km').value    # Width, in km
Code example #25
    
plt.set_cmap('plasma')

rj_km = 71492

# List all of the Gossamer observations. I have determined these groupings manually, based on timings.
# In general it looks like usually these are stacks of four frames at each pointing ('footprint')

# Lauer's footprints are just numbered sequentially (f01, f02, etc). So, we need to keep these files here in 
# the same order, so as to correlate exactly w/ Lauer's.

index_group = 6

index_image_list = [
                    np.array([54]),    # 1X1. Main ring. 1.6 .. 1.9 RJ. Right ansa. One image, edge-on.
                    hbt.frange(59,62),  # Right ansa. -2.7 RJ. Gossamer maybe there but hidden -- need to optimize.
                    hbt.frange(63,66),  # Left ansa. Furthest out, no ring. Use for subtraction.
                    hbt.frange(67,70),  # Left ansa. Far out, no ring. Closer than above.
                    hbt.frange(71,74),  # Left ansa. No ring. Closer than above.
                    hbt.frange(75,78),  # Left ansa. Gossamer. Closer than above.
                    # np.array([81]),  # Left ansa. Main ring. Prob gossamer too but not interested. 1X1. Svrl ptgs.
                    hbt.frange(95,98),  # Right ansa. Gossamer limb.
                    hbt.frange(99,102), # Left ansa. Similar geometry as 75-78.
                    hbt.frange(112,115),# Right ansa. Gossamer limb. Similar geometry as 95-98 (offset pointing)
                    hbt.frange(116,119),# Left ansa. Similar geometry as 75-78. Sat visible??
                    hbt.frange(120,123),  # Left ansa. Closer than above.
                    hbt.frange(124,127),  # Left ansa. Closer than above.
                    hbt.frange(128,131),  # Left ansa + main ring ansa. Closer than above.
                    # hbt.frange(132,135),  # Main ring. Left side, well inside of gossamer.
                    
                    hbt.frange(157,160),
Code example #26
File: test_pdf.py  Project: henrythroop/NH_Jring2
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import hbt

# Make an array of images on a PDF file.
# Based on http://stackoverflow.com/questions/2252726/how-to-create-pdf-files-in-python , near the end.

pp = PdfPages("out.pdf")

fs = 5
i = 1 # Current image #
num_rows = 2
num_cols = 2

im = hbt.dist_center(1001)

for row in hbt.frange(1,num_rows, num_rows):
    for col in hbt.frange(1, num_cols, num_cols):
        
        p = plt.subplot(num_rows, num_cols,i) # Image 1
       
        plt.imshow(im)
        a = plt.gca()
        a.get_xaxis().set_visible(False) # We don't need axis ticks
        a.get_yaxis().set_visible(False)
        
#        plotImage(files[1]) 
        str = '{:d}, X={:d}, Y={:d}'.format(int(i), int(col), int(row))
        plt.text(0, -20, str, fontsize = fs)
        plt.text(0, -60, str, fontsize = fs)

        if (i == 1):
Code example #27
# Create the plot

q         = np.array([1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 7])			# 9 elements long

q         = np.array([2, 3.5, 5])			# 3 elements long
  
#p = plot([3,4], xtitle = 'Radius [$\mu$m]' ,$
#      ytitle = 'Total number N > r impacting NH during encounter',xrange=[0.1,1000d],yrange=yrange, 
# /XLOG, /YLOG, /NODATA, $
#      title = title)
   
#p_arr = replicate(p, sizex(q)) 

# Draw a dotted line at the two danger sizes that SAS specified at 3-Nov-2011 workshop

r    =  hbt.frange(rmin, rmax, num_r, log=True)*u.micron  # Astropy bug? When I run frange(), it drops the units.
qmie = np.zeros(num_r)
qsca = np.zeros(num_r)
qext = np.zeros(num_r)
qbak = np.zeros(num_r)
qabs = np.zeros(num_r)
p11_mie  = np.zeros(num_r)

bin_r_danger_1 = np.where(r > r_danger_1)[0][0]

# Calculate the transition regime

binstart_trans	= hbt.wheremin(np.abs(r - r_trans/halfwidth_trans))
  
binend_trans	    = hbt.wheremin(np.abs(r - r_trans*halfwidth_trans))
Code example #28
def nh_create_straylight_medians():

    """
    This is a wrapper routine which calls the main function. It is just for testing. 
    """
    
    plt.rc('image', cmap='Greys_r')
     
    segment = 4  # 1, 2, or 3
    
    dir_out = '/Users/throop/data/NH_Jring/out/' 
    
    fs = 15 # Set the font size
    
    if (segment == 1): # 50 frames, upright
        frames_med = np.array([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19])
        frames_med = np.array([1,2,3,4,5])
        index_files = frames_med
        index_group = 8
        
    #    num_frames_med = np.size(frames_med)     
    
        frames_data = hbt.frange(0,48,49) # Frames 0 .. 48 (49 frames)
    
    if (segment == 2): # 5 frames, tilted sideways
        frames_med = np.array([49,51,52,53,53])
        num_frames_med = np.size(frames_med) 
        frames_data = frames_med
    
    if (segment == 3): # 5 frames, tilted sideways
        frames_med = hbt.frange(54,111,58)
        num_frames_med = np.size(frames_med) 
        frames_data = frames_med

    # Get the straylight frame

    im = nh_create_straylight_median(index_group, index_files, do_fft=False, do_sfit=True, power1=5, power2=5)

    # Save the straylight frame itself
    
    outfile = dir_out + 'embedded_seg' + repr(int(segment)) + '_med' + repr(int(num_frames_med)) + '_sfit.png'
    matplotlib.image.imsave(outfile, frame_sfit_med)
    print('Wrote: ' + outfile)
    
    outfile = dir_out + 'embedded_seg' + repr(int(segment)) + '_med' + repr(int(num_frames_med)) + '_ffit.png'
    matplotlib.image.imsave(outfile, frame_ffit_med)
    print('Wrote: ' + outfile)
    
    file_out = 'med_g9_n0-49_sfit8_571_ps.pkl'
    
#for i,n in enumerate(frames_data):
#for i in range(5):

    outfile = dir_out + 'embedded_seg' + repr(int(segment)) + '_med' + repr(int(num_frames_med)) + \
        '_frame' + repr(int(i)) + '.png'    
    
    file = t_group['Filename'][n] # Look up filename
    print(file)
    print("Writing: " + outfile)
    frame = hbt.read_lorri(file,frac_clip = 1)
    im  = hbt.remove_brightest(frame - frame_sfit_med, 0.97, symmetric=True)
    im2 = hbt.remove_brightest(im    - hbt.sfit(im,power2), 0.97, symmetric=True)
    
    arr_x = eval('np.' + t_group['x_pos_ring2'][n]) # Ring2 = outer ring
    arr_y = eval('np.' + t_group['y_pos_ring2'][n])
    ymean = np.mean(arr_y)
    xmean = np.mean(arr_x)
    
    ymin  = np.min(arr_y) # top of screen -- for upward pointing ansae
    ymax  = np.max(arr_y) # Bottom of screen -- for downward pointing ansae
    xmin  = np.min(arr_x) # Left edge
    xmax  = np.max(arr_x) # Right edge
    
    if (segment == 1): #     
        roll_x = -int(xmin - 100)    # Roll to __ from left edge
        roll_y = -int(ymin - 200)    # Roll to __ from top  edge
    
    if (segment == 2):
        roll_x = -int(xmean - 400)+500
        roll_y = -int(ymean - 3900)-1050

    if (segment == 3):     
        roll_x = -int(xmean - 400)+100
        roll_y = -int(ymean - 2600)    # -3200 is toward top. -3400 is toward bottom 


    # Now read all the frames, and process them one by one.
    # Output as images which can be animated.
    
    ## *** Need to roll the image properly as per navigation!

         
    pad_dx = 400 # Amount to add in x dir, total
    pad_dy = 400
    
    im_pad = np.zeros((1024 + pad_dx, 1024 + pad_dy))-20
    im_pad[pad_dx//2:-pad_dx//2, pad_dy//2:-pad_dy//2] = im2   # Integer division, so the indices stay ints
    
    im_roll = np.roll(np.roll(im_pad, roll_x, axis=1), roll_y, axis=0)

    ax = plt.imshow(im_roll, vmin=-20, vmax=20)
    plt.title(repr(i) + ', ' + t_group['Shortname'][n])
    plt.axis('off')
    
#+ ', xmean=' + repr(int(xmean)) + 
#               ', ymean=' + repr(int(ymean)), fontsize=fs)    
    
    # Save the image
    
    matplotlib.image.imsave(outfile, im_roll)
    plt.show()

    plt.subplot(1,2,1) # Reversed! num cols, num rows ** Good reduction. Largest az extent.
    plt.imshow(frame_sfit_med)
    plt.title('frame_sfit_med, n=' + repr(num_frames_med))
    #plt.show()
    
    plt.subplot(1,2,2) # Reversed! num cols, num rows ** Good reduction. Largest az extent.
    plt.imshow(frame_ffit_med)
    plt.title('frame_ffit_med, n=' + repr(num_frames_med))
    plt.show()    
Code example #29
def get_radial_profile_backplane(im, radius_plane, method='median', num_pts = 100, do_std=False):

    """
    Extract a radial profile from an image. 
    
    Uses a backplane passed in.
    
    Parameters
    -----
    
    im:
        Array of data values (e.g., the image).
        
    radius_plane:
        2D array, which is the backplane. Typically this is planes['Radius_eq'].
    
    num_pts:
        Scalar. Number of points to use in the output array.  Output radius is evenly spaced
        from 0 .. max(radius_plane).
        
    Optional parameters
    -----    
    
    method: 
        String. 'mean' or 'median'.
        
    do_std: 
        Boolean. If set, compute the standard deviation, and return it in the tuple
        
    """
    radius_1d = hbt.frange(0, int(np.amax(radius_plane)), num_pts)
    
    profile_1d    = 0. * radius_1d.copy()
    
    std_1d        = profile_1d.copy()
        
    for i in range(len(profile_1d)-2):

    # Identify the pixels which are at the right distance
    
        is_good = np.logical_and(radius_plane >= radius_1d[i],
                                 radius_plane <= radius_1d[i+1]) 
    
        
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=RuntimeWarning)    

            if (method == 'mean'):
                profile_1d[i]   = hbt.nanmean(im[is_good])
        
            if (method == 'median'):
                profile_1d[i] = hbt.nanmedian(im[is_good])
    
            # clipped = astropy.stats.sigma_clip(im[is_good], sigma=1.5)
            
            # std_1d[i] = np.nanstd(clipped)
            
    if do_std: 
        return (radius_1d, profile_1d, std_1d)
    
    return (radius_1d, profile_1d)
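# A minimal usage sketch (assumed names only: 'im' is the image array and 'planes' is a backplane
# dict holding a 'Radius_eq' 2D array; neither is defined in this snippet):
#
# (radius_1d, profile_1d) = get_radial_profile_backplane(im, planes['Radius_eq'],
#                                                        method='median', num_pts=200)
# plt.plot(radius_1d, profile_1d)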
Code example #30
if __name__ == '__main__':

    import astropy.visualization
    
    stretch_percent = 90    
    stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.
    
    arr = hbt.nh_get_straylight_median(6,[120,121,122,123,124],do_sfit=True,  power=5)
    plt.imshow(stretch(arr))
    
    arr = hbt.nh_get_straylight_median(6,[120,121,122,123,124],do_sfit=True, power=10)
    plt.imshow(stretch(arr))

    arr = hbt.nh_get_straylight_median(6,[120,121,122,123,124],do_sfit=False, power=2)
    plt.imshow(arr)    
    
    arr = hbt.nh_get_straylight_median(6,[120,121,122,123,124],do_sfit=True, power=5)
    plt.imshow(arr)
    
    arr_bg = hbt.nh_get_straylight_median(7,[49,50,51],do_sfit=False)
    arr_image = hbt.nh_get_straylight_median(7,[52], do_sfit=False)
    
    plt.imshow(stretch(hbt.remove_sfit(arr_image - arr_bg,degree=1)))
    
    # Now try one for real, from 8/0-48 sequence.
    # There are a lot of ring artifacts left that we want to get rid of.
    
    arr = hbt.nh_get_straylight_median(8,hbt.frange(0,48), do_sfit=True, power=5)
    plt.imshow(stretch(hbt.remove_sfit(arr,degree=1)))
    
Code example #31
#==============================================================================
# Cross-correlate these signals and shift them radially in order to align them using radial profiles
#==============================================================================

limit_shift = np.array([127000,130000])
limit_shift_bin = hbt.x2bin(limit_shift, radius[0,:])

shift = np.zeros((numfiles)).astype(int)
profile_radius_dn_roll = profile_radius_dn.copy()

dx_max = 20  # look at all the possible shifts, from -dx_max to +dx_max, which is 2*dx_max+1 different integers

correl = np.zeros((numfiles, dx_max*2 + 1)) 

for i in range(numfiles):
    for j,dx in enumerate(hbt.frange(-dx_max, dx_max).astype(int)):
        correl[i,j] = np.correlate(profile_radius_dn[0,limit_shift_bin[0]:limit_shift_bin[1]], \
                           np.roll(profile_radius_dn[i,limit_shift_bin[0]:limit_shift_bin[1]],dx))
        if np.isnan(correl[i,j]): # Some of the radial profiles have nans in them. For these, just skip the correlation 
            correl[i,j] = 1

    shift[i] = (hbt.wheremax(correl[i,:])) - dx_max
    profile_radius_dn_roll[i,:] = np.roll(profile_radius_dn[i,:],shift[i])

# Shift each image vertically

for i in range(numfiles):
    bin_inner_vnorm = hbt.x2bin(131000, radius[i,:])
    bin_outer_vnorm = hbt.x2bin(133000, radius[i,:])
    profile_radius_dn_roll[i,:] -= np.mean(profile_radius_dn_roll[i,bin_inner_vnorm:bin_outer_vnorm])
Code example #32
d_target_summed = np.zeros((5,541))
d_summed        = np.zeros((32,1024))

for i,file in enumerate(file_list):
    
    hdulist = fits.open(file)
    d = hdulist['PRIMARY'].data # Units of this are float, but I'm not sure what they are. I would prefer raw counts.
    d_target = d[13:18, 370:911]  # Keep in mind that 13:18 really means 13:17...
    d_target_summed += d_target
    d_summed += d
    p = hdulist['PIXEL_LIST_TABLE'].data
    count_rate_fits_i = hdulist['COUNT_RATE'].data
    num_samples = hdulist['COUNT_RATE'].header['NAXIS1'] # Number of samples in this file
    dt          = hdulist['COUNT_RATE'].header['SAMPLINT']  # Count rate sampling interval [sec]
    
    bins = hbt.frange(0, num_samples)   # Get a list of all of the timestep bin edges for this file.
                                        # frange() includes the endpoint, giving num_samples+1 edges --
                                        # i.e., the upper edge of the final histogram bin is created.
    
    # Now downselect the pixel list for just the photons in the proper X and Y position on the detector
    
    is_good = (p['Y_INDEX'] < 18) & (p['Y_INDEX'] >= 15) & (p['X_INDEX'] >= 370) & (p['X_INDEX'] < 910)

    # Now we have a list of all of the good pixels. For each of these, now we want to grab its timestep.

    timesteps_good = p['TIMESTEP'][is_good]
    timesteps_all  = p['TIMESTEP']

# Now count how many photons are in each timestep bin. I have defined those timestep bins up above.

    (count_rate_target_i, junk) = np.histogram(timesteps_good, bins)
    (count_rate_i, junk)        = np.histogram(timesteps_all,  bins) 
Code example #33
File: ccldas_df99.py  Project: henrythroop/python_hbt
data_df99 = ascii.read(file_df99)
data_hbt  = ascii.read(file_hbt)

# Clip the data so that when we have N=1, the errorbars don't go to infinity

data_hbt['Error'] = np.clip(np.sqrt(data_hbt['Number']), 0.5, 1000)
data_hbt['Error'][0] = 0.7
data_hbt['Error'][1] = 0.7        

# Make a distribution n(r)

q = np.array([2, 3, 4, 5, 5, 6])

numbins = 20
r = hbt.frange(0.001, 1000, numbins, log=True)  # Go from small to big

#==============================================================================
# Make a plot showing my n(r) vs DF99
#==============================================================================

#n = r**(-q)  # This is n(r) in the bin
#ngtr = n.copy()
#for i in range(numbins):
#    ngtr[i] = np.sum(n[i:-1])       # Num > r: sum from here to big

DO_PLOT_ALL_Q = False

if (DO_PLOT_ALL_Q):
    for q_i in q:
        n = r**(-q_i)
Code example #34
def get_radial_profile_backplane_quadrant(im, radius_plane, longitude_plane, method='median', num_pts = 100, do_std=False):

    """
    Extract a radial profile from an image. 
    
    Uses a backplane passed in.
    
    Returns four values, corresponding to the four quadrants.
    
    Parameters
    -----
    
    im:
        Array of data values (e.g., the image).
        
    radius_plane:
        2D array, which is the backplane. Typically this is planes['Radius_eq'].
    
    num_pts:
        Scalar. Number of points to use in the output array.  Output radius is evenly spaced
        from 0 .. max(radius_plane).
        
    Optional parameters
    -----    
    
    method: 
        String. 'mean' or 'median'.
        
    do_std: 
        Boolean. If set, compute the standard deviation, and return it in the tuple
        
    """
    
    radius_1d = hbt.frange(0, int(np.amax(radius_plane)), num_pts)
    
    profile_1d    = np.zeros((4, len(radius_1d)))
    
    std_1d        = profile_1d.copy()
    
    quadrant_plane = np.ceil((np.pi + longitude_plane) * 4 / (np.pi * 2)).astype(int) - 1   # Range = 0 .. 3
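    # (This assumes longitude is in radians in (-pi, pi]: adding pi shifts it to (0, 2*pi], and the
    #  ceil() division into four equal slices then gives quadrant indices 0 (for -pi..-pi/2) through
    #  3 (for pi/2..pi).)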
    
    for i in range(len(radius_1d)-2):
        
        for j in range(4):    

        # Identify the pixels which are at the right distance
        
            is_good = np.logical_and(np.logical_and(radius_plane >= radius_1d[i],
                                     radius_plane <= radius_1d[i+1],),
                                     quadrant_plane == j)
        
            # print(f'Summing {np.sum(is_good)} cells')
            
            if (method == 'mean'):
                profile_1d[j,i]   = hbt.nanmean(im[is_good])
        
            if (method == 'median'):
                profile_1d[j,i] = hbt.nanmedian(im[is_good])
    
            # clipped = astropy.stats.sigma_clip(im[is_good], sigma=1.5)
            
            # std_1d[j,i] = np.nanstd(clipped)
        
    if do_std: 
        return (radius_1d, profile_1d, std_1d)
    
    return (radius_1d, profile_1d)
Code example #35
Created on Wed Mar 22 13:22:54 2017

@author: throop
"""


import numpy as np
import matplotlib.pyplot as plt

import hbt
import astropy.units as u

#%%

hbt.figsize((4,5))
x = hbt.frange(0, 10,100)
y = np.sin(x)

fig, ax = plt.subplots()

xval = 50000*u.km

# Bug: clipping is not applied properly here!
# The xval is converted to a .value when plotted, as expected.
# But if it has a unit with it, then the clip_on keyword is ignored.
# This causes the plot to be huge, and (I guess) never appears on the screen.

###
#set_clip_on(b)
#Set whether artist uses clipping.
#