Example No. 1
def loco(D, algorithm='muddy', detail=12, cutoff=0.04):
    """
    Estimate local contrast for the given bitmap. Zero indicates a total
    absence of local intensity variation, and one indicates maximum contrast.
    In other words, this sounds like a job for a traditional edge detector.

    Parameters
    ----------
    D : greyscale image array
        Desaturated difference with the background.
    algorithm : str, optional
        The method to use. Choose between muddy (home grown, see notes below)
        or one of Scharr / Sobel / Prewitt / Roberts (the classics from
        scikit-image).
    detail : int or float, optional
        A measure for the evaluation radius, in pixels.
        Set this to the typical line width or edge gradient width.
        Only relevant for the muddy algorithm.
    cutoff : float, optional
        Equalization clipping limit, somewhere in the range 0 - 1.
        Note that a value of 0.1 is already quite aggressive.

    Notes
    -----
    This is supposed to yield a map that approximates the intensity difference
    between nearby light and dark zones for all points. Points with high local
    contrast are eligible for serving as morph nodes. Our way of working here:

        1. Normalize contrast through adaptive histogram equalization
        2. Apply the selected algorithm. In case of muddy:
            a) Subtract a Gaussian blur
            b) Take the absolute value of this difference.
    """

    G = desaturate(D, cutoff=cutoff)

    algorithm = algorithm.lower().strip()

    if algorithm == 'muddy':
        F = gaussian_filter(G, sigma=detail)
        C = abs(G - F)

    elif algorithm == 'scharr':
        C = scharr(G)
    elif algorithm == 'sobel':
        C = sobel(G)
    elif algorithm == 'prewitt':
        C = prewitt(G)
    elif algorithm == 'roberts':
        C = roberts(G)
    else:
        # Guard against typos; otherwise C would be undefined below
        raise ValueError("Unknown algorithm '{}'".format(algorithm))

    return C
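
# Hedged sketch of the 'muddy' recipe from the notes above, kept standalone:
# blur the (already equalized) greyscale image and take the absolute
# difference. The toy array, helper name, and sigma value are illustrative
# assumptions, not part of the project API.
import numpy as np
from scipy.ndimage import gaussian_filter

def muddy_sketch(G, detail=12):
    """Local contrast approximated as |G - blur(G)|."""
    F = gaussian_filter(G, sigma=detail)
    return np.abs(G - F)

# Toy input: a flat canvas with a bright square; the contrast map peaks
# along the square's edges and stays near zero elsewhere.
G_toy = np.zeros((64, 64))
G_toy[24:40, 24:40] = 1.0
C_toy = muddy_sketch(G_toy, detail=4)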
Example No. 2
        free[scrap] = False

nono = noodle[keep, :]
stopwatch += time()
print("{0} nodes remain after {1:.3f}s".format(len(nono), stopwatch))

##########
# Review #
##########

#%% Trajectory chart
plt.close('all')
fig = plt.figure(figsize=(12, 8))

# Greyscale backdrop
Ga = algo.desaturate(Ka, blur=blur, cutoff=cutoff)
Gb = algo.desaturate(Kb, blur=blur, cutoff=cutoff)
G = 0.25 * (1 - Ga) + 0.25 * (1 - Gb) + 0.5
plt.imshow(G, cmap=plt.cm.gray, vmin=0, vmax=1)

# Plot paths as straight lines
for i in range(len(nono)):
    plt.plot(nono[i, [0, 2]], nono[i, [1, 3]], 'k-')

# Show start points as red dots, and stop points as blue dots
plt.plot(nono[:, 0], nono[:, 1], 'r.')
plt.plot(nono[:, 2], nono[:, 3], 'b.')

# Bring it on
plt.axis('image')
plt.title('Node paths - From red to blue', fontweight='bold')
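
# Hedged toy illustration of the assumed node array layout used above: each
# row of nono holds (x_start, y_start, x_stop, y_stop), so columns [0, 2] are
# a path's x coordinates and [1, 3] its y coordinates. Drawn in a separate
# throwaway figure; the rows below are illustrative stand-ins for real output.
import numpy as np
nono_toy = np.array([[10., 10., 40., 30.],   # a node moving down and right
                     [60., 20., 55., 50.]])  # a node moving down and left
plt.figure()
for row in nono_toy:
    plt.plot(row[[0, 2]], row[[1, 3]], 'k-')
plt.plot(nono_toy[:, 0], nono_toy[:, 1], 'r.')
plt.plot(nono_toy[:, 2], nono_toy[:, 3], 'b.')
plt.axis('image')
plt.title('Toy node paths (illustration only)')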
Example No. 3
# Keypoint extraction
# Tweak these parameters for a few test cases,
# and decide which ones should become knobs in the user interface.
print("Keypoint extraction ... ", end="")
stopwatch = -time()
kp, descriptors = algo.cornercatch(K,
                                   target=target,
                                   algorithm=algorithm,
                                   channel=channel,
                                   blur=blur,
                                   cutoff=cutoff,
                                   orb_threshold=orb_threshold,
                                   censure_mode=censure_mode)
stopwatch += time()
print("{0} points found in {1:.2f}s".format(len(kp), stopwatch))

# Review the result
plt.close('all')
G = algo.desaturate(K, channel)
h, w = G.shape
fig = algo.big_figure('MuddyMorph - CornerCatch Proto', w * 1.5, h)
plt.subplot(1, 2, 1)
plt.imshow(K)
plt.title('Image', fontweight='bold')
plt.subplot(1, 2, 2)
plt.imshow(G, cmap=plt.cm.gray)
plt.plot(kp[:, 0], kp[:, 1], '.', color='orange')
plt.axis('image')
plt.title('Keypoints', fontweight='bold')
fig.tight_layout()
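
# Hedged sketch of the "tweak these parameters" suggestion above: rerun the
# same extraction with a few candidate orb_threshold values and compare the
# keypoint counts before deciding which settings deserve user-facing knobs.
# The candidate values are illustrative assumptions; all other variables are
# reused from the script above.
for thr in (0.02, 0.05, 0.08):
    kp_t, _ = algo.cornercatch(K, target=target, algorithm=algorithm,
                               channel=channel, blur=blur, cutoff=cutoff,
                               orb_threshold=thr, censure_mode=censure_mode)
    print("orb_threshold={0}: {1} keypoints".format(thr, len(kp_t)))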
Example No. 4
def trajectory(settings, m, recycle=False, thread=None, X=None, Y=None,
               Ka=None, Kb=None, Ea=None, Eb=None, com_a=None, com_b=None,
               kp_a=None, kp_b=None, dc_a=None, dc_b=None):
    """
    Figure out node trajectories for the given morph sequence *m*,
    based on silhouette map and corner descriptors.
    
    Two temporary analysis files are generated:
         - nodes.csv : node trajectory coordinates.
         - move.png  : node trajectory chart.
    
    Usage
    -----
    >>> nodes, Ka, Kb, com_a, com_b = trajectory(settings, m, recycle, thread)
    
    Parameters
    ----------
    recycle : bool, optional
        If set to True and if output exists from a previous run,
        then that will be recycled.
    thread : object, optional
        Send status reports back through this channel,
        presumably a PyQt QThread activated by the graphical user interface.
        This can be any object though, as long as it contains:
            - *abort*. A boolean status flag (True/False) that signals whether
              the user has had enough and pressed a cancel button or such.
            - *report*. A progress report signal.
              Must have a method *emit* that accepts strings
              (which will be either an image file name or a one-line status
              report).
    
    Returns
    -------
    None in case of user abort; otherwise the tuple
    (nodes, Ka, Kb, com_a, com_b), where *nodes* is the node trajectory
    coordinate array.
    
    Notes
    -----
    If silhouette map and corner keypoint coordinates are not available,
    then *silhouette* and *cornercatch* will be called to create these.
    """
    
    # Tell everyone about the fantastic voyage we are about to embark upon
    if count_morphs(settings) > 1:
        label = ' for morph {}'.format(m + 1)
    else:
        label = ''
    shoutout(msg='Detecting trajectories' + label, thread=thread)
    
    # Start the timer
    stopwatch = -time()

    # Key frame indices and output files
    a, b     = morph_key_indices(settings, m)
    folder_m = path.join(settings['temppath'], 'm{0:03d}'.format(m + 1))
    folder_a = path.join(settings['temppath'], 'k{0:03d}'.format(a + 1))
    folder_b = path.join(settings['temppath'], 'k{0:03d}'.format(b + 1))
    f1       = path.join(folder_m, 'move.png')
    f2       = path.join(folder_m, 'nodes.csv')
    
    # Assemble the settings
    s = settings['traject']
    docorners    = s['corners'   ][m]
    dosilhouette = s['silhouette'][m]
    arc          = s['arc'       ][m]
    spin         = s['spin'      ][m] and arc
    similim      = s['similim'   ][m] * 1e-2
    maxmove      = s['maxmove'   ][m] * 1e-2
    maxpoints    = s['maxpoints' ][m]
    neighbours   = s['neighbours'][m]
    
    # Detect silhouette of key frame A
    if Ka is None or Ea is None or com_a is None:
        msg = 'Extracting silhouette for key {}'.format(a + 1)
        shoutout(msg=msg, thread=thread)
        result = silhouette(settings, a, K=Ka, X=X, Y=Y, recycle=True)
        fsa, Ka, Ea, com_a = result
        shoutout(img=fsa, thread=thread)
        if thread and thread.abort: return
    
    # Detect silhouette of key frame B
    if Kb is None or Eb is None or com_b is None:
        msg = 'Extracting silhouette for key {}'.format(b + 1)
        shoutout(msg, thread=thread)
        result = silhouette(settings, b, K=Kb, X=X, Y=Y, recycle=True)
        fsb, Kb, Eb, com_b = result
        shoutout(img=fsb, thread=thread)
        if thread and thread.abort: return
    
    # Catch corners
    if docorners:
        shoutout('Catching corners for key {}'.format(a + 1), thread=thread)
        fca, kp_a, dc_a = cornercatch(settings, a, K=Ka, recycle=True)
        shoutout(img=fca, thread=thread)
        if thread and thread.abort: return

        shoutout('Catching corners for key {}'.format(b + 1), thread=thread)
        fcb, kp_b, dc_b = cornercatch(settings, b, K=Kb, recycle=True)
        shoutout(img=fcb, thread=thread)
        if thread and thread.abort: return
    
    # Nothing can beat the need for sheer speed
    if recycle and path.isfile(f1) \
               and path.isfile(f2):
        shoutout(img=f1, thread=thread)
        nodes = loadit(f2)
        return nodes, Ka, Kb, com_a, com_b
    
    # Convert detail zone units from per mille to pixels
    # FIXME: Remove this after testing new traject detail setting
    #se       = settings['edge'  ]
    #detail   = 0.5 * se['detail'][a] * 1e-3 + \
    #           0.5 * se['detail'][b] * 1e-3
    detail = settings['traject']['detail'][m] * 1e-3
    simisize = max(int(np.ceil(max(Ka.shape[:2]) * detail)) + 1, 4)
    
    # Show the nitty gritty details
    print(timestamp() + 'Similim  = {} %'   .format(s['similim'][m]))
    print(timestamp() + 'Detail   = {0:.3f}'.format(detail))
    print(timestamp() + 'Simisize = {} px'  .format(simisize))

    # Start with the foundation;
    # The four screen corners and center of mass
    if dosilhouette:
        if Ea is None: Ea = loadit(path.join(folder_a, 'edgy.png'))
        if Eb is None: Eb = loadit(path.join(folder_b, 'edgy.png'))
        
        if com_a is None: com_a = loadit(path.join(folder_a, 'com.json'))
        if com_b is None: com_b = loadit(path.join(folder_b, 'com.json'))
        
        nodes0 = algo.seed(*Ka.shape[:2], com_a, com_b)
        if not spin: com_a['a'], com_b['a'] = 0, 0
        
    else:
        nodes0 = algo.seed(*Ka.shape[:2])
        com_a  = dict(x=0, y=0, r=0, a=0.0)
        com_b  = dict(x=0, y=0, r=0, a=0.0)

    # Use CoM as repellant for edge nodes
    base = nodes0[4:]
    if thread and thread.abort: return
    
    # Match corners
    if docorners:
        shoutout('Matching corners' + label, thread=thread)
        if Ka is None: Ka = algo.load_rgba(settings['keyframes'][a])
        if Kb is None: Kb = algo.load_rgba(settings['keyframes'][b])
        
        catcher = settings['edge']['cornercatcher']
        catch_a = path.join(folder_a, catcher[a].lower())
        catch_b = path.join(folder_b, catcher[b].lower())
        
        if kp_a is None: kp_a = loadit(catch_a + '.csv')
        if kp_b is None: kp_b = loadit(catch_b + '.csv')
        if dc_a is None: dc_a = loadit(catch_a + '.png')
        if dc_b is None: dc_b = loadit(catch_b + '.png')
        
        nodes1, simi1 = algo.matchpoint(Ka, Kb, kp_a, kp_b, dc_a, dc_b,
                                        simisize=simisize, similim=similim)
        
        base = np.row_stack((base, nodes1))
        if thread and thread.abort: return
    
    # Extract and match silhouette key points
    if dosilhouette:
        shoutout('Matching silhouettes' + label, thread=thread)
        spawnpoints = min(1000, *settings['traject']['maxpoints'])
        
        sp_a = algo.spawn(Ea, base[:, [0, 1]], spawnpoints, r_min=simisize)
        sp_b = algo.spawn(Eb, base[:, [2, 3]], spawnpoints, r_min=simisize)
        n_half = int(spawnpoints / 2)
        
        nodes2, simi2 = algo.proximatch(Ka, Kb, Ea, sp_a, sp_b, com_a, com_b,
                                        neighbours=neighbours, n=n_half,
                                        simisize=simisize, similim=similim)
        
        nodes3, simi3 = algo.proximatch(Kb, Ka, Eb, sp_b, sp_a, com_b, com_a,
                                        neighbours=neighbours, n=n_half,
                                        simisize=simisize, similim=similim)
        
        try:
            nodes4 = np.row_stack((nodes2, nodes3[:, [2, 3, 0, 1]]))
            simi4 = np.append(simi2, simi3)
        except IndexError:
            nodes4, simi4 = nodes2, simi2
        if thread and thread.abort: return
    
    # Combine the results. One big happy family!
    if dosilhouette and docorners:
        nodez = np.row_stack((nodes1, nodes4))
        simiz = np.append(simi1, simi4)
    elif dosilhouette:
        nodez, simiz = nodes4, simi4
    elif docorners:
        nodez, simiz = nodes1, simi1
    else:
        nodez = []
    
    # Combine duplicates
    if len(nodez):
        shoutout('Combining duplicate trajectories' + label, thread=thread)
        nodez, simiz = algo.gettogether(nodez, simiz, simisize)
    
    # Discard excessive moves
    if len(nodez):
        shoutout('Discarding excessive moves' + label, thread=thread)
        diago = np.ceil(np.sqrt(Ka.shape[0] ** 2 + \
                                Ka.shape[1] ** 2))
        
        lim   = int(maxmove * diago)        
        keep  = algo.notsofast(nodez, lim, com_a, com_b)
        nodez = nodez[keep]
        simiz = simiz[keep]
        
        # Are we doing sensible things in this joint?
        print(timestamp() + 'Max move = {} px'.format(lim))
        if thread and thread.abort: return
    
    # In case of crossing paths discard the longest trajectory
    if len(nodez):
        shoutout('Discarding crossing paths' + label, thread=thread)
        keep = np.zeros_like(nodez, dtype=bool)
        repeat = 1
        while np.any(~keep) and repeat <= 10:
            if thread and thread.abort: return
            keep    = algo.straightenup(nodez)
            nodez   = nodez[keep]
            simiz   = simiz[keep]
            repeat += 1
    
    # Cherry pick nodes with the highest similarity score
    if len(nodez) > maxpoints:
        shoutout('Cherry picking' + label, thread=thread)
        seq   = np.argsort(simiz)[::-1]
        nodez = nodez[seq][:maxpoints]
        simiz = simiz[seq][:maxpoints]
    
    # Pack it all together into one cozy bundle
    if len(nodes0) and len(nodez):
        nodes = np.row_stack((nodes0, nodez))
    elif len(nodes0):
        nodes = nodes0
    else:
        nodes = nodez
    
    # Save the harvest
    saveit(nodes, f2)
    if thread and thread.abort: return
    
    # Fade to gray baby
    shoutout('Making trajectory chart' + label, thread=thread)
    channel_a = settings['edge']['channel'][a]
    channel_b = settings['edge']['channel'][b]
    if channel_a.lower().startswith('a'): channel_a = 'lightness'
    if channel_b.lower().startswith('a'): channel_b = 'lightness'
    Ga = algo.desaturate(Ka, channel_a)
    Gb = algo.desaturate(Kb, channel_b)
    
    # Produce a tingly trajectory chart       
    fig = algo.big_figure('MuddyMorph - Trajectories', *Ga.shape)
    if arc:
        comp_a, comp_b = com_a, com_b
    else:
        comp_a, comp_b = None, None
    try:
        tweens = settings['motion']['inbetweens'][m]
    except IndexError:
        tweens = algo.most_frequent_value(settings['motion']['inbetweens'])
    algo.movemap(Ga, Gb, nodes, comp_a, comp_b, tweens=tweens)
    plt.axis('off')
    plt.savefig(f1, **chartopts)
    plt.close(fig)
    
    # Our work here is done
    stopwatch += time()
    msg = 'Trajectory extraction took ' + duration(stopwatch)
    shoutout(msg, f1, thread)
    return nodes, Ka, Kb, com_a, com_b
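
# Hedged sketch of the minimal thread-like object described in the docstring
# of trajectory() above: anything exposing an *abort* flag and a *report*
# signal with an emit() method will do. The class below is an illustrative
# stand-in for the GUI's QThread, not part of the project API.
class ConsoleThread(object):
    """Print progress reports to the console and never abort."""

    class _Signal(object):
        def emit(self, text):
            # Receives either an image file name or a one-line status report
            print(text)

    def __init__(self):
        self.abort = False            # set True to request cancellation
        self.report = self._Signal()

# Usage, assuming settings and a morph index m are prepared as usual:
# result = trajectory(settings, m, thread=ConsoleThread())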
Example No. 5
def cornercatch(settings, k, recycle=True, K=None):
    """
    Detect corner key points for key frame *k*.
    These analysis files are generated:
         - In case of ORB: orb.csv, orb.png, orb_p.png.
         - In case of CENSURE: censure.csv, censure.png, censure_p.png.
    
    The full file name of the diagnostics diagram is returned for previewing.
    
    Usage
    -----
    >>> f, kp, dc = cornercatch(settings, k)
    
    Returns
    -------
    1. Filename of diagnostics chart
    2. Key point coordinates (None in case of file recycle).
    3. Key point binary descriptors (None in case of file recycle).
    """
    print(timestamp() + 'Collecting corners for key {}'.format(k + 1))
    
    # Algorithm flavour and save file base
    catcher = settings['edge']['cornercatcher'][k]
    folder  = path.join(settings['temppath'], 'k{0:03d}'.format(k + 1))
    base    = path.join(folder, catcher.lower())
    f1      = base + '_p.png'
    f2      = base + '.csv'
    f3      = base + '.png'
    
    # Do we need to do anything at all?
    if recycle and path.isfile(f1) \
               and path.isfile(f2) \
               and path.isfile(f3): return f1, None, None
    
    # Collect the other parameters
    blur        = settings['edge']['blur'][k]
    spawnpoints = min(1000, *settings['traject']['maxpoints'])
    channel     = settings['edge']['channel'][k]
    if channel.lower().startswith('a'): channel = 'lightness'
    
    # Say it like it is
    msg = '{} corner extraction for key {}'
    print(timestamp() + msg.format(catcher, k + 1))

    # Load bitmap
    if K is None: K = algo.load_rgba(settings['keyframes'][k])
    
    # Do dat ting
    kp, dc = algo.cornercatch(K, channel=channel, algorithm=catcher,
                              target=spawnpoints, blur=blur)
    
    # Save the harvest
    saveit(kp, base + '.csv')
    saveit(dc, base + '.png')
    
    # Produce a simple diagnostics chart (just a bunch of orange dots)
    G   = algo.desaturate(K, channel, blur=blur)
    fig = algo.big_figure('MuddyMorph - Corner key points', *G.shape)
    
    plt.imshow(G, cmap=plt.cm.gray, vmin=0, vmax=1)
    plt.plot(kp[:, 0], kp[:, 1], '.', markersize=7, color=(1., .5, 0.))
    plt.axis('image')
    plt.axis('off')
    plt.savefig(f1, **chartopts)
    plt.close(fig)
    
    return f1, kp, dc
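
# Hedged convenience sketch around the recycle behaviour documented above:
# when the analysis files already exist, cornercatch() returns None for the
# keypoints and descriptors, so reload them from the cached files (mirroring
# what trajectory() does via loadit). The helper name is an illustrative
# assumption, not part of the project API.
def cornercatch_cached(settings, k):
    f, kp, dc = cornercatch(settings, k, recycle=True)
    if kp is None or dc is None:
        base = f[:-len('_p.png')]     # f is '<base>_p.png'
        kp = loadit(base + '.csv')
        dc = loadit(base + '.png')
    return f, kp, dc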
Example No. 6
    W[: , -1] = 1
    W[0 , : ] = 1
    W[-1, : ] = 1
    
    backcolor = []
    for c in range(3):
        Bsub = np.squeeze(B[:, :, c])[W]
        backcolor.append(int(255 * np.median(Bsub)))
    backcolor = tuple(backcolor)

# Difference with the background
D = B.copy()
if channel != 'alpha':
    for c in range(3):
        D[:, :, c] = abs(D[:, :, c] - backcolor[c] / 255.)
D = algo.desaturate(D, channel)

# Remove lines from silhouette
if dolines:
    channel2 = channel if channel != 'alpha' else 'rms'
    D2 = B.copy()
    for c in range(3):
        D2[:, :, c] = abs(D2[:, :, c] - linecolor[c] / 255.)
    D2     = algo.desaturate(D2, channel2)
    low    = D2 < D
    D[low] = D2[low]

if invert: D = 1 - D

if blur > 0: D = ndimage.gaussian_filter(D, sigma=blur)
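
# Hedged toy illustration of the background-difference step above: per colour
# channel, take the absolute difference between the bitmap (0-1 floats) and
# the estimated background colour (0-255 integers scaled down). The toy array
# and colour below are illustrative assumptions.
import numpy as np

B_toy = np.random.rand(8, 8, 3)          # stand-in for bitmap B
backcolor_toy = (250, 248, 252)          # e.g. a near-white paper background
D_toy = B_toy.copy()
for c in range(3):
    D_toy[:, :, c] = np.abs(D_toy[:, :, c] - backcolor_toy[c] / 255.)
# D_toy is large wherever the drawing deviates from the background colour.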