def inbetween(settings, m, savefile, d=0.5, f=0.5,
              Ka=None, Kb=None, nodes=None, radii=None,
              com_a=None, com_b=None, V=None, G=None, backfade=False):
    """
    Produce an inbetween frame, either by warping or blobbing.

    Usage
    -----
    >>> inbetween(settings, m, savefile, d, f)

    Returns
    -------
    File name of midpoint warp image in temporary folder.

    Notes
    -----
    Node trajectories and blob sizes (when not warping) must be precomputed.
    See *trajectory*.
    """

    # Key frame indices and output files
    a, b = morph_key_indices(settings, m)
    folder_m = path.join(settings['temppath'], 'm{0:03d}'.format(m + 1))
    folder_a = path.join(settings['temppath'], 'k{0:03d}'.format(a + 1))
    folder_b = path.join(settings['temppath'], 'k{0:03d}'.format(b + 1))

    # Load images and trajectories
    if Ka is None:
        Ka = algo.load_rgba(settings['keyframes'][a])
    if Kb is None:
        Kb = algo.load_rgba(settings['keyframes'][b])
    if nodes is None:
        nodes = loadit(path.join(folder_m, 'nodes.csv'))

    # Center of mass is needed when we want arcs
    if settings['traject']['arc'][m]:
        if com_a is None:
            com_a = loadit(path.join(folder_a, 'com.json'))
        if com_b is None:
            com_b = loadit(path.join(folder_b, 'com.json'))
        if not settings['traject']['spin'][m]:
            com_a['a'], com_b['a'] = 0, 0
    else:
        com_a, com_b = None, None

    # To blob or not to blob?
    hardness = settings['render']['blobhardness']
    if settings['motion']['blob'][m] and radii is None:
        radii = loadit(path.join(folder_m, 'radii.csv'))

    # Time to do the deed
    T = algo.tween(Ka, Kb, nodes, d=d, f=f, V=V, G=G,
                   com_a=com_a, com_b=com_b,
                   radii=radii, hardness=hardness, backfade=backfade)

    # Take the money and run
    fullfile = path.join(folder_m, savefile)
    qual = settings['render']['quality']
    algo.save_rgba(T, fullfile, qual)

    return fullfile
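# Usage sketch (not part of the pipeline): how inbetween() is typically driven.
# This assumes the settings dictionary follows the default_settings() layout
# referenced elsewhere in this module; the module alias and key frame paths
# below are placeholders, not actual project files.
#
#   import muddymorph_go as gogo
#
#   settings = gogo.default_settings()
#   settings['keyframes'] = ['key_a.png', 'key_b.png']
#
#   # Node trajectories (and blob radii, when blobbing) must be precomputed.
#   gogo.trajectory(settings, m=0)
#
#   # Render the halfway frame; d is the motion fraction, f the fade fraction.
#   gogo.inbetween(settings, 0, 'halfway.png', d=0.5, f=0.5)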
##############
# Just Do It #
##############

# Make an announcement
print("")
print("MuddyMorph ProxiMatch Test")
print("==========================")
print("")
print("Image A\t{}".format(path.basename(key_a)))
print("Image B\t{}".format(path.basename(key_b)))
print("")

#%% Load images
print("Loading images ... ", end="")
Ka = algo.load_rgba(key_a)
Kb = algo.load_rgba(key_b)
print("done")

#%% Key point extraction
print("Extracting key points ... ", end="")
stopwatch = -time()
kp_a, dc_a = algo.cornercatch(Ka, algorithm=algorithm, target=target,
                              channel=channel, blur=blur, cutoff=cutoff,
                              orb_threshold=orb_threshold)
kp_b, dc_b = algo.cornercatch(Kb, algorithm=algorithm, target=target,
                              channel=channel, blur=blur, cutoff=cutoff,
                              orb_threshold=orb_threshold)
import matplotlib.pyplot as plt

# Home grown
import muddymorph_algo as algo


###########
# Try-out #
###########

# Make an announcement
print("")
print("MuddyMorph Local Contrast Proto")
print("===============================")
print("")

B = algo.load_rgba(image)
D, _, _ = algo.edgy(B, channel=channel)

stopwatch = -time()
C = algo.loco(D, algorithm=algorithm, cutoff=cutoff, detail=detail)
stopwatch += time()


##########
# Review #
##########

print("Image  \t{}".format(path.basename(image)))
print("Channel\t{}".format(channel))
print("Cutoff \t{}".format(cutoff))
print("Detail \t{}".format(detail))
print("Min    \t{0:.3f}".format(C.min()))
se = settings['edge']


##################
# Test test test #
##################

# Make an announcement
print("")
print("MuddyMorph Warp Proto 2")
print("=======================")
print("")

# Load data
print("Loading images ... ", end="")
Ka = algo.load_rgba(settings['keyframes'][0])
Kb = algo.load_rgba(settings['keyframes'][1])
h, w = Ka.shape[:2]
print("done")

# Edge detection
print("Edge detection ... ", end="")
Da, Sa, Ea = algo.edgy(Ka, channel=se['channel'],
                       threshold=se['threshold'],
                       blur=se['blur'], dolines=se['dolines'])
Db, Sb, Eb = algo.edgy(Kb, channel=se['channel'],
                       threshold=se['threshold'],
                       blur=se['blur'], dolines=se['dolines'])
# Keypoint detection
target        = 2000
cutoff        = 0.01
algorithm     = 'orb'   # ORB / CENSURE
orb_threshold = 0.08
censure_mode  = 'STAR'  # DoB / Octagon / STAR

# Dependencies
from time import time
import matplotlib.pyplot as plt
import muddymorph_algo as algo

# Load images
print("Loading images ... ", end="")
K = algo.load_rgba(imagefile)
print("done")

# Keypoint extraction
# Tweak these parameters for a few test cases,
# and decide which ones should become knobs in the user interface.
print("Keypoint extraction ... ", end="")
stopwatch = -time()
kp, descriptors = algo.cornercatch(K, target=target, algorithm=algorithm,
                                   channel=channel, blur=blur, cutoff=cutoff,
                                   orb_threshold=orb_threshold,
                                   censure_mode=censure_mode)
print('Looking for images ...', end='')
files_in = glob(path.join(folder_in, filepattern))
print('found {}\n'.format(len(files_in)))

# Process them pretty pictures, yee-haw!
progress  = 1
stopwatch = -time()
for file_in in files_in:

    # Spread the word
    name = path.basename(file_in)
    msg  = 'Processing [{progress}/{n}] {name} ... '
    print(msg.format(progress=progress, n=len(files_in), name=name), end='')

    # Load that image
    Mi = algo.load_rgba(file_in)

    # Crop away stripy artefacts
    if sum(sidecrop):
        Mi = Mi[:, sidecrop[0]:-sidecrop[1]]

    # Border color
    boco = algo.bordercolor(Mi)
    boco = np.append(boco, 255)
    faco = boco if fadecolor is None else fadecolor
    faco = [f / 255. for f in faco]

    # Rescale to the specified height
    w, h = size_out
    if Mi.shape[0] == h:
        Mm = Mi
def trajectory(settings, m, recycle=False, thread=None,
               X=None, Y=None, Ka=None, Kb=None, Ea=None, Eb=None,
               com_a=None, com_b=None, kp_a=None, kp_b=None,
               dc_a=None, dc_b=None):
    """
    Figure out node trajectories for the given morph sequence *m*,
    based on silhouette map and corner descriptors.

    Two temporary analysis files are generated:

    - nodes.csv  node trajectory coordinates.
    - move.png   node trajectory chart.

    Usage
    -----
    >>> nodes, Ka, Kb, com_a, com_b = trajectory(settings, m, recycle, thread)

    Parameters
    ----------
    recycle : bool, optional
        If set to True and if output exists from a previous run,
        then that will be recycled.
    thread : object, optional
        Send status reports back through this channel,
        presumably a PyQt QThread activated by the graphical user interface.
        This can be any object though, as long as it contains:

        - *abort*. A boolean status flag (True/False) that signals whether
          the user has had enough, and pressed a cancel button or such.
        - *report*. A progress report signal. Must have a method *emit*
          that accepts strings (which will be either an image file name
          or a one-line status report).

    Returns
    -------
    None in case of user abort,
    node trajectory coordinate array otherwise.

    Notes
    -----
    If silhouette map and corner keypoint coordinates are not available,
    then *silhouette* and *cornercatch* will be called to create these.
    """

    # Tell everyone about the fantastic voyage we are about to embark upon
    if count_morphs(settings) > 1:
        label = ' for morph {}'.format(m + 1)
    else:
        label = ''
    shoutout(msg='Detecting trajectories' + label, thread=thread)

    # Start the timer
    stopwatch = -time()

    # Key frame indices and output files
    a, b = morph_key_indices(settings, m)
    folder_m = path.join(settings['temppath'], 'm{0:03d}'.format(m + 1))
    folder_a = path.join(settings['temppath'], 'k{0:03d}'.format(a + 1))
    folder_b = path.join(settings['temppath'], 'k{0:03d}'.format(b + 1))
    f1 = path.join(folder_m, 'move.png')
    f2 = path.join(folder_m, 'nodes.csv')

    # Assemble the settings
    s            = settings['traject']
    docorners    = s['corners'   ][m]
    dosilhouette = s['silhouette'][m]
    arc          = s['arc'       ][m]
    spin         = s['spin'      ][m] and arc
    similim      = s['similim'   ][m] * 1e-2
    maxmove      = s['maxmove'   ][m] * 1e-2
    maxpoints    = s['maxpoints' ][m]
    neighbours   = s['neighbours'][m]

    # Detect silhouette of key frame A
    if Ka is None or Ea is None or com_a is None:
        msg = 'Extracting silhouette for key {}'.format(a + 1)
        shoutout(msg=msg, thread=thread)
        result = silhouette(settings, a, K=Ka, X=X, Y=Y, recycle=True)
        fsa, Ka, Ea, com_a = result
        shoutout(img=fsa, thread=thread)
        if thread and thread.abort:
            return

    # Detect silhouette of key frame B
    if Kb is None or Eb is None or com_b is None:
        msg = 'Extracting silhouette for key {}'.format(b + 1)
        shoutout(msg, thread=thread)
        result = silhouette(settings, b, K=Kb, X=X, Y=Y, recycle=True)
        fsb, Kb, Eb, com_b = result
        shoutout(img=fsb, thread=thread)
        if thread and thread.abort:
            return

    # Catch corners
    if docorners:
        shoutout('Catching corners for key {}'.format(a + 1), thread=thread)
        fca, kp_a, dc_a = cornercatch(settings, a, K=Ka, recycle=True)
        shoutout(img=fca, thread=thread)
        if thread and thread.abort:
            return

        shoutout('Catching corners for key {}'.format(b + 1), thread=thread)
        fcb, kp_b, dc_b = cornercatch(settings, b, K=Kb, recycle=True)
        shoutout(img=fcb, thread=thread)
        if thread and thread.abort:
            return

    # Nothing can beat the need for shear speed
    if recycle and path.isfile(f1) \
               and path.isfile(f2):
        shoutout(img=f1, thread=thread)
        nodes = loadit(f2)
        return nodes, Ka, Kb, com_a, com_b

    # Convert detail zone units from promille to pixels
    # FIXME: Remove this after testing new traject detail setting
    #se     = settings['edge']
    #detail = 0.5 * se['detail'][a] * 1e-3 + \
    #         0.5 * se['detail'][b] * 1e-3
    detail   = settings['traject']['detail'][m] * 1e-3
    simisize = max(int(np.ceil(max(Ka.shape[:2]) * detail)) + 1, 4)

    # Show the nitty gritty details
    print(timestamp() + 'Similim  = {} %'   .format(s['similim'][m]))
    print(timestamp() + 'Detail   = {0:.3f}'.format(detail))
    print(timestamp() + 'Simisize = {} px'  .format(simisize))

    # Start with the foundation;
    # the four screen corners and center of mass
    if dosilhouette:
        if Ea is None:
            Ea = loadit(path.join(folder_a, 'edgy.png'))
        if Eb is None:
            Eb = loadit(path.join(folder_b, 'edgy.png'))
        if com_a is None:
            com_a = loadit(path.join(folder_a, 'com.json'))
        if com_b is None:
            com_b = loadit(path.join(folder_b, 'com.json'))

        nodes0 = algo.seed(*Ka.shape[:2], com_a, com_b)

        if not spin:
            com_a['a'], com_b['a'] = 0, 0
    else:
        nodes0 = algo.seed(*Ka.shape[:2])
        com_a  = dict(x=0, y=0, r=0, a=0.0)
        com_b  = dict(x=0, y=0, r=0, a=0.0)

    # Use CoM as repellant for edge nodes
    base = nodes0[4:]

    if thread and thread.abort:
        return

    # Match corners
    if docorners:
        shoutout('Matching corners' + label, thread=thread)
        if Ka is None:
            Ka = algo.load_rgba(settings['keyframes'][a])
        if Kb is None:
            Kb = algo.load_rgba(settings['keyframes'][b])

        catcher = settings['edge']['cornercatcher']
        catch_a = path.join(folder_a, catcher[a].lower())
        catch_b = path.join(folder_b, catcher[b].lower())

        if kp_a is None:
            kp_a = loadit(catch_a + '.csv')
        if kp_b is None:
            kp_b = loadit(catch_b + '.csv')
        if dc_a is None:
            dc_a = loadit(catch_a + '.png')
        if dc_b is None:
            dc_b = loadit(catch_b + '.png')

        nodes1, simi1 = algo.matchpoint(Ka, Kb, kp_a, kp_b, dc_a, dc_b,
                                        simisize=simisize, similim=similim)

        base = np.row_stack((base, nodes1))

    if thread and thread.abort:
        return

    # Extract and match silhouette key points
    if dosilhouette:
        shoutout('Matching silhouettes' + label, thread=thread)

        spawnpoints = min(1000, *settings['traject']['maxpoints'])
        sp_a = algo.spawn(Ea, base[:, [0, 1]], spawnpoints, r_min=simisize)
        sp_b = algo.spawn(Eb, base[:, [2, 3]], spawnpoints, r_min=simisize)

        n_half = int(spawnpoints / 2)
        nodes2, simi2 = algo.proximatch(Ka, Kb, Ea, sp_a, sp_b, com_a, com_b,
                                        neighbours=neighbours, n=n_half,
                                        simisize=simisize, similim=similim)
        nodes3, simi3 = algo.proximatch(Kb, Ka, Eb, sp_b, sp_a, com_b, com_a,
                                        neighbours=neighbours, n=n_half,
                                        simisize=simisize, similim=similim)

        try:
            nodes4 = np.row_stack((nodes2, nodes3[:, [2, 3, 0, 1]]))
            simi4  = np.append(simi2, simi3)
        except IndexError:
            nodes4, simi4 = nodes2, simi2

    if thread and thread.abort:
        return

    # Combine the results. One big happy family!
    if dosilhouette and docorners:
        nodez = np.row_stack((nodes1, nodes4))
        simiz = np.append(simi1, simi4)
    elif dosilhouette:
        nodez, simiz = nodes4, simi4
    elif docorners:
        nodez, simiz = nodes1, simi1
    else:
        nodez = []

    # Combine duplicates
    if len(nodez):
        shoutout('Combining duplicate trajectories' + label, thread=thread)
        nodez, simiz = algo.gettogether(nodez, simiz, simisize)

    # Discard excessive moves
    if len(nodez):
        shoutout('Discarding excessive moves' + label, thread=thread)
        diago = np.ceil(np.sqrt(Ka.shape[0] ** 2 +
                                Ka.shape[1] ** 2))
        lim   = int(maxmove * diago)
        keep  = algo.notsofast(nodez, lim, com_a, com_b)
        nodez = nodez[keep]
        simiz = simiz[keep]

        # Are we doing sensible things in this joint?
        print(timestamp() + 'Max move = {} px'.format(lim))

    if thread and thread.abort:
        return

    # In case of crossing paths discard the longest trajectory
    if len(nodez):
        shoutout('Discarding crossing paths' + label, thread=thread)
        keep   = np.zeros_like(nodez, dtype=bool)
        repeat = 1
        while np.any(~keep) and repeat <= 10:
            if thread and thread.abort:
                return
            keep   = algo.straightenup(nodez)
            nodez  = nodez[keep]
            simiz  = simiz[keep]
            repeat += 1

    # Cherry pick nodes with the highest similarity score
    if len(nodez) > maxpoints:
        shoutout('Cherry picking' + label, thread=thread)
        seq   = np.argsort(simiz)[::-1]
        nodez = nodez[seq][:maxpoints]
        simiz = simiz[seq][:maxpoints]

    # Pack it all together into one cozy bundle
    if len(nodes0) and len(nodez):
        nodes = np.row_stack((nodes0, nodez))
    elif len(nodes0):
        nodes = nodes0
    else:
        nodes = nodez

    # Save the harvest
    saveit(nodes, f2)
    if thread and thread.abort:
        return

    # Fade to gray baby
    shoutout('Making trajectory chart' + label, thread=thread)
    channel_a = settings['edge']['channel'][a]
    channel_b = settings['edge']['channel'][b]
    if channel_a.lower().startswith('a'):
        channel_a = 'lightness'
    if channel_b.lower().startswith('a'):
        channel_b = 'lightness'
    Ga = algo.desaturate(Ka, channel_a)
    Gb = algo.desaturate(Kb, channel_b)

    # Produce a tingly trajectory chart
    fig = algo.big_figure('MuddyMorph - Trajectories', *Ga.shape)
    if arc:
        comp_a, comp_b = com_a, com_b
    else:
        comp_a, comp_b = None, None
    try:
        tweens = settings['motion']['inbetweens'][m]
    except IndexError:
        tweens = algo.most_frequent_value(settings['motion']['inbetweens'])
    algo.movemap(Ga, Gb, nodes, comp_a, comp_b, tweens=tweens)
    plt.axis('off')
    plt.savefig(f1, **chartopts)
    plt.close(fig)

    # Our work here is done
    stopwatch += time()
    msg = 'Trajectory extraction took ' + duration(stopwatch)
    shoutout(msg, f1, thread)

    return nodes, Ka, Kb, com_a, com_b
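# Usage sketch: a stand-alone trajectory run without a GUI thread. The settings
# layout is assumed to follow default_settings(); file names and the temp path
# are placeholders.
#
#   settings = default_settings()
#   settings['keyframes'] = ['key_a.png', 'key_b.png']
#   settings['temppath']  = '/tmp/muddymorph'
#
#   result = trajectory(settings, m=0, recycle=False, thread=None)
#   if result is not None:
#       nodes, Ka, Kb, com_a, com_b = result
#       print('Found {} node trajectories'.format(len(nodes)))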
def cornercatch(settings, k, recycle=True, K=None):
    """
    Detect corner key points for key frame *k*.

    These analysis files are generated:

    - In case of ORB; orb.csv, orb.png, orb_p.png.
    - In case of CENSURE; censure.csv, censure.png, censure_p.png.

    The full file name of the diagnostics diagram is returned for previewing.

    Usage
    -----
    >>> f, kp, dc = cornercatch(settings, k)

    Returns
    -------
    1. Filename of diagnostics chart
    2. Key point coordinates (None in case of file recycle).
    3. Key point binary descriptors (None in case of file recycle).
    """
    print(timestamp() + 'Collecting corners for key {}'.format(k + 1))

    # Algorithm flavour and save file base
    catcher = settings['edge']['cornercatcher'][k]
    folder  = path.join(settings['temppath'], 'k{0:03d}'.format(k + 1))
    base    = path.join(folder, catcher.lower())
    f1      = base + '_p.png'
    f2      = base + '.csv'
    f3      = base + '.png'

    # Do we need to do anything at all?
    if recycle and path.isfile(f1) \
               and path.isfile(f2) \
               and path.isfile(f3):
        return f1, None, None

    # Collect the other parameters
    blur        = settings['edge']['blur'][k]
    spawnpoints = min(1000, *settings['traject']['maxpoints'])
    channel     = settings['edge']['channel'][k]
    if channel.lower().startswith('a'):
        channel = 'lightness'

    # Say it like it is
    msg = '{} corner extraction for key {}'
    print(timestamp() + msg.format(catcher, k + 1))

    # Load bitmap
    if K is None:
        K = algo.load_rgba(settings['keyframes'][k])

    # Do dat ting
    kp, dc = algo.cornercatch(K, channel=channel, algorithm=catcher,
                              target=spawnpoints, blur=blur)

    # Save the harvest
    saveit(kp, base + '.csv')
    saveit(dc, base + '.png')

    # Produce a simple diagnostics chart (just a bunch of orange dots)
    G   = algo.desaturate(K, channel, blur=blur)
    fig = algo.big_figure('MuddyMorph - Corner key points', *G.shape)
    plt.imshow(G, cmap=plt.cm.gray, vmin=0, vmax=1)
    plt.plot(kp[:, 0], kp[:, 1], '.', markersize=7, color=(1., .5, 0.))
    plt.axis('image')
    plt.axis('off')
    plt.savefig(f1, **chartopts)
    plt.close(fig)

    return f1, kp, dc
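# Usage sketch: preview the detected corners for the first key frame. The
# diagnostics chart path comes straight from the return value; the settings
# layout is assumed as above.
#
#   f, kp, dc = cornercatch(settings, k=0, recycle=False)
#   print('Diagnostics chart saved to ' + f)
#   if kp is not None:
#       print('{} key points found'.format(len(kp)))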
def silhouette(settings, k, recycle=False, K=None, X=None, Y=None,
               showsil=True, showedge=True, showcom=True):
    """
    Perform silhouette extraction and contour detection for frame *k*.

    These analysis files are generated:

    - silly.png  silhouette shape binary map.
    - edgy.png   silhouette edges binary map.
    - com.json   silhouette center of mass properties.
    - shape.png  silhouette detection diagram (returned for preview).

    Usage
    -----
    >>> f, K, E, com = silhouette(settings, k)

    Returns
    -------
    1. Filename of diagnostics chart
    2. Key frame bitmap
    3. Edge map
    4. Center of mass properties
    """

    # This is where the action is
    folder = path.join(settings['temppath'], 'k{0:03d}'.format(k + 1))
    f      = path.join(folder, 'shape.png')

    # Fetch ingredients
    se = settings['edge']
    sr = settings['render']
    bc = None if sr['autoback'] else sr['backcolor']
    if K is None:
        K = algo.load_rgba(settings['keyframes'][k])

    # Do we need to do anything at all?
    if recycle and path.isfile(f) and \
       path.isfile(path.join(folder, 'com.json' )) and \
       path.isfile(path.join(folder, 'silly.png')) and \
       path.isfile(path.join(folder, 'edgy.png' )):
        return f, K, None, None

    # Make mesh grid
    if X is None or Y is None:
        X, Y = algo.grid(K)

    # Extract silhouette
    D, S, E = algo.edgy(K,
                        backcolor = bc,
                        linecolor = sr['linecolor'],
                        dolines   = sr['lineart'  ],
                        threshold = se['threshold'][k] * 0.01,
                        channel   = se['channel'  ][k],
                        doscharr  = se['scharr'   ][k],
                        blur      = se['blur'     ][k],
                        invert    = se['invert'   ][k])

    # Center of mass measurement
    com = algo.commie(S, X=X, Y=Y, verbose=False)

    # Save the harvest
    saveit(com, path.join(folder, 'com.json' ))
    saveit(S  , path.join(folder, 'silly.png'))
    saveit(E  , path.join(folder, 'edgy.png' ))

    # Combine all results into one classy chart
    Sp   = S   if showsil  else None
    Ep   = E   if showedge else None
    comp = com if showcom  else None
    fig  = algo.big_figure('MuddyMorph - Silhouette Chart', *E.shape)
    algo.edgeplot(D, Sp, Ep, comp, X=X, Y=Y)
    plt.axis('off')
    plt.savefig(f, **chartopts)
    plt.close(fig)

    return f, K, E, com
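# Usage sketch: run silhouette extraction for one key frame and inspect the
# center of mass. The com dictionary keys ('x', 'y', 'r', 'a') are assumed
# from how com is used elsewhere in this module (e.g. com_a['a'] in
# trajectory()).
#
#   f, K, E, com = silhouette(settings, k=0, recycle=False)
#   print('Silhouette chart: ' + f)
#   print('Center of mass at ({x}, {y}), angle {a}'.format(**com))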
def motion(settings, m, recycle_nodes=True, recycle_frames=True, thread=None,
           Ka=None, Kb=None, nodes=None, radii=None,
           com_a=None, com_b=None, X=None, Y=None, V=None, G=None):
    """
    Generate a series of bitmaps that together form a morph sequence.

    Usage
    -----
    >>> movie = motion(settings, m)

    Parameters
    ----------
    See *default_settings* for info on settings,
    and *trajectory* for details regarding thread.

    Notes
    -----
    - Start and stop key frames are included.
      This way it is easy to generate a preview per sequence.
    - Missing analysis data will be generated by invoking *trajectory*.
    - For the generation of single inbetween frames see *inbetween*.

    Returns
    -------
    A list of saved bitmaps.
    """

    # And so it begins
    movie = []
    n_m   = count_morphs(settings)
    msg   = 'Making motion for morph {}'.format(m + 1)
    shoutout(msg=msg, thread=thread)
    stopwatch = -time()

    # Prepare for battle
    frames = np.arange(settings['motion']['inbetweens'][m] + 2)
    t      = 1. * frames / max(frames)
    d      = algo.motion_profile(t, settings['motion']['profile'][m])
    f      = algo.fade_profile(t, d, settings['motion']['fade'][m] * 0.01)
    a, b   = morph_key_indices(settings, m)
    comfiles = path.join(settings['temppath'], 'k{0:03d}', 'com.json')
    folder   = path.join(settings['temppath'], 'm{0:03d}'.format(m + 1))

    # Fetch the basic ingredients for inbetweening
    if Ka is None:
        Ka = algo.load_rgba(settings['keyframes'][a])
    if Kb is None:
        Kb = algo.load_rgba(settings['keyframes'][b])
    if X is None:
        X, Y = algo.grid(Ka)
    if G is None:
        G = background(settings, Ka)
    if V is None:
        V = vinny(settings, Ka)
    if thread and thread.abort:
        return

    # Load precomputed goodies if we are allowed to
    if recycle_nodes:
        file_nodes = path.join(folder, 'nodes.csv')
        file_com_a = comfiles.format(a + 1)
        file_com_b = comfiles.format(b + 1)
        if nodes is None and path.isfile(file_nodes):
            nodes = loadit(file_nodes)
        if com_a is None and path.isfile(file_com_a):
            com_a = loadit(file_com_a)
        if com_b is None and path.isfile(file_com_b):
            com_b = loadit(file_com_b)
    if thread and thread.abort:
        return

    # Is stuff still missing? Then go and compute
    if nodes is None or com_a is None or com_b is None:
        result = trajectory(settings, m, recycle_nodes, thread,
                            X=X, Y=Y, Ka=Ka, Kb=Kb,
                            com_a=com_a, com_b=com_b)
        if result is None or (thread and thread.abort):
            return
        nodes, _, _, com_a, com_b = result

    # Blob sizes
    if radii is None:
        radii = blobbify(settings, nodes, com_a, com_b, m, *Ka.shape[:2])

    # If both key frames are opaque, then so should be all inbetweens
    backfade = algo.is_opaque(Ka) and algo.is_opaque(Kb)

    # Hop through the frames
    for i in frames:
        if thread and thread.abort:
            return

        basename = 'f{0:03d}.{1}'.format(i + 1, settings['render']['ext'])
        savefile = path.join(folder, basename)

        # Remember frame for playback or export
        movie.append(savefile)

        # Can we recycle existing material?
        if recycle_frames and path.isfile(savefile):
            continue

        # Time for a short newsflash
        if n_m > 1:
            msg = 'Generating morph {} frame {}'.format(m + 1, i + 1)
        else:
            msg = 'Generating frame {}'.format(i + 1)
        shoutout(msg, thread=thread)

        # Copy or generate the file we need
        if t[i] == 0 and can_copy_key(settings, a):
            msg  = 'Copying ' + semi_short_file_name(settings['keyframes'][a])
            msg += ' to '     + semi_short_file_name(savefile)
            print(timestamp() + msg)
            copyfile(settings['keyframes'][a], savefile)

        elif t[i] == 1 and can_copy_key(settings, b):
            msg  = 'Copying ' + semi_short_file_name(settings['keyframes'][b])
            msg += ' to '     + semi_short_file_name(savefile)
            print(timestamp() + msg)
            copyfile(settings['keyframes'][b], savefile)

        else:
            msg = 'Generating {0} with t={1:.0f}%, d={2:.0f}%, f={3:.0f}%'
            msg = msg.format(semi_short_file_name(savefile),
                             t[i] * 100, d[i] * 100, f[i] * 100)
            print(timestamp() + msg)

            inbetween(settings, m, savefile, d=d[i], f=f[i],
                      Ka=Ka, Kb=Kb, nodes=nodes, radii=radii,
                      com_a=com_a, com_b=com_b, V=V, G=G,
                      backfade=backfade)

        # Show the frame
        shoutout(img=savefile, thread=thread)

    # Peace out
    stopwatch += time()
    msg = 'Inbetweening took ' + duration(stopwatch)
    shoutout(msg, thread=thread)

    return movie
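# Usage sketch: render a full morph sequence and hand the frame list to a
# player or exporter. Recycling flags skip work that already sits on disk.
# Settings layout and placeholders as assumed in the sketches above.
#
#   movie = motion(settings, m=0, recycle_nodes=True, recycle_frames=False)
#   if movie is not None:
#       print('Rendered {} frames, first one is {}'.format(len(movie), movie[0]))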
##################
# Test test test #
##################

# Make an announcement
print("")
print("MuddyMorph Spawn Test")
print("=====================")
print("")

# Load data
print("Loading image ... ", end="")
K = algo.load_rgba(settings['keyframes'][0])
h, w = K.shape[:2]
print("done")

# Edge detection
print("Edge detection ... ", end="")
D, S, E = algo.edgy(K,
                    channel   = se['channel'  ],
                    threshold = se['threshold'],
                    blur      = se['blur'     ],
                    dolines   = se['dolines'  ])
print("done")

# CoM detection
# Seed base nodes; argument order follows algo.seed(height, width, com_a, com_b)
# as used in trajectory()
com  = algo.commie(S)
base = algo.seed(*S.shape, com, com)[:, :2]