def vs_display_flow(r_flow, c_flow):
    """Display reference and comparison optical-flow sequences in one grid.

    Each flow field is color-coded via ``flow_to_image`` and the two
    sequences are shown as two rows (reference on top, comparison below).

    Args:
        r_flow: reference flow sequence (torch tensor or numpy array;
            the first dimension is the sequence length).
        c_flow: comparison flow sequence, same length as ``r_flow``.

    Raises:
        ValueError: if the two sequences have different lengths.
    """
    # Use len() so the check works for BOTH torch tensors and numpy
    # arrays — the previous `.size(0)` raised TypeError on numpy input
    # (ndarray.size is an int attribute, not a method), even though the
    # isinstance branches below explicitly accept numpy arrays.
    if len(r_flow) != len(c_flow):  # sanity check
        raise ValueError("Unequal flow lengths!")
    if not isinstance(r_flow, np.ndarray):
        r_flow = r_flow.numpy()
    if not isinstance(c_flow, np.ndarray):
        c_flow = c_flow.numpy()
    # color-code each flow field as an RGB image tensor
    r_flow = [torch.from_numpy(flow_to_image(flow)) for flow in r_flow]
    c_flow = [torch.from_numpy(flow_to_image(flow)) for flow in c_flow]
    r_flow = torch.stack(r_flow, dim=0)
    c_flow = torch.stack(c_flow, dim=0)
    # display frames: reference row on top, comparison row below
    flow = torch.cat([r_flow, c_flow], dim=0)
    flow = make_grid(flow, nrow=r_flow.size(0), padding=1)
    img_t.imshow(flow)
    return
def display_frames(frames):
    """Show a clip of frames side by side as a single grid image."""
    if isinstance(frames, np.ndarray):
        # numpy frames arrive channel-last (NHWC); torch utilities
        # expect channel-first (NCHW)
        frames = torch.from_numpy(frames).permute(0, 3, 1, 2)
    # lay all frames out in one row, separated by white padding
    n_frames = frames.size(0)
    grid = make_grid(frames, nrow=n_frames, padding=2, pad_value=255)
    img_t.imshow(grid)
    return
def vs_display_frames(r_frames, c_frames):
    """Display a reference clip and a comparison clip together.

    Raises:
        ValueError: if the two clips have different lengths.
    """
    if r_frames.size(0) != c_frames.size(0):  # sanity check
        raise ValueError("Unequal clip lengths!")
    # display frames: concatenate the clips along the batch dimension
    combined = torch.cat([r_frames, c_frames], dim=0)
    img_t.imshow(combined)
    return
def display_flow(flows):
    """Color-code a sequence of flow fields and show them in one row."""
    if not isinstance(flows, np.ndarray):
        flows = flows.numpy()
    # color-code each flow field, then stack into one batch tensor
    imgs = torch.stack(
        [torch.from_numpy(flow_to_image(f)) for f in flows], dim=0
    )
    # plot all frames consecutively in a single row
    grid = make_grid(imgs, nrow=imgs.size(0), padding=1)
    img_t.imshow(grid)
    return
def imshow_callback(ref, comp, i):
    """Show frame ``i`` of the reference and comparison clips side by side."""
    pair = make_grid([ref[i], comp[i]], nrow=2, padding=1)
    img_t.imshow(pair)
    return
def imshow_callback(f, i):
    """Save frame ``i`` of clip ``f`` to disk, then display it."""
    # NOTE(review): this redefines the 3-argument imshow_callback above
    # (different arity) — at import time the earlier definition is
    # shadowed; consider renaming one of them.
    filename = "i_" + str(i) + ".png"
    img_t.imsave(f[i], filename)
    img_t.imshow(f[i])
    return