def vis_example2d(
    example,
    num_frames: int = 20,
    fps: int = 4,
    reverse_xy: bool = False,
    flip_up_down: bool = False,
    class_names: Optional[Sequence[str]] = None,
):
    """
    Visualize a 2d example as an animated sequence of frames.

    Args:
        example: element from an unbatched image stream, a tuple of
            (features, label), where features is a dict with "coords", "time"
            and "polarity" keys.
        num_frames: total number of frames in the resulting animation.
        fps: frame rate of the animation.
        reverse_xy: if True, transposes coordinates.
        flip_up_down: if True, flips up/down (after possible transposition
            from `reverse_xy`).
        class_names: if provided, prints `class_names[label]` rather than `label`.
    """
    features, label = example
    coords = features["coords"]
    time = features["time"]
    time = (time - tf.reduce_min(time)).numpy()
    polarity = features["polarity"].numpy()
    if class_names is not None:
        print(class_names[label.numpy()])
    else:
        print(label.numpy())
    # shift coordinates so they start at the origin
    coords = (coords - tf.reduce_min(coords, axis=0)).numpy()
    print(f"{time.shape[0]} events over {time[-1] - time[0]} dt")
    if reverse_xy:
        coords = coords[:, -1::-1]
    if num_frames == 1:
        frame = as_frame(coords, polarity)
        plt.imshow(frame)
        plt.show()
    else:
        frames = as_frames(
            coords, time, polarity, num_frames=num_frames, flip_up_down=flip_up_down
        )
        anim.animate_frames(frames, fps=fps, tight_layout=True)
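

# Usage sketch for vis_example2d (assumption: an NMNIST-style builder with
# supervised ("events", "label") output, as in the __main__ blocks further
# down; the builder is illustrative and not defined in this module).
if __name__ == "__main__":
    builder = NMNIST()
    builder.download_and_prepare()
    for example in builder.as_dataset(split="train", as_supervised=True).take(1):
        # `example` is a (features, label) pair where features has the
        # "coords", "time" and "polarity" keys vis_example2d expects.
        vis_example2d(example, num_frames=20, fps=4)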
import os

from events_tfds.utils import make_monotonic
from events_tfds.vis import image
from events_tfds.vis import anim

folder = "/home/rslsync/Resilio Sync/DHP19/DVS_movies/S1/session1"
filename = "mov8.aedat"
path = os.path.join(folder, filename)

with tf.io.gfile.GFile(path, "rb") as fp:
    timestamp, x, y, pol, camera = load_dhp19_events(fp)

# make timestamps non-decreasing before visualization
timestamp = make_monotonic(timestamp, dtype=np.uint64)
assert np.all(timestamp[1:] >= timestamp[:-1])

print("camera", np.min(camera), np.max(camera))
print("polarity", np.min(pol), np.max(pol))
print("x", np.min(x), np.max(x))
print("y", np.min(y), np.max(y))

# keep only events from camera 1
cam_mask = camera == 1
pol = pol[cam_mask]
x = x[cam_mask]
y = y[cam_mask]
timestamp = timestamp[cam_mask]
print("num_events: {}".format(timestamp.size))

# flip the y axis (image row direction) before stacking coordinates
coords = np.stack((x, np.max(y) - y), axis=-1)

print("Creating animation...")
frames = image.as_frames(coords, timestamp, pol, num_frames=200)
anim.animate_frames(frames, fps=20)
if __name__ == "__main__":
    from events_tfds.vis.image import as_frames
    from events_tfds.vis.anim import animate_frames

    save_path = "/tmp/asl-dvs.gif"

    # from scipy.io import loadmat
    # path = '/home/jackd/Downloads/y_4200.mat'
    # loadmat(path)

    download_config = None
    # download_config = tfds.core.download.DownloadConfig(register_checksums=True)
    builder = AslDvs()
    builder.download_and_prepare(download_config=download_config)

    for events, labels in builder.as_dataset(split="train", as_supervised=True):
        frames = as_frames(
            **{k: v.numpy() for k, v in events.items()},
            num_frames=24,
            flip_up_down=True,
        )
        print(tf.reduce_max(events["coords"], axis=0).numpy() + 1)
        t = events["time"].numpy()
        print(f"{t.size} events over dt = {t[-1] - t[0]}")
        print(f"class = {CLASSES[labels.numpy()]}")
        animate_frames(frames, fps=8, save_path=save_path)
        if save_path is not None:
            print(f"Animation saved to {save_path}")
        break
                continue
            _, label, filename = path.split("/")
            example_id = int(filename[:-4])
            time, coords, polarity = load_neuro_events(fobj)
            features = dict(
                events=dict(time=time, coords=coords, polarity=polarity),
                label=int(label),
                example_id=example_id,
            )
            yield example_id, features


if __name__ == "__main__":
    from events_tfds.vis.image import as_frames
    from events_tfds.vis.anim import animate_frames

    download_config = None
    # download_config = tfds.core.download.DownloadConfig(register_checksums=True)
    builder = NMNIST()
    builder.download_and_prepare(download_config=download_config)

    for events, labels in builder.as_dataset(split="train", as_supervised=True):
        frames = as_frames(**{k: v.numpy() for k, v in events.items()}, num_frames=20)
        print(labels.numpy())
        t = events["time"].numpy()
        print(t[-1] - t[0], len(t))
        animate_frames(frames, fps=4)
                events=dict(
                    time=time.astype(np.int64),
                    coords=coords.astype(np.int64),
                    polarity=polarity,
                ),
                label=label,
                example_id=example_id,
            )
            yield (label, example_id), features


if __name__ == "__main__":
    from events_tfds.vis.image import as_frames
    from events_tfds.vis.anim import animate_frames

    download_config = None
    # download_config = tfds.core.download.DownloadConfig(register_checksums=True)
    builder = MnistDVS(config=SCALE16)
    builder.download_and_prepare(download_config=download_config)

    for events, labels in builder.as_dataset(split="train", as_supervised=True):
        frames = as_frames(**{k: v.numpy() for k, v in events.items()}, num_frames=40)
        print(labels.numpy())
        print(tf.reduce_max(events["coords"], axis=0).numpy() + 1)
        time = events["time"].numpy()
        print(f"{time.shape[0]} events over {time[-1] - time[0]} dt")
        animate_frames(frames, fps=8)
    timestamp, addr = load_raw_events(
        fp,
        filter_dvs=filter_dvs,
        # bytes_skip=bytes_skip,
        # bytes_trim=bytes_trim,
        # times_first=times_first
    )
    x, y, polarity = parse_raw_address(addr, **kwargs)
    return timestamp, x, y, polarity


if __name__ == "__main__":
    from events_tfds.utils import make_monotonic
    from events_tfds.vis import image
    from events_tfds.vis import anim

    folder = "/home/rslsync/Resilio Sync/RoShamBoNPP/recordings/aedat/"
    # filename = 'background_10.aedat'
    filename = "paper_tobi_front.aedat"
    path = os.path.join(folder, filename)

    with open(path, "rb") as fp:
        time, x, y, pol = load_events(fp)

    # make timestamps non-decreasing and shift time/coords to start at zero
    time = make_monotonic(time)
    time -= np.min(time)
    x -= np.min(x)
    y -= np.min(y)

    coords = np.stack((x, y), axis=-1)
    frames = image.as_frames(coords, time, pol, num_frames=100)
    anim.animate_frames(frames, fps=20)