def BindsNET_cpu(n_neurons, time):
    """Benchmark a minimal BindsNET simulation on the CPU.

    Builds a two-layer network (Input -> LIF, densely connected), drives it
    with a Poisson-encoded random input for ``time`` steps, and reports how
    long that took.

    :param n_neurons: Number of neurons in each of the two layers.
    :param time: Number of simulation timesteps.
    :return: Tuple ``(total_elapsed, run_elapsed)`` — seconds including and
        excluding the tensor-type setup, respectively (measured via ``t()``,
        presumably a timer such as ``timeit.default_timer`` — defined
        elsewhere in this file).
    """
    start_total = t()
    # Ensure CPU float32 tensors are the default for everything below.
    torch.set_default_tensor_type("torch.FloatTensor")
    start_run = t()

    net = Network()
    net.add_layer(Input(n=n_neurons), name="X")
    net.add_layer(LIFNodes(n=n_neurons), name="Y")
    dense = Connection(source=net.layers["X"], target=net.layers["Y"])
    net.add_connection(dense, source="X", target="Y")

    # Poisson-encode a random rate vector over `time` steps and simulate.
    spikes = poisson(datum=torch.rand(n_neurons), time=time)
    net.run(inputs={"X": spikes}, time=time)

    return t() - start_total, t() - start_run
# Wire up the (previously constructed) input and convolutional layers,
# then train while visualizing the convolution weights each step.
network.add_layer(input_layer, name="X")
network.add_layer(conv_layer, name="Y")
network.add_connection(conv_conn, source="X", target="Y")

# Train the network.
print("Begin training.\n")

# Choose the visualization backend from the CLI flag.
if args.tensorboard:
    analyzer = TensorboardAnalyzer("logs/conv")
else:
    analyzer = MatplotlibAnalyzer()

for step, batch in enumerate(tqdm(train_dataloader)):
    # batch contains image, label, encoded_image since an image_encoder
    # was provided
    # batch["encoded_image"] is in BxTxCxHxW format
    inputs = {"X": batch["encoded_image"]}

    # Run the network on the input.
    # Specify the location of the time dimension
    # (input_time_dim=1 matches the T axis of the BxTxCxHxW encoding above).
    network.run(inputs=inputs, time=time, input_time_dim=1)
    network.reset_state_variables()  # Reset state variables between batches.

    # Plot this step's convolution weights and flush the analyzer output.
    analyzer.plot_conv2d_weights(conv_conn.w, step=step)
    analyzer.finalize_step()
shuffle=True, num_workers=0, pin_memory=gpu) # Run training data on reservoir computer and store (spikes per neuron, label) per example. n_iters = examples training_pairs = [] pbar = tqdm(enumerate(dataloader)) for (i, dataPoint) in pbar: if i > n_iters: break datum = dataPoint["encoded_image"].view(time, 1, 1, 28, 28).to(device_id) label = dataPoint["label"] pbar.set_description_str("Train progress: (%d / %d)" % (i, n_iters)) network.run(inputs={"I": datum}, time=time, input_time_dim=1) training_pairs.append([spikes["O"].get("s").sum(0), label]) if plot: inpt_axes, inpt_ims = plot_input( dataPoint["image"].view(28, 28), datum.view(time, 784).sum(0).view(28, 28), label=label, axes=inpt_axes, ims=inpt_ims, ) spike_ims, spike_axes = plot_spikes( {layer: spikes[layer].get("s").view(-1, time) for layer in spikes}, axes=spike_axes,