class Ensemble(NengoObject):
    """A group of neurons that collectively represent a vector.

    Parameters
    ----------
    n_neurons : int
        The number of neurons.
    dimensions : int
        The number of representational dimensions.
    radius : float, optional
        The representational radius of the ensemble.
    encoders : Distribution or (n_neurons, dimensions) array_like, optional
        The encoders used to transform from representational space
        to neuron space. Each row is a neuron's encoder; each column is a
        representational dimension.
    intercepts : Distribution or (n_neurons,) array_like, optional
        The point along each neuron's encoder where its activity is zero. If
        ``e`` is the neuron's encoder, then the activity will be zero when
        ``dot(x, e) <= c``, where ``c`` is the given intercept.
    max_rates : Distribution or (n_neurons,) array_like, optional
        The activity of each neuron when the input signal ``x`` is magnitude 1
        and aligned with that neuron's encoder ``e``;
        i.e., when ``dot(x, e) = 1``.
    eval_points : Distribution or (n_eval_points, dims) array_like, optional
        The evaluation points used for decoder solving, spanning the interval
        (-radius, radius) in each dimension, or a distribution from which
        to choose evaluation points.
    n_eval_points : int, optional
        The number of evaluation points to be drawn from the ``eval_points``
        distribution. If None, then a heuristic is used to determine the
        number of evaluation points.
    neuron_type : `~nengo.neurons.NeuronType`, optional
        The model that simulates all neurons in the ensemble
        (see `~nengo.neurons.NeuronType`).
    gain : Distribution or (n_neurons,) array_like, optional
        The gains associated with each neuron in the ensemble. If None, then
        the gain will be solved for using ``max_rates`` and ``intercepts``.
    bias : Distribution or (n_neurons,) array_like, optional
        The biases associated with each neuron in the ensemble. If None, then
        the bias will be solved for using ``max_rates`` and ``intercepts``.
    noise : Process, optional
        Random noise injected directly into each neuron in the ensemble
        as current. A sample is drawn for each individual neuron on
        every simulation step.
    normalize_encoders : bool, optional
        Indicates whether the encoders should be normalized.
    label : str, optional
        A name for the ensemble. Used for debugging and visualization.
    seed : int, optional
        The seed used for random number generation.

    Attributes
    ----------
    bias : Distribution or (n_neurons,) array_like or None
        The biases associated with each neuron in the ensemble.
    dimensions : int
        The number of representational dimensions.
    encoders : Distribution or (n_neurons, dimensions) array_like
        The encoders, used to transform from representational space to neuron
        space. Each row is a neuron's encoder, each column is a
        representational dimension.
    eval_points : Distribution or (n_eval_points, dims) array_like
        The evaluation points used for decoder solving, spanning the interval
        (-radius, radius) in each dimension, or a distribution from which
        to choose evaluation points.
    gain : Distribution or (n_neurons,) array_like or None
        The gains associated with each neuron in the ensemble.
    intercepts : Distribution or (n_neurons,) array_like or None
        The point along each neuron's encoder where its activity is zero. If
        ``e`` is the neuron's encoder, then the activity will be zero when
        ``dot(x, e) <= c``, where ``c`` is the given intercept.
    label : str or None
        A name for the ensemble. Used for debugging and visualization.
    max_rates : Distribution or (n_neurons,) array_like or None
        The activity of each neuron when ``dot(x, e) = 1``,
        where ``e`` is the neuron's encoder.
    n_eval_points : int or None
        The number of evaluation points to be drawn from the ``eval_points``
        distribution. If None, then a heuristic is used to determine the
        number of evaluation points.
    n_neurons : int or None
        The number of neurons.
    neuron_type : NeuronType
        The model that simulates all neurons in the ensemble
        (see ``nengo.neurons``).
    noise : Process or None
        Random noise injected directly into each neuron in the ensemble
        as current. A sample is drawn for each individual neuron on
        every simulation step.
    radius : float
        The representational radius of the ensemble.
    seed : int or None
        The seed used for random number generation.
    """

    probeable = ("decoded_output", "input", "scaled_encoders")

    n_neurons = IntParam("n_neurons", low=1)
    dimensions = IntParam("dimensions", low=1)
    radius = NumberParam("radius", default=1.0, low=1e-10)
    encoders = DistOrArrayParam(
        "encoders",
        default=ScatteredHypersphere(surface=True),
        sample_shape=("n_neurons", "dimensions"),
    )
    intercepts = DistOrArrayParam(
        "intercepts",
        default=Uniform(-1.0, 0.9),
        optional=True,
        sample_shape=("n_neurons",),
    )
    max_rates = DistOrArrayParam(
        "max_rates",
        default=Uniform(200, 400),
        optional=True,
        sample_shape=("n_neurons",),
    )
    eval_points = DistOrArrayParam(
        "eval_points", default=ScatteredHypersphere(), sample_shape=("*", "dimensions")
    )
    n_eval_points = IntParam("n_eval_points", default=None, optional=True)
    neuron_type = NeuronTypeParam("neuron_type", default=LIF())
    gain = DistOrArrayParam(
        "gain", default=None, optional=True, sample_shape=("n_neurons",)
    )
    bias = DistOrArrayParam(
        "bias", default=None, optional=True, sample_shape=("n_neurons",)
    )
    noise = ProcessParam("noise", default=None, optional=True)
    normalize_encoders = BoolParam("normalize_encoders", default=True, optional=True)

    _param_init_order = ["n_neurons", "dimensions"]

    def __init__(
        self,
        n_neurons,
        dimensions,
        radius=Default,
        encoders=Default,
        intercepts=Default,
        max_rates=Default,
        eval_points=Default,
        n_eval_points=Default,
        neuron_type=Default,
        gain=Default,
        bias=Default,
        noise=Default,
        normalize_encoders=Default,
        label=Default,
        seed=Default,
    ):
        super().__init__(label=label, seed=seed)
        self.n_neurons = n_neurons
        self.dimensions = dimensions
        self.radius = radius
        self.encoders = encoders
        self.intercepts = intercepts
        self.max_rates = max_rates
        self.n_eval_points = n_eval_points
        self.eval_points = eval_points
        self.bias = bias
        self.gain = gain
        self.neuron_type = neuron_type
        self.noise = noise
        self.normalize_encoders = normalize_encoders

    def __getitem__(self, key):
        return ObjView(self, key)

    def __len__(self):
        return self.dimensions

    @property
    def neurons(self):
        """A direct interface to the neurons in the ensemble."""
        return Neurons(self)

    @neurons.setter
    def neurons(self, dummy):
        raise ReadonlyError(attr="neurons", obj=self)

    @property
    def size_in(self):
        """The dimensionality of the ensemble."""
        return self.dimensions

    @property
    def size_out(self):
        """The dimensionality of the ensemble."""
        return self.dimensions
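
# Usage sketch (illustrative addition, not part of the Ensemble implementation):
# a minimal model that constructs an Ensemble inside a Network, connects a
# constant input to it, and probes its decoded output. Parameter values are
# arbitrary; the helper-function wrapper is only so that nothing runs at
# import time.
def _example_ensemble_usage():
    import nengo

    with nengo.Network(seed=0) as net:
        stim = nengo.Node([0.5, -0.3])  # constant 2-D input signal
        ens = nengo.Ensemble(n_neurons=100, dimensions=2, radius=1.0)
        nengo.Connection(stim, ens)
        probe = nengo.Probe(ens, synapse=0.01)  # filtered decoded output

    with nengo.Simulator(net) as sim:
        sim.run(0.5)
    return sim.data[probe]  # array of shape (n_steps, 2)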
def test_scattered_hypersphere(dims, surface, seed, plt):
    scipy_special = pytest.importorskip("scipy.special")

    n = 3000
    dists = [
        UniformHypersphere(surface=surface),
        ScatteredHypersphere(surface=surface, method="sct"),
        ScatteredHypersphere(surface=surface, method="sct-approx"),
        ScatteredHypersphere(surface=surface, method="tfww"),
    ]
    assert isinstance(dists[0], UniformHypersphere)

    xx = []  # generated points, for each dist
    times = []  # time taken to generate the points, for each dist
    for dist in dists:
        rng = np.random.RandomState(seed)
        timer = time.time()
        x = dist.sample(n, d=dims, rng=rng)
        timer = time.time() - timer
        rng.shuffle(x)  # shuffle so we can compute distances in blocks without bias
        xx.append(x)
        times.append(timer)

    dd = []  # distance to the nearest point for each point, for each dist
    rr = []  # radii (norms) of all the generated points, for each dist
    for x in xx:
        # compute distances in blocks for efficiency (this means we're not actually
        # getting the minimum distance, just a proxy)
        n_split = 1000
        d_min = []
        for i in range(0, n, n_split):
            xi = x[i : i + n_split]
            d2 = ((xi[:, :, None] - xi.T[None, :, :]) ** 2).sum(axis=1)
            np.fill_diagonal(d2, np.inf)
            d_min.append(np.sqrt(d2.min(axis=1)))
        d_min = np.concatenate(d_min)
        dd.append(d_min)
        rr.append(np.sqrt((x ** 2).sum(axis=1)))

    # compute the approximate distance between points if they were evenly spread
    volume = np.pi ** (0.5 * dims) / scipy_special.gamma(0.5 * dims + 1)
    if surface:
        volume *= dims
    even_distance = (volume / n) ** (1 / (dims - 1 if surface else dims))

    # --- plots
    colors = ["b", "g", "r", "m", "c"]

    plt.subplot(211)
    bins = np.linspace(np.min(dd), np.max(dd), 31)
    for i, d in enumerate(dd):
        histogram, _ = np.histogram(d, bins=bins)
        plt.plot(
            0.5 * (bins[:-1] + bins[1:]),
            histogram,
            colors[i],
        )
        plt.plot([d.min()], [0], colors[i] + "x")
    plt.plot([even_distance], [0], "kx")
    plt.title("surface=%s, dims=%d, n=%d" % (surface, dims, n))

    plt.subplot(212)
    bins = np.linspace(0, 1.1, 31)
    for i, r in enumerate(rr):
        histogram, _ = np.histogram(r, bins=bins)
        plt.plot(
            0.5 * (bins[:-1] + bins[1:]),
            histogram,
            colors[i],
            label=f"{dists[i]}: t={times[i]:0.2e}",
        )
    plt.legend()

    # --- checks
    uniform_min = dd[0].min()
    for i, dist in enumerate(dists):
        if i == 0:
            continue

        # check that we're significantly better than UniformHypersphere
        d_min = dd[i].min()
        assert d_min > 1.2 * uniform_min, str(dist)

        # check that all surface points are on the surface
        if surface:
            assert np.allclose(rr[i], 1.0, atol=1e-5), str(dist)
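
# Usage sketch (illustrative addition, not part of the test suite): sampling
# from ScatteredHypersphere directly, as the test above does for each candidate
# method. Keyword arguments and shapes follow the calls made in the test, and
# `np` and `ScatteredHypersphere` are assumed to be imported as they are here.
def _example_scattered_sampling(n=1000, d=3, seed=0):
    rng = np.random.RandomState(seed)
    ball = ScatteredHypersphere(surface=False).sample(n, d=d, rng=rng)
    sphere = ScatteredHypersphere(surface=True, method="tfww").sample(n, d=d, rng=rng)
    assert ball.shape == sphere.shape == (n, d)
    return ball, sphere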
def test_distributions():
    check_init_args(PDF, ["x", "p"])
    check_repr(PDF([1, 2, 3], [0.1, 0.8, 0.1]))
    assert (
        repr(PDF([1, 2], [0.4, 0.6])) == "PDF(x=array([1., 2.]), p=array([0.4, 0.6]))"
    )

    check_init_args(Uniform, ["low", "high", "integer"])
    check_repr(Uniform(1, 3))
    check_repr(Uniform(1, 4, integer=True))
    assert repr(Uniform(0, 1)) == "Uniform(low=0, high=1)"
    assert repr(Uniform(0, 5, integer=True)) == "Uniform(low=0, high=5, integer=True)"

    check_init_args(Gaussian, ["mean", "std"])
    check_repr(Gaussian(0, 2))
    assert repr(Gaussian(1, 0.1)) == "Gaussian(mean=1, std=0.1)"

    check_init_args(Exponential, ["scale", "shift", "high"])
    check_repr(Exponential(2.0))
    check_repr(Exponential(2.0, shift=0.1))
    check_repr(Exponential(2.0, shift=0.1, high=10.0))
    assert repr(Exponential(2.0)) == "Exponential(scale=2.0)"

    check_init_args(UniformHypersphere, ["surface", "min_magnitude"])
    check_repr(UniformHypersphere())
    check_repr(UniformHypersphere(surface=True))
    check_repr(UniformHypersphere(min_magnitude=0.3))
    assert repr(UniformHypersphere()) == "UniformHypersphere()"
    assert repr(UniformHypersphere(surface=True)) == "UniformHypersphere(surface=True)"

    check_init_args(
        ScatteredHypersphere, ["surface", "min_magnitude", "base", "method"]
    )
    check_repr(ScatteredHypersphere())
    check_repr(ScatteredHypersphere(surface=True))
    check_repr(ScatteredHypersphere(min_magnitude=0.3))
    check_repr(ScatteredHypersphere(base=Uniform(0, 1)))
    check_repr(ScatteredHypersphere(method="tfww"))
    assert repr(ScatteredHypersphere()) == "ScatteredHypersphere()"
    assert (
        repr(ScatteredHypersphere(surface=True)) == "ScatteredHypersphere(surface=True)"
    )
    assert (
        repr(ScatteredHypersphere(base=Uniform(0, 1), method="tfww"))
        == "ScatteredHypersphere(base=Uniform(low=0, high=1), method='tfww')"
    )

    check_init_args(Choice, ["options", "weights"])
    check_repr(Choice([3, 2, 1]))
    check_repr(Choice([3, 2, 1], weights=[0.1, 0.2, 0.7]))
    assert repr(Choice([1, 2, 3])) == "Choice(options=array([1., 2., 3.]))"
    assert (
        repr(Choice([1, 2, 3], weights=[0.1, 0.5, 0.4]))
        == "Choice(options=array([1., 2., 3.]), weights=array([0.1, 0.5, 0.4]))"
    )

    check_init_args(Samples, ["samples"])
    check_repr(Samples([3, 2, 1]))
    assert repr(Samples([3, 2, 1])) == "Samples(samples=array([3., 2., 1.]))"

    check_init_args(SqrtBeta, ["n", "m"])
    check_repr(SqrtBeta(3))
    check_repr(SqrtBeta(3, m=2))
    assert repr(SqrtBeta(3)) == "SqrtBeta(n=3)"
    assert repr(SqrtBeta(3, 2)) == "SqrtBeta(n=3, m=2)"

    check_init_args(SubvectorLength, ["dimensions", "subdimensions"])
    check_repr(SubvectorLength(6))
    check_repr(SubvectorLength(6, 2))
    assert repr(SubvectorLength(3)) == "SubvectorLength(dimensions=3)"

    check_init_args(CosineSimilarity, ["dimensions"])
    check_repr(CosineSimilarity(6))
    assert repr(CosineSimilarity(6)) == "CosineSimilarity(dimensions=6)"