Example #1
def add(obj):
    """Add the passed object to ``Network.context``."""
    if len(Network.context) == 0:
        raise NetworkContextError(
            "'%s' must either be created inside a ``with network:`` "
            "block, or set add_to_container=False in the object's "
            "constructor." % obj)
    network = Network.context[-1]
    if not isinstance(network, Network):
        raise NetworkContextError("Current context (%s) is not a network" %
                                  network)
    for cls in type(obj).__mro__:
        if cls in network.objects:
            network.objects[cls].append(obj)
            break
    else:
        raise NetworkContextError("Objects of type %r cannot be added to "
                                  "networks." % type(obj).__name__)
Example #2
    def __exit__(self, dummy_exc_type, dummy_exc_value, dummy_tb):
        if len(Network.context) == 0:
            raise NetworkContextError(
                "Network.context in bad state; was empty when "
                "exiting from a 'with' block.")

        config = Config.context[-1]
        if config is not self._config:
            raise ConfigError("Config.context in bad state; was expecting "
                              "current context to be '%s' but instead got "
                              "'%s'." % (self._config, config))

        network = Network.context.pop()
        if network is not self:
            raise NetworkContextError(
                "Network.context in bad state; was expecting current context "
                "to be '%s' but instead got '%s'." % (self, network))

        self._config.__exit__(dummy_exc_type, dummy_exc_value, dummy_tb)
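
``__exit__`` unwinds the same context stack that ``add`` (Example #1) reads from. A small sketch of the expected stack behaviour with nested networks, assuming a standard ``nengo`` install:

import nengo

outer = nengo.Network(label="outer")
with outer:
    inner = nengo.Network(label="inner")
    with inner:
        # The innermost ``with`` block is the current context.
        assert nengo.Network.context[-1] is inner
    # Leaving the inner block pops it; ``outer`` is current again.
    assert nengo.Network.context[-1] is outer
# Leaving the outer block empties the stack.
assert len(nengo.Network.context) == 0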
Example #3
    def __init__(self, inputs, outputs, readout_synapse=None, network=None):
        """Builds a reservoir containing inputs and outputs.

        Parameters
        ----------
        inputs : nengo.NengoObject, nengo.ObjView, nengo.Neurons, or iterable
            Input (or inputs) within the network, to receive the input signal.
        outputs : nengo.NengoObject, nengo.ObjView, nengo.Neurons, or iterable
            Output (or outputs) within the network, for the linear readout.
        readout_synapse : nengo.synapses.Synapse (Default: ``None``)
            Optional synapse to filter all of the outputs before solving
            for the linear readout. This is included in the connection to the
            ``output`` Node created within the network.
        network : nengo.Network, optional (Default: ``None``)
            The Nengo network that contains all of the inputs and outputs and
            that can be simulated on its own. If ``None`` is supplied, then
            this will automatically use the current network context.
        """

        self.inputs = _to_list(inputs)
        self.outputs = _to_list(outputs)
        self.readout_synapse = readout_synapse

        # Determine dimensionality of reservoir
        self.size_in = 0
        for obj in self.inputs:
            if not isinstance(obj, self._connectable):
                raise TypeError(
                    "inputs (%s) must be connectable Nengo object" %
                    (inputs, ))

            # Increment input size of reservoir
            self.size_in += obj.size_in

        if self.size_in == 0:
            raise ValueError(
                "inputs (%s) must contain at least one input dimension" %
                (inputs, ))

        self.size_mid = 0
        for obj in self.outputs:
            if not isinstance(obj, self._connectable):
                raise TypeError(
                    "outputs (%s) must be connectable Nengo object" %
                    (outputs, ))

            # Increment output size of reservoir
            self.size_mid += obj.size_out

        if self.size_mid == 0:
            raise ValueError(
                "outputs (%s) must contain at least one output dimension" %
                (outputs, ))

        # Determine simulation context
        if network is None:
            if not len(nengo.Network.context):
                raise NetworkContextError(
                    "reservoir must be created within a network block if the "
                    "given network parameter is None")
            self.network = nengo.Network.context[-1]
        else:
            self.network = network

        with self.network:
            # Create a node which disperses all of the inputs
            self._proxy_in = nengo.Node(size_in=self.size_in)
            in_used = 0
            for obj in self.inputs:
                nengo.Connection(self._proxy_in[in_used:in_used + obj.size_in],
                                 obj,
                                 synapse=None)
                in_used += obj.size_in
            assert in_used == self.size_in

            # Create a node which collects all of the reservoir outputs
            self._proxy_mid = nengo.Node(size_in=self.size_mid)
            mid_used = 0
            for obj in self.outputs:
                nengo.Connection(obj,
                                 self._proxy_mid[mid_used:mid_used +
                                                 obj.size_out],
                                 synapse=None)
                mid_used += obj.size_out
            assert mid_used == self.size_mid

            # Create a dummy node to hold the eventually learned output
            # It will be the 0 scalar until the train method is called
            self.output = nengo.Node(size_in=1)
            self._readout = nengo.Connection(self._proxy_mid,
                                             self.output,
                                             synapse=self.readout_synapse,
                                             transform=np.zeros(
                                                 (1, self.size_mid)))
            self.size_out = None
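
Assuming the ``__init__`` above belongs to a ``Reservoir`` class (the import path below is hypothetical; adjust it to wherever the class is defined), constructing one inside a network block exercises the paths described in the docstring:

import numpy as np
import nengo
from reservoir import Reservoir  # hypothetical import path

with nengo.Network() as net:
    stim = nengo.Node(output=np.sin)
    pool = nengo.Ensemble(100, dimensions=1)
    nengo.Connection(stim, pool)

    # inputs receive the input signal; outputs feed the linear readout.
    # network=None (the default) picks up the surrounding ``with`` context.
    res = Reservoir(inputs=pool, outputs=pool.neurons,
                    readout_synapse=nengo.Lowpass(0.01))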
Example #4
def configure_settings(**kwargs):
    """
    Pass settings to ``nengo_dl`` by setting them as parameters on the
    top-level Network config.

    The settings are passed as keyword arguments to ``configure_settings``;
    e.g., to set ``trainable`` use ``configure_settings(trainable=True)``.

    Parameters
    ----------
    trainable : bool or None
        Adds a parameter to Nengo Ensembles/Connections that controls
        whether or not they will be optimized by `.Simulator.fit`.
        Passing ``None`` will use the default ``nengo_dl`` trainable settings,
        or True/False will override the default for all objects.  In either
        case trainability can be further configured on a per-object basis
        (e.g., ``net.config[my_ensemble].trainable = True``).  See `the documentation
        <https://www.nengo.ai/nengo-dl/simulator.html#choosing-which-elements-to-optimize>`__
        for more details.
    planner : graph planning algorithm
        Pass one of the `graph planners
        <https://www.nengo.ai/nengo-dl/reference.html#graph-optimization>`_ to change
        the default planner.
    sorter : signal sorting algorithm
        Pass one of the `sort algorithms
        <https://www.nengo.ai/nengo-dl/reference.html#graph-optimization>`_ to change
        the default sorter.
    simplifications : list of graph simplification functions
        Pass a list of `graph simplification functions
        <https://www.nengo.ai/nengo-dl/reference.html#graph-optimization>`_ to change
        the default simplifications applied.  The default list of simplifications
        can be found in ``nengo_dl.graph_optimizer.default_simplifications``.
    inference_only : bool
        Set to True if the network will only be run in inference mode (i.e.,
        no calls to `.Simulator.fit`).  This may result in a small
        increase in the inference speed.
    lif_smoothing : float
        If specified, use the smoothed `~.neurons.SoftLIFRate` neuron
        model, with the given smoothing parameter (``sigma``),
        to compute the gradient for `~nengo.LIF` neurons (as
        opposed to using `~nengo.LIFRate`).
    dtype : ``tf.DType``
        Set the floating point precision for simulation values.
    keep_history : bool
        Adds a parameter to Nengo Probes that controls whether or not they
        will keep the history from all simulation timesteps or only the last
        simulation step.  This can be further configured on a per-probe basis
        (e.g., ``net.config[my_probe].keep_history = False``).
    stateful : bool
        If True (default), the Simulator will be built to support stateful execution
        (where internal simulation state is preserved between simulator functions such
        as `.Simulator.predict`).  Otherwise all operations will be stateless. Note that
        this can also be configured individually through the ``stateful`` parameter on
        individual functions.
    use_loop : bool
        If True (default), use a symbolic while loop to run the simulation. Otherwise,
        simulation iterations are explicitly built into the model, avoiding the
        while loop. This can improve performance, but the simulation can only run
        for exactly ``Simulator.unroll_simulation`` iterations.
    """

    # get the toplevel network
    if len(Network.context) > 0:
        config = Network.context[0].config
    else:
        raise NetworkContextError(
            "`configure_settings` must be called within a Network context "
            "(`with nengo.Network(): ...`)")

    try:
        params = config[Network]
    except ConfigError:
        config.configures(Network)
        params = config[Network]

    for attr, val in kwargs.items():
        if attr == "trainable":
            # for trainable, we set it on the nearest containing network (rather than
            # the top-level)
            sub_config = Network.context[-1].config
            for obj in (Ensemble, Connection, ensemble.Neurons):
                try:
                    obj_params = sub_config[obj]
                except ConfigError:
                    sub_config.configures(obj)
                    obj_params = sub_config[obj]

                obj_params.set_param(
                    "trainable", BoolParam("trainable", val, optional=True))
        elif attr == "keep_history":
            config[Probe].set_param("keep_history",
                                    BoolParam("keep_history", val))
        elif attr in (
                "planner",
                "sorter",
                "simplifications",
                "inference_only",
                "lif_smoothing",
                "dtype",
                "stateful",
                "use_loop",
        ):
            params.set_param(attr, Parameter(attr, val))
        else:
            raise ConfigError("%s is not a valid config parameter" % attr)
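
A short usage sketch based on the docstring above, assuming a standard ``nengo_dl`` install:

import nengo
import nengo_dl

with nengo.Network() as net:
    # Freeze all parameters by default ...
    nengo_dl.configure_settings(trainable=False)

    ens = nengo.Ensemble(10, 1)
    # ... then opt individual objects back in, as described above.
    net.config[ens].trainable = True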
Example #5
File: utils.py  Project: hunse/nengo-dl
def configure_settings(**kwargs):
    """
    Pass settings to ``nengo_dl`` by setting them as parameters on the
    top-level Network config.

    The settings are passed as keyword arguments to ``configure_settings``;
    e.g., to set ``trainable`` use ``configure_settings(trainable=True)``.

    Parameters
    ----------
    trainable : bool or None
        Adds a parameter to Nengo Ensembles/Connections/Networks that controls
        whether or not they will be optimized by :meth:`.Simulator.train`.
        Passing ``None`` will use the default ``nengo_dl`` trainable settings,
        or True/False will override the default for all objects.  In either
        case trainability can be further configured on a per-object basis
        (e.g., ``net.config[my_ensemble].trainable = True``).  See `the documentation
        <https://www.nengo.ai/nengo-dl/training.html#choosing-which-elements-to-optimize>`_
        for more details.
    planner : graph planning algorithm
        Pass one of the `graph planners
        <https://www.nengo.ai/nengo-dl/graph_optimizer.html>`_ to change the
        default planner.
    sorter : signal sorting algorithm
        Pass one of the `sort algorithms
        <https://www.nengo.ai/nengo-dl/graph_optimizer.html>`_ to change the
        default sorter.
    simplifications : list of graph simplification functions
        Pass a list of functions that transform the list of operators in the
        model (see https://www.nengo.ai/nengo-dl/graph_optimizer.html).
    session_config : dict
        Config options passed to ``tf.Session`` initialization (e.g., to change
        the `GPU memory allocation method
        <https://www.tensorflow.org/programmers_guide/using_gpu#allowing_gpu_memory_growth>`_
        pass ``{"gpu_options.allow_growth": True}``).
    """

    # get the toplevel network
    if len(Network.context) > 0:
        config = Network.context[0].config
    else:
        raise NetworkContextError(
            "`configure_settings` must be called within a Network context "
            "(`with nengo.Network(): ...`)")

    try:
        params = config[Network]
    except ConfigError:
        config.configures(Network)
        params = config[Network]

    for attr, val in kwargs.items():
        if attr == "trainable":
            for obj in (Ensemble, Connection, ensemble.Neurons, Network):
                try:
                    obj_params = config[obj]
                except ConfigError:
                    config.configures(obj)
                    obj_params = config[obj]

                obj_params.set_param("trainable", BoolParam("trainable", val,
                                                            optional=True))
        elif attr in ("planner", "sorter", "simplifications",
                      "session_config"):
            params.set_param(attr, Parameter(attr, val))
        else:
            raise ConfigError("%s is not a valid config parameter" % attr)
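
This older variant (from the ``hunse/nengo-dl`` fork listed above) still accepts ``session_config``; a minimal sketch of the GPU-memory option quoted in its docstring:

import nengo
import nengo_dl

with nengo.Network():
    # Let TensorFlow grow GPU memory on demand instead of reserving
    # it all up front (see the docstring above).
    nengo_dl.configure_settings(
        session_config={"gpu_options.allow_growth": True})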