Example No. 1
    def reset(self, attributes=-1, synapses=False):
        """
        Resets all parameters and variables of the projection to the value they had before the call to compile.

        *Parameters:*

        * **attributes**: list of attributes (parameter or variable) which should be reinitialized. Default: all attributes.

        .. note::

            Only parameters and variables are reinitialized, not the connectivity structure (including the weights and delays).

            The parameter ``synapses`` will be used in a future release to also reinitialize the connectivity structure.

        """
        if attributes == -1:
            attributes = self.attributes

        for var in attributes:
            # Skip the weights
            if var == 'w':
                continue
            # Check that the attribute exists
            if var not in self.attributes:
                Global._warning("Projection.reset():", var, "is not an attribute of the projection, won't reset.")
                continue
            # Set the value
            try:
                self.__setattr__(var, self.init[var])
            except Exception as e:
                Global._print(e)
                Global._warning("Projection.reset(): something went wrong while resetting", var)
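A minimal usage sketch for the method above, assuming an already compiled ANNarchy network with a projection `proj` (names are illustrative):

```python
from ANNarchy import simulate

# Hypothetical trial loop: reinitialize synaptic variables between trials
# while keeping the learned weights (reset() deliberately skips 'w').
for trial in range(10):
    proj.reset()        # parameters/variables back to their pre-compile() values
    simulate(500.0)     # run one 500 ms trial
```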
Example No. 2
def histogram(data, binsize=Global.config['dt']):
    """
    **Deprecated!!**

    Returns, for each recorded simulation step, the number of spikes occurring in the population.

    *Parameters*:

    * **data**: the dictionary returned by the get_record() method for the population. 
    * **binsize**: the width in milliseconds of the bins in which spikes are counted (default: dt).
    """
    Global._warning("histogram() is deprecated, use a Monitor instead.")
    if isinstance(data['start'], int):  # only one recording
        duration = data['stop'] - data['start']
    else:
        duration = 0
        for t in range(len(data['start'])):
            duration += data['stop'][t] - data['start'][t]

    nb_neurons = len(data['data'])
    nb_bins = int(duration * Global.config['dt'] / binsize)
    spikes = [0 for _ in range(nb_bins)]  # range(): xrange() no longer exists in Python 3
    for neuron in range(nb_neurons):
        for t in data['data'][neuron]:
            spikes[int(t / float(binsize / Global.config['dt']))] += 1
    return np.array(spikes)
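The warning points to the Monitor API; a short sketch of the suggested replacement, assuming a spiking population `pop` and that `Monitor.histogram()` is available as in recent ANNarchy releases:

```python
from ANNarchy import Monitor, simulate

m = Monitor(pop, 'spike')      # record spike events of the whole population
simulate(1000.0)               # simulate 1 s
spikes = m.get('spike')        # dict: neuron rank -> list of spike times (in steps)
hist = m.histogram(spikes)     # per-bin spike counts, replacing histogram()
```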
Example No. 3
    def sum(self, target):
        """
        Returns the array of weighted sums corresponding to the target:

        ```python
        excitatory = pop.sum('exc')
        ```

        For spiking networks, this is equivalent to accessing the conductances directly:

        ```python
        excitatory = pop.g_exc
        ```

        If no incoming projection has the given target, the method returns zeros.

        **Note:** if several projections share the same target, their weighted sums are accumulated, so the contribution of each source population cannot be distinguished.

        :param target: the desired projection target.
        """
        # Check if the network is initialized
        if not self.initialized:
            Global._warning('sum(): the population', self.name,
                            'is not initialized yet.')
            return np.zeros(self.geometry)
        # Check if a projection has this type
        if not target in self.targets:
            Global._warning('sum(): the population', self.name,
                            'receives no projection with the target', target)
            return np.zeros(self.geometry)
        # Spiking neurons already have conductances available
        if self.neuron_type.type == 'spike':
            return getattr(self, 'g_' + target)
        # Otherwise, call the Cython method
        return getattr(self.cyInstance, 'get_sum_' + target)()
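A short sketch of how this weighted sum is typically read back from Python, assuming a compiled rate-coded network where `pop` receives at least one projection with the target 'exc':

```python
from ANNarchy import simulate

simulate(100.0)
exc_input = pop.sum('exc')    # weighted sum of all 'exc' projections, one value per neuron
print(exc_input.mean())
```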
Example No. 4
    def sum(self, target):
        """
        Returns the array of weighted sums corresponding to the target::

            excitatory = pop.sum('exc')

        For spiking networks, this is equivalent to accessing the conductances directly::

            excitatory = pop.g_exc

        If no incoming projection has the given target, the method returns zeros.

        *Parameter:*

        * **target**: the desired projection target.

        **Note:** if several projections share the same target, their weighted sums are accumulated, so the contribution of each source population cannot be distinguished.
        """
        # Check if the network is initialized
        if not self.initialized:
            Global._warning('sum(): the population', self.name, 'is not initialized yet.')
            return np.zeros(self.geometry)
        # Check if a projection has this type
        if not target in self.targets:
            Global._warning('sum(): the population', self.name, 'receives no projection with the target', target)
            return np.zeros(self.geometry)
        # Spiking neurons already have conductances available
        if self.neuron_type.type == 'spike':
            return getattr(self, 'g_'+target)
        # Otherwise, call the Cython method
        return getattr(self.cyInstance, 'get_sum_'+target)()
Example No. 5
    def _data(self):
        "Returns a dictionary containing all information about the population. Used for saving."
        desc = {}
        desc['name'] = self.name
        desc['geometry'] = self.geometry
        desc['size'] = self.size
        # Attributes
        desc['attributes'] = self.attributes
        desc['parameters'] = self.parameters
        desc['variables'] = self.variables
        # Save all attributes
        for var in self.attributes:
            try:
                ctype = self._get_attribute_cpp_type(var)
                if var in self.neuron_type.description['local']:
                    data = self.cyInstance.get_local_attribute_all(var, ctype)
                    desc[var] = data.reshape(self.geometry)
                else:
                    desc[var] = self.cyInstance.get_global_attribute(
                        var, ctype)

            except:
                Global._warning('Can not save the attribute ' + var +
                                ' in the population ' + self.name + '.')

        return desc
Example No. 6
    def __init__(self, rates, schedule=0., period=-1., name=None, copied=False):
        neuron = Neuron(
            parameters="",
            equations=" r = 0.0",
            name="Timed Array",
            description="Timed array source."
        )
        # Geometry of the population
        geometry = rates.shape[1:]

        # Check the schedule
        if isinstance(schedule, (int, float)):
            if float(schedule) <= 0.0:
                schedule = Global.config['dt']
            schedule = [ float(schedule*i) for i in range(rates.shape[0])]

        if len(schedule) > rates.shape[0]:
            Global._error('TimedArray: the length of the schedule parameter cannot exceed the first dimension of the rates parameter.')

        if len(schedule) < rates.shape[0]:
            Global._warning('TimedArray: the length of the schedule parameter is smaller than the first dimension of the rates parameter (more data than time points). Make sure it is what you expect.')

        SpecificPopulation.__init__(self, geometry=geometry, neuron=neuron, name=name, copied=copied)

        self.init['schedule'] = schedule
        self.init['rates'] = rates
        self.init['period'] = period
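A usage sketch of this constructor, assuming the class is exposed as `TimedArray` in the top-level namespace and keeping the `rates`/`schedule`/`period` arguments shown above:

```python
import numpy as np
from ANNarchy import TimedArray

# 10 time points for a population of 100 neurons: row i is presented at schedule[i].
rates = np.random.uniform(0.0, 1.0, (10, 100))
inputs = TimedArray(rates=rates, schedule=100.0)   # switch to the next row every 100 ms
# period=1000.0 would repeat the whole sequence every second (default -1: play once)
```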
Example No. 7
 def set_image(self, image_name):
     """ 
     Sets an image (.png, .jpg or whatever is supported by PIL) into the firing rate of the population.
     
     If the image has a different size from the population, it will be resized.
     
     """
     try:
         im = Image.open(image_name)
     except:  # image does not exist
         Global._error('The image ' + image_name + ' does not exist.')
         exit(0)
     # Resize the image if needed
     (width, height) = (self.geometry[1], self.geometry[0])
     if im.size != (width, height):
         Global._warning('The image ' + image_name + ' does not have the same size '+str(im.size)+' as the population ' + str((width, height)) + '. It will be resized.')
         im = im.resize((width, height))
     # Check if only the luminance should be extracted
     if self.dimension == 2 or self.geometry[2] == 1:
         im = im.convert("L")
     # Set the rate of the population
     if not Global._network[0]['compiled']:
         self.r = np.array(im) / 255.
     else:
         self.cyInstance.set_r(np.array(im).reshape(self.size)/255.)
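A sketch of the typical call, assuming `set_image()` belongs to an image-based population such as `ImagePopulation` from ANNarchy's image extension; the geometry and filename are placeholders:

```python
from ANNarchy.extensions.image import ImagePopulation

pop = ImagePopulation(geometry=(480, 640))   # firing rate r encodes pixel luminance in [0, 1]
pop.set_image('image.png')                   # resized and divided by 255, as shown above
```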
Example No. 8
def histogram(data, binsize=Global.config['dt']):
    """
    **Deprecated!!**

    Returns, for each recorded simulation step, the number of spikes occurring in the population.

    *Parameters*:

    * **data**: the dictionary returned by the get_record() method for the population. 
    * **binsize**: the width in milliseconds of the bins in which spikes are counted (default: dt).
    """
    Global._warning("histogram() is deprecated, use a Monitor instead.")
    if isinstance(data['start'], int): # only one recording
        duration = data['stop'] - data['start']
    else:
        duration = 0
        for t in range(len(data['start'])):
            duration += data['stop'][t] - data['start'][t]
            
    nb_neurons = len(data['data'])
    nb_bins = int(duration*Global.config['dt']/binsize)
    spikes = [0 for _ in range(nb_bins)]  # range(): xrange() no longer exists in Python 3
    for neuron in range(nb_neurons):
        for t in data['data'][neuron]:
            spikes[int(t/float(binsize/Global.config['dt']))] += 1
    return np.array(spikes)
Example No. 9
 def set_image(self, image_name):
     """ 
     Sets an image (.png, .jpg or whatever is supported by PIL) into the firing rate of the population.
     
     If the image has a different size from the population, it will be resized.
     
     """
     try:
         im = Image.open(image_name)
     except:  # image does not exist
         Global._error('The image ' + image_name + ' does not exist.')
         exit(0)
     # Resize the image if needed
     (width, height) = (self.geometry[1], self.geometry[0])
     if im.size != (width, height):
         Global._warning('The image ' + image_name +
                         ' does not have the same size ' + str(im.size) +
                         ' as the population ' + str((width, height)) +
                         '. It will be resized.')
         im = im.resize((width, height))
     # Check if only the luminance should be extracted
     if self.dimension == 2 or self.geometry[2] == 1:
         im = im.convert("L")
     # Set the rate of the population
     if not Global._network[0]['compiled']:
         self.r = (np.array(im)) / 255.
     else:
         self.cyInstance.set_r(np.array(im).reshape(self.size) / 255.)
Example No. 10
    def _data(self):
        "Method gathering all info about the projection when calling save()"

        if not self.initialized:
            Global._error('save_connectivity(): the network has not been compiled yet.')

        desc = {}
        desc['name'] = self.name
        desc['pre'] = self.pre.name
        desc['post'] = self.post.name
        desc['target'] = self.target
        desc['post_ranks'] = self.post_ranks
        desc['attributes'] = self.attributes
        desc['parameters'] = self.parameters
        desc['variables'] = self.variables
        desc['pre_ranks'] = self.cyInstance.pre_rank_all()
        desc['delays'] = self._get_delay()


        # Attributes to save (copy the list so that self.attributes is not modified)
        attributes = list(self.attributes)
        if 'w' not in attributes:
            attributes.append('w')

        # Save all attributes
        for var in attributes:
            try:
                desc[var] = getattr(self.cyInstance, 'get_'+var)()
            except:
                Global._warning('Can not save the attribute ' + var + ' in the projection.')

        return desc
Example No. 11
    def reset(self, attributes=-1):
        """
        Resets all parameters and variables of the population to the value they had before the call to compile().

        *Parameters:*

        * **attributes**: list of attributes (parameter or variable) which should be reinitialized. Default: all attributes.
        """
        if attributes == -1:
            try:
                self.set(self.init)
            except Exception as e:
                Global._print(e)
                Global._error("Population.reset(): something went wrong while resetting.")
        else: # only some of them
            for var in attributes:
                # check it exists
                if not var in self.attributes:
                    Global._warning("Population.reset():", var, "is not an attribute of the population, skipping.")
                    continue

                try:
                    self.__setattr__(var, self.init[var])
                except Exception as e:
                    Global._print(e)
                    Global._warning("Population.reset(): something went wrong while resetting", var)

        self.cyInstance.activate(self.enabled)
        self.cyInstance.reset()
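A corresponding usage sketch, assuming a compiled network with a population `pop` whose rate variable is `r`:

```python
from ANNarchy import simulate

simulate(100.0)      # the population state drifts away from its initial values
pop.reset()          # restore every parameter/variable to its pre-compile() value
pop.reset(['r'])     # or restore only a subset of attributes
```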
Example No. 12
    def start_record(self, variable, period=None, ranks='all'):
        """
        **Deprecated!!**

        Start recording neural variables.

        Parameters:

            * **variable**: single variable name or list of variable names.

            * **period**: delay in ms between two recordings (default: dt). Not valid for the ``spike`` variable.

            * **ranks**: list of ranks of the neurons to record (default: 'all').

        Example::

            pop1.start_record('r')
            pop2.start_record(['mp', 'r'], period=10.0)
            pop3.start_record(['spike'])
            pop4.start_record(['r'], ranks=range(10, 100))
        """
        Global._warning(
            "recording from a Population is deprecated, use a Monitor instead."
        )
        from .Record import Monitor
        if ranks == 'all':
            self._monitor = Monitor(self, variable, period=period)
        else:
            self._monitor = Monitor(PopulationView(self, ranks),
                                    variable,
                                    period=period)
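The Monitor-based replacement suggested by the warning, mirroring the deprecated examples above (assuming `pop` is a compiled population and that slicing it returns a PopulationView; 'mp' is an illustrative variable name):

```python
from ANNarchy import Monitor

m1 = Monitor(pop, 'r')                        # record r for all neurons
m2 = Monitor(pop, ['mp', 'r'], period=10.0)   # record every 10 ms
m3 = Monitor(pop[10:100], 'r')                # only neurons 10..99 (a PopulationView)
```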
Example No. 13
    def reset(self, attributes=-1):
        """
        Resets all parameters and variables of the population to the value they had before the call to compile().

        :param attributes: list of attributes (parameter or variable) which should be reinitialized. Default: all attributes.
        """
        if attributes == -1:
            try:
                self.set(self.init)
            except Exception as e:
                Global._print(e)
                Global._error(
                    "Population.reset(): something went wrong while resetting."
                )
        else:  # only some of them
            for var in attributes:
                # check it exists
                if not var in self.attributes:
                    Global._warning(
                        "Population.reset():", var,
                        "is not an attribute of the population, skipping.")
                    continue

                try:
                    self.__setattr__(var, self.init[var])
                except Exception as e:
                    Global._print(e)
                    Global._warning(
                        "Population.reset(): something went wrong while resetting",
                        var)

        self.cyInstance.activate(self.enabled)
        self.cyInstance.reset()
Example No. 14
 def get_populations(self):
     """
     Returns a list of all declared populations in this network.
     """
     if self.populations == []:
         Global._warning("Network.get_populations(): no populations attached to this network.")
     return self.populations
Example No. 15
    def _function(self, func):
        "Access a user defined function"
        if not self.initialized:
            Global._warning('the network is not compiled yet, cannot access the function ' + func)
            return

        return getattr(self.cyInstance, func)
Example No. 16
    def refractory(self, value):
        if self.neuron_type.description['type'] == 'spike':

            if isinstance(self.neuron_type.description['refractory'], str):
                Global._warning(
                    "The refractory period is linked to the neural variable",
                    self.neuron_type.description['refractory'],
                    ", doing nothing... Change its value instead.")
                return

            if self.initialized:
                if isinstance(value, RandomDistribution):
                    refs = (value.get_values(self.size) /
                            Global.config['dt']).astype(int)
                elif isinstance(value, np.ndarray):
                    refs = (value / Global.config['dt']).astype(int).reshape(
                        self.size)
                else:
                    refs = (value / Global.config['dt'] *
                            np.ones(self.size)).astype(int)
                # TODO cast into int
                self.cyInstance.set_refractory(refs)
            else:  # not initialized yet, saving for later
                self.neuron_type.description['refractory'] = value
        else:
            Global._warning(
                'Rate-coded neurons do not have refractory periods...')
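A sketch of the value types accepted by this setter, following the branches above (a scalar, a RandomDistribution, or a NumPy array; values are in milliseconds and divided by dt internally):

```python
import numpy as np
from ANNarchy import Uniform

pop.refractory = 5.0                               # same 5 ms for every neuron
pop.refractory = Uniform(2.0, 10.0)                # one value drawn per neuron
pop.refractory = np.linspace(1.0, 10.0, pop.size)  # explicit per-neuron array (ms)
```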
Example No. 17
 def __getattr__(self, name):
     " Method called when accessing an attribute."
     if name == 'proj':
         return object.__getattribute__(self, name)
     elif hasattr(self, 'proj'):
         if name == 'rank':  # TODO: remove 'rank' in a future version
             Global._warning(
                 "Dendrite.rank: the attribute is deprecated, use Dendrite.pre_ranks instead."
             )
             return self.proj.cyInstance.pre_rank(self.idx)
         elif name == 'pre_rank':
             return self.proj.cyInstance.pre_rank(self.idx)
         elif name == 'delay':
             if self.proj.uniform_delay == -1:
                 return [
                     d * Global.config['dt'] for d in
                     self.proj.cyInstance.get_dendrite_delay(self.idx)
                 ]
             else:
                 return self.proj.max_delay * Global.config['dt']
         elif name in self.proj.attributes:
             return getattr(self.proj.cyInstance,
                            'get_dendrite_' + name)(self.idx)
         else:
             return object.__getattribute__(self, name)
     else:
         return object.__getattribute__(self, name)
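A sketch of the attribute access this method enables, assuming a compiled projection `proj` whose `dendrite()` method returns the Dendrite of one post-synaptic neuron:

```python
d = proj.dendrite(0)    # dendrite of the post-synaptic neuron of rank 0
print(d.pre_ranks)      # ranks of the connected pre-synaptic neurons
print(d.w)              # weights of the synapses reaching this neuron
print(d.delay)          # delays in ms (a list if non-uniform, a scalar otherwise)
```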
Example No. 18
    def start_record(self, variable, period=None, ranks="all"):
        """
        **Deprecated!!**

        Start recording neural variables.

        Parameters:

            * **variable**: single variable name or list of variable names.

            * **period**: delay in ms between two recordings (default: dt). Not valid for the ``spike`` variable.

            * **ranks**: list of ranks of the neurons to record (default: 'all').

        Example::

            pop1.start_record('r')
            pop2.start_record(['mp', 'r'], period=10.0)
            pop3.start_record(['spike'])
            pop4.start_record(['r'], ranks=range(10, 100))
        """
        Global._warning("recording from a Population is deprecated, use a Monitor instead.")
        from .Record import Monitor

        if ranks == "all":
            self._monitor = Monitor(self, variable, period=period)
        else:
            self._monitor = Monitor(PopulationView(self, ranks), variable, period=period)
Example No. 19
    def pause_record(self):
        """
        **Deprecated!!**

        Pauses the recording of the previously defined variables.
        """
        Global._warning("recording from a Dendrite is deprecated, use a Monitor instead.")
        self.proj.recorded_variables[self.post_rank].pause()
Example No. 20
    def resume_record(self):
        """
        **Deprecated!!**

        Resumes recording the previously defined variables.
        """
        Global._warning("recording from a Dendrite is deprecated, use a Monitor instead.")
        self.proj.recorded_variables[self.post_rank].resume()
Example No. 21
 def get_populations(self):
     """
     Returns a list of all declared populations in this network.
     """
     if self.populations == []:
         Global._warning(
             "Network.get_populations(): no populations attached to this network."
         )
     return self.populations
Example No. 22
    def resume_record(self):
        """
        **Deprecated!!**

        Resumes recording the previously defined variables.
        """
        Global._warning(
            "recording from a Dendrite is deprecated, use a Monitor instead.")
        self.proj.recorded_variables[self.post_rank].resume()
Example No. 23
 def refractory(self):
     if self.neuron_type.description["type"] == "spike":
         if self.initialized:
             return Global.config["dt"] * self.cyInstance.get_refractory()
         else:
             return self.neuron_type.description["refractory"]
     else:
         Global._warning("rate-coded neurons do not have refractory periods...")
         return None
Example No. 24
    def _function(self, func):
        "Access a user defined function"
        if not self.initialized:
            Global._warning(
                'the network is not compiled yet, cannot access the function '
                + func)
            return

        return getattr(self.cyInstance, func)
Example No. 25
    def pause_record(self):
        """
        **Deprecated!!**

        Pauses the recording of the previously defined variables.
        """
        Global._warning(
            "recording from a Dendrite is deprecated, use a Monitor instead.")
        self.proj.recorded_variables[self.post_rank].pause()
Example No. 26
 def refractory(self):
     if self.neuron_type.description['type'] == 'spike':
         if self.initialized:
             return Global.config['dt']*self.cyInstance.get_refractory()
         else:
             return self.neuron_type.description['refractory']
     else:
         Global._warning('rate-coded neurons do not have refractory periods...')
         return None
Example No. 27
 def refractory(self):
     if self.neuron_type.description['type'] == 'spike':
         if self.initialized:
             return Global.config['dt'] * self.cyInstance.get_refractory()
         else:
             return self.neuron_type.description['refractory']
     else:
         Global._warning(
             'rate-coded neurons do not have refractory periods...')
         return None
Example No. 28
    def _proj_recorder_class(self, proj):
        """
        Generate the code for the recorder object.

        Returns:

            * complete code as string

        Templates:

            record
        """
        if Global.config['paradigm'] == "openmp":
            template = RecTemplate.omp_projection
        elif Global.config['paradigm'] == "cuda":
            template = RecTemplate.cuda_projection
        else:
            raise NotImplementedError

        # Specific template
        if 'monitor_class' in proj._specific_template.keys():
            return proj._specific_template['monitor_class']

        init_code = ""
        recording_code = ""
        struct_code = ""

        attributes = []
        for var in proj.synapse_type.description['parameters'] + proj.synapse_type.description['variables']:
            # Avoid duplicate entries
            if var['name'] in attributes:
                continue
            attributes.append(var['name'])

            # Get the locality
            locality = var['locality']
            
            # Special case for single weights
            if var['name'] == "w" and proj._has_single_weight():
                locality = 'global'
                
            # Get the template for the structure declaration
            struct_code += template[locality]['struct'] % {'type' : var['ctype'], 'name': var['name']}
            
            # Get the initialization code
            init_code += template[locality]['init'] % {'type' : var['ctype'], 'name': var['name']}
            
            # Get the recording code
            if proj._storage_format == "lil":
                recording_code += template[locality]['recording'] % {'id': proj.id, 'type' : var['ctype'], 'name': var['name']}
            else:
                Global._warning("Monitor: variable "+ var['name'] + " cannot be recorded for a projection using the csr format...")

        return template['struct'] % {'id': proj.id, 'init_code': init_code, 'recording_code': recording_code, 'struct_code': struct_code}
Example No. 29
def _load_pop_data(pop, desc):
    """
    Update a population with the stored data set. 
    """
    if not 'attributes' in desc.keys():
        _error('Saved with a too old version of ANNarchy (< 4.2).', exit=True)
        
    for var in desc['attributes']:
        try:
            getattr(pop.cyInstance, 'set_'+var)(desc[var]) 
        except:
            Global._warning('Can not load the variable ' + var + ' in the population ' + pop.name)
            Global._print('Skipping this variable.')
            continue
Example No. 30
    def stop_record(self):
        """
        **Deprecated!!**

        Stops recording all the previously defined variables.

        Example::

            pop1.stop_record()
        """
        Global._warning("recording from a Population is deprecated, use a Monitor instead.")
        if self._monitor:
            self._monitor.stop()
            self._monitor = None
Example No. 31
    def __getattr__(self, name):
        # Method called when accessing an attribute.
        if name == 'proj':
            return object.__getattribute__(self, name)
        elif hasattr(self, 'proj'):
            if name == 'rank':  # TODO: remove 'rank' in a future version
                Global._warning(
                    "Dendrite.rank: the attribute is deprecated, use Dendrite.pre_ranks instead."
                )
                return self.proj.cyInstance.pre_rank(self.idx)

            elif name == 'pre_rank':
                return self.proj.cyInstance.pre_rank(self.idx)

            elif name == 'delay':
                if self.proj.uniform_delay == -1:
                    return [
                        d * Global.config['dt'] for d in
                        self.proj.cyInstance.get_dendrite_delay(self.idx)
                    ]
                else:
                    return self.proj.max_delay * Global.config['dt']

            elif name == "w" and self.proj._has_single_weight():
                return self.proj.cyInstance.get_global_attribute(
                    name, Global.config["precision"])

            elif name in self.proj.attributes:
                # Determine C++ data type
                ctype = None
                for var in self.proj.synapse_type.description[
                        'variables'] + self.proj.synapse_type.description[
                            'parameters']:
                    if var['name'] == name:
                        ctype = var['ctype']

                if name in self.proj.synapse_type.description['local']:
                    return self.proj.cyInstance.get_local_attribute_row(
                        name, self.idx, ctype)
                elif name in self.proj.synapse_type.description['semiglobal']:
                    return self.proj.cyInstance.get_semiglobal_attribute(
                        name, self.idx, ctype)
                else:
                    return self.proj.cyInstance.get_global_attribute(
                        name, ctype)
            else:
                return object.__getattribute__(self, name)
        else:
            return object.__getattribute__(self, name)
Example No. 32
    def reset(self, synapses=False):
        """
        Resets all parameters and variables to the value they had before the call to compile.

        *Parameters*:

        * **synapses**: if True, the connections will also be erased (default: False).

        .. note::

            Not implemented yet...
        """
        self._init_attributes()
        if synapses:
            Global._warning('Resetting synapses is not implemented yet...')
Example No. 33
    def start_record(self, variable, period=None):
        """
        **Deprecated!!**

        Starts recording the given variables.

        *Parameter*:

        * **variable**: single variable name or list of variable names.

        * **period**:  period of recording in milliseconds.
        """
        Global._warning("recording from a Dendrite is deprecated, use a Monitor instead.")
        from .Record import Monitor
        self.proj.recorded_variables[self.post_rank] = Monitor(self, variable, period)
Example No. 34
 def refractory(self, value):
     if self.neuron_type.description['type'] == 'spike':
         if self.initialized:
             if isinstance(value, RandomDistribution):
                 refs = (value.get_values(self.size)/Global.config['dt']).astype(int)
             elif isinstance(value, np.ndarray):
                 refs = (value / Global.config['dt']).astype(int).reshape(self.size)
             else:
                 refs = (value/ Global.config['dt']*np.ones(self.size)).astype(int)
             # TODO cast into int
             self.cyInstance.set_refractory(refs)
         else: # not initialized yet, saving for later
             self.neuron_type.description['refractory'] = value
     else:
         Global._warning('rate-coded neurons do not have refractory periods...')
Example No. 35
def population_rate(data, smooth=Global.config['dt']):
    """ 
    **Deprecated!!**

    Takes the recorded spikes of a population and returns a smoothed firing rate for the whole population.

    *Parameters*:

    * **data**: the dictionary returned by ``get_record()[pop]['spike']``

    * **smooth**: the smoothing time constant (default: dt)
    """
    Global._warning("population_rate() is deprecated, use a Monitor instead.")
    import ANNarchy.core.cython_ext.Transformations as Transformations
    return Transformations.population_rate(data, smooth)
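The Monitor-based replacement hinted at by the warning, assuming a spiking population `pop` and that `Monitor.population_rate()` is available:

```python
from ANNarchy import Monitor, simulate

m = Monitor(pop, 'spike')
simulate(1000.0)
spikes = m.get('spike')
rate = m.population_rate(spikes, smooth=100.0)   # smoothed rate, time constant in ms
```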
Example No. 36
def population_rate(data, smooth=Global.config['dt']):
    """ 
    **Deprecated!!**

    Takes the recorded spikes of a population and returns a smoothed firing rate for the whole population.

    *Parameters*:

    * **data**: the dictionary returned by ``get_record()[pop]['spike']``

    * **smooth**: the smoothing time constant (default: dt)
    """
    Global._warning("population_rate() is deprecated, use a Monitor instead.")
    import ANNarchy.core.cython_ext.Transformations as Transformations
    return Transformations.population_rate(data, smooth)
Example No. 37
    def get_record(self, variable=None):
        """
        **Deprecated!!**

        Returns the recorded data as one matrix, or a dictionary if more than one variable is requested.
        The first dimension is the neuron index, the last dimension represents the number of simulation steps.

        *Parameter*:

        * **variable**: single variable name or list of variable names. If no argument provided, all recorded data is returned.
        """
        Global._warning("recording from a Dendrite is deprecated, use a Monitor instead.")

        data_dict = self.proj.recorded_variables[self.post_rank].get(variable)

        return data_dict
Example No. 38
    def stop_record(self):
        """
        **Deprecated!!**

        Stops recording all the previously defined variables.

        Example::

            pop1.stop_record()
        """
        Global._warning(
            "recording from a Population is deprecated, use a Monitor instead."
        )
        if self._monitor:
            self._monitor.stop()
            self._monitor = None
Example No. 39
    def get_record(self, variable=None):
        """
        **Deprecated!!**

        Returns the recorded data as one matrix, or a dictionary if more than one variable is requested.
        The first dimension is the neuron index, the last dimension represents the number of simulation steps.

        *Parameter*:

        * **variable**: single variable name or list of variable names. If no argument provided, all recorded data is returned.
        """
        Global._warning(
            "recording from a Dendrite is deprecated, use a Monitor instead.")

        data_dict = self.proj.recorded_variables[self.post_rank].get(variable)

        return data_dict
Example No. 40
    def start_record(self, variable, period=None):
        """
        **Deprecated!!**

        Starts recording the given variables.

        *Parameter*:

        * **variable**: single variable name or list of variable names.

        * **period**:  period of recording in milliseconds.
        """
        Global._warning(
            "recording from a Dendrite is deprecated, use a Monitor instead.")
        from .Record import Monitor
        self.proj.recorded_variables[self.post_rank] = Monitor(
            self, variable, period)
Example No. 41
def sparse_random_matrix(pre, post, p, weight, delay=0):
    """
    Returns a sparse (lil) matrix to connect the pre and post populations with the probability p and the value weight.
    """
    try:
        from scipy.sparse import lil_matrix
    except:
        Global._warning("scipy is not installed, sparse matrices won't work")
        return None
    from random import sample
    W = lil_matrix((pre, post))
    for i in range(pre):  # range(): xrange() no longer exists in Python 3
        k = np.random.binomial(post, p, 1)[0]
        W.rows[i] = sample(range(post), k)
        W.data[i] = [weight] * k

    return W
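A usage sketch, assuming the resulting LIL matrix is handed to `Projection.connect_from_sparse()`; `pre` and `post` are placeholder populations, and the expected row/column orientation should be checked against the connector's documentation:

```python
from ANNarchy import Projection

w = sparse_random_matrix(1000, 500, 0.1, 0.5)   # 10 % connectivity, fixed weight 0.5
proj = Projection(pre, post, target='exc')
proj.connect_from_sparse(w)
```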
Example No. 42
def sparse_random_matrix(pre, post, p, weight, delay=0):
    """
    Returns a sparse (lil) matrix to connect the pre and post populations with the probability p and the value weight.
    """
    try:
        from scipy.sparse import lil_matrix
    except:
        Global._warning("scipy is not installed, sparse matrices won't work")
        return None
    from random import sample
    W = lil_matrix((pre, post))
    for i in range(pre):
        k = np.random.binomial(post, p, 1)[0]
        W.rows[i] = sample(range(post), k)
        W.data[i] = [weight] * k

    return W
Example No. 43
    def resume_record(self, variable=None):
        """
        **Deprecated!!**

        Resumes recording the previously defined variables.

        *Parameter*:

            * **variable**: single variable name or list of variable names.

        Example::

            pop1.resume_record('r')
            pop2.resume_record(['mp', 'r'])
        """
        Global._warning("recording from a Population is deprecated, use a Monitor instead.")
        if self._monitor:
            self._monitor.resume(variable)
Example No. 44
    def _load_pop_data(self, desc):
        """
        Updates the population with the stored data set.
        """
        if not 'attributes' in desc.keys():
            Global._error('Saved with a too old version of ANNarchy (< 4.2).',
                          exit=True)

        for var in desc['attributes']:
            try:
                self._set_cython_attribute(var, desc[var])

            except Exception as e:
                Global._print(e)
                Global._warning('Can not load the variable ' + var +
                                ' in the population ' + self.name)
                Global._print('Skipping this variable.')
                continue
Example No. 45
    def pause_record(self, variable=None):
        """
        **Deprecated!!**

        Pauses the recording of variables (can be resumed later with resume_record()).

        *Parameter*:

        * **variable**: single variable name or list of variable names. If no argument is provided all recordings will pause.

        Example::

            pop1.pause_record('r')
            pop2.pause_record(['mp', 'r'])
        """
        Global._warning("recording from a Population is deprecated, use a Monitor instead.")
        if self._monitor:
            self._monitor.pause(variable)
Example No. 46
    def _data(self):
        "Returns a dictionary containing all information about the population. Used for saving."
        desc = {}
        desc['name'] = self.name
        desc['geometry'] = self.geometry
        desc['size'] = self.size
        # Attributes
        desc['attributes'] = self.attributes
        desc['parameters'] = self.parameters
        desc['variables'] = self.variables
        # Save all attributes
        for var in self.attributes:
            try:
                desc[var] = getattr(self.cyInstance, 'get_'+var)()
            except:
                Global._warning('Can not save the attribute ' + var + ' in the population ' + self.name + '.')

        return desc
Example No. 47
    def _data(self):
        "Returns a dictionary containing all information about the population. Used for saving."
        desc = {}
        desc["name"] = self.name
        desc["geometry"] = self.geometry
        desc["size"] = self.size
        # Attributes
        desc["attributes"] = self.attributes
        desc["parameters"] = self.parameters
        desc["variables"] = self.variables
        # Save all attributes
        for var in self.attributes:
            try:
                desc[var] = getattr(self.cyInstance, "get_" + var)()
            except:
                Global._warning("Can not save the attribute " + var + " in the population " + self.name + ".")

        return desc
Example No. 48
    def disable_learning(self, update=None):
        """
        Disables learning for all synapses of this projection.

        The effect depends on the rate-coded or spiking nature of the projection:

        * **Rate-coded**: the updating of all synaptic variables is disabled (including the weights ``w``). This is equivalent to ``proj.update = False``.

        * **Spiking**: the updating of the weights ``w`` is disabled, but all other variables are updated. This is equivalent to ``proj.plasticity = False``.

        This method is useful when performing some tests on a trained network without messing with the learned weights.
        """
        try:
            if self.synapse_type.type == 'rate':
                self.cyInstance._set_update(False)
            else:
                self.cyInstance._set_plasticity(False)
        except:
            Global._warning('disabling learning is only possible after compile().')
Example No. 49
    def rank_from_coordinates(self, coord):
        """
        Returns the rank of a neuron based on coordinates.

        *Parameter*:

            * **coord**: coordinate tuple, can be multidimensional.
        """
        try:
            rank = self._rank_from_coord( coord, self.geometry )
        except:
            Global._error('There is no neuron of coordinates', coord, 'in the population', self.name, self.geometry)
            exit(0)

        if rank > self.size:
            Global._warning('Error when accessing neuron', str(coord), ': the population' , self.name , 'has only', self.size, 'neurons (geometry '+ str(self.geometry) +').')
            exit(0)
        else:
            return rank
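A round-trip sketch, assuming the companion method `coordinates_from_rank()` and a simple 2D rate-coded population:

```python
from ANNarchy import Population, Neuron

pop = Population(geometry=(10, 10), neuron=Neuron(equations="r = 0.0"))
rank = pop.rank_from_coordinates((2, 3))   # flat rank of the neuron at coordinates (2, 3)
coord = pop.coordinates_from_rank(rank)    # back to the tuple (2, 3)
```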
Example No. 50
 def refractory(self, value):
     if self.neuron_type.description['type'] == 'spike':
         if self.initialized:
             if isinstance(value, RandomDistribution):
                 refs = (value.get_values(self.size) /
                         Global.config['dt']).astype(int)
             elif isinstance(value, np.ndarray):
                 refs = (value / Global.config['dt']).astype(int).reshape(
                     self.size)
             else:
                 refs = (value / Global.config['dt'] *
                         np.ones(self.size)).astype(int)
             # TODO cast into int
             self.cyInstance.set_refractory(refs)
         else:  # not initialized yet, saving for later
             self.neuron_type.description['refractory'] = value
     else:
         Global._warning(
             'rate-coded neurons do not have refractory periods...')
Example No. 51
    def _data(self):
        "Returns a dictionary containing all information about the population. Used for saving."
        desc = {}
        desc['name'] = self.name
        desc['geometry'] = self.geometry
        desc['size'] = self.size
        # Attributes
        desc['attributes'] = self.attributes
        desc['parameters'] = self.parameters
        desc['variables'] = self.variables
        # Save all attributes
        for var in self.attributes:
            try:
                desc[var] = getattr(self.cyInstance, 'get_' + var)()
            except:
                Global._warning('Can not save the attribute ' + var +
                                ' in the population ' + self.name + '.')

        return desc
Example No. 52
    def get(self, name):
        """
        Returns the value of a variable/parameter.

        *Parameter*:

        * *name*: name of the parameter/variable::

            dendrite.get('w')
        """
        if name == 'rank':
            Global._warning("Dendrite.get('rank'): the attribute is deprecated, use Dendrite.pre_ranks instead.")
            return self.proj.cyInstance.pre_rank(self.idx)
        elif name == 'pre_ranks':
            return self.proj.cyInstance.pre_rank(self.idx)
        elif name in self.attributes:
            return getattr(self.proj.cyInstance, 'get_dendrite_'+name)(self.idx)
        else:
            Global._error("Dendrite has no parameter/variable called", name)
Example No. 53
    def _store_connectivity(self, method, args, delay, storage_format="lil", storage_order="post_to_pre"):
        """
        Store connectivity data. This function is called from cython_ext.Connectors module.
        """
        if self._connection_method is not None:
            Global._warning("Projection", self.name, "was already connected ... data will be overwritten.")

        # Store connectivity pattern parameters
        self._connection_method = method
        self._connection_args = args
        self._connection_delay = delay
        self._storage_format = storage_format
        self._storage_order = storage_order

        # Analyse the delay
        if isinstance(delay, (int, float)): # Uniform delay
            self.max_delay = round(delay/Global.config['dt'])
            self.uniform_delay = round(delay/Global.config['dt'])
        elif isinstance(delay, RandomDistribution): # Non-uniform delay
            self.uniform_delay = -1
            # Ensure no negative delays are generated
            if delay.min is None or delay.min < Global.config['dt']:
                delay.min = Global.config['dt']
            # The user needs to provide a max in order to compute max_delay
            if delay.max is None:
                Global._error('Projection.connect_xxx(): if you use a non-bounded random distribution for the delays (e.g. Normal), you need to set the max argument to limit the maximal delay.')

            self.max_delay = round(delay.max/Global.config['dt'])
        elif isinstance(delay, (list, np.ndarray)): # connect_from_matrix/sparse
            if len(delay) > 0:
                self.uniform_delay = -1
                self.max_delay = round(max([max(l) for l in delay])/Global.config['dt'])
            else: # list is empty, no delay
                self.max_delay = -1
                self.uniform_delay = -1
        else:
            Global._error('Projection.connect_xxx(): delays are not valid!')

        # Transmit the max delay to the pre pop
        if isinstance(self.pre, PopulationView):
            self.pre.population.max_delay = max(self.max_delay, self.pre.population.max_delay)
        else:
            self.pre.max_delay = max(self.max_delay, self.pre.max_delay)
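The delay analysis above explains why unbounded random distributions need an explicit maximum; a sketch of valid delay specifications, with an illustrative connector and placeholder populations:

```python
from ANNarchy import Projection, Uniform, Normal

proj = Projection(pre, post, target='exc')
proj.connect_fixed_probability(probability=0.1, weights=1.0,
                               delays=Uniform(1.0, 5.0))        # bounded distribution: OK
# An unbounded distribution must be given bounds so that max_delay can be computed:
# delays=Normal(3.0, 1.0, min=0.1, max=10.0)
```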
Example No. 54
    def disable_learning(self, update=None):
        """
        Disables learning for all synapses of this projection.

        The effect depends on the rate-coded or spiking nature of the projection:

        * **Rate-coded**: the updating of all synaptic variables is disabled (including the weights ``w``). This is equivalent to ``proj.update = False``.

        * **Spiking**: the updating of the weights ``w`` is disabled, but all other variables are updated. This is equivalent to ``proj.plasticity = False``.

        This method is useful when performing some tests on a trained network without messing with the learned weights.
        """
        try:
            if self.synapse_type.type == 'rate':
                self.cyInstance._set_update(False)
            else:
                self.cyInstance._set_plasticity(False)
        except Exception as e:
            Global._warning('disabling learning is only possible after compile().')
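A sketch of the train/test pattern mentioned in the docstring, assuming the companion method `enable_learning()` and a compiled projection `proj`:

```python
from ANNarchy import simulate

simulate(10000.0)          # training phase, plasticity active
proj.disable_learning()    # freeze learning (rate-coded: all updates; spiking: only w)
simulate(1000.0)           # evaluate the frozen network
proj.enable_learning()     # resume plasticity afterwards
```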
Example No. 55
    def pause_record(self, variable=None):
        """
        **Deprecated!!**

        Pauses the recording of variables (can be resumed later with resume_record()).

        *Parameter*:

        * **variable**: single variable name or list of variable names. If no argument is provided all recordings will pause.

        Example::

            pop1.pause_record('r')
            pop2.pause_record(['mp', 'r'])
        """
        Global._warning(
            "recording from a Population is deprecated, use a Monitor instead."
        )
        if self._monitor:
            self._monitor.pause(variable)
Example No. 56
    def resume_record(self, variable=None):
        """
        **Deprecated!!**

        Resumes recording the previously defined variables.

        *Parameter*:

            * **variable**: single variable name or list of variable names.

        Example::

            pop1.resume_record('r')
            pop2.resume_record(['mp', 'r'])
        """
        Global._warning(
            "recording from a Population is deprecated, use a Monitor instead."
        )
        if self._monitor:
            self._monitor.resume(variable)