Example #1
0
 def __init__(self, model, parameters):
     """
     Register this sheet with *model* and split off distribution-valued
     cell parameters.

     Parameters
     ----------
     model : the owning model; must expose `sim` and `register_sheet`.
     parameters : ParameterSet-like object; `parameters.name` is used as
                  the population name and `parameters.cell.params` holds
                  the pyNN cell parameters.
     """
     BaseComponent.__init__(self, model, parameters)
     self.sim = self.model.sim
     self.name = parameters.name  # the name of the population
     self.model.register_sheet(self)
     self._pop = None

     # Cell parameters may be given as PyNNDistributions so that we can get
     # variably parametrized populations.  pyNN.Population, however, accepts
     # only scalar parameters, so every PyNNDistribution-valued entry is
     # moved out of cell.params into self.dist_params and applied later,
     # just after the population is created (in property pop()).
     self.dist_params = {}
     # Iterate over a snapshot of the keys: entries are deleted from the
     # dict inside the loop, which would raise RuntimeError in Python 3.
     for k in list(self.parameters.cell.params.keys()):
         if isinstance(self.parameters.cell.params[k], PyNNDistribution):
             self.dist_params[k] = self.parameters.cell.params[k]
             del self.parameters.cell.params[k]
Example #2
0
File: __init__.py  Project: dguarino/mozaik
    def __init__(self, sim, num_threads, parameters):
        """
        Initialize the pyNN simulator backend and the optional input space.

        Parameters
        ----------
        sim : the pyNN simulator module to use.
        num_threads : int, number of threads passed to `sim.setup`.
        parameters : ParameterSet-like object providing `time_step`,
                     `min_delay`, `max_delay`, `input_space` and
                     `input_space_type`.
        """
        BaseComponent.__init__(self, model=self, parameters=parameters)
        self.first_time = True
        self.sim = sim
        # Return value of sim.setup (backend-specific node identifier).
        self.node = sim.setup(timestep=self.parameters.time_step,
                              min_delay=self.parameters.min_delay,
                              max_delay=self.parameters.max_delay,
                              threads=num_threads)
        self.sheets = {}
        self.connectors = {}

        # Set up the input space, if one was configured.
        if self.parameters.input_space is not None:
            input_space_type = load_component(self.parameters.input_space_type)
            self.input_space = input_space_type(self.parameters.input_space)
        else:
            self.input_space = None

        self.simulator_time = 0
Example #3
0
File: __init__.py  Project: dguarino/mozaik
 def __init__(self, model, name, source, target, parameters):
     """
     Create a connector between the *source* and *target* sheets and
     register it with *model*.

     Parameters
     ----------
     model : the owning model; must expose `sim` and `register_connector`.
     name : str, the name under which this connector is registered.
     source, target : the pre- and post-synaptic sheets.
     parameters : ParameterSet-like object; `short_term_plasticity` is
                  inspected here.
     """
     logger.info("Creating %s between %s and %s" % (self.__class__.__name__,
                                                    source.__class__.__name__,
                                                    target.__class__.__name__))
     BaseComponent.__init__(self, model, parameters)
     self.name = name
     self.model.register_connector(self)
     self.sim = self.model.sim
     self.source = source
     self.target = target
     self.input = source
     self.target.input = self

     # This scaler has to be always applied to all weights just before they
     # are sent to the pyNN connect command.  Certain pyNN synaptic models
     # interpret weights in different units and the Connector corrects for
     # these here - i.e. the Connectors in Mozaik will always assume the
     # weights to be in nano-siemens.
     self.weight_scaler = 1.0
     if self.parameters.short_term_plasticity is not None:
         self.weight_scaler = 1000.0
Example #4
0
File: __init__.py  Project: flcunha/mozaik
    def __init__(self, model, name, source, target, parameters):
        """
        Create a connector between the *source* and *target* sheets and
        register it with *model*.

        Parameters
        ----------
        model : the owning model; must expose `sim` and `register_connector`.
        name : str, the name under which this connector is registered.
        source, target : the pre- and post-synaptic sheets.
        parameters : ParameterSet-like object; `short_term_plasticity` is
                     inspected here.
        """
        logger.info("Creating %s between %s and %s" %
                    (self.__class__.__name__, source.__class__.__name__,
                     target.__class__.__name__))
        BaseComponent.__init__(self, model, parameters)
        self.name = name
        self.model.register_connector(self)
        self.sim = self.model.sim
        self.source = source
        self.target = target
        self.input = source
        self.target.input = self

        # This scaler has to be always applied to all weights just before
        # they are sent to the pyNN connect command.  Certain pyNN synaptic
        # models interpret weights in different units and the Connector
        # corrects for these here - i.e. the Connectors in Mozaik will
        # always assume the weights to be in nano-siemens.
        self.weight_scaler = 1.0
        if self.parameters.short_term_plasticity is not None:
            self.weight_scaler = 1000.0
Example #5
0
File: __init__.py  Project: RCagnol/mozaik
    def __init__(self, sim, num_threads, parameters):
        """
        Initialize the pyNN simulator backend and the optional input space.

        Parameters
        ----------
        sim : the pyNN simulator module to use.
        num_threads : int, number of threads passed to `sim.setup`; also
                      stored on the instance.
        parameters : ParameterSet-like object providing `time_step`,
                     `min_delay`, `max_delay`, `input_space` and
                     `input_space_type`.
        """
        BaseComponent.__init__(self, model=self, parameters=parameters)
        self.first_time = True
        self.sim = sim
        # Return value of sim.setup (backend-specific node identifier).
        self.node = sim.setup(
            timestep=self.parameters.time_step,
            min_delay=self.parameters.min_delay,
            max_delay=self.parameters.max_delay,
            threads=num_threads)
        # Ordered so that sheets/connectors iterate in registration order.
        self.sheets = OrderedDict()
        self.connectors = OrderedDict()
        self.num_threads = num_threads

        # Set up the input space, if one was configured.
        if self.parameters.input_space is not None:
            input_space_type = load_component(self.parameters.input_space_type)
            self.input_space = input_space_type(self.parameters.input_space)
        else:
            self.input_space = None

        self.simulator_time = 0
Example #6
0
    def __init__(self, network, lgn_on, lgn_off, target, parameters, name):
        """
        Build Gabor-shaped LGN -> target afferent connections.

        For each neuron in *target* an orientation, phase, aspect ratio,
        frequency and size of its afferent receptive field is chosen -
        either drawn from the supplied parameter distributions or read from
        pre-computed orientation/phase maps - and stored as protected
        neuron annotations.  Both the ON and OFF LGN sheets are then
        connected to *target* via ModularSamplingProbabilisticConnector.

        Parameters
        ----------
        network : the model; passed to BaseComponent and the connectors.
        lgn_on, lgn_off : the ON- and OFF-center LGN source sheets.
        target : the post-synaptic sheet to annotate and connect.
        parameters : ParameterSet-like object with the distributions and
                     map locations used below.
        name : str, base name for the two connectors ('<name>On'/'<name>Off').
        """
        from numpy import random
        random.seed(1023)  # fixed seed so the afferent layout is reproducible
        BaseComponent.__init__(self, network, parameters)
        self.name = name

        t_size = target.size_in_degrees()
        or_map = None
        if self.parameters.or_map:
            # Load the pre-computed orientation map (stored in [0, 1),
            # scaled here to radians) and wrap it in a nearest-neighbour
            # interpolator spanning the target sheet.
            with open(self.parameters.or_map_location, 'r') as f:
                or_map = pickle.load(f) * numpy.pi
            coords_x = numpy.linspace(-t_size[0] / 2.0,
                                      t_size[0] / 2.0,
                                      numpy.shape(or_map)[0])
            coords_y = numpy.linspace(-t_size[1] / 2.0,
                                      t_size[1] / 2.0,
                                      numpy.shape(or_map)[1])
            X, Y = numpy.meshgrid(coords_x, coords_y)
            or_map = NearestNDInterpolator(zip(X.flatten(), Y.flatten()),
                                           or_map.flatten())

        phase_map = None
        if self.parameters.phase_map:
            # Same treatment for the optional phase map.
            with open(self.parameters.phase_map_location, 'r') as f:
                phase_map = pickle.load(f)
            coords_x = numpy.linspace(-t_size[0] / 2.0,
                                      t_size[0] / 2.0,
                                      numpy.shape(phase_map)[0])
            coords_y = numpy.linspace(-t_size[1] / 2.0,
                                      t_size[1] / 2.0,
                                      numpy.shape(phase_map)[1])
            X, Y = numpy.meshgrid(coords_x, coords_y)
            phase_map = NearestNDInterpolator(zip(X.flatten(), Y.flatten()),
                                              phase_map.flatten())

        for (j, neuron2) in enumerate(target.pop.all()):
            # Orientation: from the map at the neuron's position, or drawn
            # from the configured distribution.
            if or_map:
                orientation = or_map(target.pop.positions[0][j],
                                     target.pop.positions[1][j])
            else:
                orientation = parameters.orientation_preference.next()[0]

            if phase_map:
                phase = phase_map(target.pop.positions[0][j],
                                  target.pop.positions[1][j])
            else:
                phase = parameters.phase.next()[0]

            aspect_ratio = parameters.aspect_ratio.next()[0]
            frequency = parameters.frequency.next()[0]
            size = parameters.size.next()[0]

            # Orientations must lie in [0, pi).
            assert orientation < numpy.pi

            target.add_neuron_annotation(j, 'LGNAfferentOrientation', orientation, protected=True)
            target.add_neuron_annotation(j, 'LGNAfferentAspectRatio', aspect_ratio, protected=True)
            target.add_neuron_annotation(j, 'LGNAfferentFrequency', frequency, protected=True)
            target.add_neuron_annotation(j, 'LGNAfferentSize', size, protected=True)
            target.add_neuron_annotation(j, 'LGNAfferentPhase', phase, protected=True)

            # Topological mode: receptive-field centers follow the neuron's
            # position; otherwise all RFs are centered at the origin.
            if self.parameters.topological:
                target.add_neuron_annotation(j, 'LGNAfferentX', target.pop.positions[0][j], protected=True)
                target.add_neuron_annotation(j, 'LGNAfferentY', target.pop.positions[1][j], protected=True)
            else:
                target.add_neuron_annotation(j, 'LGNAfferentX', 0, protected=True)
                target.add_neuron_annotation(j, 'LGNAfferentY', 0, protected=True)

        ps = ParameterSet({   'target_synapses' : 'excitatory',
                              'weight_functions' : {  'f1' : {
                                                                 'component' : 'mozaik.connectors.vision.GaborArborization',
                                                                 'params' : {
                                                                                'ON' : True,
                                                                            }
                                                             }
                                                   },
                             'delay_functions' : {},
                             'weight_expression' : 'f1', # a python expression that can use variables f1..fn where n is the number of functions in weight_functions, and fi corresponds to the name given to a ModularConnectorFunction in weight_function ParameterSet. It determines how are the weight functions combined to obtain the weights
                             'delay_expression' : str(self.parameters.delay),
                             'short_term_plasticity' : self.parameters.short_term_plasticity,
                             'base_weight' : self.parameters.base_weight,
                             'num_samples' : self.parameters.num_samples,
                             'fan_in' : self.parameters.fan_in,
                          })
        ModularSamplingProbabilisticConnector(network,name+'On',lgn_on,target,ps).connect()
        # Reuse the same parameter set for the OFF channel, flipping the
        # Gabor polarity.
        ps['weight_functions.f1.params.ON']=False
        ModularSamplingProbabilisticConnector(network,name+'Off',lgn_off,target,ps).connect()
Example #7
0
    def __init__(self, network, lgn_on, lgn_off, target, parameters, name):
        """
        Build Gabor-shaped LGN -> target afferent connections, with a
        per-neuron sample count and receptive-field jitter.

        For each neuron in *target* an orientation, phase, aspect ratio,
        frequency, size and number of afferent samples is chosen - either
        drawn from the supplied parameter distributions or read from
        pre-computed orientation/phase maps - and stored as protected
        neuron annotations.  Both the ON and OFF LGN sheets are then
        connected to *target* via
        ModularSamplingProbabilisticConnectorAnnotationSamplesCount, the
        OFF channel weighted by `parameters.off_bias`.

        Parameters
        ----------
        network : the model; passed to BaseComponent and the connectors.
        lgn_on, lgn_off : the ON- and OFF-center LGN source sheets.
        target : the post-synaptic sheet to annotate and connect.
        parameters : ParameterSet-like object with the distributions and
                     map locations used below.
        name : str, base name for the two connectors ('<name>On'/'<name>Off').
        """
        from numpy import random
        random.seed(1023)  # fixed seed so the afferent layout is reproducible
        BaseComponent.__init__(self, network, parameters)
        self.name = name

        t_size = target.size_in_degrees()
        or_map = None
        if self.parameters.or_map:
            # Load the pre-computed orientation map (stored in [0, 1),
            # scaled here to radians) and wrap it in a nearest-neighbour
            # interpolator spanning the target sheet.
            with open(self.parameters.or_map_location, 'r') as f:
                or_map = pickle.load(f) * numpy.pi
            coords_x = numpy.linspace(-t_size[0] / 2.0, t_size[0] / 2.0,
                                      numpy.shape(or_map)[0])
            coords_y = numpy.linspace(-t_size[1] / 2.0, t_size[1] / 2.0,
                                      numpy.shape(or_map)[1])
            X, Y = numpy.meshgrid(coords_x, coords_y)
            or_map = NearestNDInterpolator(zip(X.flatten(), Y.flatten()),
                                           or_map.flatten())

        phase_map = None
        if self.parameters.phase_map:
            # Same treatment for the optional phase map.
            with open(self.parameters.phase_map_location, 'r') as f:
                phase_map = pickle.load(f)
            coords_x = numpy.linspace(-t_size[0] / 2.0, t_size[0] / 2.0,
                                      numpy.shape(phase_map)[0])
            coords_y = numpy.linspace(-t_size[1] / 2.0, t_size[1] / 2.0,
                                      numpy.shape(phase_map)[1])
            X, Y = numpy.meshgrid(coords_x, coords_y)
            phase_map = NearestNDInterpolator(zip(X.flatten(), Y.flatten()),
                                              phase_map.flatten())

        for (j, neuron2) in enumerate(target.pop.all()):
            # Orientation: from the map at the neuron's position, or drawn
            # from the configured distribution.
            if or_map:
                orientation = or_map(target.pop.positions[0][j],
                                     target.pop.positions[1][j])
            else:
                orientation = parameters.orientation_preference.next()

            if phase_map:
                phase = phase_map(target.pop.positions[0][j],
                                  target.pop.positions[1][j])
            else:
                phase = parameters.phase.next()

            aspect_ratio = parameters.aspect_ratio.next()
            frequency = parameters.frequency.next()
            size = parameters.size.next()

            # Orientations must lie in [0, pi).
            assert orientation < numpy.pi

            target.add_neuron_annotation(j,
                                         'LGNAfferentOrientation',
                                         orientation,
                                         protected=True)
            target.add_neuron_annotation(j,
                                         'LGNAfferentAspectRatio',
                                         aspect_ratio,
                                         protected=True)
            target.add_neuron_annotation(j,
                                         'LGNAfferentFrequency',
                                         frequency,
                                         protected=True)
            target.add_neuron_annotation(j,
                                         'LGNAfferentSize',
                                         size,
                                         protected=True)
            target.add_neuron_annotation(j,
                                         'LGNAfferentPhase',
                                         phase,
                                         protected=True)
            # Per-neuron number of afferent samples, read back by the
            # connector through 'annotation_reference_name' below.
            target.add_neuron_annotation(j,
                                         'aff_samples',
                                         self.parameters.num_samples.next(),
                                         protected=True)

            # Topological mode: receptive-field centers follow the neuron's
            # position (plus jitter); otherwise RFs are jittered around the
            # origin.
            if self.parameters.topological:
                target.add_neuron_annotation(j,
                                             'LGNAfferentX',
                                             target.pop.positions[0][j] +
                                             parameters.rf_jitter.next(),
                                             protected=True)
                target.add_neuron_annotation(j,
                                             'LGNAfferentY',
                                             target.pop.positions[1][j] +
                                             parameters.rf_jitter.next(),
                                             protected=True)
            else:
                target.add_neuron_annotation(j,
                                             'LGNAfferentX',
                                             parameters.rf_jitter.next(),
                                             protected=True)
                target.add_neuron_annotation(j,
                                             'LGNAfferentY',
                                             parameters.rf_jitter.next(),
                                             protected=True)

        ps = ParameterSet({
            'target_synapses': 'excitatory',
            'weight_functions': {
                'f1': {
                    'component': 'mozaik.connectors.vision.GaborArborization',
                    'params': {
                        'ON': True,
                    }
                }
            },
            'delay_functions': self.parameters.delay_functions,
            'weight_expression':
            'f1',  # a python expression that can use variables f1..fn where n is the number of functions in weight_functions, and fi corresponds to the name given to a ModularConnectorFunction in weight_function ParameterSet. It determines how are the weight functions combined to obtain the weights
            'delay_expression': self.parameters.delay_expression,
            'short_term_plasticity': self.parameters.short_term_plasticity,
            'base_weight': self.parameters.base_weight,
            # num_samples is 0 because the per-neuron 'aff_samples'
            # annotation (referenced below) supplies the sample counts.
            'num_samples': 0,
            'annotation_reference_name': 'aff_samples',
        })

        ModularSamplingProbabilisticConnectorAnnotationSamplesCount(
            network, name + 'On', lgn_on, target, ps).connect()
        # Reuse the same parameter set for the OFF channel: flip the Gabor
        # polarity and bias the base weight.
        ps['weight_functions.f1.params.ON'] = False
        ps['base_weight'] = self.parameters.base_weight * self.parameters.off_bias
        ModularSamplingProbabilisticConnectorAnnotationSamplesCount(
            network, name + 'Off', lgn_off, target, ps).connect()