def walk(d, label):
    # Iterate through the dictionary `d`, replacing `dict`s by
    # `ParameterSet` objects.
    for k, v in d.items():
        ParameterSet.check_validity(k)
        if isinstance(v, ParameterSet):
            d[k] = v
        elif isinstance(v, dict):
            d[k] = walk(v, k)
        else:
            d[k] = v
    return MozaikExtendedParameterSet(d, label)
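A minimal usage sketch of walk, assuming mozaik's MozaikExtendedParameterSet (shown in full later in this listing) is importable; nested plain dicts are converted bottom-up into parameter-set objects with attribute access:

# Illustrative input; any nested dict works.
nested = {'exc_layer': {'density': 100.0}, 'duration': 1000.0}
ps = walk(nested, 'root')
assert ps.exc_layer.density == 100.0  # sub-dicts now support dot access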
Example #2
File: vision.py  Project: flcunha/mozaik
class VisualCorticalUniformSheet(SheetWithMagnificationFactor):
    """
    Represents a visual cortical sheet of neurons distributed uniformly at random in cortical space.
    
    Other parameters
    ----------------
    density : float (neurons/mm^2)
            The density of neurons per square millimeter.
    """

    required_parameters = ParameterSet({
        'density': float,  # neurons/(mm^2)
    })

    def __init__(self, model, parameters):
        SheetWithMagnificationFactor.__init__(self, model, parameters)
        dx, dy = self.cs_2_vf(parameters.sx, parameters.sy)

        rs = space.RandomStructure(boundary=space.Cuboid(dx, dy, 0),
                                   origin=(0.0, 0.0, 0.0),
                                   rng=mozaik.pynn_rng)

        self.pop = self.sim.Population(
            int(parameters.sx * parameters.sy / 1000000 * parameters.density),
            getattr(self.model.sim, self.parameters.cell.model),
            self.parameters.cell.params,
            structure=rs,
            initial_values=self.parameters.cell.initial_values,
            label=self.name)
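A quick sanity check of the population-size expression above; the 1/1000000 factor implies that sx and sy are in micrometers while density is per square millimeter (the values below are illustrative, not from the source):

# A 1000 um x 1000 um sheet at 100 neurons/mm^2 yields 100 neurons.
sx, sy, density = 1000.0, 1000.0, 100.0
assert int(sx * sy / 1000000 * density) == 100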
Example #3
def create_experiments_spont(model):

    return [
        #Spontaneous Activity
        NoStimulation(model, ParameterSet({'duration':
                                           8 * 2 * 5 * 3 * 8 * 7})),
    ]
Example #4
class InjTest(Experiment):
    required_parameters = ParameterSet({
        'duration': float,
        'current': float,
        'sheet_list': list,
        'stimulation_configuration': ParameterSet,
    })

    def __init__(self, model, parameters):
        Experiment.__init__(self, model, parameters)
        from mozaik.sheets.direct_stimulator import Depolarization

        d = {}
        for i, sheet in enumerate(self.parameters.sheet_list):
            p = MozaikExtendedParameterSet({
                'current': self.parameters.current,
                'population_selector': self.parameters.stimulation_configuration,
            })

            d[sheet] = [Depolarization(model.sheets[sheet], p)]

        self.direct_stimulation = [d]
        self.stimuli.append(
            InternalStimulus(frame_duration=self.parameters.duration,
                             duration=self.parameters.duration,
                             trial=0,
                             direct_stimulation_name='Injection',
                             direct_stimulation_parameters=p))
Example #5
File: __init__.py  Project: aopy/mozaik
class NoStimulation(Experiment):
    """ 
    This is a special experiment that does not show any stimulus for the duration of the experiment. 

    This experiment is universal, in that it is not dependent on what sensory modality/model is used in the
    given simulation. It will ensure that no sensory stimulation will be performed.  
    
    Notes
    -----
    Unlike :class:`.MeasureSpontaneousActivity` this can be used in a model with no sensory input sheet.
    """
    required_parameters = ParameterSet({
        'duration': float,
    })

    def __init__(self, model, parameters):
        Experiment.__init__(self, model, parameters)
        self.stimuli.append(
            InternalStimulus(
                frame_duration=self.parameters.duration,
                duration=self.parameters.duration,
                trial=0,
            ))
Example #6
File: model.py  Project: brainscales/mozaik
class VogelsAbbott(Model):

    required_parameters = ParameterSet({
        'exc_layer': ParameterSet,
        'inh_layer': ParameterSet,
    })

    def __init__(self, sim, num_threads, parameters):
        Model.__init__(self, sim, num_threads, parameters)
        # Load components
        ExcLayer = load_component(self.parameters.exc_layer.component)
        InhLayer = load_component(self.parameters.inh_layer.component)

        exc = ExcLayer(self, self.parameters.exc_layer.params)
        inh = InhLayer(self, self.parameters.inh_layer.params)

        # initialize projections
        UniformProbabilisticArborization(
            self, 'ExcExcConnection', exc, exc,
            self.parameters.exc_layer.ExcExcConnection).connect()
        UniformProbabilisticArborization(
            self, 'ExcInhConnection', exc, inh,
            self.parameters.exc_layer.ExcInhConnection).connect()
        UniformProbabilisticArborization(
            self, 'InhExcConnection', inh, exc,
            self.parameters.inh_layer.InhExcConnection).connect()
        UniformProbabilisticArborization(
            self, 'InhInhConnection', inh, inh,
            self.parameters.inh_layer.InhInhConnection).connect()
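A hedged sketch of the nested parameter structure this model reads; the key names mirror the attribute accesses above and the connection keys match UniformProbabilisticArborization (Example #12), but the component path and numeric values are purely illustrative:

vogels_abbott_params = ParameterSet({
    'exc_layer': ParameterSet({
        'component': 'mozaik.sheets.vision.VisualCorticalUniformSheet',  # hypothetical choice
        'params': ParameterSet({'density': 100.0}),                      # illustrative
        'ExcExcConnection': ParameterSet({'connection_probability': 0.02,
                                          'weights': 0.004, 'delay': 0.8}),
        'ExcInhConnection': ParameterSet({'connection_probability': 0.02,
                                          'weights': 0.004, 'delay': 0.8}),
    }),
    'inh_layer': ParameterSet({
        'component': 'mozaik.sheets.vision.VisualCorticalUniformSheet',  # hypothetical choice
        'params': ParameterSet({'density': 25.0}),                       # illustrative
        'InhExcConnection': ParameterSet({'connection_probability': 0.02,
                                          'weights': 0.051, 'delay': 0.8}),
        'InhInhConnection': ParameterSet({'connection_probability': 0.02,
                                          'weights': 0.051, 'delay': 0.8}),
    }),
})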
Example #7
class VogelsAbbott(Model):

    required_parameters = ParameterSet({
        'l4_cortex_exc': ParameterSet,
        'l4_cortex_inh': ParameterSet,
    })

    def __init__(self, sim, num_threads, parameters):
        Model.__init__(self, sim, num_threads, parameters)
        # Load components
        CortexExcL4 = load_component(self.parameters.l4_cortex_exc.component)
        CortexInhL4 = load_component(self.parameters.l4_cortex_inh.component)

        cortex_exc_l4 = CortexExcL4(self, self.parameters.l4_cortex_exc.params)
        cortex_inh_l4 = CortexInhL4(self, self.parameters.l4_cortex_inh.params)

        # initialize projections
        UniformProbabilisticArborization(
            self, 'V1L4ExcL4ExcConnection', cortex_exc_l4, cortex_exc_l4,
            self.parameters.l4_cortex_exc.L4ExcL4ExcConnection).connect()
        UniformProbabilisticArborization(
            self, 'V1L4ExcL4InhConnection', cortex_exc_l4, cortex_inh_l4,
            self.parameters.l4_cortex_exc.L4ExcL4InhConnection).connect()
        UniformProbabilisticArborization(
            self, 'V1L4InhL4ExcConnection', cortex_inh_l4, cortex_exc_l4,
            self.parameters.l4_cortex_inh.L4InhL4ExcConnection).connect()
        UniformProbabilisticArborization(
            self, 'V1L4InhL4InhConnection', cortex_inh_l4, cortex_inh_l4,
            self.parameters.l4_cortex_inh.L4InhL4InhConnection).connect()
Example #8
def create_experiments_spont(model):

    return [
        # PoissonNetworkKick(model, duration=8*8*7, drive_period=200.0, sheet_list=["V1_Exc_L4","V1_Inh_L4"], stimulation_configuration={'component': 'mozaik.sheets.population_selector.RCRandomPercentage', 'params': {'percentage': 100.0}}, lambda_list=[400.0,400.0], weight_list=[0.0012,0.0012]),
        # Spontaneous Activity
        NoStimulation(model, ParameterSet({'duration': 3 * 2 * 5 * 3 * 8 * 7})),
    ]
Example #9
class ParamFilterQuery(Query):
    """
    See :func:`.param_filter_query` for description.
    
    Other parameters
    ----------------
    
    params : ParameterSet
               The set of mozaik parameters and their associated values to which to restrict the DSV (see \*\*kwargs in :func:`.param_filter_query`).
    ads_unique : bool, optional
               If True, the query raises an exception if it does not identify a unique ADS.

    rec_unique : bool, optional
               If True, the query raises an exception if it does not identify a unique recording.
    """

    required_parameters = ParameterSet({
        'params': ParameterSet,
        'ads_unique': bool,  # raise an exception if the result does not contain a single AnalysisDataStructure
        'rec_unique': bool,  # raise an exception if the result does not contain a single segment (recording structure)
    })

    def query(self, dsv):
        return param_filter_query(dsv,
                                  ads_unique=self.parameters.ads_unique,
                                  rec_unique=self.parameters.rec_unique,
                                  **self.parameters.params)
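A hedged usage sketch of the underlying param_filter_query call that this class wraps, mirroring how it is invoked in the plotting and analysis examples later in this listing (the sheet name and stimulus parameter below are illustrative):

# Restrict a datastore view to one sheet and one stimulus contrast.
dsv = param_filter_query(datastore, sheet_name='V1_Exc_L4', st_contrast=100)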
Example #10
class PartitionAnalysisResultsByStimulusParameterQuery(Query):
    """
    See :func:`.partition_analysis_results_by_stimulus_parameters_query`.
    
    Other parameters
    ----------------
    
    
    parameter_list : list(string)
               The list of parameters that will vary in the returned DSVs; all other parameters will have the same value within each of the
               returned DSVs.
    excpt : bool
               If True, parameter_list is treated as an exception list: the partitioning is done against all stimulus parameters except those in parameter_list.
    """

    required_parameters = ParameterSet({
        'parameter_list': list,  # the list of parameters against which to partition
        'excpt': bool,  # treat parameter_list as an exception list, i.e. partition against all parameters except those in parameter_list
    })

    def query(self, dsv):
        return partition_analysis_results_by_stimulus_parameters_query(
            dsv, **self.parameters)
Example #11
class FixedKConnector(Connector):
    """
    Connects source with target such that each target neuron receives the same number (k) of randomly chosen presynaptic neurons.
    """

    required_parameters = ParameterSet({
        'k': int,  # number of presynaptic neurons per target neuron, chosen randomly
        'weights': float,  # nA, the synapse strength
        'delay': float,  # ms delay of the connections
    })

    def _connect(self):
        method = self.sim.FixedNumberPreConnector(self.parameters.k,
                                                  allow_self_connections=False,
                                                  safe=True,
                                                  rng=mozaik.pynn_rng)

        self.proj = self.sim.Projection(
            self.source.pop,
            self.target.pop,
            method,
            synapse_type=self.init_synaptic_mechanisms(
                weight=self.parameters.weights * self.weight_scaler,
                delay=self.parameters.delay),
            label=self.name,
            space=space.Space(axes='xy'),
            receptor_type=self.parameters.target_synapses)
Example #12
class UniformProbabilisticArborization(Connector):
    """
    Connects source with target with equal probability between any two neurons.
    """

    required_parameters = ParameterSet({
        'connection_probability':
        float,  # probability of connection between two neurons from the two populations
        'weights': float,  # nA, the synapse strength
        'delay': float,  # ms delay of the connections
    })

    def _connect(self):
        method = self.sim.FixedProbabilityConnector(
            self.parameters.connection_probability,
            allow_self_connections=False,
            safe=True,
            rng=mozaik.pynn_rng)

        self.proj = self.sim.Projection(
            self.source.pop,
            self.target.pop,
            method,
            synapse_type=self.init_synaptic_mechanisms(
                weight=self.parameters.weights * self.weight_scaler,
                delay=self.parameters.delay),
            label=self.name,
            space=space.Space(axes='xy'),
            receptor_type=self.parameters.target_synapses)
Example #13
class RetinalInputMovie(Plotting):
    """
    This plots one plot showing the retinal input for each recording in the datastore.

    It defines a line of plots named: 'PixelMovie.Plot0' ... 'PixelMovie.PlotN'.

    Other parameters
    ----------------
    frame_rate : int
                The desired frame rate (frames per second); the actual rate may be lower if the computer is too slow.
    """
    required_parameters = ParameterSet({
        'frame_rate': int,  # the desired frame rate (per sec); the actual rate may be lower if the computer is too slow
    })

    def __init__(self, datastore, parameters, plot_file_name=None,
                 fig_param=None):
        Plotting.__init__(self, datastore, parameters, plot_file_name, fig_param)
        self.length = None
        # currently there is no way to check whether the sensory input is retinal
        self.retinal_input = datastore.get_sensory_stimulus_stimulus()
        self.st = list(datastore.sensory_stimulus.keys())
        
    def subplot(self, subplotspec):
        return LinePlot(function=self._ploter,
                 length=len(self.retinal_input)
                 ).make_line_plot(subplotspec)

    def _ploter(self, idx, gs):
        return [('PixelMovie',
                 PixelMovie(self.retinal_input[idx],
                            1.0 / self.parameters.frame_rate * 1000),
                 gs,
                 {'x_axis': False, 'y_axis': False, 'title': str(self.st[idx])})]
Example #14
class RCRandomPercentage(PopulationSelector):
    """
    Select random neurons.

    This PopulationSelector selects *percentage* of randomly chosen neurons from the given population.

    Other parameters
    ----------------
    percentage : float
               The percentage of neurons to select.

    """
    required_parameters = ParameterSet({
        'percentage': float,  # the percentage of neurons to select
    })

    def generate_idd_list_of_neurons(self):
        print("Select random neurons RCRandomPercentage")
        # if isinstance(self.sheet.pop.all_cells, list):
        #    z = numpy.asarray(self.sheet.pop.all_cells, dtype=numpy.int)
        #    z = numpy.asarray(self.sheet.pop.all_cells)
        # else:
        #    z = self.sheet.pop.all_cells.astype(int)

        if isinstance(self.sheet.pop.all_cells, list):
            if hasattr(self.sheet.pop.all_cells[0], 'id'):
                z = numpy.asarray([idm.id for idm in self.sheet.pop.all_cells])  # for IDMixin
            else:
                z = numpy.asarray(self.sheet.pop.all_cells)
        else:
            z = self.sheet.pop.all_cells.astype(int)
        mozaik.rng.shuffle(z)
        return z[:int(len(z) * self.parameters.percentage / 100)]
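In the experiment examples, this selector is referenced by its dotted path inside a stimulation configuration, as in the commented-out PoissonNetworkKick call of Example #8; a sketch of that configuration dict:

stimulation_configuration = {
    'component': 'mozaik.sheets.population_selector.RCRandomPercentage',
    'params': {'percentage': 100.0},
}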
Example #15
class GSynPlot(Plotting):
    """
    It plots the conductances stored in the recordings.
    It assumes a datastore with a set of recordings. It will plot a line of conductance
    plots, one per recording, showing the excitatory and inhibitory conductances corresponding to the given
    recording.

    It defines a line of plots named: 'ConductancesPlot.Plot0' ... 'ConductancesPlot.PlotN'.

    Other parameters
    ----------------

    sheet_name : str
               The name of the sheet (layer) from which to plot the conductances.

    neuron : int
            Id of the neuron to plot.
    """
    
    required_parameters = ParameterSet({
        'neuron': int,  # we can only plot one neuron - which one ?
        'sheet_name': str,
    })

    def subplot(self, subplotspec):
        dsv = queries.param_filter_query(self.datastore,
                                         sheet_name=self.parameters.sheet_name)
        return PerStimulusPlot(dsv, function=self._ploter,
                               title_style="Standard").make_line_plot(subplotspec)

    def _ploter(self, dsv, gs):
        gsyn_es = [s.get_esyn(self.parameters.neuron) for s in dsv.get_segments()]
        gsyn_is = [s.get_isyn(self.parameters.neuron) for s in dsv.get_segments()]
        return [("ConductancesPlot", ConductancesPlot(gsyn_es, gsyn_is), gs, {})]
Example #16
File: __init__.py  Project: RCagnol/mozaik
class SpecificArborization(Connector):
    """
    Generic connector that is given the list of connections directly, as the list of
    quadruplets accepted by the pyNN FromListConnector.

    This connector cannot be parametrized directly via the parameter file
    because that does not support lists of tuples.

    This connector also discards very weak synapses (below one-hundredth of the maximum synapse).
    """

    required_parameters = ParameterSet({
        'weight_factor':
        float,  # the overall (sum) weight that a single target neuron should receive
    })

    def __init__(self, network, source, target, connection_matrix,
                 delay_matrix, parameters, name):
        Connector.__init__(self, network, name, source, target, parameters)
        self.connection_matrix = connection_matrix
        self.delay_matrix = delay_matrix

    def _connect(self):
        X = numpy.zeros(self.connection_matrix.shape)
        Y = numpy.zeros(self.connection_matrix.shape)

        for x in range(0, X.shape[0]):
            for y in range(0, X.shape[1]):
                X[x][y] = x
                Y[x][y] = y

        for i in range(0, self.target.pop.size):
            self.connection_matrix[:, i] = (
                self.connection_matrix[:, i]
                / numpy.sum(self.connection_matrix[:, i])
                * self.parameters.weight_factor)

        # This is because native synapse models (which we currently use as the short-term
        # synaptic plasticity model) do not apply the factor-of-1000 scaling that the pyNN synapse models do.
        self.connection_matrix = self.connection_matrix * self.weight_scaler
        self.connection_list = list(
            zip(
                numpy.array(X).flatten(),
                numpy.array(Y).flatten(), self.connection_matrix.flatten(),
                self.delay_matrix.flatten()))
        # get rid of very weak synapses
        z = numpy.max(self.connection_matrix.flatten())
        self.connection_list = [(int(a), int(b), c, d)
                                for (a, b, c, d) in self.connection_list
                                if c > (z / 100.0)]
        method = self.sim.FromListConnector(self.connection_list)
        self.proj = self.sim.Projection(
            self.source.pop,
            self.target.pop,
            method,
            synapse_type=self.init_synaptic_mechanisms(),
            label=self.name,
            rng=None,
            receptor_type=self.parameters.target_synapses)
Example #17
def create_experiments_or_small(model):

    return [
        # Spontaneous Activity
        NoStimulation(model, ParameterSet({'duration': 2 * 5 * 3 * 8 * 7})),
        # Measure orientation tuning with full-field sinusoidal gratings
        #MeasureOrientationTuningFullfield(model,ParameterSet({'num_orientations':2,'spatial_frequency':0.8,'temporal_frequency':2,'grating_duration':2*143*7,'contrasts':[100,10],'num_trials': 2})),
    ]
Example #18
def create_experiments_cs(model):

    return [

        #Spontaneous Activity
        NoStimulation(model, ParameterSet({'duration': 2 * 5 * 3 * 8 * 7})),
        MeasureContrastSensitivity(
            model,
            ParameterSet({
                'orientation': 0,
                'spatial_frequency': 0.8,
                'temporal_frequency': 2,
                'grating_duration': 2 * 143 * 7,
                'contrasts': [0.1, 1, 5, 10, 30, 60, 90, 100],
                'num_trials': 5
            })),
    ]
Example #19
    def test_zero_quarantine(self):
        """
        Test there are no individuals quarantined if all quarantine parameters are "turned off"
        """
        params = ParameterSet(constant.TEST_DATA_FILE, line_number=1)
        params = utils.turn_off_quarantine(params)
        params.write_params(constant.TEST_DATA_FILE)

        # Call the model
        with open(constant.TEST_OUTPUT_FILE, "w") as file_output:
            completed_run = subprocess.run([constant.command],
                                           stdout=file_output,
                                           shell=True)
        df_output = pd.read_csv(constant.TEST_OUTPUT_FILE,
                                comment="#",
                                sep=",")
        np.testing.assert_equal(df_output["n_quarantine"].to_numpy().sum(), 0)
Example #20
class DistanceDependentProbabilisticArborization(Connector):
    """
    An abstract connector that implements distance-dependent connectivity.
    Each implementation just needs to implement arborization_function and delay_function.
    The distance input is in the 'native' metric of the sheets, i.e. degrees of visual field
    in RetinalSheet or micrometers in CorticalSheet.
    """

    required_parameters = ParameterSet({
        "weights": float,  # nA, the synapse strength
        # location of the map. It has to be a file containing a single pickled 2d numpy array with values between 0 and 1.0.
        "map_location": str
    })

    def arborization_function(self, distance):
        raise NotImplementedError

    def delay_function(self, distance):
        raise NotImplementedError

    def _connect(self):
        # JAHACK, 0.1 as minimal delay should be replaced with the simulations time_step
        if isinstance(self.target, SheetWithMagnificationFactor):
            self.arborization_expression = lambda d: self.arborization_function(
                self.target.dvf_2_dcs(d))
            self.delay_expression = lambda d: self.delay_function(
                self.target.dvf_2_dcs(d))
        else:
            self.arborization_expression = self.arborization_function
            self.delay_expression = self.delay_function

        method = self.sim.DistanceDependentProbabilityConnector(
            self.arborization_expression,
            allow_self_connections=False,
            weights=self.parameters.weights * self.weight_scaler,
            delays=self.delay_expression,
            space=space.Space(axes="xy"),
            safe=True,
            verbose=False,
            n_connections=None,
            rng=mozaik.pynn_rng)
        print("connectors fast DistanceDependentProbabilisticArborization ",
              method)
        print("connectors fast DistanceDependentProbabilisticArborization ",
              self.source.pop)
        print("connectors fast DistanceDependentProbabilisticArborization ",
              self.target.pop)

        self.proj = self.sim.Projection(
            self.source.pop,
            self.target.pop,
            method,
            synapse_type=self.init_synaptic_mechanisms(),
            label=self.name,
            receptor_type=self.parameters.target_synapses)
Example #21
class ModularSingleWeightProbabilisticConnector(ModularConnector):
    """
    ModularConnector that interprets the weights as proportional probabilities of connectivity.
    The parameter connection_probability is interpreted as the average probability that two neurons will be connected in this
    projection. For each pair this connector makes one random choice of connecting them (where the probability of this choice
    is determined as the proportional probability of the corresponding weight normalized by the connection_probability parameter).
    It sets each connection's weight to base_weight.
    """

    required_parameters = ParameterSet({
        "connection_probability": float,
        "base_weight": PyNNDistribution
    })

    def _connect(self):
        cl = []
        if hasattr(self.target.pop, "_mask_local"):
            indices = numpy.nonzero(self.target.pop._mask_local)[0]
        else:
            indices = numpy.arange(self.target.pop.size)
        for i in indices:
            weights = self._obtain_weights(i)
            delays = self._obtain_delays(i)
            connection_probabilities = (
                weights / numpy.sum(weights) *
                self.parameters.connection_probability * len(weights))
            connection_indices = numpy.flatnonzero(
                connection_probabilities > numpy.random.rand(
                    len(connection_probabilities)))
            cl.extend([
                (k, i, self.weight_scaler * self.parameters.base_weight.next(),
                 delays[k]) for k in connection_indices
            ])

        method = self.sim.FromListConnector(cl)
        print("ModularSingleWeightProbabilisticConnector")
        print("projection method ", method)
        print("self.source.pop ", self.source.pop)
        print("self.target.pop ", self.target.pop)
        logger.warning("%s: %g %g", self.name, min(conections_probabilities),
                       max(conections_probabilities))
        logger.warning("%s: %d connections  [,%g,%g,%g]", self.name, len(cl),
                       self.parameters.connection_probability,
                       numpy.sum(weights), len(weights))

        if len(cl) > 0:
            self.proj = self.sim.Projection(
                self.source.pop,
                self.target.pop,
                method,
                synapse_type=self.init_synaptic_mechanisms(),
                label=self.name,
                receptor_type=self.parameters.target_synapses)
        else:
            logger.warning(
                "%s(%s): empty projection - pyNN projection not created." %
                (self.name, self.__class__.__name__))
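The sampling rule described in the docstring can be illustrated standalone with plain numpy, without any mozaik objects: each candidate connection is realized with probability proportional to its weight, scaled so that the average acceptance probability equals connection_probability (the values below are illustrative):

import numpy
weights = numpy.array([0.1, 0.3, 0.6])   # illustrative afferent weights for one target neuron
connection_probability = 0.5             # illustrative average connection probability
p = weights / numpy.sum(weights) * connection_probability * len(weights)
realized = numpy.flatnonzero(p > numpy.random.rand(len(p)))
# 'realized' holds the indices of the candidate connections that get created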
Example #22
    def __init__(self, *args, **kwargs):
        """
        Initialize as you would a dictionary. The ParameterSet constructor
        is called on each value.
        """
        super().__init__(*args, **kwargs)
        for k, v in self.items():
            self[k] = ParameterSet(v)
        self._default = None
Example #23
    def __init__(self, initializer):
        ps = initializer

        # try to create a ParameterSet from ps if
        # it is not one already
        if not isinstance(ps, ParameterSet):
            # create ParameterSet, but allowing SchemaBase derived objects
            ps = ParameterSet(ps, update_namespace=schema_checkers_namespace)

        # convert each element
        for key, value in ps.flat():
            if isinstance(value, SchemaBase):
                self.flat_add(key, value)
            else:
                self.flat_add(key, Subclass(type=type(value)))
Example #24
class OCTCTuningAnalysis(Analysis):
    """
    Calculates the Orientation Contrast tuning properties.
    """

    required_parameters = ParameterSet({
        'neurons': list,  # list of neurons for which to compute this (normally this analysis only makes sense for neurons for which the sine grating disk stimulus has been optimally oriented)
        'sheet_name': str
    })

    def perform_analysis(self):
        dsv = queries.param_filter_query(
            self.datastore,
            identifier='PerNeuronValue',
            sheet_name=self.parameters.sheet_name,
            st_name='DriftingSinusoidalGratingCenterSurroundStimulus')

        if len(dsv.get_analysis_result()) == 0: return
        assert queries.ads_with_equal_stimulus_type(dsv)
        assert queries.equal_ads(dsv, except_params=['stimulus_id'])
        self.pnvs = dsv.get_analysis_result()

        # get stimuli
        self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]

        # transform the pnvs into a dictionary of tuning curves along the 'surround_orientation' parameter
        # also make sure they are ordered according to the first pnv's idds

        self.tc_dict = colapse_to_dictionary(
            [z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],
            self.st, "surround_orientation")
        for k in self.tc_dict.keys():
            sis = []

            # we will do the calculation neuron by neuron
            for i in range(0, len(self.parameters.neurons)):

                ors = self.tc_dict[k][0]
                values = numpy.array([a[i] for a in self.tc_dict[k][1]])
                d = OrderedDict()
                for o, v in zip(ors, values):
                    d[o] = v
                sis.append(d[0] / d[numpy.pi / 2])

            self.datastore.full_datastore.add_analysis_result(
                PerNeuronValue(sis,
                               self.parameters.neurons,
                               None,
                               value_name='Suppression index of ' +
                               self.pnvs[0].value_name,
                               sheet_name=self.parameters.sheet_name,
                               tags=self.tags,
                               period=None,
                               analysis_algorithm=self.__class__.__name__,
                               stimulus_id=str(k)))
Example #25
def create_experiments(model):
    return [
        # Let's kick the network up into activation
        # Spontaneous Activity
        NoStimulation(model, ParameterSet({"duration": 105})),
        # Measure orientation tuning with full-field sinusoidal gratings
        MeasureOrientationTuningFullfield(
            model,
            ParameterSet({
                "num_orientations": 2,
                "spatial_frequency": 0.8,
                "temporal_frequency": 2,
                "grating_duration": 210,  # 15*7
                "contrasts": [100],
                "num_trials": 1,
            }),
        ),
    ]
Example #26
class ModularSamplingProbabilisticConnector(ModularConnector):
    """
    ModularConnector that interprets the weights as proportional probabilities of connectivity
    and, for each target neuron, samples num_samples of the incoming
    connections that actually get realized according to these weights.
    Each sampled connection will have weight equal to
    base_weight, but note that there can be multiple
    connections between a pair of neurons in this sample (in which case the
    weight is set to the base weight times the number of
    occurrences in the sample).
    """

    required_parameters = ParameterSet({
        'num_samples': PyNNDistribution,
        'base_weight': PyNNDistribution
    })

    def _connect(self):
        cl = []
        v = 0
        for i in numpy.nonzero(self.target.pop._mask_local)[0]:
            weights = self._obtain_weights(i)
            delays = self._obtain_delays(i)
            co = Counter(
                sample_from_bin_distribution(
                    weights, int(self.parameters.num_samples.next())))
            v = v + numpy.sum(list(co.values()))
            k = list(co.keys())
            a = numpy.array([
                k,
                numpy.zeros(len(k)) + i,
                self.weight_scaler * numpy.multiply(
                    self.parameters.base_weight.next(len(k)),
                    list(co.values())),
                numpy.array(delays)[k]
            ])
            cl.append(a)

        cl = numpy.hstack(cl).T
        method = self.sim.FromListConnector(cl)

        logger.warning(
            "%s(%s): %g connections were created, %g per target neuron [%g]" %
            (self.name, self.__class__.__name__, len(cl),
             len(cl) / len(numpy.nonzero(self.target.pop._mask_local)[0]),
             v / len(numpy.nonzero(self.target.pop._mask_local)[0])))

        if len(cl) > 0:
            self.proj = self.sim.Projection(
                self.source.pop,
                self.target.pop,
                method,
                synapse_type=self.init_synaptic_mechanisms(),
                label=self.name,
                receptor_type=self.parameters.target_synapses)
        else:
            logger.warning(
                "%s(%s): empty projection - pyNN projection not created." %
                (self.name, self.__class__.__name__))
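The sampling-with-multiplicity scheme described in the docstring can be sketched standalone; numpy.random.choice stands in for mozaik's sample_from_bin_distribution helper (an assumption made for self-containment), and repeated draws of the same source index multiply the base weight:

import numpy
from collections import Counter

weights = numpy.array([0.1, 0.3, 0.6])   # illustrative afferent weights for one target neuron
num_samples = 10
draws = numpy.random.choice(len(weights), size=num_samples, p=weights / weights.sum())
co = Counter(draws)                      # source index -> number of times it was sampled
base_weight = 0.002                      # illustrative
realized = [(k, base_weight * m) for k, m in co.items()]  # weight scales with multiplicity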
Example #27
def load_parameters(parameter_url, modified_parameters=ParameterSet({})):
    """
    A simple function for loading parameters that replaces the values in *modified_parameters* in the loaded parameters
    and subsequently expands references.
    """
    parameters = MozaikExtendedParameterSet(parameter_url)
    parameters.replace_values(**modified_parameters)
    parameters.replace_references()
    return parameters
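A hedged usage sketch; the file path and the overridden key below are illustrative, not taken from any example in this listing:

# Load a parameter file, overriding one value before references are expanded.
params = load_parameters('param/defaults',
                         ParameterSet({'exc_layer.params.density': 50.0}))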
Example #28
    def test_hospitalised_zero(self):
        """
        Test setting hospitalised fractions to zero (should be no hospitalised)
        """
        params = ParameterSet(constant.TEST_DATA_FILE, line_number=1)
        params = utils.set_hospitalisation_fraction_all(params, 0.0)
        params.write_params(constant.TEST_DATA_FILE)

        # Call the model, pipe output to file, read output file
        with open(constant.TEST_OUTPUT_FILE, "w") as file_output:
            completed_run = subprocess.run([constant.command],
                                           stdout=file_output,
                                           shell=True)
        df_output = pd.read_csv(constant.TEST_OUTPUT_FILE,
                                comment="#",
                                sep=",")

        np.testing.assert_equal(df_output["n_hospital"].sum(), 0)
Example #29
    def test_fraction_asymptomatic_one(self):
        """
        Setting fraction_asymptomatic to one (should only be asymptomatics)
        """
        params = ParameterSet(constant.TEST_DATA_FILE, line_number=1)
        params = utils.set_fraction_asymptomatic_all(params, 1.0)
        params.write_params(constant.TEST_DATA_FILE)

        # Call the model, pipe output to file, read output file
        with open(constant.TEST_OUTPUT_FILE, "w") as file_output:
            completed_run = subprocess.run([constant.command],
                                           stdout=file_output,
                                           shell=True)
        df_output = pd.read_csv(constant.TEST_OUTPUT_FILE,
                                comment="#",
                                sep=",")

        df_sub = df_output[["n_symptoms", "n_presymptom"]]
Example #31
class TextureModulation(Analysis):
    """
    Calculates the modulation of the response to texture stimuli compared to the response to spectrally-matched noise
    """
    required_parameters = ParameterSet({
        'sheet_list': list,
    })

    def perform_analysis(self):

        dsv = queries.param_filter_query(self.datastore, identifier='PerNeuronValue')
        textures = list(set([MozaikParametrized.idd(ads.stimulus_id).texture for ads in dsv.get_analysis_result()]))
        samples = list(set([MozaikParametrized.idd(ads.stimulus_id).sample for ads in dsv.get_analysis_result()]))

        for sheet in self.parameters.sheet_list:
            for texture in textures:
                #First we calculate the modulation for each sample of each original image
                for sample in samples:
                    pnv_noise = queries.param_filter_query(
                        dsv, sheet_name=sheet, st_sample=sample,
                        st_texture=texture, st_stats_type=2).get_analysis_result()[0]
                    pnv_texture = queries.param_filter_query(
                        dsv, sheet_name=sheet, st_sample=sample,
                        st_texture=texture, st_stats_type=1).get_analysis_result()[0]
                    modulation = []
                    for texture_firing_rate, noise_firing_rate in zip(
                            pnv_texture.get_value_by_id(pnv_texture.ids),
                            pnv_noise.get_value_by_id(pnv_noise.ids)):
                        modulation.append(numpy.nan_to_num(
                            (texture_firing_rate - noise_firing_rate) /
                            (texture_firing_rate + noise_firing_rate)))
                    st = MozaikParametrized.idd(pnv_texture.stimulus_id)
                    setattr(st, 'stats_type', None)
                    self.datastore.full_datastore.add_analysis_result(
                        PerNeuronValue(modulation, pnv_texture.ids, None,
                                       value_name="Sample Modulation of " + pnv_texture.value_name,
                                       sheet_name=sheet, tags=self.tags,
                                       period=None,
                                       analysis_algorithm=self.__class__.__name__,
                                       stimulus_id=str(st)))

                # Then we calculate the modulation for each texture family by averaging the firing rates across samples
                pnvs_noise = queries.param_filter_query(
                    dsv, sheet_name=sheet, st_texture=texture,
                    st_stats_type=2).get_analysis_result()
                pnvs_texture = queries.param_filter_query(
                    dsv, sheet_name=sheet, st_texture=texture,
                    st_stats_type=1).get_analysis_result()
                mean_rates_noise = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_noise]
                mean_rates_texture = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_texture]
                _mean_rates_noise = numpy.mean(mean_rates_noise, axis=0)
                _mean_rates_texture = numpy.mean(mean_rates_texture, axis=0)
                modulation = numpy.nan_to_num(
                    (_mean_rates_texture - _mean_rates_noise) /
                    (_mean_rates_texture + _mean_rates_noise))
                st = MozaikParametrized.idd(pnvs_texture[0].stimulus_id)
                setattr(st, 'stats_type', None)
                setattr(st, 'sample', None)
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(modulation, pnvs_texture[0].ids, None,
                                   value_name="Texture Modulation of " + pnvs_texture[0].value_name,
                                   sheet_name=sheet, tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

            # Finally we calculate the global modulation by averaging the firing rates across texture families
            pnvs_noise = queries.param_filter_query(
                dsv, identifier='PerNeuronValue', sheet_name=sheet,
                st_stats_type=2).get_analysis_result()
            pnvs_texture = queries.param_filter_query(
                dsv, identifier='PerNeuronValue', sheet_name=sheet,
                st_stats_type=1).get_analysis_result()
            mean_rates_noise = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_noise]
            mean_rates_texture = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_texture]
            _mean_rates_noise = numpy.mean(mean_rates_noise, axis=0)
            _mean_rates_texture = numpy.mean(mean_rates_texture, axis=0)
            modulation = numpy.nan_to_num(
                (_mean_rates_texture - _mean_rates_noise) /
                (_mean_rates_texture + _mean_rates_noise))
            st = MozaikParametrized.idd(pnvs_texture[0].stimulus_id)
            setattr(st, 'stats_type', None)
            setattr(st, 'sample', None)
            setattr(st, 'texture', None)
            self.datastore.full_datastore.add_analysis_result(
                PerNeuronValue(modulation, pnvs_texture[0].ids, None,
                               value_name="Global Modulation of " + pnvs_texture[0].value_name,
                               sheet_name=sheet, tags=self.tags,
                               period=None,
                               analysis_algorithm=self.__class__.__name__,
                               stimulus_id=str(st)))
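The index computed throughout this analysis is (texture - noise) / (texture + noise), a contrast-style measure in [-1, 1]; nan_to_num maps the undefined 0/0 case to 0. A standalone check with illustrative firing rates:

import numpy
t = numpy.array([5.0, 0.0])   # texture firing rates (illustrative)
n = numpy.array([3.0, 0.0])   # noise firing rates (illustrative)
with numpy.errstate(invalid='ignore'):
    m = numpy.nan_to_num((t - n) / (t + n))
# m == [0.25, 0.0]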
Example #32
File: __init__.py  Project: RCagnol/mozaik
class SpecificProbabilisticArborization(Connector):
    """
    Generic connector that is given the list of connections directly, as the list
    of quadruplets accepted by the pyNN FromListConnector.

    It interprets the weights as proportional probabilities of connectivity,
    and for each target neuron it samples num_samples of the incoming
    connections that actually get realized according to these weights.
    Each sampled connection will have weight equal to
    weight_factor/num_samples, but note that there can be multiple
    connections between a pair of neurons in this sample (in which case the
    weight is set to the base weight times the number of
    occurrences in the sample).

    This connector cannot be parameterized directly via the parameter file
    because that does not support lists of tuples.
    """

    required_parameters = ParameterSet({
        'weight_factor':
        float,  # the overall strength of synapses in this connection per neuron (in µS) (i.e. the sum of the strength of synapses in this connection per target neuron)
        'num_samples': int
    })

    def __init__(self, network, source, target, connection_matrix,
                 delay_matrix, parameters, name):
        Connector.__init__(self, network, name, source, target, parameters)
        self.connection_matrix = connection_matrix
        self.delay_matrix = delay_matrix

    def _connect(self):
        # This is because native synapse models (which we currently use as the short-term
        # synaptic plasticity model) do not apply the factor-of-1000 scaling that the pyNN synapse models do.
        wf = self.parameters.weight_factor * self.weight_scaler
        seeds = mozaik.get_seeds(self.target.pop.size)
        weights = self.connection_matrix
        delays = self.delay_matrix
        cl = []
        for i in range(0, self.target.pop.size):
            co = Counter(
                sample_from_bin_distribution(weights[:, i].flatten(),
                                             int(self.parameters.num_samples),
                                             seeds[i]))
            cl.extend([(int(k), int(i),
                        wf * co[k] / self.parameters.num_samples, delays[k][i])
                       for k in co.keys()])

        method = self.sim.FromListConnector(cl)

        self.proj = self.sim.Projection(
            self.source.pop,
            self.target.pop,
            method,
            synapse_type=self.init_synaptic_mechanisms(),
            label=self.name,
            receptor_type=self.parameters.target_synapses)
Example #33
    
    data = P.as_dict()
    data.update(num_processes=num_processes,
                timings=timer.marks)
    return mpi_rank, data


if __name__ == "__main__":
    from datetime import datetime
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("parameter_file", help="Parameter file (for format see http://parameters.readthedocs.org/)")
    parser.add_argument("data_store", help="filename for output data file")
    args = parser.parse_args()
    
    parameters = ParameterSet(args.parameter_file)
    
    #print parameters.pretty()
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    if parameters.simulator == "pynest":
        main = main_pynest
    else:
        main = main_pyNN
    mpi_rank, data = main(parameters)
    
    if mpi_rank == 0:
        #import shelve
        #shelf = shelve.open(args.data_store)
        #shelf[timestamp] = data
        #shelf.close()
        import os, csv
Example #34

# set up MPI environment
COMM = MPI.COMM_WORLD
SIZE = COMM.Get_size()
RANK = COMM.Get_rank()


# load some neuron-interface files needed for the EPFL cell types
neuron.h.load_file("stdrun.hoc")
neuron.h.load_file("import3d.hoc")


# test mode (1 cell per pop, all-to-all connectivity)
TESTING = False

# Creating a NeuroTools.parameters.ParameterSet object for the main parameters
PSET = ParameterSet({})

# output file destination
if TESTING:
    PSET.OUTPUTPATH = 'example_parallel_network_output_testing'
else:
    PSET.OUTPUTPATH = 'example_parallel_network_output'

# input file paths
# PATHs to current cell-specific files and NMODL files
PSET.CWD = os.getcwd()
PSET.CELLPATH = 'hoc_combos_syn.1_0_10.allzips'
PSET.NMODL = 'hoc_combos_syn.1_0_10.allmods'


Example #35
    def __init__(self, initialiser, label=None, update_namespace=None):
        if update_namespace is None:
            update_namespace = {}
        update_namespace['PyNNDistribution'] = PyNNDistribution

        def walk(d, label):
            # Iterate through the dictionary `d`, replacing `dict`s by
            # `ParameterSet` objects.
            for k,v in d.items():
                ParameterSet.check_validity(k)
                if isinstance(v, ParameterSet):
                    d[k] = v
                elif isinstance(v, dict):
                    d[k] = walk(v, k)
                else:
                    d[k] = v
            return MozaikExtendedParameterSet(d, label)
        
        self._url = None
        if isinstance(initialiser, basestring): # url or str
            try:
                # can't handle cases where authentication is required
                # should be rewritten using urllib2 
                #scheme, netloc, path, \
                #        parameters, query, fragment = urlparse(initialiser)
                f = urllib.urlopen(initialiser)
                pstr = f.read()
                self._url = initialiser

                
            except IOError:
                pstr = initialiser
                self._url = None
            else:
                f.close()


            # is it a yaml url?
            if self._url:
                import urlparse, os.path
                o = urlparse.urlparse(self._url)
                base,ext = os.path.splitext(o.path)
                if ext in ['.yaml','.yml']:
                    import yaml
                    initialiser = yaml.load(pstr)
                else:
                    initialiser = MozaikExtendedParameterSet.read_from_str(pstr,update_namespace)
            else:
                initialiser = MozaikExtendedParameterSet.read_from_str(pstr,update_namespace)

        
        # By this stage, `initialiser` should be a dict. Iterate through it,
        # copying its contents into the current instance, and replacing dicts by
        # ParameterSet objects.
        if isinstance(initialiser, dict):
            for k,v in initialiser.items():
                ParameterSet.check_validity(k)
                if isinstance(v, ParameterSet):
                    self[k] = v
                elif isinstance(v, dict):
                    self[k] = walk(v, k)
                else:
                    self[k] = v
        else:
            raise TypeError("`initialiser` must be a `dict`, a `ParameterSet` object, a string, or a valid URL")

        # Set the label
        if hasattr(initialiser, 'label'):
            self.label = label or initialiser.label # if initialiser was a ParameterSet, keep the existing label if the label arg is None
        else:
            self.label = label
        
        # Define some aliases, allowing, e.g.:
        # for name, value in P.parameters():
        # for name in P.names():
        self.names = self.keys
        self.parameters = self.items
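A hedged usage sketch of the initialiser forms this constructor accepts (the literal values are illustrative):

# From a dict: sub-dicts become parameter sets with attribute access.
p = MozaikExtendedParameterSet({'exc_layer': {'density': 100.0}})
assert p.exc_layer.density == 100.0
# From a parameter-file string or URL (py2-era paths, matching the code above):
# p = MozaikExtendedParameterSet("{'exc_layer': {'density': 100.0}}")
# p = MozaikExtendedParameterSet('file:///path/to/defaults.param')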
Example #36
    
    data = P.as_dict()
    data.update(num_processes=num_processes,
                timings=timer.marks)
    return mpi_rank, data


if __name__ == "__main__":
    from datetime import datetime
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("parameter_file", help="Parameter file (for format see http://parameters.readthedocs.org/)")
    parser.add_argument("data_store", help="filename for output data file")
    args = parser.parse_args()
    
    parameters = ParameterSet(args.parameter_file)
    
    print parameters.pretty()
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    if parameters.simulator == "pynest":
        main = main_pynest
    else:
        main = main_pyNN
    mpi_rank, data = main(parameters)
    
    if mpi_rank == 0:
        #import shelve
        #shelf = shelve.open(args.data_store)
        #shelf[timestamp] = data
        #shelf.close()
        import os, csv
Example #37

  parser.add_argument('tasksPerNode', nargs='?', default=16, type=int)
  arguments = parser.parse_args()

  #Set up parameter space
  parameterSpace = ParameterSpace()

  f = 5

  for reduceReductions in [0, 1]:
    for processes in [1, 10, 11, 82, 83, 11+81, 730, 11+81+729 ]: #, 6562]:
      forkLevelIncrement = 1
      if processes in (82, 83, 730):
        forkLevelIncrement = 2


      psProcesses = ParameterSet(processes=processes, tasks_per_node=arguments.tasksPerNode, forkLevelIncrement=forkLevelIncrement, reduceReductions=reduceReductions)

      #6x6 Patches
      ps6x6PatchSize = psProcesses.derive(patchSize=6)
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=162, tFinal=1, useHeapCompression=1))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=486, tFinal=f, useHeapCompression=1))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=1458, tFinal=f*f, useHeapCompression=1))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=4374, tFinal=f*f*f, useHeapCompression=1))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=13122, tFinal=f*f*f*f, useHeapCompression=1))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=13122*3, tFinal=f*f*f*f*f, useHeapCompression=1))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=162, tFinal=1, useHeapCompression=0))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=486, tFinal=f, useHeapCompression=0))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=1458, tFinal=f*f, useHeapCompression=0))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=4374, tFinal=f*f*f, useHeapCompression=0))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=13122, tFinal=f*f*f*f, useHeapCompression=0))
      parameterSpace.addParameterSet(ps6x6PatchSize.derive(gridSize=13122*3, tFinal=f*f*f*f*f, useHeapCompression=0))
Example #38
import csv
import argparse
from pprint import pprint
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from collections import defaultdict
from parameters import ParameterSet

# Parse command-line arguments and read parameter file
parser = argparse.ArgumentParser()
parser.add_argument("parameter_file", help="parameter file given to simple_network.py")
parser.add_argument("data_store", help="data file produced by simple_network.py, in CSV format")
parser.add_argument("-o", metavar="FILENAME", help="output file name",
                    default="Results/benchmark_summary.png")
args = parser.parse_args()
parameters = ParameterSet(args.parameter_file)

# Read data from CSV file
with open(args.data_store, "rb") as csvfile:
    reader = csv.DictReader(csvfile, quoting=csv.QUOTE_NONNUMERIC)
    records = list(reader)

# Filter and re-format data for plotting
independent_variable = "num_processes"
dependent_variables = ["import", "setup", "build", "connect", "record", "run", "get_data"]
conditions = parameters.flatten()
results = dict((var, defaultdict(list)) for var in dependent_variables)
stats = dict((var, {}) for var in dependent_variables)


def matches_conditions(record, conditions):
Example #39
File: plot_figure.py  Project: jakobj/PyNN
import csv
import argparse
from pprint import pprint
import matplotlib.pyplot as plt
from collections import defaultdict
from parameters import ParameterSet

# Parse command-line arguments and read parameter file
parser = argparse.ArgumentParser()
parser.add_argument("parameter_file", help="parameter file given to simple_network.py")
parser.add_argument("data_store", help="data file produced by simple_network.py, in CSV format")
parser.add_argument("-o", metavar="FILENAME", help="output file name",
                    default="Results/benchmark_summary.png")
args = parser.parse_args()
parameters = ParameterSet(args.parameter_file)

# Read data from CSV file
with open(args.data_store, "rb") as csvfile:
    reader = csv.DictReader(csvfile, quoting=csv.QUOTE_NONNUMERIC)
    records = list(reader)

# Filter and re-format data for plotting
independent_variable = "num_processes"
dependent_variables = ["import", "setup", "build", "connect", "record", "run"]
conditions = parameters.flatten()

abscissae = []
ordinates = defaultdict(list)
for record in records:
    if all((record[condition] == value)
예제 #40
0
def create_param_files(default_pset_file, iterlist, new_pfx, writefiles=False):
  """
  Usage:
  ======

  outdir =  '/tmp/tvb_scripting_iter_devel'
  outpfx = outdir + '/new_set_'
  default_pset_file = '/media/sf_SharedFolder/Code/git_repos_of_mine/tvb-scripting/examples/tvb_demos/params__region_deterministic_demo.param'

  #                type           params/type        param                                values
  iterlist = [ ['connectivity',     'params',       'speed',                       np.linspace(0,10,15)       ],
               ['model',             'type',                    ['Generic2dOscillator', 'WongWang', 'JansenRit']  ],  ]


  model_files = create_param_files(default_pset_file, iterlist, new_pfx)


  """

  # Read default parameter file
  default_pset = ParameterSet('file://%s' % default_pset_file)
  
  all_vals = [ [ a[0:-1]+[n] for n in a[-1] ] for a in iterlist]
  
  all_prods = list(product(*all_vals))

  all_new_psets = []
  all_new_names = []
  all_new_fnames = []
  all_new_fs_dict = {}

  model_files_dict = {}
  model_params_dict = {}

  for ap_it, ap in enumerate(all_prods):
    new_pset = default_pset.copy()
    new_name = 'model%s__' %(ap_it)
    for aa in ap:
  
      if len(aa) == 3:
        new_pset[aa[0]][aa[1]] = aa[2]
      elif len(aa) == 4: 
        new_pset[aa[0]][aa[1]][aa[2]] = aa[3]
   
      if 'type' in aa:
        new_name += '__%s_%s' %(aa[0], aa[2])
      else: 
        new_name += '__%s_%s' %(aa[-2],aa[-1])
    
    all_new_psets.append(new_pset)
    all_new_names.append(new_name)

    new_fname = '%s_%s.param' %(new_pfx,  new_name)
    print('new file: %s' % new_fname)
    if writefiles:
      f = open(new_fname, 'w+')
      f.writelines(str(new_pset))
      f.close()
    
    all_new_fnames.append(new_fname)

    all_new_fs_dict[new_name] = new_fname 

    model_files_dict[new_name] = new_fname
    model_params_dict[new_name] = {'num': ap_it,
                                   'varied_params': ap,
                                   'params_dict': new_pset}

  return model_files_dict, model_params_dict