def __init__(self,
              spiking_network,
              name,
              model,
              parameter="",
              tvb_coupling_id=0,
              nodes_ids=None,
              scale=np.array([1.0]),
              neurons=None):
      if neurons is None:  # avoid a shared mutable default argument
          neurons = Series()
      super(TVBtoSpikeNetParameterInterface, self).__init__(neurons)
     self.spiking_network = spiking_network
     self.name = str(name)
     self.model = str(model)
      if self.model not in self._available_input_parameters:
         raise_value_error(
             "model %s is not one of the available parameter interfaces!" %
             self.model)
     self.parameter = str(
         parameter)  # The string of the target parameter name
     if len(parameter) == 0:
         self.parameter = self._available_input_parameters[self.model]
     else:
         if self.parameter != self._available_input_parameters[self.model]:
              LOG.warning(
                  "Parameter %s differs from the default one %s "
                  "for parameter interface model %s" %
                  (self.parameter,
                   self._available_input_parameters[self.model], self.model))
     self.tvb_coupling_id = int(tvb_coupling_id)
     # The target Spiking Network region nodes which coincide with the source TVB region nodes
     # (i.e., region i of TVB modifies a parameter in region i implemented in Spiking Network):
      self.nodes_ids = list(nodes_ids) if nodes_ids is not None else []
     self.scale = scale  # a scaling weight
     LOG.info("%s of model %s for %s created!" %
              (self.__class__, self.model, self.name))
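# A hypothetical construction sketch (the spiking_network object and the model,
# parameter and node ids below are illustrative assumptions, not values taken
# from this codebase):
#
#     interface = TVBtoSpikeNetParameterInterface(spiking_network, "S_e", "current",
#                                                 tvb_coupling_id=0, nodes_ids=[0, 1],
#                                                 scale=np.array([0.5]))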
 def from_device_set(self, device_set, name=None):
     if isinstance(device_set, DeviceSet):
         super(SpikeNetToTVBinterface, self).__init__(device_set.name, device_set.model, device_set)
     else:
         raise_value_error("Input device_set is not a DeviceSet!: %s" % str(device_set))
     if isinstance(name, string_types):
         self.name = name
     self.update_model()
     return self
 def _assert_connection_populations(self, connection):
     # This method will make sure that there are source and target user inputs for every population connection
     # and that every source/target population is already among the populations to be generated.
     for pop in ["source", "target"]:
         pops_labels = connection.get(pop, None)
         if pops_labels is None:
             raise_value_error("No %s population in connection!:\n%s" %
                               (pop, str(connection)))
         for pop_lbl in ensure_list(pops_labels):
              assert pop_lbl in self.populations_labels, \
                  "Population %s is not among the populations to be generated!" % pop_lbl
 def from_device_set(self, device_set, tvb_sv_id=0, name=None):
     # Generate the interface from a DeviceSet (that corresponds to a collection of devices => proxy-nodes)
     if isinstance(device_set, DeviceSet):
         super(TVBtoSpikeNetDeviceInterface,
               self).__init__(device_set.name, device_set.model, device_set)
     else:
         raise_value_error("Input device_set is not a DeviceSet!: %s" %
                           str(device_set))
     self.tvb_sv_id = tvb_sv_id
     if isinstance(name, string_types):
         self.name = name
     self.update_model()
     return self
 def _assert_conn_params_shape(p, p_name, shape):
     if isinstance(p, dict):
         return np.tile(p, shape)
     elif not isinstance(p, np.ndarray):
         p = np.array(p)
      if p.shape != shape:
          if p.size == 1:
              return np.tile(p, shape)
          else:
              raise_value_error(
                  "Device %s parameters are neither of shape (n_devices, n_nodes) = %s "
                  "nor of size 1:\n%s" % (p_name, str(shape), str(p)))
     return p
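# A minimal, runnable numpy sketch of the tiling rule above: a size-1 parameter
# is broadcast to the full (n_devices, n_nodes) shape (numpy only; the helper
# itself is assumed to be usable as a plain function):
import numpy as np
shape = (2, 3)                                  # (n_devices, n_nodes)
weight = np.array(1.0)                          # a size-1 parameter...
assert np.tile(weight, shape).shape == shape    # ...is tiled to the full shape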
    def concatenate(self, time_series_gen_or_seq, dim, **kwargs):
        out_time_series = None
        first = True
        for time_series in time_series_gen_or_seq:
            if first:
                out_time_series, select_funs = self.select(
                    time_series, **kwargs)
                dim_label = out_time_series.get_dimension_name(dim)
                first = False
            else:
                if np.float32(out_time_series.sample_period) != np.float32(
                        time_series.sample_period):
                    raise ValueError(
                        "Time series concatenation failed!\n"
                        "The input time series has a sample period %s different "
                        "from that of the concatenated ones, %s!" %
                        (str(np.float32(time_series.sample_period)),
                         str(np.float32(out_time_series.sample_period))))
                else:
                    time_series = self.select(time_series, select_funs)[0]
                    labels_dimensions = dict(out_time_series.labels_dimensions)
                    out_labels = out_time_series.get_dimension_labels(dim)
                    if out_labels is not None and len(
                            out_labels) == out_time_series.shape[dim]:
                        time_series_labels = time_series.get_dimension_labels(
                            dim)
                        if time_series_labels is not None and len(
                                time_series_labels) == time_series.shape[dim]:
                            labels_dimensions[dim_label] = \
                                np.array(ensure_list(out_labels) + ensure_list(time_series_labels))
                        else:
                            del labels_dimensions[dim_label]
                            warning(
                                "Dimension labels for dimensions %s cannot be concatenated! "
                                "Deleting them!" % dim_label)
                    try:
                        out_data = np.concatenate(
                            [out_time_series.data, time_series.data], axis=dim)
                    except Exception:
                        raise_value_error(
                            "Timeseries concatenation failed!\n"
                            "Timeseries have a shape %s and the concatenated ones %s!"
                            % (str(out_time_series.shape),
                               str(time_series.shape)))
                    out_time_series = out_time_series.duplicate(
                        data=out_data, labels_dimensions=labels_dimensions)
        if out_time_series is None:
            raise_value_error("Cannot concatenate empty list of TimeSeries!")

        return out_time_series
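# The core of the concatenation above as a runnable numpy sketch: data arrays are
# joined along the chosen dimension and that dimension's labels are concatenated
# alongside (actual TimeSeries objects are assumed elsewhere in the module):
import numpy as np
a = np.random.rand(10, 2)                    # (time, region)
b = np.random.rand(10, 3)
data = np.concatenate([a, b], axis=1)        # -> shape (10, 5)
region_labels = np.array(["r0", "r1"] + ["r2", "r3", "r4"])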
def create_device(device_model, params=None, config=CONFIGURED, nest_instance=None, **kwargs):
    """Method to create a NESTDevice.
       Arguments:
        device_model: name (string) of the device model
        params: dictionary of parameters of device and/or its synapse. Default = None
        config: configuration class instance. Default: imported default CONFIGURED object.
        nest_instance: the NEST instance.
                       Default = None, in which case one is loaded here and also returned in the output.
       Returns:
        the NESTDevice class, and optionally, the NEST instance if it is loaded here.
    """
    if nest_instance is None:
        nest_instance = load_nest(config=config)
        return_nest = True
    else:
        return_nest = False
    # Assert the model name...
    device_model = device_to_dev_model(device_model)
    label = kwargs.pop("label", "")
    if device_model in NESTInputDeviceDict.keys():
        devices_dict = NESTInputDeviceDict
        default_params = deepcopy(config.NEST_INPUT_DEVICES_PARAMS_DEF.get(device_model, {}))
    elif device_model in NESTOutputDeviceDict.keys():
        devices_dict = NESTOutputDeviceDict
        default_params = deepcopy(config.NEST_OUTPUT_DEVICES_PARAMS_DEF.get(device_model, {}))
    else:
        raise_value_error("%s is neither one of the available input devices: %s\n "
                          "nor of the output ones: %s!" %
                          (device_model, str(config.NEST_INPUT_DEVICES_PARAMS_DEF),
                           str(config.NEST_OUTPUT_DEVICES_PARAMS_DEF)))
    default_params["label"] = label
    if isinstance(params, dict) and len(params) > 0:
        default_params.update(params)
    if device_model in NESTInputDeviceDict.keys():
        label = default_params.pop("label", label)
    else:
        label = default_params.get("label", label)
    # TODO: a better solution for the strange error with inhomogeneous poisson generator
    try:
        nest_device_id = nest_instance.Create(device_model, params=default_params)
    except Exception:
        warning("Using temporary hack for creating successive %s devices!" % device_model)
        nest_device_id = nest_instance.Create(device_model, params=default_params)
    default_params["label"] = label
    nest_device = devices_dict[device_model](nest_device_id, nest_instance, **default_params)
    if return_nest:
        return nest_device, nest_instance
    else:
        return nest_device
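# Hypothetical usage sketch, assuming NEST and these wrappers are installed;
# when nest_instance is None, the loaded NEST instance is returned as well:
#
#     stimulus, nest = create_device("poisson_generator", params={"rate": 100.0}, label="stim")
#     recorder = create_device("spike_recorder", nest_instance=nest)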
def create_device(device_model, params=None, config=CONFIGURED, annarchy_instance=None, **kwargs):
    """function to create an ANNarchyInputDevice or ANNarchyOutputDevice.
       The device will be only created for ANNarchyOutputDevice and also populated for ANNarchyInputDevice.
       Arguments:
        device_model: name (string) of the device model
        params: dictionary of parameters of device and/or its synapse. Default = None
        config: configuration class instance. Default: imported default CONFIGURED object.
        annarchy_instance: the ANNarchy instance.
                           Default = None, in which case one is loaded here and also returned in the output.
       Returns:
        the ANNarchyDevice class, and optionally, the ANNarchy instance if it is loaded here.
    """
    if annarchy_instance is None:
        annarchy_instance = load_annarchy(config=config)
        return_annarchy = True
    else:
        return_annarchy = False
    # Figure out if this is an input or an output device:
    label = kwargs.pop("label", "")
    # Get the default parameters for this device...
    if device_model in ANNarchyInputDeviceDict.keys():
        devices_dict = ANNarchyInputDeviceDict
        default_params = deepcopy(config.ANNARCHY_INPUT_DEVICES_PARAMS_DEF.get(device_model, {}))
        if len(label):
            default_params["name"] = label
    elif device_model in ANNarchyOutputDeviceDict.keys():
        devices_dict = ANNarchyOutputDeviceDict
        default_params = deepcopy(config.ANNARCHY_OUTPUT_DEVICES_PARAMS_DEF.get(device_model, {}))
    else:
        raise_value_error("%s is neither one of the available input devices: %s\n "
                          "nor of the output ones: %s!" %
                          (device_model, str(config.ANNARCHY_INPUT_DEVICES_PARAMS_DEF),
                           str(config.ANNARCHY_OUTPUT_DEVICES_PARAMS_DEF)))
    # ...and update them with any user provided parameters
    if isinstance(params, dict) and len(params) > 0:
        default_params.update(params)
        label = default_params.get("name", default_params.pop("label", label))
    # Create the ANNarchy Device class:
    annarchy_device = devices_dict[device_model](None, label=label, annarchy_instance=annarchy_instance)
    if isinstance(annarchy_device, ANNarchyInputDevice):
        # If it is an input device, populate it:
        annarchy_device = create_input_device(annarchy_device,
                                              kwargs.get("import_path", config.MYMODELS_IMPORT_PATH),
                                              deepcopy(default_params))
    annarchy_device.params = deepcopy(default_params)
    if return_annarchy:
        return annarchy_device, annarchy_instance
    else:
        return annarchy_device
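# Hypothetical usage sketch, assuming ANNarchy is installed and that
# "PoissonPopulation" is a key of ANNarchyInputDeviceDict; an input device is
# both created and populated, and the instance is returned when loaded here:
#
#     stimulus, annarchy = create_device("PoissonPopulation",
#                                        params={"rates": 10.0}, label="stim")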
 def _get_minmax_delay(self, delay, minmax):
     """A method to get the minimum or maximum delay from a distribution dictionary."""
     if isinstance(delay, dict):
         if "distribution" in delay.keys():
             if delay["distribution"] == "uniform":
                 return delay[minmax]
             else:
                 raise_value_error(
                     "Only uniform distribution is allowed for delays to make sure that > min_delay!\n"
                     "Distribution given is %s!" % delay["distribution"])
         else:
             raise_value_error(
                 "If delay is a dictionary it has to be a distribution dictionary!\n"
                 "Instead, the delay given is %s\n" % str(delay))
     else:
         return delay
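# The dictionary convention the method above expects, as a short sketch:
#
#     delay = {"distribution": "uniform", "low": 0.1, "high": 1.0}
#     self._get_minmax_delay(delay, "low")    # -> 0.1
#     self._get_minmax_delay(1.5, "high")     # scalar delays pass through -> 1.5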
 def build_interface(self, interface, interface_id):
     # One interface for every combination of Spiking node
     # and TVB state variable to be transmitted
     # from TVB to Spiking Network
     connections = interface["connections"]
     if isinstance(connections, string_types):
         connections = {connections: slice(None)}  # return all population types
     default_parameter = self._build_target_class._available_input_parameters[interface["model"]]
     spiking_nodes_ids = interface.get("nodes", self.spiking_nodes_ids)
     if spiking_nodes_ids is None:
         spiking_nodes_ids = self.spiking_nodes_ids
     spiking_nodes_ids = list(spiking_nodes_ids)
     if self.exclusive_nodes:
          assert all(spiking_node not in self.tvb_nodes_ids for spiking_node in spiking_nodes_ids)
     interface_weight_fun = property_to_fun(interface.get("interface_weights", 1.0))
     # Default behavior for any region node and any combination of populations
     # is to target all of their neurons:
     neurons_inds_fun = interface.pop("neurons_inds", None)
     if neurons_inds_fun is not None:
         neurons_inds_fun = property_to_fun(neurons_inds_fun)
     shape = (len(spiking_nodes_ids),)
     interface_weights = np.ones(shape).astype("O")
     neurons_inds = np.tile([None], shape).astype("O")
     for i_node, spiking_node_id in enumerate(spiking_nodes_ids):
         interface_weights[i_node] = interface_weight_fun(spiking_node_id)
         if neurons_inds_fun is not None:
              neurons_inds[i_node] = \
                  lambda neurons_inds, node_id=spiking_node_id: neurons_inds_fun(node_id, neurons_inds)
     tvb_to_spikeNet_interfaces = Series()
     for name, populations in connections.items():
         try:
             tvb_coupling_id = self.tvb_model.cvar.tolist().index(
                 self.tvb_model.state_variables.index(name))
          except Exception:
             raise_value_error("Failed to compute the coupling index of TVB state variable %s!" % name)
         interface_index = "%d_%s->%s" % (interface_id, name, str(populations))
         tvb_to_spikeNet_interfaces[interface_index] = \
             self._build_target_class(self.spiking_network, name, interface["model"],
                                      interface.get("parameter", default_parameter),
                                      tvb_coupling_id, spiking_nodes_ids, interface_weights)
         for i_node in spiking_nodes_ids:
             node = self.spiking_network.brain_regions[self.spiking_nodes_ids.index(i_node)]
             tvb_to_spikeNet_interfaces[interface_index][node.label] = node[ensure_list(populations)]
      return tvb_to_spikeNet_interfaces
def node_key_index_and_label(node, labels):
    if isinstance(node, string_types):
        try:
            i_node = labels.index(node)
            label = node
            node_key = "%d-%s" % (i_node, node)
        except Exception:
            raise_value_error(
                "Node %s is not a region node modeled in Spiking Simulator!" %
                node)
    else:
        try:
            label = labels[node]
            i_node = node
            node_key = "%d-%s" % (node, label)
        except Exception:
            raise_value_error(
                "Node %d is not a region node modeled in Spiking Simulator!" %
                node)
    return node_key, i_node, label
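# A short sketch of both lookup directions (the labels are illustrative):
#
#     labels = ["region_A", "region_B"]
#     node_key_index_and_label("region_B", labels)   # -> ("1-region_B", 1, "region_B")
#     node_key_index_and_label(0, labels)            # -> ("0-region_A", 0, "region_A")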
def connect_device(nest_device, population, neurons_inds_fun, weight=1.0, delay=0.0, receptor_type=0,
                   nest_instance=None, config=CONFIGURED, **kwargs):
    """This method connects a NESTDevice to a NESTPopulation instance.
       Arguments:
        nest_device: the NESTDevice instance
        population: the NESTPopulation instance
        neurons_inds_fun: a function returning the target NESTPopulation, or a subset thereof.
                          Default = None.
        weight: the weights of the connection. Default = 1.0.
        delay: the delays of the connection. Default = 0.0.
        receptor_type: type of the synaptic receptor. Default = 0.
        config: configuration class instance. Default: imported default CONFIGURED object.
        nest_instance: instance of NEST. Default = None, in which case the one of the nest_device is used.
       Returns:
        the connected NESTDevice
    """
    if receptor_type is None:
        receptor_type = 0
    if nest_instance is None:
        raise_value_error("There is no NEST instance!")
    resolution = nest_instance.GetKernelStatus("resolution")
    if isinstance(delay, dict):
        if delay["low"] < resolution:
            delay["low"] = resolution
            warning("Minimum delay %f is smaller than the NEST simulation resolution %f!\n"
                    "Setting minimum delay equal to resolution!" % (delay["low"], resolution))
        if delay["high"] <= delay["low"]:
            raise_value_error("Maximum delay %f is not smaller than minimum one %f!" % (delay["high"], delay["low"]))
    else:
        if delay < resolution:
            delay = resolution
            warning("Delay %f is smaller than the NEST simulation resolution %f!\n"
                    "Setting minimum delay equal to resolution!" % (delay, resolution))
    syn_spec = {"weight": weight, "delay": delay, "receptor_type": receptor_type}
    neurons = get_populations_neurons(population, neurons_inds_fun)
    if nest_device.model == "spike_recorder":
        #                     source  ->  target
        nest_instance.Connect(neurons, nest_device.device, syn_spec=syn_spec)
    else:
        nest_instance.Connect(nest_device.device, neurons, syn_spec=syn_spec)
    return nest_device
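# Hypothetical wiring sketch (nest, population and the devices from the
# create_device example above are assumed): spike recorders are connected as
# targets, all other devices as sources:
#
#     connect_device(stimulus, population, None, weight=1.0, delay=1.0,
#                    receptor_type=0, nest_instance=nest)
#     connect_device(recorder, population, None, nest_instance=nest)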
 def _assert_synapse_model(self, synapse_model, delay):
     """A method to assert the synapse_model (default = "static_synapse), in combination with the delay value.
        It is based on respecting the fact that rate_connection_instantaneous requires a delay of zero.
     """
     if synapse_model is None:
         synapse_model = "static_synapse"
     if synapse_model.find("rate") > -1:
         if synapse_model == "rate_connection_instantaneous" and delay != 0.0:
             raise_value_error(
                 "Coupling neurons with rate_connection_instantaneous synapse "
                 "and delay = %s != 0.0 is not possible!" % str(delay))
          elif synapse_model == "rate_connection_delayed" and self._get_min_delay(delay) <= 0.0:
             raise_value_error(
                 "Coupling neurons with rate_connection_delayed synapse "
                 "and delay = %s <= 0.0 is not possible!" % str(delay))
         elif self._get_max_delay(delay) == 0.0:
             return "rate_connection_instantaneous"
         else:
             return "rate_connection_delayed"
     else:
         return synapse_model
 def _assert_delay(self, delay, synapse_model="static_synapse"):
     """A method to assert the delay value, in combination with the synapse_model.
        It is based on respecting the minimum possible delay of the network,
        as well as the fact that rate_connection_instantaneous requires a delay of zero.
     """
     if synapse_model.find("rate") > -1:
         if synapse_model == "rate_connection_instantaneous" and delay != 0.0:
             raise_value_error(
                 "Coupling neurons with rate_connection_instantaneous synapse "
                 "and delay = %s != 0.0 is not possible!" % str(delay))
         elif synapse_model == "rate_connection_delayed" and self._get_min_delay(
                 delay) <= 0.0:
             raise_value_error(
                 "Coupling neurons with rate_connection_delayed synapse "
                 "and delay = %s <= 0.0 is not possible!" % str(delay))
         elif self._get_min_delay(delay) < 0.0:
             raise_value_error(
                 "Coupling rate neurons with negative delay = %s < 0.0 is not possible!"
                 % str(delay))
     elif self._get_min_delay(delay) < self.spiking_dt:
          raise_value_error(
              "Coupling spiking neurons with delay = %s < NEST integration step = %f is not possible!"
              % (str(delay), self.spiking_dt))
     return delay
def assert_arrays(params, shape=None, transpose=False):
    if shape is None or \
            not (isinstance(shape, tuple)
                 and len(shape) in range(3) and np.all([isinstance(s, (int, np.integer)) for s in shape])):
        shape = None
        shapes = []  # list of all unique shapes
        n_shapes = []  # list of all unique shapes' frequencies
        size = 0  # initial shape
    else:
        size = shape_to_size(shape)

    for ip in range(len(params)):
        # Convert all accepted types to np arrays:
        if isinstance(params[ip], np.ndarray):
            pass
        elif isinstance(params[ip], (list, tuple)):
            # assuming a list or tuple of symbols...
            params[ip] = np.array(params[ip]).astype(type(params[ip][0]))
        elif is_numeric(params[ip]):
            params[ip] = np.array(params[ip])
        else:
            try:
                import sympy
            except ImportError:
                raise_import_error("sympy import failed")
            if isinstance(params[ip], tuple(sympy.core.all_classes)):
                params[ip] = np.array(params[ip])
            else:
                raise_value_error("Input " + str(params[ip]) + " of type " + str(type(params[ip])) + " is not numeric, "
                                                                                                     "of type np.ndarray, nor Symbol")
        if shape is None:
            # Only one size > 1 is acceptable
            if params[ip].size != size:
                if size > 1 and params[ip].size > 1:
                    raise_value_error("Inputs are of at least two distinct sizes > 1")
                elif params[ip].size > size:
                    size = params[ip].size
            # Construct a kind of histogram of all different shapes of the inputs:
            ind = np.array([(x == params[ip].shape) for x in shapes])
            if np.any(ind):
                ind = np.where(ind)[0]
                # TODO: handle this properly
                n_shapes[int(ind)] += 1
            else:
                shapes.append(params[ip].shape)
                n_shapes.append(1)
        else:
            if params[ip].size > size:
                raise_value_error("At least one input is of a greater size than the one given!")

    if shape is None:
        # Keep only shapes of the correct size
        ind = np.array([shape_to_size(s) == size for s in shapes])
        shapes = np.array(shapes)[ind]
        n_shapes = np.array(n_shapes)[ind]
        # Find the most frequent shape
        ind = np.argmax(n_shapes)
        shape = tuple(shapes[ind])

    if transpose and len(shape) > 1:
        if (transpose in ("horizontal", "row") and shape[0] > shape[1]) or \
                (transpose in ("vertical", "column") and shape[0] < shape[1]):
            shape = (shape[1], shape[0])

    # Now reshape or tile when necessary
    for ip in range(len(params)):
        try:
            if params[ip].shape != shape:
                if params[ip].size in [0, 1]:
                    params[ip] = np.tile(params[ip], shape)
                else:
                    params[ip] = np.reshape(params[ip], shape)
        except Exception:
            logger.warning("Failed to reshape or tile input %s to shape %s!"
                           % (str(params[ip]), str(shape)))

    if len(params) == 1:
        return params[0]
    else:
        return tuple(params)
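# A short sketch of the common-shape behavior above (is_numeric and
# shape_to_size are assumed importable from the same module):
#
#     a, b = assert_arrays([np.array([1.0, 2.0, 3.0]), 5.0])
#     # a -> array([1., 2., 3.]) ; b is tiled to the winning shape -> array([5., 5., 5.])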
def assert_equal_objects(obj1, obj2, attributes_dict=None, logger=None):
    def print_not_equal_message(attr, field1, field2, logger):
        # logger.error("\n\nValueError: Original and read object field "+ attr + " not equal!")
        # raise_value_error("\n\nOriginal and read object field " + attr + " not equal!")
        warning("Original and read object field " + attr + " not equal!" +
                "\nOriginal field:\n" + str(field1) +
                "\nRead object field:\n" + str(field2), logger)

    if isinstance(obj1, dict):
        get_field1 = lambda obj, key: obj[key]
        if not (isinstance(attributes_dict, dict)):
            attributes_dict = dict()
            for key in obj1.keys():
                attributes_dict.update({key: key})
    elif isinstance(obj1, (list, tuple)):
        get_field1 = lambda obj, key: get_list_or_tuple_item_safely(obj, key)
        indices = range(len(obj1))
        attributes_dict = dict(zip([str(ind) for ind in indices], indices))
    else:
        get_field1 = lambda obj, attribute: getattr(obj, attribute)
        if not (isinstance(attributes_dict, dict)):
            attributes_dict = dict()
            for key in obj1.__dict__.keys():
                attributes_dict.update({key: key})
    if isinstance(obj2, dict):
        get_field2 = lambda obj, key: obj.get(key, None)
    elif isinstance(obj2, (list, tuple)):
        get_field2 = lambda obj, key: get_list_or_tuple_item_safely(obj, key)
    else:
        get_field2 = lambda obj, attribute: getattr(obj, attribute, None)

    equal = True
    for attribute in attributes_dict:
        field1 = get_field1(obj1, attributes_dict[attribute])
        field2 = get_field2(obj2, attributes_dict[attribute])
        try:
            # TODO: a better hack for the stupid case of an ndarray of a string, such as model.zmode or pmode
            # For non numeric types
            if isinstance(field1, string_types) or isinstance(field1, list) or isinstance(field1, dict) \
                    or (isinstance(field1, np.ndarray) and field1.dtype.kind in 'OSU'):
                if np.any(field1 != field2):
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
            # For numeric numpy arrays:
            elif isinstance(field1, np.ndarray) and not field1.dtype.kind in 'OSU':
                # TODO: handle better accuracy differences, empty matrices and complex numbers...
                if field1.shape != field2.shape:
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
                elif np.any(np.abs(np.float32(field1) - np.float32(field2)) > 0):
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
            # For numeric scalar types
            elif is_numeric(field1):
                if np.abs(np.float32(field1) - np.float32(field2)) > 0:
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
            else:
                equal = assert_equal_objects(field1, field2, logger=logger)
        except Exception:
            try:
                warning("Comparing str(objects) for field "
                        + str(attributes_dict[attribute]) + " because there was an error!", logger)
                if np.any(str(field1) != str(field2)):
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
            except Exception:
                raise_value_error("ValueError: Something went wrong when trying to compare "
                                  + str(attributes_dict[attribute]) + " !", logger)

    return equal
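# A short usage sketch: fields are compared pairwise and any mismatch is warned
# about rather than raised, so the boolean return value carries the verdict:
#
#     ok = assert_equal_objects({"a": 1, "w": np.ones(3)},
#                               {"a": 1, "w": np.ones(3)})   # -> True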
 def build_interface(self, interface, interface_id):
     # One SpikeNet stimulation device for every combination of
     # TVB node and state variable to be transmitted from TVB to SpikeNet
     source_tvb_nodes = interface.pop("source_nodes", self.tvb_nodes_ids)
     if source_tvb_nodes is None:
         source_tvb_nodes = self.tvb_nodes_ids
     source_tvb_nodes = list(source_tvb_nodes)
     target_nodes = interface.pop("target_nodes", self.spiking_nodes_ids)
     if target_nodes is None:
         target_nodes = self.spiking_nodes_ids
     target_nodes = list(target_nodes)
     if self.exclusive_nodes:
         # TODO: decide about the following: can a TVB node be updated from a SpikeNet node via a SpikeNet -> TVB interface,
         # get simulated in TVB and again update SpikeNet via a TVB -> SpikeNet interface?
         # Will it depend on whether there is also a direct coupling of that SpikeNet node with other SpikeNet nodes?
          assert all(node not in self.tvb_nodes_ids for node in target_nodes)
          assert all(node not in self.spiking_nodes_ids for node in source_tvb_nodes)
     # Properties set as functions
     interface_weight_fun = property_to_fun(interface.pop("interface_weights", 1.0))
     interface_weights = np.ones((len(source_tvb_nodes),)).astype("f")
     weight_fun = property_to_fun(interface.pop("weights", self.default_connection["weight"]))
     delay_fun = property_to_fun(interface.pop("delays", self.default_connection["delay"]))
     receptor_type_fun = property_to_fun(interface.get("receptor_type",
                                                       self.default_connection["receptor_type"]))
     # Default behavior for any combination of region nodes and populations
     # is to target all of their neurons:
     neurons_inds_fun = interface.pop("neurons_inds", None)
     if neurons_inds_fun is not None:
         neurons_inds_fun = property_to_fun(neurons_inds_fun)
     # TODO: Find a way to change self directed weights in cases of non exclusive TVB and Spiking Network nodes!
     # Defaults just follow TVB connectivity
     weights = np.array(self.tvb_weights[source_tvb_nodes][:, target_nodes]).astype("O")
     delays = np.array(self.tvb_delays[source_tvb_nodes][:, target_nodes]).astype("O")
     shape = delays.shape
     receptor_type = np.tile(self.default_connection["receptor_type"], shape).astype("O")
     neurons_inds = np.tile([None], shape).astype("O")
     device_names = []
     # Apply now possible functions per source and target region node:
     for src_node in source_tvb_nodes:
         i_src = np.where(self.tvb_nodes_ids == src_node)[0][0]
         interface_weights[i_src] = interface_weight_fun(src_node)
         device_names.append(self.node_labels[src_node])
         for i_trg, trg_node in enumerate(target_nodes):
             weights[i_src, i_trg] = weight_fun(src_node, trg_node)
             delays[i_src, i_trg] = delay_fun(src_node, trg_node)
             receptor_type[i_src, i_trg] = receptor_type_fun(src_node, trg_node)
             if neurons_inds_fun is not None:
                  neurons_inds[i_src, i_trg] = \
                      lambda neurons_inds, src=src_node, trg=trg_node: neurons_inds_fun(src, trg, neurons_inds)
     interface["names"] = device_names
     interface["weights"] = weights
     interface["delays"] = delays
     interface["receptor_type"] = receptor_type
     interface["neurons_inds"] = neurons_inds
     interface["nodes"] = [np.where(self.spiking_nodes_ids == trg_node)[0][0] for trg_node in target_nodes]
     # Generate the devices => "proxy TVB nodes":
     device_set = self.build_and_connect_devices([interface], self.spiking_network.brain_regions)[0]
     tvb_to_spikeNet_interface = Series()
     try:
         # The TVB state variable index linked to the interface to build
         tvb_sv_id = self.tvb_model.state_variables.index(device_set.name)
      except Exception:
          raise_value_error("Interface with %s doesn't correspond to a TVB state variable!" % device_set.name)
     try:
         interface_builder = self._available_input_device_interfaces[device_set.model]
      except Exception:
         raise_value_error("Interface model %s is not supported yet!" % device_set.model)
     interface_index = "%d_%s->%s" % (interface_id, device_set.name, str(list(interface["connections"].values())[0]))
     tvb_to_spikeNet_interface[interface_index] = \
         interface_builder(self.spiking_network,
                           nodes_ids=source_tvb_nodes,
                           target_nodes=target_nodes,
                           scale=interface_weights,
                           dt=self.tvb_dt).from_device_set(device_set, tvb_sv_id, device_set.name)
     return tvb_to_spikeNet_interface
def connect_input_device(annarchy_device, population, neurons_inds_fun=None,
                         weight=1.0, delay=0.0, receptor_type="exc",
                         import_path=CONFIGURED.MYMODELS_IMPORT_PATH):
    """This function connect an ANNarchyInputDevice to an ANNarchyPopulation instance.
       Arguments:
        annarchy_device: the ANNarchyInputDevice instance
        population: the ANNarchyPopulation instance
        neurons_inds_fun: a function to return an ANNarchy PopulationView of the target population. Default = None.
        weight: the weights of the connection. Default = 1.0
        delay: the delays of the connection. Default = 0.0
        receptor_type: type of the synapse (target in ANNarchy). Default = "exc"
        import_path: the path to be possibly searched to import the model. Default is taken from CONFIGURED
       Returns:
        the connected ANNarchyInputDevice
    """
    neurons = get_populations_neurons(population, neurons_inds_fun)
    # TODO: What should we do with this checking for the delay in ANNarchy?
    # resolution = annarchy_instance.dt()
    # if hasattr(delay, "min"):  # In case it is an ANNarchy distribution class
    #     if delay.min < resolution:
    #         delay.min = resolution
    #         warning("Minimum delay %f is smaller than the NEST simulation resolution %f!\n"
    #                 "Setting minimum delay equal to resolution!" % (delay.min, resolution))
    #     if delay.max <= delay.min:
    #         raise_value_error("Maximum delay %f is not smaller than minimum one %f!" % (delay.max, delay.min))
    # else:
    #     if delay < resolution:
    #         delay = resolution
    #         warning("Delay %f is smaller than the NEST simulation resolution %f!\n"
    #                 "Setting minimum delay equal to resolution!" % (delay, resolution))

    connection_args = {}
    source_view_fun = None
    if annarchy_device.number_of_devices_neurons == 0:
        raise_value_error("There is no input device population of neurons in device of model %s with label %s!"
                          % (annarchy_device.model, annarchy_device.label))
    elif annarchy_device.number_of_devices_neurons == 1:
        # A single input stimulating all target neurons
        connect_method = "all_to_all"
    elif annarchy_device.number_of_devices_neurons == neurons.size:
        # Inputs are equal to target neurons, therefore connecting with one_to_one,
        # no matter if there are already other connections.
        connect_method = "one_to_one"
    elif annarchy_device.number_of_devices_neurons < neurons.size:
        # This is the case where there are less total input neurons than target ones:
        connect_method = "fixed_number_pre"
        connection_args["number"] = annarchy_device.number_of_devices_neurons
        warning("Device of model %s with label %s:\n"
                "The number of device's population neurons %d > 1 "
                "is smaller than the number %d of the target neurons of population:\n%s"
                "\nConnecting with method 'connect_fixed_number_pre' with number = %d"
                % (annarchy_device.model, annarchy_device.label, annarchy_device.number_of_devices_neurons,
                   neurons.size, str(population), annarchy_device.number_of_devices_neurons))
    else:  # These are the cases where there are more total input neurons than target ones:
        connect_method = "one_to_one"  # for all cases below
        # The number of input neurons not yet connected:
        number_of_available_connections = \
            annarchy_device.number_of_neurons - annarchy_device.number_of_connected_neurons
        if number_of_available_connections < neurons.size:
            # TODO: think more about this: error, fixed_number_pre or overlapping?
            # If the remaining available neurons are not enough,
            # reuse some of the already connected ones with a partial overlap:
            source_view_fun = lambda _population: _population[-neurons.size:]
            warning("Device of model %s with label %s:\n"
                    "The number of device's population neurons available for connections, %d, "
                    "is smaller than the number %d of the target neurons of population:\n%s"
                    "\nConnecting with method 'one_to_one' using the last %d neurons, "
                    "with an overlap of %d neurons!"
                    % (annarchy_device.model, annarchy_device.label, number_of_available_connections,
                       neurons.size, str(population), neurons.size, neurons.size - number_of_available_connections))
        else:
            # If the remaining available neurons are enough, just get the first available ones:
            source_view_fun = lambda _population: \
                                        _population[annarchy_device.number_of_connected_neurons :
                                                    annarchy_device.number_of_connected_neurons + neurons.size]

    synapse = annarchy_device.params.get("synapse", None)
    if synapse is not None:
        synapse = assert_model(synapse, annarchy_device.annarchy_instance, import_path)
    synapse_params = annarchy_device.params.get("synapse_params", {})
    proj = connect_two_populations(annarchy_device, population, weight, delay, receptor_type, synapse_params,
                                   source_view_fun=source_view_fun, target_view_fun=neurons_inds_fun,
                                   synapse=synapse, method=connect_method,
                                   annarchy_instance=annarchy_device.annarchy_instance, **connection_args)
    # Add this projection to the source device's and target population's inventories:
    annarchy_device.projections_pre.append(proj)
    population.projections_post.append(proj)
    # Update the number of connected neurons to the device:
    annarchy_device._number_of_connections = annarchy_device.get_number_of_connections()
    annarchy_device._number_of_neurons = annarchy_device.get_number_of_neurons()
    return annarchy_device
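# Hypothetical usage sketch (the device and population objects are assumed to
# exist); the connection method is picked from the device/target neuron counts
# as described above:
#
#     connect_input_device(stimulus, population, neurons_inds_fun=None,
#                          weight=1.0, delay=0.5, receptor_type="exc")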