def print_not_equal_message(attr, field1, field2, logger):
    # logger.error("\n\nValueError: Original and read object field "+ attr + " not equal!")
    # raise_value_error("\n\nOriginal and read object field " + attr + " not equal!")
    warning(
        "Original and read object field " + attr + " not equal!" +
        "\nOriginal field:\n" + str(field1) + "\nRead object field:\n" +
        str(field2), logger)
    def concatenate(self, time_series_gen_or_seq, dim, **kwargs):
        out_time_series = None
        first = True
        for time_series in time_series_gen_or_seq:
            if first:
                out_time_series, select_funs = self.select(
                    time_series, **kwargs)
                dim_label = out_time_series.get_dimension_name(dim)
                first = False
            else:
                if np.float32(out_time_series.sample_period) != np.float32(
                        time_series.sample_period):
                    raise ValueError(
                        "Timeseries concatenation failed!\n"
                        "Timeseries have a different time step %s \n "
                        "than the concatenated ones %s!" %
                        (str(np.float32(time_series.sample_period)),
                         str(np.float32(out_time_series.sample_period))))
                else:
                    time_series = self.select(time_series, select_funs)[0]
                    labels_dimensions = dict(out_time_series.labels_dimensions)
                    out_labels = out_time_series.get_dimension_labels(dim)
                    if out_labels is not None and len(
                            out_labels) == out_time_series.shape[dim]:
                        time_series_labels = time_series.get_dimension_labels(
                            dim)
                        if time_series_labels is not None and len(
                                time_series_labels) == time_series.shape[dim]:
                            labels_dimensions[dim_label] = \
                                np.array(ensure_list(out_labels) + ensure_list(time_series_labels))
                        else:
                            del labels_dimensions[dim_label]
                            warning(
                                "Dimension labels for dimensions %s cannot be concatenated! "
                                "Deleting them!" % dim_label)
                    try:
                        out_data = np.concatenate(
                            [out_time_series.data, time_series.data], axis=dim)
                    except Exception:
                        raise_value_error(
                            "Timeseries concatenation failed!\n"
                            "Timeseries have a shape %s and the concatenated ones %s!"
                            % (str(out_time_series.shape),
                               str(time_series.shape)))
                    out_time_series = out_time_series.duplicate(
                        data=out_data, labels_dimensions=labels_dimensions)
        if out_time_series is None:
            raise_value_error("Cannot concatenate empty list of TimeSeries!")

        return out_time_series
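# A standalone sketch of the core operation in the concatenate() method above (not the
# method itself): concatenate two data blocks along a chosen dimension and merge their
# dimension labels. All shapes and label names below are illustrative.
import numpy as np

dim = 2
data1 = np.random.rand(10, 1, 3, 1)   # e.g. (time, variables, regions, samples)
data2 = np.random.rand(10, 1, 2, 1)   # same shape except along the concatenated dimension
labels1 = ["r0", "r1", "r2"]
labels2 = ["r3", "r4"]
out_data = np.concatenate([data1, data2], axis=dim)
out_labels = np.array(labels1 + labels2)
assert out_data.shape[dim] == len(out_labels)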
def create_device(device_model, params=None, config=CONFIGURED, nest_instance=None, **kwargs):
    """Method to create a NESTDevice.
       Arguments:
        device_model: name (string) of the device model
        params: dictionary of parameters of device and/or its synapse. Default = None
        config: configuration class instance. Default: imported default CONFIGURED object.
        nest_instance: the NEST instance.
                       Default = None, in which case we are going to load one, and also return it in the output
       Returns:
        the NESTDevice instance, and optionally, the NEST instance if it is loaded here.
    """
    if nest_instance is None:
        nest_instance = load_nest(config=config)
        return_nest = True
    else:
        return_nest = False
    # Assert the model name...
    device_model = device_to_dev_model(device_model)
    label = kwargs.pop("label", "")
    if device_model in NESTInputDeviceDict.keys():
        devices_dict = NESTInputDeviceDict
        default_params = deepcopy(config.NEST_INPUT_DEVICES_PARAMS_DEF.get(device_model, {}))
    elif device_model in NESTOutputDeviceDict.keys():
        devices_dict = NESTOutputDeviceDict
        default_params = deepcopy(config.NEST_OUTPUT_DEVICES_PARAMS_DEF.get(device_model, {}))
    else:
        raise_value_error("%s is neither one of the available input devices: %s\n "
                          "nor of the output ones: %s!" %
                          (device_model, str(config.NEST_INPUT_DEVICES_PARAMS_DEF),
                           str(config.NEST_OUTPUT_DEVICES_PARAMS_DEF)))
    default_params["label"] = label
    if isinstance(params, dict) and len(params) > 0:
        default_params.update(params)
    if device_model in NESTInputDeviceDict.keys():
        label = default_params.pop("label", label)
    else:
        label = default_params.get("label", label)
    # TODO: a better solution for the strange error with inhomogeneous poisson generator
    try:
        nest_device_id = nest_instance.Create(device_model, params=default_params)
    except Exception:
        warning("Using temporary hack for creating successive %s devices!" % device_model)
        nest_device_id = nest_instance.Create(device_model, params=default_params)
    default_params["label"] = label
    nest_device = devices_dict[device_model](nest_device_id, nest_instance, **default_params)
    if return_nest:
        return nest_device, nest_instance
    else:
        return nest_device
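# A minimal usage sketch for create_device(), assuming a working NEST installation and the
# default CONFIGURED object; "poisson_generator" and its "rate" parameter are illustrative
# values that must be available in NESTInputDeviceDict / the NEST kernel. Executing this
# actually creates a device in the NEST kernel.
poisson_stim, nest = create_device("poisson_generator", params={"rate": 100.0}, label="stim")
# Since no nest_instance was passed, the loaded NEST instance is returned as well.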
    def _determine_datasets_and_attributes(self, object, datasets_size=None):
        datasets_dict = {}
        metadata_dict = {}
        groups_keys = []

        try:
            if isinstance(object, dict):
                dict_object = object
            elif hasattr(object, "to_dict"):
                dict_object = object.to_dict()
            else:
                dict_object = vars(object)
            for key, value in dict_object.items():
                if isinstance(value, numpy.ndarray):
                    # if value.size == 1:
                    #     metadata_dict.update({key: value})
                    # else:
                    datasets_dict.update({key: value})
                    # if datasets_size is not None and value.size == datasets_size:
                    #     datasets_dict.update({key: value})
                    # else:
                    #     if datasets_size is None and value.size > 0:
                    #         datasets_dict.update({key: value})
                    #     else:
                    #         metadata_dict.update({key: value})
                # TODO: check how this works! Be careful not to include lists and tuples if possible in tvb classes!
                elif isinstance(value, (list, tuple)):
                    warning(
                        "Writing %s %s to h5 file as a numpy array dataset !" %
                        (value.__class__, key), self.logger)
                    datasets_dict.update({key: numpy.array(value)})
                else:
                    if is_numeric(value) or isinstance(value, str):
                        metadata_dict.update({key: value})
                    elif callable(value):
                        metadata_dict.update({key: inspect.getsource(value)})
                    elif value is None:
                        continue
                    else:
                        groups_keys.append(key)
        except Exception:
            msg = "Failed to decompose group object: " + str(object) + "!"
            try:
                self.logger.info(str(object.__dict__))
            except Exception:
                msg += "\n It has no __dict__ attribute!"
            warning(msg, self.logger)

        return datasets_dict, metadata_dict, groups_keys
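# A standalone sketch of the same decomposition rule (independent of the class above):
# numpy arrays and lists/tuples become datasets, numeric/string values become metadata,
# callables are stored as their source code, and anything else is deferred to a nested group.
import inspect
import numpy

def split_for_h5(obj):
    datasets, metadata, groups = {}, {}, []
    items = obj.items() if isinstance(obj, dict) else vars(obj).items()
    for key, value in items:
        if isinstance(value, numpy.ndarray):
            datasets[key] = value
        elif isinstance(value, (list, tuple)):
            datasets[key] = numpy.array(value)
        elif isinstance(value, (bool, int, float, complex, str)):
            metadata[key] = value
        elif callable(value):
            metadata[key] = inspect.getsource(value)
        elif value is not None:
            groups.append(key)
    return datasets, metadata, groups

# Example: split_for_h5({"data": numpy.zeros(5), "title": "ts", "nested": {"a": 1}})
# returns ({"data": array([0., 0., 0., 0., 0.])}, {"title": "ts"}, ["nested"])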
def connect_two_populations(source_pop, target_pop, weights=1.0, delays=0.0, target="exc", params={},
                            source_view_fun=None, target_view_fun=None,
                            synapse=None, method="all_to_all", name=None,
                            annarchy_instance=None, **connection_args):
    """
    function to set up and connect a projection between two ANNarchyPopulations.
    Arguments:
      - souce_pop: The ANNarchyPopulation we want to connect from.
      - target_pop: The ANNarchyPopulation we want to connect to.
      - weights: the weights of the connection. Default = 1.0.
      - delays: the delays of the connection. Default = 0.0.
      - target: type of the synapse.. Default = "exc".
      - source_view_fun: a function to return an ANNarchy PopulationView of the source population.
                         Default = None.
      - target_view_fun: a function to return an ANNarchy PopulationView of the target population
                         Default = None.
      - synapse: an optional ANarchy.Synapse class, or a string of the name of a SpecificProjection class. Default=None.
      - params: optional dictionary of synaptic parameters. Default = {}
      - name: name of the projection
      - method: name of an ANNarchy connection method
      - **connection_args: depend on the chosen ANNarchy connection method
      Returns: the projection
      """
    # Create the projection first
    source_neurons = get_populations_neurons(source_pop, source_view_fun)
    target_neurons = get_populations_neurons(target_pop, target_view_fun)
    if name is None:
        name = "%s -> %s" % (source_pop.label, target_pop.label)
    if isinstance(synapse, string_types):
        # If this is a SpecificProjection, create it directly:
        proj = getattr(annarchy_instance, synapse)(source_neurons, target_neurons, target=target, name=name, **params)
    else:
        # Otherwise, create it via the Projection creator:
        proj = annarchy_instance.Projection(source_neurons, target_neurons, target=target, synapse=synapse, name=name)
        proj = set_model_parameters(proj, **params)
    # Build the connection:
    method = method.lower()
    if method == "current":
        warning("Ignoring weight and delay for connect_current method, for the connection %s -> %s!"
                % (source_pop.label, target_pop.label))
        proj = getattr(proj, "connect_" % method)(**connection_args)
    else:
        proj = getattr(proj, "connect_" + method)(weights=weights, delays=delays, **connection_args)
    return proj
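# A minimal usage sketch for connect_two_populations(), assuming ANNarchy is installed and
# two ANNarchyPopulation objects are already built; all names here are hypothetical.
def example_connect_exc_to_inh(pop_exc, pop_inh, annarchy_instance):
    """Hypothetical usage: connect an excitatory to an inhibitory population all-to-all."""
    return connect_two_populations(pop_exc, pop_inh, weights=0.5, delays=1.0, target="inh",
                                   method="all_to_all", annarchy_instance=annarchy_instance)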
def connect_device(nest_device, population, neurons_inds_fun, weight=1.0, delay=0.0, receptor_type=0,
                   nest_instance=None, config=CONFIGURED, **kwargs):
    """This method connects a NESTDevice to a NESTPopulation instance.
       Arguments:
        nest_device: the NESTDevice instance
        population: the NESTPopulation instance
        neurons_inds_fun: a function returning the target NESTPopulation or a subset of its neurons.
                          Default = None.
        weight: the weights of the connection. Default = 1.0.
        delay: the delays of the connection. Default = 0.0.
        receptor_type: type of the synaptic receptor. Default = 0.
        config: configuration class instance. Default: imported default CONFIGURED object.
        nest_instance: instance of NEST. It is required; an error is raised if it is None.
       Returns:
        the connected NESTDevice
    """
    if receptor_type is None:
        receptor_type = 0
    if nest_instance is None:
        raise_value_error("There is no NEST instance!")
    resolution = nest_instance.GetKernelStatus("resolution")
    if isinstance(delay, dict):
        if delay["low"] < resolution:
            delay["low"] = resolution
            warning("Minimum delay %f is smaller than the NEST simulation resolution %f!\n"
                    "Setting minimum delay equal to resolution!" % (delay["low"], resolution))
        if delay["high"] <= delay["low"]:
            raise_value_error("Maximum delay %f is not smaller than minimum one %f!" % (delay["high"], delay["low"]))
    else:
        if delay < resolution:
            delay = resolution
            warning("Delay %f is smaller than the NEST simulation resolution %f!\n"
                    "Setting minimum delay equal to resolution!" % (delay, resolution))
    syn_spec = {"weight": weight, "delay": delay, "receptor_type": receptor_type}
    neurons = get_populations_neurons(population, neurons_inds_fun)
    if nest_device.model == "spike_recorder":
        #                     source  ->  target
        nest_instance.Connect(neurons, nest_device.device, syn_spec=syn_spec)
    else:
        nest_instance.Connect(nest_device.device, neurons, syn_spec=syn_spec)
    return nest_device
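# A minimal usage sketch for connect_device(), assuming a NESTDevice wrapping a spike
# recorder, a NESTPopulation and a loaded NEST instance are already available; all names
# below are hypothetical.
def example_record_population(spike_recorder_device, population, nest):
    """Hypothetical usage: record all neurons of a population with default weight/delay."""
    return connect_device(spike_recorder_device, population, None,
                          weight=1.0, delay=1.0, receptor_type=0, nest_instance=nest)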
    def _write_dicts_at_location(self, datasets_dict, metadata_dict, location):
        for key, value in datasets_dict.items():
            try:
                try:
                    location.create_dataset(key, data=value)
                except Exception:
                    # numpy.str is deprecated; fall back to writing the value as a plain string:
                    location.create_dataset(key, data=str(value))
            except Exception:
                warning(
                    "Failed to write to %s dataset %s %s:\n%s !" %
                    (str(location), value.__class__, key, str(value)),
                    self.logger)

        for key, value in metadata_dict.items():
            try:
                location.attrs.create(key, value)
            except Exception:
                warning(
                    "Failed to write to %s attribute %s %s:\n%s !" %
                    (str(location), value.__class__, key, str(value)),
                    self.logger)
        return location
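# A standalone sketch of the same write pattern using h5py directly (hypothetical file name
# and values): array-like entries go to HDF5 datasets, scalars and strings go to attributes.
import h5py
import numpy

with h5py.File("example.h5", "w") as f:
    group = f.create_group("TimeSeries")
    group.create_dataset("data", data=numpy.zeros((10, 4)))
    group.attrs.create("sample_period", 0.1)
    group.attrs.create("title", "example")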
def assert_equal_objects(obj1, obj2, attributes_dict=None, logger=None):
    def print_not_equal_message(attr, field1, field2, logger):
        # logger.error("\n\nValueError: Original and read object field "+ attr + " not equal!")
        # raise_value_error("\n\nOriginal and read object field " + attr + " not equal!")
        warning("Original and read object field " + attr + " not equal!" +
                "\nOriginal field:\n" + str(field1) +
                "\nRead object field:\n" + str(field2), logger)

    if isinstance(obj1, dict):
        get_field1 = lambda obj, key: obj[key]
        if not (isinstance(attributes_dict, dict)):
            attributes_dict = dict()
            for key in obj1.keys():
                attributes_dict.update({key: key})
    elif isinstance(obj1, (list, tuple)):
        get_field1 = lambda obj, key: get_list_or_tuple_item_safely(obj, key)
        indices = range(len(obj1))
        attributes_dict = dict(zip([str(ind) for ind in indices], indices))
    else:
        get_field1 = lambda obj, attribute: getattr(obj, attribute)
        if not (isinstance(attributes_dict, dict)):
            attributes_dict = dict()
            for key in obj1.__dict__.keys():
                attributes_dict.update({key: key})
    if isinstance(obj2, dict):
        get_field2 = lambda obj, key: obj.get(key, None)
    elif isinstance(obj2, (list, tuple)):
        get_field2 = lambda obj, key: get_list_or_tuple_item_safely(obj, key)
    else:
        get_field2 = lambda obj, attribute: getattr(obj, attribute, None)

    equal = True
    for attribute in attributes_dict:
        # print attributes_dict[attribute]
        field1 = get_field1(obj1, attributes_dict[attribute])
        field2 = get_field2(obj2, attributes_dict[attribute])
        try:
            # TODO: a better hack for the stupid case of an ndarray of a string, such as model.zmode or pmode
            # For non numeric types
            if isinstance(field1, string_types) or isinstance(field1, list) or isinstance(field1, dict) \
                    or (isinstance(field1, np.ndarray) and field1.dtype.kind in 'OSU'):
                if np.any(field1 != field2):
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
            # For numeric numpy arrays:
            elif isinstance(field1, np.ndarray) and field1.dtype.kind not in 'OSU':
                # TODO: handle better accuracy differences, empty matrices and complex numbers...
                if field1.shape != field2.shape:
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
                elif np.any(np.abs(np.float32(field1) - np.float32(field2)) > 0):
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
            # For numeric scalar types
            elif is_numeric(field1):
                if np.abs(np.float32(field1) - np.float32(field2)) > 0:
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
            else:
                equal = assert_equal_objects(field1, field2, logger=logger)
        except Exception:
            try:
                warning("Comparing str(objects) for field "
                        + str(attributes_dict[attribute]) + " because there was an error!", logger)
                if np.any(str(field1) != str(field2)):
                    print_not_equal_message(attributes_dict[attribute], field1, field2, logger)
                    equal = False
            except Exception:
                raise_value_error("ValueError: Something went wrong when trying to compare "
                                  + str(attributes_dict[attribute]) + " !", logger)

    return equal
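# A minimal usage sketch for assert_equal_objects(), comparing an "original" dict against a
# "read back" dict field by field; values here are illustrative.
import numpy as np

original = {"data": np.array([1.0, 2.0, 3.0]), "title": "ts", "sample_period": 0.1}
restored = {"data": np.array([1.0, 2.0, 3.0]), "title": "ts", "sample_period": 0.1}
assert assert_equal_objects(original, restored)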
def connect_input_device(annarchy_device, population, neurons_inds_fun=None,
                         weight=1.0, delay=0.0, receptor_type="exc",
                         import_path=CONFIGURED.MYMODELS_IMPORT_PATH):
    """This function connect an ANNarchyInputDevice to an ANNarchyPopulation instance.
       Arguments:
        annarchy_device: the ANNarchyInputDevice instance
        population: the ANNarchyPopulation instance
        neurons_inds_fun: a function to return an ANNarchy PopulationView of the target population. Default = None.
        weight: the weights of the connection. Default = 1.0
        delay: the delays of the connection. Default = 0.0
        receptor_type: type of the synapse (target in ANNarchy). Default = "exc"
        import_path: the path to be possibly searched to import the model. Default is taken from CONFIGURED
       Returns:
        the connected ANNarchyInputDevice
    """
    neurons = get_populations_neurons(population, neurons_inds_fun)
    # TODO: What should we do with this checking for the delay in ANNarchy?
    # resolution = annarchy_instance.dt()
    # if hasattr(delay, "min"):  # In case it is an ANNarchy distribution class
    #     if delay.min < resolution:
    #         delay.min = resolution
    #         warning("Minimum delay %f is smaller than the NEST simulation resolution %f!\n"
    #                 "Setting minimum delay equal to resolution!" % (delay.min, resolution))
    #     if delay.max <= delay.min:
    #         raise_value_error("Maximum delay %f is not smaller than minimum one %f!" % (delay.max, delay.min))
    # else:
    #     if delay < resolution:
    #         delay = resolution
    #         warning("Delay %f is smaller than the NEST simulation resolution %f!\n"
    #                 "Setting minimum delay equal to resolution!" % (delay, resolution))

    connection_args = {}
    source_view_fun = None
    if annarchy_device.number_of_devices_neurons == 0:
        raise_value_error("There is no input device population of neurons in device of model %s with label %s!"
                          % (annarchy_device.model, annarchy_device.label))
    elif annarchy_device.number_of_devices_neurons == 1:
        # A single input stimulating all target neurons
        connect_method = "all_to_all"
    elif annarchy_device.number_of_devices_neurons == neurons.size:
        # Inputs are equal to target neurons, therefore connecting with one_to_one,
        # no matter if there are already other connections.
        connect_method = "one_to_one"
    elif annarchy_device.number_of_devices_neurons < neurons.size:
        # This is the case where there are less total input neurons than target ones:
        connect_method = "fixed_number_pre"
        connection_args["number"] = annarchy_device.number_of_devices_neurons
        warning("Device of model %s with label %s:\n"
                "The number of device's population neurons %d > 1 "
                "is smaller than the number %d of the target neurons of population:\n%s"
                "\nConnecting with method 'connect_fixed_number_pre' with number = %d"
                % (annarchy_device.model, annarchy_device.label, annarchy_device.number_of_devices_neurons,
                   neurons.size, str(population), annarchy_device.number_of_devices_neurons))
    else:  # These are the cases where there are more total input neurons than target ones:
        connect_method = "one_to_one"  # for all cases below
        # The number of input neurons not yet connected:
        number_of_available_connections = \
            annarchy_device.number_of_neurons - annarchy_device.number_of_connected_neurons
        if number_of_available_connections < neurons.size:
            # TODO: think more about this: error, fixed_number_pre or overlapping?
            # If the remaining available neurons are not enough,
            # use some of the already used ones with a partial overlap:
            source_view_fun = lambda _population: _population[-neurons.size:]
            warning("Device of model %s with label %s:\n"
                    "The number of device's population neurons that is available for connections %d"
                    "is smaller than the number %d of the target neurons of population:\n%s"
                    "\nConnecting with method 'connect_one_to_one' using the last %d neurons "
                    "with overlap of %d neurons!"
                    % (annarchy_device.model, annarchy_device.label, number_of_available_connections,
                       neurons.size, str(population), neurons.size, neurons.size - number_of_available_connections))
        else:
            # If the remaining available neurons are enough, just get the first available ones:
            source_view_fun = lambda _population: \
                                        _population[annarchy_device.number_of_connected_neurons :
                                                    annarchy_device.number_of_connected_neurons + neurons.size]

    synapse = annarchy_device.params.get("synapse", None)
    if synapse is not None:
        synapse = assert_model(synapse, annarchy_device.annarchy_instance, import_path)
    synapse_params = annarchy_device.params.get("synapse_params", {})
    proj = connect_two_populations(annarchy_device, population, weight, delay, receptor_type, synapse_params,
                                   source_view_fun=source_view_fun, target_view_fun=neurons_inds_fun,
                                   synapse=synapse, method=connect_method,
                                   annarchy_instance=annarchy_device.annarchy_instance, **connection_args)
    # Add this projection to the source device's and target population's inventories:
    annarchy_device.projections_pre.append(proj)
    population.projections_post.append(proj)
    # Update the number of connected neurons to the device:
    annarchy_device._number_of_connections = annarchy_device.get_number_of_connections()
    annarchy_device._number_of_neurons = annarchy_device.get_number_of_neurons()
    return annarchy_device
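# A minimal usage sketch for connect_input_device(), assuming an ANNarchyInputDevice and an
# ANNarchyPopulation are already built; the names below are hypothetical, and the device's
# params may optionally carry "synapse" / "synapse_params" entries as read above.
def example_stimulate_population(stim_device, population):
    """Hypothetical usage: excite a whole target population with default weight/delay."""
    return connect_input_device(stim_device, population, neurons_inds_fun=None,
                                weight=1.0, delay=0.0, receptor_type="exc")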