def get_data(self, values, offset=0, array_size=1,
             override_field_types=None, extend=False,
             state_var_indices=None):
    """ Get a numpy array of uint32 of data for the given values

    :param values: A list of values with length the same size as the
        number of fields returned by field_types
    :type values:
        list(int or float or list(int) or list(float) or
        ~spinn_utilities.ranged.RangedList)
    :param int offset: The offset into each of the values where to start
    :param int array_size: The number of structs to generate
    :param list override_field_types:
        List with field types which will be overwritten.
    :param bool extend:
        Passed through to ``_get_override_data`` when
        ``override_field_types`` is given.
    :param list state_var_indices:
        Passed through to ``_get_override_data``; defaults to an empty
        list.
    :rtype: ~numpy.ndarray(dtype="uint32")
    """
    # Fix: the original used a mutable default argument ([]), which is
    # shared between calls; use a None sentinel with the same meaning.
    if state_var_indices is None:
        state_var_indices = []

    # Delegate entirely when specific field types override our own
    if override_field_types is not None:
        return self._get_override_data(
            values, offset, array_size, override_field_types, extend,
            state_var_indices)

    # Create an array to store values in
    np_dtypes = self.__get_numpy_dtypes(self.field_types)
    data = numpy.zeros(array_size, dtype=np_dtypes)

    # Go through and get the values and put them in the array
    for i, (vals, data_type) in enumerate(zip(values, self.field_types)):
        if is_singleton(vals):
            data["f" + str(i)] = convert_to(vals, data_type)
        elif not isinstance(vals, RangedList):
            data["f" + str(i)] = [
                convert_to(v, data_type)
                for v in vals[offset:(offset + array_size)]]
        else:
            for start, end, value in vals.iter_ranges_by_slice(
                    offset, offset + array_size):
                # Get the values and get them into the correct data type
                if isinstance(value, RandomDistribution):
                    rand_vals = value.next(end - start)
                    data_value = [
                        convert_to(v, data_type) for v in rand_vals]
                else:
                    data_value = convert_to(value, data_type)
                data["f" + str(i)][
                    start - offset:end - offset] = data_value

    # Pad to whole number of uint32s
    overflow = (array_size * self.numpy_dtype.itemsize) % BYTES_PER_WORD
    if overflow != 0:
        data = numpy.pad(
            data.view("uint8"), (0, BYTES_PER_WORD - overflow), "constant")
    return data.view("uint32")
def is_list(value, size):  # @UnusedVariable
    """ Determine whether ``value`` should be treated as a list of values.

    A callable is always treated as a list (of generated values); a bare
    singleton raises; an iterable must be either all singletons (treated
    as a single value, so ``False``) or all iterables (``True``).

    :param value: the candidate value
    :param size: unused; kept for interface compatibility
    :raises TypeError: if ``value`` is a singleton or not iterable
    :raises ValueError: if singletons and iterables are mixed
    :rtype: bool
    """
    if callable(value):
        return True
    if is_singleton(value):
        raise TypeError(
            "Value must be an iterable or iterable of iterables")
    try:
        # Must be an iterable
        if len(value) == 0:
            return False

        # All or No values must be singletons
        singleton = is_singleton(value[0])
        for i in range(1, len(value)):
            # Bug fix: the original tested value[1] on every iteration,
            # so mixing past index 1 was never detected.
            if singleton != is_singleton(value[i]):
                raise ValueError(
                    "Illegal mixing of singleton and iterable")

        # A list of all singletons is a single value not a list here!
        return not singleton
    except TypeError:
        raise TypeError(
            "Value must be an iterable or iterable of iterables")
def is_list(value, size):  # @UnusedVariable
    """ Determines if the value should be treated as a list.

    Callables always count as lists (of generated values); otherwise
    anything that is not a singleton does.

    .. note::
        This method can be extended to add other checks for list in
        which case :py:meth:`as_list` must also be extended.
    """
    # Any callable or non-singleton iterable is treated as a list
    return callable(value) or not is_singleton(value)
def get_data(self, values, offset=0, array_size=1):
    """ Build a uint32 numpy array holding ``array_size`` packed structs.

    :param values:\
        A list of values with length the same size as the number of\
        fields returned by field_types
    :type values:\
        list of (single value or list of values or RangedList of values)
    :param offset: The offset into each of the values where to start
    :param array_size: The number of structs to generate
    :rtype: numpy.array(dtype="uint32")
    """
    # Structured array, one record per struct instance
    data = numpy.zeros(array_size, dtype=self.numpy_dtype)

    # Fill each field column from its corresponding value source
    for index, (field_values, data_type) in enumerate(
            zip(values, self.field_types)):
        key = "f" + str(index)
        if is_singleton(field_values):
            # One value broadcast over the whole column
            data[key] = convert_to(field_values, data_type)
        elif not isinstance(field_values, RangedList):
            # Plain sequence: take the requested window and convert
            data[key] = [
                convert_to(v, data_type)
                for v in field_values[offset:(offset + array_size)]]
        else:
            # RangedList: walk the constant-valued runs in our window
            for start, end, value in field_values.iter_ranges_by_slice(
                    offset, offset + array_size):
                if isinstance(value, RandomDistribution):
                    drawn = value.next(end - start)
                    converted = [convert_to(v, data_type) for v in drawn]
                else:
                    converted = convert_to(value, data_type)
                data[key][start - offset:end - offset] = converted

    # Pad so the byte length is a whole number of 32-bit words
    overflow = (array_size * self.numpy_dtype.itemsize) % BYTES_PER_WORD
    if overflow != 0:
        data = numpy.pad(
            data.view("uint8"), (0, BYTES_PER_WORD - overflow), "constant")
    return data.view("uint32")
def __setitem__(self, value):
    """ Overwrite the values covered by this view.

    A singleton value is written across the whole vertex slice in one
    call; a sequence is compressed into runs of equal consecutive values,
    each written with a single ranged update.

    .. note::
        NOTE(review): the singleton branch addresses atoms via
        lo_atom/hi_atom while the sequence branch uses 0..n_atoms —
        presumably the ranged list is slice-local in the latter case;
        confirm against callers.
    """
    if is_singleton(value):
        lo = self._vertex_slice.lo_atom
        hi = self._vertex_slice.hi_atom
        self._ranged_list.set_value_by_slice(lo, hi, value)
        return

    # Indices where consecutive entries differ: each is a run boundary
    boundaries = numpy.nonzero(numpy.diff(value))[0] + 1

    # Write each run [run_start, run_end) with its first value
    run_start = 0
    for run_end in itertools.chain(
            boundaries, [self._vertex_slice.n_atoms]):
        self._ranged_list.set_value_by_slice(
            run_start, run_end, value[run_start])
        run_start = run_end
def __setitem__(self, value):
    """ Set the data for this view from a single value or a sequence.

    A singleton is applied across the whole vertex slice; a sequence is
    split into maximal runs of identical consecutive values, each stored
    with one ranged update.
    """
    if is_singleton(value):
        self._ranged_list.set_value_by_slice(
            self._vertex_slice.lo_atom, self._vertex_slice.hi_atom,
            value)
    else:
        # Positions where the value changes; each starts a new run
        change_points = numpy.nonzero(numpy.diff(value))[0] + 1

        # Pair up run starts and run ends explicitly
        starts = [0] + list(change_points)
        ends = list(change_points) + [self._vertex_slice.n_atoms]
        for begin, finish in zip(starts, ends):
            self._ranged_list.set_value_by_slice(
                begin, finish, value[begin])
def get_data(self, values, offset=0, array_size=1):
    """ Pack ``array_size`` structs of field data into a uint32 array.

    :param values:\
        A list of values with length the same size as the number of\
        fields returned by field_types
    :type values:\
        list of (single value or list of values or RangedList of values)
    :param offset: The offset into each of the values where to start
    :param array_size: The number of structs to generate
    :rtype: numpy.array(dtype="uint32")
    """
    # One structured record per struct instance
    data = numpy.zeros(array_size, dtype=self.numpy_dtype)

    field_index = 0
    for field_values, data_type in zip(values, self.field_types):
        field = "f" + str(field_index)
        if is_singleton(field_values):
            # Broadcast a single converted value over the column
            data[field] = convert_to(field_values, data_type)
        elif not isinstance(field_values, RangedList):
            # Plain sequence: convert the requested window element-wise
            window = field_values[offset:(offset + array_size)]
            data[field] = [convert_to(v, data_type) for v in window]
        else:
            # RangedList: handle each constant-valued run separately
            for start, end, value in field_values.iter_ranges_by_slice(
                    offset, offset + array_size):
                if get_simulator().is_a_pynn_random(value):
                    drawn = value.next(end - start)
                    result = [convert_to(v, data_type) for v in drawn]
                else:
                    result = convert_to(value, data_type)
                data[field][start - offset:end - offset] = result
        field_index += 1

    # Pad the byte view to a multiple of 4 bytes (one uint32)
    overflow = (array_size * self.numpy_dtype.itemsize) % 4
    if overflow != 0:
        data = numpy.pad(data.view("uint8"), (0, 4 - overflow), "constant")
    return data.view("uint32")
def test_is_singleton():
    """ Check the singleton/container classification of is_singleton. """
    # Plain scalars and strings all count as singletons
    for scalar in (35, False, 0.12, "", 'a', "flashy fish"):
        assert is_singleton(scalar)

    # Standard containers do not
    for container in ([1, 2, 3], {1: 2, 3: 4}, frozenset([14]), (43876, )):
        assert not is_singleton(container)

    # Arbitrary objects and callables are singletons too
    assert is_singleton(object())
    assert is_singleton(lambda x: x * 2 + 1)