Example #1
0
    def _process_spike_data(vertex_slice, spike_data, base_key, results):
        """ Decode a recorded spike buffer into (neuron_id, time) pairs.

        :param ~pacman.model.graphs.common.Slice vertex_slice:
        :param bytearray spike_data:
        :param int base_key:
        :param list(~numpy.ndarray) results:
        """
        total_bytes = len(spike_data)
        pos = 0
        while pos < total_bytes:
            # Each record starts with a (length, time) pair of words
            length, raw_time = _TWO_WORDS.unpack_from(spike_data, pos)
            timestamp = raw_time * machine_time_step_ms()
            payload_start = pos + (2 * BYTES_PER_WORD)

            header = EIEIODataHeader.from_bytestring(
                spike_data, payload_start)
            if header.eieio_type.payload_bytes > 0:
                raise Exception("Can only read spikes as keys")
            payload_start += header.size

            # Read the keys directly out of the buffer without copying
            key_dtype = "<u{}".format(header.eieio_type.key_bytes)
            keys = numpy.frombuffer(
                spike_data, dtype=key_dtype, count=header.count,
                offset=payload_start)
            spike_times = numpy.repeat([timestamp], header.count)
            neuron_ids = (keys - base_key) + vertex_slice.lo_atom

            results.append(numpy.dstack((neuron_ids, spike_times))[0])
            pos += length + (2 * BYTES_PER_WORD)
    def _process_spike_data(
            vertex_slice, spike_data, ms_per_tick, base_key, results):
        """ Parse one recording buffer of EIEIO spike packets and append the
            decoded (neuron_id, timestamp) array to *results*.
        """
        buffer_end = len(spike_data)
        cursor = 0
        while cursor < buffer_end:
            # Record layout: one word of length, one word of time, then data
            block_length = _ONE_WORD.unpack_from(spike_data, cursor)[0]
            timestamp = (
                _ONE_WORD.unpack_from(spike_data, cursor + 4)[0] * ms_per_tick)
            body_offset = cursor + 8

            header = EIEIODataHeader.from_bytestring(spike_data, body_offset)
            if header.eieio_type.payload_bytes > 0:
                raise Exception("Can only read spikes as keys")
            body_offset += header.size

            keys = numpy.frombuffer(
                spike_data,
                dtype="<u{}".format(header.eieio_type.key_bytes),
                count=header.count, offset=body_offset)
            ids = (keys - base_key) + vertex_slice.lo_atom
            times = numpy.repeat([timestamp], header.count)
            results.append(numpy.dstack((ids, times))[0])

            cursor += block_length + 8
    def _process_spike_data(vertex_slice, spike_data, ms_per_tick, base_key,
                            results):
        """ Walk the recorded spike byte stream, converting each EIEIO
            packet of keys into a stacked (neuron_id, timestamp) array.
        """
        end = len(spike_data)
        read_ptr = 0
        while read_ptr < end:
            (chunk_len,) = _ONE_WORD.unpack_from(spike_data, read_ptr)
            (tick,) = _ONE_WORD.unpack_from(spike_data, read_ptr + 4)
            timestamp = tick * ms_per_tick
            body = read_ptr + 8

            header = EIEIODataHeader.from_bytestring(spike_data, body)
            if header.eieio_type.payload_bytes > 0:
                raise Exception("Can only read spikes as keys")
            body += header.size

            spike_times = numpy.repeat([timestamp], header.count)
            keys = numpy.frombuffer(
                spike_data,
                dtype="<u{}".format(header.eieio_type.key_bytes),
                count=header.count,
                offset=body)
            spike_ids = (keys - base_key) + vertex_slice.lo_atom

            read_ptr += chunk_len + 8
            results.append(numpy.dstack((spike_ids, spike_times))[0])
    def _recording_sdram_per_timestep(cls, machine_time_step, is_recording,
                                      receive_rate, send_buffer_times, n_keys):
        """ Work out the SDRAM needed per timestep for recording.

        :param int machine_time_step:
        :param bool is_recording:
        :param float receive_rate:
        :param send_buffer_times:
        :type send_buffer_times:
            ~numpy.ndarray(~numpy.ndarray(numpy.int32)) or
            list(~numpy.ndarray(numpy.int32)) or None
        :param int n_keys:
        :rtype: int
        """
        # Nothing recorded means nothing stored per timestep
        if not is_recording:
            return 0

        # When sending from a buffer, the recorded size equals the send size
        if send_buffer_times is not None:
            return cls._send_buffer_sdram_per_timestep(
                send_buffer_times, n_keys)

        # Live data: estimate keys per timestep from the user-provided
        # receive rate, with 10% headroom
        keys_per_timestep = math.ceil(
            (receive_rate /
             (machine_time_step * MICRO_TO_MILLISECOND_CONVERSION)) * 1.1)
        header_size = EIEIODataHeader.get_header_size(
            EIEIOType.KEY_32_BIT, is_payload_base=True)
        # Worst case: one packet (header + key) per key
        per_key_size = header_size + EIEIOType.KEY_32_BIT.key_bytes
        return per_key_size * keys_per_timestep
Example #5
0
def read_eieio_data_message(data, offset):
    """ Reads the content of an EIEIO data message and returns an object\
        identifying the data which was contained in the packet

    :param bytes data: data received from the network as a bytestring
    :param int offset: offset at which the parsing operation should start
    :return: an object which inherits from EIEIODataMessage which contains
        parsed data received from the network
    :rtype: EIEIODataMessage
    """
    # The header tells us where the message body begins
    header = EIEIODataHeader.from_bytestring(data, offset)
    body_offset = offset + header.size
    return EIEIODataMessage(header, data, body_offset)
def read_eieio_data_message(data, offset):
    """ Reads the content of an EIEIO data message and returns an object\
        identifying the data which was contained in the packet

    :param bytes data: data received from the network as a bytestring
    :param int offset: offset at which the parsing operation should start
    :return: an object which inherits from EIEIODataMessage which contains
        parsed data received from the network
    :rtype: EIEIODataMessage
    """
    eieio_header = EIEIODataHeader.from_bytestring(data, offset)
    offset += eieio_header.size
    return EIEIODataMessage(eieio_header, data, offset)
Example #7
0
    def recording_sdram_per_timestep(machine_time_step, is_recording,
                                     receive_rate, send_buffer_times, n_keys):
        """ Compute the SDRAM required per timestep to record.

        Returns 0 when not recording; otherwise the recorded size is either
        the send-buffer size or an estimate from the receive rate.
        """
        # Not recording: no SDRAM is needed per timestep
        if not is_recording:
            return 0

        # Recording send data: the recorded size is the send size
        if send_buffer_times is not None:
            return (ReverseIPTagMulticastSourceMachineVertex.
                    send_buffer_sdram_per_timestep(send_buffer_times, n_keys))

        # Recording live data: estimate from the user-provided receive rate,
        # with 10% headroom
        keys_per_timestep = math.ceil(
            (receive_rate / (machine_time_step * 1000.0)) * 1.1)
        header_size = EIEIODataHeader.get_header_size(
            EIEIOType.KEY_32_BIT, is_payload_base=True)
        # Maximum size is one packet per key
        return ((header_size + EIEIOType.KEY_32_BIT.key_bytes) *
                keys_per_timestep)
Example #8
0
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import bisect
import math
from spinnman.messages.eieio.command_messages import HostSendSequencedData
from spinnman.messages.eieio.data_messages import EIEIODataHeader
from spinnman.messages.eieio import EIEIOType
from spinnman.constants import UDP_MESSAGE_MAX_SIZE

# Size of the EIEIO data header for 32-bit keys with a payload base
_HEADER_SIZE = EIEIODataHeader.get_header_size(EIEIOType.KEY_32_BIT,
                                               is_payload_base=True)

# The number of bytes in each key to be sent
_N_BYTES_PER_KEY = EIEIOType.KEY_32_BIT.key_bytes  # @UndefinedVariable

# The number of keys allowed (different from the actual number as there is
#  an additional header)
_N_KEYS_PER_MESSAGE = (UDP_MESSAGE_MAX_SIZE -
                       (HostSendSequencedData.get_min_packet_length() +
                        _HEADER_SIZE)) // _N_BYTES_PER_KEY


def get_n_bytes(n_keys):
    """ Get the number of bytes used by a given number of keys.

    :param int n_keys: The number of keys
Example #9
0
class BufferedSendingRegion(object):
    """ A set of keys to be sent at given timestamps for a given region of\
        data.  Note that keys must be added in timestamp order or else an\
        exception will be raised
    """

    __slots__ = [

        # The maximum size of any buffer
        "_max_size_of_buffer",

        # A dictionary of timestamp -> list of keys
        "_buffer",

        # A list of timestamps
        "_timestamps",

        # The current position in the list of timestamps
        "_current_timestamp_pos",

        # int stating the size of the buffer
        "_buffer_size",

        # int stating the total size of the buffered region
        "_total_region_size",

        # The maximum number of packets in any timestamp
        "_max_packets_in_timestamp"
    ]

    # Size of the EIEIO data header for 32-bit keys with a payload base
    _HEADER_SIZE = EIEIODataHeader.get_header_size(EIEIOType.KEY_32_BIT,
                                                   is_payload_base=True)

    # The number of bytes in each key to be sent
    _N_BYTES_PER_KEY = EIEIOType.KEY_32_BIT.key_bytes  # @UndefinedVariable

    # The number of keys allowed (different from the actual number as there is
    #  an additional header).  Floor division so this is an int; plain "/"
    #  would make it a float under Python 3.
    _N_KEYS_PER_MESSAGE = (UDP_MESSAGE_MAX_SIZE -
                           (HostSendSequencedData.get_min_packet_length() +
                            _HEADER_SIZE)) // _N_BYTES_PER_KEY

    def __init__(self, max_buffer_size):
        """
        :param int max_buffer_size: The maximum size of any buffer
        """
        self._max_size_of_buffer = max_buffer_size

        # A dictionary of timestamp -> list of keys
        self._buffer = dict()

        # A list of timestamps
        self._timestamps = list()

        # The current position in the list of timestamps
        self._current_timestamp_pos = 0

        # Sizes are computed lazily; None means "needs recalculation"
        self._buffer_size = None
        self._total_region_size = None
        self._max_packets_in_timestamp = 0

    @property
    def buffer_size(self):
        """ The size of the buffer, capped at the maximum buffer size

        :rtype: int
        """
        if self._buffer_size is None:
            self._calculate_sizes()
        return self._buffer_size

    @property
    def total_region_size(self):
        """ Get the max size of this region

        :rtype: int
        """
        if self._total_region_size is None:
            self._calculate_sizes()
        return self._total_region_size

    @property
    def max_buffer_size_possible(self):
        """ Get the max possible size of a buffer from this region

        :rtype: int
        """
        return self._max_size_of_buffer

    def _calculate_sizes(self):
        """ Deduce how big the buffer and the region needs to be
        """
        size = 0
        for timestamp in self._timestamps:
            n_keys = self.get_n_keys(timestamp)
            size += self.get_n_bytes(n_keys)
        # NOTE(review): EventStopRequest is not in the imports visible in
        # this chunk — presumably imported elsewhere at module level
        size += EventStopRequest.get_min_packet_length()
        # The buffer is capped at the maximum buffer size
        if size > self._max_size_of_buffer:
            self._buffer_size = self._max_size_of_buffer
        else:
            self._buffer_size = size
        self._total_region_size = size

    def get_n_bytes(self, n_keys):
        """ Get the number of bytes used by a given number of keys

        :param int n_keys: The number of keys
        :rtype: int
        """
        # Get the total number of messages (each message carries at most
        # _N_KEYS_PER_MESSAGE keys, plus its own header)
        n_messages = int(math.ceil(n_keys / self._N_KEYS_PER_MESSAGE))

        # Add up the bytes
        return ((self._HEADER_SIZE * n_messages) +
                (n_keys * self._N_BYTES_PER_KEY))

    def add_key(self, timestamp, key):
        """ Add a key to be sent at a given time

        :param int timestamp: The time at which the key is to be sent
        :param int key: The key to send
        """
        if timestamp not in self._buffer:
            # Keep the timestamp list sorted as new timestamps arrive
            bisect.insort(self._timestamps, timestamp)
            self._buffer[timestamp] = list()
        self._buffer[timestamp].append(key)
        # Invalidate cached sizes; they will be recomputed on demand
        self._total_region_size = None
        self._buffer_size = None
        if len(self._buffer[timestamp]) > self._max_packets_in_timestamp:
            self._max_packets_in_timestamp = len(self._buffer[timestamp])

    def add_keys(self, timestamp, keys):
        """ Add a set of keys to be sent at the given time

        :param int timestamp: The time at which the keys are to be sent
        :param iterable(int) keys: The keys to send
        """
        for key in keys:
            self.add_key(timestamp, key)

    @property
    def n_timestamps(self):
        """ The number of timestamps available

        :rtype: int
        """
        return len(self._timestamps)

    @property
    def timestamps(self):
        """ The timestamps for which there are keys

        :rtype: iterable(int)
        """
        return self._timestamps

    def get_n_keys(self, timestamp):
        """ Get the number of keys for a given timestamp

        :param int timestamp: the timestamp to get the number of keys for
        :rtype: int
        """
        if timestamp in self._buffer:
            return len(self._buffer[timestamp])
        return 0

    @property
    def is_next_timestamp(self):
        """ Determines if there is a next timestamp with data still to send

        :return: True if there is another timestamp, False otherwise
        :rtype: bool
        """
        return self._current_timestamp_pos < len(self._timestamps)

    @property
    def next_timestamp(self):
        """ The next timestamp of the data to be sent, or None if no more data

        :rtype: int or None
        """
        if self.is_next_timestamp:
            return self._timestamps[self._current_timestamp_pos]
        return None

    def is_next_key(self, timestamp):
        """ Determine if there is another key for the given timestamp

        :param int timestamp: the timestamp to check for remaining keys
        :rtype: bool
        """
        if timestamp in self._buffer:
            return len(self._buffer[timestamp]) > 0
        return False

    @property
    def next_key(self):
        """ The next key to be sent; keys within a timestamp are returned
            in reverse order of insertion (popped from the end of the list)

        :rtype: int
        """
        next_timestamp = self.next_timestamp
        keys = self._buffer[next_timestamp]
        key = keys.pop()
        if len(keys) == 0:
            # All keys for this timestamp consumed; advance to the next one
            del self._buffer[next_timestamp]
            self._current_timestamp_pos += 1
        return key

    @property
    def current_timestamp(self):
        """ The current position in the list of timestamps (an index, not
            a timestamp value)

        :rtype: int
        """
        return self._current_timestamp_pos

    def rewind(self):
        """ Rewind the buffer to initial position.
        """
        self._current_timestamp_pos = 0

    def clear(self):
        """ Clears the buffer
        """

        # A dictionary of timestamp -> list of keys
        self._buffer = dict()

        # A list of timestamps
        self._timestamps = list()

        # The current position in the list of timestamps
        self._current_timestamp_pos = 0

        self._buffer_size = None

        self._total_region_size = None

    @property
    def max_packets_in_timestamp(self):
        """ The maximum number of packets in any time stamp

        :rtype: int
        """
        return self._max_packets_in_timestamp