def test_failed_conversion(self):
    """A value reader returns None instead of raising when the raw
    payload cannot be converted to the requested channel type."""
    reader = get_value_reader("int32", None, shape=None, endianness="<")

    expected = 10
    # A correctly sized 4-byte payload round-trips to the original value.
    self.assertEqual(expected, reader(expected.to_bytes(4, byteorder='little')))
    # A truncated payload (3 bytes for an int32) must convert to None.
    self.assertEqual(None, reader(expected.to_bytes(3, byteorder='little')))
def get_channel_reader(channel):
    """
    Construct a value reader for the provided channel.

    :param channel: Channel definition dict. Expected keys: 'name' and
        'encoding' (mandatory); 'type', 'compression' and 'shape' are
        optional and default to None.
    :return: Value reader callable.
    """
    # If no channel type is specified (key absent OR explicitly null in the
    # data header), float64 is assumed. The original code raised
    # AttributeError on a present-but-null 'type'; treat it as missing.
    channel_type = channel.get('type')
    if channel_type is None:
        _logger.warning(
            "'type' channel field not found. Parse as 64-bit floating-point number float64 (default)."
        )
        channel_type = "float64"
    else:
        channel_type = channel_type.lower()

    name = channel['name']
    compression = channel.get('compression')
    shape = channel.get('shape')
    # 'encoding' is mandatory: callers normalize it to '<' or '>' before
    # invoking this function.
    endianness = channel['encoding']

    return get_value_reader(channel_type, compression, shape, endianness, name)
def receive(self, receiver):
    """Receive one multipart BSDATA message and return it as a Message.

    Frame sequence (as consumed below): main header, optional data header
    (JSON, possibly compressed), then per-channel value + timestamp frames.
    Returns None for an empty main header.
    """
    # Receive main header.
    header = receiver.next(as_json=True)
    changed = False

    # We cannot process an empty header.
    if not header:
        return None

    message = Message()
    message.pulse_id = header['pulse_id']
    message.hash = header['hash']

    if 'global_timestamp' in header:
        # Two known header dialects: seconds under 'sec' or under 'epoch'.
        if 'sec' in header['global_timestamp']:
            message.global_timestamp = header['global_timestamp']['sec']
        elif 'epoch' in header['global_timestamp']:
            message.global_timestamp = header['global_timestamp']['epoch']
        else:
            raise RuntimeError(
                "Invalid timestamp format in BSDATA header message {}".
                format(message))

        # NOTE(review): 'ns' is read unconditionally here — assumes it is
        # always present whenever 'global_timestamp' is; confirm with sender.
        message.global_timestamp_offset = header['global_timestamp']['ns']

    # Receive data header; if its hash changed, recreate the channel
    # definitions from the new header.
    if receiver.has_more() and (self.data_header_hash != header['hash']):
        # 'changed' stays False on the very first header (no previous hash).
        changed = self.data_header_hash is not None

        # Set the current header hash as the new hash.
        self.data_header_hash = header['hash']

        # Read the data header (decompressed per 'dh_compression', then JSON).
        data_header_bytes = receiver.next()
        data_header = json.loads(
            get_value_reader("string",
                             header.get('dh_compression'),
                             value_name="data_header")(data_header_bytes))

        # If a message with no channel information is received,
        # ignore it and return from function with no data.
        if not data_header['channels']:
            logging.warning("Received message without channels.")

            while receiver.has_more():
                # Drain rest of the messages - if entering this code there
                # is actually something wrong.
                receiver.next()

            return message

        # TODO: Why do we need to pre-process the message? Source change?
        for channel in data_header['channels']:
            # Define endianness of data:
            #   > - big endian
            #   < - little endian (default)
            channel["encoding"] = '>' if channel.get(
                "encoding") == "big" else '<'

        # Construct the channel definitions: (name, endianness, reader).
        self.channels_definitions = [(channel["name"], channel["encoding"],
                                      get_channel_reader(channel))
                                     for channel in data_header['channels']
                                     ]

        # Signal that the format has changed.
        message.format_changed = True
    else:
        # Skip second header - we already have the receive functions setup.
        receiver.next()

    # Receiving data: one value frame (plus one timestamp frame) per channel,
    # in the order fixed by channels_definitions.
    counter = 0

    # Todo add some more error checking
    while receiver.has_more():
        channel_name, channel_endianness, channel_reader = self.channels_definitions[
            counter]

        raw_data = receiver.next()
        channel_value = Value()

        if raw_data:
            channel_value.value = channel_reader(raw_data)

            if receiver.has_more():
                raw_timestamp = receiver.next()

                if raw_timestamp:
                    # Timestamp frame is two 64-bit unsigned ints in the
                    # channel's endianness.
                    timestamp_array = numpy.frombuffer(
                        raw_timestamp, dtype=channel_endianness + 'u8')
                    channel_value.timestamp = timestamp_array[
                        0]  # Seconds past epoch
                    channel_value.timestamp_offset = timestamp_array[
                        1]  # Nanoseconds offset
        else:
            # Consume empty timestamp message paired with the empty value.
            if receiver.has_more():
                receiver.next()  # Read empty timestamp message

        message.data[channel_name] = channel_value
        counter += 1

    # Notify the data-change callback only after the whole message has been
    # drained, so a callback failure cannot leave frames unread.
    if changed:
        if self.data_change_callback is not None:
            self.data_change_callback(data_header['channels'])

    return message
def receive(self, receiver): header = receiver.next(as_json=True) # We cannot process an empty Header. if not header: return None return_value = {} data = [] timestamp = [] timestamp_offset = [] pulse_ids = [] pulse_id_array = [] # array of all pulse ids pulse_id = header['pulse_id'] pulse_id_array.append(pulse_id) if receiver.has_more() and (self.data_header_hash != header['hash']): self.data_header_hash = header['hash'] # Read the data header. data_header_bytes = receiver.next() data_header = json.loads( get_value_reader("string", header.get('dh_compression'), value_name="data_header")(data_header_bytes)) # If a message with ho channel information is received, # ignore it and return from function with no data. if not data_header['channels']: logging.warning("Received message without channels.") while receiver.has_more(): receiver.next() return_value['header'] = header return_value['pulse_id_array'] = pulse_id_array return_value['data'] = 'No channel' return_value['timestamp'] = None return_value['timestamp_offset'] = None return_value['pulse_ids'] = None return return_value # TODO: Why do we need to pre-process the message? Source change? for channel in data_header['channels']: # Define endianness of data # > - big endian # < - little endian (default) channel["encoding"] = '>' if channel.get( "encoding") == "big" else '<' # Construct the channel definitions. self.channels_definitions = [(channel["name"], channel["encoding"], get_channel_reader(channel)) for channel in data_header['channels'] ] self.data_header = data_header else: # Skip second header receiver.next() # The data header should be added to every message. 
return_value['data_header'] = self.data_header # Receiving data counter = 0 # msg_data_size = 0 while receiver.has_more(): channel_name, channel_endianness, channel_reader = self.channels_definitions[ counter] raw_data = receiver.next() if raw_data: pulse_ids.append(pulse_id) data.append(channel_reader(raw_data)) if receiver.has_more(): raw_timestamp = receiver.next() if raw_timestamp: timestamp_array = numpy.frombuffer( raw_timestamp, dtype=channel_endianness + 'u8') timestamp.append( timestamp_array[0]) # Second past epoch timestamp_offset.append( timestamp_array[1]) # Nanoseconds offset else: if receiver.has_more(): receiver.next() # Read empty timestamp message data.append(None) timestamp.append(None) timestamp_offset.append(None) pulse_ids.append(None) counter += 1 # Todo need to add some more error checking return_value['header'] = header return_value['pulse_id_array'] = pulse_id_array return_value['data'] = data return_value['timestamp'] = timestamp return_value['timestamp_offset'] = timestamp_offset return_value['pulse_ids'] = pulse_ids # return_value['size'] = msg_data_size return return_value