def test_overlap(self):
    """Exercise StringChunker._prune_overlaps on empty, single and overlapping spans."""
    prune = StringChunker._prune_overlaps
    self.assertEqual([], prune([]))
    self.assertEqual([(0, 5)], prune([(0, 5)]))
    # A span overlapping an earlier one is dropped.
    self.assertEqual([(0, 5)], prune([(0, 5), (3, 6)]))
    # Touching spans (end == start) survive; only true overlaps are pruned.
    self.assertEqual([(0, 5), (5, 7)], prune([(0, 5), (5, 7), (6, 8)]))
def test_chunker_invalid_checksum(self):
    """
    Corrupt one byte of a valid sample; the checksum should fail and
    no chunk should be generated.
    """
    sample = bytearray(OPTAA_SAMPLE_DATA)
    # Flip the low bit of one byte so the checksum fails. XOR is used instead
    # of the original "sample[20] + 1": if the byte happened to be 0xFF the
    # addition would produce 256 and the bytearray assignment would raise
    # ValueError instead of corrupting the sample.
    sample[20] ^= 0x01
    chunker = StringChunker(Protocol.sieve_function)
    ts = self.get_ntp_timestamp()
    chunker.add_chunk(sample, ts)
    (timestamp, result) = chunker.get_next_data()
    self.assertEqual(timestamp, None)
    self.assertEqual(result, None)
def test_funky_chunks(self):
    """
    Verify that chunks returned out of order by the sieve are emitted
    by the chunker, each carrying the timestamp of the add_chunk call.
    """
    def funky_sieve(data):
        # Deliberately return matches out of positional order.
        return [(3, 6), (0, 3)]

    self._chunker = StringChunker(funky_sieve)
    self._chunker.add_chunk("BarFoo", self.TIMESTAMP_1)

    # assertEqual replaces the deprecated assertEquals alias.
    (time, result) = self._chunker.get_next_data()
    self.assertEqual(result, "Bar")
    self.assertEqual(time, self.TIMESTAMP_1)
    (time, result) = self._chunker.get_next_data()
    self.assertEqual(result, "Foo")
    self.assertEqual(time, self.TIMESTAMP_1)
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    chunker = StringChunker(Protocol.sieve_function)
    checks = (
        self.assert_chunker_sample,
        self.assert_chunker_fragmented_sample,
        self.assert_chunker_combined_sample,
        self.assert_chunker_sample_with_noise,
    )
    for sample in self._sample_chunks:
        for check in checks:
            check(chunker, sample)
        # Feed a malformed variant (change 1 byte via character substitution);
        # a good sample must still chunk correctly afterwards.
        chunker.add_chunk(sample.replace('0', '4'), self.get_ntp_timestamp())
        self.assert_chunker_sample(chunker, sample)
def test_regex_sieve(self):
    """
    Do a test of the regex based sieve to make sure it does what we want.
    """
    # NOTE(review): the '.' before the fractional digits is unescaped; it
    # matches the literal '.' in valid samples (and any other character) --
    # kept as-is to preserve the behavior under test.
    pattern = r'SATPAR(?P<sernum>\d{4}),(?P<timer>\d{1,7}.\d\d),(?P<counts>\d{10}),(?P<checksum>\d{1,3})'
    regex = re.compile(pattern)
    self._chunker = StringChunker(partial(self._chunker.regex_sieve_function,
                                          regex_list=[regex]))

    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual([(0, 31)],
                     self._chunker.regex_sieve_function(self.SAMPLE_1, [regex]))
    self.assertEqual([],
                     self._chunker.regex_sieve_function(self.FRAGMENT_1, [regex]))
    self.assertEqual([(0, 31), (33, 64)],
                     self._chunker.regex_sieve_function(self.MULTI_SAMPLE_1, [regex]))
def __init__(self, prompts, newline, driver_event):
    """
    Protocol constructor.
    @param prompts A BaseEnum class containing instrument prompts.
    @param newline The newline.
    @param driver_event Driver process event callback.
    """
    CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event)

    # Build the protocol state machine; ENTER/EXIT are the implicit events
    # fired on every state transition.
    self._protocol_fsm = InstrumentFSM(ProtocolState, ProtocolEvent,
                                       ProtocolEvent.ENTER, ProtocolEvent.EXIT)

    # Map of state -> list of (event, handler) pairs to register below.
    handlers = {
        ProtocolState.UNKNOWN: [
            (ProtocolEvent.ENTER, self._handler_unknown_enter),
            (ProtocolEvent.EXIT, self._handler_unknown_exit)],
    }

    for state in handlers:
        for event, handler in handlers[state]:
            self._protocol_fsm.add_handler(state, event, handler)

    # Start state machine in UNKNOWN state.
    self._protocol_fsm.start(ProtocolState.UNKNOWN)

    # create chunker for processing instrument samples.
    self._chunker = StringChunker(self.sieve_function)
def __init__(self, prompts, newline, driver_event): """ Protocol constructor. @param prompts A BaseEnum class containing instrument prompts. @param newline The newline. @param driver_event Driver process event callback. """ # Construct protocol superclass. SamiProtocol.__init__(self, prompts, newline, driver_event) ## Continue building protocol state machine from SamiProtocol self._protocol_fsm.add_handler(ProtocolState.SCHEDULED_SAMPLE, ProtocolEvent.SUCCESS, self._handler_sample_success) self._protocol_fsm.add_handler(ProtocolState.SCHEDULED_SAMPLE, ProtocolEvent.TIMEOUT, self._handler_sample_timeout) self._protocol_fsm.add_handler(ProtocolState.POLLED_SAMPLE, ProtocolEvent.SUCCESS, self._handler_sample_success) self._protocol_fsm.add_handler(ProtocolState.POLLED_SAMPLE, ProtocolEvent.TIMEOUT, self._handler_sample_timeout) # State state machine in UNKNOWN state. self._protocol_fsm.start(ProtocolState.UNKNOWN) # build the chunker bot self._chunker = StringChunker(Protocol.sieve_function)
def __init__(self, prompts, newline, driver_event):
    """
    Protocol constructor.
    @param prompts A BaseEnum class containing instrument prompts.
    @param newline The newline.
    @param driver_event Driver process event callback.
    """
    log.debug("IN WorkhorseProtocol.__init__")

    # Construct protocol superclass.
    TeledyneProtocol.__init__(self, prompts, newline, driver_event)

    # Workhorse-specific events layered on the Teledyne base state machine.
    self._protocol_fsm.add_handler(WorkhorseProtocolState.COMMAND,
                                   WorkhorseProtocolEvent.POWER_DOWN,
                                   self._handler_command_power_down)
    self._protocol_fsm.add_handler(
        WorkhorseProtocolState.COMMAND,
        WorkhorseProtocolEvent.RESTORE_FACTORY_PARAMS,
        self._handler_command_restore_factory_params)

    # Build/response handlers for the factory-restore command.
    self._add_build_handler(WorkhorseInstrumentCmds.RESTORE_FACTORY_PARAMS,
                            self._build_simple_command)
    self._add_response_handler(
        WorkhorseInstrumentCmds.RESTORE_FACTORY_PARAMS,
        self._parse_restore_factory_params_response)

    # create chunker for processing instrument samples.
    self._chunker = StringChunker(WorkhorseProtocol.sieve_function)
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    self.assert_chunker_sample(StringChunker(Protocol.sieve_function),
                               VALID_SAMPLE_01)
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    self.assert_chunker_sample(StringChunker(CAMHDProtocol.sieve_function),
                               self.VALID_ADREAD_RESPONSE)
def __init__(self, prompts, newline, driver_event): """ Protocol constructor. @param prompts A BaseEnum class containing instrument prompts. @param newline The newline. @param driver_event Driver process event callback. """ # Build protocol state machine. self._protocol_fsm = ThreadSafeFSM( ProtocolState, ProtocolEvent, ProtocolEvent.ENTER, ProtocolEvent.EXIT) # Construct protocol superclass. Pco2wProtocol.__init__(self, prompts, newline, driver_event) # Build protocol state machine. self._protocol_fsm.add_handler( ProtocolState.COMMAND, ProtocolEvent.RUN_EXTERNAL_PUMP, self._handler_command_run_external_pump) # this state would be entered whenever a RUN_EXTERNAL_PUMP event # occurred while in the COMMAND state self._protocol_fsm.add_handler( ProtocolState.RUN_EXTERNAL_PUMP, ProtocolEvent.ENTER, self._execution_state_enter) self._protocol_fsm.add_handler( ProtocolState.RUN_EXTERNAL_PUMP, ProtocolEvent.EXIT, self._execution_state_exit) self._protocol_fsm.add_handler( ProtocolState.RUN_EXTERNAL_PUMP, ProtocolEvent.EXECUTE, self._handler_run_external_pump_execute) self._protocol_fsm.add_handler( ProtocolState.RUN_EXTERNAL_PUMP, ProtocolEvent.SUCCESS, self._execution_success_to_command_state) self._protocol_fsm.add_handler( ProtocolState.RUN_EXTERNAL_PUMP, ProtocolEvent.TIMEOUT, self._execution_timeout_to_command_state) ## Events to queue - intended for schedulable events occurring when a sample is being taken self._protocol_fsm.add_handler( ProtocolState.RUN_EXTERNAL_PUMP, ProtocolEvent.ACQUIRE_STATUS, self._handler_queue_acquire_status) # Add build handlers for device commands. ### primarily defined in base class self._add_build_handler(InstrumentCommand.PCO2WB_ACQUIRE_SAMPLE_DEV1, self._build_simple_command) # Add response handlers for device commands. ### primarily defined in base class self._add_response_handler(InstrumentCommand.PCO2WB_ACQUIRE_SAMPLE_DEV1, self._parse_response_sample_dev1) # Add sample handlers # Start state machine in UNKNOWN state. 
self._protocol_fsm.start(ProtocolState.UNKNOWN) # build the chunker self._chunker = StringChunker(Protocol.sieve_function) self._engineering_parameters.append(Parameter.EXTERNAL_PUMP_DELAY)
def __init__(self, config, stream_handle, state, sieve_fn,
             state_callback, publish_callback):
    """
    Parser constructor.
    @param config The configuration parameters to feed into the parser
    @param stream_handle An already open file-like filehandle
    @param state The location in the file to start parsing from. This
        reflects what has already been published.
    @param sieve_fn A sieve function that might be added to a handler to
        appropriate filter out the data
    @param state_callback The callback method from the agent driver
        (ultimately the agent) to call back when a state needs to be updated
    @param publish_callback The callback from the agent driver (and
        ultimately from the agent) where we send our sample particle to be
        published into ION
    """
    self._chunker = StringChunker(sieve_fn)
    self._stream_handle = stream_handle
    self._state = state
    self._state_callback = state_callback
    self._publish_callback = publish_callback
    self._config = config
    # A new sequence is started for every new file.
    self._new_sequence = True

    # build class from module and class name, then set the state
    self._particle_module = __import__(
        config.get("particle_module"),
        fromlist=[config.get("particle_class")])
    self._particle_class = getattr(self._particle_module,
                                   config.get("particle_class"))
def __init__(self, prompts, newline, driver_event, connections=None):
    """
    Constructor.
    @param prompts Enum class containing possible device prompts used for
        command response logic.
    @param newline The device newline.
    @param driver_event The callback for asynchronous driver events.
    @param connections List of connection identifiers; one buffer/chunker
        set is created per connection.
    @raises InstrumentProtocolException if connections is not a list.
    """
    # isinstance (rather than comparing type() identity) is the idiomatic
    # check and also accepts list subclasses.
    if not isinstance(connections, list):
        raise InstrumentProtocolException(
            'Unable to instantiate multi connection protocol without connection list'
        )
    self._param_dict2 = ProtocolParameterDict()

    # Construct superclass.
    WorkhorseProtocol.__init__(self, prompts, newline, driver_event)

    # Create multiple connection versions of the pieces of protocol
    # involving data to/from the instrument.
    self._linebuf = {connection: '' for connection in connections}
    self._promptbuf = {connection: '' for connection in connections}
    self._last_data_timestamp = {
        connection: None for connection in connections
    }
    self.connections = {connection: None for connection in connections}
    self.chunkers = {
        connection: StringChunker(self.sieve_function)
        for connection in connections
    }
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    chunker = StringChunker(Protocol.sieve_function)
    samples = (self.VALID_STATUS_MESSAGE, self.VALID_CONTROL_RECORD,
               self.VALID_DATA_SAMPLE, self.VALID_CONFIG_STRING)
    for sample in samples:
        self.assert_chunker_sample(chunker, sample)
        self.assert_chunker_sample_with_noise(chunker, sample)
        self.assert_chunker_fragmented_sample(chunker, sample)
        self.assert_chunker_combined_sample(chunker, sample)
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    chunker = StringChunker(Protocol.sieve_function)
    samples = (SAMPLE_GETSD, SAMPLE_GETCD, SAMPLE_GETEC,
               SAMPLE_GETHD, SAMPLE_SAMPLE, SAMPLE_REF_OSC)
    for sample in samples:
        self.assert_chunker_sample(chunker, sample)
        self.assert_chunker_sample_with_noise(chunker, sample)
        # Fragment each sample in 32-byte pieces.
        self.assert_chunker_fragmented_sample(chunker, sample, 32)
        self.assert_chunker_combined_sample(chunker, sample)
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    chunker = StringChunker(Protocol.sieve_function)
    # (sample, fragment size) pairs; fragment sizes vary per response type.
    cases = ((SAMPLE_MNU_RESPONSE, 32),
             (SAMPLE_RUN_RESPONSE, 1),
             (SAMPLE_MET_RESPONSE, 32),
             (SAMPLE_SAMPLE_RESPONSE, 32),
             (SAMPLE_DUMP_MEMORY_RESPONSE, 2))
    for sample, fragment_size in cases:
        self.assert_chunker_sample(chunker, sample)
        self.assert_chunker_sample_with_noise(chunker, sample)
        self.assert_chunker_fragmented_sample(chunker, sample, fragment_size)
        self.assert_chunker_combined_sample(chunker, sample)
def test_chunker(self):
    """
    Verify the chunker can parse each sample type
    1. complete data structure
    2. fragmented data structure
    3. combined data structure
    4. data structure with noise
    """
    chunker = StringChunker(Protocol.sieve_function)
    producers = (velocity_sample, system_sample, velocity_header_sample)
    checks = (self.assert_chunker_sample,
              self.assert_chunker_fragmented_sample,
              self.assert_chunker_combined_sample,
              self.assert_chunker_sample_with_noise)
    # Same ordering as before: each check applied to every sample type.
    for check in checks:
        for make_sample in producers:
            check(chunker, make_sample())
def __init__(self, config, stream_handle, state, sieve_fn,
             state_callback, publish_callback, exception_callback=None):
    """
    Parser constructor.
    @param config The configuration parameters to feed into the parser
    @param stream_handle An already open file-like filehandle
    @param state The location in the file to start parsing from. This
        reflects what has already been published.
    @param sieve_fn A sieve function that might be added to a handler to
        appropriate filter out the data
    @param state_callback The callback method from the agent driver
        (ultimately the agent) to call back when a state needs to be updated
    @param publish_callback The callback from the agent driver (and
        ultimately from the agent) where we send our sample particle to be
        published into ION
    @param exception_callback The callback from the agent driver (and
        ultimately from the agent) where we send our error events to be
        published into ION
    """
    self._chunker = StringChunker(sieve_fn)
    self._stream_handle = stream_handle
    self._state = state
    self._state_callback = state_callback
    self._publish_callback = publish_callback
    self._exception_callback = exception_callback
    self._config = config

    # It was originally thought that we wanted to start a new sequence for
    # every new file. But that has changed. If we want this behavior back
    # then we need to change this back to true.
    self._new_sequence = False

    # Build class from module and class name, then set the state
    if config.get(DataSetDriverConfigKeys.PARTICLE_CLASS) is not None:
        if config.get(DataSetDriverConfigKeys.PARTICLE_MODULE):
            self._particle_module = __import__(
                config.get(DataSetDriverConfigKeys.PARTICLE_MODULE),
                fromlist=[
                    config.get(DataSetDriverConfigKeys.PARTICLE_CLASS)
                ])
            # if there is more than one particle class for this parser, this
            # cannot be used, need to hard code the particle class in the driver
            try:
                self._particle_class = getattr(
                    self._particle_module,
                    config.get(DataSetDriverConfigKeys.PARTICLE_CLASS))
            except TypeError:
                self._particle_class = None
        else:
            # log.warning replaces the deprecated log.warn alias.
            log.warning(
                "Particle class is specified in config, but no particle module is specified in config"
            )
def test_funky_chunks(self):
    """
    Verify that chunks returned out of order by the sieve are still
    emitted by the chunker in match order.
    """
    def funky_sieve(data):
        # Deliberately return matches out of positional order.
        return [(3, 6), (0, 3)]

    self._chunker = StringChunker(funky_sieve)
    self._chunker.add_chunk("BarFoo")

    # assertEqual replaces the deprecated assertEquals alias.
    result = self._chunker.get_next_data()
    self.assertEqual(result, "Bar")
    result = self._chunker.get_next_data()
    self.assertEqual(result, "Foo")
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    chunker = StringChunker(Protocol.sieve_function)
    for check in (self.assert_chunker_sample,
                  self.assert_chunker_sample_with_noise,
                  self.assert_chunker_fragmented_sample,
                  self.assert_chunker_combined_sample):
        check(chunker, self.SAMPLE_DATA1)
def __init__(self, prompts, newline, driver_event): """ Protocol constructor. @param prompts A BaseEnum class containing instrument prompts. @param newline The newline. @param driver_event Driver process event callback. """ # Construct protocol superclass. CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event) # Build protocol state machine. self._protocol_fsm = InstrumentFSM(ProtocolState, ProtocolEvent, ProtocolEvent.ENTER, ProtocolEvent.EXIT) # Add event handlers for protocol state machine. self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.ENTER, self._handler_unknown_enter) self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.EXIT, self._handler_unknown_exit) self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.DISCOVER, self._handler_unknown_discover) self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.ENTER, self._handler_autosample_enter) self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.EXIT, self._handler_autosample_exit) self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.STOP_AUTOSAMPLE, self._handler_autosample_stop_autosample) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ENTER, self._handler_command_enter) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.EXIT, self._handler_command_exit) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.GET, self._handler_command_get) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.SET, self._handler_command_set) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.START_AUTOSAMPLE, self._handler_command_start_autosample) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.HEAT_ON, self._handler_command_heat_on) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.HEAT_OFF, self._handler_command_heat_off) # Construct the parameter dictionary containing device 
parameters, # current parameter values, and set formatting functions. self._build_param_dict() # Add build handlers for device commands. self._add_build_handler(InstrumentCommand.HEAT_ON, self._build_heat_on_command) self._add_build_handler(InstrumentCommand.HEAT_OFF, self._build_heat_off_command) # Add response handlers for device commands. self._add_response_handler(InstrumentCommand.HEAT_ON, self._parse_heat_on_off_resp) self._add_response_handler(InstrumentCommand.HEAT_OFF, self._parse_heat_on_off_resp) # Add sample handlers. # State state machine in UNKNOWN state. self._protocol_fsm.start(ProtocolState.UNKNOWN) # commands sent sent to device to be filtered in responses for telnet DA self._sent_cmds = [] # self._chunker = StringChunker(Protocol.sieve_function) self._heat_duration = DEFAULT_HEAT_DURATION
def __init__(self, driver_event):
    """
    Protocol constructor.
    @param driver_event Driver process event callback.
    """
    # This protocol uses no prompts or newline; pass None for both to the
    # superclass constructor.
    CommandResponseInstrumentProtocol.__init__(self, None, None, driver_event)

    # create chunker for processing instrument samples.
    self._chunker = StringChunker(self.sieve_function)
def test_chunker(self):
    """
    Tests the chunker
    """
    # This will want to be created in the driver eventually...
    chunker = StringChunker(Protocol.sieve_function)
    for check in (self.assert_chunker_sample,
                  self.assert_chunker_fragmented_sample,
                  self.assert_chunker_combined_sample,
                  self.assert_chunker_sample_with_noise):
        check(chunker, FULL_SAMPLE)
def test_chunker(self):
    """
    Tests the chunker
    """
    # This will want to be created in the driver eventually...
    chunker = StringChunker(SatlanticOCR507InstrumentProtocol.sieve_function)
    checks = (self.assert_chunker_sample,
              self.assert_chunker_fragmented_sample,
              self.assert_chunker_combined_sample,
              self.assert_chunker_sample_with_noise)
    for sample in (VALID_SAMPLE_INVALID_CHECKSUM,
                   VALID_SAMPLE_VALID_CHECKSUM,
                   VALID_CONFIG):
        for check in checks:
            check(chunker, sample)
def test_funky_chunks(self):
    """
    Verify that chunks returned out of order by the sieve are emitted
    by the chunker, each carrying the timestamp of the add_chunk call.
    """
    def funky_sieve(_):
        # Deliberately return matches out of positional order.
        return [(3, 6), (0, 3)]

    self._chunker = StringChunker(funky_sieve)
    self._chunker.add_chunk("BarFoo", self.TIMESTAMP_1)

    # assertEqual replaces the deprecated assertEquals alias.
    (time, result) = self._chunker.get_next_data()
    self.assertEqual(result, "Bar")
    self.assertEqual(time, self.TIMESTAMP_1)
    (time, result) = self._chunker.get_next_data()
    self.assertEqual(result, "Foo")
    self.assertEqual(time, self.TIMESTAMP_1)
def test_regex_sieve(self):
    """
    Do a test of the regex based sieve to make sure it does what we want.
    """
    # NOTE(review): the '.' before the fractional digits is unescaped; it
    # matches the literal '.' in valid samples (and any other character) --
    # kept as-is to preserve the behavior under test.
    pattern = r"SATPAR(?P<sernum>\d{4}),(?P<timer>\d{1,7}.\d\d),(?P<counts>\d{10}),(?P<checksum>\d{1,3})"
    regex = re.compile(pattern)
    self._chunker = StringChunker(partial(self._chunker.regex_sieve_function,
                                          regex_list=[regex]))

    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual([(0, 31)],
                     self._chunker.regex_sieve_function(self.SAMPLE_1, [regex]))
    self.assertEqual([],
                     self._chunker.regex_sieve_function(self.FRAGMENT_1, [regex]))
    self.assertEqual([(0, 31), (33, 64)],
                     self._chunker.regex_sieve_function(self.MULTI_SAMPLE_1, [regex]))
def __init__(self, prompts, newline, driver_event): """ Protocol constructor. @param prompts A BaseEnum class containing instrument prompts. @param newline The newline. @param driver_event Driver process event callback. """ # Construct protocol superclass. TeledyneProtocol.__init__(self, prompts, newline, driver_event) self._chunker = StringChunker(WorkhorseProtocol.sieve_function)
def test_chunker(self):
    """
    Test the chunker
    """
    chunker = StringChunker(Protocol.sieve_function)
    for telegram in (TELEGRAM_1,):
        sample = telegram + NEWLINE
        self.assert_chunker_sample(chunker, sample)
        self.assert_chunker_fragmented_sample(chunker, sample)
        self.assert_chunker_sample_with_noise(chunker, sample)
        self.assert_chunker_combined_sample(chunker, sample)
def __init__(self, prompts, newline, driver_event): """ Protocol constructor. @param prompts A BaseEnum class containing instrument prompts. @param newline The newline. @param driver_event Driver process event callback. """ # Construct protocol superclass. SamiProtocol.__init__(self, prompts, newline, driver_event) # Build protocol state machine. ### # most of these are defined in the base class with exception of handlers # defined below that differ for the two instruments (what defines # success and the timeout duration) ### # this state would be entered whenever an ACQUIRE_SAMPLE event occurred # while in the AUTOSAMPLE state and will last anywhere from a few # seconds to ~12 minutes depending on instrument and the type of # sampling. self._protocol_fsm.add_handler(ProtocolState.SCHEDULED_SAMPLE, ProtocolEvent.SUCCESS, self._handler_sample_success) self._protocol_fsm.add_handler(ProtocolState.SCHEDULED_SAMPLE, ProtocolEvent.TIMEOUT, self._handler_sample_timeout) # this state would be entered whenever an ACQUIRE_SAMPLE event occurred # while in either the COMMAND state (or via the discover transition # from the UNKNOWN state with the instrument unresponsive) and will # last anywhere from a few seconds to 3 minutes depending on instrument # and sample type. self._protocol_fsm.add_handler(ProtocolState.POLLED_SAMPLE, ProtocolEvent.SUCCESS, self._handler_sample_success) self._protocol_fsm.add_handler(ProtocolState.POLLED_SAMPLE, ProtocolEvent.TIMEOUT, self._handler_sample_timeout) # Add build handlers for device commands. ### primarily defined in base class self._add_build_handler(InstrumentCommand.ACQUIRE_SAMPLE_DEV1, self._build_sample_dev1) # Add response handlers for device commands. ### primarily defined in base class self._add_response_handler(InstrumentCommand.ACQUIRE_SAMPLE_DEV1, self._build_response_sample_dev1) # Add sample handlers # State state machine in UNKNOWN state. 
self._protocol_fsm.start(ProtocolState.UNKNOWN) # build the chunker self._chunker = StringChunker(Protocol.sieve_function)
def test_chunker(self):
    """
    Verify the chunker can parse each sample type
    1. complete data structure
    2. fragmented data structure
    3. combined data structure
    4. data structure with noise
    """
    chunker = StringChunker(Protocol.sieve_function)
    for check in (self.assert_chunker_sample,
                  self.assert_chunker_fragmented_sample,
                  self.assert_chunker_combined_sample,
                  self.assert_chunker_sample_with_noise):
        check(chunker, VELOCITY_SAMPLE)
def test_overlap(self):
    """Verify overlap detection and that an overlapping sieve raises on add_chunk."""
    overlaps = StringChunker.overlaps
    self.assertFalse(overlaps([]))
    self.assertFalse(overlaps([(0, 5)]))
    self.assertTrue(overlaps([(0, 5), (3, 6)]))
    self.assertTrue(overlaps([(0, 5), (5, 7), (6, 8)]))
    self.assertTrue(overlaps([(0, 5), (6, 9), (5, 7)]))

    def overlap_sieve(data):
        # Spans (0,3) and (2,6) overlap at index 2.
        return [(0, 3), (2, 6)]

    self._chunker = StringChunker(overlap_sieve)
    self.assertRaises(SampleException, self._chunker.add_chunk,
                      "foobar", self.TIMESTAMP_1)
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    chunker = StringChunker(CAMDSProtocol.sieve_function)
    # (sample, fragment size) pairs.
    for sample, fragment_size in ((self._health_data, 5),
                                  (self._disk_data, 6)):
        self.assert_chunker_sample(chunker, sample)
        self.assert_chunker_sample_with_noise(chunker, sample)
        self.assert_chunker_fragmented_sample(chunker, sample, fragment_size)
        self.assert_chunker_combined_sample(chunker, sample)
def test_chunker(self):
    """
    Test the chunker and verify the particles created.
    """
    chunker = StringChunker(Protocol.sieve_function)
    for sample in (self.VALID_CTDBP_NO_SAMPLE,
                   self.VALID_CTDPF_SBE43_SAMPLE):
        self.assert_chunker_sample(chunker, sample)
        self.assert_chunker_sample_with_noise(chunker, sample)
        self.assert_chunker_fragmented_sample(chunker, sample)
        self.assert_chunker_combined_sample(chunker, sample)
def test_overlap(self):
    """Verify overlap detection and that an overlapping sieve raises on add_chunk."""
    overlaps = StringChunker.overlaps
    self.assertFalse(overlaps([]))
    self.assertFalse(overlaps([(0, 5)]))
    self.assertTrue(overlaps([(0, 5), (3, 6)]))
    self.assertTrue(overlaps([(0, 5), (5, 7), (6, 8)]))
    self.assertTrue(overlaps([(0, 5), (6, 9), (5, 7)]))

    def overlap_sieve(data):
        # Spans (0,3) and (2,6) overlap at index 2.
        return [(0, 3), (2, 6)]

    self._chunker = StringChunker(overlap_sieve)
    self.assertRaises(SampleException, self._chunker.add_chunk, "foobar")
def setUp(self):
    """
    Setup a chunker for use in tests
    """
    # Fresh chunker per test, driven by the class-level sieve function.
    self._chunker = StringChunker(UnitTestStringChunker.sieve_function)
class Protocol(CommandResponseInstrumentProtocol): """ Instrument protocol class Subclasses CommandResponseInstrumentProtocol """ __metaclass__ = META_LOGGER def __init__(self, prompts, newline, driver_event): """ Protocol constructor. @param prompts A BaseEnum class containing instrument prompts. @param newline The newline. @param driver_event Driver process event callback. """ # Construct protocol superclass. CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event) # Build protocol state machine. self._protocol_fsm = ThreadSafeFSM(ProtocolState, ProtocolEvent, ProtocolEvent.ENTER, ProtocolEvent.EXIT) # Add event handlers for protocol state machine. handlers = { ProtocolState.UNKNOWN: [ (ProtocolEvent.ENTER, self._handler_generic_enter), (ProtocolEvent.EXIT, self._handler_generic_exit), (ProtocolEvent.DISCOVER, self._handler_unknown_discover), ], ProtocolState.COMMAND: [ (ProtocolEvent.ENTER, self._handler_generic_enter), (ProtocolEvent.EXIT, self._handler_generic_exit), (ProtocolEvent.START_DIRECT, self._handler_command_start_direct), (ProtocolEvent.GET, self._handler_command_get), (ProtocolEvent.SET, self._handler_command_set), (ProtocolEvent.START_SCAN, self._handler_command_start_scan), ], ProtocolState.DIRECT_ACCESS: [ (ProtocolEvent.ENTER, self._handler_direct_access_enter), (ProtocolEvent.EXIT, self._handler_generic_exit), (ProtocolEvent.STOP_DIRECT, self._handler_direct_access_stop_direct), (ProtocolEvent.EXECUTE_DIRECT, self._handler_direct_access_execute_direct), ], ProtocolState.SCAN: [ (ProtocolEvent.ENTER, self._handler_scan_enter), (ProtocolEvent.EXIT, self._handler_scan_exit), (ProtocolEvent.STOP_SCAN, self._handler_scan_stop_scan), (ProtocolEvent.TAKE_SCAN, self._handler_scan_take_scan), (ProtocolEvent.TIMEOUT, self._handler_scan_timeout), (ProtocolEvent.ERROR, self._handler_scan_error), ], ProtocolState.ERROR: [ (ProtocolEvent.ENTER, self._handler_generic_enter), (ProtocolEvent.EXIT, self._handler_generic_exit), 
(ProtocolEvent.CLEAR, self._handler_error_clear), (ProtocolEvent.GET, self._handler_command_get), ] } for state in handlers: for event, handler in handlers[state]: self._protocol_fsm.add_handler(state, event, handler) # Construct the parameter dictionary containing device parameters, # current parameter values, and set formatting functions. self._build_param_dict() self._build_command_dict() self._build_driver_dict() # Add build handlers for device commands. for command in InstrumentCommand.list(): self._add_build_handler(command, self._generic_build_handler) # Add response handlers for device commands. for command in InstrumentCommand.list(): self._add_response_handler(command, functools.partial(self._generic_response_handler, command=command)) # State state machine in UNKNOWN state. self._protocol_fsm.start(ProtocolState.UNKNOWN) # commands sent sent to device to be filtered in responses for telnet DA self._sent_cmds = [] self._chunker = StringChunker(Protocol.sieve_function) self.initialize_scheduler() # all calls to do_cmd_resp should expect RESPONSE_REGEX and use TIMEOUT. Freeze these arguments... self._do_cmd_resp = functools.partial(self._do_cmd_resp, response_regex=RESPONSE_REGEX, timeout=TIMEOUT) # these variables are used to track scan time and completion status # for development and performance data self.scan_start_time = 0 self.in_scan = False @staticmethod def sieve_function(raw_data): """ This is a placeholder. The sieve function for the RGA is built dynamically when a scan is started. This function must return a list. see self._build_sieve_function() """ return [] def _build_param_dict(self): """ Populate the parameter dictionary with parameters. For each parameter key, add match string, match lambda function, and value formatting function for set commands. 
""" name = 'display_name' desc = 'description' units = 'units' val_desc = 'value_description' parameters = { Parameter.ID: { name: 'RGA ID String', desc: '', }, Parameter.EE: { name: 'Electron Energy', desc: 'The desired electron ionization energy: (25 - 105)', units: DriverUnits.ELECTRONVOLT, val_desc: 'The desired electron ionization energy in units of eV' }, Parameter.IE: { name: 'Ion Energy', desc: 'The ion energy: (0:8eV | 1:12eV)', val_desc: 'Ion energy level: 0 for Low and 1 for High', }, Parameter.VF: { name: 'Focus Plate Voltage', desc: 'The focus plate voltage in the ionizer: (0 - 150)', val_desc: 'The parameter represents the magnitude of the biasing voltage (negative) in units of volts.', units: DriverUnits.VOLT, }, Parameter.NF: { name: 'Noise Floor', desc: 'Rate and detection limit for ion current measurements: (0 - 7)', val_desc: 'The parameter represents the noise-floor level desired. Lower parameter values ' + 'correspond to lower baseline noise, better detection limits and increased measurement ' + 'times. 
Please refer to the Electrometer section of the RGA Electronics Control Unit ' + 'chapter to obtain detailed information about detection limits and bandwidth values' + 'as a function of NF settings.', }, Parameter.SA: { name: 'Steps per AMU', desc: 'Number of steps executed per amu of analog scan: (10 - 25)', val_desc: 'The parameter specifies the number of steps-per-amu.', units: DriverUnits.COUNTS, }, Parameter.MI: { name: 'Initial Mass', desc: 'The initial scan mass: (1 - 200)', units: DriverUnits.AMU, }, Parameter.MF: { name: 'Final Mass', desc: 'The final scan mass: (1 - 200)', units: DriverUnits.AMU, }, Parameter.FL: { name: 'Electron Emission Current', desc: 'Electron emission current level in the ionizer: (0 - 3.5)', val_desc: 'The parameter represents the desired electron emission current.', units: Prefixes.MILLI + Units.AMPERE }, Parameter.FL_ACTUAL: { name: 'Actual Electron Emission Current', desc: 'The actual electron emission current level in the ionizer.', val_desc: 'The parameter represents the actual electron emission current.', units: Prefixes.MILLI + Units.AMPERE }, Parameter.AP: { name: 'Analog Scan Points', desc: 'The total number of ion currents that will be measured and transmitted ' + 'during an analog scan under the current scan conditions.', val_desc: 'Total number of ion currents to be transmitted. Does not include the four extra' + 'bytes for total pressure included when performing an analog scan.', units: DriverUnits.COUNTS }, Parameter.HV: { name: 'High Voltage CDEM', desc: 'Electron multiplier high voltage bias setting: (0:disables CDEM, 10 - 2490)', val_desc: '0 disables the CDEM, values from 10-2490 enable the CDEM and specify the CDEM bias voltage', units: Units.VOLT }, Parameter.ER: { name: 'Status Byte', desc: 'Bit-mapped value representing any errors detected by the RGA.', val_desc: '0 indicates no errors detected. 
See the RGA manual if this value is non-zero.', }, Parameter.ERROR_REASON: { name: 'RGA Error Reason', desc: 'Reason for RGA error state.' } } constraints = ParameterConstraints.dict() read_only = [Parameter.ID, Parameter.AP, Parameter.ER, Parameter.FL_ACTUAL, Parameter.ERROR_REASON] floats = [Parameter.FL, Parameter.FL_ACTUAL] strings = [Parameter.ID, Parameter.ERROR_REASON] for param in parameters: visibility = ParameterDictVisibility.READ_WRITE value_type = ParameterDictType.INT formatter = int startup = True if param in read_only: visibility = ParameterDictVisibility.READ_ONLY startup = False if param in floats: value_type = ParameterDictType.FLOAT formatter = float elif param in strings: value_type = ParameterDictType.STRING formatter = str if param in constraints: _type, minimum, maximum = constraints[param] parameters[param][val_desc] = '%s %s value from %d - %d' % (parameters[param].get(val_desc, ''), _type, minimum, maximum) self._param_dict.add(param, '', None, formatter, type=value_type, visibility=visibility, startup_param=startup, **parameters[param]) def _build_command_dict(self): """ Populate the command dictionary with commands. """ self._cmd_dict.add(Capability.START_SCAN, display_name="Start Scan") self._cmd_dict.add(Capability.STOP_SCAN, display_name="Stop Scan") self._cmd_dict.add(Capability.CLEAR, display_name="Clear Error State") self._cmd_dict.add(Capability.DISCOVER, display_name='Discover') def _build_driver_dict(self): """ Populate the driver dictionary with options """ self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, False) def _got_chunk(self, chunk, ts): """ The base class got_data has gotten a chunk from the chunker. We only generate sample particles and they cannot be verified (beyond size, which is done in the chunker). Just create a particle, reset the scheduler and start the next scan. 
@param chunk: data to process @param ts: timestamp """ elapsed = time.time() - self.scan_start_time self.in_scan = False log.debug('_got_chunk: Received complete scan. AP: %d NF: %d SIZE: %d ET: %d secs', self._param_dict.get(Parameter.AP), self._param_dict.get(Parameter.NF), len(chunk), elapsed) self._driver_event(DriverAsyncEvent.SAMPLE, RGASampleParticle(chunk, port_timestamp=ts).generate()) # Reset the scheduler and initiate the next scan if we are in the scan state if self.get_current_state() == ProtocolState.SCAN: self._build_scheduler() self._async_raise_fsm_event(ProtocolEvent.TAKE_SCAN) def _generic_response_handler(self, resp, prompt, command=None): """ Generic response handler. Shove the results into the param dict. The associated command should be frozen when the response handler is registered using functools.partial @param resp: command response @param prompt: not used, required to match signature @param command: command which generated response @return: response """ parameter = getattr(Parameter, command, None) log.debug('_generic_response_handler: command: %s parameter: %s resp: %s', command, parameter, resp) if parameter in self._param_dict.get_keys(): if parameter == Parameter.FL: parameter = Parameter.FL_ACTUAL try: self._param_dict.set_value(parameter, self._param_dict.format(parameter, resp)) except ValueError: # bad data? Don't set the value, but keep the driver moving forward # verify the data if necessary downstream. pass return resp def _generic_build_handler(self, command, *args, **kwargs): """ Generic build handler. If a value is passed, then this is a set, otherwise it's a query... 
@param command: command to build @param args: arglist which may contain a value @return: command string """ if len(args) == 1: # this is a set action value = args[0] return self._build_rga_set(command, value) + NEWLINE # this is a query return self._build_rga_query(command) + NEWLINE def _build_rga_set(self, command, value): """ Build a set command @param command: command to build @param value: value to set @return: command string """ return command + str(value) def _build_rga_query(self, command): """ Build a query command @param command: command to build @return: command string """ return command + '?' def _filter_capabilities(self, events): """ Return a list of currently available capabilities. @param events: list of events to be filtered @return: list of events which are in capability """ return [x for x in events if Capability.has(x)] def _wakeup(self, timeout, delay=1): """ Wakeup not required for this instrument """ def _build_scheduler(self): """ Remove any previously scheduled event, then generate an absolute trigger to schedule the next scan in case we lose some data and the next scan isn't triggered by got_chunk. """ try: self._remove_scheduler(ScheduledJob.TAKE_SCAN) log.debug('Successfully removed existing scheduled event TAKE_SCAN.') except KeyError as ke: log.debug('KeyError: %s', ke) # this formula was derived from testing, should yield a slightly higher time than the actual # time required to collect a single scan. 
delay = self._param_dict.get(Parameter.AP) / 9 / self._param_dict.get(Parameter.NF) + 5 if delay > 0: dt = datetime.datetime.now() + datetime.timedelta(seconds=delay) job_name = ScheduledJob.TAKE_SCAN config = { DriverConfigKey.SCHEDULER: { job_name: { DriverSchedulerConfigKey.TRIGGER: { DriverSchedulerConfigKey.TRIGGER_TYPE: TriggerType.ABSOLUTE, DriverSchedulerConfigKey.DATE: dt }, } } } self.set_init_params(config) self._add_scheduler_event(ScheduledJob.TAKE_SCAN, ProtocolEvent.TIMEOUT) def _update_params(self, *args, **kwargs): """ Parameters are NOT set in the instrument by this method, as the RGA is configured anytime a scan is started, as it may have been powered off since the last time we saw it. """ def _set_params(self, *args, **kwargs): """ Set parameters, raise a CONFIG_CHANGE event if necessary. @throws InstrumentParameterException """ self._verify_not_readonly(*args, **kwargs) params_to_set = args[0] old_config = self._param_dict.get_all() # check if in range constraints = ParameterConstraints.dict() parameters = Parameter.reverse_dict() # step through the list of parameters for key, val in params_to_set.iteritems(): # if constraint exists, verify we have not violated it constraint_key = parameters.get(key) if constraint_key in constraints: var_type, minimum, maximum = constraints[constraint_key] try: value = var_type(val) except ValueError: raise exceptions.InstrumentParameterException( 'Unable to verify type - parameter: %s value: %s' % (key, val)) if val < minimum or val > maximum: raise exceptions.InstrumentParameterException( 'Value out of range - parameter: %s value: %s min: %s max: %s' % (key, val, minimum, maximum)) # all constraints met or no constraints exist, set the values for key, val in params_to_set.iteritems(): if key in old_config: self._param_dict.set_value(key, val) else: raise exceptions.InstrumentParameterException( 'Attempted to set unknown parameter: %s value: %s' % (key, val)) new_config = self._param_dict.get_all() # If we 
changed anything, raise a CONFIG_CHANGE event if old_config != new_config: self._driver_event(DriverAsyncEvent.CONFIG_CHANGE) def _check_error_byte(self, error_string): """ Check the error byte as returned by some commands @param error_string: byte to be checked for errors @throws InstrumentStateException """ # trim, just in case we received some garbage with our response... if len(error_string) > 1: error_string = error_string[-1] if int(error_string): self._async_raise_fsm_event(ProtocolEvent.ERROR) error = 'RGA Error byte set: %s' % error_string self._param_dict.set_value(Parameter.ERROR_REASON, error) self._driver_event(DriverAsyncEvent.CONFIG_CHANGE) raise exceptions.InstrumentStateException(error) def _set_instrument_parameter(self, command): """ Set a parameter on the instrument. We will attempt up to MAX_SET_RETRIES to set the value correctly, according to the following sequence: 1. send set command 2. verify error byte, if returned (per Responses) 3. send query command 4. verify returned value equals the set value (within tolerance) @throws InstrumentParameterException """ response_type = getattr(Responses, command) parameter = getattr(Parameter, command) # store the configured setting old_value = self._param_dict.format(parameter) if old_value is None: raise exceptions.InstrumentParameterException('Missing required instrument parameter: %s' % parameter) log.debug('response_type: %s parameter: %s command: %s', response_type, getattr(Parameter, command), command) # attempt to set the value up to MAX_SET_RETRIES times for x in xrange(MAX_RETRIES): if response_type == STATUS: resp = self._do_cmd_resp(command, old_value) self._check_error_byte(resp) else: self._do_cmd_no_resp(command, old_value) # query the value from the instrument to load the parameter dictionary self._do_cmd_resp(command) # if values match, we were successful, return. 
difference = abs(self._param_dict.format(parameter) - old_value) if difference < CLOSE_ENOUGH: return log.error('Set attempt failed. Parameter: %s Set value: %s Returned value: %s difference: %.2f', parameter, old_value, self._param_dict.get(parameter), difference) # configuring the RGA failed, restore the setting from our configuration and raise an exception self._param_dict.set_value(parameter, old_value) raise exceptions.InstrumentParameterException('Unable to set instrument parameter: %s, attempted %d times' % (parameter, MAX_RETRIES)) def _build_sieve_function(self): """ Build a sieve function based on the expected data size. Replace the previous sieve function in the chunker. This should happen during the configuration phase. """ num_points = int(self._param_dict.get(Parameter.AP)) match_string = r'(?<=%s)(.{%d})' % (SCAN_START_SENTINEL, (num_points + 1) * 4) matcher = re.compile(match_string, re.DOTALL) def my_sieve(raw_data): return_list = [] log.debug('SIEVE: pattern=%r, raw_data_len=%d', matcher.pattern, len(raw_data)) # do not descend into this loop unless we are at log level trace... 
if log.isEnabledFor('trace'): temp = raw_data[:] while temp: log.trace('SIEVE: raw_data: %s', temp[:32].encode('hex')) if len(temp) > 32: temp = temp[32:] else: temp = '' for match in matcher.finditer(raw_data): # if sentinel value present in this slice it is invalid if not SCAN_START_SENTINEL in raw_data[match.start():match.end()]: return_list.append((match.start(), match.end())) return return_list self._chunker.sieve = my_sieve def _verify_filament(self): """ Ensure the filament is on and the current is within tolerance @throws InstrumentProtocolException """ self._do_cmd_resp(InstrumentCommand.FILAMENT_EMISSION) filament_difference = abs(1 - self._param_dict.get(Parameter.FL_ACTUAL)) if filament_difference > CLOSE_ENOUGH: self._async_raise_fsm_event(ProtocolEvent.ERROR) error = 'Filament power not withing tolerance (%.2f): %.2f' % (CLOSE_ENOUGH, filament_difference) self._param_dict.set_value(Parameter.ERROR_REASON, error) self._driver_event(DriverAsyncEvent.CONFIG_CHANGE) raise exceptions.InstrumentProtocolException(error) def _stop_instrument(self): """ Stop any running scan, flush the output buffer, turn off the filament and CDEM. Update the parameter dictionary for FL. """ try: self._remove_scheduler(ScheduledJob.TAKE_SCAN) log.debug('Successfully removed existing scheduled event TAKE_SCAN.') except KeyError as ke: log.debug('KeyError: %s', ke) self._do_cmd_resp(InstrumentCommand.INITIALIZE, 0) self._do_cmd_resp(InstrumentCommand.INITIALIZE, 2) self._do_cmd_resp(InstrumentCommand.FILAMENT_EMISSION) self.in_scan = False ######################################################################## # Unknown handlers. ######################################################################## def _handler_unknown_discover(self, *args, **kwargs): """ Discover current state @return (next_state, result) """ return ProtocolState.COMMAND, ResourceAgentState.IDLE ######################################################################## # Command handlers. 
######################################################################## def _handler_command_get(self, *args, **kwargs): """ Get parameter """ return self._handler_get(*args, **kwargs) def _handler_command_set(self, *args, **kwargs): """ Set parameter """ self._set_params(*args, **kwargs) return None, None def _handler_command_start_direct(self): """ Start direct access @return next_state, (next_agent_state, None) """ return ProtocolState.DIRECT_ACCESS, (ResourceAgentState.DIRECT_ACCESS, None) def _handler_command_start_scan(self): """ Start a scan @return next_state, (next_agent_state, None) """ return ProtocolState.SCAN, (ResourceAgentState.STREAMING, None) ######################################################################## # Direct access handlers. ######################################################################## def _handler_direct_access_enter(self, *args, **kwargs): """ Enter direct access state. """ # Tell driver superclass to send a state change event. # Superclass will query the state. self._driver_event(DriverAsyncEvent.STATE_CHANGE) self._sent_cmds = [] def _handler_direct_access_execute_direct(self, data): """ Forward direct access commands to the instrument. @return next_state, (next_agent_state, None) """ self._do_cmd_direct(data) # add sent command to list for 'echo' filtering in callback self._sent_cmds.append(data) return None, (None, None) def _handler_direct_access_stop_direct(self): """ Stop direct access, return to COMMAND. @return next_state, (next_agent_state, None) """ return ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None) ######################################################################## # Scan handlers ######################################################################## def _handler_scan_enter(self, *args, **kwargs): """ Enter the scan state. Configure the RGA, start the first scan and the scheduler. 
@throws InstrumentTimeoutException """ for attempt in range(1, MAX_RETRIES+1): try: self._handler_scan_configure_rga() self._async_raise_fsm_event(ProtocolEvent.TAKE_SCAN) self._build_scheduler() self._driver_event(DriverAsyncEvent.STATE_CHANGE) return except exceptions.InstrumentTimeoutException: log.error('Failed to configure the RGA - attempt %d', attempt) self._async_raise_fsm_event(ProtocolEvent.ERROR) error = 'Failed to configure RGA and start scanning.' self._param_dict.set_value(Parameter.ERROR_REASON, error) self._driver_event(DriverAsyncEvent.CONFIG_CHANGE) raise exceptions.InstrumentTimeoutException(error) def _handler_scan_exit(self, *args, **kwargs): """ Exit scan. Delete the scheduler. """ try: self._remove_scheduler(ScheduledJob.TAKE_SCAN) except KeyError: log.error("_remove_scheduler could not find: %s", ScheduledJob.TAKE_SCAN) def _handler_scan_configure_rga(self): """ Send the appropriate configuration to the RGA and update the chunker sieve function for the correct data length. 
""" # initialize the connection self._do_cmd_resp(InstrumentCommand.INITIALIZE, 0) # set these set_commands = [ (InstrumentCommand.ELECTRON_ENERGY, Parameter.EE), (InstrumentCommand.ION_ENERGY, Parameter.IE), (InstrumentCommand.FOCUS_VOLTAGE, Parameter.VF), (InstrumentCommand.NOISE_FLOOR, Parameter.NF), (InstrumentCommand.STEPS_PER_AMU, Parameter.SA), (InstrumentCommand.INITIAL_MASS, Parameter.MI), (InstrumentCommand.FINAL_MASS, Parameter.MF), ] for command, parameter in set_commands: self._set_instrument_parameter(command) # turn on the filament self._set_instrument_parameter(InstrumentCommand.FILAMENT_EMISSION) # query the read only items for command in [InstrumentCommand.READINGS_PER_SCAN, InstrumentCommand.FILAMENT_EMISSION, InstrumentCommand.ID, InstrumentCommand.CHECK_ERRORS]: self._do_cmd_resp(command) # publish the config as a status particle pd = self._param_dict.get_all() log.debug('parameter dictionary: %r', pd) ts = ntplib.system_to_ntp_time(time.time()) self._driver_event(DriverAsyncEvent.SAMPLE, RGAStatusParticle(pd, port_timestamp=ts).generate()) # replace the sieve function self._build_sieve_function() def _handler_scan_take_scan(self, *args, **kwargs): """ place a sentinel value in the chunker, then perform one analog scan from the RGA @return next_state, (next_agent_state, None) """ # empty the chunker self._chunker.clean() # place sentinel value in chunker self._chunker.add_chunk(SCAN_START_SENTINEL, ntplib.system_to_ntp_time(time.time())) self.scan_start_time = time.time() if self.in_scan: log.error('FAILED scan detected, in_scan sentinel set to TRUE') self.in_scan = True self._do_cmd_no_resp(InstrumentCommand.ANALOG_SCAN, 1) return None, (None, None) def _handler_scan_timeout(self, *args, **kwargs): """ Handle scan timeout @return next_state, (next_agent_state, None) """ # timeout, clear the instrument buffers self._do_cmd_resp(InstrumentCommand.INITIALIZE, 0) # verify the filament is still on self._verify_filament() return 
self._handler_scan_take_scan() def _handler_scan_stop_scan(self, *args, **kwargs): """ Stop scanning, go to COMMAND. @return next_state, (next_agent_state, None) """ self._stop_instrument() return ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None) def _handler_scan_error(self, *args, **kwargs): """ Stop scanning, go to ERROR. @return next_state, (next_agent_state, None) """ self._stop_instrument() return ProtocolState.ERROR, (ResourceAgentState.COMMAND, None) ######################################################################## # Error handlers ######################################################################## def _handler_error_clear(self): """ Leave the error state, return to COMMAND. @return next_state, (next_agent_state, None) """ self._param_dict.set_value(Parameter.ERROR_REASON, '') self._driver_event(DriverAsyncEvent.CONFIG_CHANGE) return ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None) ######################################################################## # Generic handlers ######################################################################## def _handler_generic_enter(self): """ Generic method to handle entering state. """ if self.get_current_state() != ProtocolState.UNKNOWN: self._init_params() self._driver_event(DriverAsyncEvent.STATE_CHANGE) def _handler_generic_exit(self): """
def __init__(self, prompts, newline, driver_event): """ Protocol constructor. @param prompts A BaseEnum class containing instrument prompts. @param newline The newline. @param driver_event Driver process event callback. """ # Construct protocol superclass. CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event) # Build protocol state machine. self._protocol_fsm = ThreadSafeFSM(ProtocolState, ProtocolEvent, ProtocolEvent.ENTER, ProtocolEvent.EXIT) # Add event handlers for protocol state machine. handlers = { ProtocolState.UNKNOWN: [ (ProtocolEvent.ENTER, self._handler_generic_enter), (ProtocolEvent.EXIT, self._handler_generic_exit), (ProtocolEvent.DISCOVER, self._handler_unknown_discover), ], ProtocolState.COMMAND: [ (ProtocolEvent.ENTER, self._handler_generic_enter), (ProtocolEvent.EXIT, self._handler_generic_exit), (ProtocolEvent.START_DIRECT, self._handler_command_start_direct), (ProtocolEvent.GET, self._handler_command_get), (ProtocolEvent.SET, self._handler_command_set), (ProtocolEvent.START_SCAN, self._handler_command_start_scan), ], ProtocolState.DIRECT_ACCESS: [ (ProtocolEvent.ENTER, self._handler_direct_access_enter), (ProtocolEvent.EXIT, self._handler_generic_exit), (ProtocolEvent.STOP_DIRECT, self._handler_direct_access_stop_direct), (ProtocolEvent.EXECUTE_DIRECT, self._handler_direct_access_execute_direct), ], ProtocolState.SCAN: [ (ProtocolEvent.ENTER, self._handler_scan_enter), (ProtocolEvent.EXIT, self._handler_scan_exit), (ProtocolEvent.STOP_SCAN, self._handler_scan_stop_scan), (ProtocolEvent.TAKE_SCAN, self._handler_scan_take_scan), (ProtocolEvent.TIMEOUT, self._handler_scan_timeout), (ProtocolEvent.ERROR, self._handler_scan_error), ], ProtocolState.ERROR: [ (ProtocolEvent.ENTER, self._handler_generic_enter), (ProtocolEvent.EXIT, self._handler_generic_exit), (ProtocolEvent.CLEAR, self._handler_error_clear), (ProtocolEvent.GET, self._handler_command_get), ] } for state in handlers: for event, handler in handlers[state]: 
self._protocol_fsm.add_handler(state, event, handler) # Construct the parameter dictionary containing device parameters, # current parameter values, and set formatting functions. self._build_param_dict() self._build_command_dict() self._build_driver_dict() # Add build handlers for device commands. for command in InstrumentCommand.list(): self._add_build_handler(command, self._generic_build_handler) # Add response handlers for device commands. for command in InstrumentCommand.list(): self._add_response_handler(command, functools.partial(self._generic_response_handler, command=command)) # State state machine in UNKNOWN state. self._protocol_fsm.start(ProtocolState.UNKNOWN) # commands sent sent to device to be filtered in responses for telnet DA self._sent_cmds = [] self._chunker = StringChunker(Protocol.sieve_function) self.initialize_scheduler() # all calls to do_cmd_resp should expect RESPONSE_REGEX and use TIMEOUT. Freeze these arguments... self._do_cmd_resp = functools.partial(self._do_cmd_resp, response_regex=RESPONSE_REGEX, timeout=TIMEOUT) # these variables are used to track scan time and completion status # for development and performance data self.scan_start_time = 0 self.in_scan = False
class UnitTestStringChunker(MiUnitTestCase):
    """
    Test the basic functionality of the chunker system via unit tests
    """
    # PAR sensor strings make convenient short fixtures; the trailing
    # "checksum" field is faked so each sample is easy to tell apart.
    SAMPLE_1 = "SATPAR0229,10.01,2206748111,111"
    SAMPLE_2 = "SATPAR0229,10.02,2206748222,222"
    SAMPLE_3 = "SATPAR0229,10.03,2206748333,333"

    FRAGMENT_1 = "SATPAR0229,10.01,"
    FRAGMENT_2 = "2206748544,123"
    FRAGMENT_SAMPLE = FRAGMENT_1+FRAGMENT_2

    MULTI_SAMPLE_1 = "%s\r\n%s" % (SAMPLE_1, SAMPLE_2)

    TIMESTAMP_1 = 3569168821.102485
    TIMESTAMP_2 = 3569168822.202485
    TIMESTAMP_3 = 3569168823.302485

    @staticmethod
    def sieve_function(raw_data):
        """
        The method that splits samples
        """
        pattern = r'SATPAR(?P<sernum>\d{4}),(?P<timer>\d{1,7}.\d\d),(?P<counts>\d{10}),(?P<checksum>\d{1,3})'
        matcher = re.compile(pattern)
        spans = []
        for match in matcher.finditer(raw_data):
            spans.append((match.start(), match.end()))
            log.debug("Sieving: %s...%s",
                      raw_data[match.start():match.start()+5],
                      raw_data[match.end()-5:match.end()])
        return spans

    def setUp(self):
        """
        Setup a chunker for use in tests
        """
        self._chunker = StringChunker(UnitTestStringChunker.sieve_function)

    def test_sieve(self):
        """
        Do a quick test of the sieve to make sure it does what we want.
        """
        sieve = UnitTestStringChunker.sieve_function
        self.assertEquals(sieve(self.SAMPLE_1), [(0, 31)])
        self.assertEquals(sieve(self.FRAGMENT_1), [])
        self.assertEquals(sieve(self.MULTI_SAMPLE_1), [(0, 31), (33, 64)])

    def test_regex_sieve(self):
        """
        Do a test of the regex based sieve to make sure it does what we want.
        """
        pattern = r'SATPAR(?P<sernum>\d{4}),(?P<timer>\d{1,7}.\d\d),(?P<counts>\d{10}),(?P<checksum>\d{1,3})'
        regexes = [re.compile(pattern)]
        self._chunker = StringChunker(partial(self._chunker.regex_sieve_function, regex_list=regexes))
        self.assertEquals(self._chunker.regex_sieve_function(self.SAMPLE_1, regexes), [(0, 31)])
        self.assertEquals(self._chunker.regex_sieve_function(self.FRAGMENT_1, regexes), [])
        self.assertEquals(self._chunker.regex_sieve_function(self.MULTI_SAMPLE_1, regexes), [(0, 31), (33, 64)])

    def test_make_chunks(self):
        # Two samples embedded in noise should yield exactly two chunks,
        # each carrying the add-time timestamp.
        padded = "Foo%sBar%sBat" % (self.SAMPLE_1, self.SAMPLE_2)
        self._chunker.add_chunk(padded, self.TIMESTAMP_1)
        chunks = self._chunker.chunks
        self.assertEqual(len(chunks), 2)
        self.assertEqual(chunks[0][0], self.TIMESTAMP_1)
        self.assertEqual(chunks[0][1], self.SAMPLE_1)
        self.assertEqual(chunks[1][1], self.SAMPLE_2)

    def test_add_get_simple(self):
        """
        Add a simple string of data to the buffer, get the next chunk out
        """
        self._chunker.add_chunk(self.SAMPLE_1, self.TIMESTAMP_1)
        (ts, chunk) = self._chunker.get_next_data()
        self.assertEquals(ts, self.TIMESTAMP_1)
        self.assertEquals(chunk, self.SAMPLE_1)
        # The previous fetch consumed the sample; nothing should remain.
        (ts, chunk) = self._chunker.get_next_data()
        self.assertEquals(ts, None)
        self.assertEquals(chunk, None)
        self.assertEqual(self._chunker.buffer, '')

    def test_rebase_timestamps(self):
        """
        Test an add/get without cleaning
        """
        self._chunker.add_chunk(self.SAMPLE_1, self.TIMESTAMP_1)
        self._chunker.add_chunk("BLEH", self.TIMESTAMP_2)
        self._chunker.get_next_data()
        # After the sample is consumed, only the leftover "BLEH" span remains,
        # rebased to the start of the buffer.
        timestamps = self._chunker.timestamps
        self.assertEqual(len(timestamps), 1)
        entry = timestamps[0]
        self.assertEqual(entry[0], 0)
        self.assertEqual(entry[1], 4)
        self.assertEqual(entry[2], self.TIMESTAMP_2)

    def test_add_many_get_simple(self):
        """
        Add a few simple strings of data to the buffer, get the chunks out
        """
        expected = [(self.SAMPLE_1, self.TIMESTAMP_1),
                    (self.SAMPLE_2, self.TIMESTAMP_2),
                    (self.SAMPLE_3, self.TIMESTAMP_3)]
        for sample, ts in expected:
            self._chunker.add_chunk(sample, ts)
        # Samples come back out in insertion order with their own timestamps.
        for sample, ts in expected:
            (got_ts, got_chunk) = self._chunker.get_next_data()
            self.assertEquals(got_ts, ts)
            self.assertEquals(got_chunk, sample)
        (got_ts, got_chunk) = self._chunker.get_next_data()
        self.assertEquals(got_chunk, None)
        self.assertEquals(got_ts, None)

    def test_add_get_fragment(self):
        """
        Add some fragments of a string, then verify that value is stitched together
        """
        # Only half a sample: nothing should come out yet.
        self._chunker.add_chunk(self.FRAGMENT_1, self.TIMESTAMP_1)
        (ts, chunk) = self._chunker.get_next_data()
        self.assertEquals(ts, None)
        self.assertEquals(chunk, None)
        # Completing the sample yields it with the FIRST fragment's timestamp.
        self._chunker.add_chunk(self.FRAGMENT_2, self.TIMESTAMP_2)
        (ts, chunk) = self._chunker.get_next_data()
        self.assertEquals(chunk, self.FRAGMENT_SAMPLE)
        self.assertEquals(ts, self.TIMESTAMP_1)

    def test_add_multiple_in_one(self):
        """
        Test multiple data bits input in a single sample. They will ultimately
        need to be split apart.
        """
        self._chunker.add_chunk(self.MULTI_SAMPLE_1, self.TIMESTAMP_1)
        # Both embedded samples come out separately, sharing one timestamp.
        for expected in (self.SAMPLE_1, self.SAMPLE_2):
            (ts, chunk) = self._chunker.get_next_data()
            self.assertEquals(chunk, expected)
            self.assertEquals(ts, self.TIMESTAMP_1)
        (ts, chunk) = self._chunker.get_next_data()
        self.assertEquals(chunk, None)
        self.assertEquals(ts, None)

    def test_funky_chunks(self):
        def funky_sieve(_):
            # Spans deliberately reported out of order.
            return [(3, 6), (0, 3)]

        self._chunker = StringChunker(funky_sieve)
        self._chunker.add_chunk("BarFoo", self.TIMESTAMP_1)
        # Chunks are emitted in buffer order, not sieve-report order.
        for expected in ("Bar", "Foo"):
            (ts, chunk) = self._chunker.get_next_data()
            self.assertEquals(chunk, expected)
            self.assertEquals(ts, self.TIMESTAMP_1)

    def test_overlap(self):
        cases = [
            # Single span and empty list pass through untouched.
            ([(0, 5)], [(0, 5)]),
            ([], []),
            # A later span intersecting a kept one is dropped.
            ([(0, 5), (3, 6)], [(0, 5)]),
            # Touching spans survive; overlap with a kept span is pruned.
            ([(0, 5), (5, 7), (6, 8)], [(0, 5), (5, 7)]),
        ]
        for raw, pruned in cases:
            self.assertEqual(StringChunker._prune_overlaps(raw), pruned)
class UnitTestStringChunker(MiUnitTestCase):
    """
    Test the basic functionality of the chunker system via unit tests
    """
    # For testing, use PAR sensor data here...short and easy to work with...
    # But cheat with the checksum. Make it easy to recognize which sample
    SAMPLE_1 = "SATPAR0229,10.01,2206748111,111"
    SAMPLE_2 = "SATPAR0229,10.02,2206748222,222"
    SAMPLE_3 = "SATPAR0229,10.03,2206748333,333"

    FRAGMENT_1 = "SATPAR0229,10.01,"
    FRAGMENT_2 = "2206748544,123"
    FRAGMENT_SAMPLE = FRAGMENT_1 + FRAGMENT_2

    MULTI_SAMPLE_1 = "%s\r\n%s" % (SAMPLE_1, SAMPLE_2)

    @staticmethod
    def sieve_function(raw_data):
        """
        The method that splits samples.

        @param raw_data: buffer contents to scan for PAR samples
        @retval list of (start, end) index tuples, one per matched sample
        """
        return_list = []
        pattern = r'SATPAR(?P<sernum>\d{4}),(?P<timer>\d{1,7}.\d\d),(?P<counts>\d{10}),(?P<checksum>\d{1,3})'
        regex = re.compile(pattern)

        for match in regex.finditer(raw_data):
            return_list.append((match.start(), match.end()))
            log.debug("Sieving: %s...%s",
                      raw_data[match.start():match.start() + 5],
                      raw_data[match.end() - 5:match.end()])

        return return_list

    def setUp(self):
        """
        Setup a chunker for use in tests
        """
        self._chunker = StringChunker(UnitTestStringChunker.sieve_function)

    def _display_chunk_list(self, data, chunk_list):
        """
        Display the data as viewed through the chunk list

        @param data: the backing string the indices refer to
        @param chunk_list: list of (start, end) tuples, or None
        @retval list of the substrings selected by chunk_list
        """
        data_list = []
        # PEP 8: compare to None with 'is', not '=='
        if chunk_list is None:
            return data_list
        for (s, e) in chunk_list:
            data_list.append(data[s:e])
        return data_list

    def test_sieve(self):
        """
        Do a quick test of the sieve to make sure it does what we want.
        """
        self.assertEqual([(0, 31)], UnitTestStringChunker.sieve_function(self.SAMPLE_1))
        self.assertEqual([], UnitTestStringChunker.sieve_function(self.FRAGMENT_1))
        self.assertEqual([(0, 31), (33, 64)], UnitTestStringChunker.sieve_function(self.MULTI_SAMPLE_1))

    def test_generate_data_lists(self):
        """
        Verify the internal data/non-data index lists for a buffer with
        noise around and between two samples.
        """
        sample_string = "Foo%sBar%sBat" % (self.SAMPLE_1, self.SAMPLE_2)
        self._chunker.add_chunk(sample_string)
        lists = self._chunker._generate_data_lists()

        log.debug("Data chunk list: %s",
                  self._display_chunk_list(sample_string, lists['data_chunk_list']))
        self.assertEqual(lists['data_chunk_list'], [(3, 34), (37, 68)])

        log.debug("Non-data chunk list: %s",
                  self._display_chunk_list(sample_string, lists['non_data_chunk_list']))
        self.assertEqual(lists['non_data_chunk_list'], [(0, 3), (34, 37)])

    def test_clean_chunk_list(self):
        """
        Verify _clean_chunk_list shifts indices left by the cleaned length
        and drops/clips entries that fall before the new start.
        """
        test_str = "abcdefghijklmnopqrstuvwxyz"
        short_test_str = test_str[10:]
        test_list = [(3, 5), (8, 12), (20, 25)]
        log.debug("Test string: %s", test_str)
        log.debug("Raw list: %s", self._display_chunk_list(test_str, test_list))

        result = self._chunker._clean_chunk_list(test_list, 10)
        log.debug("Shortened test string: %s", short_test_str)
        log.debug("Cleaned list: %s", self._display_chunk_list(short_test_str, result))
        self.assertEqual(result, [(0, 2), (10, 15)])

    def test_add_get_simple(self):
        """
        Add a simple string of data to the buffer, get the next chunk out
        """
        self._chunker.add_chunk(self.SAMPLE_1)
        result = self._chunker.get_next_data()
        self.assertEqual(result, self.SAMPLE_1)

        # It got cleared at the last fetch...
        result = self._chunker.get_next_data()
        self.assertEqual(result, None)

        result = self._chunker.get_next_non_data()
        self.assertEqual(result, None)

    def test_no_clean_data(self):
        """
        Test an add/get without cleaning
        """
        self._chunker.add_chunk(self.SAMPLE_1)
        result = self._chunker.get_next_data(clean=False)
        self.assertEqual(result, self.SAMPLE_1)

        # It did NOT get cleared at the last fetch...
        result = self._chunker.get_next_data()
        self.assertEqual(result, self.SAMPLE_1)

        # and now it did
        result = self._chunker.get_next_data()
        self.assertEqual(result, None)

    def test_add_many_get_simple(self):
        """
        Add a few simple strings of data to the buffer, get the chunks out
        """
        self._chunker.add_chunk(self.SAMPLE_1)
        self._chunker.add_chunk(self.SAMPLE_2)
        self._chunker.add_chunk(self.SAMPLE_3)

        # Chunks come back in insertion order
        result = self._chunker.get_next_data()
        self.assertEqual(result, self.SAMPLE_1)
        result = self._chunker.get_next_data()
        self.assertEqual(result, self.SAMPLE_2)
        result = self._chunker.get_next_data()
        self.assertEqual(result, self.SAMPLE_3)

        # Buffer exhausted
        result = self._chunker.get_next_data()
        self.assertEqual(result, None)

    def test_get_non_data(self):
        """
        Get some non-data blocks
        """
        self._chunker.add_chunk("Foo")
        self.assertEqual(len(self._chunker.nondata_chunk_list), 1)
        self.assertEqual(len(self._chunker.data_chunk_list), 0)

        self._chunker.add_chunk(self.SAMPLE_1)
        self.assertEqual(len(self._chunker.nondata_chunk_list), 1)
        self.assertEqual(len(self._chunker.data_chunk_list), 1)

        # Adjacent non-data adds coalesce into a single non-data chunk
        self._chunker.add_chunk("Bar")
        self._chunker.add_chunk("Bat")
        self.assertEqual(len(self._chunker.nondata_chunk_list), 2)
        self.assertEqual(len(self._chunker.data_chunk_list), 1)

        self._chunker.add_chunk(self.SAMPLE_2)
        self.assertEqual(len(self._chunker.nondata_chunk_list), 2)
        self.assertEqual(len(self._chunker.data_chunk_list), 2)

        self._chunker.add_chunk("Baz")
        self.assertEqual(len(self._chunker.nondata_chunk_list), 3)
        self.assertEqual(len(self._chunker.data_chunk_list), 2)

        result = self._chunker.get_next_data()
        self.assertEqual(result, self.SAMPLE_1)
        result = self._chunker.get_next_non_data()
        self.assertEqual(result, "BarBat")
        result = self._chunker.get_next_non_data()
        self.assertEqual(result, "Baz")
        result = self._chunker.get_next_data()
        self.assertEqual(result, None)

    def test_add_get_fragment(self):
        """
        Add some fragments of a string, then verify that value is stitched together
        """
        # Add a part of a sample
        self._chunker.add_chunk(self.FRAGMENT_1)
        result = self._chunker.get_next_data()
        self.assertEqual(result, None)
        self.assertEqual(len(self._chunker.nondata_chunk_list), 1)
        self.assertEqual(len(self._chunker.data_chunk_list), 0)

        # add the rest of the sample
        self._chunker.add_chunk(self.FRAGMENT_2)
        self.assertEqual(len(self._chunker.nondata_chunk_list), 0)
        self.assertEqual(len(self._chunker.data_chunk_list), 1)
        result = self._chunker.get_next_data()
        self.assertEqual(result, self.FRAGMENT_SAMPLE)

    def test_add_multiple_in_one(self):
        """
        Test multiple data bits input in a single sample. They will
        ultimately need to be split apart.
        """
        self._chunker.add_chunk(self.MULTI_SAMPLE_1)
        result = self._chunker.get_next_data()
        self.assertEqual(result, self.SAMPLE_1)
        result = self._chunker.get_next_data()
        self.assertEqual(result, self.SAMPLE_2)
        result = self._chunker.get_next_data()
        self.assertEqual(result, None)

    def test_get_raw(self):
        """
        Test the ability to get raw data, but not totally hose data strings
        """
        # Put some data fragments in
        self._chunker.add_chunk("Foo")
        self._chunker.add_chunk(self.SAMPLE_1)
        self._chunker.add_chunk(self.FRAGMENT_1)
        self._chunker.add_chunk(self.FRAGMENT_2)
        self._chunker.add_chunk("Baz")

        # Get a raw chunk out
        result = self._chunker.get_next_raw()
        self.assertEqual(result, "Foo")
        result = self._chunker.get_next_raw()
        self.assertEqual(result, self.SAMPLE_1)
        result = self._chunker.get_next_raw()
        self.assertEqual(result, self.FRAGMENT_1)

        # Fragments got ripped up
        result = self._chunker.get_next_data()
        self.assertEqual(result, None)

    def test_funky_chunks(self):
        """
        Verify a sieve that returns its match tuples out of order still
        yields chunks in buffer order.
        """
        def funky_sieve(data):
            # Matches deliberately returned out of order
            return [(3, 6), (0, 3)]

        self._chunker = StringChunker(funky_sieve)
        self._chunker.add_chunk("BarFoo")
        result = self._chunker.get_next_data()
        self.assertEqual(result, "Bar")
        result = self._chunker.get_next_data()
        self.assertEqual(result, "Foo")

    def test_overlap(self):
        """
        Verify overlap detection: adjacent spans do not count as
        overlapping, and an overlapping sieve raises SampleException.
        """
        self.assertFalse(StringChunker.overlaps([(0, 5)]))
        self.assertFalse(StringChunker.overlaps([]))
        self.assertTrue(StringChunker.overlaps([(0, 5), (3, 6)]))
        self.assertTrue(StringChunker.overlaps([(0, 5), (5, 7), (6, 8)]))
        self.assertTrue(StringChunker.overlaps([(0, 5), (6, 9), (5, 7)]))

        def overlap_sieve(data):
            return [(0, 3), (2, 6)]

        self._chunker = StringChunker(overlap_sieve)
        self.assertRaises(SampleException, self._chunker.add_chunk, "foobar")