Example #1
    def setUp(self):
        ParserUnitTestCase.setUp(self)

        self.error_callback_values = []
        self.state_callback_values = []
        self.publish_callback_values = []

        self.parser_config = {
            ParserConfigKey.ORBNAME: ParserConfigKey.ORBNAME,
            ParserConfigKey.SELECT: ParserConfigKey.SELECT,
            ParserConfigKey.REJECT: ParserConfigKey.REJECT,
        }

        self.parser_state = None

        self.PKT_ID = PKT_ID = 123
        self.PKT_TYPE = PKT_TYPE = 'GENC'
        self.PKT_DATA = PKT_DATA = 1, 2, 3, 4
        self.PKT_TIME = PKT_TIME = 999
        self.PKT_SAMPRATE = PKT_SAMPRATE = 666
        self.PKT_NET = PKT_NET = 'net'
        self.PKT_STA = PKT_STA = 'sta'
        self.PKT_CHAN = PKT_CHAN = 'chan'
        self.PKT_LOC = PKT_LOC = 'loc'

        from mi.core.kudu import _pkt
        pkt = _pkt._newPkt()
        _pkt._Pkt_pkttype_set(pkt, PKT_TYPE)
        pktchan = _pkt._newPktChannel()
        _pkt._PktChannel_data_set(pktchan, PKT_DATA)
        _pkt._PktChannel_samprate_set(pktchan, PKT_SAMPRATE)
        _pkt._PktChannel_time_set(pktchan, PKT_TIME)
        _pkt._PktChannel_net_set(pktchan, PKT_NET)
        _pkt._PktChannel_sta_set(pktchan, PKT_STA)
        _pkt._PktChannel_chan_set(pktchan, PKT_CHAN)
        _pkt._PktChannel_loc_set(pktchan, PKT_LOC)
        _pkt._Pkt_channels_set(pkt, [
            pktchan,
        ])
        pkttype, packet, srcname, time = _pkt._stuffPkt(pkt)
        _pkt._freePkt(pkt)

        with patch(
                'mi.dataset.parser.antelope_orb.OrbReapThr') as MockOrbReapThr:
            self.parser = AntelopeOrbParser(self.parser_config,
                                            self.parser_state,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.error_callback)
        self.parser._orbreapthr.get = MagicMock(return_value=(PKT_ID, srcname,
                                                              time, packet))
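
With this fixture, the mocked OrbReapThr.get always hands back the stuffed packet, so a single get_records() call should publish one batch of particles (this is essentially what the test methods in Example #7 verify). A minimal sketch of such a test method, assuming it sits in the same test case; the method name is hypothetical and not from the source.

    def test_mocked_packet_roundtrip(self):
        # Hypothetical test (illustration only): get_records() pulls the
        # stubbed (PKT_ID, srcname, time, packet) tuple from the mocked
        # OrbReapThr and publishes the resulting particles via pub_callback.
        result = self.parser.get_records()
        self.assertTrue(result is not None)
        # pub_callback appends one entry per publish call.
        self.assertEqual(len(self.publish_callback_values), 1)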
Example #2
    def _build_parser(self):
        """
        Build and return the parser
        """
        config = self._parser_config
        config.update({
            'particle_module': 'mi.dataset.parser.antelope_orb',
            'particle_class': ['AntelopeOrbPacketParticle']
        })
        log.debug("My Config: %s", config)
        log.debug("My parser state: %s", self._driver_state)
        self._parser = AntelopeOrbParser(
            config,
            self._driver_state.get(DriverStateKey.PARSER_STATE),
            self._save_parser_state,
            self._data_callback,
            self._sample_exception_callback,
        )
        return self._parser
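
For reference, a sketch of the parser configuration _build_parser ends up passing along, assuming ParserConfigKey is importable from the same antelope_orb parser module used throughout these examples; the orb name and the select/reject expressions are illustrative placeholders, not values from the source.

# Sketch only: the import location and the placeholder values are assumptions.
from mi.dataset.parser.antelope_orb import ParserConfigKey

parser_config = {
    ParserConfigKey.ORBNAME: 'localhost:6510',  # placeholder orb name
    ParserConfigKey.SELECT: '.*',               # placeholder select expression
    ParserConfigKey.REJECT: '',                 # placeholder reject expression
}
# _build_parser() then layers the particle bindings on top before
# constructing AntelopeOrbParser:
#   parser_config['particle_module'] = 'mi.dataset.parser.antelope_orb'
#   parser_config['particle_class'] = ['AntelopeOrbPacketParticle']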
Example #3
    def setUp(self):
        ParserUnitTestCase.setUp(self)

        self.error_callback_values = []
        self.state_callback_values = []
        self.publish_callback_values = []

        self.parser_config = {
            ParserConfigKey.ORBNAME: ParserConfigKey.ORBNAME,
            ParserConfigKey.SELECT: ParserConfigKey.SELECT,
            ParserConfigKey.REJECT: ParserConfigKey.REJECT,
        }

        self.parser_state = None

        self.PKT_ID = PKT_ID = 123
        self.PKT_TYPE = PKT_TYPE = 'GENC'
        self.PKT_DATA = PKT_DATA = 1, 2, 3, 4
        self.PKT_TIME = PKT_TIME = 999
        self.PKT_SAMPRATE = PKT_SAMPRATE = 666
        self.PKT_NET = PKT_NET = 'net'
        self.PKT_STA = PKT_STA = 'sta'
        self.PKT_CHAN = PKT_CHAN = 'chan'
        self.PKT_LOC = PKT_LOC = 'loc'

        from mi.core.kudu import _pkt
        pkt = _pkt._newPkt()
        _pkt._Pkt_pkttype_set(pkt, PKT_TYPE)
        pktchan = _pkt._newPktChannel()
        _pkt._PktChannel_data_set(pktchan, PKT_DATA)
        _pkt._PktChannel_samprate_set(pktchan, PKT_SAMPRATE)
        _pkt._PktChannel_time_set(pktchan, PKT_TIME)
        _pkt._PktChannel_net_set(pktchan, PKT_NET)
        _pkt._PktChannel_sta_set(pktchan, PKT_STA)
        _pkt._PktChannel_chan_set(pktchan, PKT_CHAN)
        _pkt._PktChannel_loc_set(pktchan, PKT_LOC)
        _pkt._Pkt_channels_set(pkt, [pktchan,])
        pkttype, packet, srcname, time = _pkt._stuffPkt(pkt)
        _pkt._freePkt(pkt)

        with patch('mi.dataset.parser.antelope_orb.OrbReapThr') as MockOrbReapThr:
            self.parser = AntelopeOrbParser(self.parser_config, self.parser_state,
                            self.state_callback, self.pub_callback,
                            self.error_callback)
        self.parser._orbreapthr.get = MagicMock(return_value=(PKT_ID, srcname, time, packet))
Example #4
    def _build_parser(self):
        """
        Build and return the parser
        """
        config = self._parser_config
        config.update({
            'particle_module': 'mi.dataset.parser.antelope_orb',
            'particle_class': ['AntelopeOrbPacketParticle']
        })
        log.debug("My Config: %s", config)
        log.debug("My parser state: %s", self._driver_state)
        self._parser = AntelopeOrbParser(
            config,
            self._driver_state.get(DriverStateKey.PARSER_STATE),
            self._save_parser_state,
            self._data_callback,
            self._sample_exception_callback,
        )
        return self._parser
Example #5
class AntelopeOrbDataSetDriver(DataSetDriver):
    _sampling = False

    def _poll(self):
        pass

    @classmethod
    def stream_config(cls):
        return [particle_class.type()
                for particle_class in PARTICLE_CLASSES.values()]

    def __init__(self, config, memento, data_callback, state_callback,
                 event_callback, exception_callback):
        super(AntelopeOrbDataSetDriver,
              self).__init__(config, memento, data_callback, state_callback,
                             event_callback, exception_callback)
        self._record_getter_greenlet = None
        self._parser = None
        self._driver_state = None

        self._init_state(memento)

        self._resource_id = self._config.get(DataSourceConfigKey.RESOURCE_ID)
        log.debug("Resource ID: %s", self._resource_id)

        self._file_in_process = '_'.join(
            (self._parser_config[ParserConfigKey.ORBNAME],
             self._parser_config[ParserConfigKey.SELECT],
             self._parser_config[ParserConfigKey.REJECT]))

    def _verify_config(self):
        """
        Verify we have good configurations for the parser.
        @raise: ConfigurationException if configuration is invalid
        """
        errors = []
        log.debug("Driver Config: %s", self._config)

        self._parser_config = self._config.get(DataSourceConfigKey.PARSER)
        if not self._parser_config:
            errors.append("missing 'parser' config")
        else:
            if ParserConfigKey.ORBNAME not in self._parser_config:
                errors.append("parser config missing 'orbname'")
            if ParserConfigKey.SELECT not in self._parser_config:
                errors.append("parser config missing 'select'")
            if ParserConfigKey.REJECT not in self._parser_config:
                errors.append("parser config missing 'reject'")

        if errors:
            log.error("Driver configuration error: %r", errors)
            raise ConfigurationException("driver configuration errors: %r",
                                         errors)

    def _init_state(self, memento):
        """
        Initialize driver state
        @param memento: agent persisted memento containing driver state
        """
        if memento is not None:
            if not isinstance(memento, dict):
                raise TypeError("memento must be a dict.")

            self._driver_state = memento
            if not self._driver_state:
                # if the state is empty, add a version
                self._driver_state = {DriverStateKey.VERSION: 0.1}
        else:
            # initialize the state since none was specified
            self._driver_state = {DriverStateKey.VERSION: 0.1}
        log.debug('initial driver state %s', self._driver_state)

    def _save_parser_state(self, state, file_ingested):
        """
        Callback to store the parser state in the driver object.
        @param state: Object used by the parser to indicate position
        """
        log.trace("saving parser state: %r", state)
        self._driver_state[DriverStateKey.PARSER_STATE] = state
        # push the updated driver state out through the state callback
        self._state_callback(self._driver_state)

    def _save_parser_state_after_error(self):
        """
        If a sample exception has made it to the driver, the file is done;
        mark it as ingested and save the state.
        """
        # TODO whut? maybe take this method out? we never fully ingest an orb.
        log.debug("File %s fully parsed", self._file_in_process)
        #        self._driver_state[DriverStateKey.INGESTED] = True
        self._state_callback(self._driver_state)

    def _build_parser(self):
        """
        Build and return the parser
        """
        config = self._parser_config
        config.update({
            'particle_module': 'mi.dataset.parser.antelope_orb',
            'particle_class': ['AntelopeOrbPacketParticle']
        })
        log.debug("My Config: %s", config)
        log.debug("My parser state: %s", self._driver_state)
        self._parser = AntelopeOrbParser(
            config,
            self._driver_state.get(DriverStateKey.PARSER_STATE),
            self._save_parser_state,
            self._data_callback,
            self._sample_exception_callback,
        )
        return self._parser

    def _record_getter(self, parser):
        # greenlet to call get_records in loop
        # normally this is done in the context of the harvester greenlet, but
        # we have no harvester.
        # NOTE This is slightly different from what delay is used for in
        # SimpleDataSetDriver. There it's used to rate-limit particle
        # publication. Here it's used as a polling delay while we wait for more
        # data to arrive in the queue.
        # Rate limiting doesn't really make sense here because we are streaming
        # live data; we simply must keep up. The only odd case is when we are
        # playing back older data due to initial startup or recovery after
        # comms loss. If that hammers the system we may need to implement rate
        # limiting here.
        # NOTE change to zero when we go from polling to green-blocking
        delay = 1
        try:
            while True:
                result = parser.get_records()
                if result:
                    log.trace("Record parsed: %r", result)
                else:
                    log.trace("No record, sleeping")
                    gevent.sleep(delay)
        except SampleException as e:
            # normally we would mark the bad file as ingested so we don't
            # re-ingest it, but don't do that for antelope URLs
            self._save_parser_state_after_error()
            self._sample_exception_callback(e)

    def _start_sampling(self):
        try:
            log.warning("Start Sampling")
            self._sampling = True
            parser = self._parser = self._build_parser()
            self._record_getter_greenlet = gevent.spawn(
                self._record_getter, parser)
        except Exception as e:
            log.debug("Exception detected when starting sampling: %s",
                      e,
                      exc_info=True)
            self._exception_callback(e)
            self._sampling = False
            try:
                parser.kill_threads()
            except:
                pass
            try:
                self._record_getter_greenlet.kill()
            except:
                pass

    def _stop_sampling(self):
        log.warning("Stop Sampling")
        self._sampling = False
        if self._record_getter_greenlet is not None:
            self._record_getter_greenlet.kill()
            self._record_getter_greenlet = None
        if self._parser is not None:
            self._parser.kill_threads()
            self._parser = None

    def _is_sampling(self):
        """
        Currently the drivers have only two states, command and streaming,
        and all resource commands are common (either start or stop
        autosample), so we did not implement an entire state machine to
        manage states and commands. If it gets more complex than this we
        should take the time to implement a state machine to add some
        flexibility.
        """
        return self._sampling
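
Putting the pieces together, a rough lifecycle sketch for this driver: supply the parser section that _verify_config checks for, construct the driver with the six callbacks its __init__ takes, then start and stop sampling. The orb name, select/reject expressions and resource id are placeholders, the lambda callbacks are stand-ins, and calling the protected _start_sampling/_stop_sampling hooks directly (rather than whatever public entry points the DataSetDriver base class exposes) is purely for illustration.

# Illustrative only -- the values and callbacks below are placeholders.
config = {
    DataSourceConfigKey.PARSER: {
        ParserConfigKey.ORBNAME: 'localhost:6510',
        ParserConfigKey.SELECT: '.*',
        ParserConfigKey.REJECT: '',
    },
    DataSourceConfigKey.RESOURCE_ID: 'antelope-orb-example',
}

driver = AntelopeOrbDataSetDriver(
    config,
    None,                    # memento: no persisted state; _init_state adds a version
    lambda particles: None,  # data_callback (assumed to receive published particles)
    lambda state: None,      # state_callback
    lambda *args: None,      # event_callback
    lambda exc: None,        # exception_callback
)
driver._start_sampling()     # builds the parser and spawns the record-getter greenlet
# ... stream for a while ...
driver._stop_sampling()      # kills the greenlet and the orb reap thread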
Example #6
class AntelopeOrbDataSetDriver(DataSetDriver):
    _sampling = False

    def _poll(self):
        pass

    @classmethod
    def stream_config(cls):
        return [AntelopeOrbPacketParticle.type()]

    def __init__(self, config, memento, data_callback, state_callback, event_callback, exception_callback):
        super(AntelopeOrbDataSetDriver, self).__init__(config, memento, data_callback, state_callback,
                                                       event_callback, exception_callback)
        self._record_getter_greenlet = None
        self._parser = None
        self._driver_state = None

        self._init_state(memento)

        self._resource_id = self._config.get(DataSourceConfigKey.RESOURCE_ID)
        log.debug("Resource ID: %s", self._resource_id)

        self._file_in_process = '_'.join((self._parser_config[ParserConfigKey.ORBNAME],
                                          self._parser_config[ParserConfigKey.SELECT],
                                          self._parser_config[ParserConfigKey.REJECT]))

    def _verify_config(self):
        """
        Verify we have good configurations for the parser.
        @raise: ConfigurationException if configuration is invalid
        """
        errors = []
        log.debug("Driver Config: %s", self._config)

        self._parser_config = self._config.get(DataSourceConfigKey.PARSER)
        if not self._parser_config:
            errors.append("missing 'parser' config")
        else:
            if ParserConfigKey.ORBNAME not in self._parser_config:
                errors.append("parser config missing 'orbname'")
            if ParserConfigKey.SELECT not in self._parser_config:
                errors.append("parser config missing 'select'")
            if ParserConfigKey.REJECT not in self._parser_config:
                errors.append("parser config missing 'reject'")

        if errors:
            log.error("Driver configuration error: %r", errors)
            raise ConfigurationException("driver configuration errors: %r", errors)

    def _init_state(self, memento):
        """
        Initialize driver state
        @param memento: agent persisted memento containing driver state
        """
        if memento is not None:
            if not isinstance(memento, dict):
                raise TypeError("memento must be a dict.")

            self._driver_state = memento
            if not self._driver_state:
                # if the state is empty, add a version
                self._driver_state = {DriverStateKey.VERSION: 0.1}
        else:
            # initialize the state since none was specified
            self._driver_state = {DriverStateKey.VERSION: 0.1}
        log.debug('initial driver state %s', self._driver_state)

    def _save_parser_state(self, state, file_ingested):
        """
        Callback to store the parser state in the driver object.
        @param state: Object used by the parser to indicate position
        """
        log.trace("saving parser state: %r", state)
        self._driver_state[DriverStateKey.PARSER_STATE] = state
        # push the updated driver state out through the state callback
        self._state_callback(self._driver_state)

    def _save_parser_state_after_error(self):
        """
        If a sample exception has made it to the driver, the file is done;
        mark it as ingested and save the state.
        """
        # TODO whut? maybe take this method out? we never fully ingest an orb.
        log.debug("File %s fully parsed", self._file_in_process)
#        self._driver_state[DriverStateKey.INGESTED] = True
        self._state_callback(self._driver_state)

    def _build_parser(self):
        """
        Build and return the parser
        """
        config = self._parser_config
        config.update({
            'particle_module': 'mi.dataset.parser.antelope_orb',
            'particle_class': ['AntelopeOrbPacketParticle']
        })
        log.debug("My Config: %s", config)
        log.debug("My parser state: %s", self._driver_state)
        self._parser = AntelopeOrbParser(
            config,
            self._driver_state.get(DriverStateKey.PARSER_STATE),
            self._save_parser_state,
            self._data_callback,
            self._sample_exception_callback,
        )
        return self._parser

    def _record_getter(self, parser):
        # greenlet to call get_records in loop
        # normally this is done in the context of the harvester greenlet, but
        # we have no harvester.
        # NOTE This is slightly different from what delay is used for in
        # SimpleDataSetDriver. There it's used to rate-limit particle
        # publication. Here it's used as a polling delay while we wait for more
        # data to arrive in the queue.
        # Rate limiting doesn't really make sense here because we are streaming
        # live data; we simply must keep up. The only odd case is when we are
        # playing back older data due to initial startup or recovery after
        # comms loss. If that hammers the system we may need to implement rate
        # limiting here.
        # NOTE change to zero when we go from polling to green-blocking
        delay = 1
        try:
            while True:
                result = parser.get_records()
                if result:
                    log.trace("Record parsed: %r", result)
                else:
                    log.trace("No record, sleeping")
                    gevent.sleep(delay)
        except SampleException as e:
            # normally we would mark the bad file as ingested so we don't
            # re-ingest it, but don't do that for antelope URLs
            self._save_parser_state_after_error()
            self._sample_exception_callback(e)

    def _start_sampling(self):
        try:
            log.warning("Start Sampling")
            self._sampling = True
            parser = self._parser = self._build_parser()
            self._record_getter_greenlet = gevent.spawn(self._record_getter, parser)
        except Exception as e:
            log.debug("Exception detected when starting sampling: %s", e, exc_info=True)
            self._exception_callback(e)
            self._sampling = False
            try:
                parser.kill_threads()
            except:
                pass
            try:
                self._record_getter_greenlet.kill()
            except:
                pass

    def _stop_sampling(self):
        log.warning("Stop Sampling")
        self._sampling = False
        if self._record_getter_greenlet is not None:
            self._record_getter_greenlet.kill()
            self._record_getter_greenlet = None
        if self._parser is not None:
            self._parser.kill_threads()
            self._parser = None

    def _is_sampling(self):
        """
        Currently the drivers have only two states, command and streaming,
        and all resource commands are common (either start or stop
        autosample), so we did not implement an entire state machine to
        manage states and commands. If it gets more complex than this we
        should take the time to implement a state machine to add some
        flexibility.
        """
        return self._sampling
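
The only functional difference from Example #5 is stream_config: there it walks a PARTICLE_CLASSES registry, here it returns the single AntelopeOrbPacketParticle stream. A sketch of the kind of registry the first form assumes; the key name is illustrative and not taken from the source.

# Illustrative registry shape assumed by Example #5's stream_config();
# the 'packet' key is a placeholder.
PARTICLE_CLASSES = {
    'packet': AntelopeOrbPacketParticle,
}
# stream_config() then yields one stream name per registered particle class:
#   [particle_class.type() for particle_class in PARTICLE_CLASSES.values()]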
Example #7
class AntelopeOrbParserUnitTestCase(ParserUnitTestCase):
    def state_callback(self, state, file_ingested):
        """ Call back method to watch what comes in via the state callback """
        log.trace("SETTING state_callback_value to " + str(state))
        self.state_callback_values.append(state)
        self.file_ingested = file_ingested

    def pub_callback(self, particle):
        """ Call back method to watch what comes in via the publish callback """
        log.trace("SETTING publish_callback_value to " + str(particle))
        self.publish_callback_values.append(particle)

    def error_callback(self, error):
        """ Call back method to watch what comes in via the state callback """
        log.trace("SETTING error_callback_value to " + str(error))
        self.error_callback_values.append(error)

    def setUp(self):
        ParserUnitTestCase.setUp(self)

        self.error_callback_values = []
        self.state_callback_values = []
        self.publish_callback_values = []

        self.parser_config = {
            ParserConfigKey.ORBNAME: ParserConfigKey.ORBNAME,
            ParserConfigKey.SELECT: ParserConfigKey.SELECT,
            ParserConfigKey.REJECT: ParserConfigKey.REJECT,
        }

        self.parser_state = None

        self.PKT_ID = PKT_ID = 123
        self.PKT_TYPE = PKT_TYPE = 'GENC'
        self.PKT_DATA = PKT_DATA = 1, 2, 3, 4
        self.PKT_TIME = PKT_TIME = 999
        self.PKT_SAMPRATE = PKT_SAMPRATE = 666
        self.PKT_NET = PKT_NET = 'net'
        self.PKT_STA = PKT_STA = 'sta'
        self.PKT_CHAN = PKT_CHAN = 'chan'
        self.PKT_LOC = PKT_LOC = 'loc'

        from mi.core.kudu import _pkt
        pkt = _pkt._newPkt()
        _pkt._Pkt_pkttype_set(pkt, PKT_TYPE)
        pktchan = _pkt._newPktChannel()
        _pkt._PktChannel_data_set(pktchan, PKT_DATA)
        _pkt._PktChannel_samprate_set(pktchan, PKT_SAMPRATE)
        _pkt._PktChannel_time_set(pktchan, PKT_TIME)
        _pkt._PktChannel_net_set(pktchan, PKT_NET)
        _pkt._PktChannel_sta_set(pktchan, PKT_STA)
        _pkt._PktChannel_chan_set(pktchan, PKT_CHAN)
        _pkt._PktChannel_loc_set(pktchan, PKT_LOC)
        _pkt._Pkt_channels_set(pkt, [
            pktchan,
        ])
        pkttype, packet, srcname, time = _pkt._stuffPkt(pkt)
        _pkt._freePkt(pkt)

        with patch(
                'mi.dataset.parser.antelope_orb.OrbReapThr') as MockOrbReapThr:
            self.parser = AntelopeOrbParser(self.parser_config,
                                            self.parser_state,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.error_callback)
        self.parser._orbreapthr.get = MagicMock(return_value=(PKT_ID, srcname,
                                                              time, packet))

    def test_get_records(self):
        r = self.parser.get_records()
        self.assert_(r is not None)
        self.assertEqual(len(self.publish_callback_values), 1)

    def get_data_value(self, data_dict, key):
        for value in data_dict['values']:
            if value['value_id'] == key:
                return value['value']
        raise KeyError(key)

    def test_build_parsed_values(self):
        self.parser.get_records()
        particle = self.publish_callback_values[0][0]
        self.assertEquals(particle._data_particle_type,
                          'antelope_orb_packet_chan')
        r = particle.generate_dict()
        from pprint import pformat
        log.trace(pformat(r))
        self.assertEquals(
            self.PKT_ID, self.get_data_value(r,
                                             AntelopeOrbPacketParticleKey.ID))
        self.assertEquals(
            self.PKT_TYPE,
            self.get_data_value(r, AntelopeOrbPacketParticleKey.TYPE)[1])
        channels = self.get_data_value(r,
                                       AntelopeOrbPacketParticleKey.CHANNELS)
        self.assertEquals(len(channels), 1)
        chan = channels[0]
        self.assertEquals(
            self.PKT_DATA,
            tuple(chan[AntelopeOrbPacketParticleChannelKey.DATA]))
        self.assertEquals(self.PKT_TIME,
                          chan[AntelopeOrbPacketParticleChannelKey.TIME])
        self.assertEquals(self.PKT_SAMPRATE,
                          chan[AntelopeOrbPacketParticleChannelKey.SAMPRATE])
        self.assertEquals(self.PKT_NET,
                          chan[AntelopeOrbPacketParticleChannelKey.NET])
        self.assertEquals(self.PKT_STA,
                          chan[AntelopeOrbPacketParticleChannelKey.STA])
        self.assertEquals(self.PKT_CHAN,
                          chan[AntelopeOrbPacketParticleChannelKey.CHAN])
        self.assertEquals(self.PKT_LOC,
                          chan[AntelopeOrbPacketParticleChannelKey.LOC])

    def assert_state(self, expected_tafter):
        """
        Verify the state
        """
        state = self.parser._state
        log.debug("Current state: %s", state)

        position = state.get(StateKey.TAFTER)
        self.assertEqual(position, expected_tafter)

    def test_set_state(self):
        self.parser.get_records()
        self.assert_state(self.PKT_TIME)

    def test_get_exception(self):
        def f(*args, **kwargs):
            raise Exception()

        self.parser._orbreapthr.get = f
        self.assertRaises(Exception, self.parser.get_records)

    def test_get_error(self):
        from mi.core.kudu.brttpkt import GetError

        def f(*args, **kwargs):
            raise NoData()

        self.parser._orbreapthr.get = f
        self.parser.get_records()

    def test_sample_exception(self):
        self.parser._orbreapthr.get = MagicMock(return_value=(0, '', 0,
                                                              'asdf'))
        self.assertRaises(SampleException, self.parser.get_records)
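
get_data_value() and test_build_parsed_values() above index into the dictionary returned by the particle's generate_dict(). A sketch of the assumed shape of that dictionary for the single-channel packet built in setUp; only the 'values' / 'value_id' / 'value' fields and the key constants are taken from the tests, the rest is illustrative and abridged.

# Assumed (abridged) shape of particle.generate_dict() for the mocked packet:
# {
#     'values': [
#         {'value_id': AntelopeOrbPacketParticleKey.ID, 'value': 123},
#         {'value_id': AntelopeOrbPacketParticleKey.TYPE, 'value': (..., 'GENC')},
#         {'value_id': AntelopeOrbPacketParticleKey.CHANNELS, 'value': [
#             {AntelopeOrbPacketParticleChannelKey.DATA: [1, 2, 3, 4],
#              AntelopeOrbPacketParticleChannelKey.TIME: 999,
#              AntelopeOrbPacketParticleChannelKey.SAMPRATE: 666,
#              AntelopeOrbPacketParticleChannelKey.NET: 'net',
#              AntelopeOrbPacketParticleChannelKey.STA: 'sta',
#              AntelopeOrbPacketParticleChannelKey.CHAN: 'chan',
#              AntelopeOrbPacketParticleChannelKey.LOC: 'loc'},
#         ]},
#         # ... other particle fields omitted ...
#     ],
# }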
Example #8
class AntelopeOrbParserUnitTestCase(ParserUnitTestCase):
    def state_callback(self, state, file_ingested):
        """ Call back method to watch what comes in via the state callback """
        log.trace("SETTING state_callback_value to " + str(state))
        self.state_callback_values.append(state)
        self.file_ingested = file_ingested

    def pub_callback(self, particle):
        """ Call back method to watch what comes in via the publish callback """
        log.trace("SETTING publish_callback_value to " + str(particle))
        self.publish_callback_values.append(particle)

    def error_callback(self, error):
        """ Call back method to watch what comes in via the state callback """
        log.trace("SETTING error_callback_value to " + str(error))
        self.error_callback_values.append(error)

    def setUp(self):
        ParserUnitTestCase.setUp(self)

        self.error_callback_values = []
        self.state_callback_values = []
        self.publish_callback_values = []

        self.parser_config = {
            ParserConfigKey.ORBNAME: ParserConfigKey.ORBNAME,
            ParserConfigKey.SELECT: ParserConfigKey.SELECT,
            ParserConfigKey.REJECT: ParserConfigKey.REJECT,
        }

        self.parser_state = None

        self.PKT_ID = PKT_ID = 123
        self.PKT_TYPE = PKT_TYPE = 'GENC'
        self.PKT_DATA = PKT_DATA = 1, 2, 3, 4
        self.PKT_TIME = PKT_TIME = 999
        self.PKT_SAMPRATE = PKT_SAMPRATE = 666
        self.PKT_NET = PKT_NET = 'net'
        self.PKT_STA = PKT_STA = 'sta'
        self.PKT_CHAN = PKT_CHAN = 'chan'
        self.PKT_LOC = PKT_LOC = 'loc'

        from mi.core.kudu import _pkt
        pkt = _pkt._newPkt()
        _pkt._Pkt_pkttype_set(pkt, PKT_TYPE)
        pktchan = _pkt._newPktChannel()
        _pkt._PktChannel_data_set(pktchan, PKT_DATA)
        _pkt._PktChannel_samprate_set(pktchan, PKT_SAMPRATE)
        _pkt._PktChannel_time_set(pktchan, PKT_TIME)
        _pkt._PktChannel_net_set(pktchan, PKT_NET)
        _pkt._PktChannel_sta_set(pktchan, PKT_STA)
        _pkt._PktChannel_chan_set(pktchan, PKT_CHAN)
        _pkt._PktChannel_loc_set(pktchan, PKT_LOC)
        _pkt._Pkt_channels_set(pkt, [pktchan,])
        pkttype, packet, srcname, time = _pkt._stuffPkt(pkt)
        _pkt._freePkt(pkt)

        with patch('mi.dataset.parser.antelope_orb.OrbReapThr') as MockOrbReapThr:
            self.parser = AntelopeOrbParser(self.parser_config, self.parser_state,
                            self.state_callback, self.pub_callback,
                            self.error_callback)
        self.parser._orbreapthr.get = MagicMock(return_value=(PKT_ID, srcname, time, packet))

    def test_get_records(self):
        r = self.parser.get_records()
        self.assert_(r is not None)
        self.assertEqual(len(self.publish_callback_values), 1)

    def get_data_value(self, data_dict, key):
        for value in data_dict['values']:
            if value['value_id'] == key:
                return value['value']
        raise KeyError(key)

    def test_build_parsed_values(self):
        self.parser.get_records()
        r = self.publish_callback_values[0][0].generate_dict()
        from pprint import pformat
        log.trace(pformat(r))
        self.assertEquals(self.PKT_ID, self.get_data_value(r, AntelopeOrbPacketParticleKey.ID))
        self.assertEquals(self.PKT_TYPE, self.get_data_value(r, AntelopeOrbPacketParticleKey.TYPE)[1])
        channels = self.get_data_value(r, AntelopeOrbPacketParticleKey.CHANNELS)
        self.assertEquals(len(channels), 1)
        chan = channels[0]
        self.assertEquals(self.PKT_DATA,
                            tuple(chan[AntelopeOrbPacketParticleChannelKey.DATA]))
        self.assertEquals(self.PKT_TIME, chan[AntelopeOrbPacketParticleChannelKey.TIME])
        self.assertEquals(self.PKT_SAMPRATE, chan[AntelopeOrbPacketParticleChannelKey.SAMPRATE])
        self.assertEquals(self.PKT_NET, chan[AntelopeOrbPacketParticleChannelKey.NET])
        self.assertEquals(self.PKT_STA, chan[AntelopeOrbPacketParticleChannelKey.STA])
        self.assertEquals(self.PKT_CHAN, chan[AntelopeOrbPacketParticleChannelKey.CHAN])
        self.assertEquals(self.PKT_LOC, chan[AntelopeOrbPacketParticleChannelKey.LOC])

    def assert_state(self, expected_tafter):
        """
        Verify the state
        """
        state = self.parser._state
        log.debug("Current state: %s", state)

        position = state.get(StateKey.TAFTER)
        self.assertEqual(position, expected_tafter)

    def test_set_state(self):
        self.parser.get_records()
        self.assert_state(self.PKT_TIME)

    def test_get_exception(self):
        def f(*args, **kwargs):
            raise Exception()
        self.parser._orbreapthr.get = f
        self.assertRaises(Exception, self.parser.get_records)

    def test_get_error(self):
        from mi.core.kudu.brttpkt import GetError
        def f(*args, **kwargs):
            raise NoData()
        self.parser._orbreapthr.get = f
        self.parser.get_records()

    def test_sample_exception(self):
        self.parser._orbreapthr.get = MagicMock(return_value=(0, '', 0, 'asdf'))
        self.parser.get_records()
        self.assertRaises(SampleException, self.publish_callback_values[0][0]._build_parsed_values)