예제 #1
0
class ZplscCInstrumentDataParticle(DataParticle):
    """
    Class for generating the zplsc_c instrument particle.
    """

    _data_particle_type = DataParticleType.ZPLSC_C_DCL_SAMPLE
    __metaclass__ = get_logging_metaclass(log_level='trace')

    def _build_parsed_values(self):
        """
        Build parsed values for Instrument Data Particle.
        @return: list containing type encoded "particle value id:value" dictionary pairs
        """
        # Each entry in ZPLSC_C_DATA_RULES is a tuple of (particle field
        # name, count or count reference, conversion function).  Fields whose
        # raw value is missing are emitted as explicit None entries rather
        # than dropped, so every particle carries the full field set.
        parsed_values = []
        for name, _counter, encoder in ZPLSC_C_DATA_RULES:
            raw_value = self.raw_data[name]
            if raw_value is None:
                parsed_values.append({
                    DataParticleKey.VALUE_ID: name,
                    DataParticleKey.VALUE: None
                })
            else:
                parsed_values.append(
                    self._encode_value(name, raw_value, encoder))
        return parsed_values
예제 #2
0
class CamhdAParser(SimpleParser):
    """
    Parser for camhd_a video files.
    """

    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, config, stream_handle, exception_callback):
        """
        Initialize the camhd_a parser, which does not use state or the chunker
        and sieve functions.
        @param config: The parser configuration dictionary
        @param stream_handle: The stream handle of the file to parse
        @param exception_callback: The callback to use when an exception occurs
        """

        super(CamhdAParser, self).__init__(config, stream_handle,
                                           exception_callback)

    def recov_exception_callback(self, message):
        """
        Log a warning and report a RecoverableSampleException to the
        registered exception callback.
        @param message: description of the recoverable error
        """
        log.warn(message)
        self._exception_callback(RecoverableSampleException(message))

    def parse_file(self):
        """
        Parse the *.mp4 file.

        The particle timestamp is derived from the file name, which must
        match FILE_PATH_MATCHER (REFDES-YYYYmmddTHHMMSSZ.mp4).  A
        non-matching name is reported as a recoverable error.
        """
        match = FILE_PATH_MATCHER.match(self._stream_handle.name)
        if match:
            file_datetime = match.group('Date') + match.group('Time')
            time_stamp = ntplib.system_to_ntp_time(
                utilities.formatted_timestamp_utc_time(file_datetime,
                                                       TIMESTAMP_FORMAT))

            # Extract a particle and append it to the record buffer
            particle = self._extract_sample(CamhdAInstrumentDataParticle,
                                            None,
                                            match.group('Path'),
                                            internal_timestamp=time_stamp)
            # _extract_sample may return None on failure; guard before use
            # (consistent with the other parsers in this family).
            if particle is not None:
                log.debug('Parsed particle: %s', particle.generate_dict())
                self._record_buffer.append(particle)

        else:
            # Files retrieved from the instrument should always match the timestamp naming convention
            self.recov_exception_callback(
                "Unable to extract file time from input file name: %s. "
                "Expected format REFDES-YYYYmmddTHHMMSSZ.mp4" %
                self._stream_handle.name)
예제 #3
0
class WinchCsppParser(SimpleParser):
    """
    Parser for Winch CSPP data.
    """

    __metaclass__ = get_logging_metaclass(log_level='debug')

    def parse_file(self):
        """
        Parse Winch CSPP text file.

        Each line matching WINCH_DATA_MATCHER yields one WinchCsppDataParticle
        whose internal timestamp is derived from the record's date and time
        fields; non-matching lines are reported as recoverable errors.
        """
        for record in self._stream_handle:

            match = WINCH_DATA_MATCHER.match(record)
            if not match:
                # If it is not a valid Winch Cspp record, ignore it.
                error_message = 'Winch Cspp data regex does not match for line: %s' % record
                log.warn(error_message)
                self._exception_callback(
                    RecoverableSampleException(error_message))
                continue

            # Convert the record's date/time fields into an NTP timestamp.
            year, month, day = match.group(
                WinchCsppParserDataParticleKey.DATE).split('-')
            hour, minute, second = match.group(
                WinchCsppParserDataParticleKey.TIME).split(':')

            unix_time = calendar.timegm(
                (int(year), int(month), int(day), int(hour), int(minute),
                 float(second)))
            ntp_time = ntplib.system_to_ntp_time(unix_time)

            # Generate a Winch CSPP particle using the group dictionary and
            # add it to the internal buffer.
            particle = self._extract_sample(WinchCsppDataParticle,
                                            None,
                                            match.groupdict(),
                                            internal_timestamp=ntp_time)
            if particle is not None:
                self._record_buffer.append(particle)
예제 #4
0
class Protocol(CommandResponseInstrumentProtocol):
    """
    Instrument protocol class.
    Subclasses CommandResponseInstrumentProtocol.

    Raw data files are parsed in a multiprocessing worker pool; the results
    are converted into particles and published on a dedicated daemon thread
    (see particles_thread).
    """

    __metaclass__ = get_logging_metaclass(log_level='trace')

    def __init__(self, prompts, newline, driver_event):
        """
        Protocol constructor.
        @param prompts A BaseEnum class containing instrument prompts.
        @param newline The newline.
        @param driver_event Driver process event callback.
        """
        # Construct protocol superclass.
        CommandResponseInstrumentProtocol.__init__(self, prompts, newline,
                                                   driver_event)

        # Build protocol state machine.
        self._protocol_fsm = ThreadSafeFSM(ProtocolState, ProtocolEvent,
                                           ProtocolEvent.ENTER,
                                           ProtocolEvent.EXIT)

        # Add event handlers for protocol state machine.
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.ENTER,
                                       self._handler_unknown_enter)
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.DISCOVER,
                                       self._handler_unknown_discover)

        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.ENTER,
                                       self._handler_command_enter)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.START_AUTOSAMPLE,
                                       self._handler_command_autosample)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.ACQUIRE_STATUS,
                                       self._handler_command_acquire_status)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.GET,
                                       self._handler_command_get)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.SET,
                                       self._handler_command_set)

        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.STOP_AUTOSAMPLE,
                                       self._handler_autosample_stop)
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.GET,
                                       self._handler_command_get)

        # Construct the parameter dictionary containing device parameters,
        # current parameter values, and set formatting functions.
        self._build_driver_dict()
        self._build_command_dict()
        self._build_param_dict()

        # Add sample handlers.

        # Start state machine in UNKNOWN state.
        self._protocol_fsm.start(ProtocolState.UNKNOWN)

        # commands sent to device to be filtered in responses for telnet DA
        self._sent_cmds = []

        self._chunker = StringChunker(self.sieve_function)

        # Set up the asynchronous particle-generation machinery: a deque of
        # pending (filepath, timestamp) requests, a worker pool to parse the
        # raw files, and a daemon thread to publish completed particles.
        log.info('processing particles with %d workers', POOL_SIZE)
        self._process_particles = True
        self._pending_particles = deque()
        self._processing_pool = multiprocessing.Pool(POOL_SIZE)

        self._particles_thread = Thread(target=self.particles_thread)
        self._particles_thread.setDaemon(True)
        self._particles_thread.start()

    def particles_thread(self):
        """
        Background worker: drain the pending-file deque, farm each raw file
        out to the processing pool, and publish the resulting metadata and
        sample particles through the driver event callback.

        Runs until shutdown() clears _process_particles AND all outstanding
        pool jobs have completed; the pool is always terminated and joined
        on exit.  Worker exceptions are forwarded as DriverAsyncEvent.ERROR
        rather than killing the thread.
        """
        log.info('Starting particles generation thread.')
        processing_pool = self._processing_pool
        try:
            futures = {}

            while self._process_particles or futures:
                # Pull all processing requests from our request deque
                # Unless we have been instructed to terminate
                while self._process_particles:
                    try:
                        filepath, timestamp = self._pending_particles.popleft()
                        log.info('Received RAW file to process: %r %r',
                                 filepath, timestamp)
                        # Schedule for processing
                        # parse_datagram_file takes the filepath and returns a
                        # tuple containing the metadata and timestamp for creation
                        # of the particle
                        futures[(filepath,
                                 timestamp)] = processing_pool.apply_async(
                                     parse_particles_file, (filepath, ))
                    except IndexError:
                        # Deque is empty: nothing left to schedule this pass.
                        break

                # Grab our keys here, to avoid mutating the dictionary while iterating
                future_keys = sorted(futures)
                if future_keys:
                    log.debug('Awaiting completion of %d particles',
                              len(future_keys))

                for key in future_keys:
                    future = futures[key]
                    if future.ready():
                        try:
                            # Job complete, remove the future from our dictionary and generate a particle
                            result = future.get()
                        except Exception as e:
                            # Capture the worker's exception; reported below
                            # as a driver error event.
                            result = e

                        futures.pop(key, None)

                        if isinstance(result, Exception):
                            self._driver_event(DriverAsyncEvent.ERROR, result)
                            continue

                        if result is not None:
                            metadata, internal_timestamp, data_times, power_data_dict, frequencies = result

                            filepath, timestamp = key
                            log.info(
                                'Completed particles with filepath: %r timestamp: %r',
                                filepath, timestamp)

                            # One metadata particle per file...
                            metadata_particle = ZplscBInstrumentDataParticle(
                                metadata,
                                port_timestamp=timestamp,
                                internal_timestamp=internal_timestamp,
                                preferred_timestamp=DataParticleKey.
                                INTERNAL_TIMESTAMP)
                            parsed_sample = metadata_particle.generate()

                            if self._driver_event:
                                self._driver_event(DriverAsyncEvent.SAMPLE,
                                                   parsed_sample)

                            # ...and one sample particle per data timestamp,
                            # carrying the three frequency channels.
                            for counter, data_timestamp in enumerate(
                                    data_times):
                                zp_data = {
                                    ZplscBParticleKey.FREQ_CHAN_1:
                                    frequencies[1],
                                    ZplscBParticleKey.VALS_CHAN_1:
                                    list(power_data_dict[1][counter]),
                                    ZplscBParticleKey.FREQ_CHAN_2:
                                    frequencies[2],
                                    ZplscBParticleKey.VALS_CHAN_2:
                                    list(power_data_dict[2][counter]),
                                    ZplscBParticleKey.FREQ_CHAN_3:
                                    frequencies[3],
                                    ZplscBParticleKey.VALS_CHAN_3:
                                    list(power_data_dict[3][counter]),
                                }

                                sample_particle = ZplscBSampleDataParticle(
                                    zp_data,
                                    port_timestamp=timestamp,
                                    internal_timestamp=data_timestamp,
                                    preferred_timestamp=DataParticleKey.
                                    INTERNAL_TIMESTAMP)

                                parsed_sample_particles = sample_particle.generate(
                                )

                                if self._driver_event:
                                    self._driver_event(
                                        DriverAsyncEvent.SAMPLE,
                                        parsed_sample_particles)

                # Idle briefly between polling passes.
                time.sleep(1)

        finally:
            if processing_pool:
                processing_pool.terminate()
                processing_pool.join()

    def shutdown(self):
        """
        Shut down the protocol: stop accepting new datagrams, then wait for
        the particle generation thread to drain its outstanding work.
        """
        log.info('Shutting down ZPLSC protocol')
        super(Protocol, self).shutdown()
        # Do not add any more datagrams to the processing queue
        self._process_particles = False
        # Await completed processing of all datagrams for a maximum of 10 minutes
        log.info('Joining particles_thread')
        self._particles_thread.join(timeout=600)
        log.info('Completed ZPLSC protocol shutdown')

    def _build_param_dict(self):
        """
        Populate the parameter dictionary with parameters.
        For each parameter key, add match string, match lambda function,
        and value formatting function for set commands.
        """

        self._param_dict.add(
            Parameter.SCHEDULE,
            r'schedule:\s+(.*)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="Schedule",
            description=
            "Large block of text used to create the .yaml file defining the sampling schedule.",
            startup_param=True,
            default_value=yaml.dump(DEFAULT_CONFIG, default_flow_style=False))

        self._param_dict.add(
            Parameter.FTP_IP_ADDRESS,
            r'ftp address:\s+(\d\d\d\d\.\d\d\d\d\.\d\d\d\d\.\d\d\d)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="FTP IP Address",
            description=
            "IP address the driver uses to connect to the instrument FTP server.",
            startup_param=True,
            default_value=DEFAULT_HOST)

        self._param_dict.add(
            Parameter.FTP_USERNAME,
            r'username:(.*)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="FTP User Name",
            description="Username used to connect to the FTP server.",
            startup_param=True,
            default_value=USER_NAME)

        self._param_dict.add(
            Parameter.FTP_PASSWORD,
            r'password:(.*)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="FTP Password",
            description="Password used to connect to the FTP server.",
            startup_param=True,
            default_value=PASSWORD)

        self._param_dict.add(
            Parameter.FTP_PORT,
            r'port:(.*)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="FTP Port",
            description=
            "Location on the OOI infrastructure where .raw files stored.",
            startup_param=True,
            default_value=DEFAULT_PORT)

    def _build_driver_dict(self):
        """
        Populate the driver dictionary with options
        """
        self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)

    def _build_command_dict(self):
        """
        Populate the command dictionary with command.
        """
        self._cmd_dict.add(Capability.START_AUTOSAMPLE,
                           display_name="Start Autosample")
        self._cmd_dict.add(Capability.STOP_AUTOSAMPLE,
                           display_name="Stop Autosample")
        self._cmd_dict.add(Capability.ACQUIRE_STATUS,
                           display_name="Acquire Status")
        self._cmd_dict.add(Capability.DISCOVER, display_name='Discover')

    def _filter_capabilities(self, events):
        """
        Return a list of currently available capabilities.
        """
        return [x for x in events if Capability.has(x)]

    ########################################################################
    # Unknown handlers.
    ########################################################################

    def _handler_unknown_enter(self, *args, **kwargs):
        """
        Enter unknown state.
        """
        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    @staticmethod
    def _handler_unknown_exit(*args, **kwargs):
        """
        Exit unknown state.
        """
        pass

    def _handler_unknown_discover(self, *args, **kwargs):
        """
        Discover current state
        @retval next_state, (next_state, result)
        @raise InstrumentConnectionException if the status request fails.
        """
        next_state = ProtocolState.COMMAND
        result = []

        # Try to get the status to check if the instrument is alive
        host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS)
        port = self._param_dict.get_config_value(Parameter.FTP_PORT)
        response = self._url_request(host, port, '/status.json')

        if response is None:
            error_msg = "_handler_unknown_discover: Unable to connect to host: %s" % host
            log.error(error_msg)
            raise InstrumentConnectionException(error_msg)

        return next_state, (next_state, result)

    ########################################################################
    # Command handlers.
    ########################################################################
    def _handler_command_enter(self, *args, **kwargs):
        """
        Enter command state.
        @throws InstrumentTimeoutException if the device cannot be woken.
        @throws InstrumentProtocolException if the update commands and not recognized.
        """
        self._init_params()

        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    @staticmethod
    def _handler_command_exit(*args, **kwargs):
        """
        Exit command state.
        """
        pass

    def _handler_command_get(self, *args, **kwargs):
        """
        Get parameters while in the command state.
        @param params List of the parameters to pass to the state
        @retval returns (next_state, result) where result is a dict {}. No
            agent state changes happening with Get, so no next_agent_state
        @throw InstrumentParameterException for invalid parameter
        """
        result_vals = {}

        # Retrieve required parameter.
        # Raise if no parameter provided, or not a dict.
        try:
            params = args[0]

        except IndexError:
            raise InstrumentParameterException(
                '_handler_command_get requires a parameter dict.')

        if Parameter.ALL in params:
            log.debug("Parameter ALL in params")
            params = Parameter.list()
            params.remove(Parameter.ALL)

        log.debug("_handler_command_get: params = %s", params)

        if params is None or not isinstance(params, list):
            raise InstrumentParameterException(
                "GET parameter list not a list!")

        # fill the return values from the update
        for param in params:
            if not Parameter.has(param):
                raise InstrumentParameterException("Invalid parameter!")
            result_vals[param] = self._param_dict.get(param)
            # NOTE(review): return value discarded -- presumably kept for a
            # validation side effect; confirm before removing.
            self._param_dict.get_config_value(param)
        result = result_vals

        log.debug("Get finished, next_state: %s, result: %s", None, result)
        return None, result

    def _handler_command_set(self, *args, **kwargs):
        """
        Set parameter
        @retval next state, result
        @throw InstrumentParameterException if no dict of parameters supplied
        """
        startup = False

        try:
            params = args[0]
        except IndexError:
            raise InstrumentParameterException(
                '_handler_command_set: command requires a parameter dict.')

        try:
            startup = args[1]
        except IndexError:
            pass

        if not isinstance(params, dict):
            raise InstrumentParameterException('Set parameters not a dict.')

        # For each key, val in the params, set the param dictionary.
        old_config = self._param_dict.get_config()
        self._set_params(params, startup)

        # Notify listeners only when the configuration actually changed.
        new_config = self._param_dict.get_config()
        if old_config != new_config:
            self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)

        return None, None

    def _set_params(self, *args, **kwargs):
        """
        Issue commands to the instrument to set various parameters.
        Setting the SCHEDULE parameter additionally FTPs the schedule file to
        the instrument and reloads it via the HTTP interface.
        """
        try:
            params = args[0]
        except IndexError:
            raise InstrumentParameterException(
                'Set command requires a parameter dict.')

        # verify param is not readonly param
        self._verify_not_readonly(*args, **kwargs)

        for key, val in params.iteritems():
            log.debug("KEY = %s VALUE = %s", key, val)
            self._param_dict.set_value(key, val)
            if key == Parameter.SCHEDULE:
                self._ftp_schedule_file()

                # Load the schedule file
                host = self._param_dict.get(Parameter.FTP_IP_ADDRESS)
                port = self._param_dict.get_config_value(Parameter.FTP_PORT)
                log.debug("_set_params: stop the current schedule file")
                self._url_request(host, port, '/stop_schedule', data={})
                log.debug("_set_params: upload driver YAML file to host %s",
                          host)
                res = self._url_request(host,
                                        port,
                                        '/load_schedule',
                                        data=json.dumps(
                                            {'filename': YAML_FILE_NAME}))
                log.debug("_set_params: result from load = %s", res)

        log.debug("set complete, update params")

    def _ftp_schedule_file(self):
        """
        Construct a YAML schedule file and
        ftp the file to the Instrument server.
        @raise InstrumentException if the temp file is invalid or the FTP
            host is unreachable.
        """
        # Create a temporary file and write the schedule YAML information to the file
        try:
            config_file = tempfile.TemporaryFile()
            log.debug("temporary file created")

            if config_file is None or not isinstance(config_file, file):
                raise InstrumentException("config_file is not a temp file!")

            config_file.write(self._param_dict.get(Parameter.SCHEDULE))
            config_file.seek(0)
            log.debug("finished writing config file:\n%r",
                      self._param_dict.get(Parameter.SCHEDULE))

        except Exception as e:
            log.error("Create schedule YAML file exception: %s", e)
            raise e

        #  FTP the schedule file to the ZPLSC server
        host = ''

        try:
            log.debug("Create a ftp session")
            host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS)
            log.debug("Got host ip address %s", host)

            ftp_session = ftplib.FTP()
            ftp_session.connect(host)
            ftp_session.login(USER_NAME, PASSWORD)
            log.debug("ftp session was created...")

            ftp_session.set_pasv(False)
            ftp_session.cwd("config")

            ftp_session.storlines('STOR ' + YAML_FILE_NAME, config_file)
            files = ftp_session.dir()

            log.debug("*** Config yaml file sent: %s", files)

            ftp_session.quit()
            config_file.close()

        except (ftplib.socket.error, ftplib.socket.gaierror), e:
            log.error("ERROR: cannot reach FTP Host %s: %s ", host, e)
            raise InstrumentException("ERROR: cannot reach FTP Host %s " %
                                      host)

        log.debug("*** FTP %s to ftp host %s successfully", YAML_FILE_NAME,
                  host)
예제 #5
0
class SatlanticOCR507InstrumentProtocol(CommandResponseInstrumentProtocol):
    """The instrument protocol classes to deal with a Satlantic OCR507 sensor.
    The protocol is a very simple command/response protocol with a few show
    commands and a few set commands.
    Note protocol state machine must be called "self._protocol_fsm"
    """
    _data_particle_type = SatlanticOCR507DataParticle
    _config_particle_type = SatlanticOCR507ConfigurationParticle
    _data_particle_regex = SAMPLE_REGEX
    _config_particle_regex = CONFIG_REGEX

    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, callback=None):
        """
        Protocol constructor: builds the protocol state machine, registers
        event and response handlers, and populates the parameter, command and
        driver dictionaries.

        @param callback driver process event callback
        """
        CommandResponseInstrumentProtocol.__init__(self, Prompt, EOLN,
                                                   callback)

        # Timestamp of the most recent sample; compared against a later
        # reading in _confirm_autosample_mode to detect streaming.
        self._last_data_timestamp = None

        # Build the protocol state machine (starts in UNKNOWN below).
        self._protocol_fsm = ThreadSafeFSM(SatlanticProtocolState,
                                           SatlanticProtocolEvent,
                                           SatlanticProtocolEvent.ENTER,
                                           SatlanticProtocolEvent.EXIT)

        # Register a handler for every supported (state, event) pair.
        self._protocol_fsm.add_handler(SatlanticProtocolState.UNKNOWN,
                                       SatlanticProtocolEvent.ENTER,
                                       self._handler_unknown_enter)
        self._protocol_fsm.add_handler(SatlanticProtocolState.UNKNOWN,
                                       SatlanticProtocolEvent.DISCOVER,
                                       self._handler_unknown_discover)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND,
                                       SatlanticProtocolEvent.ENTER,
                                       self._handler_command_enter)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND,
                                       SatlanticProtocolEvent.GET,
                                       self._handler_command_get)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND,
                                       SatlanticProtocolEvent.SET,
                                       self._handler_command_set)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND,
                                       SatlanticProtocolEvent.START_AUTOSAMPLE,
                                       self._handler_command_start_autosample)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND,
                                       SatlanticProtocolEvent.ACQUIRE_STATUS,
                                       self._handler_command_acquire_status)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND,
                                       SatlanticProtocolEvent.START_DIRECT,
                                       self._handler_command_start_direct)
        self._protocol_fsm.add_handler(SatlanticProtocolState.AUTOSAMPLE,
                                       SatlanticProtocolEvent.ENTER,
                                       self._handler_autosample_enter)
        self._protocol_fsm.add_handler(
            SatlanticProtocolState.AUTOSAMPLE,
            SatlanticProtocolEvent.STOP_AUTOSAMPLE,
            self._handler_autosample_stop_autosample)
        self._protocol_fsm.add_handler(SatlanticProtocolState.DIRECT_ACCESS,
                                       SatlanticProtocolEvent.ENTER,
                                       self._handler_direct_access_enter)
        self._protocol_fsm.add_handler(
            SatlanticProtocolState.DIRECT_ACCESS,
            SatlanticProtocolEvent.EXECUTE_DIRECT,
            self._handler_direct_access_execute_direct)
        self._protocol_fsm.add_handler(SatlanticProtocolState.DIRECT_ACCESS,
                                       SatlanticProtocolEvent.STOP_DIRECT,
                                       self._handler_direct_access_stop_direct)

        self._protocol_fsm.start(SatlanticProtocolState.UNKNOWN)

        # Parsers for the replies to each command type.
        self._add_response_handler(Command.GET, self._parse_get_response)
        self._add_response_handler(Command.SET, self._parse_set_response)
        self._add_response_handler(Command.INVALID,
                                   self._parse_invalid_response)

        # Parameter dictionary: MAX_RATE is the only user-settable parameter;
        # INIT_AT, INIT_SM and NET_MODE are immutable startup values.
        self._param_dict.add(
            Parameter.MAX_RATE,
            r"Maximum\ Frame\ Rate:\ (\S+).*?\s*",
            lambda match: match.group(1),
            lambda sVal: '%s' % sVal,
            type=ParameterDictType.STRING,
            display_name="Max Rate",
            value_description=
            "valid values: 0=auto, 0.125, 0.25, 0.5, 1, 2, 4, 8, 10, 12",
            units=Units.HERTZ,
            default_value='0',
            startup_param=True,
            direct_access=True)

        self._param_dict.add(Parameter.INIT_AT,
                             r"Initialize Automatic Telemetry: (off|on)",
                             lambda match: True
                             if match.group(1) == 'on' else False,
                             self._boolean_to_off_on,
                             type=ParameterDictType.BOOL,
                             display_name="Init AT",
                             default_value=True,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             startup_param=True,
                             direct_access=True)

        self._param_dict.add(Parameter.INIT_SM,
                             r"Initialize Silent Mode: (off|on)",
                             lambda match: True
                             if match.group(1) == 'on' else False,
                             self._boolean_to_off_on,
                             type=ParameterDictType.BOOL,
                             display_name="Init SM",
                             default_value=True,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             startup_param=True,
                             direct_access=True)

        self._param_dict.add(Parameter.NET_MODE,
                             r"Network Mode: (off|on)",
                             lambda match: True
                             if match.group(1) == 'on' else False,
                             self._boolean_to_off_on,
                             type=ParameterDictType.BOOL,
                             display_name="Net Mode",
                             default_value=False,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             startup_param=True,
                             direct_access=True)

        # Command dictionary: capabilities exposed to the agent.
        self._cmd_dict.add(SatlanticCapability.START_AUTOSAMPLE,
                           display_name="Start Autosample")
        self._cmd_dict.add(SatlanticCapability.STOP_AUTOSAMPLE,
                           display_name="Stop Autosample")
        self._cmd_dict.add(SatlanticCapability.ACQUIRE_STATUS,
                           display_name="Acquire Status")

        self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)

        # Chunker splits the raw byte stream into sample/config records.
        self._chunker = StringChunker(self.sieve_function)

    @staticmethod
    def _boolean_to_off_on(v):
        """
        Format a boolean as the 'on'/'off' string the instrument expects.

        @param v a boolean value.
        @retval 'on' when v is True, 'off' when v is False.
        @throws InstrumentParameterException if value is not a bool.
        """
        if not isinstance(v, bool):
            raise InstrumentParameterException('Value %s is not a bool.' %
                                               str(v))
        return 'on' if v else 'off'

    @staticmethod
    def sieve_function(raw_data):
        """Locate sample and configuration records in the raw byte stream.

        @param raw_data accumulated raw bytes from the instrument
        @retval list of (start, end) index pairs, sample matches first
        """
        log.debug("Raw Data: %r, len: %d", raw_data, len(raw_data))
        log.debug(SAMPLE_REGEX.pattern)
        return [(hit.start(), hit.end())
                for pattern in (SAMPLE_REGEX, CONFIG_REGEX)
                for hit in pattern.finditer(raw_data)]

    def _filter_capabilities(self, events):
        """Return only the events that are valid SatlanticCapability values."""
        return [event for event in events if SatlanticCapability.has(event)]

    def get_config(self, *args, **kwargs):
        """Query the instrument for every parameter (except the ALL pseudo-key).

        Issues one GET command per parameter; the GET response handler updates
        the parameter dictionary as each reply arrives.
        """
        for parameter in Parameter.list():
            if parameter == Parameter.ALL:
                continue
            self._do_cmd_resp(Command.GET, parameter, **kwargs)

    def _do_cmd(self, cmd, *args, **kwargs):
        """
        Issue a command to the instrument after clearing of buffers.

        Multi-character commands are sent one character at a time, waiting for
        each character to be echoed into the prompt buffer before sending the
        next, then the EOLN terminator is sent (and re-sent if it appears not
        to have been accepted).

        @param cmd The command to execute.
        @param args positional arguments to pass to the build handler.
        @param expected_prompt kwarg used only to decide whether the EOLN
               resend loop should run and which prompt fragment to wait for.
        @retval The fully built command that was sent
        """
        expected_prompt = kwargs.get('expected_prompt', None)
        cmd_line = self._build_default_command(cmd, *args)

        # Send command.
        log.debug('_do_cmd: %s, length=%s' % (repr(cmd_line), len(cmd_line)))
        if len(cmd_line) == 1:
            # Single character: send as-is, no echo confirmation needed.
            self._connection.send(cmd_line)
        else:
            # Send character-by-character; wait up to 3s for each character
            # to show up at the end of the prompt buffer (echo) before the
            # next is sent.  NOTE(review): this appears to work around an
            # unreliable serial interface — confirm before simplifying.
            for char in cmd_line:
                starttime = time.time()
                self._connection.send(char)
                while len(self._promptbuf
                          ) == 0 or char not in self._promptbuf[-1]:
                    time.sleep(0.0015)
                    if time.time() > starttime + 3:
                        break

            time.sleep(0.115)
            starttime = time.time()
            self._connection.send(EOLN)
            # Wait (up to 3s) for the EOLN echo just past the command text.
            while EOLN not in self._promptbuf[len(cmd_line):len(cmd_line) + 2]:
                time.sleep(0.0015)
                if time.time() > starttime + 3:
                    break

            # Limit resend_check_value from expected_prompt to one of the two below
            resend_check_value = None
            if expected_prompt is not None:
                for check in (Prompt.COMMAND, "SATDI7"):
                    if check in expected_prompt:
                        log.trace('_do_cmd: command: %s, check=%s' %
                                  (cmd_line, check))
                        resend_check_value = check

            # Resend the EOLN if it did not go through the first time
            starttime = time.time()
            if resend_check_value is not None:
                # Poll every 0.1s; after 2s without the expected prompt,
                # resend EOLN and restart the timer.  Loop exits only when a
                # recognized prompt (expected or INVALID) arrives.
                while True:
                    time.sleep(0.1)
                    if time.time() > starttime + 2:
                        log.debug("Sending eoln again.")
                        self._connection.send(EOLN)
                        starttime = time.time()
                    if resend_check_value in self._promptbuf:
                        break
                    if Prompt.INVALID_COMMAND in self._promptbuf:
                        break

        return cmd_line

    def _do_cmd_no_resp(self, cmd, *args, **kwargs):
        """
        Issue a command to the instrument after clearing of buffers. No response is handled as a result of the command.

        Thin wrapper over _do_cmd that discards the sent command line.

        @param cmd The command to execute.
        @param args positional arguments to pass to the build handler.
        """
        self._do_cmd(cmd, *args, **kwargs)

    def _do_cmd_resp(self, cmd, *args, **kwargs):
        """
        Perform a command-response on the device.

        Sends the command (retrying up to 5 times when the echoed command line
        does not appear in the response), waits for the reply, and runs the
        registered response handler for the command, if any.

        @param cmd The command to execute.
        @param args positional arguments to pass to the build handler.
        @param expected_prompt kwarg offering a specific prompt to look for
        other than the ones in the protocol class itself.
        @param response_regex kwarg with a compiled regex for the response to
        match. Groups that match will be returned as a string.
        Cannot be supplied with expected_prompt. May be helpful for instruments that do not have a prompt.
        @retval resp_result The (possibly parsed) response result including the
        first instance of the prompt matched. If a regex was used, the prompt
        will be an empty string and the response will be the joined collection of matched groups.
        @raises InstrumentCommandException if the command could not be sent
        after exhausting all retries.
        @raises InstrumentProtocolException if command could not be built or if response was not recognized.
        """
        timeout = kwargs.get('timeout', DEFAULT_CMD_TIMEOUT)
        expected_prompt = kwargs.get(
            'expected_prompt',
            [Prompt.INVALID_COMMAND, Prompt.USAGE, Prompt.COMMAND])
        response_regex = kwargs.get('response_regex', None)

        if response_regex and not isinstance(response_regex, RE_PATTERN):
            raise InstrumentProtocolException(
                'Response regex is not a compiled pattern!')

        if expected_prompt and response_regex:
            raise InstrumentProtocolException(
                'Cannot supply both regex and expected prompt!')

        retry_count = 5
        retry_num = 0
        cmd_line = ""
        result = ""
        prompt = ""
        for retry_num in xrange(retry_count):
            # Clear line and prompt buffers for result.
            self._linebuf = ''
            self._promptbuf = ''

            cmd_line = self._do_cmd(cmd, *args, **kwargs)

            # Wait for the prompt, prepare result and return, timeout exception
            if response_regex:
                result_tuple = self._get_response(
                    timeout,
                    response_regex=response_regex,
                    expected_prompt=expected_prompt)
                result = "".join(result_tuple)
            else:
                (prompt,
                 result) = self._get_response(timeout,
                                              expected_prompt=expected_prompt)

            # Confirm the entire command was echoed back; otherwise resend,
            # up to retry_count attempts in total.
            if len(cmd_line) > 1 and \
                    (expected_prompt is not None or
                         (response_regex is not None)) \
                    and cmd_line not in result:
                log.debug(
                    '_do_cmd_resp: Send command: %s failed %s attempt, result = %s.',
                    cmd, retry_num, result)
                # BUG FIX: xrange(retry_count) yields at most retry_count - 1,
                # so the original "retry_num >= retry_count" test was never
                # true and retry exhaustion was silently ignored.  Raise on
                # the final failed attempt instead.
                if retry_num == retry_count - 1:
                    raise InstrumentCommandException(
                        '_do_cmd_resp: Failed %s attempts sending command: %s'
                        % (retry_count, cmd))
            else:
                break

        log.debug(
            '_do_cmd_resp: Sent command: %s, %s reattempts, expected_prompt=%s, result=%r.',
            cmd_line, retry_num, expected_prompt, result)

        # Prefer a state-specific handler; fall back to the command-wide one.
        resp_handler = self._response_handlers.get((self.get_current_state(), cmd), None) or \
                       self._response_handlers.get(cmd, None)
        resp_result = None
        if resp_handler:
            resp_result = resp_handler(result, prompt)

        time.sleep(
            0.3)  # give some time for the instrument connection to keep up

        return resp_result

    ########################################################################
    # Unknown handlers.
    ########################################################################

    def _handler_unknown_enter(self, *args, **kwargs):
        """
        Enter unknown state.

        @retval None (no state transition; only publishes the change event)
        """
        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_unknown_discover(self, *args, **kwargs):
        """
        Discover current state; can be COMMAND or AUTOSAMPLE.

        Probes the instrument with an invalid command: a reply means the
        instrument is at the command prompt; a timeout means it is streaming.

        @retval (next_state, result), (SatlanticProtocolState.COMMAND, ResourceAgentState.IDLE or
        SatlanticProtocolState.AUTOSAMPLE, ResourceAgentState.STREAMING) if successful.
        """
        try:
            probe_response = self._do_cmd_resp(
                Command.INVALID,
                timeout=3,
                expected_prompt=Prompt.INVALID_COMMAND)
        except InstrumentTimeoutException:
            # No prompt came back: the instrument is not in COMMAND, so it
            # must be polled or in AUTOSAMPLE.
            probe_response = None

        log.debug("_handler_unknown_discover: returned: %s", probe_response)

        if probe_response:
            return SatlanticProtocolState.COMMAND, ResourceAgentState.IDLE

        # Put the instrument back into full autosample
        self._do_cmd_no_resp(Command.SWITCH_TO_AUTOSAMPLE)
        return SatlanticProtocolState.AUTOSAMPLE, ResourceAgentState.STREAMING

    ########################################################################
    # Command handlers.
    ########################################################################

    def _handler_command_enter(self, *args, **kwargs):
        """
        Enter command state.

        Applies startup/init parameters, then publishes the state change.
        """
        # Command device to update parameters and send a config change event.
        self._init_params()

        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_command_get(self, *args, **kwargs):
        """Handle getting data from command mode.

        Delegates entirely to the base-class GET handler.

        @param params List of the parameters to pass to the state
        @retval return (next state, result)
        """
        return self._handler_get(*args, **kwargs)

    def _handler_command_set(self, *args, **kwargs):
        """Handle setting data from command mode.

        @param params Dict of the parameters and values to pass to the state
        @return (next state, result) — always (None, None); errors are raised
        """
        self._set_params(*args, **kwargs)
        return None, None

    def _handler_command_start_autosample(self, params=None, *args, **kwargs):
        """
        Switch the instrument from command mode into autosample.

        @param params unused; kept for handler-signature compatibility
        @return (next state, (next agent state, result))
        """
        self._do_cmd_no_resp(Command.EXIT_AND_RESET)
        # Give the instrument time to reset before streaming begins.
        time.sleep(RESET_DELAY)
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

        return (SatlanticProtocolState.AUTOSAMPLE,
                (ResourceAgentState.STREAMING, None))

    def _handler_command_start_direct(self):
        """
        Transition from command mode into direct access.

        @return (next state, (next agent state, result))
        """
        log.debug("_handler_command_start_direct: entering DA mode")
        return (SatlanticProtocolState.DIRECT_ACCESS,
                (ResourceAgentState.DIRECT_ACCESS, None))

    def _handler_command_acquire_status(self, *args, **kwargs):
        """
        Handle SatlanticProtocolState.COMMAND SatlanticProtocolEvent.ACQUIRE_STATUS:
        request the instrument ID followed by the full configuration dump.

        @return (next state, (next agent state, result)) — no transition
        """
        for status_cmd in (Command.ID, Command.SHOW_ALL):
            self._do_cmd_no_resp(status_cmd)

        return None, (None, None)

    ########################################################################
    # Autosample handlers.
    ########################################################################

    def _handler_autosample_enter(self, *args, **kwargs):
        """
        Handle SatlanticProtocolState.AUTOSAMPLE SatlanticProtocolEvent.ENTER,
        verifying that the instrument really is streaming.

        @retval return (next state, result)
        @throw InstrumentProtocolException if the instrument is not streaming
        """
        next_state = None
        result = None

        # BUG FIX: the original tested the bound method object
        # (`self._confirm_autosample_mode`), which is always truthy, so this
        # check could never fire.  Call the method to actually verify mode.
        if not self._confirm_autosample_mode():
            raise InstrumentProtocolException(
                error_code=InstErrorCode.HARDWARE_ERROR,
                msg="Not in the correct mode!")

        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
        return next_state, result

    def _handler_autosample_stop_autosample(self, *args, **kwargs):
        """
        Stop autosampling by breaking the instrument back to command mode.

        @retval return (next state, (next agent state, result))
        @throw InstrumentProtocolException if the break fails
        """
        try:
            self._send_break()
        except InstrumentException:
            raise InstrumentProtocolException(
                error_code=InstErrorCode.HARDWARE_ERROR,
                msg="Could not break from autosample!")

        return (SatlanticProtocolState.COMMAND,
                (ResourceAgentState.COMMAND, None))

    ########################################################################
    # Direct access handlers.
    ########################################################################

    def _handler_direct_access_enter(self, *args, **kwargs):
        """
        Enter direct access state.
        Tell driver superclass to send a state change event.
        Superclass will query the state.
        """
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
        # Reset the record of commands sent, used for echo filtering in the
        # direct-access callback.
        self._sent_cmds = []

    def _do_cmd_direct(self, cmd):
        """
        Issue an untranslated command to the instrument. No response is handled as a result of the command.
        Overridden: Use _do_cmd to send commands reliably. Remove if digi-serial interface is ever fixed.

        @param cmd The high level command to issue
        """
        self._do_cmd(cmd)

    def _handler_direct_access_execute_direct(self, data):
        """
        Forward raw data straight to the instrument while in direct access.

        @param data raw bytes to send to the instrument
        @return (next state, (next agent state, result)) — no transition
        """
        self._do_cmd_direct(data)

        # Remember what was sent so the echo can be filtered in the callback.
        self._sent_cmds.append(data)

        return None, (None, None)

    def _handler_direct_access_stop_direct(self):
        """
        Leave direct access: re-discover the instrument state and map a
        COMMAND discovery to the COMMAND agent state.
        """
        next_state, next_agent_state = self._handler_unknown_discover()
        # NOTE(review): discover returns SatlanticProtocolState values but
        # this compares against DriverProtocolState.COMMAND — presumably the
        # two enums share the same string value; confirm.
        if next_state == DriverProtocolState.COMMAND:
            next_agent_state = ResourceAgentState.COMMAND

        return next_state, (next_agent_state, None)

    ###################################################################
    # Builders
    ###################################################################
    def _build_default_command(self, *args):
        """Join the command and its arguments into one space-separated string."""
        return " ".join(map(str, args))

    ##################################################################
    # Response parsers
    ##################################################################
    def _parse_set_response(self, response, prompt):
        """Determine if a set was successful or not.

        @param response What was sent back from the command that was sent
        @param prompt The prompt that was returned from the device
        @retval True iff the command prompt came back
        """
        return prompt == Prompt.COMMAND

    def _parse_get_response(self, response, prompt):
        """ Parse the response to a GET command and refresh the param dict.

        @param response The response string from the instrument
        @param prompt The prompt received from the instrument
        @return the full parameter dictionary contents
        @raise InstrumentProtocolException when the response updates zero or
               more than one parameter (a valid GET updates exactly one)
        """
        # should end with the response, an eol, and a prompt
        update_dict = self._param_dict.update_many(response)
        if not update_dict or len(update_dict) > 1:
            # FIX: the original message claimed "multiple parameters" even
            # when nothing at all was updated; report the actual count.
            log.error(
                "Get response updated %d parameters (%r): expected exactly 1",
                len(update_dict) if update_dict else 0, update_dict)
            raise InstrumentProtocolException("Invalid response. Bad command?")

        return self._param_dict.get_all()

    def _parse_invalid_response(self, response, prompt):
        """ Check whether the instrument rejected the command.

        @param response The response string from the instrument
        @param prompt The prompt received from the instrument
        @return true iff Prompt.INVALID_COMMAND was returned
        """
        # The deliberately-invalid probe succeeded when the instrument
        # answers with its "invalid command" prompt.
        return prompt == Prompt.INVALID_COMMAND

    ###################################################################
    # Helpers
    ###################################################################
    def _set_params(self, *args, **kwargs):
        """
        Issue commands to the instrument to set various parameters.
        Also called when setting parameters during startup and direct access.
        If an error occurs part-way through, the remaining parameters are
        skipped and the first exception encountered is raised after the
        save/config-change step.
        @throws InstrumentParameterException if parameter does not exist or Maxrate is out of range
        @throws InstrumentCommandException if failed to set
        """

        params = args[0]

        self._verify_not_readonly(*args, **kwargs)
        old_config = self._param_dict.get_config()

        # First failure stops the loop; the exception is raised at the end so
        # that a partial change still triggers SAVE and a config-change event.
        exception = None

        for key in params:
            if key not in self._param_dict._param_dict:
                exception = InstrumentParameterException("Bad parameter: %r" %
                                                         key)
                break
            val = self._param_dict.format(key, params[key])
            log.debug("KEY = %s VALUE = %s", str(key), str(val))
            if key == Parameter.MAX_RATE and float(
                    params[key]) not in VALID_MAXRATES:
                exception = InstrumentParameterException(
                    "Maxrate %s out of range" % val)
                break
            # Check for existence in dict (send only on change)
            if not self._do_cmd_resp(Command.SET, key, val):
                exception = InstrumentCommandException(
                    'Error setting: %s = %s' % (key, val))
                break
            self._param_dict.set_value(key, params[key])

        # Get new param dict config. If it differs from the old config,
        # tell driver superclass to publish a config change event.
        new_config = self._param_dict.get_config()
        log.debug("new_config: %s == old_config: %s", new_config, old_config)
        if old_config != new_config:
            self._do_cmd_resp(Command.SAVE, expected_prompt=Prompt.COMMAND)
            log.debug("configuration has changed.  Send driver event")
            self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)

        # Raise any exceptions encountered due to errors setting the parameter(s)
        if exception is not None:
            raise exception

    def _update_params(self, *args, **kwargs):
        """Fetch all parameters from the device and refresh the param dict.

        Publishes a CONFIG_CHANGE driver event when any value changed.

        @param args Unused
        @param kwargs Takes timeout value
        """
        previous_config = self._param_dict.get_config()
        self.get_config()
        if self._param_dict.get_config() != previous_config:
            self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)

    def _send_break(self):
        """
        Send break every 0.3 seconds until the Command Console banner is received.
        @throws InstrumentTimeoutException if the Command Console banner is not
        received within 10 seconds.
        """
        self._promptbuf = ""
        self._connection.send(Command.BREAK)
        starttime = time.time()
        resendtime = time.time()
        while True:
            # Keep re-sending the break until the banner shows up.
            if time.time() > resendtime + 0.3:
                log.debug("Sending break again.")
                self._connection.send(Command.BREAK)
                resendtime = time.time()

            if COMMAND_PATTERN in self._promptbuf:
                break

            # Overall deadline: give up after 10 seconds.
            if time.time() > starttime + 10:
                raise InstrumentTimeoutException(
                    "Break command failing to stop autosample!")

            time.sleep(0.1)

    def _got_chunk(self, chunk, timestamp):
        """
        Extract a data or configuration particle from a chunk of data.
        @param chunk: bytes to parse into a sample.
        @param timestamp: port-agent timestamp for the particle
        @raise InstrumentProtocolException when neither particle type matches
        """
        # Try the data particle first, then fall back to configuration.
        sample = self._extract_sample(self._data_particle_type,
                                      self._data_particle_regex,
                                      chunk, timestamp)
        if not sample:
            sample = self._extract_sample(self._config_particle_type,
                                          self._config_particle_regex,
                                          chunk, timestamp)
        if not sample:
            raise InstrumentProtocolException(
                u'unhandled chunk received by _got_chunk: [{0!r:s}]'.format(
                    chunk))
        return sample

    def _confirm_autosample_mode(self):
        """
        Confirm we are in autosample mode.
        This is done by waiting for a sample to come in, and confirming that
        it does or does not (the last-sample timestamp changes while streaming).
        @retval True if in autosample mode, False if not
        """
        # timestamp now,
        start_time = self._last_data_timestamp
        # wait a sample period,
        current_maxrate = self._param_dict.get_config()[Parameter.MAX_RATE]
        if current_maxrate is None:
            current_maxrate = 0.125  # During startup, assume the slowest sample rate
        elif current_maxrate <= 0 or current_maxrate > 8:
            current_maxrate = 8  # Effective current maxrate, despite the instrument accepting higher values
        # One sample period plus a one-second margin.
        time_between_samples = (1.0 / current_maxrate) + 1
        time.sleep(time_between_samples)
        end_time = self._last_data_timestamp
        log.debug("_confirm_autosample_mode: end_time=%s, start_time=%s" %
                  (end_time, start_time))
        if end_time != start_time:
            log.debug("Confirmed in autosample mode")
            return True
        log.debug("Confirmed NOT in autosample mode")
        return False
예제 #6
0
class SeaBirdProtocol(CommandResponseInstrumentProtocol):
    """
    Instrument protocol class for seabird driver.
    Subclasses CommandResponseInstrumentProtocol

    Provides the command/autosample state handlers and the startup-parameter
    plumbing shared by Sea-Bird drivers.  Instrument-specific subclasses must
    implement _start_logging, _stop_logging, _is_logging and _update_params
    (all raise NotImplementedException here).
    """
    __metaclass__ = get_logging_metaclass(log_level='trace')

    def __init__(self, prompts, newline, driver_event):
        """
        Protocol constructor.
        @param prompts A BaseEnum class containing instrument prompts.
        @param newline The sbe26plus newline.
        @param driver_event Driver process event callback.
        """
        # Construct protocol superclass.
        CommandResponseInstrumentProtocol.__init__(self, prompts, newline,
                                                   driver_event)

    ########################################################################
    # Common handlers
    ########################################################################
    def _handler_command_enter(self, *args, **kwargs):
        """
        Enter command state: initialize parameters and notify the driver of
        the state change.
        @throws InstrumentTimeoutException if the device cannot be woken.
        @throws InstrumentProtocolException if the update commands and not recognized.
        """
        # Command device to initialize parameters and send a config change event.
        self._protocol_fsm.on_event(DriverEvent.INIT_PARAMS)

        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_autosample_enter(self, *args, **kwargs):
        """
        Enter autosample state: initialize parameters and notify the driver
        of the state change.
        """
        self._protocol_fsm.on_event(DriverEvent.INIT_PARAMS)

        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_command_init_params(self, *args, **kwargs):
        """
        Initialize parameters while in command state.
        @retval (next_state, result) tuple, (None, None).
        """
        self._init_params()
        return None, None

    def _handler_autosample_init_params(self, *args, **kwargs):
        """
        initialize parameters.  For this instrument we need to
        put the instrument into command mode, apply the changes
        then put it back.
        @retval (next_state, result) tuple, (None, None).
        """
        # Only do the stop/apply/restart dance if initialization is pending.
        if self._init_type != InitializationType.NONE:

            try:
                self._stop_logging()
                self._init_params()

            finally:
                # Switch back to streaming
                # NOTE(review): the debug message is emitted when the
                # instrument is NOT yet logging, immediately before
                # _start_logging() is called — the wording looks premature;
                # confirm intent before relying on this log line.
                if not self._is_logging():
                    log.debug("SBE is logging again")
                    self._start_logging()

        return None, None

    def _handler_command_get(self, *args, **kwargs):
        """
        Get device parameters from the parameter dict.  First we set a baseline timestamp
        that all data expiration will be calculated against.  Then we try to get parameter
        value.  If we catch an expired parameter then we will update all parameters and get
        values using the original baseline time that we set at the beginning of this method.
        Assuming our _update_params is updating all parameter values properly then we can
        ensure that all data will be fresh.  Nobody likes stale data!
        @param args[0] list of parameters to retrieve, or DriverParameter.ALL.
        @raise InstrumentParameterException if missing or invalid parameter.
        @raise InstrumentParameterExpirationException If we fail to update a parameter
        on the second pass this exception will be raised on expired data
        """
        # All of the above logic lives in the shared _handler_get implementation.
        return self._handler_get(*args, **kwargs)

    def _handler_command_set(self, *args, **kwargs):
        """
        Perform a set command.
        @param args[0] parameter : value dict.
        @param args[1] parameter : startup parameters?
        @retval (next_state, result) tuple, (None, None).
        @throws InstrumentParameterException if missing set parameters, if set parameters not ALL and
        not a dict, or if parameter can't be properly formatted.
        @throws InstrumentTimeoutException if device cannot be woken for set command.
        @throws InstrumentProtocolException if set command could not be built or misunderstood.
        """
        startup = False

        try:
            params = args[0]
        except IndexError:
            raise InstrumentParameterException(
                '_handler_command_set Set command requires a parameter dict.')

        # Second positional argument (startup flag) is optional.
        try:
            startup = args[1]
        except IndexError:
            pass

        if not isinstance(params, dict):
            raise InstrumentParameterException('Set parameters not a dict.')

        # For each key, val in the dict, issue set command to device.
        # Raise if the command not understood.
        else:
            self._set_params(params, startup)

        return None, None

    ########################################################################
    # Private helpers.
    ########################################################################

    def _discover(self, *args, **kwargs):
        """
        Discover current state; can be COMMAND or AUTOSAMPLE.
        @retval (next_state, result)
        @retval (next_state, result), (ProtocolState.COMMAND or
        State.AUTOSAMPLE, None) if successful.
        @throws InstrumentTimeoutException if the device cannot be woken.
        @throws InstrumentStateException if the device response does not correspond to
        an expected state.
        """
        # _is_logging is a three-valued check: True / False / None (unknown).
        logging = self._is_logging()
        log.debug("are we logging? %s" % logging)

        if logging is None:
            next_state = DriverProtocolState.UNKNOWN
            next_agent_state = ResourceAgentState.ACTIVE_UNKNOWN

        elif logging:
            next_state = DriverProtocolState.AUTOSAMPLE
            next_agent_state = ResourceAgentState.STREAMING

        else:
            next_state = DriverProtocolState.COMMAND
            next_agent_state = ResourceAgentState.COMMAND

        log.debug("_handler_unknown_discover. result start: %s" % next_state)
        return next_state, next_agent_state

    def _sync_clock(self,
                    command,
                    date_time_param,
                    timeout=TIMEOUT,
                    delay=1,
                    time_format="%d %b %Y %H:%M:%S"):
        """
        Send the command to the instrument to synchronize the clock
        @param command: command to set date time
        @param date_time_param: date time parameter that we want to set
        @param timeout: command timeout
        @param delay: wakeup delay
        @param time_format: time format string for set command
        @raise: InstrumentProtocolException if command fails
        """
        # lets clear out any past data so it doesnt confuse the command
        self._linebuf = ''
        self._promptbuf = ''

        log.debug("Set time format(%s) '%s''", time_format, date_time_param)
        # get_timestamp_delayed waits so the formatted time lands on a
        # whole-second boundary before the set command is issued.
        str_val = get_timestamp_delayed(time_format)
        log.debug("Set time value == '%s'", str_val)
        self._do_cmd_resp(command, date_time_param, str_val)

    ########################################################################
    # Startup parameter handlers
    ########################################################################
    def apply_startup_params(self):
        """
        Apply all startup parameters.  First we check the instrument to see
        if we need to set the parameters.  If they are they are set
        correctly then we don't do anything.

        If we need to set parameters then we might need to transition to
        command first.  Then we will transition back when complete.

        @todo: This feels odd.  It feels like some of this logic should
               be handled by the state machine.  It's a pattern that we
               may want to review.  I say this because this command
               needs to be run from autosample or command mode.
        @raise: InstrumentProtocolException if not in command or streaming
        """
        # Let's give it a try in unknown state
        log.debug("CURRENT STATE: %s", self.get_current_state())
        if (self.get_current_state() != DriverProtocolState.COMMAND and
                self.get_current_state() != DriverProtocolState.AUTOSAMPLE):
            raise InstrumentProtocolException(
                "Not in command or autosample state. Unable to apply startup params"
            )

        log.debug("sbe apply_startup_params, logging?")
        logging = self._is_logging()
        log.debug("sbe apply_startup_params, logging == %s", logging)

        # If we are in streaming mode and our configuration on the
        # instrument matches what we think it should be then we
        # don't need to do anything.
        # NOTE(review): this is the only code path that returns a value
        # (True); the apply path below falls through returning None.
        if not self._instrument_config_dirty():
            log.debug("configuration not dirty.  Nothing to do here")
            return True

        try:
            if logging:
                # Switch to command mode,
                log.debug("stop logging")
                self._stop_logging()

            log.debug("sbe apply_startup_params now")
            self._apply_params()

        finally:
            # Switch back to streaming
            if logging:
                log.debug("sbe apply_startup_params start logging again")
                self._start_logging()

    def _start_logging(self):
        """
        Issue the instrument command to start logging data
        Abstract: must be implemented by the instrument-specific subclass.
        """
        raise NotImplementedException()

    def _stop_logging(self):
        """
        Issue the instrument command to stop logging data
        Abstract: must be implemented by the instrument-specific subclass.
        """
        raise NotImplementedException()

    def _is_logging(self):
        """
        Is the instrument in logging or command mode.
        Abstract: must be implemented by the instrument-specific subclass.
        @return: True if streaming, False if in command, None if we don't know
        """
        raise NotImplementedException()

    def _set_params(self, *args, **kwargs):
        """
        Issue commands to the instrument to set various parameters
        This base implementation only validates that no read-only parameter
        is being set (unless the values come from startup configuration);
        subclasses perform the actual instrument commands.
        @param args[0] parameter : value dict.
        @param args[1] startup flag (optional, default False).
        @raise InstrumentParameterException if the dict is missing or a
        read-only parameter is being set outside of startup.
        """
        startup = False
        try:
            params = args[0]
        except IndexError:
            raise InstrumentParameterException(
                'Set command requires a parameter dict.')

        try:
            startup = args[1]
        except IndexError:
            pass

        # Only check for readonly parameters if we are not setting them from startup
        if not startup:
            readonly = self._param_dict.get_visibility_list(
                ParameterDictVisibility.READ_ONLY)

            log.debug("set param, but check visibility first")
            log.debug("Read only keys: %s", readonly)

            for (key, val) in params.iteritems():
                if key in readonly:
                    raise InstrumentParameterException(
                        "Attempt to set read only parameter (%s)" % key)

    def _update_params(self):
        """
        Send instrument commands to get data to refresh the param_dict cache
        Abstract: must be implemented by the instrument-specific subclass.
        """
        raise NotImplementedException()

    def _apply_params(self):
        """
        apply startup parameters to the instrument.
        @raise: InstrumentProtocolException if in wrong mode.
        """
        config = self.get_startup_config()
        log.debug("_apply_params startup config: %s", config)
        # Pass true to _set_params so we know these are startup values
        self._set_params(config, True)

    def _instrument_config_dirty(self):
        """
        Read the startup config and compare that to what the instrument
        is configured too.  If they differ then return True
        @return: True if the startup config doesn't match the instrument
        @raise: InstrumentParameterException
        """
        # Refresh the param dict cache

        self._update_params()

        startup_params = self._param_dict.get_startup_list()
        log.debug("Startup Parameters: %s", startup_params)

        # Dirty as soon as any startup parameter differs from the cached
        # instrument value.
        for param in startup_params:
            if self._param_dict.get(
                    param) != self._param_dict.get_config_value(param):
                log.debug("DIRTY: %s %s != %s", param,
                          self._param_dict.get(param),
                          self._param_dict.get_config_value(param))
                return True

        return False

    def _send_wakeup(self):
        """
        Send a newline to attempt to wake the sbe26plus device.
        """
        # ESCAPE clears any partial input before the newline wakeup.
        self._connection.send(ESCAPE)
        self._connection.send(NEWLINE)
예제 #7
0
class ZPLSCStatusParticle(DataParticle):
    """
    Routines for parsing raw data into a status particle structure. Override
    the building of values, and the rest should come along for free.

    Sample:
    {'connected': True,
     'er60_channels': {'GPT  38 kHz 00907207b7b1 6-1 OOI.38|200': {'frequency': 38000,
                                                                   'mode': 'active',
                                                                   'power': 100.0,
                                                                   'pulse_length': 0.000256,
                                                                   'sample_interval': 6.4e-05},
                       'GPT 120 kHz 00907207b7dc 1-1 ES120-7CD': {'frequency': 120000,
                                                                  'mode': 'active',
                                                                  'power': 100.0,
                                                                  'pulse_length': 6.4e-05,
                                                                  'sample_interval': 1.6e-05},
                       'GPT 200 kHz 00907207b7b1 6-2 OOI38|200': {'frequency': 200000,
                                                                  'mode': 'active',
                                                                  'power': 120.0,
                                                                  'pulse_length': 6.4e-05,
                                                                  'sample_interval': 1.6e-05}},
     'er60_status': {'current_running_interval': None,
                     'current_utc_time': '2014-07-08 22:34:18.667000',
                     'executable': 'c:/users/ooi/desktop/er60.lnk',
                     'fs_root': 'D:/',
                     'host': '157.237.15.100',
                     'next_scheduled_interval': None,
                     'pid': 1864,
                     'port': 56635,
                     'raw_output': {'current_raw_filename': 'OOI-D20140707-T214500.raw',
                                    'current_raw_filesize': None,
                                    'file_path': 'D:\\data\\QCT_1',
                                    'file_prefix': 'OOI',
                                    'max_file_size': 52428800,
                                    'sample_range': 220.0,
                                    'save_bottom': True,
                                    'save_index': True,
                                    'save_raw': True},
                     'scheduled_intervals_remaining': 0},
     'gpts_enabled': False,
     'schedule': {},
     'schedule_filename': 'qct_configuration_example_1.yaml'}

    """
    __metaclass__ = get_logging_metaclass(log_level='trace')

    _data_particle_type = DataParticleType.ZPLSC_STATUS

    def _encode_value(self, name, value, encoding_function):
        """
        Encode a value using the encoding function; if it fails, store the
        error in the encoding-errors queue.
        Overridden to handle None values: None is passed through unencoded
        instead of being fed to the encoding function.
        @param name: particle value id
        @param value: raw value to encode (may be None)
        @param encoding_function: callable used to convert the value
        @return: dict with DataParticleKey.VALUE_ID / VALUE entries
        """
        encoded_val = None

        if value is not None:
            try:
                encoded_val = encoding_function(value)
            except Exception:
                log.error("Data particle error encoding. Name:%s Value:%s",
                          name, value)
                self._encoding_errors.append({name: value})
        return {
            DataParticleKey.VALUE_ID: name,
            DataParticleKey.VALUE: encoded_val
        }

    @staticmethod
    def _channel_values(channel):
        """
        Extract (mode, power, pulse_length, sample_interval) from a single
        ER60 channel dict; missing keys yield None.
        """
        return (channel.get(MODE),
                channel.get(POWER),
                channel.get(PULSE_LENGTH),
                channel.get(SAMPLE_INTERVAL))

    def _build_parsed_values(self):
        """
        Parse ZPLSC Status response and return the ZPLSC Status particles
        @return: list of encoded particle value dicts
        @throws SampleException If there is a problem with sample
        """
        try:
            log.debug("status raw_data = %s", self.raw_data)
            config = self.raw_data

            if not isinstance(config, dict):
                # BUGFIX: the original applied '%' to a format string with no
                # conversion specifier, which raised TypeError instead of the
                # intended SampleException.
                raise SampleException(
                    "ZPLSC status data is not a dictionary: [%s]" %
                    self.raw_data)

            # Per-frequency channel settings; remain None if the channel
            # (or the whole er60_channels section) is absent.
            active_200k_mode = None
            active_200k_power = None
            active_200k_pulse_length = None
            active_200k_sample_interval = None
            active_120k_mode = None
            active_120k_power = None
            active_120k_pulse_length = None
            active_120k_sample_interval = None
            active_38k_mode = None
            active_38k_power = None
            active_38k_pulse_length = None
            active_38k_sample_interval = None

            connected = config.get(CONNECTED)
            er60_channels = config.get(ER60_CHANNELS)
            if er60_channels is not None:
                # Channels are identified by frequency substring in their key.
                for key in er60_channels:
                    if '200 kHz' in key:
                        (active_200k_mode, active_200k_power,
                         active_200k_pulse_length,
                         active_200k_sample_interval) = \
                            self._channel_values(er60_channels[key])
                    elif '120 kHz' in key:
                        (active_120k_mode, active_120k_power,
                         active_120k_pulse_length,
                         active_120k_sample_interval) = \
                            self._channel_values(er60_channels[key])
                    elif '38 kHz' in key:
                        (active_38k_mode, active_38k_power,
                         active_38k_pulse_length,
                         active_38k_sample_interval) = \
                            self._channel_values(er60_channels[key])

            # Status defaults; a few fields have non-None fallbacks that are
            # only overridden when the instrument reports an actual value.
            current_utc_time = None
            executable = None
            fs_root = None
            next_scheduled_interval = 'None'
            host = None
            pid = '0'
            port = None
            current_raw_filename = None
            current_raw_filesize = 0
            file_path = None
            file_prefix = None
            max_file_size = None
            sample_range = None
            save_bottom = None
            save_index = None
            save_raw = None
            scheduled_intervals_remaining = None

            er60_status = config.get(ER60_STATUS)
            if er60_status is not None:
                current_utc_time = er60_status.get(CURRENT_UTC_TIME)
                executable = er60_status.get(EXECUTABLE)
                fs_root = er60_status.get(FS_ROOT)

                # Keep the string default 'None' when the value is absent.
                interval = er60_status.get(NEXT_SCHEDULED_INTERVAL)
                if interval is not None:
                    next_scheduled_interval = interval

                host = er60_status.get(HOST)
                # Keep the string default '0' when the value is absent.
                reported_pid = er60_status.get(PID)
                if reported_pid is not None:
                    pid = reported_pid

                port = er60_status.get(PORT)
                raw_output = er60_status.get(RAW_OUTPUT)

                if raw_output is not None:
                    current_raw_filename = raw_output.get(CURRENT_RAW_FILENAME)

                    # Keep the 0 default when the value is absent.
                    filesize = raw_output.get(CURRENT_RAW_FILESIZE)
                    if filesize is not None:
                        current_raw_filesize = filesize

                    file_path = raw_output.get(FILE_PATH)
                    file_prefix = raw_output.get(FILE_PREFIX)
                    max_file_size = raw_output.get(MAX_FILE_SIZE)
                    sample_range = raw_output.get(SAMPLE_RANGE)
                    save_bottom = raw_output.get(SAVE_BOTTOM)
                    save_index = raw_output.get(SAVE_INDEX)
                    save_raw = raw_output.get(SAVE_RAW)

                scheduled_intervals_remaining = er60_status.get(
                    SCHEDULED_INTERVALS_REMAINING)
            gpts_enabled = config.get(GPTS_ENABLED)
            schedule_filename = config.get(SCHEDULE_FILENAME)

        except KeyError:
            # BUGFIX: message previously claimed "ValueError" although this
            # handler catches KeyError.
            raise SampleException(
                "KeyError while converting ZPLSC Status: [%s]" %
                self.raw_data)

        result = [
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CONNECTED,
                               connected, int),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_MODE,
                               active_200k_mode, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_POWER,
                               active_200k_power, float),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_PULSE_LENGTH,
                active_200k_pulse_length, float),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_SAMPLE_INTERVAL,
                active_200k_sample_interval, float),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_MODE,
                               active_120k_mode, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_POWER,
                               active_120k_power, float),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_PULSE_LENGTH,
                active_120k_pulse_length, float),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_SAMPLE_INTERVAL,
                active_120k_sample_interval, float),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_MODE,
                               active_38k_mode, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_POWER,
                               active_38k_power, float),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_PULSE_LENGTH,
                active_38k_pulse_length, float),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_SAMPLE_INTERVAL,
                active_38k_sample_interval, float),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CURRENT_UTC_TIME,
                               current_utc_time, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_EXECUTABLE,
                               executable, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FS_ROOT, fs_root,
                               str),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_NEXT_SCHEDULED_INTERVAL,
                next_scheduled_interval, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_HOST, host, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_PID, pid, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_PORT, port, int),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_CURRENT_RAW_FILENAME,
                current_raw_filename, str),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_CURRENT_RAW_FILESIZE,
                current_raw_filesize, int),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FILE_PATH,
                               file_path, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FILE_PREFIX,
                               file_prefix, str),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_MAX_FILE_SIZE,
                               max_file_size, int),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAMPLE_RANGE,
                               sample_range, float),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_BOTTOM,
                               save_bottom, int),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_INDEX,
                               save_index, int),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_RAW, save_raw,
                               int),
            self._encode_value(
                ZPLSCStatusParticleKey.ZPLSC_SCHEDULED_INTERVALS_REMAINING,
                scheduled_intervals_remaining, int),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_GPTS_ENABLED,
                               gpts_enabled, int),
            self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SCHEDULE_FILENAME,
                               schedule_filename, str)
        ]

        log.debug("build_parsed_value: %s", result)

        return result
예제 #8
0
class THSPHProtocol(CommandResponseInstrumentProtocol):
    """
    Instrument protocol class for the THSPH instrument.
    Subclasses CommandResponseInstrumentProtocol.

    The instrument is polled rather than streaming: autosample mode is
    implemented with a scheduler job that periodically raises
    SCHEDULE_ACQUIRE_SAMPLE events, each of which requests a single sample.
    """
    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, prompts, newline, driver_event):
        """
        Protocol constructor.
        @param prompts A BaseEnum class containing instrument prompts.
        @param newline The newline.
        @param driver_event Driver process event callback.
        """
        # Construct protocol superclass.
        CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event)

        # Build protocol state machine.
        self._protocol_fsm = InstrumentFSM(ProtocolState, ProtocolEvent,
                                           ProtocolEvent.ENTER, ProtocolEvent.EXIT)

        # Add event handlers for protocol state machine.
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.ENTER, self._handler_unknown_enter)
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.EXIT, self._handler_unknown_exit)
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.DISCOVER, self._handler_unknown_discover)

        self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ENTER, self._handler_command_enter)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.EXIT, self._handler_command_exit)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.START_AUTOSAMPLE,
                                       self._handler_command_start_autosample)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ACQUIRE_SAMPLE,
                                       self._handler_command_acquire_sample)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.GET, self._handler_command_get)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.SET, self._handler_command_set)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.START_DIRECT,
                                       self._handler_command_start_direct)

        # The scheduler's SCHEDULE_ACQUIRE_SAMPLE event reuses the command-state
        # acquire-sample handler while in AUTOSAMPLE.
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.ENTER, self._handler_autosample_enter)
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.EXIT, self._handler_autosample_exit)
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.SCHEDULE_ACQUIRE_SAMPLE,
                                       self._handler_command_acquire_sample)
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.STOP_AUTOSAMPLE,
                                       self._handler_autosample_stop_autosample)

        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.ENTER,
                                       self._handler_direct_access_enter)
        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.EXIT,
                                       self._handler_direct_access_exit)
        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.EXECUTE_DIRECT,
                                       self._handler_direct_access_execute_direct)
        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.STOP_DIRECT,
                                       self._handler_direct_access_stop_direct)

        # Construct the parameter dictionary containing device parameters,
        # current parameter values, and set formatting functions.
        self._build_driver_dict()
        self._build_command_dict()
        self._build_param_dict()

        # Add build handlers for device commands.
        self._add_build_handler(Command.GET_SAMPLE, self._build_simple_command)
        self._add_build_handler(Command.COMM_TEST, self._build_simple_command)

        # Add response handlers for device commands (none needed: samples and
        # wakeup responses are handled via the chunker / prompt buffer instead).

        # Start the protocol state machine in the UNKNOWN state.
        self._protocol_fsm.start(ProtocolState.UNKNOWN)

        # commands sent to device to be filtered in responses for telnet DA
        self._sent_cmds = []

        self._chunker = StringChunker(THSPHProtocol.sieve_function)

    @staticmethod
    def sieve_function(raw_data):
        """
        Split raw data into candidate sample chunks.
        @param raw_data: accumulated raw data from the instrument
        @return: list of (start, end) index tuples, one for each substring of
                 raw_data matching the sample particle regex
        """
        matchers = []
        return_list = []

        matchers.append(THSPHParticle.regex_compiled())

        for matcher in matchers:
            log.trace('matcher: %r raw_data: %r', matcher.pattern, raw_data)
            for match in matcher.finditer(raw_data):
                return_list.append((match.start(), match.end()))

        return return_list

    def _got_chunk(self, chunk, timestamp):
        """
        The base class got_data has gotten a chunk from the chunker.  Pass it to extract_sample
        with the appropriate particle objects and REGEXes.
        @raise InstrumentProtocolException if the chunk does not match the sample regex
        """
        if not self._extract_sample(THSPHParticle, THSPHParticle.regex_compiled(), chunk, timestamp):
            raise InstrumentProtocolException("Unhandled chunk")

    def _build_driver_dict(self):
        """
        Populate the driver dictionary with options
        """
        self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)

    def _build_command_dict(self):
        """
        Populate the command dictionary with command.
        """
        self._cmd_dict.add(Capability.START_AUTOSAMPLE, display_name="start autosample")
        self._cmd_dict.add(Capability.STOP_AUTOSAMPLE, display_name="stop autosample")
        self._cmd_dict.add(Capability.ACQUIRE_SAMPLE, display_name="acquire sample")
        self._cmd_dict.add(Capability.SET, display_name="set")
        self._cmd_dict.add(Capability.GET, display_name="get")

    def _build_param_dict(self):
        """
        Populate the parameter dictionary with THSPH parameters.
        For each parameter key, add match string, match lambda function,
        and value formatting function for set commands.
        INTERVAL (the autosample polling period, seconds) is the only
        parameter this driver exposes.
        """

        # Add parameter handlers to parameter dict.
        self._param_dict.add(Parameter.INTERVAL,
                             r'Auto Polled Interval = (\d+)',
                             lambda match: int(match.group(1)),
                             str,
                             type=ParameterDictType.INT,
                             units=Units.SECOND,
                             display_name="Polled Interval",
                             startup_param=True,
                             direct_access=False,
                             default_value=5)

    def _filter_capabilities(self, events):
        """
        Return a list of currently available capabilities.
        """
        return [x for x in events if Capability.has(x)]

    ########################################################################
    # Unknown State handlers.
    ########################################################################
    def _handler_unknown_enter(self, *args, **kwargs):
        """
        Enter unknown state.
        """
        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_unknown_exit(self, *args, **kwargs):
        """
        Exit unknown state.
        """
        pass

    def _handler_unknown_discover(self, *args, **kwargs):
        """
        Discover current state; Change next state to be COMMAND state.
        The instrument keeps no queryable state, so discovery always
        transitions to COMMAND unconditionally.
        @retval (next_state, result).
        """
        log.debug('_handler_unknown_discover ')

        next_state = ProtocolState.COMMAND
        next_agent_state = ResourceAgentState.IDLE

        return next_state, next_agent_state

    ########################################################################
    # Command State handlers.
    ########################################################################
    def _handler_command_acquire_sample(self, *args, **kwargs):
        """
        Poll the instrument for a single sample by sending GET_SAMPLE.
        No response is awaited here; the sample arrives asynchronously and
        is picked up by the chunker and _got_chunk.
        """
        log.debug("_handler_command_acquire_sample")

        next_state = None
        next_agent_state = None
        result = None

        self._do_cmd_no_resp(Command.GET_SAMPLE, timeout=TIMEOUT)

        return next_state, (next_agent_state, result)

    def _handler_command_enter(self, *args, **kwargs):
        """
        Enter command state: apply init params and announce the state change.
        """
        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._init_params()
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_command_exit(self, *args, **kwargs):
        """
        Exit command state.
        """
        pass

    def _handler_command_get(self, *args, **kwargs):
        """
        Get device parameters from the parameter dict.  First we set a baseline timestamp
        that all data expirations will be calculated against.  Then we try to get parameter
        value.  If we catch an expired parameter then we will update all parameters and get
        values using the original baseline time that we set at the beginning of this method.
        Assuming our _update_params is updating all parameter values properly then we can
        ensure that all data will be fresh.  Nobody likes stale data!
        @param args[0] list of parameters to retrieve, or DriverParameter.ALL.
        """
        return self._handler_get(*args, **kwargs)

    def _handler_command_set(self, *args, **kwargs):
        """
        Perform a set command.
        @param args[0] parameter : value dict.
        @retval (next_state, result) tuple, (None, None).
        @throws InstrumentParameterException if missing set parameters, if set parameters not ALL and
        not a dict, or if parameter can't be properly formatted.

        """
        next_state = None
        result = None
        startup = False

        log.debug("_handler_command_set enter ")
        # Retrieve required parameter.
        # Raise if no parameter provided, or not a dict.
        try:
            params = args[0]

        except IndexError:
            raise InstrumentParameterException('Set command requires a parameter dict.')

        if not isinstance(params, dict):
            raise InstrumentParameterException('Set parameters not a dict.')

        # Optional second positional arg flags a startup-time set.
        try:
            startup = args[1]
        except IndexError:
            pass

        # Only announce a config change if a value actually changed.
        old_config = self._param_dict.get_config()
        self._set_params(params, startup)

        new_config = self._param_dict.get_config()
        if old_config != new_config:
            self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)

        return next_state, result

    def _set_params(self, *args, **kwargs):
        """
        Set various parameters internally to the driver. No issuing commands to the
        instrument needed for this driver.
        """
        log.debug("_set_params ")
        try:
            params = args[0]
        except IndexError:
            raise InstrumentParameterException('Set command requires a parameter dict.')

        #list can be null, like in the case of direct access params, in this case do nothing
        if not params:
            return

        # Sampling interval is the only parameter that is set by the driver.
        # Do a range check before we start all sets
        for (key, val) in params.iteritems():
            if key == Parameter.INTERVAL and not (0 < val < 601):
                log.debug("Auto Sample Interval not in 1 to 600 range ")
                raise InstrumentParameterException("sample interval out of range [1, 600]")
            log.debug('key = (%s), value = (%s)' % (key, val))

        # NOTE(review): raises KeyError if INTERVAL is absent from params.
        # Tolerable only because INTERVAL is the sole parameter declared in
        # _build_param_dict — confirm before adding parameters.
        self._param_dict.set_value(Parameter.INTERVAL, params[Parameter.INTERVAL])

    def _handler_command_start_autosample(self, *args, **kwargs):
        """
        Switch into autosample mode.
        @retval (next_state, result) tuple, (ProtocolState.AUTOSAMPLE,
        (next_agent_state, None) if successful.
        @throws InstrumentTimeoutException if device cannot be woken for command.
        @throws InstrumentProtocolException if command could not be built or misunderstood.
        """
        result = None

        next_state = ProtocolState.AUTOSAMPLE
        next_agent_state = ResourceAgentState.STREAMING

        return next_state, (next_agent_state, result)

    def _handler_command_start_direct(self):
        """
        Start direct access
        """
        return ProtocolState.DIRECT_ACCESS, (ResourceAgentState.DIRECT_ACCESS, None)

    #######################################################################
    # Autosample State handlers.
    ########################################################################
    def _handler_autosample_enter(self, *args, **kwargs):
        """
        Enter autosample state  Because this is an instrument that must be
        polled we need to ensure the scheduler is added when we are in an
        autosample state.  This scheduler raises events to poll the
        instrument for data.
        @retval next_state, (next_agent_state, result)
        """
        log.debug("_handler_autosample_enter ")

        self._init_params()

        self._setup_autosample_config()

        # Schedule auto sample task
        self._add_scheduler_event(ScheduledJob.AUTO_SAMPLE, ProtocolEvent.SCHEDULE_ACQUIRE_SAMPLE)

        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

        return None, (None, None)

    def _setup_autosample_config(self):
        """
        Set up auto sample configuration and add it to the scheduler.
        The job fires every INTERVAL seconds (taken from the parameter dict).
        """
        # Start the scheduler to poll the instrument for
        # data every sample interval seconds

        log.debug("_setup_autosample_config")
        job_name = ScheduledJob.AUTO_SAMPLE
        polled_interval = self._param_dict.get_config_value(Parameter.INTERVAL)
        config = {
            DriverConfigKey.SCHEDULER: {
                job_name: {
                    DriverSchedulerConfigKey.TRIGGER: {
                        DriverSchedulerConfigKey.TRIGGER_TYPE: TriggerType.INTERVAL,
                        DriverSchedulerConfigKey.SECONDS: polled_interval
                    }
                }
            }
        }
        self.set_init_params(config)

        # Start the scheduler if it is not running
        if not self._scheduler:
            self.initialize_scheduler()

    def _handler_autosample_exit(self, *args, **kwargs):
        """
        Exit auto sample state. Remove the auto sample task
        """
        log.debug("_handler_autosample_exit ")

        next_state = None
        next_agent_state = None
        result = None

        return next_state, (next_agent_state, result)

    def _handler_autosample_stop_autosample(self, *args, **kwargs):
        """
        Remove the auto sample task. Exit Auto sample state
        """
        log.debug("_handler_autosample_stop_autosample ")

        result = None

        # Stop the Auto Poll scheduling
        self._remove_scheduler(ScheduledJob.AUTO_SAMPLE)

        next_state = ProtocolState.COMMAND
        next_agent_state = ResourceAgentState.COMMAND
        return next_state, (next_agent_state, result)

    ########################################################################
    # Direct access handlers.
    ########################################################################

    def _handler_direct_access_enter(self, *args, **kwargs):
        """
        Enter direct access state.
        """
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
        self._sent_cmds = []

    def _handler_direct_access_exit(self, *args, **kwargs):
        """
        Exit direct access state.
        """
        pass

    def _handler_direct_access_execute_direct(self, data):
        """
        Execute direct command
        @param data raw bytes to forward to the instrument unmodified
        """
        next_state = None
        result = None
        next_agent_state = None

        self._do_cmd_direct(data)

        # add sent command to list for 'echo' filtering in callback
        self._sent_cmds.append(data)

        return next_state, (next_agent_state, result)

    def _handler_direct_access_stop_direct(self):
        """
        @throw InstrumentProtocolException on invalid command
        """
        result = None

        next_state = ProtocolState.COMMAND
        next_agent_state = ResourceAgentState.COMMAND

        return next_state, (next_agent_state, result)

    def _build_simple_command(self, cmd, *args):
        """
        Build handler for basic THSPH commands.
        @param cmd the simple ooicore command to format.
        @retval The command to be sent to the device.
        """
        return "%s%s" % (cmd, NEWLINE)

    def _build_set_command(self, cmd, param, val):
        """
        Build handler for set commands. param=val followed by newline.
        String val constructed by param dict formatting function.
        @param param the parameter key to set.
        @param val the parameter value to set.
        @ retval The set command to be sent to the device.
        @throws InstrumentParameterException if the parameter is not valid or
        if the formatting function could not accept the value passed.
        @throws InstrumentProtocolException if there is no build handler for the
        communication test command.
        """
        try:
            str_val = self._param_dict.format(param, val)

            # NOTE(review): compares against the literal 'INTERVAL' rather
            # than Parameter.INTERVAL — confirm the enum's value is 'INTERVAL'.
            if param == 'INTERVAL':
                param = 'sampleinterval'

            set_cmd = '%s=%s' % (param, str_val)
            set_cmd += NEWLINE

        except KeyError:
            raise InstrumentParameterException('Unknown driver parameter %s' % param)

        return set_cmd

    def _wakeup(self, wakeup_timeout=WAKEUP_TIMEOUT, response_timeout=RESPONSE_TIMEOUT):
        """
        waking this instrument up by sending MAX_COM_TEST communication test commands
        (aP*)
        The instrument is considered awake only after MAX_COMM_TEST
        *consecutive* successful communication test responses.
        @param wakeup_timeout The timeout to wake the device.
        @param response_timeout The time to look for response to a wakeup attempt.
        @throw InstrumentTimeoutException if the device could not be woken.
        """
        log.debug("_wakeup ")

        sleep_time = CMD_RESP_TIME
        cmd_line = self._build_simple_command(Command.COMM_TEST)

        # Grab start time for overall wakeup timeout.
        start_time = time.time()
        test_count = 0
        while test_count < MAX_COMM_TEST:
            # Clear the prompt buffer.
            self._promptbuf = ''

            # Send a communication test command and wait delay amount for response.
            self._connection.send(cmd_line)
            time.sleep(sleep_time)
            if self._promptbuf.find(Prompt.COMM_RESPONSE) != -1:
                # instrument is awake
                log.debug('_wakeup: got communication test response %s', Prompt.COMM_RESPONSE)
                test_count += 1
            else:
                #clear test_count since we want MAX_COMM_TEST consecutive successful communication test
                test_count = 0
            # Stop wake up the instrument if the wake up time out has elapsed
            if time.time() > start_time + wakeup_timeout:
                break

        if test_count != MAX_COMM_TEST:
            log.debug('instrument failed to wakeup in %d seconds time' % wakeup_timeout)
            raise InstrumentTimeoutException(
                "_wakeup(): instrument failed to wakeup in %d seconds time" % wakeup_timeout)

        else:
            return Prompt.COMM_RESPONSE
Example #9
from mi.core.instrument.instrument_driver import DriverEvent
from mi.core.instrument.instrument_driver import DriverAsyncEvent
from mi.core.instrument.instrument_driver import DriverProtocolState
from mi.core.instrument.instrument_driver import DriverParameter
from mi.core.instrument.instrument_driver import ResourceAgentState
from mi.core.instrument.data_particle import DataParticle
from mi.core.instrument.data_particle import CommonDataParticleType
from mi.core.instrument.chunker import StringChunker
from mi.instrument.harvard.massp.common import MASSP_STATE_ERROR, MASSP_CLEAR_ERROR

__author__ = 'Peter Cable'
__license__ = 'Apache 2.0'

# Module-level logger for this driver.
log = get_logger()

# Metaclass shared by classes in this module to add call logging.
METALOGGER = get_logging_metaclass()

# Instrument newline: carriage return only.
NEWLINE = '\r'


class DataParticleType(BaseEnum):
    """
    Enumeration of the data particle streams emitted by this driver.
    """
    # Pass-through raw stream common to all drivers.
    RAW = CommonDataParticleType.RAW
    # Status particle produced by the MASSP MCU component.
    MCU_STATUS = 'massp_mcu_status'


class ProtocolState(BaseEnum):
    """
Example #10
class ZplscCDclParser(SimpleParser):
    """
    ZPLSC C DCL Parser.

    Reads a zplsc_c DCL log file (averaged condensed ASCII data) and turns
    each condensed-data line into a ZplscCInstrumentDataParticle.
    """

    __metaclass__ = get_logging_metaclass(log_level='trace')

    def parse_file(self):
        """
        Parse the zplsc_c log file (averaged condensed data).
        Read the file line by line.  Condensed ASCII sensor-data lines are
        converted to particles and appended to the record buffer; DCL status
        and instrument phase-status lines are recognized and skipped; any
        other line is reported through the exception callback.
        """

        # Loop over all lines in the data file and parse the data to generate particles
        for number, line in enumerate(self._stream_handle, start=1):

            # Check if this is the dcl status log
            match = DCL_LOG_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED DCL_LOG_MATCHER: %s: %s", number, match.groups())
                # No data to extract, move on to the next line
                continue

            # Check if this is the instrument phase status log
            match = PHASE_STATUS_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED PHASE_STATUS_MATCHER: %s: %s", number, match.groups())
                # No data to extract, move on to the next line
                continue

            # Check if this is the instrument condensed ASCII data
            match = SENSOR_DATA_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED SENSOR_DATA_MATCHER: %s: %s", number, match.groups())

                # Extract the condensed ASCII data from this line
                data_dict = self.parse_line(match)
                if data_dict is None:
                    log.error('Erroneous data found in line %s: %s', number, line)
                    continue

                dcl_timestamp = data_dict[ZplscCDataKey.DCL_TIMESTAMP]
                # dcl_timestamp is the port_timestamp
                port_timestamp = dcl_time_to_ntp(dcl_timestamp)

                transmission_timestamp = data_dict[ZplscCParticleKey.TRANS_TIMESTAMP]
                # transmission_timestamp is the the internal_timestamp
                internal_timestamp = timestamp_yyyymmddhhmmss_to_ntp(transmission_timestamp)

                # Extract a particle and append it to the record buffer.
                particle = self._extract_sample(ZplscCInstrumentDataParticle,
                                                None,
                                                data_dict,
                                                internal_timestamp=internal_timestamp,
                                                port_timestamp=port_timestamp,
                                                preferred_ts=DataParticleKey.PORT_TIMESTAMP)
                if particle is not None:
                    log.trace('Parsed particle: %s' % particle.generate_dict())
                    self._record_buffer.append(particle)

                continue

            # Error, line did not match any expected regex
            self._exception_callback(
                RecoverableSampleException('Unknown data found in line %s:%s' % (number, line)))

    @staticmethod
    def parse_line(matches):
        """
        Parse a line from the zplsc_c log file (averaged condensed data).
        If erroneous data is detected return None so the line will be skipped.
        @param matches: MatchObject containing regex matches for ZPLSC_C condensed ASCII data
        @return: dictionary of values with the particle names as keys or None
        """
        # Flatten the match into one list: the two timestamps followed by the
        # comma-separated condensed values, consumed positionally below.
        data = [matches.group('dcl_timestamp'), matches.group('transmission_timestamp')] +\
            matches.group('condensed_data').split(',')
        num_freqs = matches.group('num_of_freqs')

        # Number of frequencies should be a number from 1 through 4 only
        if not num_freqs.isdigit() or not (1 <= int(num_freqs) <= MAX_NUM_FREQS):
            log.error("Invalid data: Number of frequencies out of range(1-4): %s", num_freqs)
            return None

        data_dict = {}
        index = 0

        # Iterate through the ZPLSC_C data rules to parse out the individual condensed ASCII data
        # Each rule is a (particle key, count-or-count-reference) pair: a str
        # counter names a previously parsed key whose value gives the count.
        for key, counter in ZPLSC_C_DATA_RULES:
            # Skip channels beyond the expected number of frequencies (from 1-4)
            channel = key[-1]
            if channel.isdigit() and (int(channel) > int(num_freqs)):
                data_dict[key] = None
                continue

            # Retrieve the expected length of data for this key
            count = counter
            if type(counter) is str:
                count = data_dict.get(counter)
                if not count.isdigit():
                    log.error("Invalid data: %s value %s %s is not a valid count integer.",
                              counter, type(count), count)
                    return None
                count = int(count)

            # Check if position and length are within array bounds
            # (chained comparison: remaining items must be >= 0 AND >= count)
            if not 0 <= (len(data) - index) >= count:
                log.error("Invalid data: Expected data count(%s) out of bounds for %s"
                          ", length:%s, index: %s",
                          count, key, len(data), index)
                return None

            # Set the data in the particle data dictionary
            try:
                if count > 1:
                    # For the value list, add back in the minimum value that was subtracted by the instrument.
                    data_dict[key] = data[index:index + count]
                    if key in min_value_mapping:
                        min_value = int(data_dict[min_value_mapping[key]])
                        data_dict[key] = [int(data_value) + min_value for data_value in data_dict[key]]
                elif count == 1:
                    data_dict[key] = data[index]
                else:
                    data_dict[key] = None
            # Python 2 exception syntax, consistent with the rest of this file.
            except IndexError, e:
                log.error("IndexError %s: %s: %s >= data length %s, index: %s",
                          e, key, count, (len(data)-index), index)
                return None

            index += count

        # Check for valid board numbers per channel (0-3)
        for value, name in enumerate((ZplscCDataKey.BOARD_NUM_CHAN_1, ZplscCDataKey.BOARD_NUM_CHAN_2,
                                      ZplscCDataKey.BOARD_NUM_CHAN_3, ZplscCDataKey.BOARD_NUM_CHAN_4)):
            if data_dict[name] not in (str(value), None):
                log.error("Invalid data: %s should always be \'%s\' or None: %s %s",
                          name, value, data_dict[name], type(data_dict[name]))
                return None

        # Check for valid frequency value per channel
        for name in (ZplscCParticleKey.FREQ_CHAN_1, ZplscCParticleKey.FREQ_CHAN_2,
                     ZplscCParticleKey.FREQ_CHAN_3, ZplscCParticleKey.FREQ_CHAN_4):
            if data_dict[name] not in VALID_FREQUENCIES:
                log.error("Invalid data: %s: %s (Valid values %s)",
                          name, data_dict[name], VALID_FREQUENCIES)
                return None

        return data_dict
Example #11
class THSPHProtocol(CommandResponseInstrumentProtocol):
    """
    Instrument protocol class for the THSPH instrument.
    Subclasses CommandResponseInstrumentProtocol.

    The instrument exists in three hardware series (A, B, C); each series
    uses a different one-shot ADC sample command.  The driver polls the
    instrument on a scheduler while in AUTOSAMPLE state.
    """
    SERIES_A = 'A'
    SERIES_B = 'B'
    SERIES_C = 'C'
    GET_SAMPLE_SERIES_A = 'aH*'  # Gets data sample from ADC for series A
    GET_SAMPLE_SERIES_B = 'bH*'  # Gets data sample from ADC for series B
    GET_SAMPLE_SERIES_C = 'cH*'  # Gets data sample from ADC for series C

    # THSPH commands for instrument series A, B and C
    THSPH_COMMANDS = {
        SERIES_A: {
            Command.GET_SAMPLE: GET_SAMPLE_SERIES_A
        },
        SERIES_B: {
            Command.GET_SAMPLE: GET_SAMPLE_SERIES_B
        },
        SERIES_C: {
            Command.GET_SAMPLE: GET_SAMPLE_SERIES_C
        },
    }

    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, prompts, newline, driver_event):
        """
        Protocol constructor.
        @param prompts A BaseEnum class containing instrument prompts.
        @param newline The newline.
        @param driver_event Driver process event callback.
        """
        # Construct protocol superclass.
        CommandResponseInstrumentProtocol.__init__(self, prompts, newline,
                                                   driver_event)

        # Build protocol state machine.
        self._protocol_fsm = InstrumentFSM(ProtocolState, ProtocolEvent,
                                           ProtocolEvent.ENTER,
                                           ProtocolEvent.EXIT)

        # Add event handlers for protocol state machine.
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.ENTER,
                                       self._handler_unknown_enter)
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.EXIT,
                                       self._handler_unknown_exit)
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.DISCOVER,
                                       self._handler_unknown_discover)

        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.ENTER,
                                       self._handler_command_enter)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.EXIT,
                                       self._handler_command_exit)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.START_AUTOSAMPLE,
                                       self._handler_command_start_autosample)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.ACQUIRE_SAMPLE,
                                       self._handler_command_acquire_sample)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.GET,
                                       self._handler_command_get)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.SET,
                                       self._handler_command_set)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.START_DIRECT,
                                       self._handler_command_start_direct)

        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.ENTER,
                                       self._handler_autosample_enter)
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.EXIT,
                                       self._handler_autosample_exit)
        # The scheduled-acquire event reuses the command-state acquire handler.
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.SCHEDULE_ACQUIRE_SAMPLE,
                                       self._handler_command_acquire_sample)
        self._protocol_fsm.add_handler(
            ProtocolState.AUTOSAMPLE, ProtocolEvent.STOP_AUTOSAMPLE,
            self._handler_autosample_stop_autosample)

        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS,
                                       ProtocolEvent.ENTER,
                                       self._handler_direct_access_enter)
        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS,
                                       ProtocolEvent.EXIT,
                                       self._handler_direct_access_exit)
        self._protocol_fsm.add_handler(
            ProtocolState.DIRECT_ACCESS, ProtocolEvent.EXECUTE_DIRECT,
            self._handler_direct_access_execute_direct)
        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS,
                                       ProtocolEvent.STOP_DIRECT,
                                       self._handler_direct_access_stop_direct)

        # Construct the parameter dictionary containing device parameters,
        # current parameter values, and set formatting functions.
        self._build_driver_dict()
        self._build_command_dict()
        self._build_param_dict()

        # Add build handlers for device commands.
        self._add_build_handler(Command.GET_SAMPLE, self._build_simple_command)

        # Start state machine in UNKNOWN state.
        self._protocol_fsm.start(ProtocolState.UNKNOWN)

        # commands sent to device to be filtered in responses for telnet DA
        self._sent_cmds = []

        self._chunker = StringChunker(THSPHProtocol.sieve_function)

        # Set Get Sample Command and Communication Test Command for Series A as default
        self._get_sample_cmd = self.GET_SAMPLE_SERIES_A

    @staticmethod
    def sieve_function(raw_data):
        """
        The method that splits samples.
        @param raw_data accumulated raw input from the instrument.
        @retval list of (start, end) index tuples for each matched sample.
        """
        matchers = []
        return_list = []

        matchers.append(THSPHParticle.regex_compiled())

        for matcher in matchers:
            log.trace('matcher: %r raw_data: %r', matcher.pattern, raw_data)
            for match in matcher.finditer(raw_data):
                return_list.append((match.start(), match.end()))

        return return_list

    def _got_chunk(self, chunk, timestamp):
        """
        The base class got_data has gotten a chunk from the chunker.  Pass it to extract_sample
        with the appropriate particle objects and REGEXes.
        @raise InstrumentProtocolException if the chunk does not match a known particle.
        """
        if not self._extract_sample(THSPHParticle,
                                    THSPHParticle.regex_compiled(), chunk,
                                    timestamp):
            raise InstrumentProtocolException("Unhandled chunk")

    def _build_driver_dict(self):
        """
        Populate the driver dictionary with options
        """
        self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)

    def _build_command_dict(self):
        """
        Populate the command dictionary with command.
        """
        self._cmd_dict.add(Capability.START_AUTOSAMPLE,
                           display_name="Start Autosample")
        self._cmd_dict.add(Capability.STOP_AUTOSAMPLE,
                           display_name="Stop Autosample")
        self._cmd_dict.add(Capability.ACQUIRE_SAMPLE,
                           display_name="Acquire Sample")
        self._cmd_dict.add(Capability.DISCOVER, display_name='Discover')

    def _build_param_dict(self):
        """
        Populate the parameter dictionary with THSPH parameters.
        For each parameter key, add match string, match lambda function,
        and value formatting function for set commands.
        """

        # Add parameter handlers to parameter dict.
        self._param_dict.add(Parameter.INTERVAL,
                             r'Auto Polled Interval = (\d+)',
                             lambda match: int(match.group(1)),
                             str,
                             type=ParameterDictType.INT,
                             units=Units.SECOND,
                             display_name="Polled Interval",
                             visibility=ParameterDictVisibility.READ_WRITE,
                             startup_param=True,
                             direct_access=False,
                             default_value=5)

        self._param_dict.add(
            Parameter.INSTRUMENT_SERIES,
            r'Instrument Series = ([A-C])',
            # The series is a single letter (A-C); it must be kept as a
            # string.  (int() here would raise ValueError on e.g. 'A'.)
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="Instrument Series",
            description='Defines instance of instrument series [A, B, C].',
            visibility=ParameterDictVisibility.IMMUTABLE,
            startup_param=True,
            direct_access=False,
            default_value='A')

    def _filter_capabilities(self, events):
        """
        Return a list of currently available capabilities.
        """
        return [x for x in events if Capability.has(x)]

    ########################################################################
    # Unknown State handlers.
    ########################################################################
    def _handler_unknown_enter(self, *args, **kwargs):
        """
        Enter unknown state.
        """
        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_unknown_exit(self, *args, **kwargs):
        """
        Exit unknown state.
        """
        pass

    def _handler_unknown_discover(self, *args, **kwargs):
        """
        Discover current state; Change next state to be COMMAND state.
        @retval (next_state, result).
        """
        next_state = ProtocolState.COMMAND
        next_agent_state = ResourceAgentState.IDLE

        return next_state, next_agent_state

    ########################################################################
    # Command State handlers.
    ########################################################################
    def _handler_command_acquire_sample(self, *args, **kwargs):
        """
        Poll the instrument for a single sample.  The response arrives
        asynchronously and is handled by the chunker/_got_chunk path.
        @retval (next_state, (next_agent_state, result)) - all None.
        """
        next_state = None
        next_agent_state = None
        result = None

        self._do_cmd_no_resp(Command.GET_SAMPLE, timeout=TIMEOUT)

        return next_state, (next_agent_state, result)

    def _handler_command_enter(self, *args, **kwargs):
        """
        Enter command state; initialize startup parameters.
        """
        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._init_params()
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_command_exit(self, *args, **kwargs):
        """
        Exit command state.
        """
        pass

    def _handler_command_get(self, *args, **kwargs):
        """
        Get device parameters from the parameter dict.  First we set a baseline timestamp
        that all data expirations will be calculated against.  Then we try to get parameter
        value.  If we catch an expired parameter then we will update all parameters and get
        values using the original baseline time that we set at the beginning of this method.
        Assuming our _update_params is updating all parameter values properly then we can
        ensure that all data will be fresh.  Nobody likes stale data!
        @param args[0] list of parameters to retrieve, or DriverParameter.ALL.
        """
        return self._handler_get(*args, **kwargs)

    def _handler_command_set(self, *args, **kwargs):
        """
        Perform a set command.
        @param args[0] parameter : value dict.
        @retval (next_state, result) tuple, (None, None).
        @throws InstrumentParameterException if missing set parameters, if set parameters not ALL and
        not a dict, or if parameter can't be properly formatted.

        """
        next_state = None
        result = None
        startup = False

        # Retrieve required parameter.
        # Raise if no parameter provided, or not a dict.
        try:
            params = args[0]

        except IndexError:
            raise InstrumentParameterException(
                'Set command requires a parameter dict.')

        if not isinstance(params, dict):
            raise InstrumentParameterException('Set parameters not a dict.')

        try:
            startup = args[1]
        except IndexError:
            pass

        old_config = self._param_dict.get_config()
        self._set_params(params, startup)

        # Notify the agent only when the configuration actually changed.
        new_config = self._param_dict.get_config()
        if old_config != new_config:
            self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)

        return next_state, result

    def _set_params(self, *args, **kwargs):
        """
        Set various parameters internally to the driver. No issuing commands to the
        instrument needed for this driver.
        @param args[0] parameter : value dict.
        @throws InstrumentParameterException if no dict is given or a value is invalid.
        """

        try:
            params = args[0]
        except IndexError:
            raise InstrumentParameterException(
                'Set command requires a parameter dict.')

        # list can be null, like in the case of direct access params, in this case do nothing
        if not params:
            return

        # Do a range check before we start all sets
        for (key, val) in params.iteritems():

            if key == Parameter.INTERVAL and not (0 < val < 601):
                log.debug("Auto Sample Interval not in 1 to 600 range ")
                raise InstrumentParameterException(
                    "sample interval out of range [1, 600]")

            if key == Parameter.INSTRUMENT_SERIES:
                if val not in 'ABC':
                    log.debug("Instrument Series is not A, B or C ")
                    raise InstrumentParameterException(
                        "Instrument Series is invalid")
                else:
                    # Switch the sample command to match the selected series.
                    self._get_sample_cmd = self.THSPH_COMMANDS[val][
                        Command.GET_SAMPLE]

            log.debug('key = (%s), value = (%s)' % (key, val))

            self._param_dict.set_value(key, val)

    def _handler_command_start_autosample(self, *args, **kwargs):
        """
        Switch into autosample mode.
        @retval (next_state, result) tuple, (ProtocolState.AUTOSAMPLE,
        (next_agent_state, None) if successful.
        @throws InstrumentTimeoutException if device cannot be woken for command.
        @throws InstrumentProtocolException if command could not be built or misunderstood.
        """
        result = None

        next_state = ProtocolState.AUTOSAMPLE
        next_agent_state = ResourceAgentState.STREAMING

        return next_state, (next_agent_state, result)

    def _handler_command_start_direct(self):
        """
        Start direct access
        """
        return ProtocolState.DIRECT_ACCESS, (ResourceAgentState.DIRECT_ACCESS,
                                             None)

    #######################################################################
    # Autosample State handlers.
    ########################################################################
    def _handler_autosample_enter(self, *args, **kwargs):
        """
        Enter autosample state  Because this is an instrument that must be
        polled we need to ensure the scheduler is added when we are in an
        autosample state.  This scheduler raises events to poll the
        instrument for data.
        @retval next_state, (next_agent_state, result)
        """

        self._init_params()

        self._setup_autosample_config()

        # Schedule auto sample task
        self._add_scheduler_event(ScheduledJob.AUTO_SAMPLE,
                                  ProtocolEvent.SCHEDULE_ACQUIRE_SAMPLE)

        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

        return None, (None, None)

    def _setup_autosample_config(self):
        """
        Set up auto sample configuration and add it to the scheduler.
        """
        # Start the scheduler to poll the instrument for
        # data every sample interval seconds

        job_name = ScheduledJob.AUTO_SAMPLE
        polled_interval = self._param_dict.get_config_value(Parameter.INTERVAL)
        config = {
            DriverConfigKey.SCHEDULER: {
                job_name: {
                    DriverSchedulerConfigKey.TRIGGER: {
                        DriverSchedulerConfigKey.TRIGGER_TYPE:
                        TriggerType.INTERVAL,
                        DriverSchedulerConfigKey.SECONDS: polled_interval
                    }
                }
            }
        }
        self.set_init_params(config)

        # Start the scheduler if it is not running
        if not self._scheduler:
            self.initialize_scheduler()

    def _handler_autosample_exit(self, *args, **kwargs):
        """
        Exit auto sample state. Remove the auto sample task
        """

        next_state = None
        next_agent_state = None
        result = None

        return next_state, (next_agent_state, result)

    def _handler_autosample_stop_autosample(self, *args, **kwargs):
        """
        Remove the auto sample task. Exit Auto sample state
        """
        result = None

        # Stop the Auto Poll scheduling
        self._remove_scheduler(ScheduledJob.AUTO_SAMPLE)

        next_state = ProtocolState.COMMAND
        next_agent_state = ResourceAgentState.COMMAND
        return next_state, (next_agent_state, result)

    ########################################################################
    # Direct access handlers.
    ########################################################################

    def _handler_direct_access_enter(self, *args, **kwargs):
        """
        Enter direct access state.
        """
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
        self._sent_cmds = []

    def _handler_direct_access_exit(self, *args, **kwargs):
        """
        Exit direct access state.
        """
        pass

    def _handler_direct_access_execute_direct(self, data):
        """
        Execute direct command
        """
        next_state = None
        result = None
        next_agent_state = None

        self._do_cmd_direct(data)

        # add sent command to list for 'echo' filtering in callback
        self._sent_cmds.append(data)

        return next_state, (next_agent_state, result)

    def _handler_direct_access_stop_direct(self):
        """
        Stop direct access and return to COMMAND state.
        @throw InstrumentProtocolException on invalid command
        """
        result = None

        next_state = ProtocolState.COMMAND
        next_agent_state = ResourceAgentState.COMMAND

        return next_state, (next_agent_state, result)

    def _build_simple_command(self, cmd, *args):
        """
        Build handler for basic THSPH commands.
        @param cmd the simple ooicore command to format.
        @retval The command to be sent to the device.
        @throws InstrumentException if the command is not recognized.
        """
        instrument_series = self._param_dict.get(Parameter.INSTRUMENT_SERIES)

        if cmd == Command.GET_SAMPLE:
            instrument_cmd = self.THSPH_COMMANDS[instrument_series][
                Command.GET_SAMPLE]
        else:
            raise InstrumentException('Unknown THSPH driver command  %s' % cmd)

        return "%s%s" % (instrument_cmd, NEWLINE)

    def _wakeup(self, wakeup_timeout=0, response_timeout=0):
        """
        There is no wakeup for this instrument.  Do nothing.
        @param wakeup_timeout The timeout to wake the device.
        @param response_timeout The time to look for response to a wakeup attempt.
        """
        pass
예제 #12
0
class NutnrBDclParser(Parser):
    """
    Parser for nutnr_b_dcl data.
    In addition to the standard parser constructor parameters,
    this constructor needs the following additional parameters:
      - instrument particle class
      - metadata particle class
      - frame_types tuple
    """
    # NOTE: the docstring above must precede any class-body statement
    # (previously it followed __metaclass__, making it a dead string
    # expression rather than the class docstring).

    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, config, stream_handle, state_callback, publish_callback,
                 exception_callback, instrument_particle_class,
                 metadata_particle_class, frame_types):

        super(NutnrBDclParser,
              self).__init__(config, stream_handle, None, None, state_callback,
                             publish_callback, exception_callback)

        # Initialize the parsing state and the particle record buffer.
        self._file_parsed = False
        self._record_buffer = []
        self._metadata_state = 0            # bitmask of metadata records seen
        self._metadata_timestamp = 0.0
        self._metadata_particle_generated_for_block = False

        # Save the names of the particle classes to be generated.
        self._metadata_particle_class = metadata_particle_class
        self._instrument_particle_class = instrument_particle_class

        # Save the input frame types
        self._frame_types = frame_types

    def _extract_metadata_unix_timestamp(self, idle_match):
        """
        This function will create a timestamp to be used as the internal
        timestamp for the metadata particle is generated.
        """

        # calculate the metadata particle internal timestamp
        # from the DCL timestamp.

        utc_time = dcl_controller_timestamp_to_utc_time(
            idle_match.group(MetaDataMatchGroups.META_GROUP_DCL_TIMESTAMP))

        return utc_time

    def _extract_instrument_ntp_timestamp(self, inst_match):
        """
        This function will create a timestamp to be used as the internal
        timestamp for the instrument particle is generated.
        """

        # calculate the instrument particle internal timestamp
        # from the DCL timestamp.

        return dcl_controller_timestamp_to_ntp_time(
            inst_match.group(
                InstrumentDataMatchGroups.INST_GROUP_DCL_TIMESTAMP))

    def _process_idle_metadata_record(self, idle_match):
        """
        This function processes an Idle State metadata record.
        It will create a timestamp to be used as the internal timestamp for the
        metadata particle is generated.
        """

        self._metadata_timestamp = ntplib.system_to_ntp_time(
            self._extract_metadata_unix_timestamp(idle_match))

    def _create_instrument_particle(self, inst_match):
        # Subclasses must supply the instrument-particle construction.
        raise NotImplementedException(
            "The _create_instrument_particle must be implemented by the inheriting class!"
        )

    def _process_instrument_record_match(self, inst_match):
        """
        This function processes an instrument data match record.
        It will return the list of data particles generated.
        """
        # If the frame type is not DARK or LIGHT,
        # raise a recoverable sample exception.

        frame_type = inst_match.group(
            InstrumentDataMatchGroups.INST_GROUP_FRAME_TYPE)

        if frame_type != self._frame_types[FRAME_TYPE_DARK_INDEX] \
                and frame_type != self._frame_types[FRAME_TYPE_LIGHT_INDEX]:
            error_message = 'Invalid frame type %s' % frame_type
            log.warn(error_message)
            self._exception_callback(RecoverableSampleException(error_message))

        else:

            # generate one metadata record if it has not already been done
            if self._metadata_state == ALL_METADATA_RECEIVED and self._metadata_particle_generated_for_block is False:

                # Fields for the metadata particle must be
                # in the same order as the RAW_INDEX_META_xxx values.
                # DCL Controller timestamp and serial number
                # are from the instrument data record.
                # Other data comes from the various metadata records
                # which has been accumulated in the Metadata State Table.
                meta_fields = [
                    value for state, matcher, value in METADATA_STATE_TABLE
                ]

                metadata_tuple = [
                    (NutnrBDataParticleKey.DCL_CONTROLLER_TIMESTAMP,
                     inst_match.group(
                         InstrumentDataMatchGroups.INST_GROUP_DCL_TIMESTAMP),
                     str),
                    (NutnrBDataParticleKey.SERIAL_NUMBER,
                     inst_match.group(
                         InstrumentDataMatchGroups.INST_GROUP_SERIAL_NUMBER),
                     str),
                    (NutnrBDataParticleKey.STARTUP_TIME, meta_fields[0], int),
                    (NutnrBDataParticleKey.SPEC_ON_TIME, meta_fields[1], int),
                    (NutnrBDataParticleKey.SPEC_POWERED_TIME, meta_fields[2],
                     int),
                    (NutnrBDataParticleKey.LAMP_ON_TIME, meta_fields[3], int),
                    (NutnrBDataParticleKey.LAMP_POWERED_TIME, meta_fields[4],
                     int),
                    (NutnrBDataParticleKey.DATA_LOG_FILE, meta_fields[5], str)
                ]

                particle = self._extract_sample(self._metadata_particle_class,
                                                None, metadata_tuple,
                                                self._metadata_timestamp)

                if particle is not None:
                    self._record_buffer.append(particle)
                    self._metadata_particle_generated_for_block = True

            # Generate the instrument particle for this record.
            particle = self._create_instrument_particle(inst_match)
            if particle is not None:
                self._record_buffer.append(particle)

    def _process_next_wakeup_match(self):
        """
        Reset metadata accumulation at the start of a new wakeup block.
        """
        # Clear the metadata state
        self._metadata_state = 0

        # Reset the flag to indicate that we have not generated the metadata particle
        self._metadata_particle_generated_for_block = False

    def _process_metadata_record_part(self, line):
        """
        This function checks to see if a metadata record is contained
        in this chunk.
        """

        match_found = False

        for table_data in METADATA_STATE_TABLE:
            state, matcher, value = table_data
            match = matcher.match(line)

            # If we get a match, it's one of the metadata records
            # that we're interested in.

            if match is not None:

                match_found = True

                # Update the state to reflect that we've got
                # this particular metadata record.

                self._metadata_state |= state

                # For all matchers except the LOG_FILE matcher,
                # convert the instrument time to seconds since
                # Jan 1, 1970 (Unix Epoch time).

                if matcher != LOG_FILE_MATCHER:
                    table_data[
                        METADATA_VALUE_INDEX] = self._extract_metadata_unix_timestamp(
                            match)

                # For the LOG_FILE matcher, save the name of the log file.
                else:
                    table_data[METADATA_VALUE_INDEX] = match.group(
                        MetaDataMatchGroups.META_GROUP_INST_LOGFILE)

        if match_found is False:
            error_message = 'Unexpected metadata found: ' + line
            log.warn(error_message)
            self._exception_callback(UnexpectedDataException(error_message))

    def parse_file(self):
        """
        Parse file and collect particles
        """
        raise NotImplementedException(
            "The parse_file must be implemented by the inheriting class!")

    def get_records(self, num_records_requested=1):
        """
        Returns a list of particles that is  equal to the num_records_requested when there are that many particles
        are available or a list of particles less than the num_records_requested when there are fewer than
        num_records_requested available.
        """
        particles_to_return = []

        if num_records_requested > 0:

            # If the file was not read, let's parse it
            if self._file_parsed is False:
                self.parse_file()

            # Iterate through the particles returned, and pop them off from the beginning of the record
            # buffer to the end
            while len(particles_to_return) < num_records_requested and len(
                    self._record_buffer) > 0:
                particles_to_return.append(self._record_buffer.pop(0))

        return particles_to_return
예제 #13
0
class CamhdAParser(SimpleParser):
    """
    Parser for camhd_a video files
    """

    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, config, stream_handle, exception_callback):
        """
        Initialize the camhd_a parser, which does not use state or the chunker
        and sieve functions.
        @param config: The parser configuration dictionary
        @param stream_handle: The stream handle of the file to parse
        @param exception_callback: The callback to use when an exception occurs
        """

        super(CamhdAParser, self).__init__(config, stream_handle,
                                           exception_callback)

    def recov_exception_callback(self, message):
        """
        Log the message and report a RecoverableSampleException to the
        registered exception callback.
        @param message: description of the recoverable problem.
        """
        log.warn(message)
        self._exception_callback(RecoverableSampleException(message))

    @staticmethod
    def find_matching_mp4(log_file_path, sensor, date):
        """
        Find associated MP4 file for the given camera log file.
        :param log_file_path: fully qualified file path of log file in rsn archive
        :param sensor: sensor name (does not include port number prefix - e.g. 'CAMHDA301')
        :param date: gregorian date in 'YYYYMMDD' format
        :return: relative path in the raw data server for the associated MP4 file
        TODO - have to determine reference designator if we add more HD cameras
        """
        # TODO - deterministically fill reference designator from ingest parameters
        subsite = 'RS03ASHS'
        node = 'PN03B'
        sensor = '06-' + sensor

        year = date[0:4]
        month = date[4:6]
        day = date[6:8]
        filename = os.path.basename(log_file_path)
        fileroot = os.path.splitext(filename)[0]
        mp4_filename = '.'.join((fileroot, 'mp4'))
        mp4_file_path = os.path.join(subsite, node, sensor, year, month, day,
                                     mp4_filename)
        return mp4_file_path

    def parse_file(self):
        """
        Parse the *.log file.
        """
        match = FILE_PATH_MATCHER.match(self._stream_handle.name)
        if match:
            sensor = match.group('Sensor')
            date = match.group('Date')
            time = match.group('Time')
            file_datetime = date + time
            time_stamp = ntplib.system_to_ntp_time(
                utilities.formatted_timestamp_utc_time(file_datetime,
                                                       TIMESTAMP_FORMAT))

            # Extract a particle and append it to the record buffer
            mp4_file_path = self.find_matching_mp4(self._stream_handle.name,
                                                   sensor, date)
            particle = self._extract_sample(CamhdAInstrumentDataParticle,
                                            None,
                                            mp4_file_path,
                                            internal_timestamp=time_stamp)
            # _extract_sample may return None; guard so we neither crash on
            # generate_dict() nor buffer a None particle.
            if particle is not None:
                log.debug('Parsed particle: %s', particle.generate_dict())
                self._record_buffer.append(particle)
            else:
                self.recov_exception_callback(
                    "Unable to create particle for input file: %s" %
                    self._stream_handle.name)

        else:
            # Files retrieved from the instrument should always match the timestamp naming convention
            self.recov_exception_callback(
                "Unable to extract file time from input file name: %s."
                "Expected format REFDES-YYYYmmddTHHMMSSZ.log" %
                self._stream_handle.name)
예제 #14
0
파일: dpc.py 프로젝트: JeffRoy/mi-dataset
"""
@package mi.dataset.parser.dpc
@file marine-integrations/mi/dataset/parser/dpc.py
@author Pete Cable
"""
import msgpack
import ntplib
import struct
from mi.core.exceptions import SampleException, RecoverableSampleException
from mi.core.instrument.data_particle import DataParticle, DataParticleKey
from mi.core.log import get_logger, get_logging_metaclass
from mi.dataset.dataset_parser import SimpleParser
from mi.core.common import BaseEnum

log = get_logger()
METACLASS = get_logging_metaclass("trace")

__author__ = "Peter Cable"
__license__ = "Apache 2.0"


ACS_STRUCT = struct.Struct(">BB4s7HIBB340H")


class DataParticleType(BaseEnum):
    """Stream names for the Deep Profiler (DPC) recovered data particles."""
    # Data particle types for the Deep Profiler
    ACM = "dpc_acm_instrument_recovered"        # acoustic current meter
    ACS = "dpc_acs_instrument_recovered"        # spectral absorption/attenuation (ac-s)
    CTD = "dpc_ctd_instrument_recovered"        # conductivity/temperature/depth
    FLCD = "dpc_flcdrtd_instrument_recovered"   # fluorometer (CDOM) -- presumably; confirm against IDD
    FLNTU = "dpc_flnturtd_instrument_recovered"  # fluorometer (chlorophyll/turbidity) -- presumably; confirm against IDD
예제 #15
0
from threading import Thread

from mi.core.common import BaseEnum
from mi.core.exceptions import TestModeException
from mi.core.exceptions import NotImplementedException
from mi.core.exceptions import InstrumentException
from mi.core.exceptions import InstrumentParameterException
from mi.core.exceptions import InstrumentConnectionException
from mi.core.instrument.instrument_fsm import ThreadSafeFSM
from mi.core.instrument.port_agent_client import PortAgentClient

from mi.core.log import get_logger, LoggerManager, get_logging_metaclass

log = get_logger()

META_LOGGER = get_logging_metaclass('trace')

STARTING_RECONNECT_INTERVAL = .5
MAXIMUM_RECONNECT_INTERVAL = 256
MAXIMUM_CONSUL_QUERIES = 5
MAXIMUM_BACKOFF = 5  # seconds


class ConfigMetadataKey(BaseEnum):
    """
    Keys used in the metadata structure that describes the driver, commands,
    and parameters used in the driver and protocol.
    """
    DRIVER = 'driver'          # driver-level metadata section
    COMMANDS = 'commands'      # command dictionary section
    PARAMETERS = 'parameters'  # parameter dictionary section
예제 #16
0
class SatlanticPARInstrumentProtocol(CommandResponseInstrumentProtocol):
    """The instrument protocol classes to deal with a Satlantic PAR sensor.
    The protocol is a very simple command/response protocol with a few show
    commands and a few set commands.
    Note protocol state machine must be called "self._protocol_fsm"
    """

    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, callback=None):
        """
        Build the protocol state machine, response handlers, command/driver
        dictionaries, parameter dictionary and chunker.
        @param callback: driver process event callback
        """
        CommandResponseInstrumentProtocol.__init__(self, Prompt, EOLN, callback)

        self._protocol_fsm = InstrumentFSM(PARProtocolState, PARProtocolEvent, PARProtocolEvent.ENTER, PARProtocolEvent.EXIT)

        self._protocol_fsm.add_handler(PARProtocolState.UNKNOWN, PARProtocolEvent.ENTER, self._handler_unknown_enter)
        self._protocol_fsm.add_handler(PARProtocolState.UNKNOWN, PARProtocolEvent.DISCOVER, self._handler_unknown_discover)

        self._protocol_fsm.add_handler(PARProtocolState.COMMAND, PARProtocolEvent.ENTER, self._handler_command_enter)
        self._protocol_fsm.add_handler(PARProtocolState.COMMAND, PARProtocolEvent.GET, self._handler_get)
        self._protocol_fsm.add_handler(PARProtocolState.COMMAND, PARProtocolEvent.SET, self._handler_command_set)
        self._protocol_fsm.add_handler(PARProtocolState.COMMAND, PARProtocolEvent.ACQUIRE_SAMPLE, self._handler_poll_acquire_sample)
        self._protocol_fsm.add_handler(PARProtocolState.COMMAND, PARProtocolEvent.ACQUIRE_STATUS, self._handler_acquire_status)
        self._protocol_fsm.add_handler(PARProtocolState.COMMAND, PARProtocolEvent.SCHEDULED_ACQUIRE_STATUS, self._handler_acquire_status)
        self._protocol_fsm.add_handler(PARProtocolState.COMMAND, PARProtocolEvent.START_AUTOSAMPLE, self._handler_command_start_autosample)
        self._protocol_fsm.add_handler(PARProtocolState.COMMAND, PARProtocolEvent.START_DIRECT, self._handler_command_start_direct)

        self._protocol_fsm.add_handler(PARProtocolState.AUTOSAMPLE, PARProtocolEvent.ENTER, self._handler_autosample_enter)
        self._protocol_fsm.add_handler(PARProtocolState.AUTOSAMPLE, PARProtocolEvent.STOP_AUTOSAMPLE, self._handler_autosample_stop_autosample)
        self._protocol_fsm.add_handler(PARProtocolState.AUTOSAMPLE, PARProtocolEvent.SCHEDULED_ACQUIRE_STATUS, self._handler_autosample_acquire_status)

        self._protocol_fsm.add_handler(PARProtocolState.DIRECT_ACCESS, PARProtocolEvent.ENTER, self._handler_direct_access_enter)
        self._protocol_fsm.add_handler(PARProtocolState.DIRECT_ACCESS, PARProtocolEvent.EXECUTE_DIRECT, self._handler_direct_access_execute_direct)
        self._protocol_fsm.add_handler(PARProtocolState.DIRECT_ACCESS, PARProtocolEvent.STOP_DIRECT, self._handler_direct_access_stop_direct)

        self._protocol_fsm.start(PARProtocolState.UNKNOWN)

        self._add_response_handler(Command.GET, self._parse_get_response)
        self._add_response_handler(Command.SET, self._parse_set_response)
        self._add_response_handler(Command.SAMPLE, self._parse_response)

        # Construct the parameter dictionary containing device parameters,
        # current parameter values, and set formatting functions.
        self._build_cmd_dict()
        self._build_driver_dict()

        self._param_dict.add(Parameter.MAXRATE,
                             MAXRATE_PATTERN,
                             lambda match: float(match.group(1)),
                             self._float_or_int_to_string,
                             direct_access=True,
                             startup_param=True,
                             init_value=4,
                             display_name='Max Rate',
                             description='Maximum sampling rate (0 (Auto) | 0.125 | 0.5 | 1 | 2 | 4 | 8 | 10 | 12)',
                             type=ParameterDictType.FLOAT,
                             units=Units.HERTZ,
                             visibility=ParameterDictVisibility.READ_WRITE)

        self._param_dict.add(Parameter.INSTRUMENT,
                             HEADER_PATTERN,
                             lambda match: match.group(1),
                             str,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             display_name='Instrument Type',
                             description="",
                             type=ParameterDictType.STRING,
                             startup_param=True)

        self._param_dict.add(Parameter.SERIAL,
                             HEADER_PATTERN,
                             lambda match: match.group(1),
                             str,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             display_name='Serial Number',
                             description="",
                             type=ParameterDictType.STRING,
                             startup_param=True)

        self._param_dict.add(Parameter.FIRMWARE,
                             HEADER_PATTERN,
                             lambda match: match.group(1),
                             str,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             display_name='Firmware Version',
                             description="",
                             type=ParameterDictType.STRING,
                             startup_param=True)

        self._param_dict.add(Parameter.ACQUIRE_STATUS_INTERVAL,
                             INTERVAL_TIME_REGEX,
                             lambda match: match.group(1),
                             str,
                             display_name="Acquire Status Interval",
                             description='Interval for gathering status particles.',
                             type=ParameterDictType.STRING,
                             units=ParameterUnits.TIME_INTERVAL,
                             visibility=ParameterDictVisibility.READ_WRITE,
                             default_value='00:00:00',
                             startup_param=True)

        self._chunker = StringChunker(SatlanticPARInstrumentProtocol.sieve_function)

    def _build_cmd_dict(self):
        """
        Build a command dictionary structure, load the strings for the metadata from a file if present.
        """
        self._cmd_dict = ProtocolCommandDict()
        self._cmd_dict.add(PARCapability.ACQUIRE_SAMPLE, display_name='Acquire Sample')
        self._cmd_dict.add(PARCapability.ACQUIRE_STATUS, display_name='Acquire Status')
        self._cmd_dict.add(PARCapability.START_AUTOSAMPLE, display_name='Start Autosample')
        self._cmd_dict.add(PARCapability.STOP_AUTOSAMPLE, display_name='Stop Autosample')
        self._cmd_dict.add(PARCapability.DISCOVER, display_name='Discover')

    def _build_driver_dict(self):
        """
        Build a driver dictionary structure, load the strings for the metadata from a file if present.
        """
        self._driver_dict = DriverDict()
        self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)

    @staticmethod
    def sieve_function(raw_data):
        """
        The method that splits samples
        @param raw_data: raw bytes/string from the port agent
        @return: list of (start, end) index pairs for matched chunks
        """
        matchers = [SAMPLE_REGEX, MAXANDBAUDRATE_REGEX]
        return_list = []

        for matcher in matchers:
            for match in matcher.finditer(raw_data):
                return_list.append((match.start(), match.end()))
                log.trace("sieve_function: regex found %r", raw_data[match.start():match.end()])

        return return_list

    def _filter_capabilities(self, events):
        """
        Return only the events that are exposed PAR capabilities.
        """
        events_out = [x for x in events if PARCapability.has(x)]
        return events_out

    def _do_cmd(self, cmd, *args, **kwargs):
        """
        Issue a command to the instrument after clearing of buffers.

        @param cmd The command to execute.
        @param args positional arguments to pass to the build handler.
        @retval The fully built command that was sent
        @raises InstrumentProtocolException if command could not be built.
        """
        expected_prompt = kwargs.get('expected_prompt', None)
        cmd_line = self._build_default_command(cmd, *args)

        # Send command.
        log.debug('_do_cmd: %s, length=%s' % (repr(cmd_line), len(cmd_line)))
        if len(cmd_line) == 1:
            self._connection.send(cmd_line)
        else:
            # Send one character at a time and wait for each echo (bounded by
            # a 3-second timeout) to cope with the slow instrument interface.
            for char in cmd_line:
                starttime = time.time()
                self._connection.send(char)
                while len(self._promptbuf) == 0 or char not in self._promptbuf[-1]:
                    time.sleep(0.0015)
                    if time.time() > starttime + 3:
                        break

            # Keep for reference: This is a reliable alternative, but not fully explained & may not work in the future.
            # It somehow corrects bit rate timing issues across the driver-digi-instrument network interface,
            # & allows the entire line of a commands to be sent successfully.
            if EOLN not in cmd_line:    # Note: Direct access commands may already include an EOLN
                time.sleep(0.115)
                starttime = time.time()
                self._connection.send(EOLN)
                while EOLN not in self._promptbuf[len(cmd_line):len(cmd_line) + 2] and Prompt.ENTER_EXIT_CMD_MODE \
                           not in self._promptbuf[len(cmd_line):len(cmd_line) + 2]:
                    time.sleep(0.0015)
                    if time.time() > starttime + 3:
                        break

                # Limit resend_check_value from expected_prompt to one of the two below
                resend_check_value = None
                if expected_prompt is not None:
                    for check in (Prompt.COMMAND, Prompt.SAMPLES):
                        if check in expected_prompt:
                            log.trace('_do_cmd: command: %s, check=%s' % (cmd_line, check))
                            resend_check_value = check

                # Resend the EOLN if it did not go through the first time
                starttime = time.time()
                if resend_check_value is not None:
                    while True:
                        time.sleep(0.1)
                        if time.time() > starttime + 2:
                            log.debug("Sending eoln again.")
                            self._connection.send(EOLN)
                            starttime = time.time()
                        if resend_check_value in self._promptbuf:
                            break
                        if PARProtocolError.INVALID_COMMAND in self._promptbuf:
                            break

        return cmd_line

    def _do_cmd_no_resp(self, cmd, *args, **kwargs):
        """
        Issue a command to the instrument after clearing of buffers. No response is handled as a result of the command.
        Overridden: special "write delay" & command resending
        reliability improvements, no need for wakeup, default build command used for all commands
        @param cmd The command to execute.
        @param args positional arguments to pass to the build handler.
        @raises InstrumentProtocolException if command could not be built.
        """
        self._do_cmd(cmd, *args, **kwargs)

    def _do_cmd_resp(self, cmd, *args, **kwargs):
        """
        Perform a command-response on the device. Overridden: special "write delay" & command resending
        reliability improvements, no need for wakeup, default build command used for all commands
        @param cmd The command to execute.
        @param args positional arguments to pass to the build handler.
        @param expected_prompt kwarg offering a specific prompt to look for
        other than the ones in the protocol class itself.
        @param response_regex kwarg with a compiled regex for the response to
        match. Groups that match will be returned as a string.
        Cannot be supplied with expected_prompt. May be helpful for instruments that do not have a prompt.
        @retval resp_result The (possibly parsed) response result including the
        first instance of the prompt matched. If a regex was used, the prompt
        will be an empty string and the response will be the joined collection of matched groups.
        @raises InstrumentTimeoutException if the response did not occur in time.
        @raises InstrumentProtocolException if command could not be built or if response was not recognized.
        @raises InstrumentCommandException if the command could not be confirmed after all retries.
        """
        timeout = kwargs.get('timeout', DEFAULT_CMD_TIMEOUT)
        expected_prompt = kwargs.get('expected_prompt', None)
        response_regex = kwargs.get('response_regex', None)

        if response_regex and not isinstance(response_regex, RE_PATTERN):
            raise InstrumentProtocolException('Response regex is not a compiled pattern!')

        if expected_prompt and response_regex:
            raise InstrumentProtocolException('Cannot supply both regex and expected prompt!')

        retry_count = 5
        retry_num = 0
        cmd_line = ""
        result = ""
        prompt = ""
        for retry_num in xrange(retry_count):
            # Clear line and prompt buffers for result.
            self._linebuf = ''
            self._promptbuf = ''

            cmd_line = self._do_cmd(cmd, *args, **kwargs)

            # Wait for the prompt, prepare result and return, timeout exception
            if response_regex:
                result_tuple = self._get_response(timeout, response_regex=response_regex,
                                                  expected_prompt=expected_prompt)
                result = "".join(result_tuple)
            else:
                (prompt, result) = self._get_response(timeout, expected_prompt=expected_prompt)

            # Confirm the entire command was sent, otherwise resend retry_count number of times
            if len(cmd_line) > 1 and \
                    (expected_prompt is not None or response_regex is not None) and \
                    not result.startswith(cmd_line):
                log.debug("_do_cmd_resp: Send command: %s failed %s attempt, result = %s.", cmd, retry_num, result)
                # BUG FIX: retry_num ranges over 0..retry_count-1, so the previous
                # check (retry_num >= retry_count) could never fire and exhausted
                # retries were silently ignored. Raise on the final attempt instead.
                if retry_num >= retry_count - 1:
                    raise InstrumentCommandException('_do_cmd_resp: Failed %s attempts sending command: %s' %
                                                     (retry_count, cmd))
            else:
                break

        log.debug("_do_cmd_resp: Sent command: %s, %s reattempts, expected_prompt=%s, result=%s.",
                  cmd_line, retry_num, expected_prompt, result)

        resp_handler = self._response_handlers.get((self.get_current_state(), cmd), None) or \
            self._response_handlers.get(cmd, None)
        resp_result = None
        if resp_handler:
            resp_result = resp_handler(result, prompt)

        time.sleep(0.3)     # give some time for the instrument connection to keep up

        return resp_result

    ########################################################################
    # Unknown handlers.
    ########################################################################
    def _handler_unknown_enter(self):
        """
        Enter unknown state.
        """
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_unknown_discover(self):
        """
        Discover current state; can be COMMAND or AUTOSAMPLE.
        @retval (next_state, result), (PARProtocolState.COMMAND or PARProtocolState.AUTOSAMPLE, None).
        """
        try:
            probe_resp = self._do_cmd_resp(Command.SAMPLE, timeout=2,
                                           expected_prompt=[Prompt.SAMPLES, PARProtocolError.INVALID_COMMAND])
        except InstrumentTimeoutException:
            # No response to the probe: assume the instrument is streaming
            self._do_cmd_resp(Command.SWITCH_TO_AUTOSAMPLE, expected_prompt=Prompt.SAMPLES, timeout=15)
            return PARProtocolState.AUTOSAMPLE, ResourceAgentState.STREAMING

        log.trace("_handler_unknown_discover: returned: %s", probe_resp)
        if probe_resp == PARProtocolError.INVALID_COMMAND:
            return PARProtocolState.COMMAND, ResourceAgentState.IDLE
        else:
            # Put the instrument into full autosample in case it isn't already (could be in polled mode)
            self._do_cmd_resp(Command.SWITCH_TO_AUTOSAMPLE, expected_prompt=Prompt.SAMPLES, timeout=15)
            return PARProtocolState.AUTOSAMPLE, ResourceAgentState.STREAMING

    ########################################################################
    # Command handlers.
    ########################################################################
    def _handler_command_enter(self):
        """
        Enter command state.
        """
        # Command device to update parameters and send a config change event.
        if self._init_type != InitializationType.NONE:
            self._update_params()

        self._init_params()
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _update_params(self):
        """
        Fetch the parameters from the device, and update the param dict.
        """
        max_rate_response = self._do_cmd_resp(Command.GET, Parameter.MAXRATE, expected_prompt=Prompt.COMMAND)
        self._param_dict.update(max_rate_response)

    def _set_params(self, params, startup=False, *args, **kwargs):
        """
        Issue commands to the instrument to set various parameters
        Also called when setting parameters during startup and direct access

        Issue commands to the instrument to set various parameters.  If
        startup is set to true that means we are setting startup values
        and immutable parameters can be set.  Otherwise only READ_WRITE
        parameters can be set.

        @param params dictionary containing parameter name and value
        @param startup bool True is we are initializing, False otherwise
        @raise InstrumentParameterException
        """
        # Retrieve required parameter from args.
        # Raise exception if no parameter provided, or not a dict.

        scheduling_interval_changed = False
        instrument_params_changed = False
        old_config = self._param_dict.get_all()

        if not isinstance(params, dict):
            raise InstrumentParameterException('Set params requires a parameter dict.')

        self._verify_not_readonly(params, startup)

        for name, value in params.iteritems():

            old_val = self._param_dict.format(name)
            new_val = self._param_dict.format(name, params[name])

            log.debug('Changing param %r OLD = %r, NEW %r', name, old_val, new_val)

            if name == Parameter.MAXRATE:
                if value not in VALID_MAXRATES:
                    raise InstrumentParameterException("Maxrate %s out of range" % value)

                if old_val != new_val:
                    if self._do_cmd_resp(Command.SET, name, new_val, expected_prompt=Prompt.COMMAND):
                        instrument_params_changed = True
            elif name == Parameter.ACQUIRE_STATUS_INTERVAL:
                if old_val != new_val:
                    self._param_dict.set_value(name, new_val)
                    scheduling_interval_changed = True
            elif name in [Parameter.FIRMWARE, Parameter.INSTRUMENT, Parameter.SERIAL]:
                self._param_dict.set_value(name, new_val)
            else:
                raise InstrumentParameterException("Parameter not in dictionary: %s" % name)

        if instrument_params_changed:
            self._do_cmd_resp(Command.SAVE, expected_prompt=Prompt.COMMAND)
            self._update_params()

        if scheduling_interval_changed and not startup:
            self._setup_scheduler_config()

        new_config = self._param_dict.get_all()
        log.debug("Updated parameter dict: old_config = %s, new_config = %s", old_config, new_config)
        if new_config != old_config:
            self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)

        for name in params.keys():
            if self._param_dict.format(name, params[name]) != self._param_dict.format(name):
                raise InstrumentParameterException('Failed to update parameter: %s' % name)

    def _handle_scheduling_params_changed(self):
        """
        Required actions when scheduling parameters change
        """
        self._setup_scheduler_config()

    def _setup_scheduler_config(self):
        """
        Set up auto scheduler configuration.
        """
        interval = self._param_dict.format(Parameter.ACQUIRE_STATUS_INTERVAL).split(':')
        hours = int(interval[0])
        minutes = int(interval[1])
        seconds = int(interval[2])
        log.debug("Setting scheduled interval to: %s %s %s", hours, minutes, seconds)

        if DriverConfigKey.SCHEDULER in self._startup_config:
            self._startup_config[DriverConfigKey.SCHEDULER][ScheduledJob.ACQUIRE_STATUS] = {
                DriverSchedulerConfigKey.TRIGGER: {
                    DriverSchedulerConfigKey.TRIGGER_TYPE: TriggerType.INTERVAL,
                    DriverSchedulerConfigKey.HOURS: int(hours),
                    DriverSchedulerConfigKey.MINUTES: int(minutes),
                    DriverSchedulerConfigKey.SECONDS: int(seconds)}
            }
        else:

            self._startup_config[DriverConfigKey.SCHEDULER] = {
                ScheduledJob.ACQUIRE_STATUS: {
                    DriverSchedulerConfigKey.TRIGGER: {
                        DriverSchedulerConfigKey.TRIGGER_TYPE: TriggerType.INTERVAL,
                        DriverSchedulerConfigKey.HOURS: int(hours),
                        DriverSchedulerConfigKey.MINUTES: int(minutes),
                        DriverSchedulerConfigKey.SECONDS: int(seconds)}
                },
            }

        # Start the scheduler if it is not running
        if not self._scheduler:
            self.initialize_scheduler()

        # First remove the scheduler, if it exists
        if not self._scheduler_callback.get(ScheduledJob.ACQUIRE_STATUS) is None:
            self._remove_scheduler(ScheduledJob.ACQUIRE_STATUS)
            log.debug("Removed scheduler for acquire status")

        # Now Add the scheduler
        if hours > 0 or minutes > 0 or seconds > 0:
            self._add_scheduler_event(ScheduledJob.ACQUIRE_STATUS, PARProtocolEvent.SCHEDULED_ACQUIRE_STATUS)

    def _handler_command_set(self, *args, **kwargs):
        """
        Handle setting data from command mode.
        @param params Dict of the parameters and values to pass to the state
        @retval return (next state, result)
        """
        self._set_params(*args, **kwargs)
        return None, None

    def _handler_command_start_autosample(self):
        """
        Handle getting a start autosample event when in command mode
        @retval return (next state, result)
        """
        self._do_cmd_resp(Command.EXIT, expected_prompt=Prompt.SAMPLES, timeout=15)
        time.sleep(0.115)
        self._do_cmd_resp(Command.SWITCH_TO_AUTOSAMPLE, expected_prompt=Prompt.SAMPLES, timeout=15)
        return PARProtocolState.AUTOSAMPLE, (ResourceAgentState.STREAMING, None)

    def _handler_command_start_direct(self):
        """
        Transition into direct access mode.
        @retval return (next state, result)
        """
        return PARProtocolState.DIRECT_ACCESS, (ResourceAgentState.DIRECT_ACCESS, None)

    ########################################################################
    # Autosample handlers.
    ########################################################################
    def _handler_autosample_enter(self):
        """
        Handle PARProtocolState.AUTOSAMPLE PARProtocolEvent.ENTER
        @retval return (next state, result)
        """
        if self._init_type != InitializationType.NONE:
            # Need command mode to apply init params, then resume streaming
            self._handler_autosample_stop_autosample()
            self._update_params()
            self._handler_command_start_autosample()

        self._init_params()

        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
        return None, None

    def _handler_autosample_stop_autosample(self):
        """
        Handle PARProtocolState.AUTOSAMPLE stop
        @retval return (next state, result)
        @throw InstrumentProtocolException For hardware error
        """
        try:
            self._send_break()
        except InstrumentException as e:
            log.debug("_handler_autosample_stop_autosample error: %s", e)
            raise InstrumentProtocolException(error_code=InstErrorCode.HARDWARE_ERROR,
                                              msg="Couldn't break from autosample!")

        return PARProtocolState.COMMAND, (ResourceAgentState.COMMAND, None)
예제 #17
0
class Protocol(CommandResponseInstrumentProtocol):
    """
    Instrument protocol class
    Subclasses CommandResponseInstrumentProtocol
    """
    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, prompts, newline, driver_event):
        """
        Protocol constructor.
        @param prompts A BaseEnum class containing instrument prompts.
        @param newline The newline.
        @param driver_event Driver process event callback.
        """
        CommandResponseInstrumentProtocol.__init__(self, prompts, newline,
                                                   driver_event)

        self._protocol_fsm = InstrumentFSM(ProtocolState, ProtocolEvent,
                                           ProtocolEvent.ENTER,
                                           ProtocolEvent.EXIT)

        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.ENTER,
                                       self._handler_unknown_enter)
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.EXIT,
                                       self._handler_unknown_exit)
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.DISCOVER,
                                       self._handler_unknown_discover)

        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.ENTER,
                                       self._handler_command_enter)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.EXIT,
                                       self._handler_command_exit)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.START_AUTOSAMPLE,
                                       self._handler_command_start_autosample)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.START_DIRECT,
                                       self._handler_command_start_direct)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.GET,
                                       self._handler_command_get)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.SET,
                                       self._handler_command_set)

        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.ENTER,
                                       self._handler_autosample_enter)
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.EXIT,
                                       self._handler_autosample_exit)
        self._protocol_fsm.add_handler(
            ProtocolState.AUTOSAMPLE, ProtocolEvent.STOP_AUTOSAMPLE,
            self._handler_autosample_stop_autosample)

        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS,
                                       ProtocolEvent.ENTER,
                                       self._handler_direct_access_enter)
        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS,
                                       ProtocolEvent.EXIT,
                                       self._handler_direct_access_exit)
        self._protocol_fsm.add_handler(
            ProtocolState.DIRECT_ACCESS, ProtocolEvent.EXECUTE_DIRECT,
            self._handler_direct_access_execute_direct)
        self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS,
                                       ProtocolEvent.STOP_DIRECT,
                                       self._handler_direct_access_stop_direct)

        self._protocol_fsm.start(ProtocolState.UNKNOWN)

        self._chunker = StringChunker(Protocol.sieve_function)

        self._build_driver_dict()
        self._cmd_dict.add(Capability.DISCOVER, display_name='Discover')

    @staticmethod
    def sieve_function(raw_data):
        """
        The method that splits samples and status
        @param raw_data: raw bytes/string from the port agent
        @return: list of (start, end) index pairs for matched chunks
        """
        raw_data_len = len(raw_data)
        return_list = []

        # look for samples; only emit a chunk when the full packet (length
        # field plus checksum/pad) is already present in the buffer
        for match in PACKET_REGISTRATION_REGEX.finditer(raw_data):
            if match.start(
            ) + INDEX_OF_PACKET_RECORD_LENGTH + SIZE_OF_PACKET_RECORD_LENGTH < raw_data_len:
                packet_length = get_two_byte_value(
                    raw_data,
                    match.start() +
                    INDEX_OF_PACKET_RECORD_LENGTH) + SIZE_OF_CHECKSUM_PLUS_PAD

                if match.start() + packet_length <= raw_data_len:
                    return_list.append(
                        (match.start(), match.start() + packet_length))

        # look for status
        for match in STATUS_REGEX.finditer(raw_data):
            return_list.append((match.start(), match.end()))

        return return_list

    def _got_chunk(self, chunk, timestamp):
        """
        The base class got_data has gotten a chunk from the chunker.  Pass it to extract_sample
        with the appropriate particle objects and REGEXes.
        """

        # On a rare occurrence the particle sample coming in will be missing a byte
        # trap the exception thrown and log an error
        try:
            self._extract_sample(OptaaSampleDataParticle,
                                 PACKET_REGISTRATION_REGEX, chunk, timestamp)
            self._extract_sample(OptaaStatusDataParticle, STATUS_REGEX, chunk,
                                 timestamp)

        except SampleException as e:
            # BUG FIX: previously logged the class attribute
            # SampleException.message (a descriptor, not the error text);
            # bind and log the caught exception instance instead.
            log.error("Unable to process sample (%r)", e)

    def _filter_capabilities(self, events):
        """
        Return a list of currently available capabilities.
        """
        return [x for x in events if Capability.has(x)]

    def _build_driver_dict(self):
        """
        Populate the driver dictionary with options
        """
        self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)

    ########################################################################
    # Unknown handlers.
    ########################################################################
    def _handler_unknown_enter(self, *args, **kwargs):
        """Enter unknown state; notify the driver of the state change."""
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_unknown_exit(self, *args, **kwargs):
        """Exit unknown state; nothing to do."""
        pass

    def _handler_unknown_discover(self, *args, **kwargs):
        """
        Discover current state; can only be AUTOSAMPLE (instrument has no actual command mode).
        """
        return ProtocolState.AUTOSAMPLE, ResourceAgentState.STREAMING

    ########################################################################
    # Command handlers.
    # Implemented to make DA possible, instrument has no actual command mode
    ########################################################################
    def _handler_command_enter(self, *args, **kwargs):
        """Enter command state; notify the driver of the state change."""
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_command_exit(self, *args, **kwargs):
        """Exit command state; nothing to do."""
        pass

    def _handler_command_get(self, *args, **kwargs):
        """
        Does nothing, implemented to make framework happy
        """
        return None, None

    def _handler_command_set(self, *args, **kwargs):
        """
        Does nothing, implemented to make framework happy
        """
        return None, None

    def _handler_command_start_direct(self, *args, **kwargs):
        """Transition to direct access mode."""
        return ProtocolState.DIRECT_ACCESS, (ResourceAgentState.DIRECT_ACCESS,
                                             None)

    def _handler_command_start_autosample(self, *args, **kwargs):
        """Transition to autosample; no command needed, instrument always streams."""
        return ProtocolState.AUTOSAMPLE, (ResourceAgentState.STREAMING, None)

    ########################################################################
    # Autosample handlers.
    ########################################################################
    def _handler_autosample_enter(self, *args, **kwargs):
        """Enter autosample state; notify the driver of the state change."""
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_autosample_exit(self, *args, **kwargs):
        """Exit autosample state; nothing to do."""
        pass

    def _handler_autosample_stop_autosample(self):
        """Transition back to command state."""
        return ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None)

    ########################################################################
    # Direct access handlers.
    ########################################################################
    def _handler_direct_access_enter(self, *args, **kwargs):
        """Enter direct access state and reset the sent-command log."""
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
        self._sent_cmds = []

    def _handler_direct_access_exit(self, *args, **kwargs):
        """Exit direct access state; nothing to do."""
        pass

    def _handler_direct_access_execute_direct(self, data):
        """Pass raw direct-access data straight to the instrument."""
        self._do_cmd_direct(data)
        return None, None

    def _handler_direct_access_stop_direct(self):
        """
        Instead of using discover(), as is the norm, put instrument into
        Command state.  Instrument can only sample, even when in a command state.
        """
        return DriverProtocolState.COMMAND, (ResourceAgentState.COMMAND, None)
예제 #18
0
파일: dpc.py 프로젝트: GrimJ/mi-dataset
"""
@package mi.dataset.parser.dpc
@file marine-integrations/mi/dataset/parser/dpc.py
@author Pete Cable
"""
import msgpack
import ntplib
import struct
from mi.core.exceptions import SampleException, RecoverableSampleException
from mi.core.instrument.data_particle import DataParticle, DataParticleKey
from mi.core.log import get_logger, get_logging_metaclass
from mi.dataset.dataset_parser import SimpleParser
from mi.core.common import BaseEnum

log = get_logger()
# Metaclass that wraps methods with trace-level entry/exit logging.
METACLASS = get_logging_metaclass('trace')

__author__ = 'Peter Cable'
__license__ = 'Apache 2.0'


# Big-endian layout of a raw ACS record (byte, byte, 4-byte string, 7 shorts,
# uint, 2 bytes, 340 shorts) -- NOTE(review): field meanings not visible here;
# confirm against the ACS data format specification.
ACS_STRUCT = struct.Struct('>BB4s7HIBB340H')


class DataParticleType(BaseEnum):
    """Stream (particle type) names for Deep Profiler recovered data."""
    # Data particle types for the Deep Profiler
    ACM = 'dpc_acm_instrument_recovered'
    ACS = 'dpc_acs_instrument_recovered'
    CTD = 'dpc_ctd_instrument_recovered'
    FLCD = 'dpc_flcdrtd_instrument_recovered'
    FLNTU = 'dpc_flnturtd_instrument_recovered'
예제 #19
0
import os
import cPickle as pickle
from threading import Lock

from mi.core.driver_scheduler import DriverSchedulerConfigKey, TriggerType
from mi.core.exceptions import InstrumentProtocolException, InstrumentParameterException
from mi.core.instrument.data_particle import DataParticle
from mi.core.instrument.driver_dict import DriverDictKey
from mi.core.instrument.port_agent_client import PortAgentPacket
from mi.core.instrument.protocol_param_dict import ParameterDictVisibility, ParameterDictType
from mi.core.log import get_logger, get_logging_metaclass
from mi.instrument.antelope.orb.ooicore.packet_log import PacketLog, GapException


log = get_logger()
# Metaclass that wraps methods with info-level entry/exit logging.
meta = get_logging_metaclass('info')

from mi.core.common import BaseEnum, Units
from mi.core.persistent_store import PersistentStoreDict

from mi.core.instrument.instrument_driver import SingleConnectionInstrumentDriver, DriverConfigKey
from mi.core.instrument.instrument_driver import DriverProtocolState
from mi.core.instrument.instrument_driver import DriverEvent
from mi.core.instrument.instrument_driver import DriverAsyncEvent
from mi.core.instrument.instrument_driver import ResourceAgentState

from mi.core.instrument.instrument_protocol import InstrumentProtocol
from mi.core.instrument_fsm import ThreadSafeFSM


ORBOLDEST = -13  # Antelope ORB seek code selecting the oldest packet -- TODO confirm against Antelope orbseek docs
예제 #20
0
log = get_logger()

###
#    Driver Constant Definitions
###

NEWLINE = '\r'
TIMEOUT = 1      # seconds
ADDRESS = 1      # turbo pump bus address
QUERY = '=?'     # protocol token requesting a parameter value
TRUE = '111111'  # turbo protocol boolean encodings
FALSE = '000000'
# Raw string so the regex escapes (\d, \r) reach `re` unchanged; the original
# non-raw literal relied on '\d' being passed through, which is an invalid
# string escape in Python 3.
TURBO_RESPONSE = re.compile(r'^(\d*)\r')
MAX_RETRIES = 5
CURRENT_STABILIZE_RETRIES = 5
META_LOGGER = get_logging_metaclass()


class ScheduledJob(BaseEnum):
    """
    All scheduled jobs for this driver
    """
    # Periodic status acquisition is the only scheduled job.
    ACQUIRE_STATUS = DriverEvent.ACQUIRE_STATUS


class CommandType(BaseEnum):
    """
    Command types for the turbo. Used by build_command.
    """
    QUERY = 0   # read a parameter value
    SET = 10    # write a parameter value -- NOTE(review): value 10 presumably matches the pump protocol's action code; confirm
예제 #21
0
import struct
import exceptions
import os
import numpy as np
from ctypes import *
from mi.core.exceptions import SampleException, RecoverableSampleException
from mi.core.instrument.dataset_data_particle import DataParticle, DataParticleKey
from mi.core.log import get_logger, get_logging_metaclass
from mi.dataset.dataset_parser import SimpleParser
from mi.core.common import BaseEnum
from datetime import datetime
from mi.dataset.driver.zplsc_c.zplsc_c_echogram import ZPLSCCPlot
from mi.dataset.driver.zplsc_c.zplsc_c_echogram import ZPLSCCEchogram

log = get_logger()
# Metaclass that wraps methods with trace-level entry/exit logging.
METACLASS = get_logging_metaclass('trace')

__author__ = 'Rene Gelinas'
__license__ = 'Apache 2.0'


# Marker identifying the start of a profile data record in the raw file.
PROFILE_DATA_DELIMITER = '\xfd\x02'  # Byte Offset 0 and 1


class DataParticleType(BaseEnum):
    """Stream (particle type) name for zplsc_c echogram data."""
    # ZPLSC_C_PARTICLE_TYPE = 'zplsc_c_recovered'
    ZPLSC_C_PARTICLE_TYPE = 'zplsc_echogram_data'


class ZplscCParticleKey(BaseEnum):
    """
예제 #22
0
class Protocol(CommandResponseInstrumentProtocol):
    """
    Instrument protocol class
    Subclasses CommandResponseInstrumentProtocol

    NOTE(review): the handlers below control sampling via a YAML schedule
    file FTP'd to the instrument host and simple HTTP requests (status,
    load/stop schedule) -- confirm against the driver documentation.
    """

    # Python 2 metaclass hook: wraps every method with trace-level logging.
    __metaclass__ = get_logging_metaclass(log_level='trace')

    def __init__(self, prompts, newline, driver_event):
        """
        Protocol constructor.
        @param prompts A BaseEnum class containing instrument prompts.
        @param newline The newline.
        @param driver_event Driver process event callback.
        """
        # Construct protocol superclass.
        CommandResponseInstrumentProtocol.__init__(self, prompts, newline,
                                                   driver_event)

        # Build protocol state machine.
        self._protocol_fsm = ThreadSafeFSM(ProtocolState, ProtocolEvent,
                                           ProtocolEvent.ENTER,
                                           ProtocolEvent.EXIT)

        # Add event handlers for protocol state machine.
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.ENTER,
                                       self._handler_unknown_enter)
        self._protocol_fsm.add_handler(ProtocolState.UNKNOWN,
                                       ProtocolEvent.DISCOVER,
                                       self._handler_unknown_discover)

        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.ENTER,
                                       self._handler_command_enter)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.START_AUTOSAMPLE,
                                       self._handler_command_autosample)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.ACQUIRE_STATUS,
                                       self._handler_command_acquire_status)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.GET,
                                       self._handler_command_get)
        self._protocol_fsm.add_handler(ProtocolState.COMMAND,
                                       ProtocolEvent.SET,
                                       self._handler_command_set)

        # GET is also valid while autosampling; reuse the command handler.
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.STOP_AUTOSAMPLE,
                                       self._handler_autosample_stop)
        self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE,
                                       ProtocolEvent.GET,
                                       self._handler_command_get)

        # Construct the parameter dictionary containing device parameters,
        # current parameter values, and set formatting functions.
        self._build_driver_dict()
        self._build_command_dict()
        self._build_param_dict()

        # Add sample handlers.

        # Start the state machine in the UNKNOWN state.
        self._protocol_fsm.start(ProtocolState.UNKNOWN)

        # commands sent to device to be filtered in responses for telnet DA
        self._sent_cmds = []

        self._chunker = StringChunker(self.sieve_function)

    def _build_param_dict(self):
        """
        Populate the parameter dictionary with parameters.
        For each parameter key, add match string, match lambda function,
        and value formatting function for set commands.
        """

        self._param_dict.add(
            Parameter.SCHEDULE,
            r'schedule:\s+(.*)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="Schedule",
            description=
            "Large block of text used to create the .yaml file defining the sampling schedule.",
            startup_param=True,
            default_value=yaml.dump(DEFAULT_CONFIG, default_flow_style=False))

        # NOTE(review): this pattern requires four digits in each of the
        # first three octets and three in the last, so it will not match an
        # ordinary dotted-quad like 10.0.0.1 -- confirm the intended format.
        self._param_dict.add(
            Parameter.FTP_IP_ADDRESS,
            r'ftp address:\s+(\d\d\d\d\.\d\d\d\d\.\d\d\d\d\.\d\d\d)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="FTP IP Address",
            description=
            "IP address the driver uses to connect to the instrument FTP server.",
            startup_param=True,
            default_value=DEFAULT_HOST)

        self._param_dict.add(
            Parameter.FTP_USERNAME,
            r'username:(.*)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="FTP User Name",
            description="Username used to connect to the FTP server.",
            startup_param=True,
            default_value=USER_NAME)

        self._param_dict.add(
            Parameter.FTP_PASSWORD,
            r'password:(.*)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="FTP Password",
            description="Password used to connect to the FTP server.",
            startup_param=True,
            default_value=PASSWORD)

        # NOTE(review): the description text below looks copy-pasted from a
        # storage-location parameter and does not describe a port -- verify.
        self._param_dict.add(
            Parameter.FTP_PORT,
            r'port:(.*)',
            lambda match: match.group(1),
            str,
            type=ParameterDictType.STRING,
            display_name="FTP Port",
            description=
            "Location on the OOI infrastructure where .raw files and echogram images will be stored.",
            startup_param=True,
            default_value=DEFAULT_PORT)

    def _build_driver_dict(self):
        """
        Populate the driver dictionary with options
        """
        self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)

    def _build_command_dict(self):
        """
        Populate the command dictionary with command.
        """
        self._cmd_dict.add(Capability.START_AUTOSAMPLE,
                           display_name="Start Autosample")
        self._cmd_dict.add(Capability.STOP_AUTOSAMPLE,
                           display_name="Stop Autosample")
        self._cmd_dict.add(Capability.ACQUIRE_STATUS,
                           display_name="Acquire Status")
        self._cmd_dict.add(Capability.DISCOVER, display_name='Discover')

    def _filter_capabilities(self, events):
        """
        Return a list of currently available capabilities.
        @param events: candidate event names
        @return: the subset of events that are driver capabilities
        """
        return [x for x in events if Capability.has(x)]

    ########################################################################
    # Unknown handlers.
    ########################################################################

    def _handler_unknown_enter(self, *args, **kwargs):
        """
        Enter unknown state.
        """
        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_unknown_exit(self, *args, **kwargs):
        """
        Exit unknown state.
        """
        pass

    def _handler_unknown_discover(self, *args, **kwargs):
        """
        Discover current state
        @retval next_state, (next_state, result)
        @raise InstrumentConnectionException if the instrument's HTTP status
            endpoint cannot be reached (NOTE(review): exception class must be
            imported at module level -- not visible in this chunk).
        """
        next_state = ProtocolState.COMMAND
        result = []

        # Try to get the status to check if the instrument is alive
        host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS)
        port = self._param_dict.get_config_value(Parameter.FTP_PORT)
        response = self._url_request(host, port, '/status.json')

        if response is None:
            error_msg = "_handler_unknown_discover: Unable to connect to host: %s" % host
            log.error(error_msg)
            raise InstrumentConnectionException(error_msg)

        return next_state, (next_state, result)

    ########################################################################
    # Command handlers.
    ########################################################################
    def _handler_command_enter(self, *args, **kwargs):
        """
        Enter command state.
        @throws InstrumentTimeoutException if the device cannot be woken.
        @throws InstrumentProtocolException if the update commands and not recognized.
        """
        self._init_params()

        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)

    def _handler_command_exit(self, *args, **kwargs):
        """
        Exit command state.
        """
        pass

    def _handler_command_get(self, *args, **kwargs):
        """
        Get parameters while in the command state.
        @param params List of the parameters to pass to the state
        @retval returns (next_state, result) where result is a dict {}. No
            agent state changes happening with Get, so no next_agent_state
        @throw InstrumentParameterException for invalid parameter
        """
        result_vals = {}

        # Retrieve required parameter.
        # Raise if no parameter provided, or not a dict.
        try:
            params = args[0]

        except IndexError:
            raise InstrumentParameterException(
                '_handler_command_get requires a parameter dict.')

        # Expand the ALL pseudo-parameter into the full parameter list.
        if Parameter.ALL in params:
            log.debug("Parameter ALL in params")
            params = Parameter.list()
            params.remove(Parameter.ALL)

        log.debug("_handler_command_get: params = %s", params)

        if params is None or not isinstance(params, list):
            raise InstrumentParameterException(
                "GET parameter list not a list!")

        # fill the return values from the update
        for param in params:
            if not Parameter.has(param):
                raise InstrumentParameterException("Invalid parameter!")
            result_vals[param] = self._param_dict.get(param)
            # NOTE(review): return value discarded -- looks like a leftover;
            # get() above already fetched the value.
            self._param_dict.get_config_value(param)
        result = result_vals

        log.debug("Get finished, next_state: %s, result: %s", None, result)
        return None, result

    def _handler_command_set(self, *args, **kwargs):
        """
        Set parameter
        @retval next state, result
        @throw InstrumentParameterException if no dict supplied or params
            is not a dict
        """
        startup = False

        try:
            params = args[0]
        except IndexError:
            raise InstrumentParameterException(
                '_handler_command_set: command requires a parameter dict.')

        # Optional second positional argument: startup flag.
        try:
            startup = args[1]
        except IndexError:
            pass

        if not isinstance(params, dict):
            raise InstrumentParameterException('Set parameters not a dict.')

        # For each key, val in the params, set the param dictionary.
        old_config = self._param_dict.get_config()
        self._set_params(params, startup)

        # Notify the agent only if the effective configuration changed.
        new_config = self._param_dict.get_config()
        if old_config != new_config:
            self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)

        return None, None

    def _set_params(self, *args, **kwargs):
        """
        Issue commands to the instrument to set various parameters.
        Setting SCHEDULE also FTPs the new YAML file to the instrument and
        asks the instrument (via HTTP) to reload it.
        """
        try:
            params = args[0]
        except IndexError:
            raise InstrumentParameterException(
                'Set command requires a parameter dict.')

        # verify param is not readonly param
        self._verify_not_readonly(*args, **kwargs)

        # Python 2 dict iteration (iteritems); file is Py2 throughout.
        for key, val in params.iteritems():
            log.debug("KEY = %s VALUE = %s", key, val)
            self._param_dict.set_value(key, val)
            if key == Parameter.SCHEDULE:
                self._ftp_schedule_file()

                # Load the schedule file
                host = self._param_dict.get(Parameter.FTP_IP_ADDRESS)
                port = self._param_dict.get_config_value(Parameter.FTP_PORT)
                log.debug("_set_params: stop the current schedule file")
                self._url_request(host, port, '/stop_schedule', data={})
                log.debug("_set_params: upload driver YAML file to host %s",
                          host)
                res = self._url_request(host,
                                        port,
                                        '/load_schedule',
                                        data=json.dumps(
                                            {'filename': YAML_FILE_NAME}))
                log.debug("_set_params: result from load = %s", res)

        log.debug("set complete, update params")

    def _ftp_schedule_file(self):
        """
        Construct a YAML schedule file and
        ftp the file to the Instrument server
        @raise InstrumentException if the temp file cannot be created or the
            FTP host cannot be reached
        """
        # Create a temporary file and write the schedule YAML information to the file
        try:
            config_file = tempfile.TemporaryFile()
            log.debug("temporary file created")

            # Python 2 only: `file` is the Py2 built-in file type.
            if config_file is None or not isinstance(config_file, file):
                raise InstrumentException("config_file is not a temp file!")

            config_file.write(self._param_dict.get(Parameter.SCHEDULE))
            config_file.seek(0)
            log.debug("finished writing config file:\n%r",
                      self._param_dict.get(Parameter.SCHEDULE))

        except Exception as e:
            log.error("Create schedule YAML file exception: %s", e)
            raise e

        #  FTP the schedule file to the ZPLSC server
        host = ''

        try:
            log.debug("Create a ftp session")
            host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS)
            log.debug("Got host ip address %s", host)

            ftp_session = ftplib.FTP()
            ftp_session.connect(host)
            ftp_session.login(USER_NAME, PASSWORD)
            log.debug("ftp session was created...")

            # Active mode transfer; upload into the instrument's config dir.
            ftp_session.set_pasv(False)
            ftp_session.cwd("config")

            ftp_session.storlines('STOR ' + YAML_FILE_NAME, config_file)
            # NOTE(review): ftplib.FTP.dir() prints the listing and returns
            # None, so `files` is always None here.
            files = ftp_session.dir()

            log.debug("*** Config yaml file sent: %s", files)

            ftp_session.quit()
            config_file.close()

        # Python 2 except syntax; binds socket-level connection errors.
        except (ftplib.socket.error, ftplib.socket.gaierror), e:
            log.error("ERROR: cannot reach FTP Host %s: %s ", host, e)
            raise InstrumentException("ERROR: cannot reach FTP Host %s " %
                                      host)

        log.debug("*** FTP %s to ftp host %s successfully", YAML_FILE_NAME,
                  host)
예제 #23
0
class SatlanticOCR507DataParticle(DataParticle):
    """
    Routines for parsing raw data into a data particle structure for the
    Satlantic OCR507 sensor. Overrides the building of values, and the rest comes
    along for free.
    """
    __metaclass__ = get_logging_metaclass(log_level='debug')
    _data_particle_type = DataParticleType.PARSED

    def _build_parsed_values(self):
        """
        Take something in the sample format and split it into
        a OCR507 values (with an appropriate tag)

        @return: list of {value_id, value} dicts for the particle fields
        @throws SampleException If there is a problem with sample creation
        """
        match = SAMPLE_REGEX.match(self.raw_data)

        if not match:
            raise SampleException(
                "No regex match of parsed sample data: [%r]" % self.raw_data)

        # Parse the relevant ascii fields
        instrument_id = match.group('instrument_id')
        serial_number = match.group('serial_number')
        timer = float(match.group('timer'))

        # Ensure the expected values were present
        # NOTE(review): `if not timer:` also rejects a legitimate 0.0 timer
        # value -- confirm zero is impossible for this field.
        if not instrument_id:
            raise SampleException("No instrument id value parsed")
        if not serial_number:
            raise SampleException("No serial number value parsed")
        if not timer:
            raise SampleException("No timer value parsed")

        # Parse the relevant binary data
        """
        Field Name          Field Size (bytes)      Description         Format Char
        ----------          ------------------      -----------         -----------
        sample_delay                2               BS formatted value      h
        ch[1-7]_sample              4               BU formatted value      I
        regulated_input_voltage     2               BU formatted value      H
        analog_rail_voltage         2               BU formatted value      H
        internal_temp               2               BU formatted value      H
        frame_counter               1               BU formatted value      B
        checksum                    1               BU formatted value      B
        """
        # Network byte order; '!h7IHHHBB' mirrors the table above.
        try:
            sample_delay, ch1_sample, ch2_sample, ch3_sample, ch4_sample, ch5_sample, ch6_sample, ch7_sample, \
            regulated_input_voltage, analog_rail_voltage, internal_temp, frame_counter, checksum \
                = struct.unpack('!h7IHHHBB', match.group('binary_data') + match.group('checksum'))

        # Python 2 except syntax; re-raise unpack failures as SampleException.
        except struct.error, e:
            raise SampleException(e)

        result = [{
            DataParticleKey.VALUE_ID:
            SatlanticOCR507DataParticleKey.INSTRUMENT_ID,
            DataParticleKey.VALUE: instrument_id
        }, {
            DataParticleKey.VALUE_ID:
            SatlanticOCR507DataParticleKey.SERIAL_NUMBER,
            DataParticleKey.VALUE: serial_number
        }, {
            DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.TIMER,
            DataParticleKey.VALUE: timer
        }, {
            DataParticleKey.VALUE_ID:
            SatlanticOCR507DataParticleKey.SAMPLE_DELAY,
            DataParticleKey.VALUE: sample_delay
        }, {
            DataParticleKey.VALUE_ID:
            SatlanticOCR507DataParticleKey.SAMPLES,
            DataParticleKey.VALUE: [
                ch1_sample, ch2_sample, ch3_sample, ch4_sample, ch5_sample,
                ch6_sample, ch7_sample
            ]
        }, {
            DataParticleKey.VALUE_ID:
            SatlanticOCR507DataParticleKey.REGULATED_INPUT_VOLTAGE,
            DataParticleKey.VALUE: regulated_input_voltage
        }, {
            DataParticleKey.VALUE_ID:
            SatlanticOCR507DataParticleKey.ANALOG_RAIL_VOLTAGE,
            DataParticleKey.VALUE: analog_rail_voltage
        }, {
            DataParticleKey.VALUE_ID:
            SatlanticOCR507DataParticleKey.INTERNAL_TEMP,
            DataParticleKey.VALUE: internal_temp
        }, {
            DataParticleKey.VALUE_ID:
            SatlanticOCR507DataParticleKey.FRAME_COUNTER,
            DataParticleKey.VALUE: frame_counter
        }, {
            DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.CHECKSUM,
            DataParticleKey.VALUE: checksum
        }]

        # A failed checksum does not abort the particle; it is flagged in
        # the particle's quality flag instead.
        if not self._checksum_check(self.raw_data):
            self.contents[DataParticleKey.
                          QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED
            log.warn("Invalid checksum encountered: %r.", checksum)

        log.debug('OCR507 Data Particle raw data: %r', self.raw_data)
        log.debug('OCR507 Data Particle parsed data: %r', result)

        return result
예제 #24
0
import yaml
import zmq

from docopt import docopt
from logging import _levelNames
from mi.core.common import BaseEnum
from mi.core.exceptions import UnexpectedError, InstrumentCommandException, InstrumentException
from mi.core.instrument.instrument_driver import DriverAsyncEvent
from mi.core.instrument.publisher import Publisher
from mi.core.log import get_logger, get_logging_metaclass
from mi.core.service_registry import ConsulServiceRegistry

log = get_logger()

# Metaclass that wraps methods with trace-level entry/exit logging.
META_LOGGER = get_logging_metaclass('trace')

__author__ = 'Peter Cable'
__license__ = 'Apache 2.0'


class Commands(BaseEnum):
    """Command names accepted by the driver process message handler."""
    STOP_DRIVER = 'stop_driver_process'
    TEST_EVENTS = 'test_events'
    PING = 'process_echo'
    OVERALL_STATE = 'overall_state'
    STOP_WORKER = 'stop_worker'
    DEFAULT = 'default'
    SET_LOG_LEVEL = 'set_log_level'

예제 #25
0
class ZplscBParser(SimpleParser):
    """
    Parser for zplsc_b *.raw files
    """

    __metaclass__ = get_logging_metaclass(log_level='debug')

    def __init__(self, config, stream_handle, exception_callback, output_file_path):
        """
        Initialize the zplsc_b parser, which does not use state or the chunker
        and sieve functions.
        @param config: The parser configuration dictionary
        @param stream_handle: The stream handle of the file to parse
        @param exception_callback: The callback to use when an exception occurs
        @param output_file_path: The location to output the echogram plot .png files
        """

        self.output_file_path = output_file_path

        super(ZplscBParser, self).__init__(config, stream_handle, exception_callback)

    def recov_exception_callback(self, message):
        log.warn(message)
        self._exception_callback(RecoverableSampleException(message))

    def parse_file(self):
        """
        Parse the *.raw file.
        """

        # Extract the file time from the file name
        input_file_name = self._stream_handle.name
        (filepath, filename) = os.path.split(input_file_name)

        # tuple contains the string before the '.', the '.', and the 'raw' string
        outfile = filename.rpartition('.')[0]

        match = FILE_NAME_MATCHER.match(input_file_name)
        if match:
            file_time = match.group('Date') + match.group('Time')
        else:
            file_time = ""
            # Files retrieved from the instrument should always match the timestamp naming convention
            self.recov_exception_callback("Unable to extract file time from input file name: %s."
                                          "Expected format *-DYYYYmmdd-THHMMSS.raw" % input_file_name)

        # Read binary file a block at a time
        raw = self._stream_handle.read(BLOCK_SIZE)

        # Set starting byte
        byte_cnt = 0

        # Read the configuration datagram, output at the beginning of the file
        length1, = unpack('<l', raw[byte_cnt:byte_cnt+LENGTH_SIZE])
        byte_cnt += LENGTH_SIZE

        # Configuration datagram header
        datagram_header = read_datagram_header(raw[byte_cnt:byte_cnt+DATAGRAM_HEADER_SIZE])
        byte_cnt += DATAGRAM_HEADER_SIZE

        # Configuration: header
        config_header = read_config_header(raw[byte_cnt:byte_cnt+CONFIG_HEADER_SIZE])
        byte_cnt += CONFIG_HEADER_SIZE

        transducer_count = config_header['transducer_count']

        if GET_CONFIG_TRANSDUCER:
            td_gain = {}
            td_gain_table = {}
            td_pulse_length_table = {}
            td_phi_equiv_beam_angle = {}

            # Configuration: transducers (1 to 7 max)
            for i in xrange(1, transducer_count+1):
                config_transducer = read_config_transducer(
                    raw[byte_cnt:byte_cnt+CONFIG_TRANSDUCER_SIZE])

                # Example data that one might need for various calculations later on
                td_gain[i] = config_transducer['gain']
                td_gain_table[i] = config_transducer['gain_table']
                td_pulse_length_table[i] = config_transducer['pulse_length_table']
                td_phi_equiv_beam_angle[i] = config_transducer['equiv_beam_angle']

        byte_cnt += CONFIG_TRANSDUCER_SIZE * transducer_count

        # Compare length1 (from beginning of datagram) to length2 (from the end of datagram) to
        # the actual number of bytes read. A mismatch can indicate an invalid, corrupt, misaligned,
        # or missing configuration datagram or a reverse byte order binary data file.
        # A bad/missing configuration datagram header is a significant error.
        length2, = unpack('<l', raw[byte_cnt:byte_cnt+LENGTH_SIZE])
        if not (length1 == length2 == byte_cnt-LENGTH_SIZE):
            raise ValueError(
                "Length of configuration datagram and number of bytes read do not match: length1: %s"
                ", length2: %s, byte_cnt: %s. Possible file corruption or format incompatibility." %
                (length1, length2, byte_cnt+LENGTH_SIZE))

        first_ping_metadata = defaultdict(list)
        trans_keys = range(1, transducer_count+1)
        trans_array = dict((key, []) for key in trans_keys)         # transducer power data
        trans_array_time = dict((key, []) for key in trans_keys)    # transducer time data
        td_f = dict.fromkeys(trans_keys)                            # transducer frequency
        td_dR = dict.fromkeys(trans_keys)                           # transducer depth measurement

        position = 0

        while raw:
            # We only care for the Sample datagrams, skip over all the other datagrams
            match = SAMPLE_MATCHER.search(raw)

            if not match:
                # Read in the next block w/ a token sized overlap
                self._stream_handle.seek(self._stream_handle.tell() - 4)
                raw = self._stream_handle.read(BLOCK_SIZE)

                # The last 4 bytes is just the length2 of the last datagram
                if len(raw) <= 4:
                    break

            # Offset by size of length value
            match_start = match.start() - LENGTH_SIZE

            # Seek to the position of the length data before the token to read into numpy array
            self._stream_handle.seek(position + match_start)

            # Read and unpack the Sample Datagram into numpy array
            sample_data = np.fromfile(self._stream_handle, dtype=sample_dtype, count=1)
            channel = sample_data['channel_number'][0]

            # Check for a valid channel number that is within the number of transducers config
            # to prevent incorrectly indexing into the dictionaries.
            # An out of bounds channel number can indicate invalid, corrupt,
            # or misaligned datagram or a reverse byte order binary data file.
            # Log warning and continue to try and process the rest of the file.
            if channel < 0 or channel > transducer_count:
                log.warn("Invalid channel: %s for transducer count: %s."
                         "Possible file corruption or format incompatibility.", channel, transducer_count)

                # Need current position in file to increment for next regex search offset
                position = self._stream_handle.tell()

                # Read the next block for regex search
                raw = self._stream_handle.read(BLOCK_SIZE)
                continue

            # Convert high and low bytes to internal time
            internal_time = (sample_data['high_date_time'][0] << 32) + sample_data['low_date_time'][0]
            # Note: Strictly sequential time tags are not guaranteed.
            trans_array_time[channel].append(internal_time)

            # Gather metadata once per transducer channel number
            if not trans_array[channel]:
                file_name = self.output_file_path + '/' + outfile + '_' + \
                            str(int(sample_data['frequency'])/1000) + 'k.png'

                first_ping_metadata[ZplscBParticleKey.FILE_TIME] = file_time
                first_ping_metadata[ZplscBParticleKey.FILE_NAME].append(file_name)
                first_ping_metadata[ZplscBParticleKey.CHANNEL].append(channel)
                first_ping_metadata[ZplscBParticleKey.TRANSDUCER_DEPTH].append(sample_data['transducer_depth'][0])
                first_ping_metadata[ZplscBParticleKey.FREQUENCY].append(sample_data['frequency'][0])
                first_ping_metadata[ZplscBParticleKey.TRANSMIT_POWER].append(sample_data['transmit_power'][0])
                first_ping_metadata[ZplscBParticleKey.PULSE_LENGTH].append(sample_data['pulse_length'][0])
                first_ping_metadata[ZplscBParticleKey.BANDWIDTH].append(sample_data['bandwidth'][0])
                first_ping_metadata[ZplscBParticleKey.SAMPLE_INTERVAL].append(sample_data['sample_interval'][0])
                first_ping_metadata[ZplscBParticleKey.SOUND_VELOCITY].append(sample_data['sound_velocity'][0])
                first_ping_metadata[ZplscBParticleKey.ABSORPTION_COEF].append(sample_data['absorption_coefficient'][0])
                first_ping_metadata[ZplscBParticleKey.TEMPERATURE].append(sample_data['temperature'][0])

                # Make only one particle for the first ping series containing data for all channels
                if channel == config_header['transducer_count']:
                    # Convert from Windows time to NTP time.
                    time = datetime(1601, 1, 1) + timedelta(microseconds=internal_time/10.0)
                    year, month, day, hour, min, sec = time.utctimetuple()[:6]
                    unix_time = calendar.timegm((year, month, day, hour, min, sec+(time.microsecond/1e6)))
                    time_stamp = ntplib.system_to_ntp_time(unix_time)

                    # Extract a particle and append it to the record buffer
                    # Note: numpy unpacked values still need to be encoded
                    particle = self._extract_sample(ZplscBInstrumentDataParticle, None,
                                                    first_ping_metadata,
                                                    time_stamp)
                    log.debug('Parsed particle: %s', particle.generate_dict())
                    self._record_buffer.append(particle)

                # Extract various calibration parameters used for generating echogram plot
                # This data doesn't change so extract it once per channel
                td_f[channel] = sample_data['frequency'][0]
                td_dR[channel] = sample_data['sound_velocity'][0] * sample_data['sample_interval'][0] / 2

            count = sample_data['count'][0]

            # Extract array of power data
            power_dtype = np.dtype([('power_data', '<i2')])     # 2 byte int (short)
            power_data = np.fromfile(self._stream_handle, dtype=power_dtype, count=count)

            # Decompress power data to dB
            trans_array[channel].append(power_data['power_data'] * 10. * np.log10(2) / 256.)

            # Read the athwartship and alongship angle measurements
            if sample_data['mode'][0] > 1:
                angle_dtype = np.dtype([('athwart', '<i1'), ('along', '<i1')])     # 1 byte ints
                angle_data = np.fromfile(self._stream_handle, dtype=angle_dtype, count=count)

            # Read and compare length1 (from beginning of datagram) to length2
            # (from the end of datagram). A mismatch can indicate an invalid, corrupt,
            # or misaligned datagram or a reverse byte order binary data file.
            # Log warning and continue to try and process the rest of the file.
            len_dtype = np.dtype([('length2', '<i4')])     # 4 byte int (long)
            length2_data = np.fromfile(self._stream_handle, dtype=len_dtype, count=1)
            if not (sample_data['length1'][0] == length2_data['length2'][0]):
                log.warn("Mismatching beginning and end length values in sample datagram: length1"
                         ": %s, length2: %s. Possible file corruption or format incompatibility."
                         , sample_data['length1'][0], length2_data['length2'][0])

            # Need current position in file to increment for next regex search offset
            position = self._stream_handle.tell()

            # Read the next block for regex search
            raw = self._stream_handle.read(BLOCK_SIZE)

        # Driver spends most of the time plotting,
        # this can take longer for more transducers so lets break out the work
        processes = []
        for channel in td_f.iterkeys():
            try:
                process = Process(target=self.generate_echogram_plot,
                                  args=(trans_array_time[channel], trans_array[channel],
                                        td_f[channel], td_dR[channel], channel,
                                        first_ping_metadata[ZplscBParticleKey.FILE_NAME][channel-1]))
                process.start()
                processes.append(process)

            except Exception, e:
                log.error("Error: Unable to start process: %s", e)

        for p in processes:
            p.join()