Example #1
    @classmethod
    def stop_launched_simulator(cls):
        """
        Utility to stop the process launched with launch_simulator.
        The stop is attempted up to three times in case of errors, with a
        ten-second sleep between attempts.

        @return None if process seems to have been stopped properly.
                Otherwise the exception of the last attempt to stop it.
        """
        if cls._sim_process:
            sim_proc, cls._sim_process = cls._sim_process, None
            attempts = 3
            attempt = 0
            while attempt < attempts:
                attempt += 1
                log.debug(
                    "[OMSim] stopping launched simulator (attempt=%d) ...",
                    attempt)
                try:
                    sim_proc.stop()
                    log.debug(
                        "[OMSim] simulator process seems to have stopped properly"
                    )
                    return None

                except Exception as ex:
                    if attempt < attempts:
                        sleep(10)
                    else:
                        log.warn(
                            "[OMSim] error while stopping simulator process: %s",
                            ex)
                        return ex
    def _build_parsed_values(self):
        """
        Take the velocity header data sample format and parse it into
        values with appropriate tags.
        @throws SampleException If there is a problem with sample creation
        """
        log.debug('VectorVelocityHeaderDataParticle: raw data =%r', self.raw_data)

        try:
            unpack_string = '<4s6sH8B20sH'
            (sync, timestamp, number_of_records, noise1, noise2, noise3, _,
             correlation1, correlation2, correlation3, _, _, cksum) = struct.unpack(unpack_string, self.raw_data)

            if not validate_checksum('<20H', self.raw_data):
                log.warn("Failed checksum in %s from instrument (%r)", self._data_particle_type, self.raw_data)
                self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED

            timestamp = common.convert_time(timestamp)
            self.set_internal_timestamp((timestamp-datetime(1900, 1, 1)).total_seconds())

        except Exception as e:
            log.error('Error creating particle vel3d_cd_data_header, raw data: %r', self.raw_data)
            raise SampleException(e)

        result = [{VID: VectorVelocityHeaderDataParticleKey.TIMESTAMP, VAL: str(timestamp)},
                  {VID: VectorVelocityHeaderDataParticleKey.NUMBER_OF_RECORDS, VAL: number_of_records},
                  {VID: VectorVelocityHeaderDataParticleKey.NOISE1, VAL: noise1},
                  {VID: VectorVelocityHeaderDataParticleKey.NOISE2, VAL: noise2},
                  {VID: VectorVelocityHeaderDataParticleKey.NOISE3, VAL: noise3},
                  {VID: VectorVelocityHeaderDataParticleKey.CORRELATION1, VAL: correlation1},
                  {VID: VectorVelocityHeaderDataParticleKey.CORRELATION2, VAL: correlation2},
                  {VID: VectorVelocityHeaderDataParticleKey.CORRELATION3, VAL: correlation3}]

        log.debug('VectorVelocityHeaderDataParticle: particle=%s', result)
        return result
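
validate_checksum above comes from the driver's common module. A minimal sketch of the word-sum check it appears to perform, with the seed and signature inferred from the call sites (the 0xb58c base is the Nortek convention; treat the details as assumptions):

import struct

def validate_checksum(fmt, data, cksum_offset=-2):
    # Sum the 16-bit words covered by fmt, seeded with the (assumed) Nortek
    # base value, and compare against the checksum word stored near the end
    # of the record.
    calculated = (0xb58c + sum(struct.unpack_from(fmt, data))) & 0xffff
    expected, = struct.unpack_from('<H', data, len(data) + cksum_offset)
    return calculated == expected
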
    def getpid(self):
        """
        Get the pid of the current running process and ensure that it is running.
        @returns the pid of the driver process if it is running, otherwise None
        """
        if self._process:
            if self.poll():
                return self._process.pid
            log.warn("[OMSim] process found, but poll failed for pid %s",
                     self._process.pid)
        return None
    def _build_parsed_values(self):
        """
        Take the velocity data sample format and parse it into
        values with appropriate tags.
        @throws SampleException If there is a problem with sample creation
        """
        log.debug('VectorVelocityDataParticle: raw data =%r', self.raw_data)

        try:

            unpack_string = '<2s4B2H3h6BH'

            (sync_id, analog_input2_lsb, count, pressure_msb, analog_input2_msb, pressure_lsw,
             analog_input1, velocity_beam1, velocity_beam2, velocity_beam3, amplitude_beam1,
             amplitude_beam2, amplitude_beam3, correlation_beam1, correlation_beam2,
             correlation_beam3, checksum) = struct.unpack(unpack_string, self.raw_data)

            if not validate_checksum('<11H', self.raw_data):
                log.warn("Failed checksum in %s from instrument (%r)", self._data_particle_type, self.raw_data)
                self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED

            analog_input2 = analog_input2_msb * 0x100 + analog_input2_lsb
            pressure = pressure_msb * 0x10000 + pressure_lsw

        except Exception as e:
            log.error('Error creating particle vel3d_cd_velocity_data, raw data: %r', self.raw_data)
            raise SampleException(e)

        result = [{VID: VectorVelocityDataParticleKey.ANALOG_INPUT2, VAL: analog_input2},
                  {VID: VectorVelocityDataParticleKey.COUNT, VAL: count},
                  {VID: VectorVelocityDataParticleKey.PRESSURE, VAL: pressure},
                  {VID: VectorVelocityDataParticleKey.ANALOG_INPUT1, VAL: analog_input1},
                  {VID: VectorVelocityDataParticleKey.VELOCITY_BEAM1, VAL: velocity_beam1},
                  {VID: VectorVelocityDataParticleKey.VELOCITY_BEAM2, VAL: velocity_beam2},
                  {VID: VectorVelocityDataParticleKey.VELOCITY_BEAM3, VAL: velocity_beam3},
                  {VID: VectorVelocityDataParticleKey.AMPLITUDE_BEAM1, VAL: amplitude_beam1},
                  {VID: VectorVelocityDataParticleKey.AMPLITUDE_BEAM2, VAL: amplitude_beam2},
                  {VID: VectorVelocityDataParticleKey.AMPLITUDE_BEAM3, VAL: amplitude_beam3},
                  {VID: VectorVelocityDataParticleKey.CORRELATION_BEAM1, VAL: correlation_beam1},
                  {VID: VectorVelocityDataParticleKey.CORRELATION_BEAM2, VAL: correlation_beam2},
                  {VID: VectorVelocityDataParticleKey.CORRELATION_BEAM3, VAL: correlation_beam3}]

        log.debug('VectorVelocityDataParticle: particle=%s', result)
        return result
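
The particle reassembles fields that arrive split across bytes; the arithmetic is plain positional weighting. A small worked example:

# pressure arrives as a 1-byte MSB plus a 2-byte least-significant word
pressure_msb, pressure_lsw = 0x01, 0x86a0
pressure = pressure_msb * 0x10000 + pressure_lsw    # 0x186a0 == 100000
assert pressure == (pressure_msb << 16) | pressure_lsw
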
    def poll(self):
        """
        Check to see if the process is alive.
        @return true if process is running, false otherwise
        """

        # The Popen.poll() doesn't seem to be returning reliable results.  
        # Sending a signal 0 to the process might be more reliable.

        if not self._process:
            return False

        try:
            os.kill(self._process.pid, 0)
        except OSError:
            log.warn("[OMSim] Could not send a signal to the process, pid: %s" % self._process.pid)
            return False

        return True
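
Sending signal 0 asks the kernel to run only the existence and permission checks; nothing is delivered. A standalone sketch of the same liveness idiom:

import errno
import os

def pid_alive(pid):
    """Probe a pid with signal 0; no signal is actually delivered."""
    try:
        os.kill(pid, 0)
    except OSError as e:
        # EPERM means the process exists but belongs to another user
        return e.errno == errno.EPERM
    return True
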
Example #8
    @classmethod
    def launch_simulator(cls, inactivity_period):
        """
        Utility to launch the simulator as a separate process.

        @return the new URI for a regular call to create_instance(uri).
        """

        # in case there's any ongoing simulator process:
        ex = cls.stop_launched_simulator()
        if ex:
            log.warn("[OMSim] previous process could not be stopped properly. "
                     "The next launch may fail because of potential conflict.")

        from mi.platform.rsn.simulator.process_util import ProcessUtil
        cls._sim_process = ProcessUtil()
        cls._rsn_oms, uri = cls._sim_process.launch()

        log.debug("launch_simulator: launched. uri=%s", uri)

        if inactivity_period:
            cls._rsn_oms.x_exit_inactivity(inactivity_period)

            def heartbeat():
                n = 0
                while cls._sim_process:
                    sleep(1)
                    n += 1
                    if cls._sim_process and n % 20 == 0:
                        log.debug("[OMSim] heartbeat sent")
                        try:
                            cls._rsn_oms.ping()
                        except Exception:
                            pass
                log.debug("[OMSim] heartbeat ended")

            Greenlet(heartbeat).start()
            log.debug(
                "[OMSim] called x_exit_inactivity with %s and started heartbeat",
                inactivity_period)

        return uri
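
Hypothetical usage of the launch/stop pair (the factory name OmsClientFactory and the create_instance call are placeholders for illustration, not the module's confirmed API):

uri = OmsClientFactory.launch_simulator(inactivity_period=120)
rsn_oms = OmsClientFactory.create_instance(uri)  # regular client creation against the new URI
try:
    rsn_oms.ping()
finally:
    OmsClientFactory.stop_launched_simulator()
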
    def _build_parsed_values(self):
        """
        Take the system data sample format and parse it into
        values with appropriate tags.
        @throws SampleException If there is a problem with sample creation
        """
        log.debug('VectorSystemDataParticle: raw data =%r', self.raw_data)

        try:

            unpack_string = '<4s6s2H4h2bHH'

            (sync, timestamp, battery, sound_speed, heading, pitch,
             roll, temperature, error, status, analog_input, cksum) = struct.unpack_from(unpack_string, self.raw_data)

            if not validate_checksum('<13H', self.raw_data):
                log.warn("Failed checksum in %s from instrument (%r)", self._data_particle_type, self.raw_data)
                self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED

            timestamp = common.convert_time(timestamp)
            self.set_internal_timestamp((timestamp-datetime(1900, 1, 1)).total_seconds())

        except Exception as e:
            log.error('Error creating particle vel3d_cd_system_data, raw data: %r', self.raw_data)
            raise SampleException(e)

        result = [{VID: VectorSystemDataParticleKey.TIMESTAMP, VAL: str(timestamp)},
                  {VID: VectorSystemDataParticleKey.BATTERY, VAL: battery},
                  {VID: VectorSystemDataParticleKey.SOUND_SPEED, VAL: sound_speed},
                  {VID: VectorSystemDataParticleKey.HEADING, VAL: heading},
                  {VID: VectorSystemDataParticleKey.PITCH, VAL: pitch},
                  {VID: VectorSystemDataParticleKey.ROLL, VAL: roll},
                  {VID: VectorSystemDataParticleKey.TEMPERATURE, VAL: temperature},
                  {VID: VectorSystemDataParticleKey.ERROR, VAL: error},
                  {VID: VectorSystemDataParticleKey.STATUS, VAL: status},
                  {VID: VectorSystemDataParticleKey.ANALOG_INPUT, VAL: analog_input}]

        log.debug('VectorSystemDataParticle: particle=%r', result)

        return result
    def _build_parsed_values(self):
        """
        Take the head config data and parse it into
        values with appropriate tags.
        @throws SampleException If there is a problem with sample creation
        """
        try:
            unpack_string = '<4s2s2H12s176s22sHh2s'
            sync, config, head_freq, head_type, head_serial, system_data, _, num_beams, cksum, _ = struct.unpack(
                unpack_string, self.raw_data)

            if not validate_checksum('<111H', self.raw_data, -4):
                log.warn("Failed checksum in %s from instrument (%r)", self._data_particle_type, self.raw_data)
                self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED

            config = common.convert_word_to_bit_field(config)
            system_data = base64.b64encode(system_data)
            head_serial = head_serial.split('\x00', 1)[0]

            pressure_sensor = config[-1]
            mag_sensor = config[-2]
            tilt_sensor = config[-3]
            tilt_mount = config[-4]

        except Exception as e:
            log.error('Error creating particle head config, raw data: %r', self.raw_data)
            raise SampleException(e)

        result = [{VID: NortekHeadConfigDataParticleKey.PRESSURE_SENSOR, VAL: pressure_sensor},
                  {VID: NortekHeadConfigDataParticleKey.MAG_SENSOR, VAL: mag_sensor},
                  {VID: NortekHeadConfigDataParticleKey.TILT_SENSOR, VAL: tilt_sensor},
                  {VID: NortekHeadConfigDataParticleKey.TILT_SENSOR_MOUNT, VAL: tilt_mount},
                  {VID: NortekHeadConfigDataParticleKey.HEAD_FREQ, VAL: head_freq},
                  {VID: NortekHeadConfigDataParticleKey.HEAD_TYPE, VAL: head_type},
                  {VID: NortekHeadConfigDataParticleKey.HEAD_SERIAL, VAL: head_serial},
                  {VID: NortekHeadConfigDataParticleKey.SYSTEM_DATA, VAL: system_data, DataParticleKey.BINARY: True},
                  {VID: NortekHeadConfigDataParticleKey.NUM_BEAMS, VAL: num_beams}]

        log.debug('NortekHeadConfigDataParticle: particle=%r', result)
        return result
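
common.convert_word_to_bit_field is also driver-common code. A sketch consistent with the indexing above, where field[-1] holds bit 0 of the instrument word (an inference from how config[-1]..config[-4] are used, not the confirmed implementation):

import struct

def convert_word_to_bit_field(word):
    # Unpack a 2-byte little-endian word and list its 16 bits MSB-first,
    # so the least significant bit lands at index -1.
    value, = struct.unpack('<H', word)
    return [(value >> bit) & 1 for bit in range(15, -1, -1)]
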
    def stop(self):
        """
        Stop the process.
        """
        if self._rsn_oms is not None:
            log.debug("[OMSim] x_exit_simulator -> %r", self._rsn_oms.x_exit_simulator())

        if self._process:
            try:
                log.debug("[OMSim] terminating process %s", self._process.pid)
                self._process.send_signal(signal.SIGINT)
                log.debug("[OMSim] waiting process %s", self._process.pid)
                self._process.wait()
                log.debug("[OMSim] process killed")

            except OSError:
                log.warn("[OMSim] Could not stop process, pid: %s" % self._process.pid)

            sleep(4)

        self._process = None
        self._rsn_oms = None
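
A common hardening of this SIGINT-then-wait shutdown is a bounded wait that escalates to SIGKILL; a sketch of that variant (not the module's actual behavior):

import signal
import time

def stop_with_timeout(process, timeout=10):
    process.send_signal(signal.SIGINT)
    deadline = time.time() + timeout
    while process.poll() is None and time.time() < deadline:
        time.sleep(0.5)
    if process.poll() is None:
        process.kill()   # escalate to SIGKILL if SIGINT was ignored
        process.wait()
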
    def _notify_listener(self, url, event_instance):
        """
        Notifies event to given listener.
        """
        if url == "http://NO_OMS_NOTIFICATIONS":  # pragma: no cover
            # developer convenience - see ion.agents.platform.rsn.oms_event_listener
            return

        log.debug("Notifying event_instance=%s to listener=%s", str(event_instance), url)

        # include url in event instance for diagnostic/debugging purposes:
        event_instance['listener_url'] = url

        # prepare payload (JSON format):
        payload = json.dumps(event_instance, indent=2)
        log.trace("payload=\n%s", payload)
        headers = {
            "Content-type": "application/json",
            "Accept": "text/plain"
        }

        conn = None
        try:
            o = urlparse(url)
            url4conn = o.netloc
            path = o.path

            conn = httplib.HTTPConnection(url4conn)
            conn.request("POST", path, body=payload, headers=headers)
            response = conn.getresponse()
            data = response.read()
            log.trace("RESPONSE: %s, %s, %s", response.status, response.reason, data)
        except Exception as e:
            # the actual listener is no longer there; just log a message
            log.warn("event notification HTTP request failed: %r: %s", url, e)
        finally:
            if conn:
                conn.close()
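
For testing the notification path end to end, a minimal Python 2 listener that accepts these POSTs might look like this (port and handler names are arbitrary choices):

import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

class EventHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        # read the JSON payload posted by _notify_listener and acknowledge it
        length = int(self.headers.getheader('content-length'))
        event_instance = json.loads(self.rfile.read(length))
        print 'received event for listener_url=%s' % event_instance.get('listener_url')
        self.send_response(200)
        self.end_headers()

HTTPServer(('localhost', 5000), EventHandler).serve_forever()
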
Example #15
    def _build_parsed_values(self):
        """
        Take the hardware config data and parse it into
        values with appropriate tags.
        """
        try:
            unpack_string = '<4s14s2s4H2s12s4sh2s'
            (sync, serial_num, config, board_frequency, pic_version, hw_revision,
             recorder_size, status, spare, fw_version, cksum, _) = struct.unpack(unpack_string, self.raw_data)

            if not validate_checksum('<23H', self.raw_data, -4):
                log.warn("_parse_read_hw_config: Bad read hw response from instrument (%r)", self.raw_data)
                self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED

            config = common.convert_word_to_bit_field(config)
            status = common.convert_word_to_bit_field(status)
            recorder_installed = config[-1]
            compass_installed = config[-2]
            velocity_range = status[-1]

        except Exception as e:
            log.error('Error creating particle hardware config, raw data: %r', self.raw_data)
            raise SampleException(e)

        result = [{VID: NortekHardwareConfigDataParticleKey.SERIAL_NUM, VAL: serial_num},
                  {VID: NortekHardwareConfigDataParticleKey.RECORDER_INSTALLED, VAL: recorder_installed},
                  {VID: NortekHardwareConfigDataParticleKey.COMPASS_INSTALLED, VAL: compass_installed},
                  {VID: NortekHardwareConfigDataParticleKey.BOARD_FREQUENCY, VAL: board_frequency},
                  {VID: NortekHardwareConfigDataParticleKey.PIC_VERSION, VAL: pic_version},
                  {VID: NortekHardwareConfigDataParticleKey.HW_REVISION, VAL: hw_revision},
                  {VID: NortekHardwareConfigDataParticleKey.RECORDER_SIZE, VAL: recorder_size},
                  {VID: NortekHardwareConfigDataParticleKey.VELOCITY_RANGE, VAL: velocity_range},
                  {VID: NortekHardwareConfigDataParticleKey.FW_VERSION, VAL: fw_version}]

        log.debug('NortekHardwareConfigDataParticle: particle=%r', result)
        return result
Example #18
    def parse_file(self):
        """
        Parse the *.raw file.
        """

        # Extract the file time from the file name
        input_file_name = self._stream_handle.name
        (filepath, filename) = os.path.split(input_file_name)

        # rpartition('.') yields (base name, '.', 'raw'); keep the base name
        outfile = filename.rpartition('.')[0]

        match = FILE_NAME_MATCHER.match(input_file_name)
        if match:
            file_time = match.group('Date') + match.group('Time')
        else:
            file_time = ""
            # Files retrieved from the instrument should always match the timestamp naming convention
            self.recov_exception_callback("Unable to extract file time from input file name: %s. "
                                          "Expected format *-DYYYYmmdd-THHMMSS.raw" % input_file_name)

        # Read binary file a block at a time
        raw = self._stream_handle.read(BLOCK_SIZE)

        # Set starting byte
        byte_cnt = 0

        # Read the configuration datagram, output at the beginning of the file
        length1, = unpack('<l', raw[byte_cnt:byte_cnt+LENGTH_SIZE])
        byte_cnt += LENGTH_SIZE

        # Configuration datagram header
        datagram_header = read_datagram_header(raw[byte_cnt:byte_cnt+DATAGRAM_HEADER_SIZE])
        byte_cnt += DATAGRAM_HEADER_SIZE

        # Configuration: header
        config_header = read_config_header(raw[byte_cnt:byte_cnt+CONFIG_HEADER_SIZE])
        byte_cnt += CONFIG_HEADER_SIZE

        transducer_count = config_header['transducer_count']

        if GET_CONFIG_TRANSDUCER:
            td_gain = {}
            td_gain_table = {}
            td_pulse_length_table = {}
            td_phi_equiv_beam_angle = {}

            # Configuration: transducers (1 to 7 max)
            for i in xrange(1, transducer_count+1):
                config_transducer = read_config_transducer(
                    raw[byte_cnt:byte_cnt+CONFIG_TRANSDUCER_SIZE])

                # Example data that one might need for various calculations later on
                td_gain[i] = config_transducer['gain']
                td_gain_table[i] = config_transducer['gain_table']
                td_pulse_length_table[i] = config_transducer['pulse_length_table']
                td_phi_equiv_beam_angle[i] = config_transducer['equiv_beam_angle']

        byte_cnt += CONFIG_TRANSDUCER_SIZE * transducer_count

        # Compare length1 (from beginning of datagram) to length2 (from the end of datagram) to
        # the actual number of bytes read. A mismatch can indicate an invalid, corrupt, misaligned,
        # or missing configuration datagram or a reverse byte order binary data file.
        # A bad/missing configuration datagram header is a significant error.
        length2, = unpack('<l', raw[byte_cnt:byte_cnt+LENGTH_SIZE])
        if not (length1 == length2 == byte_cnt-LENGTH_SIZE):
            raise ValueError(
                "Length of configuration datagram and number of bytes read do not match: length1: %s"
                ", length2: %s, byte_cnt-LENGTH_SIZE: %s. Possible file corruption or format incompatibility." %
                (length1, length2, byte_cnt-LENGTH_SIZE))

        first_ping_metadata = defaultdict(list)
        trans_keys = range(1, transducer_count+1)
        trans_array = dict((key, []) for key in trans_keys)         # transducer power data
        trans_array_time = dict((key, []) for key in trans_keys)    # transducer time data
        td_f = dict.fromkeys(trans_keys)                            # transducer frequency
        td_dR = dict.fromkeys(trans_keys)                           # transducer depth bin size (range per sample)

        position = 0

        while raw:
            # We only care about the Sample datagrams; skip over all the others
            match = SAMPLE_MATCHER.search(raw)

            if not match:
                # Read in the next block w/ a token sized overlap
                self._stream_handle.seek(self._stream_handle.tell() - 4)
                position = self._stream_handle.tell()
                raw = self._stream_handle.read(BLOCK_SIZE)

                # The last 4 bytes are just the length2 of the last datagram
                if len(raw) <= 4:
                    break

                # Search the freshly read block; falling through here would
                # dereference the None match below
                continue

            # Offset by size of length value
            match_start = match.start() - LENGTH_SIZE

            # Seek to the position of the length data before the token to read into numpy array
            self._stream_handle.seek(position + match_start)

            # Read and unpack the Sample Datagram into numpy array
            sample_data = np.fromfile(self._stream_handle, dtype=sample_dtype, count=1)
            channel = sample_data['channel_number'][0]

            # Check for a valid channel number that is within the number of transducers config
            # to prevent incorrectly indexing into the dictionaries.
            # An out of bounds channel number can indicate invalid, corrupt,
            # or misaligned datagram or a reverse byte order binary data file.
            # Log warning and continue to try and process the rest of the file.
            if channel < 1 or channel > transducer_count:
                log.warn("Invalid channel: %s for transducer count: %s. "
                         "Possible file corruption or format incompatibility.", channel, transducer_count)

                # Need current position in file to increment for next regex search offset
                position = self._stream_handle.tell()

                # Read the next block for regex search
                raw = self._stream_handle.read(BLOCK_SIZE)
                continue

            # Convert high and low bytes to internal time
            internal_time = (sample_data['high_date_time'][0] << 32) + sample_data['low_date_time'][0]
            # Note: Strictly sequential time tags are not guaranteed.
            trans_array_time[channel].append(internal_time)

            # Gather metadata once per transducer channel number
            if not trans_array[channel]:
                file_name = self.output_file_path + '/' + outfile + '_' + \
                            str(int(sample_data['frequency'])/1000) + 'k.png'

                first_ping_metadata[ZplscBParticleKey.FILE_TIME] = file_time
                first_ping_metadata[ZplscBParticleKey.FILE_NAME].append(file_name)
                first_ping_metadata[ZplscBParticleKey.CHANNEL].append(channel)
                first_ping_metadata[ZplscBParticleKey.TRANSDUCER_DEPTH].append(sample_data['transducer_depth'][0])
                first_ping_metadata[ZplscBParticleKey.FREQUENCY].append(sample_data['frequency'][0])
                first_ping_metadata[ZplscBParticleKey.TRANSMIT_POWER].append(sample_data['transmit_power'][0])
                first_ping_metadata[ZplscBParticleKey.PULSE_LENGTH].append(sample_data['pulse_length'][0])
                first_ping_metadata[ZplscBParticleKey.BANDWIDTH].append(sample_data['bandwidth'][0])
                first_ping_metadata[ZplscBParticleKey.SAMPLE_INTERVAL].append(sample_data['sample_interval'][0])
                first_ping_metadata[ZplscBParticleKey.SOUND_VELOCITY].append(sample_data['sound_velocity'][0])
                first_ping_metadata[ZplscBParticleKey.ABSORPTION_COEF].append(sample_data['absorption_coefficient'][0])
                first_ping_metadata[ZplscBParticleKey.TEMPERATURE].append(sample_data['temperature'][0])

                # Make only one particle for the first ping series containing data for all channels
                if channel == config_header['transducer_count']:
                    # Convert from Windows time to NTP time.
                    time = datetime(1601, 1, 1) + timedelta(microseconds=internal_time/10.0)
                    year, month, day, hour, minute, sec = time.utctimetuple()[:6]
                    unix_time = calendar.timegm((year, month, day, hour, minute, sec+(time.microsecond/1e6)))
                    time_stamp = ntplib.system_to_ntp_time(unix_time)

                    # Extract a particle and append it to the record buffer
                    # Note: numpy unpacked values still need to be encoded
                    particle = self._extract_sample(ZplscBInstrumentDataParticle, None,
                                                    first_ping_metadata,
                                                    time_stamp)
                    log.debug('Parsed particle: %s', particle.generate_dict())
                    self._record_buffer.append(particle)

                # Extract various calibration parameters used for generating echogram plot
                # This data doesn't change so extract it once per channel
                td_f[channel] = sample_data['frequency'][0]
                td_dR[channel] = sample_data['sound_velocity'][0] * sample_data['sample_interval'][0] / 2

            count = sample_data['count'][0]

            # Extract array of power data
            power_dtype = np.dtype([('power_data', '<i2')])     # 2 byte int (short)
            power_data = np.fromfile(self._stream_handle, dtype=power_dtype, count=count)

            # Decompress power data to dB
            trans_array[channel].append(power_data['power_data'] * 10. * np.log10(2) / 256.)

            # Read the athwartship and alongship angle measurements; the values
            # are unused here, but the read advances the file position past them
            if sample_data['mode'][0] > 1:
                angle_dtype = np.dtype([('athwart', '<i1'), ('along', '<i1')])     # 1 byte ints
                angle_data = np.fromfile(self._stream_handle, dtype=angle_dtype, count=count)

            # Read and compare length1 (from beginning of datagram) to length2
            # (from the end of datagram). A mismatch can indicate an invalid, corrupt,
            # or misaligned datagram or a reverse byte order binary data file.
            # Log warning and continue to try and process the rest of the file.
            len_dtype = np.dtype([('length2', '<i4')])     # 4 byte int (long)
            length2_data = np.fromfile(self._stream_handle, dtype=len_dtype, count=1)
            if not (sample_data['length1'][0] == length2_data['length2'][0]):
                log.warn("Mismatching beginning and end length values in sample datagram: length1"
                         ": %s, length2: %s. Possible file corruption or format incompatibility."
                         , sample_data['length1'][0], length2_data['length2'][0])

            # Need current position in file to increment for next regex search offset
            position = self._stream_handle.tell()

            # Read the next block for regex search
            raw = self._stream_handle.read(BLOCK_SIZE)

        # The driver spends most of its time plotting; this takes longer with
        # more transducers, so split the work across separate processes
        processes = []
        for channel in td_f.iterkeys():
            try:
                process = Process(target=self.generate_echogram_plot,
                                  args=(trans_array_time[channel], trans_array[channel],
                                        td_f[channel], td_dR[channel], channel,
                                        first_ping_metadata[ZplscBParticleKey.FILE_NAME][channel-1]))
                process.start()
                processes.append(process)

            except Exception as e:
                log.error("Error: Unable to start process: %s", e)
Example #19
    def recov_exception_callback(self, message):
        log.warn(message)
        self._exception_callback(RecoverableSampleException(message))
    def _build_parsed_values(self):
        """
        Take the velocity data sample and parse it into values with appropriate tags.
        @throws SampleException If there is a problem with sample creation

        typedef struct {
            unsigned char cSync; // sync = 0xa5
            unsigned char cId; // identification (0x01=normal, 0x80=diag)
            unsigned short hSize; // size of structure (words)
            PdClock clock; // date and time
            short hError; // error code:
            unsigned short hAnaIn1; // analog input 1
            unsigned short hBattery; // battery voltage (0.1 V)
            union {
                unsigned short hSoundSpeed; // speed of sound (0.1 m/s)
                unsigned short hAnaIn2; // analog input 2
            } u;
            short hHeading; // compass heading (0.1 deg)
            short hPitch; // compass pitch (0.1 deg)
            short hRoll; // compass roll (0.1 deg)
            unsigned char cPressureMSB; // pressure MSB
            char cStatus; // status:
            unsigned short hPressureLSW; // pressure LSW
            short hTemperature; // temperature (0.01 deg C)
            short hVel[3]; // velocity
            unsigned char cAmp[3]; // amplitude
            char cFill;
            short hChecksum; // checksum
        } PdMeas;
        """
        try:
            unpack_format = (
                ('sync',            '<4s'),  # cSync, cId, hSize
                ('timestamp',       '6s'),   # PdClock
                ('error',           'H'),    # defined as signed short, but represents bitmap, using unsigned
                ('analog1',         'H'),
                ('battery_voltage', 'H'),
                ('sound_speed',     'H'),
                ('heading',         'h'),
                ('pitch',           'h'),
                ('roll',            'h'),
                ('pressure_msb',    'B'),
                ('status',          'B'),    # defined as char, but represents bitmap, using unsigned
                ('pressure_lsw',    'H'),
                ('temperature',     'h'),
                ('velocity_beam1',  'h'),
                ('velocity_beam2',  'h'),
                ('velocity_beam3',  'h'),
                ('amplitude_beam1', 'B'),
                ('amplitude_beam2', 'B'),
                ('amplitude_beam3', 'B'),
            )

            data = unpack_from_format(self._data_particle_type, unpack_format, self.raw_data)

            if not validate_checksum('<20H', self.raw_data):
                log.warn("Failed checksum in %s from instrument (%r)", self._data_particle_type, self.raw_data)
                self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED

            timestamp = common.convert_time(data.timestamp)
            self.set_internal_timestamp((timestamp-datetime(1900, 1, 1)).total_seconds())

            pressure = data.pressure_msb * 0x10000 + data.pressure_lsw

        except Exception as e:
            log.error('Error creating particle velpt_velocity_data, raw data: %r', self.raw_data)
            raise SampleException(e)

        key = AquadoppVelocityDataParticleKey

        result = [{VID: key.TIMESTAMP, VAL: str(timestamp)},
                  {VID: key.ERROR, VAL: data.error},
                  {VID: key.ANALOG1, VAL: data.analog1},
                  {VID: key.BATTERY_VOLTAGE, VAL: data.battery_voltage},
                  {VID: key.SOUND_SPEED_ANALOG2, VAL: data.sound_speed},
                  {VID: key.HEADING, VAL: data.heading},
                  {VID: key.PITCH, VAL: data.pitch},
                  {VID: key.ROLL, VAL: data.roll},
                  {VID: key.STATUS, VAL: data.status},
                  {VID: key.PRESSURE, VAL: pressure},
                  {VID: key.TEMPERATURE, VAL: data.temperature},
                  {VID: key.VELOCITY_BEAM1, VAL: data.velocity_beam1},
                  {VID: key.VELOCITY_BEAM2, VAL: data.velocity_beam2},
                  {VID: key.VELOCITY_BEAM3, VAL: data.velocity_beam3},
                  {VID: key.AMPLITUDE_BEAM1, VAL: data.amplitude_beam1},
                  {VID: key.AMPLITUDE_BEAM2, VAL: data.amplitude_beam2},
                  {VID: key.AMPLITUDE_BEAM3, VAL: data.amplitude_beam3}]

        return result
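
unpack_from_format is imported from the driver's common code. A sketch of the idea, assuming it joins the struct codes, unpacks, and returns a namedtuple keyed by the field names:

import struct
from collections import namedtuple

def unpack_from_format(name, format_list, data):
    # format_list pairs field names with struct codes, e.g. ('heading', 'h')
    format_string = ''.join([item[1] for item in format_list])
    field_names = [item[0] for item in format_list]
    values = struct.unpack_from(format_string, data)
    return namedtuple(name, field_names)(*values)
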
Example #22
    def parse_file(self):
        """
        Parse the *.raw file.
        """

        # Extract the file time from the file name
        input_file_name = self._stream_handle.name
        (filepath, filename) = os.path.split(input_file_name)

        # rpartition('.') yields (base name, '.', 'raw'); keep the base name
        outfile = filename.rpartition('.')[0]

        match = FILE_NAME_MATCHER.match(input_file_name)
        if match:
            file_time = match.group('Date') + match.group('Time')
            rel_file_path = os.path.join(*match.groups()[1:-1])
            full_file_path = os.path.join(self.output_file_path, rel_file_path)
            if not os.path.exists(full_file_path):
                os.makedirs(full_file_path)
        else:
            file_time = ""
            rel_file_path = ""
            # Files retrieved from the instrument should always match the timestamp naming convention
            self.recov_exception_callback("Unable to extract file time from input file name: %s. "
                                          "Expected format *-DYYYYmmdd-THHMMSS.raw" % input_file_name)

        # Read binary file a block at a time
        raw = self._stream_handle.read(BLOCK_SIZE)

        # Set starting byte
        byte_cnt = 0

        # Read the configuration datagram, output at the beginning of the file
        length1, = unpack('<l', raw[byte_cnt:byte_cnt+LENGTH_SIZE])
        byte_cnt += LENGTH_SIZE

        # Configuration datagram header
        datagram_header = read_datagram_header(raw[byte_cnt:byte_cnt+DATAGRAM_HEADER_SIZE])
        byte_cnt += DATAGRAM_HEADER_SIZE

        # Configuration: header
        config_header = read_config_header(raw[byte_cnt:byte_cnt+CONFIG_HEADER_SIZE])
        byte_cnt += CONFIG_HEADER_SIZE

        transducer_count = config_header['transducer_count']

        if GET_CONFIG_TRANSDUCER:
            td_gain = {}
            td_gain_table = {}
            td_pulse_length_table = {}
            td_phi_equiv_beam_angle = {}

            # Configuration: transducers (1 to 7 max)
            for i in xrange(1, transducer_count+1):
                config_transducer = read_config_transducer(
                    raw[byte_cnt:byte_cnt+CONFIG_TRANSDUCER_SIZE])

                # Example data that one might need for various calculations later on
                td_gain[i] = config_transducer['gain']
                td_gain_table[i] = config_transducer['gain_table']
                td_pulse_length_table[i] = config_transducer['pulse_length_table']
                td_phi_equiv_beam_angle[i] = config_transducer['equiv_beam_angle']

        byte_cnt += CONFIG_TRANSDUCER_SIZE * transducer_count

        # Compare length1 (from beginning of datagram) to length2 (from the end of datagram) to
        # the actual number of bytes read. A mismatch can indicate an invalid, corrupt, misaligned,
        # or missing configuration datagram or a reverse byte order binary data file.
        # A bad/missing configuration datagram header is a significant error.
        length2, = unpack('<l', raw[byte_cnt:byte_cnt+LENGTH_SIZE])
        if not (length1 == length2 == byte_cnt-LENGTH_SIZE):
            raise ValueError(
                "Length of configuration datagram and number of bytes read do not match: length1: %s"
                ", length2: %s, byte_cnt-LENGTH_SIZE: %s. Possible file corruption or format incompatibility." %
                (length1, length2, byte_cnt-LENGTH_SIZE))

        first_ping_metadata = defaultdict(list)
        trans_keys = range(1, transducer_count+1)
        trans_array = dict((key, []) for key in trans_keys)         # transducer power data
        trans_array_time = dict((key, []) for key in trans_keys)    # transducer time data
        td_f = dict.fromkeys(trans_keys)                            # transducer frequency
        td_dR = dict.fromkeys(trans_keys)                           # transducer depth bin size (range per sample)

        position = 0

        while raw:
            # We only care about the Sample datagrams; skip over all the others
            match = SAMPLE_MATCHER.search(raw)

            if not match:
                # Read in the next block w/ a token sized overlap
                self._stream_handle.seek(self._stream_handle.tell() - 4)
                position = self._stream_handle.tell()
                raw = self._stream_handle.read(BLOCK_SIZE)

                # The last 4 bytes are just the length2 of the last datagram
                if len(raw) <= 4:
                    break

                # Search the freshly read block; falling through here would
                # dereference the None match below
                continue

            # Offset by size of length value
            match_start = match.start() - LENGTH_SIZE

            # Seek to the position of the length data before the token to read into numpy array
            self._stream_handle.seek(position + match_start)

            # Read and unpack the Sample Datagram into numpy array
            sample_data = np.fromfile(self._stream_handle, dtype=sample_dtype, count=1)
            channel = sample_data['channel_number'][0]

            # Check for a valid channel number that is within the number of transducers config
            # to prevent incorrectly indexing into the dictionaries.
            # An out of bounds channel number can indicate invalid, corrupt,
            # or misaligned datagram or a reverse byte order binary data file.
            # Log warning and continue to try and process the rest of the file.
            if channel < 1 or channel > transducer_count:
                log.warn("Invalid channel: %s for transducer count: %s. "
                         "Possible file corruption or format incompatibility.", channel, transducer_count)

                # Need current position in file to increment for next regex search offset
                position = self._stream_handle.tell()

                # Read the next block for regex search
                raw = self._stream_handle.read(BLOCK_SIZE)
                continue

            # Convert high and low bytes to internal time
            internal_time = (sample_data['high_date_time'][0] << 32) + sample_data['low_date_time'][0]
            # Note: Strictly sequential time tags are not guaranteed.
            trans_array_time[channel].append(internal_time)

            # Gather metadata once per transducer channel number
            if not trans_array[channel]:
                file_path = os.path.join(
                    rel_file_path, outfile + '_' + str(int(sample_data['frequency'])/1000) + 'k.png')

                first_ping_metadata[ZplscBParticleKey.FILE_TIME] = file_time
                first_ping_metadata[ZplscBParticleKey.FILE_PATH].append(file_path)
                first_ping_metadata[ZplscBParticleKey.CHANNEL].append(channel)
                first_ping_metadata[ZplscBParticleKey.TRANSDUCER_DEPTH].append(sample_data['transducer_depth'][0])
                first_ping_metadata[ZplscBParticleKey.FREQUENCY].append(sample_data['frequency'][0])
                first_ping_metadata[ZplscBParticleKey.TRANSMIT_POWER].append(sample_data['transmit_power'][0])
                first_ping_metadata[ZplscBParticleKey.PULSE_LENGTH].append(sample_data['pulse_length'][0])
                first_ping_metadata[ZplscBParticleKey.BANDWIDTH].append(sample_data['bandwidth'][0])
                first_ping_metadata[ZplscBParticleKey.SAMPLE_INTERVAL].append(sample_data['sample_interval'][0])
                first_ping_metadata[ZplscBParticleKey.SOUND_VELOCITY].append(sample_data['sound_velocity'][0])
                first_ping_metadata[ZplscBParticleKey.ABSORPTION_COEF].append(sample_data['absorption_coefficient'][0])
                first_ping_metadata[ZplscBParticleKey.TEMPERATURE].append(sample_data['temperature'][0])

                # Make only one particle for the first ping series containing data for all channels
                if channel == config_header['transducer_count']:
                    # Convert from Windows time to NTP time.
                    time = datetime(1601, 1, 1) + timedelta(microseconds=internal_time/10.0)
                    year, month, day, hour, minute, sec = time.utctimetuple()[:6]
                    unix_time = calendar.timegm((year, month, day, hour, minute, sec+(time.microsecond/1e6)))
                    time_stamp = ntplib.system_to_ntp_time(unix_time)

                    # Extract a particle and append it to the record buffer
                    # Note: numpy unpacked values still need to be encoded
                    particle = self._extract_sample(ZplscBInstrumentDataParticle, None,
                                                    first_ping_metadata,
                                                    time_stamp)
                    log.debug('Parsed particle: %s', particle.generate_dict())
                    self._record_buffer.append(particle)

                # Extract various calibration parameters used for generating echogram plot
                # This data doesn't change so extract it once per channel
                td_f[channel] = sample_data['frequency'][0]
                td_dR[channel] = sample_data['sound_velocity'][0] * sample_data['sample_interval'][0] / 2

            count = sample_data['count'][0]

            # Extract array of power data
            power_dtype = np.dtype([('power_data', '<i2')])     # 2 byte int (short)
            power_data = np.fromfile(self._stream_handle, dtype=power_dtype, count=count)

            # Decompress power data to dB
            trans_array[channel].append(power_data['power_data'] * 10. * np.log10(2) / 256.)

            # Read the athwartship and alongship angle measurements; the values
            # are unused here, but the read advances the file position past them
            if sample_data['mode'][0] > 1:
                angle_dtype = np.dtype([('athwart', '<i1'), ('along', '<i1')])     # 1 byte ints
                angle_data = np.fromfile(self._stream_handle, dtype=angle_dtype, count=count)

            # Read and compare length1 (from beginning of datagram) to length2
            # (from the end of datagram). A mismatch can indicate an invalid, corrupt,
            # or misaligned datagram or a reverse byte order binary data file.
            # Log warning and continue to try and process the rest of the file.
            len_dtype = np.dtype([('length2', '<i4')])     # 4 byte int (long)
            length2_data = np.fromfile(self._stream_handle, dtype=len_dtype, count=1)
            if not (sample_data['length1'][0] == length2_data['length2'][0]):
                log.warn("Mismatching beginning and end length values in sample datagram: length1"
                         ": %s, length2: %s. Possible file corruption or format incompatibility."
                         , sample_data['length1'][0], length2_data['length2'][0])

            # Need current position in file to increment for next regex search offset
            position = self._stream_handle.tell()

            # Read the next block for regex search
            raw = self._stream_handle.read(BLOCK_SIZE)

        # The driver spends most of its time plotting; this takes longer with
        # more transducers, so split the work across separate processes
        processes = []
        for channel in td_f.iterkeys():
            try:
                process = Process(target=self.generate_echogram_plot,
                                  args=(trans_array_time[channel], trans_array[channel],
                                        td_f[channel], td_dR[channel], channel,
                                        os.path.join(
                                            self.output_file_path,
                                            first_ping_metadata[ZplscBParticleKey.FILE_PATH][channel-1])))
                process.start()
                processes.append(process)

            except Exception as e:
                log.error("Error: Unable to start process: %s", e)