Example #1
    def __init__(self,
                 raw_data,
                 port_timestamp=None,
                 internal_timestamp=None,
                 preferred_timestamp=DataParticleKey.PORT_TIMESTAMP,
                 quality_flag=DataParticleValue.OK,
                 new_sequence=None):

        super(AdcpsJlnSioDataParticle,
              self).__init__(raw_data, port_timestamp, internal_timestamp,
                             preferred_timestamp, quality_flag, new_sequence)

        self._data_match = DATA_MATCHER.match(self.raw_data[8:])

        if not self._data_match:

            raise RecoverableSampleException(
                "AdcpsJlnSioParserDataParticle: No regex match of "
                "parsed sample data [%s]" % self.raw_data[8:])

        date_str = self.unpack_date(self._data_match.group(0)[11:19])

        unix_time = utilities.zulu_timestamp_to_utc_time(date_str)

        self.set_internal_timestamp(unix_time=unix_time)
Example #2
    def __init__(self, raw_data,
                 port_timestamp=None,
                 internal_timestamp=None,
                 preferred_timestamp=DataParticleKey.PORT_TIMESTAMP,
                 quality_flag=DataParticleValue.OK,
                 new_sequence=None):

        super(AdcpsJlnSioDataParticle, self).__init__(raw_data,
                                                      port_timestamp,
                                                      internal_timestamp,
                                                      preferred_timestamp,
                                                      quality_flag,
                                                      new_sequence)

        self._data_match = DATA_MATCHER.match(self.raw_data[8:])

        if not self._data_match:

            raise RecoverableSampleException("AdcpsJlnSioParserDataParticle: No regex match of "
                                             "parsed sample data [%s]" % self.raw_data[8:])

        date_str = self.unpack_date(self._data_match.group(0)[11:19])

        unix_time = utilities.zulu_timestamp_to_utc_time(date_str)

        self.set_internal_timestamp(unix_time=unix_time)
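Examples #1 and #2 above are the same constructor with different line wrapping: both defer to the base DataParticle initializer, match the payload after the first 8 bytes of the raw record, and derive the internal timestamp from the embedded date string. A minimal usage sketch follows; the names `raw_block` and `rx_time` are hypothetical inputs, and the imports of the classes shown are assumed to come from the surrounding module.

    # Hypothetical usage sketch -- raw_block and rx_time are assumed inputs,
    # not part of the examples above; imports of the names used are assumed.
    try:
        particle = AdcpsJlnSioDataParticle(raw_block, port_timestamp=rx_time)
    except RecoverableSampleException as e:
        # raised when raw_block[8:] does not match DATA_MATCHER; recoverable,
        # so a caller would typically log it and move on to the next record
        log.warning("Skipping unparseable ADCPS record: %s", e)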
Example #3
    def parse_chunks(self):
        """
        Parse out any pending data chunks in the chunker. If
        it is a valid data piece, build a particle, update the position and
        timestamp. Go until the chunker has no more valid data.
        @retval a list of tuples with sample particles encountered in this
            parsing, plus the state. An empty list if nothing was parsed.
        """
        result_particles = []

        # header gets read in initialization, but need to send it back from parse_chunks
        if self._saved_header:
            result_particles.append(self._saved_header)
            self._saved_header = None

        (nd_timestamp, non_data, non_start,
         non_end) = self._chunker.get_next_non_data_with_index(clean=False)
        (timestamp, chunk, start,
         end) = self._chunker.get_next_data_with_index(clean=True)
        self.handle_non_data(non_data, non_end, start)

        while chunk is not None:
            match_rx_failure = RX_FAILURE_MATCHER.match(chunk)
            # ignore marked failure records
            if not match_rx_failure:
                data_match = DATA_MATCHER_B.match(chunk)
                if data_match:

                    if len(data_match.group(
                            2)) >= MIN_DATA_BYTES and self.compare_checksum(
                                data_match.group(2)):
                        # pull out the date string from the data
                        date_str = self._instrument_class.unpack_date(
                            data_match.group(2)[11:19])

                        unix_time = utilities.zulu_timestamp_to_utc_time(
                            date_str)

                        self._timestamp = ntplib.system_to_ntp_time(unix_time)

                        # round to ensure the timestamps match
                        self._timestamp = round(self._timestamp * 100) / 100

                        # particle-ize the data block received, return the record
                        # set timestamp here, converted to ntp64. pull out timestamp for this record
                        sample = self._extract_sample(self._instrument_class,
                                                      DATA_MATCHER_B, chunk,
                                                      self._timestamp)

                        if sample:
                            # create particle
                            log.trace(
                                "Extracting sample chunk %r with read_state: %r",
                                chunk, self._read_state)
                            self._increment_state(len(chunk))
                            result_particles.append(
                                (sample, copy.copy(self._read_state)))
                    else:
                        if len(data_match.group(2)) < MIN_DATA_BYTES:
                            log.debug("Found record with not enough bytes %r",
                                      data_match.group(0))
                            self._exception_callback(
                                SampleException(
                                    "Found record with not enough bytes %r" %
                                    data_match.group(0)))
                        else:
                            log.debug(
                                "Found record whose checksum doesn't match 0x%r",
                                data_match.group(0))
                            self._exception_callback(
                                SampleException(
                                    "Found record whose checksum doesn't match %r"
                                    % data_match.group(0)))
            else:
                log.info("Found RecieveFailure record, ignoring")

            (nd_timestamp, non_data, non_start,
             non_end) = self._chunker.get_next_non_data_with_index(clean=False)
            (timestamp, chunk, start,
             end) = self._chunker.get_next_data_with_index(clean=True)
            self.handle_non_data(non_data, non_end, start)

        return result_particles
Example #4
    def parse_chunks(self):
        """
        Parse out any pending data chunks in the chunker. If
        it is a valid data piece, build a particle, update the position and
        timestamp. Go until the chunker has no more valid data.
        @retval a list of tuples with sample particles encountered in this
            parsing, plus the state. An empty list if nothing was parsed.
        """
        result_particles = []

        # header gets read in initialization, but need to send it back from parse_chunks
        if self._saved_header:
            result_particles.append(self._saved_header)
            self._saved_header = None

        (nd_timestamp, non_data, non_start, non_end) = self._chunker.get_next_non_data_with_index(clean=False)
        (timestamp, chunk, start, end) = self._chunker.get_next_data_with_index(clean=True)
        self.handle_non_data(non_data, non_end, start)

        while chunk is not None:
            match_rx_failure = RX_FAILURE_MATCHER.match(chunk)
            # ignore marked failure records
            if not match_rx_failure:
                data_match = DATA_MATCHER_B.match(chunk)
                if data_match:
                    if len(data_match.group(2)) >= MIN_DATA_BYTES and self.compare_checksum(data_match.group(2)):
                        # pull out the date string from the data
                        date_str = self._instrument_class.unpack_date(data_match.group(2)[11:19])

                        unix_time = utilities.zulu_timestamp_to_utc_time(date_str)

                        self._timestamp = ntplib.system_to_ntp_time(unix_time)

                        # round to ensure the timestamps match
                        self._timestamp = round(self._timestamp*100)/100

                        # particle-ize the data block received, return the record
                        # set timestamp here, converted to ntp64. pull out timestamp for this record
                        sample = self._extract_sample(self._instrument_class, DATA_MATCHER_B,
                                                      chunk, self._timestamp)

                        if sample:
                            # create particle
                            log.trace("Extracting sample chunk %s with read_state: %s", chunk, self._read_state)
                            self._increment_state(len(chunk))
                            result_particles.append((sample, copy.copy(self._read_state)))
                    else:
                        if len(data_match.group(2)) < MIN_DATA_BYTES:
                            log.debug("Found record with not enough bytes 0x%s",
                                      binascii.hexlify(data_match.group(0)))
                            self._exception_callback(SampleException("Found record with not enough bytes 0x%s"
                                                                     % binascii.hexlify(data_match.group(0))))
                        else:
                            log.debug("Found record whose checksum doesn't match 0x%s",
                                      binascii.hexlify(data_match.group(0)))
                            self._exception_callback(SampleException("Found record whose checksum doesn't match 0x%s"
                                                                     % binascii.hexlify(data_match.group(0))))
            else:
                log.info("Found RecieveFailure record, ignoring")

            (nd_timestamp, non_data, non_start, non_end) = self._chunker.get_next_non_data_with_index(clean=False)
            (timestamp, chunk, start, end) = self._chunker.get_next_data_with_index(clean=True)
            self.handle_non_data(non_data, non_end, start)

        return result_particles
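Examples #3 and #4 are the same chunk-processing loop, again differing only in wrapping: each iteration skips ReceiveFailure records, validates record length and checksum, converts the embedded date string to NTP time, and collects (particle, read_state) tuples. A minimal driver sketch follows; `parser`, `file_data`, and `publish` are hypothetical, and the chunker's add_chunk(data, timestamp) call is an assumption about the surrounding framework, not something shown in the examples.

    import time
    import ntplib

    # Hypothetical driver -- parser, file_data and publish are assumptions,
    # as is the chunker's add_chunk(data, timestamp) interface.
    parser._chunker.add_chunk(file_data, ntplib.system_to_ntp_time(time.time()))
    for sample, read_state in parser.parse_chunks():
        # each tuple pairs a particle with the parser position that produced it
        publish(sample)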