Example #1
    def _are_timestamps_equal(received_ts, expected_ts):
        """
        Compare the timestamps
        :param received_ts: received timestamp
        :param expected_ts: expected timestamp from yml (can be string or ntp float)
        :return: True if the timestamps match (or both are absent), False otherwise
        """
        # Verify the timestamp, required to be in the particle
        if received_ts and expected_ts:
            # got timestamp in yml and received particle
            if isinstance(expected_ts, str):
                expected = string_to_ntp_date_time(expected_ts)
            else:
                # if not a string, timestamp should already be in ntp
                expected = expected_ts

            if abs(received_ts - expected) > FLOAT_ALLOWED_DIFF:
                log.error("expected internal_timestamp mismatch, %.9f != %.9f" % (expected, received_ts))
                return False

        elif expected_ts and not received_ts:
            log.error("expected internal_timestamp, missing from received particle")
            return False

        elif received_ts and not expected_ts:
            log.error("internal_timestamp was received but is missing from .yml")
            return False

        return True
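A minimal usage sketch (hypothetical values; assumes the function is reachable as shown and that FLOAT_ALLOWED_DIFF is a small tolerance such as 1e-5):

    # yml supplied a string timestamp, the particle carried an NTP float;
    # both present and equal within tolerance -> True
    ok = _are_timestamps_equal(3155673600.0, "2000-01-01T00:00:00.00Z")

    # a timestamp expected from the yml but missing from the particle -> False
    ok = _are_timestamps_equal(None, "2000-01-01T00:00:00.00Z")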
Example #2
def generate_particle_timestamp(time_2000):
    """
    This function calculates and returns a timestamp in epoch 1900
    based on an ASCII hex time in epoch 2000.
    Parameter:
      time_2000 - ASCII hex string of seconds since Jan 1, 2000
    Returns:
      number of seconds since Jan 1, 1900
    """
    return int(time_2000, 16) + string_to_ntp_date_time("2000-01-01T00:00:00.00Z")
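A quick sanity check of the epoch shift, assuming string_to_ntp_date_time returns NTP seconds (seconds since Jan 1, 1900): the hex string "0" is zero seconds past the 2000 epoch, so the result is exactly the 1900-to-2000 offset of 36524 days.

    # 36524 days * 86400 s/day = 3155673600 s between the 1900 and 2000 epochs
    assert generate_particle_timestamp("0") == 3155673600.0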
Example #3
    def _build_parsed_values(self):
        """
        Take the binary sample data and turn it into a particle
        with the appropriate tag.
        Throws SampleException if there is a problem with sample creation
        """

        # match the data inside the wrapper
        match = DATA_MATCHER.match(self.raw_data)
        if not match:
            raise SampleException("AdcpsParserDataParticle: No regex match of \
                                  parsed sample data [%s]", self.raw_data)
        try:
            fields = struct.unpack('<HHIBBBdHhhhIbBB', match.group(0)[0:34])
            packet_id = fields[0]
            num_bytes = fields[1]
            if len(match.group(0)) - 2 != num_bytes:
                raise ValueError('num bytes %d does not match data length %d'
                                 % (num_bytes, len(match.group(0))))
            log.debug('unpacked fields %s', fields)
            nbins = fields[14]
            if len(match.group(0)) < (36 + (nbins * 8)):
                raise ValueError('Number of bins %d does not fit in data length %d'
                                 % (nbins, len(match.group(0))))
            date_fields = struct.unpack('HBBBBBB', match.group(0)[11:19])
            date_str = self.unpack_date(match.group(0)[11:19])

            log.debug('unpacked date string %s', date_str)
            sec_since_1900 = string_to_ntp_date_time(date_str)

            # string_to_ntp_date_time already returns seconds since the NTP
            # epoch (Jan 1, 1900), so no further epoch conversion is needed

            # build a big-endian format string with one short per bin
            struct_format = '>' + 'h' * nbins

            bin_len = nbins * 2
            vel_east = struct.unpack(struct_format, match.group(0)[34:34 + bin_len])
            vel_north = struct.unpack(struct_format, match.group(0)[34 + bin_len:34 + bin_len * 2])
            vel_up = struct.unpack(struct_format, match.group(0)[34 + bin_len * 2:34 + bin_len * 3])
            vel_err = struct.unpack(struct_format, match.group(0)[34 + bin_len * 3:34 + bin_len * 4])

            checksum = struct.unpack('<h', match.group(0)[34 + bin_len * 4:36 + bin_len * 4])

            # heading/pitch/roll/temp units of cdegrees (= .01 deg)
            heading = fields[7]
            pitch = fields[8]
            roll = fields[9]
            temp = fields[10]
            # pressure in units of daPa (= .01 kPa)
            pressure = fields[11]

        except (ValueError, TypeError, IndexError) as ex:
            raise SampleException("Error (%s) while decoding parameters in data: [%s]"
                                  % (ex, match.group(0)))

        result = [{DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.PD12_PACKET_ID,
                   DataParticleKey.VALUE: packet_id},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.NUM_BYTES,
                   DataParticleKey.VALUE: num_bytes},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.ENSEMBLE_NUMBER,
                   DataParticleKey.VALUE: fields[2]},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.UNIT_ID,
                   DataParticleKey.VALUE: fields[3]},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.FIRMWARE_VERSION,
                   DataParticleKey.VALUE: fields[4]},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.FIRMWARE_REVISION,
                   DataParticleKey.VALUE: fields[5]},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.REAL_TIME_CLOCK,
                   DataParticleKey.VALUE: list(date_fields)},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.ENSEMBLE_START_TIME,
                   DataParticleKey.VALUE: sec_since_1900},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.HEADING,
                   DataParticleKey.VALUE: heading},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.PITCH,
                   DataParticleKey.VALUE: pitch},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.ROLL,
                   DataParticleKey.VALUE: roll},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.TEMPERATURE,
                   DataParticleKey.VALUE: temp},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.PRESSURE,
                   DataParticleKey.VALUE: pressure},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.VELOCITY_PO_ERROR_FLAG,
                   DataParticleKey.VALUE: fields[12] & 1},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.VELOCITY_PO_UP_FLAG,
                   DataParticleKey.VALUE: (fields[12] & 2) >> 1},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.VELOCITY_PO_NORTH_FLAG,
                   DataParticleKey.VALUE: (fields[12] & 4) >> 2},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.VELOCITY_PO_EAST_FLAG,
                   DataParticleKey.VALUE: (fields[12] & 8) >> 3},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.SUBSAMPLING_PARAMETER,
                   DataParticleKey.VALUE: (fields[12] & 240) >> 4},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.START_BIN,
                   DataParticleKey.VALUE: fields[13]},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.NUM_BINS,
                   DataParticleKey.VALUE: nbins},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.WATER_VELOCITY_EAST,
                   DataParticleKey.VALUE: list(vel_east)},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.WATER_VELOCITY_NORTH,
                   DataParticleKey.VALUE: list(vel_north)},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.WATER_VELOCITY_UP,
                   DataParticleKey.VALUE: list(vel_up)},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.ERROR_VELOCITY,
                   DataParticleKey.VALUE: list(vel_err)},
                  {DataParticleKey.VALUE_ID: AdcpsParserDataParticleKey.CHECKSUM,
                   DataParticleKey.VALUE: checksum[0]}]

        log.debug('AdcpsParserDataParticle: particle=%s', result)
        return result
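The [0:34] header slice matches the fixed part of the packet; struct.calcsize confirms that the 15-field little-endian format string consumes exactly those bytes:

    import struct

    # 2+2+4+1+1+1+8+2+2+2+2+4+1+1+1 = 34 bytes, matching match.group(0)[0:34]
    assert struct.calcsize('<HHIBBBdHhhhIbBB') == 34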