Example #1
    def _build_parsed_values(self, match):
        '''
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        '''
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)

        self.data.date_time_string.append(str(match.group(1)))
        self.data.serial_number.append(int(match.group(2)))
        self.data.timer.append(float(match.group(3)))

        # Unpack the binary data packet
        (delay, ch1, ch2, ch3, ch4, ch5, ch6, ch7,
         Vin, Va, temp, count, check) = SPKIR.unpack(match.group(4))

        # Assign the remaining SPKIR data to the named parameters
        self.data.sample_delay.append(delay)
        self.data.raw_channels.append([ch1, ch2, ch3, ch4, ch5, ch6, ch7])
        self.data.input_voltage.append(Vin)
        self.data.analog_rail_voltage.append(Va)
        self.data.frame_counter.append(count)
        self.data.internal_temperature.append(temp)
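
The SPKIR object used above is assumed to be a pre-compiled struct.Struct whose format matches the instrument's binary frame. A minimal sketch of what that definition could look like (the format string and field widths below are assumptions, not the parser's actual definition, and would need to be checked against the frame specification):

import struct

# Hypothetical layout: a signed short sample delay, seven 4-byte unsigned
# channel counts, unsigned shorts for the input voltage, analog rail voltage
# and internal temperature, and single bytes for the frame counter and
# checksum, all big-endian (13 fields, matching the unpack above).
SPKIR = struct.Struct('>h7I3H2B')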
Example #2
    def _build_parsed_values(self, collect_time, process_time, sample):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string from the collection time to calculate an
        # epoch timestamp (seconds since 1970-01-01), using that value as the
        # preferred time record for the data
        epts = dcl_to_epoch(collect_time)
        self.data.time.append(epts)
        self.data.collect_date_time.append(collect_time)
        self.data.process_date_time.append(process_time)

        self.data.unique_id.append(int(sample[1:3], 16))
        self.data.record_length.append(int(sample[3:5], 16))
        self.data.record_type.append(int(sample[5:7], 16))
        self.data.record_time.append(int(sample[7:15], 16))

        cnt = 15  # set the counter for the light measurements
        light = []  # create empty list to hold the 14 light measurements
        for i in range(0, 14):
            indx = (i * 4) + cnt
            light.append(int(sample[indx:indx + 4], 16))

        self.data.light_measurements.append(light)

        cnt = indx + 4  # reset the counter for the final parameters
        self.data.voltage_battery.append(int(sample[cnt:cnt + 4], 16))
        self.data.thermistor_raw.append(int(sample[cnt + 4:cnt + 8], 16))
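
Each field in the sample string is a fixed-width hexadecimal substring, so decoding is just slicing plus int(..., 16). A quick illustration with a made-up fragment (real records are much longer and carry the record time, light measurements, battery voltage and thermistor fields decoded above):

sample = '*7E2704'  # hypothetical start of a record
unique_id = int(sample[1:3], 16)       # '7E' -> 126
record_length = int(sample[3:5], 16)   # '27' -> 39
record_type = int(sample[5:7], 16)     # '04' -> 4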
Example #3
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining WAVSS data to the named parameters
        self.data.date_string.append(str(match.group(2)))
        self.data.time_string.append(str(match.group(3)))
        self.data.serial_number.append(str(match.group(4)))
        self.data.num_zero_crossings.append(int(match.group(5)))
        self.data.average_wave_height.append(float(match.group(6)))
        self.data.mean_spectral_period.append(float(match.group(7)))
        self.data.maximum_wave_height.append(float(match.group(8)))
        self.data.significant_wave_height.append(float(match.group(9)))
        self.data.significant_wave_period.append(float(match.group(10)))
        self.data.average_tenth_height.append(float(match.group(11)))
        self.data.average_tenth_period.append(float(match.group(12)))
        self.data.average_wave_period.append(float(match.group(13)))
        self.data.peak_period.append(float(match.group(14)))
        self.data.peak_period_read.append(float(match.group(15)))
        self.data.spectral_wave_height.append(float(match.group(16)))
        self.data.mean_wave_direction.append(float(match.group(17)))
        self.data.mean_directional_spread.append(float(match.group(18)))
Example #4
    def _build_parsed_values(self, match):
        '''
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        '''
        # Use the date_time_string to calculate an epoch timestamp (seconds
        # since 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining FDCHP data to the named parameters. The rest of
        # the data is in a (mostly) comma-separated string, so filter an
        # initial split on both commas and whitespace (the instrument's output
        # format is inconsistent).
        data = list(filter(None, re.split(r',|\s', match.group(2))))

        # index through the list of parameter names and assign the data
        for cnt, p in enumerate(_parameter_names_fdchp[1:]):
            if cnt == 2:
                # the status parameter is a 6 character hex value ...
                self.data[p].append(str(data[cnt]))
            else:
                # all other values are floats
                self.data[p].append(float(data[cnt]))
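
The combined comma/whitespace split used above can be checked in isolation; filtering out the empty strings produced by consecutive delimiters is what keeps the indexing by cnt aligned with _parameter_names_fdchp. A short sketch with a made-up fragment:

import re

line = '1.23, 4.56,  7.89 0A55FF, 0.12'  # hypothetical FDCHP-style fragment
data = list(filter(None, re.split(r',|\s', line)))
# data -> ['1.23', '4.56', '7.89', '0A55FF', '0.12']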
Example #5
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining CTD data to the named parameters
        self.data.temperature.append(float(match.group(2)))
        self.data.conductivity.append(float(match.group(3)))
        self.data.pressure.append(float(match.group(4)))

        if self.ctd_type == 1:
            self.data.ctd_date_time_string.append(str(match.group(5)))

        if self.ctd_type == 2:
            self.data.oxygen_concentration.append(float(match.group(5)))
            self.data.ctd_date_time_string.append(str(match.group(6)))

        if self.ctd_type == 3:
            self.data.raw_backscatter.append(int(match.group(5)))
            self.data.raw_chlorophyll.append(int(match.group(6)))
            self.data.raw_cdom.append(int(match.group(7)))
            self.data.ctd_date_time_string.append(str(match.group(8)))
Example #6
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining hydrogen sensor data to the named parameters
        self.data.hydrogen_concentration.append(float(match.group(2)))
Example #7
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds
        # since 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining PRESF data to the named parameters
        self.data.presf_date_time_string.append(str(match.group(2)))
        self.data.absolute_pressure.append(float(match.group(3)))
        self.data.pressure_temp.append(float(match.group(4)))
        self.data.seawater_temperature.append(float(match.group(5)))
Example #8
    def _build_parsed_values(self, match, spectra):
        '''
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        '''
        # Use the date_time_string to calculate an epoch timestamp (seconds
        # since 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.date_time_string.append(str(match.group(1)))

        # Assign the remaining NUTNR data to the named parameters
        self.data.measurement_type.append(str(match.group(2)))
        self.data.serial_number.append(int(match.group(3)))

        # the rest of the data is in a comma separated string, so...
        data = (match.group(4)).split(',')

        # data found in all frames
        self.data.date_string.append(str(data[0]))
        self.data.decimal_hours.append(float(data[1]))
        self.data.nitrate_concentration.append(float(data[2]))
        self.data.auxiliary_fit_1st.append(float(data[3]))
        self.data.auxiliary_fit_2nd.append(float(data[4]))
        self.data.auxiliary_fit_3rd.append(float(data[5]))
        self.data.rms_error.append(float(data[6]))

        # data found only in the full frames
        if spectra == 1:
            self.data.temperature_internal.append(float(data[7]))
            self.data.temperature_spectrometer.append(float(data[8]))
            self.data.temperature_lamp.append(float(data[9]))
            self.data.lamp_on_time.append(int(data[10]))
            self.data.humidity.append(float(data[11]))
            self.data.voltage_lamp.append(float(data[12]))
            self.data.voltage_analog.append(float(data[13]))
            self.data.voltage_main.append(float(data[14]))
            self.data.average_reference.append(float(data[15]))
            self.data.variance_reference.append(float(data[16]))
            self.data.seawater_dark.append(float(data[17]))
            self.data.spectal_average.append(float(data[18]))
            self.data.channel_measurements.append(list(map(int, data[19:])))
Example #9
    def _build_parsed_values(self, match):
        '''
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        '''
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining FLORT data to the named parameters
        self.data.flort_date_time_string.append(
            re.sub('\t', ' ', str(match.group(2))))
        self.data.measurement_wavelength_beta.append(int(match.group(3)))
        self.data.raw_signal_beta.append(int(match.group(4)))
        self.data.measurement_wavelength_chl.append(int(match.group(5)))
        self.data.raw_signal_chl.append(int(match.group(6)))
        self.data.measurement_wavelength_cdom.append(int(match.group(7)))
        self.data.raw_signal_cdom.append(int(match.group(8)))
        self.data.raw_internal_temp.append(int(match.group(9)))
Example #10
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining MET data to the named parameters
        self.data.barometric_pressure.append(float(match.group(2)))
        self.data.relative_humidity.append(float(match.group(3)))
        self.data.air_temperature.append(float(match.group(4)))
        self.data.longwave_irradiance.append(float(match.group(5)))
        self.data.precipitation_level.append(float(match.group(6)))
        self.data.sea_surface_temperature.append(float(match.group(7)))
        self.data.sea_surface_conductivity.append(float(match.group(8)))
        self.data.shortwave_irradiance.append(float(match.group(9)))
        self.data.eastward_wind_velocity.append(float(match.group(10)))
        self.data.northward_wind_velocity.append(float(match.group(11)))
Example #11
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining pCO2 data to the named parameters
        self.data.co2_date_time_string.append(str(match.group(2)))
        self.data.zero_a2d.append(int(match.group(3)))
        self.data.current_a2d.append(int(match.group(4)))
        self.data.measured_water_co2.append(float(match.group(5)))
        self.data.avg_irga_temperature.append(float(match.group(6)))
        self.data.humidity.append(float(match.group(7)))
        self.data.humidity_temperature.append(float(match.group(8)))
        self.data.gas_stream_pressure.append(int(match.group(9)))
        self.data.irga_detector_temperature.append(float(match.group(10)))
        self.data.irga_source_temperature.append(float(match.group(11)))
        self.data.co2_source.append(str(match.group(12)))
Example #12
    def _build_parsed_values(self, timestamp, sample):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(timestamp)
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(timestamp)

        self.data.record_length.append(int(sample[3:5], 16))
        self.data.record_type.append(int(sample[5:7], 16))
        self.data.record_time.append(int(sample[7:15], 16))
        self.data.thermistor_start.append(int(sample[15:19], 16))

        cnt = 19
        reference = []  # create empty list to hold the 16 reference measurements
        for i in range(0, 16):
            indx = (i * 4) + cnt
            reference.append(int(sample[indx:indx + 4], 16))

        self.data.reference_measurements.append(reference)

        cnt = indx + 4  # reset the counter to start with the light measurements
        light = []  # create empty list to hold the 92 light measurements
        for i in range(0, 92):
            indx = (i * 4) + cnt
            light.append(int(sample[indx:indx + 4], 16))

        self.data.light_measurements.append(light)

        cnt = indx + 8  # reset the counter for the final parameters
        self.data.voltage_battery.append(int(sample[cnt:cnt + 4], 16))
        self.data.thermistor_end.append(int(sample[cnt + 4:cnt + 8], 16))
Example #13
    def _build_parsed_values(self, match):
        '''
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        '''
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining GPS data to the named parameters
        self.data.latitude.append(float(match.group(2)))
        self.data.longitude.append(float(match.group(3)))
        self.data.speed_over_ground.append(float(match.group(4)))
        self.data.course_over_ground.append(float(match.group(5)))
        self.data.fix_quality.append(int(match.group(6)))
        self.data.number_satellites.append(int(match.group(7)))
        self.data.horiz_dilution_precision.append(float(match.group(8)))
        self.data.altitude.append(float(match.group(9)))
        self.data.gps_date_string.append(str(match.group(10)))
        self.data.gps_time_string.append(str(match.group(11)))
        self.data.latitude_string.append(str(match.group(12)))
        self.data.longitude_string.append(str(match.group(13)))
Example #14
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.cpm_date_time_string.append(str(match.group(1)))

        # Assign the remaining CPM status data to the named parameters
        self.data.main_voltage.append(float(match.group(2)))
        self.data.main_current.append(float(match.group(3)))
        self.data.backup_battery_voltage.append(float(match.group(4)))
        self.data.backup_battery_current.append(float(match.group(5)))
        self.data.error_flags.append(int(match.group(6), 16))

        self.data.temperature1.append(float(match.group(7)))
        self.data.temperature2.append(float(match.group(8)))

        self.data.humidity.append(float(match.group(9)))
        self.data.pressure.append(float(match.group(10)))

        self.data.ground_fault_enable.append(int(match.group(11), 16))
        self.data.ground_fault_sbd.append(float(match.group(12)))
        self.data.ground_fault_gps.append(float(match.group(13)))
        self.data.ground_fault_main.append(float(match.group(14)))
        self.data.ground_fault_9522_fw.append(float(match.group(15)))

        self.data.leak_detect_enable.append(int(match.group(16), 16))
        self.data.leak_detect_voltage1.append(int(match.group(17)))
        self.data.leak_detect_voltage2.append(int(match.group(18)))

        self.data.heartbeat_enable.append(int(match.group(19)))
        self.data.heartbeat_delta.append(int(match.group(20)))
        self.data.heartbeat_threshold.append(int(match.group(21)))

        self.data.wake_code.append(int(match.group(22)))

        self.data.iridium_power_state.append(int(match.group(23)))
        self.data.iridium_voltage.append(float(match.group(24)))
        self.data.iridium_current.append(float(match.group(25)))
        self.data.iridium_error_flag.append(int(match.group(26)))

        self.data.fwwf_power_state.append(int(match.group(27), 16))
        self.data.fwwf_voltage.append(float(match.group(28)))
        self.data.fwwf_current.append(float(match.group(29)))
        self.data.fwwf_power_flag.append(int(match.group(30)))

        self.data.gps_power_state.append(float(match.group(31)))

        self.data.sbd_power_state.append(float(match.group(32)))
        self.data.sbd_message_pending.append(float(match.group(33)))

        self.data.pps_source.append(float(match.group(34)))
        self.data.dcl_power_state.append(int(match.group(35), 16))

        self.data.wake_time_count.append(float(match.group(36)))
        self.data.wake_power_count.append(int(match.group(37)))

        self.data.esw_power_state.append(int(match.group(38), 16))

        self.data.dsl_power_state.append(int(match.group(39)))
Example #15
def main():
    # load the input arguments
    args = inputs()
    infile = os.path.abspath(args.infile)
    outfile = os.path.abspath(args.outfile)
    coeff_file = os.path.abspath(args.coeff_file)
    blnk_file = os.path.abspath(args.devfile)

    # check for the source of calibration coeffs and load accordingly
    dev = Calibrations(coeff_file)  # initialize calibration class
    if os.path.isfile(coeff_file):
        # we always want to use this file if it exists
        dev.load_coeffs()
    elif args.csvurl:
        # load from the CI hosted CSV files
        csv_url = args.csvurl
        dev.read_csv(csv_url)
        dev.save_coeffs()
    else:
        raise Exception(
            'A source for the PCO2W calibration coefficients could not be found'
        )

    # check for the source of instrument blanks and load accordingly
    # initialize the Blanks class using default blank values
    blank = Blanks(blnk_file, 1.0, 1.0)
    if os.path.isfile(blnk_file):
        blank.load_blanks()
    else:
        blank.save_blanks()

    # load the PCO2W data file
    with open(infile, 'rb') as f:
        pco2w = Munch(json.load(f))

    if len(pco2w.time) == 0:
        # This is an empty file, end processing
        return None

    # convert the raw battery voltage and thermistor values from counts
    # to V and degC, respectively
    pco2w.thermistor = ph_thermistor(np.array(pco2w.thermistor_raw)).tolist()
    pco2w.voltage_battery = ph_battery(np.array(
        pco2w.voltage_battery)).tolist()

    # compare the instrument clock to the GPS-based DCL time stamp
    # --> PCO2W uses the classic Mac epoch of seconds since 1904-01-01
    mac = datetime.strptime("01-01-1904", "%m-%d-%Y")
    offset = []
    for i in range(len(pco2w.time)):
        # both timestamps are treated as naive UTC datetimes
        rec = mac + timedelta(seconds=pco2w.record_time[i])
        dcl = datetime.utcfromtimestamp(pco2w.time[i])

        # we use the sample collection time as the time record for the sample.
        # the record_time, however, is when the sample was processed. so the
        # true offset needs to include the difference between the collection
        # and processing times
        collect = dcl_to_epoch(pco2w.collect_date_time[i])
        process = dcl_to_epoch(pco2w.process_date_time[i])
        diff = process - collect
        if np.isnan(diff):
            diff = 300
        offset.append((rec - dcl).total_seconds() - diff)

    pco2w.time_offset = offset

    # set calibration inputs to pCO2 calculations
    ea434 = 19706.  # factory constants
    eb434 = 3073.  # factory constants
    ea620 = 34.  # factory constants
    eb620 = 44327.  # factory constants

    # calculate pCO2
    pCO2 = []
    blank434 = []
    blank620 = []

    for i in range(len(pco2w.record_type)):
        if pco2w.record_type[i] == 4:
            # this is a light measurement, calculate the pCO2
            pCO2.append(
                pco2_pco2wat(pco2w.record_type[i], pco2w.light_measurements[i],
                             pco2w.thermistor[i], ea434, eb434, ea620, eb620,
                             dev.coeffs['calt'], dev.coeffs['cala'],
                             dev.coeffs['calb'], dev.coeffs['calc'],
                             blank.blank_434, blank.blank_620)[0])

            # record the blanks used
            blank434.append(blank.blank_434)
            blank620.append(blank.blank_620)

        if pco2w.record_type[i] == 5:
            # this is a dark measurement, update and save the new blanks
            blank.blank_434 = pco2_blank(pco2w.light_measurements[i][6])
            blank.blank_620 = pco2_blank(pco2w.light_measurements[i][7])
            blank.save_blanks()

            blank434.append(blank.blank_434)
            blank620.append(blank.blank_620)

    # save the resulting data to a json formatted file
    pco2w.pCO2 = pCO2
    pco2w.blank434 = blank434
    pco2w.blank620 = blank620

    with open(outfile, 'w') as f:
        f.write(pco2w.toJSON())
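
The inputs() helper is not shown in this example. A minimal sketch of an argparse-based version consistent with the attributes main() accesses (the flag names and help strings are assumptions):

import argparse

def inputs():
    # Hypothetical argument parser; only the attribute names (infile, outfile,
    # coeff_file, devfile, csvurl) are taken from main() above.
    parser = argparse.ArgumentParser(description='Process PCO2W data files')
    parser.add_argument('-i', '--infile', required=True, help='parsed PCO2W data (JSON)')
    parser.add_argument('-o', '--outfile', required=True, help='processed output file (JSON)')
    parser.add_argument('-c', '--coeff_file', required=True, help='local calibration coefficients file')
    parser.add_argument('-d', '--devfile', required=True, help='serialized instrument blanks file')
    parser.add_argument('-u', '--csvurl', default=None, help='URL of a hosted calibration CSV')
    return parser.parse_args()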
Example #16
    def _build_parsed_values(self, match):
        """
        Start by parsing the beginning portion of the ensemble (Header Data)
        """
        # build the ensemble string from match.group(2) through to the end.
        length = unpack("<H", unhexlify(match.group(3)))[0]
        ensemble = unhexlify(''.join(match.groups()[1:]))

        # Calculate the checksum: sum of the first `length` bytes, modulo 65536
        total = 0
        for byte in bytearray(ensemble[:length]):
            total += byte

        checksum = total & 65535  # bitwise AND with 65535 == modulo 65536
        if checksum != unpack("<H", ensemble[length:length + 2])[0]:
            raise Exception("Checksum mismatch")

        (header_id, data_source_id, num_bytes, spare, num_data_types) = \
            unpack('<2BH2B', ensemble[0:6])

        self.data.time.append(dcl_to_epoch(match.group(1)))
        self.data.header.num_bytes.append(num_bytes)
        self.data.header.num_data_types.append(num_data_types)

        offsets = []  # create list for offsets
        strt = 6  # offsets start at byte 6 (using 0 indexing)
        nDT = 1  # counter for N data types
        while nDT <= num_data_types:
            value = unpack('<H', ensemble[strt:strt + 2])[0]
            offsets.append(value)
            strt += 2
            nDT += 1

        for offset in offsets:
            # for each offset, using the starting byte, determine the data type
            # and then parse accordingly.
            data_type = unpack('<H', ensemble[offset:offset + 2])[0]

            # fixed leader data (x00x00)
            if data_type == 0:
                chunk = ensemble[offset:offset + 59]
                self._parse_fixed_chunk(chunk)
                iCells = self.num_depth_cells  # grab the # of depth cells

            # variable leader data (x80x00)
            if data_type == 128:
                chunk = ensemble[offset:offset + 65]
                self._parse_variable_chunk(chunk)

            # velocity data (x00x01)
            if data_type == 256:
                # number of bytes is a function of the user selectable number
                # of depth cells (WN command), obtained above
                nBytes = 2 + 8 * iCells
                chunk = ensemble[offset:offset + nBytes]
                self._parse_velocity_chunk(chunk)

            # correlation magnitude data (x00x02)
            if data_type == 512:
                # number of bytes is a function of the user selectable number
                # of depth cells (WN command), obtained above
                nBytes = 2 + 4 * iCells
                chunk = ensemble[offset:offset + nBytes]
                self._parse_corelation_magnitude_chunk(chunk)

            # echo intensity data (x00x03)
            if data_type == 768:
                # number of bytes is a function of the user selectable number
                # of depth cells (WN command), obtained above
                nBytes = 2 + 4 * iCells
                chunk = ensemble[offset:offset + nBytes]
                self._parse_echo_intensity_chunk(chunk)

            # percent-good data (x00x04)
            if data_type == 1024:
                # number of bytes is a function of the user selectable number
                # of depth cells (WN command), obtained above
                nBytes = 2 + 4 * iCells
                chunk = ensemble[offset:offset + nBytes]
                self._parse_percent_good_chunk(chunk)
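
The check above follows the usual PD0 ensemble convention: the low 16 bits of the byte sum over the first `length` bytes must match the little-endian word stored immediately after them. A one-line sanity check of the masking identity:

total = 123456
assert total & 65535 == total % 65536  # keep the low 16 bits; both give 57920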
Example #17
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining data to the named parameters
        self.data.main_voltage.append(float(match.group(2)))
        self.data.main_current.append(float(match.group(3)))
        self.data.error_flags.append(int(match.group(4), 16))

        self.data.temperature1.append(float(match.group(5)))
        self.data.temperature2.append(float(match.group(6)))
        self.data.temperature3.append(float(match.group(7)))
        self.data.temperature4.append(float(match.group(8)))
        self.data.temperature5.append(float(match.group(9)))

        self.data.humidity.append(float(match.group(10)))
        self.data.pressure.append(float(match.group(11)))

        self.data.ground_fault_enable.append(int(match.group(12), 16))
        self.data.ground_fault_isov3.append(float(match.group(13)))
        self.data.ground_fault_main.append(float(match.group(14)))
        self.data.ground_fault_sensors.append(float(match.group(15)))

        self.data.leak_detect_enable.append(int(match.group(16), 16))
        self.data.leak_detect_voltage1.append(int(match.group(17)))
        self.data.leak_detect_voltage2.append(int(match.group(18)))

        self.data.port1_power_state.append(int(match.group(19)))
        self.data.port1_voltage.append(float(match.group(20)))
        self.data.port1_current.append(float(match.group(21)))
        self.data.port1_error_flag.append(int(match.group(22)))

        self.data.port2_power_state.append(int(match.group(23)))
        self.data.port2_voltage.append(float(match.group(24)))
        self.data.port2_current.append(float(match.group(25)))
        self.data.port2_error_flag.append(int(match.group(26)))

        self.data.port3_power_state.append(int(match.group(27)))
        self.data.port3_voltage.append(float(match.group(28)))
        self.data.port3_current.append(float(match.group(29)))
        self.data.port3_error_flag.append(int(match.group(30)))

        self.data.port4_power_state.append(int(match.group(31)))
        self.data.port4_voltage.append(float(match.group(32)))
        self.data.port4_current.append(float(match.group(33)))
        self.data.port4_error_flag.append(int(match.group(34)))

        self.data.port5_power_state.append(int(match.group(35)))
        self.data.port5_voltage.append(float(match.group(36)))
        self.data.port5_current.append(float(match.group(37)))
        self.data.port5_error_flag.append(int(match.group(38)))

        self.data.port6_power_state.append(int(match.group(39)))
        self.data.port6_voltage.append(float(match.group(40)))
        self.data.port6_current.append(float(match.group(41)))
        self.data.port6_error_flag.append(int(match.group(42)))

        self.data.port7_power_state.append(int(match.group(43)))
        self.data.port7_voltage.append(float(match.group(44)))
        self.data.port7_current.append(float(match.group(45)))
        self.data.port7_error_flag.append(int(match.group(46)))

        self.data.port8_power_state.append(int(match.group(47)))
        self.data.port8_voltage.append(float(match.group(48)))
        self.data.port8_current.append(float(match.group(49)))
        self.data.port8_error_flag.append(int(match.group(50)))

        self.data.heartbeat_enable.append(int(match.group(51)))
        self.data.heartbeat_delta.append(int(match.group(52)))
        self.data.heartbeat_threshold.append(int(match.group(53)))

        self.data.wake_code.append(int(match.group(54)))
        self.data.wake_time_count.append(float(match.group(55)))
        self.data.wake_power_count.append(int(match.group(56)))

        self.data.power_state.append(int(match.group(57)))
        self.data.power_board_mode.append(int(match.group(58)))
        self.data.power_voltage_select.append(int(match.group(59)))
        self.data.power_voltage_main.append(float(match.group(60)))
        self.data.power_current_main.append(float(match.group(61)))
        self.data.power_voltage_12.append(float(match.group(62)))
        self.data.power_current_12.append(float(match.group(63)))
        self.data.power_voltage_24.append(float(match.group(64)))
        self.data.power_current_24.append(float(match.group(65)))
Example #18
    def _build_parsed_values(self, match):
        """
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        """
        # Use the date_time_string to calculate an epoch timestamp (seconds since
        # 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining power system data to the named parameters
        self.data.main_voltage.append(float(match.group(2)))
        self.data.main_current.append(float(match.group(3)))
        self.data.percent_charge.append(float(match.group(4)))
        self.data.override_flag.append(str(match.group(5)))
        self.data.error_flag1.append(str(match.group(6)))
        self.data.error_flag2.append(str(match.group(7)))
        self.data.solar_panel1_state.append(int(match.group(8)))
        self.data.solar_panel1_voltage.append(float(match.group(9)))
        self.data.solar_panel1_current.append(float(match.group(10)))
        self.data.solar_panel2_state.append(int(match.group(11)))
        self.data.solar_panel2_voltage.append(float(match.group(12)))
        self.data.solar_panel2_current.append(float(match.group(13)))
        self.data.solar_panel3_state.append(int(match.group(14)))
        self.data.solar_panel3_voltage.append(float(match.group(15)))
        self.data.solar_panel3_current.append(float(match.group(16)))
        self.data.solar_panel4_state.append(int(match.group(17)))
        self.data.solar_panel4_voltage.append(float(match.group(18)))
        self.data.solar_panel4_current.append(float(match.group(19)))
        self.data.wind_turbine1_state.append(int(match.group(20)))
        self.data.wind_turbine1_voltage.append(float(match.group(21)))
        self.data.wind_turbine1_current.append(float(match.group(22)))
        self.data.wind_turbine2_state.append(int(match.group(23)))
        self.data.wind_turbine2_voltage.append(float(match.group(24)))
        self.data.wind_turbine2_current.append(float(match.group(25)))
        self.data.fuel_cell1_state.append(int(match.group(26)))
        self.data.fuel_cell1_voltage.append(float(match.group(27)))
        self.data.fuel_cell1_current.append(float(match.group(28)))
        self.data.fuel_cell2_state.append(int(match.group(29)))
        self.data.fuel_cell2_voltage.append(float(match.group(30)))
        self.data.fuel_cell2_current.append(float(match.group(31)))
        self.data.battery_bank1_temperature.append(float(match.group(32)))
        self.data.battery_bank1_voltage.append(float(match.group(33)))
        self.data.battery_bank1_current.append(float(match.group(34)))
        self.data.battery_bank2_temperature.append(float(match.group(35)))
        self.data.battery_bank2_voltage.append(float(match.group(36)))
        self.data.battery_bank2_current.append(float(match.group(37)))
        self.data.battery_bank3_temperature.append(float(match.group(38)))
        self.data.battery_bank3_voltage.append(float(match.group(39)))
        self.data.battery_bank3_current.append(float(match.group(40)))
        self.data.battery_bank4_temperature.append(float(match.group(41)))
        self.data.battery_bank4_voltage.append(float(match.group(42)))
        self.data.battery_bank4_current.append(float(match.group(43)))
        self.data.external_voltage.append(float(match.group(44)))
        self.data.external_current.append(float(match.group(45)))
        self.data.internal_voltage.append(float(match.group(46)))
        self.data.internal_current.append(float(match.group(47)))
        self.data.internal_temperature.append(float(match.group(48)))
        self.data.fuel_cell_volume.append(float(match.group(49)))
        self.data.seawater_ground_state.append(int(match.group(50)))
        self.data.seawater_ground_positve.append(float(match.group(51)))
        self.data.seawater_ground_negative.append(float(match.group(52)))
        self.data.cvt_state.append(int(match.group(53)))
        self.data.cvt_voltage.append(float(match.group(54)))
        self.data.cvt_current.append(float(match.group(55)))
        self.data.cvt_interlock.append(int(match.group(56)))
        self.data.cvt_temperature.append(float(match.group(57)))
        self.data.error_flag3.append(str(match.group(58)))
Example #19
    def _build_parsed_values(self, match):
        '''
        Extract the data from the relevant regex groups and assign to elements
        of the data dictionary.
        '''
        # Use the date_time_string to calculate an epoch timestamp (seconds
        # since 1970-01-01)
        epts = dcl_to_epoch(match.group(1))
        self.data.time.append(epts)
        self.data.dcl_date_time_string.append(str(match.group(1)))

        # Assign the remaining ZPLSC data to the named parameters
        self.data.transmission_date_string.append(str(match.group(2)))

        # the rest of the data is in a comma separated string, so...
        data = (match.group(3)).split(',')

        # serial number, phase and burst number
        self.data.serial_number.append(int(data[0]))
        self.data.phase.append(int(data[1]))
        self.data.burst_number.append(int(data[2]))

        # number of frequencies and bins per profile
        nfreq = int(data[3])
        strt = 4
        stop = strt + nfreq
        nbins = list(map(int, data[strt:stop]))
        self.data.number_bins.append(nbins)

        # minimum values per frequency
        strt = stop
        stop = strt + nfreq
        self.data.minimum_values.append(list(map(int, data[strt:stop])))

        # tilts, battery and temperature (no pressure sensor)
        strt = stop
        self.data.burst_date_string.append(str(data[strt]))
        self.data.tilts.append(list(map(float, data[strt + 1:strt + 3])))
        self.data.battery_voltage.append(float(data[strt + 3]))
        self.data.temperature.append(float(data[strt + 4]))

        # frequency #1
        strt += 7
        freq = [int(data[strt])]
        self.data.profiles_freq1.append(
            list(map(int, data[strt + 1:strt + 1 + nbins[0]])))

        # frequency #2
        if nfreq >= 2:
            strt += 2 + nbins[0]
            freq.append(int(data[strt]))
            self.data.profiles_freq2.append(
                list(map(int, data[strt + 1:strt + 1 + nbins[1]])))

        # frequency #3
        if nfreq >= 3:
            strt += 2 + nbins[1]
            freq.append(int(data[strt]))
            self.data.profiles_freq3.append(
                list(map(int, data[strt + 1:strt + 1 + nbins[2]])))

        # frequency #4
        if nfreq == 4:
            strt += 2 + nbins[2]
            freq.append(int(data[strt]))
            self.data.profiles_freq4.append(
                list(map(int, data[strt + 1:strt + 1 + nbins[3]])))

        self.data.frequencies.append(freq)
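
Note that nbins is indexed repeatedly above (nbins[0] through nbins[3]), so it must be materialized as a list; under Python 3 a bare map() object cannot be subscripted, which is why the profile slices are wrapped in list(map(...)) as well. A one-line check:

counts = list(map(int, ['10', '20', '30']))  # [10, 20, 30]
counts[0]  # -> 10; a bare map() object would raise TypeError here under Python 3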