def _process_metadata_dict(self):
        """
        Build a metadata particle from the accumulated metadata dictionary
        and append it to the record buffer.
        :return: None
        """
        file_time = self._metadata_dict[Pco2wAbcDataParticleKey.FILE_TIME]

        if file_time is None:
            # Without a file time there is no internal timestamp; report and bail out.
            message = "Unable to create metadata particle due to missing file time"
            log.warn(message)
            self._exception_callback(RecoverableSampleException(message))
            return

        # Convert the "YYYYmmdd HHMMSS" string to UTC seconds, then to NTP time.
        seconds_utc = formatted_timestamp_utc_time(file_time, "%Y%m%d %H%M%S")
        timestamp_ntp = float(ntplib.system_to_ntp_time(seconds_utc))

        metadata_particle = self._extract_sample(self._metadata_class,
                                                 None,
                                                 self._metadata_dict,
                                                 internal_timestamp=timestamp_ntp)
        self._record_buffer.append(metadata_particle)
    def _generate_metadata_particle(self):
        """
        Generate a metadata particle from the accumulated metadata matches
        and append it to the record buffer.

        If the file-time match is missing, a RecoverableSampleException is
        reported through the exception callback instead.
        """

        if self._metadata_matches_dict[MetadataMatchKey.FILE_TIME_MATCH] is None:
            message = "Unable to create metadata particle due to missing file time"
            log.warn(message)
            self._exception_callback(RecoverableSampleException(message))
        else:
            particle_data = dict()

            # Fold every available metadata match into one flat dict.
            for key in self._metadata_matches_dict.keys():
                log.trace('key: %s, particle_data: %s', key, particle_data)

                if self._metadata_matches_dict[key]:
                    self._process_metadata_match_dict(key, particle_data)

            utc_time = formatted_timestamp_utc_time(
                particle_data[PhsenAbcdefImodemDataParticleKey.FILE_TIME],
                "%Y%m%d %H%M%S")
            ntp_timestamp = ntplib.system_to_ntp_time(utc_time)

            # Generate the metadata particle class and add the
            # result to the list of particles to be returned.
            # Pass the timestamp by keyword: the fourth positional argument of
            # _extract_sample is the port timestamp, not the internal timestamp.
            particle = self._extract_sample(self._metadata_particle_class,
                                            None,
                                            particle_data,
                                            internal_timestamp=ntp_timestamp)
            if particle is not None:
                log.trace("Appending metadata particle to record buffer: %s", particle.generate())
                self._record_buffer.append(particle)
Beispiel #3
0
    def _process_metadata_dict(self):
        """
        This method processes a full metadata dictionary which results in
        the generation of a metadata particle.
        :return: None
        """

        file_time = self._metadata_dict[Pco2wAbcDataParticleKey.FILE_TIME]

        if file_time is None:
            message = "Unable to create metadata particle due to missing file time"
            log.warn(message)
            self._exception_callback(RecoverableSampleException(message))

        else:
            utc_timestamp = formatted_timestamp_utc_time(file_time,
                                                         "%Y%m%d %H%M%S")

            ntp_timestamp = float(ntplib.system_to_ntp_time(utc_timestamp))

            # Pass the timestamp by keyword: the fourth positional argument of
            # _extract_sample is the port timestamp, not the internal timestamp.
            particle = self._extract_sample(self._metadata_class,
                                            None,
                                            self._metadata_dict,
                                            internal_timestamp=ntp_timestamp)

            self._record_buffer.append(particle)
Beispiel #4
0
    def _generate_metadata_particle(self):
        """
        Build a metadata particle from the collected metadata matches and
        append it to the record buffer.
        """
        particle_data = dict()

        # Merge every metadata match into a single flat dictionary.
        for match_key in self._metadata_matches_dict.keys():
            self._process_metadata_match_dict(match_key, particle_data)

        # Convert the instrument date/time string to UTC seconds, then NTP.
        seconds_utc = formatted_timestamp_utc_time(
            particle_data[CtdmoGhqrImodemDataParticleKey.DATE_TIME_STRING],
            "%Y%m%d %H%M%S")
        timestamp_ntp = ntplib.system_to_ntp_time(seconds_utc)

        # Generate the metadata particle class and add the
        # result to the list of particles to be returned.
        metadata_particle = self._extract_sample(self.metadata_particle_class,
                                                 None,
                                                 particle_data,
                                                 internal_timestamp=timestamp_ntp)
        if metadata_particle is not None:
            log.debug("Appending metadata particle to record buffer")
            self._record_buffer.append(metadata_particle)
Beispiel #5
0
    def _generate_metadata_particle(self):
        """
        This function generates a metadata particle and appends it to the
        record buffer.
        """

        particle_data = dict()

        for key in self._metadata_matches_dict.keys():

            self._process_metadata_match_dict(key, particle_data)

        utc_time = formatted_timestamp_utc_time(
            particle_data[CtdmoGhqrImodemDataParticleKey.DATE_TIME_STRING],
            "%Y%m%d %H%M%S")
        ntp_time = ntplib.system_to_ntp_time(utc_time)

        # Generate the metadata particle class and add the
        # result to the list of particles to be returned.
        # Pass the timestamp by keyword: the fourth positional argument of
        # _extract_sample is the port timestamp, not the internal timestamp.
        particle = self._extract_sample(self.metadata_particle_class,
                                        None,
                                        particle_data,
                                        internal_timestamp=ntp_time)
        if particle is not None:
            log.debug("Appending metadata particle to record buffer")
            self._record_buffer.append(particle)
 def process_wave(self, fields):
     """
     Populate self._eng_data from one wave record.

     Stores an NTP timestamp at index 7 and the remaining record fields
     from index 8 onward.  Assumes fields[0] is the time string and
     fields[1] the date string -- TODO confirm against the record format.
     """
     date_time_str = fields[1] + SPACE + fields[0]  # concatenate date and time
     date_time_utc = formatted_timestamp_utc_time(date_time_str, DATE_TIME_FORMAT)
     date_time_ntp = ntplib.system_to_ntp_time(date_time_utc)
     self._eng_data[7] = date_time_ntp
     self._eng_data[8:] = fields[2:]
     return
Beispiel #7
0
    def __init__(self,
                 config,
                 stream_handle,
                 filename,
                 exception_callback):
        """
        Initialize the MOPAK parser.

        :param config: parser configuration dict; must supply the particle
            classes dict with the accel and rate particle classes
        :param stream_handle: handle of the file being parsed
        :param filename: input file name; the first 15 characters encode the
            file start time as YYYYmmdd_HHMMSS
        :param exception_callback: callback used to report exceptions
        :raises ConfigurationException: if the particle classes cannot be
            retrieved from the configuration
        """

        self.timer_diff = None

        # Instrument timer bookkeeping (rollover detection starts at zero).
        self._timer_start = None
        self._timer_rollover = 0

        # Convert the date/time portion of the file name to UTC seconds.
        self._start_time_utc = utilities.formatted_timestamp_utc_time(filename[:15],
                                                                      "%Y%m%d_%H%M%S")

        try:
            # Get the particle classes to publish from the configuration
            particle_classes_dict = config.get(DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)
            self._accel_particle_class = particle_classes_dict.get(MopakParticleClassType.ACCEL_PARTICLE_CLASS)
            self._rate_particle_class = particle_classes_dict.get(MopakParticleClassType.RATE_PARTICLE_CLASS)

        except Exception:
            log.error('Parser configuration missing or incorrect')
            raise ConfigurationException

        super(MopakODclParser, self).__init__(config,
                                              stream_handle,
                                              exception_callback)
Beispiel #8
0
    def parse_file(self):
        """
        Parse the *.mp4 file.

        The particle timestamp is extracted from the file name, which must
        follow the REFDES-YYYYmmddTHHMMSSZ.mp4 convention; otherwise a
        recoverable exception is reported.
        """
        match = FILE_PATH_MATCHER.match(self._stream_handle.name)
        if match:
            file_datetime = match.group('Date') + match.group('Time')
            time_stamp = ntplib.system_to_ntp_time(
                utilities.formatted_timestamp_utc_time(file_datetime,
                                                       TIMESTAMP_FORMAT))

            # Extract a particle and append it to the record buffer
            particle = self._extract_sample(CamhdAInstrumentDataParticle,
                                            None,
                                            match.group('Path'),
                                            internal_timestamp=time_stamp)
            # Guard against a failed extraction: _extract_sample can return
            # None, in which case generate_dict() would raise.
            if particle is not None:
                log.debug('Parsed particle: %s', particle.generate_dict())
                self._record_buffer.append(particle)

        else:
            # Files retrieved from the instrument should always match the timestamp naming convention
            self.recov_exception_callback(
                "Unable to extract file time from input file name: %s."
                "Expected format REFDES-YYYYmmddTHHMMSSZ.mp4" %
                self._stream_handle.name)
    def _generate_metadata_particle(self):
        """
        Create the metadata particle from the accumulated metadata matches
        and queue it on the record buffer.
        """
        file_time_match = self._metadata_matches_dict[
            MetadataMatchKey.FILE_TIME_MATCH]

        if file_time_match is None:
            message = "Unable to create metadata particle due to missing file time"
            log.warn(message)
            self._exception_callback(RecoverableSampleException(message))
            return

        particle_data = dict()

        # Collect the fields from every metadata match that was found.
        for match_key in self._metadata_matches_dict.keys():
            log.trace('key: %s, particle_data: %s', match_key, particle_data)

            if self._metadata_matches_dict[match_key]:
                self._process_metadata_match_dict(match_key, particle_data)

        seconds_utc = formatted_timestamp_utc_time(
            particle_data[PhsenAbcdefImodemDataParticleKey.FILE_TIME],
            "%Y%m%d %H%M%S")
        timestamp_ntp = ntplib.system_to_ntp_time(seconds_utc)

        # Generate the metadata particle class and add the
        # result to the list of particles to be returned.
        metadata_particle = self._extract_sample(self._metadata_particle_class,
                                                 None,
                                                 particle_data,
                                                 internal_timestamp=timestamp_ntp)
        if metadata_particle is not None:
            log.trace("Appending metadata particle to record buffer: %s",
                      metadata_particle.generate())
            self._record_buffer.append(metadata_particle)
Beispiel #10
0
    def __init__(self, config, state, stream_handle, filename, state_callback,
                 publish_callback, exception_callback, *args, **kwargs):
        """
        Initialize the MOPAK parser with saved-state support.

        :param config: parser configuration dict
        :param state: previously saved parser state, restored via set_state
        :param stream_handle: handle of the file being parsed
        :param filename: input file name; the first 15 characters encode the
            file start time as YYYYmmdd_HHMMSS
        :param state_callback: callback invoked on state changes
        :param publish_callback: callback invoked to publish particles
        :param exception_callback: callback used to report exceptions
        :raises ConfigurationException: if the particle classes cannot be
            retrieved from the configuration
        """

        self.timer_diff = None

        self._read_state = {
            StateKey.POSITION: 0,
            StateKey.TIMER_ROLLOVER: 0,
            StateKey.TIMER_START: None
        }
        # convert the date / time string from the file name to a starting time in seconds UTC

        self._start_time_utc = utilities.formatted_timestamp_utc_time(
            filename[:15], "%Y%m%d_%H%M%S")

        try:
            # Get the particle classes to publish from the configuration
            # NOTE(review): 'ACCEL_PARTCICLE_CLASS' looks misspelled -- confirm
            # the constant name on MopakParticleClassType before changing it.
            particle_classes_dict = config.get(
                DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)
            self._accel_particle_class = particle_classes_dict.get(
                MopakParticleClassType.ACCEL_PARTCICLE_CLASS)
            self._rate_particle_class = particle_classes_dict.get(
                MopakParticleClassType.RATE_PARTICLE_CLASS)

        except Exception as e:
            log.error('Parser configuration missing or incorrect')
            raise ConfigurationException

        super(MopakODclParser,
              self).__init__(config, stream_handle, state, self.sieve_function,
                             state_callback, publish_callback,
                             exception_callback)

        if state:
            self.set_state(state)
    def process_date(self, fields):
        """
        Convert the date/time fields of a record to an NTP timestamp and
        store it at index 0 of self._eng_data.
        """
        date_time_str = fields[0] + SPACE + fields[1]  # concatenate date and time
        seconds_utc = formatted_timestamp_utc_time(date_time_str, DATE_TIME_FORMAT)
        # adjust for timezone (%z format is not supported in Python 2.7)
        seconds_utc += float(fields[2]) * 3600
        self._eng_data[0] = ntplib.system_to_ntp_time(seconds_utc)
        return
Beispiel #12
0
    def parse_file(self):
        """
        Parse the zplsc_c log file (averaged condensed data).
        Read file line by line. Values are extracted from lines containing condensed ASCII data
        @return: dictionary of data values with the particle names as keys or None
        """

        # Loop over all lines in the data file and parse the data to generate particles
        for number, line in enumerate(self._stream_handle, start=1):

            # Check if this is the dcl status log
            match = DCL_LOG_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED DCL_LOG_MATCHER: %s: %s", number,
                          match.groups())
                # No data to extract, move on to the next line
                continue

            # Check if this is the instrument phase status log
            match = PHASE_STATUS_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED PHASE_STATUS_MATCHER: %s: %s", number,
                          match.groups())
                # No data to extract, move on to the next line
                continue

            # Check if this is the instrument condensed ASCII data
            match = SENSOR_DATA_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED SENSOR_DATA_MATCHER: %s: %s", number,
                          match.groups())

                # Extract the condensed ASCII data from this line
                data_dict = self.parse_line(match)
                if data_dict is None:
                    log.error('Erroneous data found in line %s: %s', number,
                              line)
                    continue

                # Convert the DCL timestamp into the particle timestamp
                time_stamp = ntplib.system_to_ntp_time(
                    utilities.formatted_timestamp_utc_time(
                        match.group('dcl_timestamp'),
                        utilities.DCL_CONTROLLER_TIMESTAMP_FORMAT))

                # Extract a particle and append it to the record buffer
                # NOTE(review): time_stamp rides in the fourth positional slot
                # of _extract_sample; confirm that slot is internal_timestamp
                # (not port_timestamp) in this parser's base class.
                particle = self._extract_sample(ZplscCInstrumentDataParticle,
                                                None, data_dict, time_stamp)
                if particle is not None:
                    log.trace('Parsed particle: %s' % particle.generate_dict())
                    self._record_buffer.append(particle)

                continue

            # Error, line did not match any expected regex
            self._exception_callback(
                RecoverableSampleException('Unknown data found in line %s:%s' %
                                           (number, line)))
Beispiel #13
0
    def parse_file(self):
        """
        Parse the zplsc_c log file (averaged condensed data).
        Read file line by line. Values are extracted from lines containing condensed ASCII data
        @return: dictionary of data values with the particle names as keys or None
        """

        # Loop over all lines in the data file and parse the data to generate particles
        for number, line in enumerate(self._stream_handle, start=1):

            # Check if this is the dcl status log
            match = DCL_LOG_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED DCL_LOG_MATCHER: %s: %s", number, match.groups())
                # No data to extract, move on to the next line
                continue

            # Check if this is the instrument phase status log
            match = PHASE_STATUS_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED PHASE_STATUS_MATCHER: %s: %s", number, match.groups())
                # No data to extract, move on to the next line
                continue

            # Check if this is the instrument condensed ASCII data
            match = SENSOR_DATA_MATCHER.match(line)
            if match is not None:
                log.trace("MATCHED SENSOR_DATA_MATCHER: %s: %s", number, match.groups())

                # Extract the condensed ASCII data from this line
                data_dict = self.parse_line(match)
                if data_dict is None:
                    log.error('Erroneous data found in line %s: %s', number, line)
                    continue

                # Convert the DCL timestamp into the particle timestamp
                time_stamp = ntplib.system_to_ntp_time(
                    utilities.formatted_timestamp_utc_time(
                        match.group('dcl_timestamp'), utilities.DCL_CONTROLLER_TIMESTAMP_FORMAT))

                # Extract a particle and append it to the record buffer
                # NOTE(review): time_stamp rides in the fourth positional slot
                # of _extract_sample; confirm that slot is internal_timestamp
                # (not port_timestamp) in this parser's base class.
                particle = self._extract_sample(
                    ZplscCInstrumentDataParticle, None, data_dict, time_stamp)
                if particle is not None:
                    log.trace('Parsed particle: %s' % particle.generate_dict())
                    self._record_buffer.append(particle)

                continue

            # Error, line did not match any expected regex
            self._exception_callback(
                RecoverableSampleException('Unknown data found in line %s:%s' % (number, line)))
Beispiel #14
0
    def parse_file(self):
        """
        Parse the *.mp4 file.

        The particle timestamp is taken from the file name, which must match
        the REFDES-YYYYmmddTHHMMSSZ.mp4 convention; otherwise a recoverable
        exception is reported.
        """
        match = FILE_PATH_MATCHER.match(self._stream_handle.name)
        if match:
            file_datetime = match.group('Date') + match.group('Time')
            time_stamp = ntplib.system_to_ntp_time(
                utilities.formatted_timestamp_utc_time(file_datetime, TIMESTAMP_FORMAT))

            # Extract a particle and append it to the record buffer.
            # Pass the timestamp by keyword: the fourth positional argument of
            # _extract_sample is the port timestamp, not the internal timestamp.
            particle = self._extract_sample(CamhdAInstrumentDataParticle, None, match.group('Path'),
                                            internal_timestamp=time_stamp)
            # _extract_sample can return None on failure; guard before use.
            if particle is not None:
                log.debug('Parsed particle: %s', particle.generate_dict())
                self._record_buffer.append(particle)

        else:
            # Files retrieved from the instrument should always match the timestamp naming convention
            self.recov_exception_callback("Unable to extract file time from input file name: %s."
                "Expected format REFDES-YYYYmmddTHHMMSSZ.mp4" % self._stream_handle.name)
Beispiel #15
0
    def __init__(self,
                 config,
                 state,
                 stream_handle,
                 filename,
                 state_callback,
                 publish_callback,
                 exception_callback,
                 *args, **kwargs):
        """
        Initialize the MOPAK parser with saved-state support.

        :param config: parser configuration dict
        :param state: previously saved parser state, restored via set_state
        :param stream_handle: handle of the file being parsed
        :param filename: input file name; the first 15 characters encode the
            file start time as YYYYmmdd_HHMMSS
        :param state_callback: callback invoked on state changes
        :param publish_callback: callback invoked to publish particles
        :param exception_callback: callback used to report exceptions
        :raises ConfigurationException: if the particle classes cannot be
            retrieved from the configuration
        """

        self.timer_diff = None

        self._read_state = {StateKey.POSITION: 0, StateKey.TIMER_ROLLOVER: 0, StateKey.TIMER_START: None}
        # convert the date / time string from the file name to a starting time in seconds UTC

        self._start_time_utc = utilities.formatted_timestamp_utc_time(filename[:15],
                                                                      "%Y%m%d_%H%M%S")

        try:
            # Get the particle classes to publish from the configuration
            # NOTE(review): 'ACCEL_PARTCICLE_CLASS' looks misspelled -- confirm
            # the constant name on MopakParticleClassType before changing it.
            particle_classes_dict = config.get(DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)
            self._accel_particle_class = particle_classes_dict.get(MopakParticleClassType.ACCEL_PARTCICLE_CLASS)
            self._rate_particle_class = particle_classes_dict.get(MopakParticleClassType.RATE_PARTICLE_CLASS)

        except Exception as e:
            log.error('Parser configuration missing or incorrect')
            raise ConfigurationException

        super(MopakODclParser, self).__init__(config,
                                              stream_handle,
                                              state,
                                              self.sieve_function,
                                              state_callback,
                                              publish_callback,
                                              exception_callback)

        if state:
            self.set_state(state)
    def parse_file(self):
        """
        Parse through the file, pulling single lines and comparing to
        the established patterns, generating particles for data lines.

        Lines matching neither the FLORT DJ pattern, the combined
        CTDBP/FLORT pattern, nor the metadata pattern are reported as
        UnexpectedDataException via the exception callback.
        """
        for line in self._stream_handle:
            message = 'data line \n%s' % line
            log.debug(message)

            # First check for valid FLORT DJ DCL data
            # If this is a valid sensor data record,
            # use the extracted fields to generate a particle.
            sensor_match = SENSOR_DATA_MATCHER.match(line)

            if sensor_match is not None:
                # NOTE(review): this assigns a class attribute, so the map is
                # shared by every instance of the particle class -- confirm
                # parsers never run concurrently with different maps.
                self._particle_class._data_particle_map = INSTRUMENT_PARTICLE_MAP
                log.debug('FLORT DJ match found')
            else:
                log.debug('FLORT DJ match NOT found')
                # check for a match against the FLORT D data in a combined
                # CTDBP FLORT instrument record
                sensor_match = CTDBP_FLORT_MATCHER.match(line)

                if sensor_match is not None:
                    self._particle_class._data_particle_map = CTDBP_FLORT_PARTICLE_MAP
                    log.debug('check for CTDBP/FLORT match')

            if sensor_match is not None:
                # FLORT data matched against one of the patterns
                log.debug('record found')

                # DCL Controller timestamp is the port_timestamp
                dcl_controller_timestamp = sensor_match.groups()[SENSOR_GROUP_TIMESTAMP]
                port_timestamp = dcl_time_to_ntp(dcl_controller_timestamp)

                if self._particle_class._data_particle_map == INSTRUMENT_PARTICLE_MAP:
                    # For valid FLORT DJ data, Instrument timestamp is the internal_timestamp
                    instrument_timestamp = sensor_match.groups()[SENSOR_GROUP_SENSOR_DATE] \
                                           + ' ' + sensor_match.groups()[SENSOR_GROUP_SENSOR_TIME]
                    internal_timestamp = timestamp_mmddyyhhmmss_to_ntp(instrument_timestamp)
                else:
                    # _data_particle_map is CTDBP_FLORT_PARTICLE_MAP
                    utc_time = formatted_timestamp_utc_time(sensor_match.groups()[CTDBP_FLORT_GROUP_DATE_TIME],
                                                            "%d %b %Y %H:%M:%S")
                    instrument_timestamp = ntplib.system_to_ntp_time(utc_time)
                    internal_timestamp = instrument_timestamp

                # using port_timestamp as preferred_ts because internal_timestamp is not accurate
                particle = self._extract_sample(self._particle_class,
                                                None,
                                                sensor_match.groups(),
                                                port_timestamp=port_timestamp,
                                                internal_timestamp=internal_timestamp,
                                                preferred_ts=DataParticleKey.PORT_TIMESTAMP)
                # increment state for this chunk even if we don't
                # get a particle
                # NOTE(review): _extract_sample may return None, which is
                # appended as-is -- confirm downstream handles None records.
                self._record_buffer.append(particle)

            # It's not a sensor data record, see if it's a metadata record.
            else:
                log.debug('No data recs found, check for meta record')

                # If it's a valid metadata record, ignore it.
                # Otherwise generate warning for unknown data.

                meta_match = METADATA_MATCHER.match(line)
                if meta_match is None:
                    error_message = 'Unknown data found in chunk %s' % line
                    log.warn(error_message)
                    self._exception_callback(UnexpectedDataException(error_message))
Beispiel #17
0
    def parse_file(self):
        """
        Parse the FCoeff*.txt file. Create a chunk from valid data in the file.
        Build a data particle from the chunk.

        The file time comes from the file name; header lines supply the
        directional-frequency and frequency-band parameters; data lines
        supply one row per frequency band. A single particle is produced
        only if every expected parameter was collected and the number of
        data rows matches the advertised frequency count.
        """

        file_time_dict = {}
        dir_freq_dict = {}
        freq_band_dict = {}
        # One list per band column; each data line appends one value to each.
        sensor_data_dict = {
            AdcptMFCoeffParticleKey.FREQ_BAND: [],
            AdcptMFCoeffParticleKey.BANDWIDTH_BAND: [],
            AdcptMFCoeffParticleKey.ENERGY_BAND: [],
            AdcptMFCoeffParticleKey.DIR_BAND: [],
            AdcptMFCoeffParticleKey.A1_BAND: [],
            AdcptMFCoeffParticleKey.B1_BAND: [],
            AdcptMFCoeffParticleKey.A2_BAND: [],
            AdcptMFCoeffParticleKey.B2_BAND: [],
            AdcptMFCoeffParticleKey.CHECK_BAND: []
        }

        # Extract the file time from the file name
        input_file_name = self._stream_handle.name

        match = FILE_NAME_MATCHER.match(input_file_name)

        if match:
            file_time_dict = match.groupdict()
        else:
            self.recov_exception_callback(
                'Unable to extract file time from FCoeff input file name: %s ' % input_file_name)

        # read the first line in the file
        line = self._stream_handle.readline()

        while line:

            if EMPTY_LINE_MATCHER.match(line):
                # ignore blank lines, do nothing
                pass

            elif HEADER_MATCHER.match(line):
                # we need header records to extract useful information
                for matcher in HEADER_MATCHER_LIST:
                    header_match = matcher.match(line)

                    if header_match is not None:

                        if matcher is DIR_FREQ_MATCHER:
                            dir_freq_dict = header_match.groupdict()

                        elif matcher is FREQ_BAND_MATCHER:
                            freq_band_dict = header_match.groupdict()

                        else:
                            #ignore
                            pass

            elif FCOEFF_DATA_MATCHER.match(line):
                # Extract a row of data
                sensor_match = FCOEFF_DATA_MATCHER.match(line)

                sensor_data_dict[AdcptMFCoeffParticleKey.FREQ_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.FREQ_BAND))
                sensor_data_dict[AdcptMFCoeffParticleKey.BANDWIDTH_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.BANDWIDTH_BAND))
                sensor_data_dict[AdcptMFCoeffParticleKey.ENERGY_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.ENERGY_BAND))
                sensor_data_dict[AdcptMFCoeffParticleKey.DIR_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.DIR_BAND))
                sensor_data_dict[AdcptMFCoeffParticleKey.A1_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.A1_BAND))
                sensor_data_dict[AdcptMFCoeffParticleKey.B1_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.B1_BAND))
                sensor_data_dict[AdcptMFCoeffParticleKey.A2_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.A2_BAND))
                sensor_data_dict[AdcptMFCoeffParticleKey.B2_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.B2_BAND))
                sensor_data_dict[AdcptMFCoeffParticleKey.CHECK_BAND].append(
                    sensor_match.group(AdcptMFCoeffParticleKey.CHECK_BAND))

            else:
                # Generate a warning for unknown data
                self.recov_exception_callback('Unexpected data found in line %s' % line)

            # read the next line in the file
            line = self._stream_handle.readline()

        # Construct parsed data list to hand over to the Data Particle class for particle creation
        # Make all the collected data effectively into one long dictionary
        # NOTE(review): iteritems() is Python 2 only; this file would need
        # items() under Python 3.
        parsed_dict = dict(chain(file_time_dict.iteritems(),
                                 dir_freq_dict.iteritems(),
                                 freq_band_dict.iteritems(),
                                 sensor_data_dict.iteritems()))

        error_flag = False
        # Check if all parameter data is accounted for
        for name in FCOEFF_ENCODING_RULES:
            try:
                if parsed_dict[name[0]]:
                    log.trace("parsed_dict[%s]: %s", name[0], parsed_dict[name[0]])
            except KeyError:
                self.recov_exception_callback('Missing particle data: %s' % name[0])
                error_flag = True

        # Don't create a particle if data is missing
        if error_flag:
            return

        # Check if the specified number of frequencies were retrieved from the data
        fcoeff_data_length = len(sensor_data_dict[AdcptMFCoeffParticleKey.FREQ_BAND])
        if fcoeff_data_length != int(dir_freq_dict[AdcptMFCoeffParticleKey.NUM_FREQ]):
            self.recov_exception_callback(
                'Unexpected number of frequencies in FCoeff Matrix: expected %s, got %s'
                % (dir_freq_dict[AdcptMFCoeffParticleKey.NUM_FREQ], fcoeff_data_length))

            # Don't create a particle if data is missing
            return

        # Convert the filename timestamp into the particle timestamp
        time_stamp = ntplib.system_to_ntp_time(
            utilities.formatted_timestamp_utc_time(file_time_dict[AdcptMFCoeffParticleKey.FILE_TIME]
                                                   , TIMESTAMP_FORMAT))

        # Extract a particle and append it to the record buffer
        # NOTE(review): _extract_sample may return None, in which case
        # generate_dict() below would raise -- confirm a guard is not needed.
        particle = self._extract_sample(AdcptMFCoeffInstrumentDataParticle,
                                        None, parsed_dict, internal_timestamp=time_stamp)
        log.trace('Parsed particle: %s' % particle.generate_dict())
        self._record_buffer.append(particle)
Beispiel #18
0
    def parse_file(self):
        """
        Parse through the file, pulling single lines and comparing to
        the established patterns, generating particles for data lines.

        Lines matching neither the FLORT DJ pattern, the combined
        CTDBP/FLORT pattern, nor the metadata pattern are reported as
        UnexpectedDataException via the exception callback.
        """
        for line in self._stream_handle:
            message = 'data line \n%s' % line
            log.debug(message)

            # First check for valid FLORT DJ DCL data
            # If this is a valid sensor data record,
            # use the extracted fields to generate a particle.
            sensor_match = SENSOR_DATA_MATCHER.match(line)

            if sensor_match is not None:
                # NOTE(review): this assigns a class attribute, so the map is
                # shared by every instance of the particle class -- confirm
                # parsers never run concurrently with different maps.
                self._particle_class._data_particle_map = INSTRUMENT_PARTICLE_MAP
                log.debug('FLORT DJ match found')
            else:
                log.debug('FLORT DJ match NOT found')
                # check for a match against the FLORT D data in a combined
                # CTDBP FLORT instrument record
                sensor_match = CTDBP_FLORT_MATCHER.match(line)

                if sensor_match is not None:
                    self._particle_class._data_particle_map = CTDBP_FLORT_PARTICLE_MAP
                    log.debug('check for CTDBP/FLORT match')

            if sensor_match is not None:
                # FLORT data matched against one of the patterns
                log.debug('record found')

                # DCL Controller timestamp is the port_timestamp
                dcl_controller_timestamp = sensor_match.groups()[SENSOR_GROUP_TIMESTAMP]
                port_timestamp = dcl_time_to_ntp(dcl_controller_timestamp)

                if self._particle_class._data_particle_map == INSTRUMENT_PARTICLE_MAP:
                    # For valid FLORT DJ data, Instrument timestamp is the internal_timestamp
                    instrument_timestamp = sensor_match.groups()[SENSOR_GROUP_SENSOR_DATE] \
                                           + ' ' + sensor_match.groups()[SENSOR_GROUP_SENSOR_TIME]
                    internal_timestamp = timestamp_mmddyyhhmmss_to_ntp(instrument_timestamp)
                else:
                    # _data_particle_map is CTDBP_FLORT_PARTICLE_MAP
                    utc_time = formatted_timestamp_utc_time(sensor_match.groups()[CTDBP_FLORT_GROUP_DATE_TIME],
                                                            "%d %b %Y %H:%M:%S")
                    instrument_timestamp = ntplib.system_to_ntp_time(utc_time)
                    internal_timestamp = instrument_timestamp

                # using port_timestamp as preferred_ts because internal_timestamp is not accurate
                particle = self._extract_sample(self._particle_class,
                                                None,
                                                sensor_match.groups(),
                                                port_timestamp=port_timestamp,
                                                internal_timestamp=internal_timestamp,
                                                preferred_ts=DataParticleKey.PORT_TIMESTAMP)
                # increment state for this chunk even if we don't
                # get a particle
                # NOTE(review): _extract_sample may return None, which is
                # appended as-is -- confirm downstream handles None records.
                self._record_buffer.append(particle)

            # It's not a sensor data record, see if it's a metadata record.
            else:
                log.debug('No data recs found, check for meta record')

                # If it's a valid metadata record, ignore it.
                # Otherwise generate warning for unknown data.

                meta_match = METADATA_MATCHER.match(line)
                if meta_match is None:
                    error_message = 'Unknown data found in chunk %s' % line
                    log.warn(error_message)
                    self._exception_callback(UnexpectedDataException(error_message))