Example #1
    def create_expected_results(self):
        """
        This function creates the expected data particle results.
        """

        # These records are at time t=0
        time_stamp = TIME_ON
        ntp_time = ntplib.system_to_ntp_time(time_stamp)
        self.expected_particle1 = Vel3dKWfpInstrumentParticle(RECORD_1_FIELDS, internal_timestamp=ntp_time)
        self.expected_particle2_header_checksum = Vel3dKWfpInstrumentParticle(
            RECORD_2_FIELDS, internal_timestamp=ntp_time
        )
        self.expected_time = Vel3dKWfpMetadataParticle(TIME_1_FIELDS, internal_timestamp=ntp_time)
        self.expected_string_particle = Vel3dKWfpStringParticle(STRING_FIELDS, internal_timestamp=ntp_time)

        # These records are at time t=1
        time_stamp += SAMPLE_RATE
        ntp_time = ntplib.system_to_ntp_time(time_stamp)
        self.expected_particle2 = Vel3dKWfpInstrumentParticle(RECORD_2_FIELDS, internal_timestamp=ntp_time)
        self.expected_particle3_missing = Vel3dKWfpInstrumentParticle(RECORD_3_FIELDS, internal_timestamp=ntp_time)

        # These records are at time t=2
        time_stamp += SAMPLE_RATE
        ntp_time = ntplib.system_to_ntp_time(time_stamp)
        self.expected_particle3 = Vel3dKWfpInstrumentParticle(RECORD_3_FIELDS, internal_timestamp=ntp_time)
        self.expected_particle4_missing = Vel3dKWfpInstrumentParticle(RECORD_4_FIELDS, internal_timestamp=ntp_time)

        # These records are at time t=3
        time_stamp += SAMPLE_RATE
        ntp_time = ntplib.system_to_ntp_time(time_stamp)
        self.expected_particle4 = Vel3dKWfpInstrumentParticle(RECORD_4_FIELDS, internal_timestamp=ntp_time)
Example #2
    def _run(self, address, port):
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.bind((address, port))
        while not self._stopflag:
            rlist, wlist, elist = select.select([sock], [], [], 1)
            for tempSocket in rlist:
                with self._time_lock:
                    if self._time is None:
                        recvTimestamp = ntplib.system_to_ntp_time(time.time())
                    else:
                        recvTimestamp = ntplib.system_to_ntp_time(self._time)

                    data, addr = tempSocket.recvfrom(1024)
                    recvPacket = ntplib.NTPPacket()
                    recvPacket.from_data(data)
                    timeStamp_high = ntplib._to_int(recvPacket.tx_timestamp)
                    timeStamp_low = ntplib._to_frac(recvPacket.tx_timestamp)
                    sendPacket = ntplib.NTPPacket(version=3, mode=4)
                    sendPacket.stratum = 2
                    sendPacket.poll = 10
                    sendPacket.ref_timestamp = recvTimestamp - 5
                    sendPacket.orig_timestamp = ntplib._to_time(
                        timeStamp_high, timeStamp_low)
                    sendPacket.recv_timestamp = recvTimestamp
                    if self._time is None:
                        sendPacket.tx_timestamp = (ntplib.system_to_ntp_time(
                            time.time()))
                    else:
                        sendPacket.tx_timestamp = (ntplib.system_to_ntp_time(
                            self._time))
                    sock.sendto(sendPacket.to_data(), addr)
                    print("Served request from {}".format(addr))
Example #3
    def parse_record(self, record):
        """
        Determine if this is an engineering or data record and parse it.
        """
        sample = None
        result_particle = []
        if PROFILE_MATCHER.match(record):
            # send to WFP_eng_profiler if WFP
            match = PROFILE_MATCHER.match(record)
            fields = struct.unpack('>ihhII', match.group(0))
            # use the profile stop time
            timestamp = int(fields[3])
            self._timestamp = float(ntplib.system_to_ntp_time(timestamp))
            sample = self._extract_sample(Wfp_eng__stc_imodem_statusParserDataParticle, PROFILE_MATCHER,
                                          record, self._timestamp)
            self._increment_state(STATUS_BYTES)
        elif DATA_SAMPLE_MATCHER.match(record):
            # pull out the timestamp for this record
            match = DATA_SAMPLE_MATCHER.match(record)
            fields = struct.unpack('>I', match.group(0)[:4])
            timestamp = int(fields[0])
            self._timestamp = float(ntplib.system_to_ntp_time(timestamp))
            log.debug("Converting record timestamp %f to ntp timestamp %f", timestamp, self._timestamp)
            sample = self._extract_sample(Wfp_eng__stc_imodem_engineeringParserDataParticle, DATA_SAMPLE_MATCHER,
                                          record, self._timestamp)
            self._increment_state(SAMPLE_BYTES)
        if sample:
            # create particle
            log.trace("Extracting sample %s with read_state: %s", sample, self._read_state)
            result_particle = (sample, copy.copy(self._read_state))

        return result_particle
Example #4
    def parse_chunks(self):
        """
        Parse out any pending data chunks in the chunker. If it is a valid data piece, build a particle,
        update the position and timestamp. Go until the chunker has no more valid data.
        @retval a list of tuples
        """
        result_particles = []
        (timestamp, chunk, start, end) = self._chunker.get_next_data_with_index(clean=True)

        while chunk is not None:

            sample_count = 0

            self.process_header(chunk)

            if self._goodHeader:
                self.process_footer(chunk)

                if self._goodFooter:

                    timestamp = float(ntplib.system_to_ntp_time(self._startTime))
                    self._footerData = (self._startTime, self._endTime, self._numberOfRecords, self._decimationFactor)
                    sample = self.extract_metadata_particle(self._footerData, timestamp)
                    if sample is not None:
                        result_particles.append(sample)
                        sample_count = 1

                    moreRecords = True
                    dataRecord = chunk[self._startData:self._startData + DATA_RECORD_BYTES]
                    self._startData += DATA_RECORD_BYTES
                    self._recordNumber = 0.0
                    timestamp = float(ntplib.system_to_ntp_time(float(self._startTime) +
                                                                (self._recordNumber * self._timeIncrement)))

                    while moreRecords:
                        dataFields = struct.unpack('>I', '\x00' + dataRecord[0:3]) + \
                                     struct.unpack('>I', '\x00' + dataRecord[3:6]) + \
                                     struct.unpack('>I', '\x00' + dataRecord[6:9]) + \
                                     struct.unpack('>H', dataRecord[9:11])
                        self._RecordData = (dataFields[0], dataFields[1], dataFields[2])
                        sample = self.extract_data_particle(self._RecordData, timestamp)
                        if sample is not None:
                            result_particles.append(sample)
                            sample_count += 1

                        dataRecord = chunk[self._startData:self._startData + DATA_RECORD_BYTES]
                        self._recordNumber += 1.0
                        timestamp = float(ntplib.system_to_ntp_time(float(self._startTime) +
                                                                    (self._recordNumber * self._timeIncrement)))
                        eopMatch = EOP_MATCHER.search(dataRecord)
                        if eopMatch:
                            moreRecords = False
                        else:
                            self._startData += DATA_RECORD_BYTES

            self._chunk_sample_count.append(sample_count)
            (timestamp, chunk, start, end) = self._chunker.get_next_data_with_index(clean=True)

        return result_particles
Example #5
 def _test_multiple_exclusions(self, tstart, tstop, annos, expected):
     # all times in whole seconds since 1970
     # adapt to expected formats
     times = np.arange(ntplib.system_to_ntp_time(tstart), ntplib.system_to_ntp_time(tstop + 1))
     store = AnnotationStore()
     store.add_annotations([self._create_exclusion_anno(start*1000, stop*1000) for start, stop in annos])
     mask = store.get_exclusion_mask(times)
     self.assertEqual(list(mask), expected)
Example #6
 def _test_multiple_exclusions(self, streamkey, tstart, tstop, annos, expected):
     # all times in whole seconds since 1970
     # adapt to expected formats
     times = np.arange(ntplib.system_to_ntp_time(tstart), ntplib.system_to_ntp_time(tstop + 1))
     store = AnnotationStore()
     store.add_annotations([self._create_exclusion_anno(streamkey, start*1000, stop*1000) for start, stop in annos])
     mask = store.get_exclusion_mask(streamkey, times)
     self.assertEqual(list(mask), expected)
Example #7
 def assert_clock_set(self, sent_time, rcvd_time):
     # verify that the dates match
     print("sts=%s, rts=%s" %(sent_time, rcvd_time))
     self.assertTrue(sent_time[:12].upper() in rcvd_time.upper())
        
     sent_timestamp = time.strptime(sent_time, "%m/%d/%Y %H:%M:%S")
     ntp_sent_timestamp = ntplib.system_to_ntp_time(time.mktime(sent_timestamp))
     rcvd_timestamp = time.strptime(rcvd_time, "%m/%d/%Y %H:%M:%S")
     ntp_rcvd_timestamp = ntplib.system_to_ntp_time(time.mktime(rcvd_timestamp))
     # verify that the times match closely
     print("sts=%d, rts=%d" %(ntp_sent_timestamp, ntp_rcvd_timestamp))
     if ntp_rcvd_timestamp - ntp_sent_timestamp > 45:
         self.fail("time delta too large after clock sync")        
Example #8
 def _build_parsed_values(self):
     header = self.raw_data.header
     pk = AntelopeMetadataParticleKey
     return [
         self._encode_value(pk.NET, header.net, str),
         self._encode_value(pk.STATION, header.station, str),
         self._encode_value(pk.LOCATION, header.location, str),
         self._encode_value(pk.CHANNEL, header.channel, str),
         self._encode_value(pk.START, ntplib.system_to_ntp_time(header.starttime), float),
         self._encode_value(pk.END, ntplib.system_to_ntp_time(header.endtime), float),
         self._encode_value(pk.RATE, header.rate, int),
         self._encode_value(pk.NSAMPS, header.num_samples, int),
         self._encode_value(pk.FILENAME, self.raw_data.relname, str),
         self._encode_value(pk.UUID, self.raw_data.bin_uuid, str),
     ]
Example #10
    def _parse_header(self):
        """
        Parse required parameters from the header and the footer.
        """
        # read the first bytes from the file
        header = self._stream_handle.read(HEADER_BYTES)
        if len(header) < HEADER_BYTES:
            log.error("File is not long enough to read header")
            raise SampleException("File is not long enough to read header")

        # read the last FOOTER_BYTES bytes from the file
        self._stream_handle.seek(-FOOTER_BYTES, 2)
        footer = self._stream_handle.read() 
        footer_match = FOOTER_MATCHER.search(footer)
        
        # parse the header to get the timestamp
        if footer_match and HEADER_MATCHER.search(header):
            header_match = HEADER_MATCHER.search(header)
            self._stream_handle.seek(len(header_match.group(0)))        
            timestamp_struct = time.strptime(header_match.group(1), "%Y%m%d %H%M%S") 
            timestamp_s = calendar.timegm(timestamp_struct) 
            self._timestamp = float(ntplib.system_to_ntp_time(timestamp_s))
            
            header_footer = header_match.group(0) + footer_match.group(0) 
            
            sample = self._extract_sample(self._metadata_class, HEADER_FOOTER_MATCHER,
                                          header_footer, self._timestamp)  
            
            if sample:
                # increment by the length of the matched header and save the header          
                self._increment_state(len(header_match.group(0)))
                self._saved_header = (sample, copy.copy(self._read_state))
        else:
            log.error("File header or footer does not match header regex")
            raise SampleException("File header or footer does not match header regex")
Example #11
    def _string_to_ntp_date_time(self, datestr):
        """
        Extract an NTP date from an ISO8601 formatted date string.
        @param datestr an ISO8601 formatted string containing date information
        @retval an NTP date number (seconds since Jan 1 1900)
        @throws IOError if datestr is not a string.
        @throws ValueError if datestr cannot be converted to a date.
        """
        if not isinstance(datestr, str):
            raise IOError("Value %s is not a string." % str(datestr))
        if not DATE_MATCHER.match(datestr):
            raise ValueError("date string not in ISO8601 format YYYY-MM-DDTHH:MM:SS.SSSSZ")

        try:
            # This assumes input date string are in UTC (=GMT)
            if datestr[-1:] != "Z":
                datestr += "Z"

            # the parsed date time represents a GMT time, but strftime
            # does not take timezone into account, so these are seconds from the
            # local start of 1970
            local_sec = float(parser.parse(datestr).strftime("%s.%f"))
            # remove the local time zone to convert to gmt (seconds since gmt jan 1 1970)
            gmt_sec = local_sec - time.timezone
            # convert to ntp (seconds since gmt jan 1 1900)
            timestamp = ntplib.system_to_ntp_time(gmt_sec)

        except ValueError as e:
            raise ValueError("Value %s could not be formatted to a date. %s" % (str(datestr), e))

        log.debug("converting time string '%s', unix_ts: %s ntp: %s", datestr, gmt_sec, timestamp)

        return timestamp
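Note that strftime("%s.%f") relies on a platform-specific extension (%s is not one of Python's documented directives), which is why the local-timezone correction follows it. A portable sketch of the same conversion, assuming the input has already been validated as a UTC ISO8601 string:

    import calendar
    import ntplib
    from dateutil import parser

    def iso8601_to_ntp(datestr):
        dt = parser.parse(datestr)  # e.g. '2012-12-17T11:12:49.123Z'
        # timegm() treats the struct_time as UTC, so no timezone arithmetic is needed
        gmt_sec = calendar.timegm(dt.utctimetuple()) + dt.microsecond / 1e6
        return ntplib.system_to_ntp_time(gmt_sec)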
Example #12
    def test_system_sample_format(self):
        """
        Verify driver can get system sample data out in a reasonable
        format. Parsed is all we care about...raw is tested in the base
        DataParticle tests
        """

        port_timestamp = 3555423720.711772
        driver_timestamp = 3555423722.711772
        text_timestamp = time.strptime('13/12/2012 17:03:26', "%d/%m/%Y %H:%M:%S")
        internal_timestamp = ntplib.system_to_ntp_time(timegm_to_float(text_timestamp))

        # construct the expected particle
        expected_particle = {
            DataParticleKey.PKT_FORMAT_ID: DataParticleValue.JSON_DATA,
            DataParticleKey.PKT_VERSION: 1,
            DataParticleKey.STREAM_NAME: DataParticleType.SYSTEM,
            DataParticleKey.PORT_TIMESTAMP: port_timestamp,
            DataParticleKey.DRIVER_TIMESTAMP: driver_timestamp,
            DataParticleKey.INTERNAL_TIMESTAMP: internal_timestamp,
            DataParticleKey.PREFERRED_TIMESTAMP: DataParticleKey.PORT_TIMESTAMP,
            DataParticleKey.QUALITY_FLAG: DataParticleValue.OK,
            DataParticleKey.VALUES: system_particle
        }

        self.compare_parsed_data_particle(VectorSystemDataParticle,
                                          system_sample(),
                                          expected_particle)
Example #13
    def parse_chunks(self):
        """
        Parse out any pending data chunks in the chunker. If
        it is a valid data piece, build a particle, update the position and
        timestamp. Go until the chunker has no more valid data.
        @retval a list of tuples with sample particles encountered in this
            parsing, plus the state. An empty list if nothing was parsed.
        """
        result_particles = []

        if not self._read_state[StateKey.METADATA_SENT] and self.footer_data is not None:
            timestamp = float(ntplib.system_to_ntp_time(self._start_time))
            sample = self.extract_metadata_particle(self.footer_data, timestamp)
            self._read_state[StateKey.METADATA_SENT] = True
            result_particles.append((sample, copy.copy(self._read_state)))

        (timestamp, chunk) = self._chunker.get_next_data()

        while chunk is not None:
            # particle-ize the data block received, return the record
            if EOP_MATCHER.match(chunk):
                # this is the end of profile matcher, just increment the state
                self._increment_state(DATA_RECORD_BYTES + TIME_RECORD_BYTES, 0)
            else:
                timestamp = self.calc_timestamp(self._read_state[StateKey.RECORDS_READ])
                sample = self.extract_data_particle(chunk, timestamp)
                if sample:
                    # create particle
                    self._increment_state(DATA_RECORD_BYTES, 1)
                    result_particles.append((sample, copy.copy(self._read_state)))

            (timestamp, chunk) = self._chunker.get_next_data()

        return result_particles
Example #14
    def _handler_acquire_status(self, *args, **kwargs):
        """
        We generate these particles here to avoid the chunker.  This allows us to process status
        messages with embedded messages from the other parts of the instrument.
        @return next_state, (next_agent_state, result)
        """
        ts = ntplib.system_to_ntp_time(time.time())
        parts = []

        for command, particle_class in [
            (InstrumentCommand.SYST_DUMP1, particles.SystStatusParticle),
            (InstrumentCommand.LILY_DUMP1, particles.LilyStatusParticle1),
            (InstrumentCommand.LILY_DUMP2, particles.LilyStatusParticle2),
            (InstrumentCommand.IRIS_DUMP1, particles.IrisStatusParticle1),
            (InstrumentCommand.IRIS_DUMP2, particles.IrisStatusParticle2),
            (InstrumentCommand.NANO_DUMP1, particles.NanoStatusParticle),
        ]:
            result, _ = self._do_cmd_resp(command, response_regex=particle_class.regex_compiled())
            parts.append(result)
        sample = self._extract_sample(particles.BotptStatusParticle,
                                      particles.BotptStatusParticle.regex_compiled(),
                                      NEWLINE.join(parts), ts)

        if self.get_current_state() == ProtocolState.AUTOSAMPLE:
            # acquiring status stops NANO output, restart it
            self._do_cmd_resp(InstrumentCommand.NANO_ON, expected_prompt=NANO_STRING)

        if not sample:
            raise InstrumentProtocolException('Failed to generate status particle')
        return None, (None, sample)
Example #15
 def get_current_timestamp(self, offset=0):
     """
     Get the current time in a format suitable for parameter expiration calculation.
     @param offset: seconds from the current time to offset the timestamp
     @return: an NTP timestamp (seconds since Jan 1 1900)
     """
     return ntplib.system_to_ntp_time(time.time()) + offset
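A sketch of how such a timestamp might drive an expiration check; the protocol instance and refresh hook are hypothetical:

    import time
    import ntplib

    # parameter value tagged to expire 30 seconds from now, both sides in NTP seconds
    expiration = protocol.get_current_timestamp(offset=30)
    if ntplib.system_to_ntp_time(time.time()) > expiration:
        refresh_parameter()  # hypothetical refresh hook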
Example #16
    def _parse_chunk(self):
        records = []

        sample = self.chunk["content"][4:].strip()
        parts = sample.split("\t")
        record = {
            # TODO: Set correct params
            #'model': parts[0],
            #'serial': parts[1],
            'C': float(parts[2]),
            'D': float(parts[3]),
            'E': float(parts[4]),
            'F': float(parts[5]),
            'G': float(parts[6]),
            'H': float(parts[7]),
            'I': float(parts[8]),
            'J': float(parts[9]),
            'K': float(parts[10]),
            'L': float(parts[11]),
        }
        ntp_timestamp = ntplib.system_to_ntp_time(self.chunk["timestamp"])
        particle = FlexDataParticle(driver_timestamp=ntp_timestamp, stream_name=STREAM_NAME)
        particle.set_data_values(record)
        records.append(particle)

        return records
Example #17
def test_results(expected, stream_name, sensor, method):
    subsite, node, sensor = sensor.split('-', 2)
    start = ntplib.system_to_ntp_time(1)
    stop = 1e10

    stream_code = '%s_%s_%s' % (stream_name, sensor, method)

    log.info('Retrieving data (%s)', stream_code)
    now = time.time()
    metadata = edex_tools.get_edex_metadata('localhost', subsite, node, sensor)
    retrieved = edex_tools.get_from_edex('localhost', subsite, node, sensor, method,
                                         stream_name, start, stop, timestamp_as_string=True)
    elapsed = time.time() - now
    retrieved_count = 0
    for each in retrieved.itervalues():
        retrieved_count += len(each)

    log.info('Retrieved %d records (%s) in %.4f secs', retrieved_count, stream_code, elapsed)

    log.debug(pprint.pformat(retrieved, depth=3))
    log.debug('Retrieved %d records from expected data file:', len(expected))
    log.debug(pprint.pformat(expected, depth=3))
    now = time.time()
    failures = edex_tools.compare(retrieved, expected, metadata, ignore_nulls=IGNORE_NULLS)
    elapsed = time.time() - now
    log.info('Compared %d records (%s) in %.4f secs', retrieved_count, stream_code, elapsed)
    return retrieved_count, len(expected), failures
Example #18
    def get_block(self, size=1024):
        """
        This function overrides the get_block function in BufferLoadingParser
        to read the entire file rather than break it into chunks.
        @return The length of data retrieved.
        @throws EOFError when the end of the file is reached.
        """
        # Read in data in blocks so as to not tie up the CPU.
        eof = False
        data = ''
        while not eof:
            next_block = self._stream_handle.read(size)
            if next_block:
                data = data + next_block
                gevent.sleep(0)
            else:
                eof = True

        if data != '':
            self._timestamp = float(ntplib.system_to_ntp_time(time.time()))
            log.debug("Calculated current time timestamp %.10f", self._timestamp)
            self._chunker.add_chunk(data, self._timestamp)
            self.file_complete = True
            return len(data)
        else:  # EOF
            self.file_complete = True
            raise EOFError
Example #19
    def generate_and_notify_event(self):
        if self._index >= len(EventInfo.EVENT_TYPES):
            self._index = 0

        event_type = EventInfo.EVENT_TYPES.values()[self._index]
        self._index += 1

        platform_id = "TODO_some_platform_id"
        message = "%s (synthetic event generated from simulator)" % event_type[
            'name']
        group = event_type['group']
        timestamp = ntplib.system_to_ntp_time(time.time())
        first_time_timestamp = timestamp
        severity = event_type['severity']
        event_instance = {
            'message': message,
            'platform_id': platform_id,
            'timestamp': timestamp,
            'first_time_timestamp': first_time_timestamp,
            'severity': severity,
            'group': group,
        }

        log.debug("notifying event_instance=%s", str(event_instance))
        self._notifier.notify(event_instance)
Example #20
 def run(self):
     while True:
         try:
             data, addr, recvTimestamp = self.taskQueue.get(timeout=1)
             recvPacket = ntplib.NTPPacket()
             recvPacket.from_data(data)
             timeStamp_high = ntplib._to_int(recvPacket.tx_timestamp)
             timeStamp_low = ntplib._to_frac(recvPacket.tx_timestamp)
             sendPacket = ntplib.NTPPacket(version=3, mode=4)
             sendPacket.stratum = 2
             sendPacket.poll = 10
             '''
             sendPacket.precision = 0xfa
             sendPacket.root_delay = 0x0bfa
             sendPacket.root_dispersion = 0x0aa7
             sendPacket.ref_id = 0x808a8c2c
             '''
             sendPacket.ref_timestamp = recvTimestamp - 5
             sendPacket.orig_timestamp = ntplib._to_time(
                 timeStamp_high, timeStamp_low)
             sendPacket.recv_timestamp = recvTimestamp
             sendPacket.tx_timestamp = ntplib.system_to_ntp_time(
                 time.time())
             self.sock.sendto(sendPacket.to_data(), addr)
             print("Sent to %s:%d" % (addr[0], addr[1]))
         except queue.Empty:
             continue
Example #21
def ion_ts_2_ntp(ion_ts):
    """
    Converts an ION system timestamp into NTP.
    The converse operation is ntp_2_ion_ts(ntp_time).

    Note: this should probably be a utility provided by pyon.

    @see https://jira.oceanobservatories.org/tasks/browse/OOIION-631

    @param ion_ts
            "a str representing an integer number, the millis in UNIX epoch"
            according to description of pyon's get_ion_ts function as of
            2013-01-08. See:
            https://github.com/ooici/pyon/blob/6a9e4199db1e9/pyon/util/containers.py#L243-248

    @retval float corresponding to NTP calculated as
            ntplib.system_to_ntp_time(float(ion_ts) / 1000)
    """

    # convert to seconds:
    sys_time = float(ion_ts) / 1000

    # convert to NTP
    ntp_time = ntplib.system_to_ntp_time(sys_time)

    return ntp_time
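The docstring names ntp_2_ion_ts(ntp_time) as the converse operation; a minimal sketch of it, assuming it simply mirrors the conversion above:

    def ntp_2_ion_ts(ntp_time):
        """
        Converts an NTP time back to an ION system timestamp (a str holding
        the milliseconds in the UNIX epoch).
        """
        sys_time = ntplib.ntp_to_system_time(ntp_time)
        return str(int(round(sys_time * 1000)))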
Example #22
    def _parse_header(self):
        """
        Parse required parameters from the header and the footer.
        """
        # read the first bytes from the file
        header = self._stream_handle.read(HEADER_BYTES)
        if len(header) < HEADER_BYTES:
            log.error("File is not long enough to read header")
            raise SampleException("File is not long enough to read header")

        # read the last FOOTER_BYTES bytes from the file
        self._stream_handle.seek(-FOOTER_BYTES, 2)
        footer = self._stream_handle.read() 
        footer_match = FOOTER_MATCHER.search(footer)
        
        # parse the header to get the timestamp
        if footer_match and HEADER_MATCHER.search(header):
            header_match = HEADER_MATCHER.search(header)
            self._stream_handle.seek(len(header_match.group(0)))        
            timestamp_struct = time.strptime(header_match.group(1), "%Y%m%d %H%M%S") 
            timestamp_s = calendar.timegm(timestamp_struct) 
            self._timestamp = float(ntplib.system_to_ntp_time(timestamp_s))
            
            header_footer = header_match.group(0) + footer_match.group(0) 
            
            sample = self._extract_sample(AdcpsJlnStcMetadataParserDataParticle, HEADER_FOOTER_MATCHER,
                                          header_footer, self._timestamp)  
            
            if sample:
                # increment by the length of the matched header and save the header          
                self._increment_state(len(header_match.group(0)))
                self._saved_header = (sample, copy.copy(self._read_state))
        else:
            log.error("File header or footer does not match header regex")
            raise SampleException("File header or footer does not match header regex")
Example #23
    def _build_parsed_values(self):
        self._data_particle_type = self._find_particle_type()
        log.debug('Data particle type = %s', self._data_particle_type)
        try:
            seconds, microseconds, data = self.raw_data
            self.set_internal_timestamp(
                ntplib.system_to_ntp_time(seconds) + microseconds / 1e6)
            self.timelist = [{
                DataParticleKey.VALUE_ID: DeepProfileParticleKey.RAW_SECS,
                DataParticleKey.VALUE: seconds
            }, {
                DataParticleKey.VALUE_ID: DeepProfileParticleKey.RAW_MSECS,
                DataParticleKey.VALUE: microseconds
            }]
        except Exception:
            raise SampleException('Invalid sample, unable to parse timestamp')

        if self._data_particle_type == DataParticleType.ACS:
            return self.build_acs_parsed_values(data)

        if not isinstance(data, dict):
            raise SampleException(
                'Invalid sample, does not contain data dictionary')

        return self.timelist + [{
            DataParticleKey.VALUE_ID:
            DeepProfilerParticle.parameter_map.get(k, k),
            DataParticleKey.VALUE:
            v
        } for k, v in data.iteritems()]
Example #24
    def get_block(self):
        """
        This function overrides the get_block function in dataset_parser.py
        to read the entire file rather than break it into chunks.
        Returns:
          The length of data retrieved.
        An EOFError is raised when the end of the file is reached.
        """
        # Read in data in blocks so as to not tie up the CPU.
        block_size = 1024
        eof = False
        data = ''
        while not eof:
            next_block = self._stream_handle.read(block_size)
            if next_block:
                data = data + next_block
            else:
                eof = True

        if data != '':
            self._chunker.add_chunk(data, ntplib.system_to_ntp_time(time.time()))
            self.file_complete = True
            return len(data)
        else:  # EOF
            self.file_complete = True
            raise EOFError
Example #25
    def __init__(self,
                 raw_data,
                 port_timestamp=None,
                 internal_timestamp=None,
                 preferred_timestamp=DataParticleKey.PORT_TIMESTAMP,
                 quality_flag=DataParticleValue.OK,
                 new_sequence=None):
        """ Build a particle seeded with appropriate information
        
        @param raw_data The raw data used in the particle
        """
        if new_sequence is not None and not isinstance(new_sequence, bool):
            raise TypeError("new_sequence is not a bool")

        self.contents = {
            DataParticleKey.PKT_FORMAT_ID: DataParticleValue.JSON_DATA,
            DataParticleKey.PKT_VERSION: 1,
            DataParticleKey.PORT_TIMESTAMP: port_timestamp,
            DataParticleKey.INTERNAL_TIMESTAMP: internal_timestamp,
            DataParticleKey.DRIVER_TIMESTAMP: ntplib.system_to_ntp_time(time.time()),
            DataParticleKey.PREFERRED_TIMESTAMP: preferred_timestamp,
            DataParticleKey.QUALITY_FLAG: quality_flag,
        }

        if new_sequence is not None:
            self.contents[DataParticleKey.NEW_SEQUENCE] = new_sequence

        self.raw_data = raw_data
Example #27
    def __restore_ntp_pck(self, ntp: NTP) -> NTP:
        self.log.debug('Send timestamp for reconstruction: ' + str(ntp.sent))
        sent_time_stamp = datetime.fromtimestamp(
            ntplib.ntp_to_system_time(ntp.sent))
        sent_time_stamp = sent_time_stamp.replace(year=datetime.now().year)
        sent_time_stamp_as_ntp = ntplib.system_to_ntp_time(
            sent_time_stamp.timestamp())
        ntp.sent = sent_time_stamp_as_ntp
        self.log.debug('Send timestamp after reconstruction: ' + str(ntp.sent))
        pck = CP3Package(ntp)

        if NTPMode.from_bit_string(pck.mode()) is NTPMode.CLIENT:
            self.log.debug("Restored in Client mode")
            ntp.ref = 0
            ntp.orig = 0
            ntp.recv = 0
        if NTPMode.from_bit_string(pck.mode()) is NTPMode.SERVER \
                or NTPMode.from_bit_string(pck.mode()) is NTPMode.BROADCAST_SERVER:
            self.log.debug("Restored in Server mode")
            origin_last_32 = pck.origin_timestamp()[32:64]
            received_last_32 = pck.receive_timestamp()[32:64]
            transmit_first_32 = pck.origin_timestamp()[0:32]

            pck.set_origin_timestamp(transmit_first_32 + origin_last_32)
            pck.set_receive_timestamp(transmit_first_32 + received_last_32)
            ntp = pck.ntp()
        self.log.debug("Reconstruction complete.")
        #ntp.show()
        return ntp
Example #28
    def calculate_timestamp(self, year_and_day_of_year, sample_time):
        """
        Calculate the timestamp
        :param year_and_day_of_year: Integer year and day of year value
        :param sample_time: Sample time in floating point hours
        :return: The timestamp in ntp64
        """
        # turn year and day of year integer into a string to pull out specific digits
        [year, day_of_year] = get_year_and_day_of_year(year_and_day_of_year)

        if year is None or day_of_year is None:
            # need at least 5 digits to get year and day of year
            msg = 'Not enough digits for year and day of year: %s, unable to calculate timestamp' % \
                  str(year_and_day_of_year)
            log.warning(msg)
            self._exception_callback(SampleException(msg))
            # return no timestamp so the particle is not calculated
            return None

        # convert sample time in floating point hours to hours, minutes, seconds,
        # and the fractional part of a second
        hours = int(sample_time)
        minutes = int(60.0 * (sample_time - float(hours)))
        seconds = 3600.0 * (sample_time - float(hours)) - float(minutes) * 60.0
        fractional_seconds = seconds - int(seconds)

        # convert to a datetime (whole seconds only; the fractional part is added
        # back in when building the final utc timestamp)
        date = datetime.datetime(year, 1, 1) + datetime.timedelta(days=day_of_year - 1,
                                                                  hours=hours,
                                                                  minutes=minutes,
                                                                  seconds=int(seconds))

        # convert from datetime to utc seconds since Jan 1 1970, restoring the fractional seconds
        utc_timestamp = calendar.timegm(date.timetuple()) + fractional_seconds
        # convert to seconds since Jan 1 1900 for ntp
        return ntplib.system_to_ntp_time(utc_timestamp)
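A worked conversion for intuition; the input values are hypothetical and assume get_year_and_day_of_year(2014045) yields year 2014, day 45:

    # sample_time = 13.75 hours -> hours=13, minutes=45, seconds=0.0
    # datetime(2014, 1, 1) + timedelta(days=44, hours=13, minutes=45) -> 2014-02-14 13:45:00
    # calendar.timegm(...) -> 1392385500 (UTC seconds since 1970)
    # system_to_ntp_time() adds the 2208988800-second epoch offset -> 3601374300.0
    ntp_ts = parser.calculate_timestamp(2014045, 13.75)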
Example #29
    def parse_file(self):
        """
        Create particles out of chunks and raise an event
        @retval a list of tuples with sample particles encountered in this
            parsing, plus the state. An empty list is returned if nothing was
            parsed.
        """
        for line in self._stream_handle:
            try:
                # create the dictionary of key/value pairs composed of the labels and the values from the
                # record being parsed
                # e.g. data_dict = {'sci_bsipar_temp': 10.67, ...}
                data_dict = self._read_data(line)
                timestamp = ntplib.system_to_ntp_time(data_dict['m_present_time'])

            except SampleException as e:
                log.error('Exception parsing line: %s', e)
                self._exception_callback(e)
                continue

            except KeyError as e:
                log.error('KEY ERROR: %s', e)
                self._exception_callback(SampleException("GliderParser.parse_chunks(): unable to find timestamp in data"))
                continue

            if self._has_science_data(data_dict, self._particle_class):
                # create the particle
                self._record_buffer.append(self._extract_sample(self._particle_class, None, data_dict, timestamp))
Example #30
def delimited_to_filesystem(input_data, delimiter=','):
    request_start_time = time.time()
    log.info("Handling request to {} - {}".format(request.url, input_data.get('streams', "")))

    start = input_data.get('start', app.config["UNBOUND_QUERY_START"])
    stop = input_data.get('stop', ntplib.system_to_ntp_time(time.time()))
    limit = input_data.get('limit', 0)
    if limit <= 0:
        limit = None

    prov = input_data.get('include_provenance', True)
    annotate = input_data.get('include_annotations', False)
    base_path = os.path.join(app.config['ASYNC_DOWNLOAD_BASE_DIR'], input_data.get('directory', 'unknown'))
    try:
        json_str = util.calc.get_csv(input_data.get('streams'), start, stop, input_data.get('coefficients', {}),
                                         limit=limit,
                                         include_provenance=prov,
                                         include_annotations=annotate, request_uuid=input_data.get('requestUUID', ''),
                                         location_information=input_data.get('locations', {}),
                                         disk_path=input_data.get('directory','unknown'), delimiter=delimiter)
    except Exception as e:
        json_str = output_async_error(input_data, e)
    write_status(base_path)
    log.info("Request took {:.2f}s to complete".format(time.time() - request_start_time))
    return Response(json_str, mimetype='application/json')
Example #31
    def test_get_set_value(self):
        """
        Test setting values after creation
        """
        test_particle = self.TestDataParticle(
            self.sample_raw_data,
            preferred_timestamp=DataParticleKey.PORT_TIMESTAMP,
            internal_timestamp=self.sample_internal_timestamp,
        )

        new_time = self.sample_internal_timestamp + 200

        test_particle.set_value(DataParticleKey.INTERNAL_TIMESTAMP, new_time)
        fetched_time = test_particle.get_value(DataParticleKey.INTERNAL_TIMESTAMP)
        self.assertEqual(new_time, fetched_time)

        self.assertRaises(ReadOnlyException, test_particle.set_value, DataParticleKey.PKT_VERSION, 2)

        self.assertRaises(
            ReadOnlyException,
            test_particle.set_value,
            DataParticleKey.INTERNAL_TIMESTAMP,
            ntplib.system_to_ntp_time(time.time() + (86400 * 600)),
        )

        self.assertRaises(NotImplementedException, test_particle.get_value, "bad_key")
Example #32
    def test_velocity_header_sample_format(self):
        """
        Test to make sure we can get velocity_header sample data out in a
        reasonable format. Parsed is all we care about...raw is tested in the
        base DataParticle tests
        """
        
        port_timestamp = 3555423720.711772
        driver_timestamp = 3555423722.711772
        text_timestamp = time.strptime('17/12/2012 11:12:49', "%d/%m/%Y %H:%M:%S")
        internal_timestamp = ntplib.system_to_ntp_time(time.mktime(text_timestamp))
 
        # construct the expected particle
        expected_particle = {
            DataParticleKey.NEW_SEQUENCE: None,
            DataParticleKey.PKT_FORMAT_ID: DataParticleValue.JSON_DATA,
            DataParticleKey.PKT_VERSION: 1,
            DataParticleKey.STREAM_NAME: DataParticleType.VELOCITY_HEADER,
            DataParticleKey.PORT_TIMESTAMP: port_timestamp,
            DataParticleKey.DRIVER_TIMESTAMP: driver_timestamp,
            DataParticleKey.INTERNAL_TIMESTAMP: internal_timestamp,
            DataParticleKey.PREFERRED_TIMESTAMP: DataParticleKey.PORT_TIMESTAMP,
            DataParticleKey.QUALITY_FLAG: DataParticleValue.OK,
            DataParticleKey.VALUES: velocity_header_particle
            }
        
        self.compare_parsed_data_particle(VectorVelocityHeaderDataParticle,
                                          velocity_header_sample(),
                                          expected_particle)
Example #33
    def parse_file(self):
        """
        Parse the *.mp4 file.
        """
        match = FILE_PATH_MATCHER.match(self._stream_handle.name)
        if match:
            file_datetime = match.group('Date') + match.group('Time')
            time_stamp = ntplib.system_to_ntp_time(
                utilities.formatted_timestamp_utc_time(file_datetime,
                                                       TIMESTAMP_FORMAT))

            # Extract a particle and append it to the record buffer
            particle = self._extract_sample(CamhdAInstrumentDataParticle,
                                            None,
                                            match.group('Path'),
                                            internal_timestamp=time_stamp)
            log.debug('Parsed particle: %s', particle.generate_dict())
            self._record_buffer.append(particle)

        else:
            # Files retrieved from the instrument should always match the timestamp naming convention
            self.recov_exception_callback(
                "Unable to extract file time from input file name: %s. "
                "Expected format REFDES-YYYYmmddTHHMMSSZ.mp4" %
                self._stream_handle.name)
Example #34
    def test_system_sample_format(self):
        """
        Verify driver can get system sample data out in a reasonable
        format. Parsed is all we care about...raw is tested in the base
        DataParticle tests
        """
        port_timestamp = 3555423720.711772
        driver_timestamp = 3555423722.711772
        text_timestamp = time.strptime('13/12/2012 17:03:26',
                                       "%d/%m/%Y %H:%M:%S")
        internal_timestamp = ntplib.system_to_ntp_time(
            timegm_to_float(text_timestamp))

        # construct the expected particle
        expected_particle = {
            DataParticleKey.PKT_FORMAT_ID: DataParticleValue.JSON_DATA,
            DataParticleKey.PKT_VERSION: 1,
            DataParticleKey.STREAM_NAME: DataParticleType.SYSTEM,
            DataParticleKey.PORT_TIMESTAMP: port_timestamp,
            DataParticleKey.DRIVER_TIMESTAMP: driver_timestamp,
            DataParticleKey.INTERNAL_TIMESTAMP: internal_timestamp,
            DataParticleKey.PREFERRED_TIMESTAMP:
            DataParticleKey.PORT_TIMESTAMP,
            DataParticleKey.QUALITY_FLAG: DataParticleValue.OK,
            DataParticleKey.VALUES: system_particle
        }

        self.compare_parsed_data_particle(VectorSystemDataParticle,
                                          system_sample(), expected_particle)
Example #35
    def get_block(self):
        """
        This function overrides the get_block function in dataset_parser.py
        to read the entire file rather than break it into chunks.
        Returns:
          The length of data retrieved.
        An EOFError is raised when the end of the file is reached.
        """
        # Read in data in blocks so as to not tie up the CPU.
        block_size = 1024
        eof = False
        data = ''
        while not eof:
            next_block = self._stream_handle.read(block_size)
            if next_block:
                data = data + next_block
            else:
                eof = True

        if data != '':
            self._chunker.add_chunk(data,
                                    ntplib.system_to_ntp_time(time.time()))
            self.file_complete = True
            return len(data)
        else:  # EOF
            self.file_complete = True
            raise EOFError
Example #36
 def process_wave(self, fields):
     date_time_str = fields[1] + SPACE + fields[0]  # concatenate date and time
     date_time_utc = formatted_timestamp_utc_time(date_time_str, DATE_TIME_FORMAT)
     date_time_ntp = ntplib.system_to_ntp_time(date_time_utc)
     self._eng_data[7] = date_time_ntp
     self._eng_data[8:] = fields[2:]
     return
Example #37
    def __init__(self, raw_data,
                 port_timestamp=None,
                 internal_timestamp=None,
                 preferred_timestamp=DataParticleKey.PORT_TIMESTAMP,
                 quality_flag=DataParticleValue.OK,
                 new_sequence=None):
        """ Build a particle seeded with appropriate information
        
        @param raw_data The raw data used in the particle
        """
        if new_sequence is not None and not isinstance(new_sequence, bool):
            raise TypeError("new_sequence is not a bool")

        self.contents = {
            DataParticleKey.PKT_FORMAT_ID: DataParticleValue.JSON_DATA,
            DataParticleKey.PKT_VERSION: 1,
            DataParticleKey.PORT_TIMESTAMP: port_timestamp,
            DataParticleKey.INTERNAL_TIMESTAMP: internal_timestamp,
            DataParticleKey.DRIVER_TIMESTAMP: ntplib.system_to_ntp_time(time.time()),
            DataParticleKey.PREFERRED_TIMESTAMP: preferred_timestamp,
            DataParticleKey.QUALITY_FLAG: quality_flag,
        }

        if new_sequence is not None:
            self.contents[DataParticleKey.NEW_SEQUENCE] = new_sequence

        self.raw_data = raw_data
Example #38
    def _string_to_ntp_date_time(self, datestr):
        """
        Extract an NTP date from an ISO8601 formatted date string.
        @param datestr an ISO8601 formatted string containing date information
        @retval an NTP date number (seconds since Jan 1 1900)
        @throws IOError if datestr is not a string.
        @throws ValueError if datestr cannot be converted to a date.
        """
        if not isinstance(datestr, str):
            raise IOError('Value %s is not a string.' % str(datestr))
        if not DATE_MATCHER.match(datestr):
            raise ValueError("date string not in ISO8601 format YYYY-MM-DDTHH:MM:SS.SSSSZ")

        try:
            # This assumes input date string are in UTC (=GMT)
            if datestr[-1:] != 'Z':
                datestr += 'Z'

            # the parsed date time represents a GMT time, but strftime
            # does not take timezone into account, so these are seconds from the
            # local start of 1970
            local_sec = float(parser.parse(datestr).strftime("%s.%f"))
            # remove the local time zone to convert to gmt (seconds since gmt jan 1 1970)
            gmt_sec = local_sec - time.timezone
            # convert to ntp (seconds since gmt jan 1 1900)
            timestamp = ntplib.system_to_ntp_time(gmt_sec)

        except ValueError as e:
            raise ValueError('Value %s could not be formatted to a date. %s' % (str(datestr), e))

        log.debug("converting time string '%s', unix_ts: %s ntp: %s", datestr, gmt_sec, timestamp)

        return timestamp
Example #40
def validate(input_data):
    if input_data is None:
        raise MalformedRequestException('Received NULL input data')

    request_id = input_data.get('requestUUID', None)
    limit = input_data.get('limit', 0)

    if limit <= 0:
        limit = None
    elif limit > app.config['UI_HARD_LIMIT']:
        message = '<{:s}> Requested number of particles ({:,d}) larger than maximum allowed limit ({:,d})'
        message = message.format(request_id, limit, app.config['UI_HARD_LIMIT'])
        raise UIHardLimitExceededException(message=message)

    streams = _validate_streams(input_data)
    coefficients = _validate_coefficients(input_data)
    user_flags = _get_userflags(input_data)

    start = input_data.get('start', app.config["UNBOUND_QUERY_START"])
    stop = input_data.get('stop', ntplib.system_to_ntp_time(time.time()))
    prov = input_data.get('include_provenance', False)
    annotate = input_data.get('include_annotations', False)
    qc = input_data.get('qcParameters', {})
    strict = input_data.get('strict_range', False)
    locs = input_data.get('locations', {})
    execute_dpa = input_data.get('execute_dpa', True)

    return RequestParameters(request_id, streams, coefficients, user_flags, start,
                             stop, limit, prov, annotate, qc, strict, locs, execute_dpa)
Example #41
 def _set_cp3_mode(self, year: int):
     ntp = self.ntp()
     time = ntplib.system_to_ntp_time(
         datetime.fromtimestamp(ntplib.ntp_to_system_time(ntp.sent))
         .replace(year=year).timestamp())
     ntp.sent = time
     raw = RawNTP(ntp)
     self.set_transmit_timestamp(raw.transmit_timestamp())
Example #42
    def _get_new_ctd_packet(self, length):

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def._id)
        #Explicitly make these numpy arrays...
        c = numpy.array([random.uniform(0.0,75.0)  for i in xrange(length)]) 
        t = numpy.array([random.uniform(-1.7, 21.0) for i in xrange(length)]) 
        p = numpy.array([random.lognormvariate(1,2) for i in xrange(length)])
        lat = numpy.array([random.uniform(-90.0, 90.0) for i in xrange(length)]) 
        lon = numpy.array([random.uniform(0.0, 360.0) for i in xrange(length)]) 
        h = numpy.array([random.uniform(0.0, 360.0) for i in xrange(length)])

        start_time = ntplib.system_to_ntp_time(time.time()) - (length + 1)
        tvar = numpy.array([start_time + i for i in xrange(1,length+1)])


        rdt['time'] = tvar
        rdt['lat'] = lat
        rdt['lon'] = lon
        rdt['temp'] = t
        rdt['conductivity'] = c
        rdt['pressure'] = p

#        rdt['coordinates'] = rdt0
#        rdt['data'] = rdt1

        g = rdt.to_granule(data_producer_id=self.id)

        return g
Example #43
    def parse_chunks(self):
        """
        Parse out any pending data chunks in the chunker. If
        it is a valid data piece, build a particle, update the position and
        timestamp. Go until the chunker has no more valid data.
        @retval a list of tuples with sample particles encountered in this
            parsing, plus the state. An empty list if nothing was parsed.
        """
        result_particles = []

        (nd_timestamp, non_data, non_start,
         non_end) = self._chunker.get_next_non_data_with_index(clean=False)
        (timestamp, chunk, start,
         end) = self._chunker.get_next_data_with_index(clean=True)

        # if there is any non data handle it
        self.handle_non_data(non_data, non_end, start)

        while chunk is not None:
            header_match = SIO_HEADER_MATCHER.match(chunk)

            if header_match.group(SIO_HEADER_GROUP_ID) == 'CS':
                data_match = ENG_MATCHER.match(chunk)
                if data_match:
                    # put timestamp from hex string to float:
                    posix_time = int(
                        header_match.group(SIO_HEADER_GROUP_TIMESTAMP), 16)
                    log.debug('utc timestamp %s',
                              datetime.utcfromtimestamp(posix_time))
                    timestamp = ntplib.system_to_ntp_time(float(posix_time))
                    # particle-ize the data block received, return the record
                    sample = self._extract_sample(self._particle_class,
                                                  None,
                                                  data_match,
                                                  internal_timestamp=timestamp)
                    if sample:
                        # create particle
                        result_particles.append(sample)

                else:
                    log.warn('CS data does not match REGEX')
                    self._exception_callback(
                        SampleException('CS data does not match REGEX'))

            # 'PS' IDs will also be in this file but are specifically ignored
            elif header_match.group(SIO_HEADER_GROUP_ID) != 'PS':
                message = 'Unexpected Sio Header ID %s' % header_match.group(
                    SIO_HEADER_GROUP_ID)
                log.warn(message)
                self._exception_callback(UnexpectedDataException(message))

            (nd_timestamp, non_data, non_start,
             non_end) = self._chunker.get_next_non_data_with_index(clean=False)
            (timestamp, chunk, start,
             end) = self._chunker.get_next_data_with_index(clean=True)

            # if there is any non data handle it
            self.handle_non_data(non_data, non_end, start)

        return result_particles
Example #44
 def set_value(self, new_val):
     """
     Set the stored value to the new value
     @param new_val The new value to set for the parameter
     """
     self.value = new_val
     self.timestamp = ntplib.system_to_ntp_time(time.time())
Example #45
    def _convert_string_to_timestamp(ts_str):
        """
        Converts the given string from this data stream's format into an NTP
        timestamp. 
        @param ts_str The timestamp string in the format "yyyy/mm/dd hh:mm:ss.sss"
        @retval The NTP4 timestamp
        """
        match = LOG_TIME_MATCHER.match(ts_str)
        if not match:
            raise ValueError("Invalid time format: %s" % ts_str)

        zulu_ts = "%04d-%02d-%02dT%02d:%02d:%fZ" % (
            int(match.group(1)), int(match.group(2)), int(match.group(3)),
            int(match.group(4)), int(match.group(5)), float(match.group(6))
        )
        log.trace("converted ts '%s' to '%s'", ts_str[match.start(0):(match.start(0) + 24)], zulu_ts)

        format = "%Y-%m-%dT%H:%M:%S.%fZ"
        dt = datetime.strptime(zulu_ts, format)
        unix_timestamp = calendar.timegm(dt.timetuple()) + (dt.microsecond / 1000000.0)

        ntptime = ntplib.system_to_ntp_time(unix_timestamp)

        log.trace("Converted time \"%s\" (unix: %s) into %s", ts_str, unix_timestamp, ntptime)
        return ntptime
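A usage sketch with a hypothetical timestamp in the documented layout, assuming the input is UTC:

    # '2012/12/17 11:12:49.123' -> zulu '2012-12-17T11:12:49.123000Z'
    # -> unix 1355742769.123 -> ntp 3564731569.123
    ntp_ts = _convert_string_to_timestamp('2012/12/17 11:12:49.123')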
Example #46
    def _handler_acquire_status(self, *args, **kwargs):
        """
        We generate these particles here to avoid the chunker.  This allows us to process status
        messages with embedded messages from the other parts of the instrument.
        @return next_state, (next_agent_state, result)
        """
        ts = ntplib.system_to_ntp_time(time.time())
        parts = []

        for command, particle_class in [
            (InstrumentCommand.SYST_DUMP1, particles.SystStatusParticle),
            (InstrumentCommand.LILY_DUMP1, particles.LilyStatusParticle1),
            (InstrumentCommand.LILY_DUMP2, particles.LilyStatusParticle2),
            (InstrumentCommand.IRIS_DUMP1, particles.IrisStatusParticle1),
            (InstrumentCommand.IRIS_DUMP2, particles.IrisStatusParticle2),
            (InstrumentCommand.NANO_DUMP1, particles.NanoStatusParticle),
        ]:
            result, _ = self._do_cmd_resp(
                command, response_regex=particle_class.regex_compiled())
            parts.append(result)
        sample = self._extract_sample(
            particles.BotptStatusParticle,
            particles.BotptStatusParticle.regex_compiled(),
            NEWLINE.join(parts), ts)

        if self.get_current_state() == ProtocolState.AUTOSAMPLE:
            # acquiring status stops NANO output, restart it
            self._do_cmd_resp(InstrumentCommand.NANO_ON,
                              expected_prompt=NANO_STRING)

        if not sample:
            raise InstrumentProtocolException(
                'Failed to generate status particle')
        return None, (None, sample)
Example #47
    def test_velocity_header_sample_format(self):
        """
        Verify driver can get velocity_header sample data out in a
        reasonable format. Parsed is all we care about...raw is tested in the
        base DataParticle tests
        """

        port_timestamp = 3555423720.711772
        driver_timestamp = 3555423722.711772
        text_timestamp = time.strptime('17/12/2012 11:12:49',
                                       "%d/%m/%Y %H:%M:%S")
        internal_timestamp = ntplib.system_to_ntp_time(
            time.mktime(text_timestamp))

        # construct the expected particle
        expected_particle = {
            DataParticleKey.PKT_FORMAT_ID: DataParticleValue.JSON_DATA,
            DataParticleKey.PKT_VERSION: 1,
            DataParticleKey.STREAM_NAME: DataParticleType.VELOCITY_HEADER,
            DataParticleKey.PORT_TIMESTAMP: port_timestamp,
            DataParticleKey.DRIVER_TIMESTAMP: driver_timestamp,
            DataParticleKey.INTERNAL_TIMESTAMP: internal_timestamp,
            DataParticleKey.PREFERRED_TIMESTAMP:
            DataParticleKey.PORT_TIMESTAMP,
            DataParticleKey.QUALITY_FLAG: DataParticleValue.OK,
            DataParticleKey.VALUES: velocity_header_particle
        }

        self.compare_parsed_data_particle(VectorVelocityHeaderDataParticle,
                                          velocity_header_sample(),
                                          expected_particle)
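One caveat in the test above: time.mktime interprets the struct_time in the machine's local zone, so internal_timestamp shifts with the test host's timezone. If the text timestamp is meant as UTC, calendar.timegm is the zone-independent equivalent (a sketch, not the test's actual code):

    import calendar
    import time
    import ntplib

    text_timestamp = time.strptime('17/12/2012 11:12:49', "%d/%m/%Y %H:%M:%S")
    # timegm treats the tuple as UTC regardless of the host timezone
    internal_timestamp = ntplib.system_to_ntp_time(calendar.timegm(text_timestamp))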
Example #48
0
    def set_platform_attribute_values(self, platform_id, input_attrs):
        self._enter()

        if platform_id not in self._pnodes:
            return {platform_id: InvalidResponse.PLATFORM_ID}

        assert isinstance(input_attrs, list)

        timestamp = ntplib.system_to_ntp_time(time.time())
        attrs = self._pnodes[platform_id].attrs
        vals = {}
        for (attrName, attrValue) in input_attrs:
            if attrName in attrs:
                attr = attrs[attrName]
                if attr.writable:
                    #
                    # TODO check given attrValue
                    #
                    vals[attrName] = (attrValue, timestamp)
                else:
                    vals[attrName] = InvalidResponse.ATTRIBUTE_NOT_WRITABLE
            else:
                vals[attrName] = InvalidResponse.ATTRIBUTE_ID

        retval = {platform_id: vals}
        log.debug("set_platform_attribute_values returning: %s", str(retval))
        return retval
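The method above takes one NTP timestamp per call and pairs it with every successfully written attribute. A minimal self-contained sketch of that pairing (names hypothetical):

    import time
    import ntplib

    def stamp_writes(input_attrs):
        # one timestamp for the whole batch, paired with each written value
        timestamp = ntplib.system_to_ntp_time(time.time())
        return {name: (value, timestamp) for name, value in input_attrs}

    print(stamp_writes([('input_voltage', 12.5), ('heading', 271.0)]))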
Example #49
0
    def _parse_header(self):
        """
        Parse required parameters from the header and the footer.
        """

        # read the first bytes from the file
        header = self._stream_handle.read(HEADER_BYTES)
        if len(header) < HEADER_BYTES:
            log.warn("File is not long enough to read header")
            return

        # read the last FOOTER_BYTES (43) bytes from the file
        self._stream_handle.seek(-FOOTER_BYTES, 2)
        footer = self._stream_handle.read()
        footer_match = FOOTER_MATCHER.search(footer)

        # parse the header to get the timestamp
        header_match = HEADER_MATCHER.search(header)

        if footer_match and header_match:
            self._stream_handle.seek(len(header_match.group(0)))
            timestamp_struct = time.strptime(header_match.group(1), "%Y%m%d %H%M%S")
            timestamp_s = calendar.timegm(timestamp_struct)
            self._timestamp = float(ntplib.system_to_ntp_time(timestamp_s))

            header_footer = header_match.group(0) + footer_match.group(0)

            particle = self._extract_sample(self._metadata_class, None,
                                            header_footer, internal_timestamp=self._timestamp)

            self._record_buffer.append(particle)

        else:
            log.warn("File header or footer does not match header regex")
Example #50
0
    def set_value(self, new_val):
        """
        Set the stored value to the new value
        @param new_val The new value to set for the parameter
        """
        self.value = new_val
        self.timestamp = ntplib.system_to_ntp_time(time.time())
Example #51
0
    def _convert_string_to_timestamp(ts_str):
        """
        Converts the given string from this data stream's format into an NTP
        timestamp. 
        @param ts_str The timestamp string in the format "yyyy/mm/dd hh:mm:ss.sss"
        @retval The NTP4 timestamp
        """
        match = LOG_TIME_MATCHER.match(ts_str)
        if not match:
            raise ValueError("Invalid time format: %s" % ts_str)

        zulu_ts = "%04d-%02d-%02dT%02d:%02d:%fZ" % (
            int(match.group(1)),
            int(match.group(2)),
            int(match.group(3)),
            int(match.group(4)),
            int(match.group(5)),
            float(match.group(6)),
        )
        log.trace("converted ts '%s' to '%s'", ts_str[match.start(0) : (match.start(0) + 24)], zulu_ts)

        converted_time = float(parser.parse(zulu_ts).strftime("%s.%f"))
        adjusted_time = converted_time - time.timezone
        ntptime = ntplib.system_to_ntp_time(adjusted_time)

        log.trace('Converted time "%s" (unix: %s) into %s', ts_str, adjusted_time, ntptime)
        return ntptime
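This variant leans on strftime("%s"), a glibc extension that is not part of standard Python and silently applies the local timezone, which is why the time.timezone adjustment follows. Example #45 does the same conversion portably with calendar.timegm; a sketch of that approach applied here:

    import calendar
    import ntplib
    from dateutil import parser

    dt = parser.parse("2012-12-17T11:12:49.500000Z")   # timezone-aware UTC datetime
    unix_timestamp = calendar.timegm(dt.utctimetuple()) + dt.microsecond / 1e6
    ntptime = ntplib.system_to_ntp_time(unix_timestamp)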
Example #52
0
    def _generate_metadata_particle(self):
        """
        This function generates a metadata particle.
        """

        particle_data = dict()

        for key in self._metadata_matches_dict.keys():

            self._process_metadata_match_dict(key, particle_data)

        utc_time = formatted_timestamp_utc_time(
            particle_data[CtdmoGhqrImodemDataParticleKey.DATE_TIME_STRING],
            "%Y%m%d %H%M%S")
        ntp_time = ntplib.system_to_ntp_time(utc_time)

        # Generate the metadata particle class and add the
        # result to the list of particles to be returned.
        particle = self._extract_sample(self.metadata_particle_class,
                                        None,
                                        particle_data,
                                        internal_timestamp=ntp_time)
        if particle is not None:
            log.debug("Appending metadata particle to record buffer")
            self._record_buffer.append(particle)
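formatted_timestamp_utc_time comes from the surrounding parser framework and is not defined in this example; below is a plausible stand-in, assuming it simply parses a UTC timestamp string into Unix seconds (a sketch, not the framework's actual helper). Example #53 that follows is nearly identical to this one, passing the timestamp positionally rather than as the internal_timestamp keyword:

    import calendar
    import time
    import ntplib

    def formatted_timestamp_utc_time(ts_str, fmt):
        # treat the parsed string as UTC and return Unix seconds
        return calendar.timegm(time.strptime(ts_str, fmt))

    ntp_time = ntplib.system_to_ntp_time(
        formatted_timestamp_utc_time("20141231 235959", "%Y%m%d %H%M%S"))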
Example #53
0
    def _generate_metadata_particle(self):
        """
        This function generates a metadata particle.
        """

        particle_data = dict()

        for key in self._metadata_matches_dict.keys():

            self._process_metadata_match_dict(key, particle_data)

        utc_time = formatted_timestamp_utc_time(
            particle_data[CtdmoGhqrImodemDataParticleKey.DATE_TIME_STRING],
            "%Y%m%d %H%M%S")
        ntp_time = ntplib.system_to_ntp_time(utc_time)

        # Generate the metadata particle class and add the
        # result to the list of particles to be returned.
        particle = self._extract_sample(self.metadata_particle_class,
                                        None,
                                        particle_data,
                                        ntp_time)
        if particle is not None:
            log.debug("Appending metadata particle to record buffer")
            self._record_buffer.append(particle)
Example #54
0
    def _parse_header(self):
        """
        Parse the start time of the profile and the sensor
        """
        # read the first bytes from the file
        header = self._stream_handle.read(HEADER_BYTES)

        match = WFP_E_COASTAL_FLAGS_HEADER_MATCHER.match(header)

        # parse the header
        if match is not None:

            # use the profile start time as the timestamp
            fields = struct.unpack('>II', match.group(2))
            timestamp = int(fields[1])
            self._timestamp = float(ntplib.system_to_ntp_time(timestamp))
            log.debug(self._start_data_particle_class)
            sample = self._extract_sample(self._start_data_particle_class,
                                          None,
                                          header, self._timestamp)

            if sample:
                # create particle
                self._increment_state(HEADER_BYTES)
                log.debug("Extracting header %s with read_state: %s", sample, self._read_state)
                self._saved_header = (sample, copy.copy(self._read_state))
        else:
            raise SampleException("File header does not match header regex")
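The '>II' format string unpacks two big-endian unsigned 32-bit integers from the header, and the second is treated as the profile start time in Unix seconds. A self-contained sketch (the packed values are fabricated for illustration):

    import struct
    import ntplib

    raw = struct.pack('>II', 1, 1355742769)       # e.g. sensor field, start time
    fields = struct.unpack('>II', raw)
    timestamp = float(ntplib.system_to_ntp_time(int(fields[1])))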
Example #55
0
    def _convert_string_to_timestamp(ts_str):
        """
        Converts the given string from this data stream's format into an NTP
        timestamp. This is very likely instrument specific.
        @param ts_str The timestamp string in the format "mm/dd/yyyy hh:mm:ss"
        @retval The NTP4 timestamp
        """
        match = DATE_MATCHER.match(ts_str)
        if not match:
            raise ValueError("Invalid time format: %s" % ts_str)

        zulu_ts = "%04d-%02d-%02dT%02d:%02d:%02dZ" % (
            int(match.group(3)), int(match.group(1)), int(match.group(2)),
            int(match.group(4)), int(match.group(5)), int(match.group(6)))
        log.trace("converted ts '%s' to '%s'", ts_str, zulu_ts)

        localtime_offset = float(
            parser.parse("1970-01-01T00:00:00.00Z").strftime("%s.%f"))
        converted_time = float(parser.parse(zulu_ts).strftime("%s.%f"))
        adjusted_time = round(converted_time - localtime_offset)
        ntptime = ntplib.system_to_ntp_time(adjusted_time)

        log.trace("Converted time \"%s\" (unix: %s) into %s", ts_str,
                  adjusted_time, ntptime)
        return ntptime
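Here the epoch parsed through the same strftime("%s") path supplies localtime_offset, so subtracting it cancels the local-zone bias that the glibc %s directive introduces. A portable sketch of the same conversion without that workaround:

    import calendar
    import time
    import ntplib

    def convert(ts_str):
        # strptime + timegm parses the string as UTC with no local-zone bias
        adjusted_time = calendar.timegm(time.strptime(ts_str, "%m/%d/%Y %H:%M:%S"))
        return ntplib.system_to_ntp_time(adjusted_time)

    print(convert("12/17/2012 11:12:49"))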