Example #1
    def read(self, filename):
        h = []
        pages = super(context, self).read(filename)
        # use a sliding window to find valid packets
        for p in pages:
            start_ind = 0
            stop_ind  = start_ind + datalen + majorTimelen
            while stop_ind < (len(p)-majorTimelen-datalen):
                skipped = 0
                cp = None  # only extend h below if a context page was actually decoded
                if None not in p[start_ind:stop_ind]:
                    # the data is all there in a row, so just make a context object
                    cp = contextPage(p[start_ind:stop_ind])
                else:
                    # print("Encountered a missing packet")
                    missing_ind = p[start_ind:stop_ind].index(None)
                    if missing_ind < (majorTimelen-1):
                        # did not have a whole time stamp; skip this context, there is no useful info
                        print("\tSkipped data, no time stamp")
                        skipped = 1
                    elif missing_ind >= (majorTimelen + datalen//2) - 1:
                        # this means we have a valid time stamp and one valid measurement,
                        #    so fill in the missing bytes with '00' and set the missing measurement to None;
                        #    the context() class then needs to catch the None and set it to fill
                        fill = ['00'] * ((majorTimelen+datalen) - missing_ind)
                        cp = contextPage(p[start_ind:stop_ind][0:missing_ind] + fill)
                        cp[0][1][1] = [None]
                        print("\t{0} Filled some data".format(cp[0][0].isoformat()))
                        stop_ind -= (len(p[start_ind:stop_ind])-missing_ind-1)
                        skipped=1
                    else:
                        # this means no valid data, so fill in the missing bytes with '00' and set them to None;
                        #    the context() class then needs to catch the None and set it to fill;
                        #    we keep this window since it has a valid time stamp
                        fill = ['00'] * ((majorTimelen+datalen) - missing_ind)
                        cp = contextPage(p[start_ind:stop_ind][0:missing_ind] + fill)
                        if cp:
                            cp[0][1][:] = [None, None]
                            print("\t{0} Filled all data".format(cp[0][0].isoformat()))
                        stop_ind -= (len(p[start_ind:stop_ind])-missing_ind-1)
                        skipped=1

                start_ind = stop_ind
                if skipped:
                    # we need to get back in sync; for these data that means finding a
                    #   valid date in the data
                    skip_num = 0
                    while start_ind < len(p) and \
                              len(p[start_ind:]) > majorTimelen+datalen and \
                              not FIREdata.validDate(p[start_ind:start_ind+majorTimelen+datalen]):
                        start_ind += 1
                        skip_num += 1
                    print("\t\tSkipped {0} bytes at the start of the next packet".format(skip_num))

                stop_ind = start_ind + (datalen + majorTimelen)
                if cp is not None:
                    h.extend(cp)
                        
        print("Decoded {0} context measurements".format(len(h)))
        return context(h)
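The window and resync logic above is easier to see in isolation. Below is a minimal, self-contained sketch of the same idea: slide a fixed-size window over a page of bytes, keep complete records, pad short gaps with '00' filler, and skip ahead until the next window looks like a record start. The names PACKET_LEN, is_valid_start, and recover_records are illustrative only and are not part of the FIREdata/context API; FIREdata.validDate() plays the role of is_valid_start in the real code.

    # Hypothetical stand-alone sketch of the sliding-window recovery used in read() above.
    PACKET_LEN = 8  # stands in for majorTimelen + datalen

    def is_valid_start(window):
        # placeholder validity test; FIREdata.validDate() plays this role in read()
        return window[0] is not None

    def recover_records(page):
        records = []
        start = 0
        while start + PACKET_LEN <= len(page):
            window = page[start:start + PACKET_LEN]
            if None not in window:
                records.append(window)  # complete record, keep it as-is
                start += PACKET_LEN
            else:
                gap = window.index(None)
                if gap > 0:
                    # keep the bytes we have and pad the tail with filler
                    records.append(window[:gap] + ['00'] * (PACKET_LEN - gap))
                # resync: advance until the next window looks like a record start
                start += max(gap, 1)
                while (start + PACKET_LEN <= len(page)
                       and not is_valid_start(page[start:start + PACKET_LEN])):
                    start += 1
        return records

    page = ['a1', 'b2', 'c3', 'd4', 'e5', 'f6', '07', '08',
            'a1', 'b2', None, None, 'e5', 'f6', '07', '08']
    print(recover_records(page))  # first record intact, second padded with '00'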
Example #2
    def read(self, filename):
        # need to have pages and packet information
        packets = packet.BIRDpackets(filename)

        """
        data times span at most one page
        """

        previous_packet = None # holds the last packet
        dataBuffer = [] # this holds the data from a packet, as a measurement may roll onto
                        #   the next packet
        firstPacket = False
        for packet_ in packets:
            """
            options in here:
            1) new page starting with packet 01
            2) new page with missing packet 01
            3) current page with next packet
            4) current page with missing packet
            5) last packet of page at 13
            6) last packet of a page with missing 13
            """
            if packet_.pktnum == '01':
                firstPacket = True

            ### option 2 ###
            ### option 1 ###
            if previous_packet is None: # new page starting
                dataBuffer = [] # clear the dataBuffer as we are starting a new page
                previous_packet = packet_ # hang on to the last packet
                print(packet_)
                # this is a decodable page, start now
                dataBuffer.extend(packet_.data) # grab the data out
                # since packet_.pktnum == '01' this is a major time stamp; decode it
            else:
                # drop stale bytes from the front of the buffer until it starts on a valid date
                while len(dataBuffer) > 0:
                    if FIREdata.validDate(FIREdata.hex2int(dataBuffer[:majorTimelen])):
                        break
                    dataBuffer.pop(0)
                dataBuffer.extend(packet_.data) # append this packet's data; a measurement may span packets
            ### option 3 ###
            ### option 4 ###

            """
            regardless of the packet if there is more data in the buffer we should
            decode it and add it to the arrays
            """
            while len(dataBuffer) >= majorLen: # a full major frame is popped below
                tmp = [dataBuffer.pop(0) for v in range(majorLen)]
                self.major_data(tmp)

        # go through and remove duplicate times and data
        print("Looking for duplicate measurements")

        arr, dt_ind, return_inverse = np.unique(self.dat['Epoch'], return_index=True, return_inverse=True) # this is unique and sorted
        print("Found {0} duplicates of {1}".format(len(return_inverse)-len(dt_ind), len(return_inverse)))

        self.dat['Epoch'] = arr
        self.dat['Time'] = self.dat['Time'][dt_ind]
        # populate Duration and Mode
        self.dat['Mode'] = dm.dmarray.append(self.dat['Mode'], np.zeros(len(self.dat['Epoch']), dtype=int))
        if firstPacket:
            self.dat['Mode'][::2] = 1
        dur = [FIREdata.total_seconds(v2 - v1) for v1, v2 in itertools.izip(self.dat['Epoch'], self.dat['Time'])]
        self.dat['Duration'] = dm.dmarray.append(self.dat['Duration'], dur)


        return self
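As a footnote to the duplicate-removal step above, numpy.unique with return_index and return_inverse returns the sorted unique values, the indices of their first occurrences, and the mapping back to the original array, which is what the dt_ind bookkeeping relies on. A tiny stand-alone illustration with made-up values (not FIRE data) follows.

    import numpy as np

    # toy epoch array with duplicate times; values are made up for illustration
    epoch = np.array([3, 1, 2, 2, 3, 1])
    flux  = np.array([30.0, 10.0, 20.0, 20.5, 30.5, 10.5])

    uniq, first_ind, inverse = np.unique(epoch, return_index=True, return_inverse=True)
    print("Found {0} duplicates of {1}".format(len(inverse) - len(first_ind), len(inverse)))

    # keep the measurement paired with the first occurrence of each time,
    # mirroring self.dat['Time'] = self.dat['Time'][dt_ind] above
    epoch = uniq
    flux = flux[first_ind]
    print(epoch, flux)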