Code example #1
    def processData(self, data):
        """Process Environment data """

        currenttime = datetime.utcnow()
        outdate = datetime.strftime(currenttime, "%Y-%m-%d")
        filename = outdate
        actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
        outtime = datetime.strftime(currenttime, "%H:%M:%S")
        timestamp = datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")
        packcode = '6hLl'
        sensorid = self.sensor
        header = "# MagPyBin %s %s %s %s %s %s %d" % (
            sensorid, '[x]', '[RN]', '[random]', '[1000]', packcode,
            struct.calcsize('<' + packcode))

        try:
            datearray = acs.timeToArray(timestamp)
            datearray.append(int(data * 1000))
            data_bin = struct.pack('<' + packcode,
                                   *datearray)  #use little endian byte order
        except:
            log.msg('Error while packing binary data')
            pass

        if not self.confdict.get('bufferdirectory', '') == '':
            acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid,
                           filename, data_bin, header)
        return ','.join(list(map(str, datearray))), header
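
The record packed above can be read back with the same packcode. Below is a minimal decoding sketch; it assumes that acs.timeToArray() returns the timestamp as [year, month, day, hour, minute, second, microsecond] (which is what the '6hL' prefix of the packcode suggests) and that the stored value carries the [1000] multiplier from the header.

import struct
from datetime import datetime

packcode = '<6hLl'
record_size = struct.calcsize(packcode)

def decode_record(record):
    """Unpack one buffer record into (timestamp, value)."""
    (year, month, day, hour, minute, second,
     microsecond, raw) = struct.unpack(packcode, record)
    timestamp = datetime(year, month, day, hour, minute, second, microsecond)
    return timestamp, raw / 1000.0  # undo the [1000] multiplier

# e.g. for the record produced by processData() above:
# timestamp, value = decode_record(data_bin)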
Code example #2
    def processData(self, sensorid, line, ntptime):
        """processing and storing data - requires sensorid and meta info
           data looks like (TR00002):
           01.6 290 +14.8 0E*4F
           windspeed, winddirection, virtualtemperature, status*checksum
        """
        # currenttime = datetime.utcnow()
        outdate = datetime.strftime(ntptime, "%Y-%m-%d")
        filename = outdate
        header = ''
        datearray = datetime2array(ntptime)
        packcode = '6hLlll'
        multiplier = [10, 10, 1]
        #print ("Processing line for {}: {}".format(sensorid, line))
        vals = line.split()
        if len(vals) > 3:
            try:
                datearray.append(int(float(vals[2]) * 10))
                datearray.append(int(float(vals[0]) * 10))
                datearray.append(int(float(vals[1]) * 1))
            except:
                log.msg(
                    '{} protocol: Error while appending data to file'.format(
                        self.sensordict.get('protocol')))

            try:
                data_bin = struct.pack('<' + packcode,
                                       *datearray)  #little endian
            except:
                log.msg('{} protocol: Error while packing binary data'.format(
                    self.sensordict.get('protocol')))

            #assemble header from available global information - write only if information is complete
            key = '[t2,var1,var2]'
            ele = '[Tv,V,Dir]'
            unit = '[degC,m_s,deg]'
            multplier = str(multiplier).replace(" ", "")
            # Correct some common old problem
            unit = unit.replace('deg C', 'degC')
            #print ("ID process", sensorid)

            header = "# MagPyBin {} {} {} {} {} {} {}".format(
                sensorid, key, ele, unit, multplier, packcode,
                struct.calcsize('<' + packcode))
            data = ','.join(list(map(str, datearray)))

            if not self.confdict.get('bufferdirectory', '') == '':
                acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid,
                               filename, data_bin, header)

        else:
            data = ''

        return data, header
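
The datetime2array() helper used above is not included in this listing. The sketch below shows what it presumably does, judging from the '6hL' prefix of the packcodes (six shorts for date and time plus an unsigned long for the microseconds); treat it as an assumption rather than the actual MARTAS implementation.

def datetime2array(t):
    # assumed layout: [year, month, day, hour, minute, second, microsecond]
    return [t.year, t.month, t.day, t.hour, t.minute, t.second, t.microsecond]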
Code example #3
File: ardactiveprotocol.py  Project: geomagpy/MARTAS
    def processBlock(self, sensorid, meta, data):
        """Convert raw ADC counts into SI units as per datasheets"""
        currenttime = datetime.utcnow()
        outdate = datetime.strftime(currenttime, "%Y-%m-%d")
        actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
        outtime = datetime.strftime(currenttime, "%H:%M:%S")
        filename = outdate

        datearray = datetime2array(currenttime)
        packcode = '6hL'
        #sensorid = self.sensordict.get(idnum)
        #events = self.eventdict.get(idnum).replace('evt','').split(',')[3:-1]

        values = []
        multiplier = []
        for dat in data:
            try:
                values.append(float(dat))
                datearray.append(int(float(dat) * 10000))
                packcode = packcode + 'l'
                multiplier.append(10000)
            except:
                log.msg(
                    '{} protocol: Error while appending data to file (non-float?): {}'
                    .format(self.sensordict.get('protocol'), dat))

        try:
            data_bin = struct.pack('<' + packcode, *datearray)  #little endian
        except:
            log.msg('{} protocol: Error while packing binary data'.format(
                self.sensordict.get('protocol')))
            pass

        #assemble header from available global information - write only if information is complete
        key = '[' + str(meta.get('SensorKeys')).replace("'", "").strip() + ']'
        ele = '[' + str(meta.get('SensorElements')).replace("'",
                                                            "").strip() + ']'
        unit = '[' + str(meta.get('SensorUnits')).replace("'",
                                                          "").strip() + ']'
        multplier = str(multiplier).replace(" ", "")
        # Correct some common old problem
        unit = unit.replace('deg C', 'degC')

        header = "# MagPyBin %s %s %s %s %s %s %d" % (
            sensorid, key, ele, unit, multplier, packcode,
            struct.calcsize('<' + packcode))

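        # headercomplete is assumed to be set elsewhere in the class; it is not defined in this excerpt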
        if not self.confdict.get('bufferdirectory',
                                 '') == '' and headercomplete:
            acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid,
                           filename, data_bin, header)

        return ','.join(list(map(str, datearray))), header
Code example #4
File: bm35protocol.py  Project: geomagpy/MARTAS
    def processData(self, data):

        currenttime = datetime.utcnow()
        outdate = datetime.strftime(currenttime, "%Y-%m-%d")
        filename = outdate
        sensorid = self.sensor
        datearray = []
        pressure_raw = 88888.8
        pressure = 88888.8
        typ = "none"
        dontsavedata = False

        packcode = '6hLL'
        header = "# MagPyBin %s %s %s %s %s %s %d" % (
            self.sensor, '[var3]', '[p1]', '[mBar]', '[1000]', packcode,
            struct.calcsize('<' + packcode))

        try:
            if len(data) == 2:
                typ = "valid"
            # add other types here
        except:
            # TODO??? base x mobile?
            log.err(
                'BM35 - Protocol: Output format not supported - use either base, ... or mobile'
            )

        if typ == "valid":
            pressure_raw = float(data[0].strip())
            pressure = float(data[1].strip())
        elif typ == "none":
            dontsavedata = True
            pass

        if not typ == "none":
            # extract time data
            datearray = datetime2array(currenttime)
            try:
                datearray.append(int(pressure * 1000.))
                data_bin = struct.pack('<' + packcode, *datearray)
            except:
                log.msg('{} protocol: Error while packing binary data'.format(
                    self.sensordict.get('protocol')))

            if not self.confdict.get('bufferdirectory', '') == '':
                acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid,
                               filename, data_bin, header)
            returndata = ','.join(list(map(str, datearray)))
        else:
            returndata = ''

        return returndata, header
Code example #5
    def processData(self, data):
        """Convert raw ADC counts into SI units as per datasheets"""

        currenttime = datetime.utcnow()
        # Correction for ms time to work with databank:
        currenttime_ms = currenttime.microsecond / 1000000.
        ms_rounded = round(float(currenttime_ms), 3)
        if not ms_rounded >= 1.0:
            currenttime = currenttime.replace(microsecond=int(ms_rounded *
                                                              1000000.))
        else:
            currenttime = currenttime.replace(microsecond=0) + timedelta(
                seconds=1.0)
        filename = datetime.strftime(currenttime, "%Y-%m-%d")
        actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
        lastActualtime = currenttime
        outtime = datetime.strftime(currenttime, "%H:%M:%S")
        timestamp = datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")

        sensorid = self.sensor
        packcode = '6hLL'
        header = "# MagPyBin %s %s %s %s %s %s %d" % (
            self.sensor, '[f]', '[f]', '[nT]', '[1000]', packcode,
            struct.calcsize('<' + packcode))

        try:
            intval = data[1].split(',')
            value = float(intval[0].strip())
            if 10000 < value < 100000:
                intensity = value
            else:
                intensity = 88888.0
        except ValueError:
            log.err("CS - Protocol: Not a number. Instead found: {}".format(data[1]))
            intensity = 88888.0

        try:
            datearray = acs.timeToArray(timestamp)
            datearray.append(int(intensity * 1000))
            data_bin = struct.pack('<' + packcode, *datearray)
        except:
            log.msg('Error while packing binary data')

        if not self.confdict.get('bufferdirectory', '') == '':
            acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid,
                           filename, data_bin, header)

        return ','.join(list(map(str, datearray))), header
Code example #6
        def processOwData(self, sensorid, datadict):
            """Process OW data """
            currenttime = datetime.utcnow()
            outdate = datetime.strftime(currenttime, "%Y-%m-%d")
            filename = outdate
            actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
            outtime = datetime.strftime(currenttime, "%H:%M:%S")
            timestamp = datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")
            packcode = '6hL' + 'l' * len(datadict)
            multplier = str([1000] * len(datadict)).replace(' ', '')
            if sensorid.startswith('DS18'):
                key = '[t1]'
                ele = '[T]'
                unit = '[degC]'
            elif sensorid.startswith('DS2438'):
                #'temperature','VAD','VDD','humidity','vis'
                key = '[t1,var1,var2,var3,var4]'
                ele = '[T,RH,VDD,VAD,VIS]'
                unit = '[degC,per,V,V,V]'

            header = "# MagPyBin %s %s %s %s %s %s %d" % (
                sensorid, key, ele, unit, multplier, packcode,
                struct.calcsize('<' + packcode))

            data_bin = None
            datearray = ''
            try:
                datearray = acs.timeToArray(timestamp)
                paralst = typedef.get(sensorid.split('_')[0])
                for para in paralst:
                    if para in datadict:
                        datearray.append(int(float(datadict[para]) * 1000))
                data_bin = struct.pack('<' + packcode,
                                       *datearray)  # little endian
            except:
                log.msg('Error while packing binary data')

            if not self.confdict.get('bufferdirectory', '') == '' and data_bin:
                acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid,
                               filename, data_bin, header)
            #print ("Sending", ','.join(list(map(str,datearray))), header)
            return ','.join(list(map(str, datearray))), header
Code example #7
    def processData(self, data):
        """Process Environment data """

        currenttime = datetime.utcnow()
        outdate = datetime.strftime(currenttime, "%Y-%m-%d")
        filename = outdate
        actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
        outtime = datetime.strftime(currenttime, "%H:%M:%S")
        timestamp = datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")
        packcode = '6hLllL'
        sensorid = self.sensor
        header = "# MagPyBin %s %s %s %s %s %s %d" % (sensorid, '[t1,t2,var1]', '[T,DewPoint,RH]', '[degC,degC,per]', '[1000,1000,1000]', packcode, struct.calcsize('<'+packcode))

        valrh = re.findall(r'\d+',data[0])
        if len(valrh) > 1:
            temp = float(valrh[0] + '.' + valrh[1])
        else:
            temp = float(valrh[0])
        valrh = re.findall(r'\d+',data[1])
        if len(valrh) > 1:
            rh = float(valrh[0] + '.' + valrh[1])
        else:
            rh = float(valrh[0])
        valrh = re.findall(r'\d+',data[2])
        if len(valrh) > 1:
            dew = float(valrh[0] + '.' + valrh[1])
        else:
            dew = float(valrh[0])

        try:
            datearray = acs.timeToArray(timestamp)
            datearray.append(int(temp*1000))
            datearray.append(int(dew*1000))
            datearray.append(int(rh*1000))
            data_bin = struct.pack('<'+packcode,*datearray)  #use little endian byte order
        except:
            log.msg('Error while packing binary data')
            pass

        if not self.confdict.get('bufferdirectory','') == '':
            acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid, filename, data_bin, header)
        return ','.join(list(map(str,datearray))), header
Code example #8
def on_message(client, userdata, msg):
    if pyversion.startswith('3'):
        msg.payload = msg.payload.decode('ascii')

    global qos
    global verifiedlocation
    arrayinterpreted = False
    if stationid in ['all', 'All', 'ALL']:
        stid = msg.topic.split('/')[0]
    else:
        stid = stationid
    try:
        sensorind = msg.topic.split('/')[1]
        sensorid = sensorind.replace('meta',
                                     '').replace('data',
                                                 '').replace('dict', '')
    except:
        # Above will fail if msg.topic does not contain /
        # TODO (previous version was without 1, first occurrence -> the following line should work as well although the code above is more general)
        sensorid = msg.topic.replace(stid, "", 1).replace('/', '').replace(
            'meta', '').replace('data', '').replace('dict', '')
    # define a new data stream for each non-existing sensor
    if not instrument == '':
        if not sensorid.find(instrument) > -1:
            return

    if sensorid in blacklist:
        if debug:
            print("Sensor {} in blacklist - not collecting".format(sensorid))
        return

    ## ################################################################################
    ## ####            Optionally check for additional format libraries       #########
    ## ################################################################################
    identdic = {}

    if addlib and len(addlib) > 0:
        # Currently only one additional library is supported
        lib = addlib[0]
        #for lib in addlib:
        elemlist = []
        for elem in topic_identifiers[lib]:
            strelem = "msg.topic.{}('{}')".format(elem,
                                                  topic_identifiers[lib][elem])
            elemlist.append(strelem)
        if len(elemlist) > 1:
            teststring = " and ".join(elemlist)
        else:
            teststring = "".join(elemlist)
        if eval(teststring):
            classref = class_reference.get(lib)
            #print ("1", msg.payload)
            try:
                msg.payload, sensorid, headerline, headerdictionary, identdic = classref.GetPayload(
                    msg.payload, msg.topic)
            except:
                print("Interpretation error for {}".format(msg.topic))
                return
            #print (payload, sensorid, headerline)
            headdict[sensorid] = headerline
            headstream[sensorid] = create_head_dict(headerline, sensorid)
            headstream[sensorid] = merge_two_dicts(headstream[sensorid],
                                                   headerdictionary)
            msg.topic = msg.topic + '/data'
            for el in identdic:
                po.identifier[el] = identdic[el]

    metacheck = po.identifier.get(sensorid + ':packingcode', '')

    ## ################################################################################

    if msg.topic.endswith('meta') and metacheck == '':
        log.msg("Found basic header:{}".format(str(msg.payload)))
        log.msg("Quality of Service (QOS):{}".format(str(msg.qos)))
        analyse_meta(str(msg.payload), sensorid)
        if not sensorid in headdict:
            headdict[sensorid] = msg.payload
            # create stream.header dictionary and add it here
            headstream[sensorid] = create_head_dict(str(msg.payload), sensorid)
            if debug:
                log.msg("New headdict: {}".format(headdict))
    elif msg.topic.endswith('dict') and sensorid in headdict:
        #log.msg("Found Dictionary:{}".format(str(msg.payload)))
        head_dict = headstream[sensorid]
        for elem in str(msg.payload).split(','):
            keyvaluespair = elem.split(':')
            try:
                if not keyvaluespair[1] in ['-', '-\n', '-\r\n']:
                    head_dict[keyvaluespair[0]] = keyvaluespair[1].strip()
            except:
                pass
        if debug:
            log.msg("Dictionary now looks like {}".format(
                headstream[sensorid]))
    elif msg.topic.endswith('data'):  # or readable json
        #if readable json -> create stream.ndarray and set arrayinterpreted :
        #    log.msg("Found data:", str(msg.payload), metacheck)
        if not metacheck == '':
            if 'file' in destination:
                # Import module for writing data from acquisition
                # -------------------
                #if debug:
                #    log.msg(sensorid, metacheck, msg.payload)  # payload can be split
                # Check whether header is already identified
                # -------------------
                if sensorid in headdict:
                    header = headdict.get(sensorid)
                    if sys.version_info >= (3, 0):
                        metacheck = metacheck.decode()
                    if metacheck.endswith('B'):
                        packcode = metacheck.strip(
                            '<')[:-1]  # drop leading < and final B
                    else:
                        packcode = metacheck.strip('<')  # drop leading <
                    # temporary code - to be deleted when lemi protocol has been updated
                    if packcode.find('4cb6B8hb30f3Bc') >= 0:
                        header = header.replace(
                            '<4cb6B8hb30f3BcBcc5hL 169\n',
                            '6hLffflll {}'.format(
                                struct.calcsize('<6hLffflll')))
                        packcode = '6hLffflll'
                    arrayelem = msg.payload.split(';')
                    for ar in arrayelem:
                        datearray = ar.split(',')
                        # identify string values in packcode
                        # -------------------
                        # convert packcode numbers
                        cpack = []
                        for c in packcode:
                            if c.isdigit():
                                digit = int(c)
                            else:
                                cpack.extend([c] * digit)
                                digit = 1
                        cpackcode = "".join(cpack)
                        for i in range(len(cpackcode)):
                            if cpackcode[-i] == 's':
                                datearray[-i] = datearray[-i]
                            elif cpackcode[-i] == 'f':
                                datearray[-i] = float(datearray[-i])
                            else:
                                datearray[-i] = int(float(datearray[-i]))
                        # pack data using little endian byte order
                        data_bin = struct.pack('<' + packcode, *datearray)
                        # Check whether destination path has been verified already
                        # -------------------
                        if not verifiedlocation:
                            if not location in [None, ''
                                                ] and os.path.exists(location):
                                verifiedlocation = True
                            else:
                                log.msg(
                                    "File: destination location {} is not accessible"
                                    .format(location))
                                log.msg(
                                    "      -> please use option l (e.g. -l '/my/path') to define"
                                )
                        if verifiedlocation:
                            filename = "{}-{:02d}-{:02d}".format(
                                datearray[0], datearray[1], datearray[2])
                            acs.dataToFile(location, sensorid, filename,
                                           data_bin, header)
            if 'websocket' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                for idx, el in enumerate(stream.ndarray[0]):
                    time = num2date(el).replace(tzinfo=None)
                    msecSince1970 = int(
                        (time - datetime(1970, 1, 1)).total_seconds() * 1000)
                    datastring = ','.join([
                        str(val[idx]) for i, val in enumerate(stream.ndarray)
                        if len(val) > 0 and not i == 0
                    ])
                    if debug:
                        print("Sending {}: {},{} to webserver".format(
                            sensorid, msecSince1970, datastring))
                    wsserver.send_message_to_all("{}: {},{}".format(
                        sensorid, msecSince1970, datastring))
            if 'diff' in destination:
                global counter
                counter += 1
                global number
                amount = int(number)
                cover = 5
                if not arrayinterpreted:
                    ar = interprete_data(msg.payload, stream, sensorid)
                    if not sensorid in senslst:
                        senslst.append(sensorid)
                        st.append(DataStream([], {}, ar))
                    idx = senslst.index(sensorid)
                    st[idx].extend(stream.container, {'SensorID': sensorid},
                                   ar)
                    arrayinterpreted = True
                st[idx].ndarray = np.asarray(
                    [np.asarray(el[-cover:]) for el in st[idx].ndarray])
                if len(st) < 2:
                    print("Not enough streams for subtraction yet")
                try:
                    if counter > amount:
                        counter = 0
                        sub = subtractStreams(st[0], st[1])
                        try:
                            part1 = (
                                st[0].header.get('SensorID').split('_')[1])
                        except:
                            part1 = 'unknown'
                        try:
                            part2 = (
                                st[1].header.get('SensorID').split('_')[1])
                        except:
                            part2 = 'unknown'
                        name = "Diff_{}-{}_0001".format(part1, part2)
                        # get head line for pub
                        #name = "diff_xxx_0001"
                        keys = sub._get_key_headers(numerical=True)
                        ilst = [KEYLIST.index(key) for key in keys]
                        keystr = "[{}]".format(",".join(keys))
                        #takeunits =  ### take from st[0]
                        packcode = "6hL{}".format("".join(['l'] * len(keys)))
                        multi = "[{}]".format(",".join(['1000'] * len(keys)))
                        unit = "[{}]".format(",".join(['arb'] * len(keys)))
                        head = "# MagPyBin {} {} {} {} {} {} {}".format(
                            name, keystr, keystr, unit, multi, packcode,
                            struct.calcsize('<' + packcode))
                        #print (head)
                        # get data line for pub
                        time = sub.ndarray[0][-1]
                        timestr = (datetime.strftime(
                            num2date(float(time)).replace(tzinfo=None),
                            "%Y,%m,%d,%H,%M,%S,%f"))
                        val = [sub.ndarray[i][-1] for i in ilst]
                        if len(val) > 1:
                            valstr = ",".join([str(int(v * 1000)) for v in val])
                        else:
                            valstr = int(val[0] * 1000)
                        data = "{},{}".format(timestr, valstr)
                        #print (data)
                        topic = "wic/{}".format(name)
                        client.publish(topic + "/data", data, qos=qos)
                        client.publish(topic + "/meta", head, qos=qos)
                except:
                    print("Found error in subtraction")
            if 'stdout' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                for idx, el in enumerate(stream.ndarray[0]):
                    time = num2date(el).replace(tzinfo=None)
                    datastring = ','.join([
                        str(val[idx]) for i, val in enumerate(stream.ndarray)
                        if len(val) > 0 and not i == 0
                    ])
                    log.msg("{}: {},{}".format(sensorid, time, datastring))
            elif 'db' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                # create a stream.header
                #if debug:
                #    log.msg(stream.ndarray)
                stream.header = headstream[sensorid]
                if debug:
                    log.msg("writing header: {}".format(headstream[sensorid]))
                if revision != 'free':
                    writeDB(db,
                            stream,
                            tablename="{}_{}".format(sensorid, '0001'))
                else:
                    writeDB(db, stream)
            elif 'stringio' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                for idx, el in enumerate(stream.ndarray[0]):
                    time = num2date(el).replace(tzinfo=None)
                    date = datetime.strftime(time, "%Y-%m-%d %H:%M:%S.%f")
                    linelist = list(map(str, [el, date]))
                    linelist.extend([
                        str(val[idx]) for i, val in enumerate(stream.ndarray)
                        if len(val) > 0 and not i == 0
                    ])
                    line = ','.join(linelist)
                    eol = '\r\n'
                    output.write(line + eol)
            elif 'serial' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                """
                # send json like structures
                collcount = 10
                if sercount <= collcount:
                    for idx,col in enumerate(stream.ndarray):
                        if not len(col) == 0:
                            keyname = KEYLIST[idx]
                            if idx == 0:
                                time = num2date(col).replace(tzinfo=None)
                                col = int((time - datetime(1970,1,1)).total_seconds()*1000)
                            excol = datacol.get(keyname,[])
                            datacol[keyname] = excol.append(col)
                    sercount += 1
                if sercount == collcount:
                    sercount = 0
                    jsonstr={}
                    jsonstr['sensorid'] = sensorid
                    jsonstr['nr'] = i
                    jsonstr['key'] = po.identifier[sensorid+':keylist'][i]
                    jsonstr['elem'] = po.identifier[sensorid+':elemlist'][i]
                    jsonstr['unit'] = po.identifier[sensorid+':unitlist'][i]
                    payload = json.dumps(jsonstr)
                    # write input to another serial port (e.g. for radio transmission)
                    # requires serdef = e.g. [115200,8,1,N]
                    # eventually create minimum 30 sec json blocks
                    #sendline='{"SensorID":"{}","Units":["Sec1970","degC"],"Keys":{"time":"time","x":"Temperature"}, "time":[{}],"x":[{}]}'.format(sensorid,time,x)
                    #{"SensorID":"ID","units":["Sec1970","degC"],"keys":{"time":"time","x":"Temperature"}, "data":{"time":[12,13,45],"x":[12,13,45]}}
                    #ser.write("{}: {},{}".format(sensorid,msecSince1970,datastring))
                """
            else:
                pass
        else:
            log.msg("Non-interpreted format: {}  {}".format(
                msg.topic, str(msg.payload)))
    elif msg.topic.find('statuslog') > 0:
        # json style statusinfo is coming
        hostname = msg.topic.split('/')[-1]
        #log.msg("---------------------------------------------------------------")
        #log.msg("Receiving updated status information from {}".format(hostname))
        #log.msg("---------------------------------------------------------------")
        print("FOUND STATUS CHANGE", telegramconf)
        statusdict = json.loads(msg.payload)
        for elem in statusdict:
            logmsg = "{}: {} - {}".format(hostname, elem, statusdict[elem])
            # For Nagios - add in marcos.log
            log.msg(logmsg)
        # For Telegram
        try:
            # try to import telegram and telegram.cfg
            ##### Add the configuration to input and marcos.cfg
            ## Please note: requires anaconda2/bin/python on my test PC
            ## !!! Requires network connection !!!
            if not telegramconf == '/telegram.conf':
                martaslog = ml(receiver='telegram')
                martaslog.receiveroptions('telegram',
                                          options={'conf': telegramconf})
                statusdict['Hostname'] = hostname
                martaslog.notify(statusdict)
        except:
            pass

        #telegram.send(msg)

    if msg.topic.endswith('meta') and 'websocket' in destination:
        # send header info for each element (# sensorid   nr   key   elem   unit)
        analyse_meta(str(msg.payload), sensorid)
        for (i, void) in enumerate(po.identifier[sensorid + ':keylist']):
            jsonstr = {}
            jsonstr['sensorid'] = sensorid
            jsonstr['nr'] = i
            jsonstr['key'] = po.identifier[sensorid + ':keylist'][i]
            jsonstr['elem'] = po.identifier[sensorid + ':elemlist'][i]
            jsonstr['unit'] = po.identifier[sensorid + ':unitlist'][i]
            payload = json.dumps(jsonstr)
            wsserver.send_message_to_all('# ' + payload)
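
on_message() above follows the paho-mqtt callback signature (client, userdata, msg). The sketch below shows how such a handler is typically attached to a client loop; the broker address and the topic filter are placeholders rather than values from this listing, and the paho-mqtt 1.x callback style is assumed.

import paho.mqtt.client as mqtt

def on_connect(client, userdata, flags, rc):
    # subscribe once the connection is established; '#' is a placeholder filter
    client.subscribe('#', qos=1)

client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message   # the handler defined above
client.connect('localhost', 1883, 60)   # placeholder broker address
client.loop_forever()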
Code example #9
def interruptRead(s):
    """
    interrupt routine of class AD7714Protocol
    triggered by AD7714 /DRDY signal
    """
    # at first get the time...
    currenttime = datetime.utcnow()
    # read from data register
    arrvalue = rxreg(5, CHANNEL)
    if len(arrvalue) == 2:
        # 16 -> 24bit
        arrvalue.append(0)
    intvalue = (arrvalue[0] << 16) | (arrvalue[1] << 8) | arrvalue[2]
    voltvalue = float(intvalue) / 2**24 * 5 - 2.5
    # mV better for display
    voltvalue = voltvalue * 1000

    # TIME TO COMMUNICATE!
    global int_comm
    if int_comm == "mySettings":
        mySettings()
        int_comm = "ok"
    if int_comm == "myCalibration":
        myCalibration()
        int_comm = "ok"
    if int_comm == "info":
        info()
        int_comm = "ok"

    # watchdog
    global watchdog
    global Objekt
    if watchdog['oldvalue'] == 999999:
        print('watchdog active')
    if watchdog['oldvalue'] == intvalue:
        watchdog['count_repetitions'] = watchdog['count_repetitions'] + 1
    else:
        if watchdog['count_repetitions'] > 5:
            # avoid a lot of log entries - filter double and triple values in a row
            print('watchdog reset, count_repetitions:',
                  watchdog['count_repetitions'], 'oldvalue:',
                  watchdog['oldvalue'], 'intvalue:', intvalue)
        watchdog['count_repetitions'] = 0
        watchdog['max_repetitions'] = watchdog['init_max_rep']
    if watchdog['count_repetitions'] == watchdog['max_repetitions']:
        # probably hung up, too many same values
        print('watchdog ad7714protocol:')
        print('  ', watchdog['max_repetitions'], 'same values (intvalue:',
              intvalue, ') in one row - hung up?')
        print('  trying to reset AD7714...')
        # sending LOW to /RESET pin
        reset()
        time.sleep(0.01)
        # loading settings
        mySettings()
        # zero calibration
        myCalibration()
        watchdog['max_repetitions'] = watchdog['max_repetitions'] * 2
        watchdog['count_repetitions'] = 0
    watchdog['oldvalue'] = intvalue
    packcode = "6hLl"
    sensorid = Objekt.sensordict['sensorid']
    header = "# MagPyBin %s %s %s %s %s %s %d" % (sensorid, '[var1]', '[U]',
                                                  '[mV]', '[1000]', packcode,
                                                  struct.calcsize(packcode))
    #timestamp=datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")
    timestamp = datetime2array(currenttime)
    darray = timestamp
    darray.append(int(round(voltvalue * 1000)))

    # TO FILE
    data_bin = struct.pack(packcode, *darray)
    filedate = datetime.strftime(datetime(darray[0], darray[1], darray[2]),
                                 "%Y-%m-%d")
    if not Objekt.confdict.get('bufferdirectory', '') == '':
        acs.dataToFile(Objekt.confdict.get('bufferdirectory'), sensorid,
                       filedate, data_bin, header)

    # VIA MQTT
    # instead of external program file TODO: better!
    #def sendData(self, sensorid, data, head, stack=None):
    #sendData.sendData(Objekt,sensorid, ','.join(list(map(str,darray))), header)
    data = ','.join(list(map(str, darray)))
    head = header
    # TODO: implement stack correctly!
    stack = 1

    topic = Objekt.confdict.get('station') + '/' + sensorid
    senddata = False
    if not stack:
        stack = int(Objekt.sensordict.get('stack'))
    coll = stack

    if coll > 1:
        Objekt.metacnt = 1  # send meta data with every block
        if Objekt.datacnt < coll:
            Objekt.datalst.append(data)
            Objekt.datacnt += 1
        else:
            senddata = True
            data = ';'.join(Objekt.datalst)
            Objekt.datalst = []
            Objekt.datacnt = 0
    else:
        senddata = True

    if senddata:
        if Objekt.count == 0:
            # get all values initially from the database
            #add = "SensoriD:{},StationID:{},DataPier:{},SensorModule:{},SensorGroup:{},SensorDecription:{},DataTimeProtocol:{}".format( sensorid, self.confdict.get('station',''),self.sensordict.get('pierid',''), self.sensordict.get('protocol',''),self.sensordict.get('sensorgroup',''),self.sensordict.get('sensordesc',''), self.sensordict.get('ptime','') )
            #self.client.publish(topic+"/dict", add, qos=self.qos)
            Objekt.client.publish(topic + "/meta", head, qos=Objekt.qos)
            if Objekt.debug:
                log.msg("  -> DEBUG - Publishing meta --", topic, head)
        Objekt.client.publish(topic + "/data", data, qos=Objekt.qos)
        if Objekt.debug:
            log.msg("  -> DEBUG - Publishing data")
        Objekt.count += 1
        if Objekt.count >= Objekt.metacnt:
            Objekt.count = 0
Code example #10
File: gp20s3protocol.py  Project: geomagpy/MARTAS
    def processData(self, data):
        """ GP20S3 data """
        """
        Data looks like--- (with GPS lines every minute):
        -- vertical sensor - Old software
        3,3,12.00 111 field1 field2 field3  
        3,3,12.00 111 field1 field2 field3 
        GPS 16.00 111 field1 field2 field3  
        -- horizontal sensor - New software
        time 111 field1 field2 field3                                            (every sec or faster)
        $$$                                                         (every hour, precedes status line)
        10071506 A 13 250 492 496 329 150 1023 39 39 39 30 29 30 YYYyyyEEENNN 148 149 117 (every hour)
        time 111 field1 field2 field3                                            (every sec or faster)
        time 111 field1 field2 field3                                            (every sec or faster)
        """

        currenttime = datetime.utcnow()
        outdate = datetime.strftime(currenttime, "%Y-%m-%d")
        actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
        outtime = datetime.strftime(currenttime, "%H:%M:%S")
        timestamp = datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")
        filename = outdate
        sensorid = self.sensor
        headerlinecoming = False
        datearray = []
        headarray = []
        statusname = "Status_123_0001"

        sensororientation = self.sensor.split('_')[0].replace(self.sensordict.get('protocol'),'')
        if len(sensororientation) > 1:
            sens1 = sensororientation
            sens2 = sensororientation[0]
            sens3 = sensororientation[1]
        else:
            sens1 = 'TA'
            sens2 = 'B'
            sens3 = 'TB'
        celem =  '[{},{},{},{}{},{}{},{}{},None]'.format(sens1,sens2,sens3, sens3,sens1, sens3,sens2, sens2,sens1)
        packcode = '6hLQQQqqq6hL'
        header = "# MagPyBin %s %s %s %s %s %s %d" % (self.sensor, '[x,y,z,dx,dy,dz,sectime]', celem, '[pT,pT,pT,pT,pT,pT,None]', '[1000,1000,1000,1000,1000,1000,1]', packcode, struct.calcsize('<'+packcode))

        try:
            # Extract data
            data_array = data
            if len(data_array) == 5:
                intensity1 = float(data_array[2])
                intensity2 = float(data_array[3])
                intensity3 = float(data_array[4])
                grad1 = intensity3-intensity1
                grad2 = intensity3-intensity2
                grad3 = intensity2-intensity1
                try:
                    gpstime = float(data[0]) # will fail for old dataformat -> NTP
                    if gpstime > 235900.0: # keep the previously stored date if gpstime > 235900, so a 235959 GPS sample is not stamped with the next day's date when the PC time has already rolled over
                        cdate = dateprev
                    else:
                        cdate = outdate
                        dateprev = outdate
                    try:
                        internal_t = datetime.strptime(cdate+'T'+data[0], "%Y-%m-%dT%H%M%S.%f")
                    except:
                        internal_t = datetime.strptime(cdate+'T'+data[0], "%Y-%m-%dT%H%M%S")
                    internal_time = datetime.strftime(internal_t, "%Y-%m-%d %H:%M:%S.%f")
                except:
                    internal_time = timestamp #datetime.strftime(datetime.utcnow(), "%Y-%m-%d %H:%M:%S.%f")

            elif len(data_array) == 19:
                """
                        10071506 A 13 250 492 496 329 150 1023 39 39 39 30 29 30 YYYyyyEEENNN 148 149 117

			<GPS> day/month/year/hour A - locked, V unlocked
			<13> Console outside air temperature (13C)
			<250> Battery voltage (25.0V)
			<492> +5V supply voltage (4.92V)
			<496> -5V supply voltage (-4.96)
			<3.3V> +3.3V supply voltage (3.3V)
			<15.0> silver box power supply (15.0V)
			<1023> OCXO internal trimpot adjustment level, automatically adjusted via GPS
			<39> Sensor 1 temperature in C
			<39>  Sensor 2 temperature in C
			<39> Sensor 3 temperature in C
			<30> Light current sensor 1 (3.0uA)
			<29> Light current sensor 2 (2.9uA)
			<30> Light current sensor 3 (3.0uA)
			<YYY>  Sensor 1, sensor 2 sensor 3 lock status Y- locked, N - unlocked
			<yyy>  Sensor 1 heater status, sensor 2 heater status, sensor 3 heater status y-on, n-off
			<EEE> Sensor 1 heater, sensor 2 heater, sensor 3 heater E-enabled, D-disabled (used for over heat protection)
			<NNN> RF sensor 1, RF sensor 2, RF sensor 3, N -on, F-off
			<148> Sensor 1 RF dc voltage (14.8V)
			<149> Sensor 2 RF dc voltage (14.9V)
			<117> Sensor 3 RF dc voltage (11.7V)
                """
                headerlinecoming = True

                try:
                    gpstime = str(data_array[0])
                    internal_t = datetime.strptime(gpstime, "%d%m%y%H")
                    gpstimestamp = datetime.strftime(internal_t, "%Y-%m-%d %H:%M:%S.%f")
                except:
                    gpstimestamp = timestamp
                internal_time = gpstimestamp

                gpstatus = data_array[1]			# str1
                telec = int(data_array[2])			# t2
                Vbat = float(data_array[3])/10.			# f
                Vsup1 = float(data_array[4])/100.		# var4
                Vsup2 = float(data_array[5])/100.		# var5
                Vlow = float(data_array[6])/100.		# t1
                PowerSup = float(data_array[7])/10.		# df
                level = data_array[8]				# str3
                tsens1 = int(data_array[9])			# x
                tsens2 = int(data_array[10])			# y
                tsens3 = int(data_array[11])			# z
                lightcurrent1 = float(data_array[12])/10.	# dx
                lightcurrent2 = float(data_array[13])/10.	# dy
                lightcurrent3 = float(data_array[14])/10.	# dz
                statusstring = data_array[15]			# str2
                Vsens1 = float(data_array[16])/10.		# var1
                Vsens2 = float(data_array[17])/10.		# var2
                Vsens3 = float(data_array[18])/10.		# var3 

            elif len(data_array) == 1 and data_array[0] == '$$$':
                return "","",""
            else:
                log.msg('{} protocol: data line could not be interpreted: ({}) of length {}'.format(self.sensordict.get('protocol'),data, len(data_array)))
        except:
            log.err('{} protocol: Data formatting error. Data looks like: {}'.format(self.sensordict.get('protocol'),data))

        try:
            # Analyze time difference between GSM internal time and utc from PC
            timelist = sorted([internal_t,currenttime])
            timediff = timelist[1]-timelist[0]
            delta = timediff.total_seconds()
            if not delta in [0.0, float('NAN'), None]:
                self.delaylist.append(timediff.total_seconds())
                if len(self.delaylist) > 600:
                    self.delaylist = self.delaylist[-600:]
            if len(self.delaylist) > 100:
                try:
                    self.timedelay = np.abs(np.median(np.asarray(self.delaylist)))
                except:
                    self.timedelay = 0.0
            if self.timedelay > self.timethreshold:
                self.errorcnt['time'] +=1
                if self.errorcnt.get('time') < 2:
                    log.msg("{} protocol: large time difference observed for {}: {} sec".format(self.sensordict.get('protocol'), sensorid, self.timedelay))
                if self.errorcnt.get('time') > 1000:
                    self.errorcnt['time'] = 1000
            else:
                self.errorcnt['time'] = 0 
        except:
            pass

        if self.sensordict.get('ptime','') in ['NTP','ntp']:
            secondtime = internal_time
            maintime = timestamp
        else:
            maintime = internal_time
            secondtime = timestamp

        if not headerlinecoming:
          try:
            ## GP20S3 provides info on whether the GPS reading is OK  - use it

            # extract time data
            datearray = acs.timeToArray(maintime)
            try:
                datearray.append(int(intensity1*1000.))
                datearray.append(int(intensity2*1000.))
                datearray.append(int(intensity3*1000.))
                datearray.append(int(grad1*1000.))
                datearray.append(int(grad2*1000.))
                datearray.append(int(grad3*1000.))
                internalarray = acs.timeToArray(secondtime)
                datearray.extend(internalarray)
                data_bin = struct.pack('<'+packcode,*datearray)
            except:
                log.msg('{} protocol: Error while packing binary data'.format(self.sensordict.get('protocol')))

            if not self.confdict.get('bufferdirectory','') == '':
                acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid, filename, data_bin, header)
          except:
            log.msg('{} protocol: Error with binary save routine'.format(self.sensordict.get('protocol')))

        if headerlinecoming:
            if self.debug:
                print (" now writing header info")
            headpackcode = '6hL15ls12s4s' #'6hLlllllllllllllllsss'
            statusname = "Status_123_0001"
            try:
                # extract time data
                headarray = acs.timeToArray(maintime)
                try:
                    headarray.append(int(tsens1))			# x
                    headarray.append(int(tsens2))			# y
                    headarray.append(int(tsens3))			# z
                    headarray.append(int(Vbat*10.))			# f
                    headarray.append(int(Vlow*100.))		# t1
                    headarray.append(int(telec))			# t2
                    headarray.append(int(lightcurrent1*10.))	# dx
                    headarray.append(int(lightcurrent2*10.))	# dy
                    headarray.append(int(lightcurrent3*10.))	# dz
                    headarray.append(int(PowerSup*10.))		# df
                    headarray.append(int(Vsens1*10.))		# var1
                    headarray.append(int(Vsens2*10.))		# var2
                    headarray.append(int(Vsens3*10.))		# var3 
                    headarray.append(int(Vsup1*100.))		# var4
                    headarray.append(int(Vsup2*100.))		# var5
                    headarray.append(gpstatus)			# str1
                    headarray.append(statusstring)			# str2
                    headarray.append(level)				# str3

                    data_bin = struct.pack('<'+headpackcode,*headarray)
                    statuslst = self.sensor.split('_')
                    if self.debug:
                        print ("Headerdata has been packed")
                    if len(statuslst) == 3:
                        statusname = '_'.join([statuslst[0]+'status',statuslst[1],statuslst[2]])
                    headheader = "# MagPyBin %s %s %s %s %s %s %d" % (statusname, '[x,y,z,f,t1,t2,dx,dy,dz,df,var1,var2,var3,var4,var5,str1,str2,str3]', '[Ts1,Ts2,Ts3,Vbat,V3,Tel,L1,L2,L3,Vps,V1,V2,V3,V5p,V5n,GPSstat,Status,OCXO]', '[degC,degC,degC,V,V,degC,A,A,A,V,V,V,V,V,V,None,None,None]', '[1,1,1,10,100,1,10,10,10,10,10,10,10,100,100,1,1,1]', headpackcode, struct.calcsize('<'+headpackcode))
                    if self.debug:
                        print ("Header looks like: {} ".format(headheader))
                        print ("Writing to file: {}, {}, {}".format(statusname,filename,headheader))
                    if not self.confdict.get('bufferdirectory','') == '':
                        acs.dataToFile(self.confdict.get('bufferdirectory'), statusname, filename, data_bin, headheader)
                except:
                    log.msg('GSMP20 - Protocol: Error while packing binary data')
            except:
                pass

        if len(datearray) > 0:
            topic = self.confdict.get('station') + '/' + self.sensordict.get('sensorid')
            return ','.join(list(map(str,datearray))), header, topic
        elif len(headarray) > 0:
            topic = self.confdict.get('station') + '/' + statusname
            return ','.join(list(map(str,headarray))), headheader, topic
Code example #11
File: cr1000jcprotocol.py  Project: geomagpy/MARTAS
        def sendRequest(self):
            # TODO where should the debug flag live?
            debug = False
            if self.reconnect.is_set():
                log.msg('exiting, mutex locked!')
                return
            t = datetime.utcnow()
            past = t-timedelta(seconds=3)
            vals = self.device.get_data('SamplesEvery2s',past,t)
            # vals[0] because we grab no older data; there is only one value every 2 seconds
            # timestamp directly from datetime into array
            # TODO ask Roman whether or how to compare with the computer time
            try:
                darray = datetime2array(vals[0]['Datetime'])
                # TODO "again" is a provisional workaround
                again = False
            except:
                again = True
            try:
                if again:
                    t = datetime.utcnow()
                    past = t-timedelta(seconds=3)
                    darray = datetime2array(vals[0]['Datetime'])
                    log.msg("IT TOOK A SECOND TIME TO GET DATA PROPERLY!") 
            except:
                # there will be no log messages when the logger is turned off
                return
                # TODO reconnect solution, but only once it works cleanly!
                log.msg('NO DATA FROM CR1000 !!! - vals:')
                log.msg(vals)
                port = self.confdict['serialport']+self.sensordict.get('port')
                baudrate = self.sensordict.get('baudrate')
                self.reconnect.set()
                connected=False
                while not connected:
                    self.device.bye()
                    log.msg('reconnecting to device...')
                    time.sleep(5)
                    try:
                        self.device = CR1000.from_url('serial:{}:{}'.format(port,baudrate))
                        tables = self.device.list_tables()
                        if tables == ['Status', 'SamplesEvery2s', 'ValuesEveryMinute', 'Public']:
                            connected=True
                            log.msg('looks ok...')
                            time.sleep(2)
                    except:
                        log.msg('reconnect apparently failed!')
                    try:
                        past = t-timedelta(seconds=3)
                        vals = self.device.get_data('SamplesEvery2s',past,t,debug=True)
                        log.msg(SENSOR_HEIGHT*1000.-vals[0]['DiffVolt']*250.)
                    except:
                        log.msg('...apparently not after all!')
                        connected=False
                self.reconnect.clear()
                log.msg('mutex released...')
                return
            if debug:
                log.msg('getting data...')
            # snowheight (1000mV is 250cm) - values from CR1000 in mV, factor 1000 for packing
            snowheight = SENSOR_HEIGHT*1000.-vals[0]['DiffVolt']*250.
            darray.append(int(round(snowheight)))
            # TODO remove
            if debug:
                log.msg(darray)

            # preparations for file save
            # date 6 short microsecond unsigned long snowheight signed long
            # TODO old packcode!
            packcode = "6hLl"
            #packcode = "<6hLl"
            # header 
            sensorid = self.sensordict['sensorid']
            header = "# MagPyBin %s %s %s %s %s %s %d" % (sensorid,'[f]','[JC]','[cm]','[1000]',packcode,struct.calcsize(packcode))
            data_bin = struct.pack(packcode,*darray)
            # date of dataloggers timestamp
            filedate = datetime.strftime(datetime(darray[0],darray[1],darray[2]), "%Y-%m-%d")
            if not self.confdict.get('bufferdirectory','') == '':
                acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid, filedate, data_bin, header)
                if debug:
                    log.msg('data saved...')

            # sending via MQTT
            data = ','.join(list(map(str,darray)))
            head = header
            topic = self.confdict.get('station') + '/' + self.sensordict.get('sensorid')
            senddata = False
            coll = int(self.sensordict.get('stack'))
            if coll > 1:
                self.metacnt = 1 # send meta data with every block
                if self.datacnt < coll:
                    self.datalst.append(data)
                    self.datacnt += 1
                else:
                    senddata = True
                    data = ';'.join(self.datalst)
                    self.datalst = []
                    self.datacnt = 0
            else:
                senddata = True

            if senddata:
                self.client.publish(topic+"/data", data)
                if self.count == 0:
                    self.client.publish(topic+"/meta", head)
                self.count += 1
                if self.count >= self.metacnt:
                    self.count = 0


            
            # right now auxiliary data only in the log file
            if t.second<2:
                # every minute aux data (battery voltage and logger temperature) will be available
                # going 61s into the past to make sure there are already data
                past=t-timedelta(seconds=61)
                aux=self.device.get_data('ValuesEveryMinute',past,t)
                log.msg('----- aux every minute:')
                # timestamp directly from datetime into array
                try:
                    darray = datetime2array(aux[0]['Datetime'])
                except:
                    # following should never happen...
                    log.msg('AUXILIARY DATA NOT RETRIEVED PROPERLY! - aux:')
                    log.msg(aux)
                    log.msg('trying again...')
                    past=t-timedelta(seconds=62)
                    aux=self.device.get_data('ValuesEveryMinute',past,t)
                    try:
                        darray = datetime2array(aux[0]['Datetime'])
                    except:
                        log.msg('giving up...')
                        return

                # battery voltage - factor 1000 for packing
                BattV_Min=int(round(aux[0]['BattV_Min']*1000))
                PTemp_C_Avg=int(round(aux[0]['PTemp_C_Avg']*1000))
                darray.extend([BattV_Min,PTemp_C_Avg])
                # alternative reading:
                #aux = (aux.filter(('Datetime', 'BattV_Min','PTemp_C_Avg')).to_csv(header=False))
                log.msg(darray)
                packcode="<6hLLl"
Code example #12
    def processData(self, data):
        """ GSM90 data """
        currenttime = datetime.utcnow()
        outdate = datetime.strftime(currenttime, "%Y-%m-%d")
        actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
        outtime = datetime.strftime(currenttime, "%H:%M:%S")
        timestamp = datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")
        filename = outdate
        sensorid = self.sensor
        packcode = '6hLLL6hL'
        header = "# MagPyBin %s %s %s %s %s %s %d" % (self.sensor, '[f,var1,sectime]', '[f,errorcode,internaltime]', '[nT,none,none]', '[1000,1,1]', packcode, struct.calcsize('<'+packcode))

        try:
            # Extract data
            # old data looks like 04-22-2015 142244  48464.53 99
            data_array = data
            if len(data) == 4:
                intensity = float(data[2])
                err_code = int(data[3])
                try:
                    try:
                        internal_t = datetime.strptime(data[0]+'T'+data[1], "%m-%d-%YT%H%M%S.%f")
                    except:
                        internal_t = datetime.strptime(data[0]+'T'+data[1], "%m-%d-%YT%H%M%S")
                    internal_time = datetime.strftime(internal_t, "%Y-%m-%d %H:%M:%S.%f")
                except:
                    internal_time = timestamp #datetime.strftime(datetime.utcnow(), "%Y-%m-%d %H:%M:%S.%f")
                #print internal_time
            elif len(data) == 3: # GSM v7.0
                intensity = float(data[1])                
                err_code = int(data[2])
                try:
                    internal_t = datetime.strptime(outdate+'T'+data[0], "%Y-%m-%dT%H%M%S.%f")
                    internal_time = datetime.strftime(internal_t, "%Y-%m-%d %H:%M:%S.%f")
                except:
                    internal_time = timestamp #datetime.strftime(datetime.utcnow(), "%Y-%m-%d %H:%M:%S.%f")
            else:
                err_code = 0
                intensity = float(data[0])
                internal_time = timestamp #datetime.strftime(datetime.utcnow(), "%Y-%m-%d %H:%M:%S")
        except:
            log.err('{} protocol: Data formatting error. Data looks like: {}'.format(self.sensordict.get('protocol'),data))

        try:
            # Analyze time difference between GSM internal time and utc from PC
            timelist = sorted([internal_t,currenttime])
            timediff = timelist[1]-timelist[0]
            #secdiff = timediff.seconds + timediff.microseconds/1E6
            #timethreshold = 3
            delta = timediff.total_seconds()
            if not delta in [0.0, np.nan, None]:
                self.delaylist.append(timediff.total_seconds())
                self.delaylist = self.delaylist[-1000:]
            if len(self.delaylist) > 100:
                try:
                    self.timedelay = np.median(np.asarray(self.delaylist))
                except:
                    self.timedelay = 0.0
            #if secdiff > timethreshold:
            if delta > self.timethreshold:
                self.errorcnt['time'] +=1
                if self.errorcnt.get('time') < 2:
                    log.msg("{} protocol: large time difference observed for {}: {} sec".format(self.sensordict.get('protocol'), sensorid, secdiff))
            else:
                self.errorcnt['time'] = 0 
        except:
            pass

        if self.sensordict.get('ptime','') in ['NTP','ntp']:
            secondtime = internal_time
            maintime = timestamp
        else:
            maintime = internal_time
            secondtime = timestamp

        try:
            ## GSM90 does not provide any info on whether the GPS reading is OK or not

            # extract time data
            datearray = acs.timeToArray(maintime)
            try:
                datearray.append(int(intensity*1000.))
                datearray.append(err_code)
                #print timestamp, internal_time
                internalarray = acs.timeToArray(secondtime)
                datearray.extend(internalarray)
                data_bin = struct.pack('<'+packcode,*datearray)
            except:
                log.msg('{} protocol: Error while packing binary data'.format(self.sensordict.get('protocol')))

            if not self.confdict.get('bufferdirectory','') == '':
                acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid, filename, data_bin, header)

        except:
            log.msg('{} protocol: Error with binary save routine'.format(self.sensordict.get('protocol')))


        return ','.join(list(map(str,datearray))), header
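Every example writes a "# MagPyBin" header line describing the buffered records. A minimal sketch of assembling such a line and splitting it back into its fields (sensor id, keys, elements, units, multipliers, pack code, packed size); the helper names and the sensor id are illustrative, the field order follows the headers shown above:

import struct

def build_magpybin_header(sensorid, keys, elems, units, multipliers, packcode):
    # sensorid [keys] [elements] [units] [multipliers] packcode packsize
    return "# MagPyBin {} {} {} {} {} {} {}".format(
        sensorid,
        '[' + ','.join(keys) + ']',
        '[' + ','.join(elems) + ']',
        '[' + ','.join(units) + ']',
        '[' + ','.join(map(str, multipliers)) + ']',
        packcode,
        struct.calcsize('<' + packcode))

def parse_magpybin_header(header):
    # inverse of the above; assumes the sensor id contains no spaces
    parts = header.split()
    sensorid, keys, elems, units, mult, packcode, size = parts[2:9]
    return sensorid, keys, elems, units, mult, packcode, int(size)

hdr = build_magpybin_header('GSM90_0001_0001',            # placeholder sensor id
                            ['f', 'var1', 'sectime'],
                            ['f', 'errorcode', 'internaltime'],
                            ['nT', 'none', 'none'],
                            [1000, 1, 1], '6hLLL6hL')
print(parse_magpybin_header(hdr))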
Code example #13
File: gsm19protocol.py Project: geomagpy/MARTAS
    def processData(self, data):

        currenttime = datetime.utcnow()
        date = datetime.strftime(currenttime, "%Y-%m-%d")
        actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
        outtime = datetime.strftime(currenttime, "%H:%M:%S")
        filename = date
        timestamp = datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")
        intensity = 88888.8
        typ = "none"
        dontsavedata = False
        # fallbacks in case the instrument line cannot be interpreted
        systemtime = currenttime
        datearray = []

        packcode = '6hLLl'
        header = "# MagPyBin %s %s %s %s %s %s %d" % (
            self.sensor, '[f,var1]', '[f,err]', '[nT,none]', '[1000,1000]',
            packcode, struct.calcsize('<' + packcode))

        try:
            # Extract data
            data_array = data
            if len(data_array) == 2:
                typ = "oldbase"
            elif len(data_array) == 3:
                typ = "valid"
            # add other types here
        except:
            log.err(
                'GSM19 - Protocol: Output format not supported - use either base, ... or mobile'
            )

        # Extracting the data from the station
        # Extract time info and use it as primary if GPS is on (in this case PC time is secondary)
        #                          PC is primary when a GPS is not connected

        if typ == "valid" or typ == "oldbase":  # Comprises Mobile and Base Station mode with single sensor and no GPS
            intensity = float(data_array[1])
            try:
                systemtime = datetime.strptime(date + "-" + data_array[0],
                                               "%Y-%m-%d-%H%M%S.%f")
            except:
                # This exception happens for old GSM19 because time is
                # provided e.g. as 410356 instead of 170356 for 17:03:56 (Thursday)
                # e.g 570301.0 instead of 09:03:01 (Friday)
                try:
                    hournum = int(data_array[0][:-6])
                    rest = data_array[0][-6:]
                    factor = np.floor(hournum /
                                      24.)  # factor = days since starting
                    hour = int(hournum - factor * 24.)
                    systemtime = datetime.strptime(
                        date + "-" + str(hour) + rest, "%Y-%m-%d-%H%M%S.%f")
                    #print ("Got oldbase systemtime")
                except:
                    systemtime = currenttime
                    self.timesource = 'NTP'
            if len(data_array) == 2:
                typ = "base"
                errorcode = 99
            elif len(data_array[2]) == 3:
                typ = "base"
                errorcode = int(data_array[2])
            else:
                typ = "gradient"
                gradient = float(data_array[2])
        elif typ == "none":
            dontsavedata = True
            pass

        gpstime = datetime.strftime(systemtime, "%Y-%m-%d %H:%M:%S.%f")

        try:
            # Analyze time difference between GSM internal time and utc from PC
            timelist = sorted([systemtime, currenttime])
            timediff = timelist[1] - timelist[0]
            #secdiff = timediff.seconds + timediff.microseconds/1E6
            delta = timediff.total_seconds()
            if not delta in [0.0, np.nan, None]:
                self.delaylist.append(timediff.total_seconds())
                self.delaylist = self.delaylist[-1000:]
            if len(self.delaylist) > 100:
                try:
                    self.timedelay = np.median(np.asarray(self.delaylist))
                except:
                    self.timedelay = 0.0
            if delta > self.timethreshold:
                self.errorcnt['time'] += 1
                if self.errorcnt.get('time') < 2:
                    log.msg(
                        "{} protocol: large time difference observed for {}: {} sec"
                        .format(self.sensordict.get('protocol'), self.sensor,
                                delta))
            else:
                self.errorcnt['time'] = 0
        except:
            pass

        if self.sensordict.get('ptime', '') in ['NTP', 'ntp']:
            secondtime = gpstime
            maintime = timestamp
        else:
            maintime = gpstime
            secondtime = timestamp

        try:
            if not typ == "none":
                # extract time data
                datearray = acs.timeToArray(maintime)
                try:
                    datearray.append(int(intensity * 1000.))
                    if typ == 'base':
                        datearray.append(int(errorcode * 1000.))
                    else:
                        datearray.append(int(gradient * 1000.))
                    data_bin = struct.pack('<' + packcode, *datearray)
                except:
                    log.msg(
                        'GSM19 - Protocol: Error while packing binary data')
                    pass
        except:
            log.msg('GSM19 - Protocol: Error with binary save routine')
            pass

        if not self.confdict.get('bufferdirectory', '') == '' and not dontsavedata:
            acs.dataToFile(self.confdict.get('bufferdirectory'), self.sensor,
                           filename, data_bin, header)

        return ','.join(list(map(str, datearray))), header
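The two GSM examples share the same time-quality check: keep a rolling list of differences between instrument time and PC time, take the median once enough samples exist, and count consecutive threshold violations. A minimal standalone sketch of that pattern with illustrative names (the originals keep this state on self):

import numpy as np
from datetime import datetime, timedelta

class TimeDelayMonitor:
    """Rolling median of instrument-vs-PC time offsets plus a violation counter."""
    def __init__(self, threshold=3.0, maxlen=1000, minlen=100):
        self.threshold = threshold      # seconds, like self.timethreshold above
        self.maxlen = maxlen            # keep at most the last 1000 offsets
        self.minlen = minlen            # report a median only after 100 samples
        self.delaylist = []
        self.timedelay = 0.0
        self.errorcnt = 0

    def update(self, instrument_time, pc_time):
        delta = abs((pc_time - instrument_time).total_seconds())
        if delta != 0.0:
            self.delaylist = (self.delaylist + [delta])[-self.maxlen:]
        if len(self.delaylist) > self.minlen:
            self.timedelay = float(np.median(np.asarray(self.delaylist)))
        self.errorcnt = self.errorcnt + 1 if delta > self.threshold else 0
        return delta

mon = TimeDelayMonitor()
now = datetime.utcnow()
print(mon.update(now - timedelta(seconds=6.2), now))   # 6.2 -> counts as a violation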
Code example #14
File: mysqlprotocol.py Project: geomagpy/MARTAS
    def sendRequest(self):
        """
        source:mysql:
        Method to obtain data from table
        """
        t1 = datetime.utcnow()
        outdate = datetime.strftime(t1, "%Y-%m-%d")
        filename = outdate

        if self.debug:
            log.msg("  -> DEBUG - Sending periodic request ...")

        def getList(sql):
            cursor = self.db.cursor()
            try:
                cursor.execute(sql)
            except mysql.IntegrityError as message:
                return message
            except mysql.Error as message:
                return message
            except:
                return 'dbgetlines: unknown error'
            head = cursor.fetchall()
            keys = list(np.transpose(np.asarray(head))[0])
            return keys

        # get self.sensorlist
        # get last timestamps 
        # read all data for each sensor since last timestamp
        # send that and store last timestamp 
        for index,sensdict in enumerate(self.sensorlist):
            sensorid = sensdict.get('sensorid')
            if self.debug:
                log.msg("  -> DEBUG - dealing with sensor {}".format(sensorid))
            # 1. Getting header
            # -----------------
            # load keys, elements and units
            #header = "# MagPyBin %s %s %s %s %s %s %d" % (sensorid, key, ele, unit, multplier, packcode, struct.calcsize('<'+packcode))
            dataid = sensorid+'_'+self.revision
            keyssql = 'SHOW COLUMNS FROM %s' % (dataid)
            keystab = getList(keyssql)
            if 'time' in keystab:
                keystab.remove('time')
            if 'flag' in keystab:
                keystab.remove('flag')
            if 'typ' in keystab:
                keystab.remove('typ')
            if 'comment' in keystab:
                keystab.remove('comment')
            keys = ','.join(keystab)
            if self.debug:
                log.msg("  -> DEBUG - requesting header {}".format(sensorid))
            sql1 = 'SELECT SensorElements FROM SENSORS WHERE SensorID LIKE "{}"'.format(sensorid)
            sql2 = 'SELECT Sensorkeys FROM SENSORS WHERE SensorID LIKE "{}"'.format(sensorid)
            sql3 = 'SELECT ColumnUnits FROM DATAINFO WHERE SensorID LIKE "{}"'.format(sensorid)
            sql4 = 'SELECT ColumnContents FROM DATAINFO WHERE SensorID LIKE "{}"'.format(sensorid)
            try:
                elem = getList(sql1)[0].split(',')
            except:
                elem =[]
            try:
                keyssens = getList(sql2)[0].split(',')
            except:
                keyssens =[]
            try:
                unit = getList(sql3)[0].split(',')
            except:
                unit =[]
            try:
                cont = getList(sql4)[0].split(',')
            except:
                cont =[]
            units, elems = [], []
            for key in keystab:
                try:
                    pos1 = keyssens.index(key)
                    ele = elem[pos1]
                except:
                    ele = key
                elems.append(ele)
                try:
                    pos2 = cont.index(ele)
                    units.append(unit[pos2])
                except:
                    units.append('None')
            if self.debug:
                log.msg("  -> DEBUG - creating head line {}".format(sensorid))
            multplier = '['+','.join(map(str, [10000]*len(keystab)))+']'
            packcode = '6HL'+''.join(['q']*len(keystab))
            header = ("# MagPyBin {} {} {} {} {} {} {}".format(sensorid, '['+','.join(keystab)+']', '['+','.join(elems)+']', '['+','.join(units)+']', multplier, packcode, struct.calcsize('<'+packcode)))

            # 2. Getting dict
            sql = 'SELECT DataSamplingRate FROM DATAINFO WHERE SensorID LIKE "{}"'.format(sensorid)
            sr = float(getList(sql)[0])
            coverage = int(self.requestrate/sr)+120

            # 3. Getting data
            # get data and create typical message topic
            # based on sampling rate and collection rate -> define coverage

            li = sorted(mdb.dbselect(self.db, 'time,'+keys, dataid, expert='ORDER BY time DESC LIMIT {}'.format(int(coverage))))
            if not self.lastt[index]:
                self.lastt[index]=li[0][0]

            # drop
            newdat = False
            newli = []
            for elem in li:
                if elem[0] == self.lastt[index]:
                    newdat = True
                if newdat:
                    newli.append(elem)

            if not len(newli) > 0:
                # if last time not included in li then newli will be empty
                # in this case just add the list
                for elem in li:
                    newli.append(elem)

            for dataline in newli:
                timestamp = dataline[0]
                data_bin = None
                datearray = ''
                try:
                    datearray = acs.timeToArray(timestamp)
                    for i,para in enumerate(keystab):
                        try:
                            val=int(float(dataline[i+1])*10000)
                        except:
                            val=999990000
                        datearray.append(val)
                    data_bin = struct.pack('<'+packcode,*datearray)  # little endian
                except:
                    log.msg('Error while packing binary data')

                if not self.confdict.get('bufferdirectory','') == '' and data_bin:
                    acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid, filename, data_bin, header)
                if self.debug:
                    log.msg("  -> DEBUG - sending ... {}".format(','.join(list(map(str,datearray))), header))
                self.sendData(sensorid,','.join(list(map(str,datearray))),header,len(newli)-1)

            self.lastt[index]=li[-1][0]

        t2 = datetime.utcnow()
        if self.debug:
            log.msg("  -> DEBUG - Needed {}".format(t2-t1))
Code example #15
    def processPos1Data(self, data):
        """Convert raw ADC counts into SI units as per datasheets"""
        if len(data) != 44:
            log.err('POS1 - Protocol: Unable to parse data of length %i' % len(data))

        currenttime = datetime.utcnow()
        outdate = datetime.strftime(currenttime, "%Y-%m-%d")
        actualtime = datetime.strftime(currenttime, "%Y-%m-%dT%H:%M:%S.%f")
        outtime = datetime.strftime(currenttime, "%H:%M:%S")
        timestamp = datetime.strftime(currenttime, "%Y-%m-%d %H:%M:%S.%f")
        filename = outdate
        sensorid = self.sensor

        packcode = '6hLLLh6hL'
        header = "# MagPyBin %s %s %s %s %s %s %d" % (self.sensor, '[f,df,var1,sectime]', '[f,df,var1,GPStime]', '[nT,nT,none,none]', '[1000,1000,1,1]', packcode, struct.calcsize('<'+packcode))

        try:
            # Extract data
            data_array = data.split()
            intensity = float(data_array[0])/1000.
            sigma_int = float(data_array[2])/1000.
            err_code = int(data_array[3].strip('[').strip(']'))
            dataelements = datetime.strptime(data_array[4],"%m-%d-%y")
            newdate = datetime.strftime(dataelements,"%Y-%m-%d")
            gps_time = newdate + ' ' + str(data_array[5])[:11]
        except:
            log.err('POS1 - Protocol: Data formatting error.')
            intensity = 0.0
            sigma_int = 0.0
            err_code = 0

        try:
            # Analyze time difference between POS1 internal time and utc from PC
            # Please note that the time difference between POS1-GPS (data recorded) 
            # and NTP (data received at PC) can be very large
            # for our POS1 it is 6.2 seconds

            gpstime = datetime.strptime(gps_time, "%Y-%m-%d %H:%M:%S.%f")
            timelist = sorted([gpstime,currenttime])
            timediff = timelist[1]-timelist[0]
            delta = timediff.total_seconds()
            if not delta in [0.0, np.nan, None]:
                self.delaylist.append(delta)
                self.delaylist = self.delaylist[-1000:]
            if len(self.delaylist) > 100:
                try:
                    self.timedelay = np.median(np.asarray(self.delaylist))
                except:
                    self.timedelay = 0.0
            if delta-self.ntp_gps_offset > self.timethreshold:
                self.errorcnt['time'] +=1
                if self.errorcnt.get('time') < 2:
                    log.msg("{} protocol: large time difference observed for {}: {} sec".format(self.sensordict.get('protocol'), sensorid, secdiff))
            else:
                self.errorcnt['time'] = 0 
        except:
            pass

        if self.sensordict.get('ptime','') in ['NTP','ntp']:
            secondtime = gps_time
            maintime = timestamp
        else:
            maintime = gps_time
            secondtime = timestamp

        try:
            # extract time data
            datearray = acs.timeToArray(maintime)
            sectarray = acs.timeToArray(secondtime)
            try:
                datearray.append(int(intensity*1000))
                datearray.append(int(sigma_int*1000))
                datearray.append(err_code)
                datearray.extend(sectarray)
                data_bin = struct.pack('<'+packcode, *datearray)
            except:
                log.msg('POS1 - Protocol: Error while packing binary data')
                pass
            if not self.confdict.get('bufferdirectory','') == '':
                acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid, filename, data_bin, header)
        except:
            log.msg('POS1 - Protocol: Error with binary save routine')
            pass

        return ','.join(list(map(str,datearray))), header
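All of the examples above write fixed-size little-endian records whose layout is described by the header's pack code and multiplier list. A minimal sketch of reading such a record back, assuming the usual 6hL time prefix (year .. second as shorts, microsecond as a long) followed by scaled integer values; names are illustrative:

import struct
from datetime import datetime

def unpack_record(data_bin, packcode, multipliers):
    # decode one record written with struct.pack('<' + packcode, *datearray)
    vals = struct.unpack('<' + packcode, data_bin)
    ts = datetime(*vals[:6], microsecond=vals[6])          # '6hL' time prefix (assumption)
    scaled = [v / float(m) for v, m in zip(vals[7:], multipliers)]
    return ts, scaled

# round trip with the GSM19-style pack code '6hLLl' and multipliers [1000, 1000]
packcode = '6hLLl'
now = datetime.utcnow()
darray = [now.year, now.month, now.day, now.hour, now.minute, now.second,
          now.microsecond, int(48123.45 * 1000), int(99 * 1000)]
rec = struct.pack('<' + packcode, *darray)
print(unpack_record(rec, packcode, [1000, 1000]))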