Example #1
def GetData(source,
            path,
            db,
            dbcredentials,
            sensorid,
            amount,
            startdate=None,
            debug=False):
    """
    DESCRIPTION:
    read the appropriate amount of data from the data file, database or mqtt stream
    """

    data = DataStream()
    msg = ''
    if not startdate:
        startdate = datetime.utcnow()
        endtime = None
    else:
        endtime = startdate

    starttime = startdate - timedelta(seconds=int(amount))

    if source in ['file', 'File']:
        filepath = os.path.join(path, sensorid)
        # TODO eventually check for existence and look for similar names
        # expath = CheckPath(filepath)
        # if expath:
        if debug:
            print("Trying to access files in {}: Timerange: {} to {}".format(
                filepath, starttime, endtime))
        try:
            data = read(os.path.join(filepath, '*'),
                        starttime=starttime,
                        endtime=endtime)
        except:
            msg = "Could not access data for sensorid {}".format(sensorid)
            if debug:
                print(msg)
    elif source in ['db', 'DB', 'database', 'Database']:
        db = mysql.connect()
        data = readDB(db, sensorid, starttime=starttime)

    if debug:
        print("Got {} datapoints".format(data.length()[0]))

    return (data, msg)
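
A minimal usage sketch for GetData, assuming it is called from the same module as in Example #5; the path, sensorid and amount are placeholders taken from the sample threshold.cfg shown there:

# Sketch only: values mirror the sample configuration (sensorpath, sensorid, timerange)
data, msg = GetData('file', '/srv/mqtt/', None, None,
                    'DS18B20XX', 1800, debug=True)
if msg:
    print("GetData reported: {}".format(msg))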
Example #2
def GetTestValue(data=DataStream(), key='x', function='average', debug=False):
    """
    DESCRIPTION
    Returns a comparison value (e.g. mean, max, etc.)
    """
    if debug:
        print("Obtaining test value for key {} with function {}".format(
            key, function))
    func = 'mean'
    testvalue = None
    msg = ''
    n = data.length()[0]
    keys = data._get_key_headers()

    if not key in keys:
        print("Requested key not found")
        return (testvalue, 'failure')
    if function in ['mean', 'Mean', 'average', 'Average', 'Median', 'median']:
        if n < 3:
            print(
                "not enough data points --- {} insignificant".format(function))
        if function in ['mean', 'Mean', 'average', 'Average']:
            func = 'mean'
        elif function in ['Median', 'median']:
            func = 'median'
        testvalue = data.mean(key, meanfunction=func)
    elif function in ['max', 'Max']:
        testvalue = data._get_max(key)
    elif function in ['min', 'Min']:
        testvalue = data._get_min(key)
    elif function in ['stddev', 'Stddev']:
        mean, testvalue = data.mean(key, std=True)
    else:
        msg = 'selected test function not available'

    if debug:
        print(" ... got {}".format(testvalue))

    return (testvalue, msg)
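
A short usage sketch for GetTestValue, assuming a DataStream obtained via GetData above; the key 't1' is the placeholder key from the sample configuration:

# Sketch only
testvalue, msg = GetTestValue(data=data, key='t1', function='median', debug=True)
if msg:
    print("No usable test value: {}".format(msg))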
Example #3
def on_message(client, userdata, msg):
    if pyversion.startswith('3'):
        msg.payload = msg.payload.decode('ascii')

    global qos
    global verifiedlocation
    arrayinterpreted = False
    if stationid in ['all', 'All', 'ALL']:
        stid = msg.topic.split('/')[0]
    else:
        stid = stationid
    try:
        sensorind = msg.topic.split('/')[1]
        sensorid = sensorind.replace('meta',
                                     '').replace('data',
                                                 '').replace('dict', '')
    except:
        # The above fails if msg.topic does not contain '/'
        # TODO: the fallback below (replacing only the first occurrence of stid) should work as well,
        # although the code above is more general
        sensorid = msg.topic.replace(stid, "", 1).replace('/', '').replace(
            'meta', '').replace('data', '').replace('dict', '')
    # define a new data stream for each non-existing sensor
    if not instrument == '':
        if not sensorid.find(instrument) > -1:
            return

    if sensorid in blacklist:
        if debug:
            print("Sensor {} in blacklist - not collecting".format(sensorid))
        return

    ## ################################################################################
    ## ####            Optionally check for additional format libraries       #########
    ## ################################################################################
    identdic = {}

    if addlib and len(addlib) > 0:
        # Currently only one additional library is supported
        lib = addlib[0]
        #for lib in addlib:
        elemlist = []
        for elem in topic_identifiers[lib]:
            strelem = "msg.topic.{}('{}')".format(elem,
                                                  topic_identifiers[lib][elem])
            elemlist.append(strelem)
        if len(elemlist) > 1:
            teststring = " and ".join(elemlist)
        else:
            teststring = "".join(elemlist)
        if eval(teststring):
            classref = class_reference.get(lib)
            #print ("1", msg.payload)
            try:
                msg.payload, sensorid, headerline, headerdictionary, identdic = classref.GetPayload(
                    msg.payload, msg.topic)
            except:
                print("Interpretation error for {}".format(msg.topic))
                return
            #print (payload, sensorid, headerline)
            headdict[sensorid] = headerline
            headstream[sensorid] = create_head_dict(headerline, sensorid)
            headstream[sensorid] = merge_two_dicts(headstream[sensorid],
                                                   headerdictionary)
            msg.topic = msg.topic + '/data'
            for el in identdic:
                po.identifier[el] = identdic[el]

    metacheck = po.identifier.get(sensorid + ':packingcode', '')

    ## ################################################################################

    if msg.topic.endswith('meta') and metacheck == '':
        log.msg("Found basic header:{}".format(str(msg.payload)))
        log.msg("Quality of Service (QOS):{}".format(str(msg.qos)))
        analyse_meta(str(msg.payload), sensorid)
        if not sensorid in headdict:
            headdict[sensorid] = msg.payload
            # create stream.header dictionary and add it here
            headstream[sensorid] = create_head_dict(str(msg.payload), sensorid)
            if debug:
                log.msg("New headdict: {}".format(headdict))
    elif msg.topic.endswith('dict') and sensorid in headdict:
        #log.msg("Found Dictionary:{}".format(str(msg.payload)))
        head_dict = headstream[sensorid]
        for elem in str(msg.payload).split(','):
            keyvaluespair = elem.split(':')
            try:
                if not keyvaluespair[1] in ['-', '-\n', '-\r\n']:
                    head_dict[keyvaluespair[0]] = keyvaluespair[1].strip()
            except:
                pass
        if debug:
            log.msg("Dictionary now looks like {}".format(
                headstream[sensorid]))
    elif msg.topic.endswith('data'):  # or readable json
        #if readable json -> create stream.ndarray and set arrayinterpreted :
        #    log.msg("Found data:", str(msg.payload), metacheck)
        if not metacheck == '':
            if 'file' in destination:
                # Import module for writing data from acquisition
                # -------------------
                #if debug:
                #    log.msg(sensorid, metacheck, msg.payload)  # payload can be split
                # Check whether header is already identified
                # -------------------
                if sensorid in headdict:
                    header = headdict.get(sensorid)
                    if sys.version_info >= (3, 0):
                        metacheck = metacheck.decode()
                    if metacheck.endswith('B'):
                        packcode = metacheck.strip(
                            '<')[:-1]  # drop leading < and final B
                    else:
                        packcode = metacheck.strip('<')  # drop leading <
                    # temporary code - to be deleted when lemi protocol has been updated
                    if packcode.find('4cb6B8hb30f3Bc') >= 0:
                        header = header.replace(
                            '<4cb6B8hb30f3BcBcc5hL 169\n',
                            '6hLffflll {}'.format(
                                struct.calcsize('<6hLffflll')))
                        packcode = '6hLffflll'
                    arrayelem = msg.payload.split(';')
                    for ar in arrayelem:
                        datearray = ar.split(',')
                        # identify string values in packcode
                        # -------------------
                        # convert packcode numbers
                        cpack = []
                        digit = 1  # default repeat count in case packcode starts with a letter
                        for c in packcode:
                            if c.isdigit():
                                digit = int(c)
                            else:
                                cpack.extend([c] * digit)
                                digit = 1
                        cpackcode = "".join(cpack)
                        for i in range(len(cpackcode)):
                            if cpackcode[-i] == 's':
                                datearray[-i] = datearray[-i]
                            elif cpackcode[-i] == 'f':
                                datearray[-i] = float(datearray[-i])
                            else:
                                datearray[-i] = int(float(datearray[-i]))
                        # pack data using little endian byte order
                        data_bin = struct.pack('<' + packcode, *datearray)
                        # Check whether destination path has been verified already
                        # -------------------
                        if not verifiedlocation:
                            if location not in [None, ''] and os.path.exists(location):
                                verifiedlocation = True
                            else:
                                log.msg(
                                    "File: destination location {} is not accessible"
                                    .format(location))
                                log.msg(
                                    "      -> please use option l (e.g. -l '/my/path') to define"
                                )
                        if verifiedlocation:
                            filename = "{}-{:02d}-{:02d}".format(
                                datearray[0], datearray[1], datearray[2])
                            acs.dataToFile(location, sensorid, filename,
                                           data_bin, header)
            if 'websocket' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                for idx, el in enumerate(stream.ndarray[0]):
                    time = num2date(el).replace(tzinfo=None)
                    msecSince1970 = int(
                        (time - datetime(1970, 1, 1)).total_seconds() * 1000)
                    datastring = ','.join([
                        str(val[idx]) for i, val in enumerate(stream.ndarray)
                        if len(val) > 0 and not i == 0
                    ])
                    if debug:
                        print("Sending {}: {},{} to webserver".format(
                            sensorid, msecSince1970, datastring))
                    wsserver.send_message_to_all("{}: {},{}".format(
                        sensorid, msecSince1970, datastring))
            if 'diff' in destination:
                global counter
                counter += 1
                global number
                amount = int(number)
                cover = 5
                if not arrayinterpreted:
                    ar = interprete_data(msg.payload, stream, sensorid)
                    if not sensorid in senslst:
                        senslst.append(sensorid)
                        st.append(DataStream([], {}, ar))
                    idx = senslst.index(sensorid)
                    st[idx].extend(stream.container, {'SensorID': sensorid},
                                   ar)
                    arrayinterpreted = True
                st[idx].ndarray = np.asarray(
                    [np.asarray(el[-cover:]) for el in st[idx].ndarray])
                if len(st) < 2:
                    print("Not enough streams for subtraction yet")
                try:
                    if counter > amount:
                        counter = 0
                        sub = subtractStreams(st[0], st[1])
                        try:
                            part1 = (
                                st[0].header.get('SensorID').split('_')[1])
                        except:
                            part1 = 'unknown'
                        try:
                            part2 = (
                                st[1].header.get('SensorID').split('_')[1])
                        except:
                            part2 = 'unknown'
                        name = "Diff_{}-{}_0001".format(part1, part2)
                        # get head line for pub
                        #name = "diff_xxx_0001"
                        keys = sub._get_key_headers(numerical=True)
                        ilst = [KEYLIST.index(key) for key in keys]
                        keystr = "[{}]".format(",".join(keys))
                        #takeunits =  ### take from st[0]
                        packcode = "6hL{}".format("".join(['l'] * len(keys)))
                        multi = "[{}]".format(",".join(['1000'] * len(keys)))
                        unit = "[{}]".format(",".join(['arb'] * len(keys)))
                        head = "# MagPyBin {} {} {} {} {} {} {}".format(
                            name, keystr, keystr, unit, multi, packcode,
                            struct.calcsize('<' + packcode))
                        #print (head)
                        # get data line for pub
                        time = sub.ndarray[0][-1]
                        timestr = (datetime.strftime(
                            num2date(float(time)).replace(tzinfo=None),
                            "%Y,%m,%d,%H,%M,%S,%f"))
                        val = [sub.ndarray[i][-1] for i in ilst]
                        if len(val) > 1:
                            valstr = ",".join([str(int(v * 1000)) for v in val])
                        else:
                            valstr = str(int(val[0] * 1000))
                        data = "{},{}".format(timestr, valstr)
                        #print (data)
                        topic = "wic/{}".format(name)
                        client.publish(topic + "/data", data, qos=qos)
                        client.publish(topic + "/meta", head, qos=qos)
                except:
                    print("Found error in subtraction")
            if 'stdout' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                for idx, el in enumerate(stream.ndarray[0]):
                    time = num2date(el).replace(tzinfo=None)
                    datastring = ','.join([
                        str(val[idx]) for i, val in enumerate(stream.ndarray)
                        if len(val) > 0 and not i == 0
                    ])
                    log.msg("{}: {},{}".format(sensorid, time, datastring))
            elif 'db' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                # create a stream.header
                #if debug:
                #    log.msg(stream.ndarray)
                stream.header = headstream[sensorid]
                if debug:
                    log.msg("writing header: {}".format(headstream[sensorid]))
                if revision != 'free':
                    writeDB(db,
                            stream,
                            tablename="{}_{}".format(sensorid, '0001'))
                else:
                    writeDB(db, stream)
            elif 'stringio' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                for idx, el in enumerate(stream.ndarray[0]):
                    time = num2date(el).replace(tzinfo=None)
                    date = datetime.strftime(time, "%Y-%m-%d %H:%M:%S.%f")
                    linelist = list(map(str, [el, date]))
                    linelist.extend([
                        str(val[idx]) for i, val in enumerate(stream.ndarray)
                        if len(val) > 0 and not i == 0
                    ])
                    line = ','.join(linelist)
                    eol = '\r\n'
                    output.write(line + eol)
            elif 'serial' in destination:
                if not arrayinterpreted:
                    stream.ndarray = interprete_data(msg.payload, stream,
                                                     sensorid)
                    #streamdict[sensorid] = stream.ndarray  # to store data from different sensors
                    arrayinterpreted = True
                """
                # send json like structures
                collcount = 10
                if sercount <= collcount:
                    for idx,col in enumerate(stream.ndarray):
                        if not len(col) == 0:
                            keyname = KEYLIST[idx]
                            if idx == 0:
                                time = num2date(col).replace(tzinfo=None)
                                col = int((time - datetime(1970,1,1)).total_seconds()*1000)
                            excol = datacol.get(keyname,[])
                            datacol[keyname] = excol.append(col)
                    sercount += 1
                if sercount == collcount:
                    sercount = 0
                    jsonstr={}
                    jsonstr['sensorid'] = sensorid
                    jsonstr['nr'] = i
                    jsonstr['key'] = po.identifier[sensorid+':keylist'][i]
                    jsonstr['elem'] = po.identifier[sensorid+':elemlist'][i]
                    jsonstr['unit'] = po.identifier[sensorid+':unitlist'][i]
                    payload = json.dumps(jsonstr)
                    # write input to another serial port (e.g. for radio transmission)
                    # requires serdef = e.g. [115200,8,1,N]
                    # eventually create minimum 30 sec json blocks
                    #sendline='{"SensorID":"{}","Units":["Sec1970","degC"],"Keys":{"time":"time","x":"Temperature"}, "time":[{}],"x":[{}]}'.format(sensorid,time,x)
                    #{"SensorID":"ID","units":["Sec1970","degC"],"keys":{"time":"time","x":"Temperature"}, "data":{"time":[12,13,45],"x":[12,13,45]}}
                    #ser.write("{}: {},{}".format(sensorid,msecSince1970,datastring))
                """
            else:
                pass
        else:
            log.msg("Non-interpreted format: {}  {}".format(
                msg.topic, str(msg.payload)))
    elif msg.topic.find('statuslog') > 0:
        # json style statusinfo is coming
        hostname = msg.topic.split('/')[-1]
        #log.msg("---------------------------------------------------------------")
        #log.msg("Receiving updated status information from {}".format(hostname))
        #log.msg("---------------------------------------------------------------")
        print("FOUND STATUS CHANGE", telegramconf)
        statusdict = json.loads(msg.payload)
        for elem in statusdict:
            logmsg = "{}: {} - {}".format(hostname, elem, statusdict[elem])
            # For Nagios - add in marcos.log
            log.msg(logmsg)
        # For Telegram
        try:
            # try to import telegram and telegram.cfg
            ##### Add the configuration to input and marcos.cfg
            ## Please note: requires anaconda2/bin/python on my test PC
            ## !!! Requires network connection !!!
            if not telegramconf == '/telegram.conf':
                martaslog = ml(receiver='telegram')
                martaslog.receiveroptions('telegram',
                                          options={'conf': telegramconf})
                statusdict['Hostname'] = hostname
                martaslog.notify(statusdict)
        except:
            pass

        #telegram.send(msg)

    if msg.topic.endswith('meta') and 'websocket' in destination:
        # send header info for each element (# sensorid   nr   key   elem   unit)
        analyse_meta(str(msg.payload), sensorid)
        for (i, void) in enumerate(po.identifier[sensorid + ':keylist']):
            jsonstr = {}
            jsonstr['sensorid'] = sensorid
            jsonstr['nr'] = i
            jsonstr['key'] = po.identifier[sensorid + ':keylist'][i]
            jsonstr['elem'] = po.identifier[sensorid + ':elemlist'][i]
            jsonstr['unit'] = po.identifier[sensorid + ':unitlist'][i]
            payload = json.dumps(jsonstr)
            wsserver.send_message_to_all('# ' + payload)
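
The callback above follows the paho-mqtt interface (client, userdata, msg). A minimal sketch of how it could be wired to a broker; the host, port and topic are placeholders, and the real collector performs this setup elsewhere:

import paho.mqtt.client as mqtt

client = mqtt.Client()
client.on_message = on_message            # callback defined above
client.connect("localhost", 1883)         # placeholder broker address
client.subscribe("WIC/#", qos=0)          # placeholder station topic
client.loop_forever()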
Example #4
try:
    # available since MagPy 0.3.99 in magpy.collector
    # since MARTAS 0.1.9 in core
    from core.websocket_server import WebsocketServer
    ws_available = True
except:
    ws_available = False

# Some variable initialization
## -----------------------------------------------------------
global identifier  # That's probably wrong ... global should be used inside functions
identifier = {}  # used to store lists from header lines
global counter  # used for diffcalc
counter = 0

qos = 0
streamdict = {}
stream = DataStream()
st = []
senslst = []
headdict = {}  # store header lines for all sensors (the header line is the first line of a BIN file)
headstream = {}
verifiedlocation = False
destination = 'stdout'
location = '/tmp'
credentials = 'cred'
stationid = 'WIC'
stid = stationid
webpath = './web'
webport = 8080
socketport = 5000
blacklist = []
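
A sketch of how the guarded WebsocketServer import above is typically used: only start the server when the import succeeded and a websocket destination was requested. The constructor arguments are an assumption, not taken from this code:

if ws_available and 'websocket' in destination:
    wsserver = WebsocketServer(socketport, host='0.0.0.0')  # assumed signature
    wsserver.run_forever()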
Example #5
def main(argv):

    para = sp.parameterdict
    conf = sp.configdict
    debug = False
    configfile = None
    statusdict = {}
    statuskeylist = []
    travistestrun = False

    usagestring = 'threshold.py -h <help> -m <configpath>'
    try:
        opts, args = getopt.getopt(argv,"hm:DT",["configpath="])
    except getopt.GetoptError:
        print ('Check your options:')
        print (usagestring)
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print ('------------------------------------------------------')
            print ('Usage:')
            print (usagestring)
            print ('------------------------------------------------------')
            print ('Options:')
            print ('-h            help')
            print ('-m            Define the path for the configuration file.')
            print ('              Please note: a configuration file is obligatory')
            print ('              ----------------------------')
            print ('              configurationfile')
            print ('              ----------------------------')
            print ('              threshold.cfg: (looks like)')
            print ('              # MARTAS directory')
            print ('              martasdir            :   /home/cobs/MARTAS/')
            print ('              # Define data source (file, db)')
            print ('              source               :   file')
            print ('              # If source = db then define data base credentials created by addcred (MARTAS)')
            print ('              dbcredentials        :   None')
            print ('              # If source = file define the base path')
            print ('              sensorpath           :   /srv/mqtt/')
            print ('              # Notification (uses martaslog class, one of email, telegram, mqtt, log) ')
            print ('              notification         :   email')
            print ('              notificationconfig   :   /etc/martas/notification.cfg')
            print ('              # serial communication for switch commands (based on ardcomm.py (MARTAS/app))')
            print ('              serialcfg            :   None')
            print ('              #parameter (all given parameters are checked in the given order, use semicolons for parameter list):')
            print ('              # sensorid; timerange to check; key to check, value, lower or upper bound,statusmessage,resetby,switchcommand(optional)')
            print ('              1  :  DS18B20XX,1800,t1,5,low,average,on,swP:4:1')
            print ('              2  :  DS18B20XY,1800,t1,10,high,median,off,swP:4:0')
            print ('              3  :  DS18B20XZ,600,t2,20,high,max,alarm at date,None')
            print ('              #to be continued...')

            print ('------------------------------------------------------')
            print ('Example:')
            print ('   python threshold.py -m /etc/martas/threshold.cfg')
            sys.exit()
        elif opt in ("-m", "--configfile"):
            configfile = arg
            print ("Getting all parameters from configuration file: {}".format(configfile))
            conf = GetConf2(configfile, confdict=conf)
            para = AssignParameterlist(sp.valuenamelist,conf)
        elif opt in ("-D", "--debug"):
            debug = True
        elif opt in ("-T", "--Test"):
            travistestrun = True
            conf = GetConf2(os.path.join('..','conf','threshold.cfg'))
            para = AssignParameterlist(sp.valuenamelist,conf)

        # TODO activate in order to prevent default values
        #if not configfile or conf == {}:
        #    print (' !! Could not read configuration information - aborting')
        #    sys.exit()

    if debug:
        print ("Configuration dictionary: \n{}".format(conf))
        print ("Parameter dictionary: \n{}".format(para))

    if not (len(para)) > 0:
        print ("No parameters given to be checked - aborting")
        sys.exit()


    try:
        martaslogpath = os.path.join(conf.get('martasdir'), 'core')
        sys.path.insert(1, martaslogpath)
        from martas import martaslog as ml
        logpath = conf.get('bufferpath')
    except:
        print ("Could not import martas logging routines - check MARTAS directory path")

    # For each parameter
    for i in range(0,1000):
            valuedict = para.get(str(i),{})
            content = ''
            if not valuedict == {}:
                if debug:
                    print ("Checking parameterset {}".format(i))
                data = DataStream()
                testvalue = None
                evaluate = {}

                # Obtain a magpy data stream of the respective data set
                if debug:
                    print ("Accessing data from {} at {}: Sensor {} - Amount: {} sec".format(conf.get('source'),conf.get('bufferpath'),valuedict.get('sensorid'),valuedict.get('timerange') ))

                (data,msg1) = GetData(conf.get('source'), conf.get('bufferpath'), conf.get('database'), conf.get('dbcredentials'), valuedict.get('sensorid'),valuedict.get('timerange'), debug=debug , startdate=conf.get('startdate') )
                (testvalue,msg2) = GetTestValue( data, valuedict.get('key'), valuedict.get('function'), debug=debug) # Returns comparison value(e.g. mean, max etc)
                if not testvalue and travistestrun:
                    print ("Testrun for parameterset {} OK".format(i))
                elif not testvalue:
                    print ("Please check your test data set... are bufferfiles existing? Is the sensorid correct?")
                elif is_number(testvalue):
                    (evaluate, msg) = CheckThreshold(testvalue, valuedict.get('value'), valuedict.get('state'), debug=debug) # Returns statusmessage
                    if evaluate and msg == '':
                        content = InterpreteStatus(valuedict,debug=debug)
                        # Perform the switch and add "switch on/off" to content
                        if not valuedict.get('switchcommand') in ['None','none',None]:
                            if debug:
                                print ("Found switching command ... the serial command will eventually be sent (if not done already) after checking all other commands")
                            content = '{} - switch: {}'.format(content, valuedict.get('switchcommand'))
                            # remember the switching command and only issue it if statusdict is changing
                    elif not msg == '':
                        content =  msg
                    else:
                        content = ''
                else:
                    content = msg1+' - '+msg2

                if content:
                    statuskeylist.append('Sensor {} and key {}'.format(valuedict.get('sensorid'),valuedict.get('key')))
                    statusdict['Sensor {} and key {}'.format(valuedict.get('sensorid'),valuedict.get('key'))] = content

                if debug:
                    print ("Finished parameterset {}".format(i))


    if conf.get('reportlevel') == 'full':
        # Get a unique status key list:
        statuskeylist = list(dict.fromkeys(statuskeylist))
        for elem in statuskeylist:
            cont = statusdict.get(elem,'')
            if cont == '':
                statusdict[elem] = "Everything fine"

    if debug:
        print ("Statusdict: {}".format(statusdict))

    receiver = conf.get('notification')
    cfg = conf.get('notificationconfig')
    logfile = conf.get('logfile')

    if travistestrun:
        print ("Skipping send routines in test run - finished successfully")
        sys.exit()

    if debug:
        print ("New notifications will be sent to: {} (Config: {})".format(receiver,cfg))

    martaslog = ml(logfile=logfile,receiver=receiver)
    if receiver == 'telegram':
        martaslog.telegram['config'] = cfg
    elif receiver == 'email':
        martaslog.email['config'] = cfg

    changes = martaslog.msg(statusdict)

    if not len(changes) > 0:
        print ("Nothing to report - threshold check successfully finished")

    for element in changes:
        line = changes.get(element)
        if debug:
            print ("Changes affecting:", element)
        l = line.split('switch:')
        if len(l) == 2:
            print (" ... now dealing with switching serial command:")
            comm = l[1].strip()
            script = os.path.join(conf.get('martasdir'),'app','ardcomm.py')
            pythonpath = sys.executable
            arg1 = "-c {}".format(comm)
            arg2 = "-p {}".format(conf.get('port'))
            arg3 = "-b {}".format(conf.get('baudrate'))
            arg4 = "-a {}".format(conf.get('parity'))
            arg5 = "-y {}".format(conf.get('bytesize'))
            arg6 = "-s {}".format(conf.get('stopbits'))
            arg7 = "-t {}".format(conf.get('timeout'))
            #arg8 = "-e {}".format(conf.get('eol')) # not used so far

            command = "{} {} {} {} {} {} {} {} {}".format(pythonpath,script,arg1, arg2, arg3, arg4, arg5, arg6, arg7) ## To be checked
            command = "{} {} {}".format(pythonpath,script,arg1)
            if debug:
                print (" ... sending {}".format(command))


            try:
                import subprocess
                p = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
                (output, err) = p.communicate()
                mesg = "{}".format(output)
            except subprocess.CalledProcessError:
                mesg = "threshold: sending command didn't work"
            except:
                mesg = "threshold: sending command problem"

            print (mesg)

            print (" ... success")
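
The usual entry point for this script, assuming the function above lives in threshold.py (sys is already imported there):

if __name__ == "__main__":
    main(sys.argv[1:])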
Example #6
def readJSON(filename, headonly=False, **kwargs):
    """
    Reading JSON format data.
    """
    stream = DataStream()
    header = {}
    array = [[] for key in KEYLIST]

    with open(filename, 'r') as jsonfile:
        dataset = json.load(jsonfile)
        loggerlib.info('Read: %s, Format: %s ' % (filename, "JSON"))

        fillkeys = ['var1', 'var2', 'var3', 'var4', 'var5', 'x', 'y', 'z', 'f']
        datakeys = dataset[0]
        keydict = {}

        for i, key in enumerate(datakeys):
            if 'time' in key:
                keydict[i] = 'time'
            elif key == 'density':
                keydict[i] = 'var1'
                fillkeys.pop(fillkeys.index('var1'))
            elif key == 'speed':
                keydict[i] = 'var2'
                fillkeys.pop(fillkeys.index('var2'))
            elif key == 'temperature':
                keydict[i] = 'var3'
                fillkeys.pop(fillkeys.index('var3'))
            elif 'bx' in key.lower():
                keydict[i] = 'x'
                fillkeys.pop(fillkeys.index('x'))
            elif 'by' in key.lower():
                keydict[i] = 'y'
                fillkeys.pop(fillkeys.index('y'))
            elif 'bz' in key.lower():
                keydict[i] = 'z'
                fillkeys.pop(fillkeys.index('z'))
            elif 'bt' in key.lower():
                keydict[i] = 'f'
                fillkeys.pop(fillkeys.index('f'))
            else:
                try:
                    keydict[i] = fillkeys.pop(0)
                except IndexError:
                    loggerlib.warning(
                        "CAUTION! Out of available keys for data. {} will not be contained in stream."
                        .format(key))
                    print(
                        "CAUTION! Out of available keys for data. {} will not be contained in stream."
                        .format(key))

            if 'time' in key:
                data = [
                    date2num(testTimeString(str(x[i]))) for x in dataset[1:]
                ]
            else:

                data = [
                    np.nan if x[i] is None else float(x[i])
                    for x in dataset[1:]
                ]
            array[KEYLIST.index(keydict[i])] = data
            header['col-' + keydict[i]] = key
            header['unit-col-' + keydict[i]] = ''

    for idx, elem in enumerate(array):
        array[idx] = np.asarray(array[idx])

    stream = DataStream([], header, np.asarray(array))

    return stream
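
As the code shows, dataset[0] must hold the column names and the remaining rows the values. A sketch of that layout (as the Python structure json.load would return) and of calling readJSON; the filename is a placeholder:

# [["time_tag", "density", "speed", "temperature"],
#  ["2019-01-01 00:00:00", 4.2, 380.0, 51000],
#  ["2019-01-01 00:01:00", None, 382.5, 52000]]
stream = readJSON("/tmp/solarwind.json")
print(stream.length())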
Example #7
def readNETCDF(filename, headonly=False, **kwargs):
    """
    Reading NetCDF format data.
    To see all attributes of file: print(ncdata.ncattrs())
    """

    stream = DataStream([], {})
    headers = {}
    array = [[] for key in stream.KEYLIST]
    timef = "%Y-%m-%dT%H:%M:%S.%fZ"

    starttime = kwargs.get('starttime')
    endtime = kwargs.get('endtime')

    ncdata = Dataset(filename, 'r')
    filestart = datetime.strptime(ncdata.time_coverage_start, timef)
    fileend = datetime.strptime(ncdata.time_coverage_end, timef)

    # Check if file is within defined time ranges:
    getfile = True
    try:
        if starttime:
            if not filestart >= datetime.date(stream._testtime(starttime)):
                getfile = False
        if endtime:
            if not fileend <= datetime.date(stream._testtime(endtime)):
                getfile = False
    except:
        getfile = True

    # Read data into assigned columns:
    if getfile:
        logger.info("readNETCDF: Reading {}".format(filename))

        if ncdata.program == 'DSCOVR':
            logger.info(
                "readNETCDF: File contains DSCOVR data. Using appropriate keys"
            )
            KEYDICT = DSCOVR_KEYDICT

        if ncdata.variables[
                'time'].units == 'milliseconds since 1970-01-01T00:00:00Z':
            array[0] = np.array([
                date2num(datetime.utcfromtimestamp(x / 1000.))
                for x in ncdata.variables['time'][...]
            ])
        else:
            logger.warning(
                "readNETCDF: Could not identify time format. Time array probably incorrect"
            )
            array[0] = ncdata.variables['time'][...]

        for var in ncdata.variables:
            if var in KEYDICT:
                column = ncdata.variables[var]
                coldata = column[...]
                key = KEYDICT[var]
                idx = stream.KEYLIST.index(key)
                # Convert from MaskedArray
                coldata = np.array(coldata, dtype=np.float)
                # Replace masked values with NaNs:
                coldata[np.where(
                    coldata == float(column.missing_value))] = np.NaN

                array[idx] = coldata
                headers['col-' + key] = var
                headers['unit-col-' + key] = ncdata.variables[var].units

        # Fill in additional header data:
        for attr in ncdata.ncattrs():
            headers[attr] = getattr(ncdata, attr)

    ncdata.close()
    return DataStream([], headers, np.asarray(array))
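
A minimal usage sketch for readNETCDF; the filename is a placeholder and the time range is passed through the starttime/endtime kwargs handled above:

stream = readNETCDF("/tmp/dscovr_plasma.nc",
                    starttime="2019-01-01", endtime="2019-01-02")
print(stream.length())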
Example #8
def main(argv):
    path = ''
    startdate = ''
    enddate = ''
    conf = ''
    dummy = DataStream()
    destination = ''
    global energylist
    global isotopelst
    global colorlst
    global intervallist
    name = "{}-Projects-gamma".format(sn)
    #global roi
    try:
        opts, args = getopt.getopt(argv, "hp:b:e:m:d:", [
            "path=",
            "begin=",
            "end=",
            "config=",
            "destination=",
        ])
    except getopt.GetoptError:
        print(
            'gamma.py -p <path> -b <begin> -e <end> -m <config> -d <destination>'
        )
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- gamma.py analyses gamma data  --')
            print('-------------------------------------')
            print('Usage:')
            print(
                'gamma.py -p <path> -b <begin> -e <end> -m <config> -d <destination>'
            )
            print('-------------------------------------')
            print('Options:')
            print(
                '-p            : path to gamma spectral data like "/home/max/myarchive"'
            )
            print('-b            : begin: default yesterday')
            print('-e            : end: default = datetime.utcnow()')
            print('-d            : path to store data')
            print('-m            : config file')
            print('-------------------------------------')
            print('Example:')
            print(
                'python gamma.py /media/Samsung/Observatory/data/ -b "2012-06-01" -m myconf.cfg'
            )
            sys.exit()
        elif opt in ("-p", "--path"):
            path = arg
        elif opt in ("-b", "--begin"):
            startdate = arg
        elif opt in ("-e", "--end"):
            enddate = arg
        elif opt in ("-m", "--config"):
            conf = arg
        elif opt in ("-d", "--destination"):
            destination = arg

    if path == '':
        path = r"/home/leon/CronScripts/MagPyAnalysis/RadonAnalysis/ScriptsThemisto"

        #print ('Specify a path to gamma data:')
        #print ('-- check gamma.py -h for more options and requirements')
        #sys.exit()

    if conf == '':
        print('Specify a configuration file:')
        print('-- check gamma.py -h for more options and requirements')
        sys.exit()

    # Test time
    if not startdate == '':
        starttime = dummy._testtime(startdate)
        print(starttime)
    else:
        starttime = datetime.utcnow() - timedelta(days=2)
    if not enddate == '':
        endtime = dummy._testtime(enddate)
    else:
        endtime = datetime.utcnow()

    # Read Config data
    confdict = acs.GetConf(conf)
    #print (confdict)

    debug = False
    if confdict.get('debug') in ['True', 'true']:
        debug = True

    if not confdict.get('plots') in ['Random', 'random']:
        pl = confdict.get('plots').split(',')
        try:
            plotselection = [int(el) for el in pl]
        except:
            plotselection = []
    else:
        plotselection = 'random'
    print("SELECTED for plotting:", plotselection)
    graphdir = confdict.get('graphdir')
    if not os.path.exists(graphdir):
        os.makedirs(graphdir)

    # Get analysis data
    ok = True
    if ok:
        energylist = [int(el) for el in confdict.get('energies').split(',')]
        if debug:
            print("E:", energylist)
        isotopelst = confdict.get('isotops').split(',')
        if debug:
            print("Isotops:", isotopelst)
        colorlst = confdict.get('colors').split(',')
        if debug:
            print("Colors:", colorlst)
        intervallist = [int(el) for el in confdict.get('intervals').split(',')]
        if debug:
            print("Intervals:", intervallist)
        roi = []

        roistrtmp = confdict.get('roi')
        roistr = re.sub(r"\[[^]]*\]", lambda x: x.group(0).replace(',', ';'), roistrtmp)

        for el in roistr.split(','):
            el = el.strip()
            if not el.startswith('['):
                roi.append(int(el))
            else:
                lelt = el.replace('[', '').replace(']', '')
                lel = [int(ele) for ele in lelt.split(';')]
                roi.append(lel)
        if debug:
            print("Rois:", roi)
        confdict['roi'] = roi

        # debug shortcut disabled - it bypassed the error handling below
        #stream = analyse_mca(path,
        #                     startdate=starttime,
        #                     enddate=endtime,
        #                     config=confdict)
        #sys.exit()
        try:
            stream = analyse_mca(path,
                                 startdate=starttime,
                                 enddate=endtime,
                                 config=confdict)

            stream.header['SensorSerialNum'] = confdict.get('sensorserial', '')
            stream.header['SensorDataLogger'] = confdict.get(
                'sensordatalogger', '')
            stream.header['SensorDataLoggerSerialNum'] = confdict.get(
                'sensordataloggerserialnum', '')
            stream.header['SensorName'] = confdict.get('sensorname', '')
            stream.header['SensorID'] = "{}_{}_0001".format(
                confdict.get('sensorname', 'None'),
                confdict.get('sensorserial', '12345'))
            stream.header['DataID'] = "{}_{}_0001_0001".format(
                confdict.get('sensorname', 'None'),
                confdict.get('sensorserial', '12345'))
            stream.header['StationID'] = confdict.get('StationID', 'WIC')
            print(stream.length())

            print("writing data")
            if destination:
                stream.write(destination,
                             coverage='year',
                             mode="replace",
                             format_type='PYSTR',
                             filenamebegins='{}_'.format(
                                 stream.header['SensorID']),
                             dateformat='%Y')
            #stream.write(destination, format_type='PYASCII', filenamebegins='{}_'.format(stream.header['SensorID']))

            statusmsg[name] = 'gamma analysis successfully finished'
        except:
            statusmsg[name] = 'gamma analysis failed'
        martaslog = ml(logfile=logpath, receiver='telegram')
        martaslog.telegram['config'] = '/etc/martas/telegram.cfg'
        martaslog.msg(statusmsg)
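
The roi parsing above turns a flat configuration string into a mixed list of single channels and [start, end] windows: commas inside brackets are first swapped for semicolons so the outer split stays intact. A self-contained sketch using the example ROI values that appear as a comment in the following example:

import re

roistrtmp = "63,114,231,291,[197,398]"   # as it could appear behind 'roi :' in the config
roistr = re.sub(r"\[[^]]*\]", lambda x: x.group(0).replace(',', ';'), roistrtmp)
roi = []
for el in roistr.split(','):
    el = el.strip()
    if not el.startswith('['):
        roi.append(int(el))
    else:
        lelt = el.replace('[', '').replace(']', '')
        roi.append([int(ele) for ele in lelt.split(';')])
print(roi)   # -> [63, 114, 231, 291, [197, 398]]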
Example #9
def analyse_mca(path, startdate=None, enddate=None, config={}):
    """
    DESCRIPTION:
    Analyze MCA: reads spectral data files with a time stamp in the filename (e.g. hour)

    RETURN:
    DataStream

    config contains:
    filename="Spectral_",
    filedate="", 
    roi
    sensorname  :  xx
    sensorid  :  xx
    voltage  :  720  # -> datatype for eventually new datarevision
    graphdir  :  /path
    plots  :  random  # selects 2 random time steps
    plots  :  427760  # list of filenumbers
    notes  :  20181118T000000,20181122T235900,Testlauf
    notes  :  20181123T000000,current,Active
    notes  :  20181118T000000,20181122T235900,background
    roi   : 
    energies  : 
    isotops  :    
    element  :  
    
    """
    if not startdate:
        startdate = datetime.utcnow() - timedelta(days=1)
    if not enddate:
        enddate = datetime.utcnow()

    #analyzemca = False
    #if analyzemca:
    # Filter environment data
    # #######################
    filterenv = False
    backgrdend = 0
    accumulationend = 0

    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # IMPORTANT: check times !!!!
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    #path = r"/home/leon/CronScripts/MagPyAnalysis/RadonAnalysis/ScriptsThemisto"
    path = path
    #namedummy = "Spectral_"
    namedummy = config.get('filename')
    resolution = config.get('filedate')

    numlist, dirs = NumList(path, namedummy)
    #print (dirs)

    print("limit filerange by starttime and endtime")
    # min and max number within given timerange
    if resolution in ['Day', 'day']:
        div = 86400.
    elif resolution in ['Minute', 'minute', 'minutes']:
        div = 60.
    else:  # hour as default
        div = 3600.

    mintime = float(startdate.strftime("%s")) / div
    maxtime = float(enddate.strftime("%s")) / div
    validrange = [int(mintime), int(maxtime)]
    filerange = [[el, i] for i, el in enumerate(numlist)
                 if el in range(min(validrange), max(validrange))]

    if not len(filerange) > 0:
        print("Did not find any spectrum files within the given time range")
        return DataStream()

    # get config data
    if config.get('plots') in ['Random', 'random']:
        # select one diagram randomly
        import random
        plotselection = [random.choice(filerange)]
    if not config.get('plots') in ['Random', 'random']:
        pl = config.get('plots').split(',')
        try:
            plotselection = [int(el) for el in pl]
        except:
            plotselection = []
    roi = config.get('roi')

    dataarray = [[] for i in range(len(roi) + 1)]
    # Cycle through channels
    validfilerange = []

    print(
        "# -------------------------------------------------------------------------"
    )
    print("# A. Main prog")
    print(
        "# -------------------------------------------------------------------------"
    )
    print("# Cycling through all spectrograms")
    print(filerange)

    for eleme in filerange:
        # Define file name
        i = eleme[0]
        idx = eleme[1]
        name = (namedummy + "%06i.Chn") % (i)
        print(
            "# -------------------------------------------------------------------------"
        )
        print(" - Analyzing {}".format(name))
        print(
            "# -------------------------------------------------------------------------"
        )
        filename = os.path.join(dirs[idx], name)
        #test=True
        #if test:
        try:
            temp = open(filename, 'rb')
            line1 = temp.read(32)
            data1 = struct.unpack("<bb3H2L8c4c2H", line1)
            nchn = data1[-1]
            line2 = temp.read(nchn * 4)
            data2 = struct.unpack("<%sL" % nchn, line2)
            # put zero values in front so that channel numbers correspond to indices
            data = [0] * 7
            data = data + list(data2[6:])
            data2 = np.asarray(data)

            #roi = [63, 114, 231, 291, [197,398]]
            testplot = False
            if testplot:
                print(" - Raw data plot")
                # Determine the following channel energy relation from each analysis
                fig, ax = plt.subplots(1, 1)
                ax.set_yscale('log')
                ax.set_xlim([0, 1050])
                ax.plot(range(0, len(data2)), data2, '-')
                plt.xlabel("channel []")
                plt.ylabel("counts per hour [1/h]")
                plt.grid()
                plt.show()

            print(
                "# -------------------------------------------------------------------------"
            )
            print(" - Analysis of spectrum")

            #try:
            if i in plotselection:
                print("  --> Plotting")
                #result[str(i)] = singlespecanalysis(data2,roi=roi,plot=True,name=str(i))
                result = singlespecanalysis(data2, roi=roi, plot=True, name=str(i))
                plt.show()
            else:
                #result[str(i)] = singlespecanalysis(data2,roi=roi,plot=False,name=str(i))
                result = singlespecanalysis(data2, roi=roi, plot=False, name=str(i))
            #except:
            #    singlespecanalysis(data2,roi=roi,plot=False,name=str(i))

            print(
                "# -------------------------------------------------------------------------"
            )
            print(" - Extracting energy levels")

            dataarray[0].append(
                date2num(datetime.utcfromtimestamp(int(i) * 3600.)))
            for idx, elem in enumerate(roi):
                # exec() does not bind local names in Python 3 - assign directly
                roiname = str(roi[idx])
                if isinstance(elem, (list, )):
                    roiname = roiname.replace("[", "").replace("]", "").replace(", ", "-")
                #print (idx,roiname)
                dataarray[idx + 1].append(result[roiname][3])
                #exec("liste{}.append(result[roiname][3])".format(idx+1))

            listemca.append(result[str(roi[3])][2])
            listesca.append(result[str(roi[3])][5])
            listeshift.append(result[str(roi[3])][6])

            # Add flags to the datastream
            # Notes should be converted to flags
        except:
            print("----------------------------")
            print("Failed analysis for {}!".format(filename))

    print(
        "# -------------------------------------------------------------------------"
    )
    print("# Creating summary and MagPy file for storage")

    starray = [np.asarray([]) for el in KEYLIST]
    for idx, elem in enumerate(dataarray):
        starray[idx] = np.asarray(elem)
    #dataarray = np.asarray([np.asarray(elem) for elem in dataarray])
    #print (dataarray)

    print(KEYLIST[1:len(roi) + 1])

    stream = DataStream()
    stream.ndarray = np.asarray(starray)
    #stream.header = header
    stream = stream.sorting()
    stream.header['SensorElements'] = ",".join(
        ["Counts{}".format(elem) for elem in energylist])
    stream.header['SensorKeys'] = KEYLIST[1:len(roi) + 1]

    return stream
    """
Example #10
def CheckMARCOS(db,threshold=600, statusdict={},jobname='JOB',excludelist=[],acceptedoffsets={},debug=False):

    testst = DataStream()
    offset = {}
    testname = '{}-DBactuality'.format(jobname)
    cursor = db.cursor()
    ok = True

    if debug:
        print ("1. Get all tables")
        print ("-----------------------------------")
    tablessql = 'SHOW TABLES'
    try:
        cursor.execute(tablessql)
    except mpdb.mysql.IntegrityError as message:
        print (message)
        ok = False
    except mpdb.mysql.Error as message:
        print (message)
        ok = False
    except:
        print ('check table: unknown error')
        ok = False

    if ok:
        tables = cursor.fetchall()
        tables = [el[0] for el in tables]
        if not len(tables) > 0:
            print ('check table: no tables found - stopping')
            ok = False
    else:
        print ('check table: aborting')
        #cursor.close()
        ok = False

    if ok:
        if debug:
            print ("2. Extract tables to be examined")
            print ("-----------------------------------")
        # get BLV tables
        blv = [el for el  in tables if el.startswith('BLV')]
        excludelist.extend(blv)
        if debug:
            print ("Data to be excluded: {}".format(excludelist))
        tables = [el for el in tables if not el in excludelist]

    if ok:
        if debug:
            print ("3. Delete any existing timestamps which are in the future")
            # classic problem of IWT
            print ("-----------------------------------")
        delsql = "DELETE FROM IWT_TILT01_0001_0001 WHERE time > NOW()"
        if ok:
            try:
                cursor.execute(delsql)
            except mpdb.mysql.IntegrityError as message:
                print (' -- check: {}'.format(message))
            except mpdb.mysql.Error as message:
                print (' -- check: {}'.format(message))
            except:
                print (' -- check: unknown error')
        # possibly an additional execute is necessary here

    if ok:
        if debug:
            print ("4. Getting last input in each table")
            print ("-----------------------------------")
        for table in tables:
            if debug:
                print (' -> running for {}'.format(table))
            lastsql = 'SELECT time FROM {} ORDER BY time DESC LIMIT 1'.format(table)
            try:
                cursor.execute(lastsql)
            except mpdb.mysql.IntegrityError as message:
                print (' -- check table: {}'.format(message))
            except mpdb.mysql.Error as message:
                print (' -- check table: {}'.format(message))
            except:
                print (' -- check table: unknown error')
            value = cursor.fetchall()
            try:
                lasttime = value[0][0]
                timetest = True
            except:
                timetest = False
                pass
            if timetest:
                lastt = testst._testtime(lasttime)
                # Get difference to current time
                current = datetime.utcnow()
                tdiff = np.abs((current-lastt).total_seconds())
                offset[table] = tdiff
                if debug:
                    print ("Difference: {}".format(tdiff))

    if ok:
        if debug:
            print ("5. Check threshold information")
            print ("-----------------------------------")
        statusdict[testname] = 'possible'
        for el in offset:
            # determine threshold
            usedthreshold = threshold
            name = "{}-{}".format(testname,el.replace('_',''))
            for elem in acceptedoffsets:
                if el.find(elem) > -1:
                    usedthreshold = acceptedoffsets[elem]
            if offset[el] > usedthreshold:
                if debug:
                    print ("{} : data too old by {} seconds".format(el,offset[el]))
                statusdict[name] = 'latest input older than {} sec'.format(usedthreshold)
            else:
                statusdict[name] = 'actual'
    else:
        statusdict[testname] = 'failure'
    cursor.close()

    return statusdict
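
A minimal sketch of calling CheckMARCOS; it assumes db is an open MySQL connection of the kind mpdb wraps (driver, credentials and database name are placeholders):

import MySQLdb as mysql      # assumption: the driver behind the MARCOS database

db = mysql.connect(host="localhost", user="cobs", passwd="secret", db="cobsdb")
status = CheckMARCOS(db, threshold=600, jobname='MARCOS', debug=True)
print(status)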
Example #11
def readJSON(filename, headonly=False, **kwargs):
    """
    Reading JSON format data.
    """
    stream = DataStream()
    array = [[] for key in KEYLIST]

    with open(filename, "r") as jsonfile:
        dataset = json.load(jsonfile)
        loggerlib.info("Read: %s, Format: %s " % (filename, "JSON"))

        fillkeys = ["var1", "var2", "var3", "var4", "var5", "x", "y", "z", "f"]
        datakeys = dataset[0]
        keydict = {}

        for i, key in enumerate(datakeys):
            if "time" in key:
                keydict[i] = "time"
            elif key == "density":
                keydict[i] = "var1"
                fillkeys.pop(fillkeys.index("var1"))
            elif key == "speed":
                keydict[i] = "var2"
                fillkeys.pop(fillkeys.index("var2"))
            elif key == "temperature":
                keydict[i] = "var3"
                fillkeys.pop(fillkeys.index("var3"))
            elif "bx" in key.lower():
                keydict[i] = "x"
                fillkeys.pop(fillkeys.index("x"))
            elif "by" in key.lower():
                keydict[i] = "y"
                fillkeys.pop(fillkeys.index("y"))
            elif "bz" in key.lower():
                keydict[i] = "z"
                fillkeys.pop(fillkeys.index("z"))
            elif "bt" in key.lower():
                keydict[i] = "f"
                fillkeys.pop(fillkeys.index("f"))
            else:
                try:
                    keydict[i] = fillkeys.pop(0)
                except IndexError:
                    loggerlib.warning(
                        "CAUTION! Out of available keys for data. {} will not be contained in stream.".format(key)
                    )
                    print("CAUTION! Out of available keys for data. {} will not be contained in stream.".format(key))
                    continue

            if "time" in key:
                data = [date2num(testTimeString(str(x[i]))) for x in dataset[1:]]
            else:
                data = [np.nan if x[i] is None else float(x[i]) for x in dataset[1:]]
            array[KEYLIST.index(keydict[i])] = data
            stream.header["col-" + keydict[i]] = key
            stream.header["unit-col-" + keydict[i]] = ""

    for idx, elem in enumerate(array):
        array[idx] = np.asarray(array[idx])

    stream = DataStream([], stream.header, np.asarray(array))

    return stream
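A minimal usage sketch for readJSON, assuming the magpy names used above (DataStream, KEYLIST, date2num, testTimeString, loggerlib, np) are importable in the same module; the file name and columns below are hypothetical:

import json

# hypothetical solar-wind style file: first row holds column names, following rows hold values
rows = [["time_tag", "density", "speed", "temperature"],
        ["2023-01-01 00:00:00", "4.2", "412.0", "65000"],
        ["2023-01-01 00:01:00", None, "415.3", "66000"]]
with open("plasma-test.json", "w") as jsonfile:
    json.dump(rows, jsonfile)

stream = readJSON("plasma-test.json")
print(stream.length()[0], stream.header.get("col-var2"))   # expected: 2 speed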
Exemple #12
0
def main(argv):

    #para = sp.parameterdict
    #conf = sp.configdict
    para = {}
    conf = {}
    # necessary configs (may be overwritten):
    conf['statusfile'] = '/var/log/magpy/statusfile.log'
    debug = False
    configfile = None
    statusdict = {}
    statuskeylist = []
    MachineToReset = None
    ListMachine = False

    usagestring = 'statemachine.py -h <help> -m <configpath> [-l] [-r statemachinenumber] [-U]'
    try:
        opts, args = getopt.getopt(argv, "hm:Ur:l", ["configpath=", "reset=", "debug"])
    except getopt.GetoptError:
        print('Check your options:')
        print(usagestring)
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('------------------------------------------------------')
            print('Usage:')
            print(usagestring)
            print('------------------------------------------------------')
            print('Options:')
            print('-h            help')
            print('-l            List states of all state machines')
            print(
                '-r nr         Reset state machine number nr into initial state'
            )
            print('-m            Define the path for the configuration file.')
            print(
                '              Please note: a configuration file is obligatory'
            )
            print('              ----------------------------')
            print('              configurationfile')
            print('              ----------------------------')
            print('              statemachine.cfg: (looks like)')
            print('              # MARTAS directory')
            print('              martasdir            :   /home/cobs/MARTAS/')
            print('              # Define data source (file, db, ...)')
            print('              source               :   file')
            print(
                '              # If source = db then define data base credentials created by addcred (MARTAS)'
            )
            print('              dbcredentials        :   None')
            print(
                '              # If source = file define the MARTAS buffer base path'
            )
            print('              bufferpath           :   /srv/mqtt/')
            print(
                '              # statusfile (a json style dictionary, which contains states)'
            )
            print(
                '              statusfile           :   /var/log/magpy/status.log'
            )
            print('              # Path of mail config file')
            print('              emailconfig : /etc/martas/mail.cfg')
            print(
                '              # serial communication for switch commands (based on ardcomm.py (MARTAS/app))'
            )
            print('              serialcfg            :   None')
            print(
                "              #Nr. : 'sensorid';'timerange';'key';'value';'function';'operator';'statusmessage';'nextstatus'[;action;argument;action;argument...]"
            )
            print(
                '              # comment: timerange in seconds, statusmessage is until now a dummy'
            )
            print('              status : start')
            print(
                '              1  :  SENSOR1_SERNR_0001;180;t1;20;min;below;;triggered;email;t1 below 20°'
            )
            print(
                '              2  :  SENSOR1_SERNR_0001;180;t1;20;min;below;;triggered;email;t1 below 20°'
            )
            print('              status : triggered')
            print(
                '              1  :  SENSOR1_SERNR_0001;180;t1;21;min;above;;start;email;t1 above 21° - I like to send a lot of emails in spite of hysteresis ;-)'
            )
            print(
                "              2  :  SENSOR1_SERNR_0001;180;t1;20;min;above;;reentered;email;t1 above 20° again - but that doesn't mean everything is fine again..."
            )
            print('              status : reentered')
            print(
                '              2  :  SENSOR1_SERNR_0001;120;t1;20;min;below;;stop;email;it seems like t1 is toggling around 20°C'
            )
            print(
                '              2  :  SENSOR1_SERNR_0001;120;t1;20;average;below;;stop;email;no more email'
            )
            print('              ----------------------------')
            print('')
            print('-r            reset a state machine')
            print('-l            display states')
            print('------------------------------------------------------')
            print('Example:')
            print('   python statemachine.py -m /etc/martas/statemachine.cfg')
            sys.exit()
        elif opt in ("-m", "--configfile"):
            configfile = arg
            if debug:
                print(
                    "Getting all parameters of the state machine from configuration file: {}"
                    .format(configfile))
            (conf, para) = readConfig(configfile)
        elif opt in ("-r", "--reset"):
            if is_number(arg):
                MachineToReset = arg
            else:
                print("--reset must_be_a_number")
                sys.exit()
        elif opt in ("-l"):
            ListMachine = True
        elif opt in ("-U", "--debug"):
            debug = True

    if debug:
        print("Configuration dictionary: \n{}".format(conf))
        print("Parameter dictionary: \n{}".format(para))

    if not (len(para)) > 0:
        print("No parameters given to be checked - aborting")
        sys.exit()

    try:
        martaslogpath = os.path.join(conf.get('martasdir'), 'core')
        sys.path.insert(1, martaslogpath)
        import martas
        logpath = conf.get('bufferpath')
    except:
        print(
            "Could not import martas logging routines - check MARTAS directory path"
        )

    statusdict = {}
    if os.path.isfile(conf['statusfile']):
        # read the log if it exists and update changed information if needed
        # return changes
        with open(conf['statusfile'], 'r') as file:
            statusdict = json.load(file)
        if debug:
            print("Statusfile {} loaded".format(conf['statusfile']))

    if ListMachine:
        if statusdict == {}:
            print('no states stored yet - all machines in start state')
        else:
            for state in statusdict:
                print(state + ": " + statusdict[state]['status'])
        exit()

    if MachineToReset and not statusdict == {}:
        if MachineToReset in statusdict:
            del statusdict[MachineToReset]
            print(MachineToReset + " set to 'start'")
        else:
            print(MachineToReset + " not found")
        with open(conf['statusfile'], 'w') as file:
            if debug:
                print('writing to ' + conf['statusfile'] + ' :')
                print(statusdict)
            file.write(
                json.dumps(statusdict))  # use `json.loads` to do the reverse
        exit()

    # For each machine
    for i in range(0, 1000):
        valuedict = {}
        values = []
        if not str(i) in statusdict and str(i) in para['start']:
            # machine is not yet in the statusfile, let's add it to the dict
            statusdict[str(i)] = {}
            statusdict[str(i)]['status'] = 'start'
            #laststatusdict[str(i)]['sensorid'] = para['start'][str(i)]['sensorid']
            #laststatusdict[str(i)]['key'] = para['start'][str(i)]['key']
        if str(i) in statusdict:
            status = statusdict[str(i)]['status']
            # TODO handle states deleted from the config file!
            values = para[status].get(str(i), [])
        if not values == []:
            if debug:
                print("Checking state machine {}".format(i))
            data = DataStream()

            # Obtain a magpy data stream of the respective data set

            for valuedict in values:
                if debug:
                    print(
                        "Accessing data from {} at {}: Sensor {} - Amount: {} sec"
                        .format(conf.get('source'), conf.get('bufferpath'),
                                valuedict.get('sensorid'),
                                valuedict.get('timerange')))
                (data, msg1) = GetData(conf.get('source'),
                                       conf.get('bufferpath'),
                                       conf.get('database'),
                                       conf.get('dbcredentials'),
                                       valuedict.get('sensorid'),
                                       valuedict.get('timerange'),
                                       debug=debug,
                                       startdate=conf.get('startdate'))
                testvalue = None
                if data._get_key_headers() == []:
                    # there are no keys in the data
                    if debug:
                        print('no data for testvalue')
                else:
                    (testvalue, msg2) = GetTestValue(
                        data,
                        valuedict.get('key'),
                        valuedict.get('function'),
                        debug=debug
                    )  # Returns comparison value(e.g. mean, max etc)
                if debug:
                    print("testvalue is {}".format(testvalue))
                if is_number(testvalue):
                    (evaluate, msg) = CheckThreshold(
                        testvalue,
                        valuedict.get('value'),
                        valuedict.get('operator'),
                        debug=debug)  # Returns statusmessage
                    if evaluate and msg == '':
                        # criteria are met - do something
                        # change status
                        if debug:
                            print("changing status of machine " + str(i) +
                                  " from")
                            print(statusdict[str(i)]['status'])
                            print("to")
                            print(valuedict['nextstatus'])
                        statusdict[str(i)]['status'] = valuedict['nextstatus']
                        if 'action' in valuedict:
                            for action in valuedict['action']:
                                if action['action'] == 'email':
                                    dic = readConfigFromFile(
                                        conf.get('emailconfig'))
                                    dic['Text'] = action['argument']
                                    martas.sendmail(dic)
                                # TODO not implemented / not tested
                                if action['action'] == 'telegram':
                                    dic = sm_support.readConfigFromFile(
                                        conf.get('telegramconfig'))
                                    dic['text'] = action['argument']
                                    martas.sendtelegram(dic)
                                if action['action'] == 'switch':
                                    dic = conf
                                    dic['comm'] = action['argument']
                                    martas.sendswitchcommand(dic)

                        # TODO handle content resp. errors
                        content = InterpreteStatus(valuedict, debug=debug)
                        # Perform switch and added "switch on/off" to content
                        if not valuedict.get('switchcommand') in [
                                'None', 'none', None
                        ]:
                            if debug:
                                print(
                                    "Found switching command ... eventually will send serial command (if not done already) after checking all other commands"
                                )
                            content = '{} - switch: {}'.format(
                                content, valuedict.get('switchcommand'))
                            # remember the switching command and only issue it if statusdict is changing
                    elif not msg == '':
                        content = msg
                    else:
                        content = ''
            else:
                #content = msg1+' - '+msg2
                pass

            #if content:
            if 0:
                statuskeylist.append('Sensor {} and key {}'.format(
                    valuedict.get('sensorid'), valuedict.get('key')))
                statusdict['Sensor {} and key {}'.format(
                    valuedict.get('sensorid'), valuedict.get('key'))] = content

            if debug:
                print("Finished state machine {}".format(i))

    with open(conf['statusfile'], 'w') as file:
        if debug:
            print('writing to ' + conf['statusfile'] + ' :')
            print(statusdict)
        file.write(
            json.dumps(statusdict))  # use `json.loads` to do the reverse
    exit()

    if conf.get('reportlevel') == 'full':
        # Get a unique status key list:
        statuskeylist = list(dict.fromkeys(statuskeylist))
        for elem in statuskeylist:
            cont = statusdict.get(elem, '')
            if cont == '':
                statusdict[elem] = "Everything fine"

    if debug:
        print("Statusdict: {}".format(statusdict))

    receiver = conf.get('notification')
    cfg = conf.get('notificationconfig')
    logfile = conf.get('logfile')

    if debug:
        print("New notifications will be send to: {} (Config: {})".format(
            receiver, cfg))

    martaslog = ml(logfile=logfile, receiver=receiver)
    if receiver == 'telegram':
        martaslog.telegram['config'] = cfg
    elif receiver == 'email':
        martaslog.email['config'] = cfg

    changes = martaslog.msg(statusdict)

    if not len(changes) > 0:
        print("Nothing to report - threshold check successfully finished")

    for element in changes:
        line = changes.get(element)
        if debug:
            print("Changes affecting:", element)
        l = line.split('switch:')
        if len(l) == 2:
            print(" ... now dealing with switching serial command:")
            comm = l[1].strip()
            script = os.path.join(conf.get('martasdir'), 'app', 'ardcomm.py')
            pythonpath = sys.executable
            arg1 = "-c {}".format(comm)
            arg2 = "-p {}".format(conf.get('port'))
            arg3 = "-b {}".format(conf.get('baudrate'))
            arg4 = "-a {}".format(conf.get('parity'))
            arg5 = "-y {}".format(conf.get('bytesize'))
            arg6 = "-s {}".format(conf.get('stopbits'))
            arg7 = "-t {}".format(conf.get('timeout'))
            #arg8 = "-e {}".format(conf.get('eol')) # not used so far

            command = "{} {} {} {} {} {} {} {} {}".format(
                pythonpath, script, arg1, arg2, arg3, arg4, arg5, arg6,
                arg7)  ## To be checked
            command = "{} {} {}".format(pythonpath, script, arg1)
            if debug:
                print(" ... sending {}".format(command))

            try:
                import subprocess
                p = subprocess.Popen(command,
                                     stdout=subprocess.PIPE,
                                     shell=True)
                (output, err) = p.communicate()
                mesg = "{}".format(output)
            except subprocess.CalledProcessError:
                mesg = "threshold: sending command didnt work"
            except:
                mesg = "threshold: sending command problem"

            print(mesg)

            print(" ... success")
Exemple #13
0
def readCOVJSON(filename, headonly=False, **kwargs):
    """
    Reading CoverageJSON format data.

    """
    header = {}
    array = [[] for key in KEYLIST]

    print("Reading coverage json")

    with open(filename, 'r') as jsonfile:
        dataset = json.load(jsonfile)
        loggerlib.info('Read: {}, Format: {} '.format(filename,
                                                      "CoverageJSON"))

    # Extract header and data
    axes = dataset.get("domain").get("axes")
    ranges = dataset.get("ranges")
    parameters = dataset.get("parameters")

    times = axes.get("t").get("values")
    times = [testTimeString(el) for el in times]
    array[0] = date2num(times)
    stream = DataStream([], header, array)

    try:
        stream.header['DataAcquisitionLatitude'] = dataset.get("domain").get(
            "axes").get("x").get("values")
        stream.header['DataAcquisitionLongitude'] = dataset.get("domain").get(
            "axes").get("y").get("values")
        stream.header['DataElevation'] = dataset.get("domain").get("axes").get(
            "z").get("values")
    except:
        pass

    print(dataset.get('context'))

    def addelement(datastream, key, element, elementdict, parameterdict):
        array = np.asarray(elementdict.get('values'))
        datastream = datastream._put_column(array, key)
        datastream.header['col-{}'.format(key)] = element
        datastream.header['unit-col-{}'.format(key)] = parameterdict.get(
            "unit").get("label")

    numcnt = 0
    strcnt = 1
    AVAILKEYS = list(NUMKEYLIST)   # copy so the module-level key list is not modified
    ELEMENTSTODO = []
    fixedgroups = {
        'x': ['x', 'X', 'H', 'I'],
        'y': ['y', 'Y', 'D', 'E'],
        'z': ['z', 'Z'],
        'f': ['f', 'F', 'S'],
        'df': ['g', 'G']
    }
    # Firstly assign data from fixed groups, then fill rest
    for element in ranges:
        print("Dealing with {}".format(element))
        foundgroups = False
        for group in fixedgroups:
            if element in fixedgroups[group]:
                print(" -> adding to {}".format(group))
                addelement(stream, group, element, ranges[element],
                           parameters[element])
                AVAILKEYS = ['USED' if x == group else x for x in AVAILKEYS]
                foundgroups = True
                break
        if not foundgroups:
            ELEMENTSTODO.append(element)

    # Now assign all other elements to appropriate keys
    for element in ELEMENTSTODO:
        print("Now dealing with {}".format(element))
        # assign element to key
        if ranges.get(element).get('dataType') in ['float', 'double', 'int']:
            # get the first key which is not yet used
            index = min(
                [idx for idx, el in enumerate(AVAILKEYS) if not el == 'USED'])
            key = AVAILKEYS[index]
            print(" -> adding to {}".format(key))
            addelement(stream, key, element, ranges[element],
                       parameters[element])
            AVAILKEYS[index] = 'USED'
        else:
            if strcnt <= 4:
                key = "str{}".format(strcnt)
                print(" -> adding to {}".format(key))
                addelement(stream, key, element, ranges[element],
                           parameters[element])
            strcnt += 1
    return stream
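A condensed, self-contained sketch of the key assignment order used above: fixed magnetic groups are claimed first, everything else falls back to the first still-free numeric key. The key list is shortened and the function name is made up for illustration:

NUMKEYS = ['x', 'y', 'z', 'f', 'var1', 'var2']   # shortened stand-in for NUMKEYLIST
fixedgroups = {'x': ['X', 'H'], 'y': ['Y', 'D'], 'z': ['Z'], 'f': ['F', 'S']}

def assign_keys(elements):
    avail = list(NUMKEYS)
    mapping = {}
    todo = []
    # first pass: claim the fixed groups
    for element in elements:
        for key, members in fixedgroups.items():
            if element in members:
                mapping[element] = key
                avail = ['USED' if k == key else k for k in avail]
                break
        else:
            todo.append(element)
    # second pass: hand out the first still-free key
    for element in todo:
        free = [k for k in avail if not k == 'USED']
        if free:
            mapping[element] = free[0]
            avail[avail.index(free[0])] = 'USED'
    return mapping

print(assign_keys(['X', 'Y', 'Z', 'F', 'density', 'speed']))
# expected: {'X': 'x', 'Y': 'y', 'Z': 'z', 'F': 'f', 'density': 'var1', 'speed': 'var2'}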