Example #1
0
def gen_metric_dict():
    """Build a metric-description dict from metrics.csv and cache it as metrics.json.

    Returns a dict keyed by the metric long name; each value carries the
    unit, short metric name, owning asset, a fixed value-generator name
    and the data range with "-" normalized to ":" (e.g. "30-32" -> "30:32").
    """
    metrics = os.path.join(ua.data_dir(), "metrics.csv")
    d = {}
    # The 'with' statement closes the file; no explicit close() is needed.
    with open(metrics, 'rU') as data_file:
        for x in csv.DictReader(data_file):
            m = x["Metric (long name)"]
            d[m] = {
                "unit": x["Unit"],
                "metric": x["Metric (short name)"],
                "asset": x["Asset"],
                # Generator used downstream to synthesize values in range.
                "extend": "random_float_range_select",
                "range": x["Data range"].replace("-", ":"),
            }

    # Persist the dict so later runs can load JSON instead of re-parsing CSV.
    jmetrics = os.path.join(ua.data_dir(), "metrics.json")
    with open(jmetrics, 'w') as mfp:
        json.dump(d, mfp)

    print(d)
    return d
Example #2
0
def gen_metric_dict():
    """Read metrics.csv into a dict keyed by metric long name and cache it as metrics.json.

    Each entry records the unit, short metric name, asset, the value
    generator name and the data range rewritten from "lo-hi" to "lo:hi".
    """
    metrics = os.path.join(ua.data_dir(), "metrics.csv")
    d = {}
    with open(metrics, 'rU') as data_file:
        # Rows are accessed by CSV header name; 'with' handles the close.
        reader = csv.DictReader(data_file)
        for x in reader:
            m = x["Metric (long name)"]
            entry = {}
            entry["unit"] = x["Unit"]
            entry["metric"] = x["Metric (short name)"]
            entry["asset"] = x["Asset"]
            # Name of the generator that synthesizes values within "range".
            entry["extend"] = "random_float_range_select"
            entry["range"] = x["Data range"].replace("-", ":")
            d[m] = entry

    # Cache as JSON so other tools can consume it without the CSV parsing.
    jmetrics = os.path.join(ua.data_dir(), "metrics.json")
    with open(jmetrics, 'w') as mfp:
        json.dump(d, mfp)

    print(d)
    return d
def run():
    """Load configuration and the node file, start the UA server, then hand off to post_run."""
    logger.info("Start running.........")

    confs = get_config()
    serverpath = confs["serverpath"]
    description = confs["description"]
    serverport = confs["serverport"]
    nodefile = confs["nodefile"]
    namespace = confs["namespace"]
    interval = confs["interval"]  # was assigned twice in the original; once is enough
    endpoint = "opc.tcp://0.0.0.0:%s%s" % (serverport, serverpath)

    logger.info("Data File is loaded.")

    # Address-space description in JSON; 'with' closes the file for us.
    nf = os.path.join(uaserver.data_dir(), nodefile)
    with open(nf, 'rU') as fd:
        nodejson = json.load(fd)

    logger.info("Address space is loaded.")

    s, v = server.run(nodejson, endpoint, namespace, description)

    logger.info("UA server is running...")

    post_run(s, v, int(interval))
def run():
    """Load configuration and the node file (with debug diagnostics), start the UA server."""
    logger.info("Start running.........")

    confs = get_config()
    logger.debug("Configuration: %s" % confs)
    serverpath = confs["serverpath"]
    description = confs["description"]
    serverport = confs["serverport"]
    nodefile = confs["nodefile"]
    namespace = confs["namespace"]
    interval = confs["interval"]  # was assigned twice in the original; once is enough
    endpoint = "opc.tcp://0.0.0.0:%s%s" % (serverport, serverpath)

    nf = os.path.join(uaserver.data_dir(), nodefile)
    # Extra existence diagnostics only when debug logging is active.
    if logger.isEnabledFor(logging.DEBUG):
        if not os.path.exists(nf):
            logger.debug("The node file %s does not exist!" % nf)
        else:
            logger.debug("The node file : %s" % nf)

    # 'with' closes the file; the explicit close() was redundant.
    with open(nf, 'rU') as fd:
        nodejson = json.load(fd)

    logger.info("Address space is loaded.")

    s, v = server.run(nodejson, endpoint, namespace, description)

    logger.info("UA server is running...")

    post_run(s, v, int(interval))
Example #5
0
def gen_opc_data(startdate, duration):
    """Generate measurements.csv covering startdate..today in steps of `duration` minutes.

    Each timestamp gets rows from two sources: synthetic random events
    (m_ua.random_events) and one rotated batch of the recorded data log.
    Only measures present in metrics.json are written.
    """
    # Recorded data file; DataChannel rotates through it batch by batch.
    cf = os.path.join(uaserver.data_dir(), "DataLog_2016_01.dat")
    with open(cf, 'rU') as fd:
        odr = uaserver.OrderedDictReader(fd, dialect=csv.excel_tab)
        datach = uaserver.DataChannel([o for o in odr], count=1)

    mf = os.path.join(uaserver.data_dir(), "metrics.json")
    with open(mf, 'rU') as fd:
        metrics = json.load(fd)

    def mtimes():
        # Yield timestamps from startdate up to (but not beyond) today.
        enddate = startdate
        while enddate < datetime.datetime.today():
            yield enddate
            enddate = enddate + datetime.timedelta(minutes=duration)

    evts = m_ua.random_events()
    ekeys = evts.keys()

    out = os.path.join(uaserver.data_dir(), "measurements.csv")
    num = 1
    with open(out, 'w') as fd:
        fd.write("mid,measure_time,measure,asset,metric,value,unit\n")
        for t in mtimes():
            # Synthetic values; the generator is advanced even for measures
            # filtered out below, matching the original draw order.
            for k in ekeys:
                fv = next(evts[k])
                if k in metrics:
                    fd.write("%d,%s,%s,%s,%s,%s,%s\n" %
                             (num, t, k, metrics[k]["asset"],
                              metrics[k]["metric"], fv, metrics[k]["unit"]))
                    num += 1
            # One batch of recorded rows per timestamp.
            for d in next(datach):
                for n, v in d.items():
                    if n in metrics:
                        fd.write("%d,%s,%s,%s,%s,%s,%s\n" %
                                 (num, t, n, metrics[n]["asset"],
                                  metrics[n]["metric"], v, metrics[n]["unit"]))
                        num += 1
Example #6
0
def gen_opc_data(startdate, duration):
    """Write measurements.csv for timestamps startdate..today spaced `duration` minutes apart.

    Combines synthetic random-event values and rotated rows from the
    recorded data log, keeping only measures described in metrics.json.
    """
    cf = os.path.join(uaserver.data_dir(), "DataLog_2016_01.dat")

    # 'with' closes each file; the explicit close() calls were redundant.
    with open(cf, 'rU') as fd:
        odr = uaserver.OrderedDictReader(fd, dialect=csv.excel_tab)
        data = [o for o in odr]
        datach = uaserver.DataChannel(data, count=1)

    mf = os.path.join(uaserver.data_dir(), "metrics.json")
    with open(mf, 'rU') as fd:
        metrics = json.load(fd)

    def mtimes():
        # Timestamps stepping forward until we reach the present.
        enddate = startdate
        while enddate < datetime.datetime.today():
            yield enddate
            enddate = enddate + datetime.timedelta(minutes=duration)

    evts = m_ua.random_events()
    ekeys = evts.keys()

    mf = os.path.join(uaserver.data_dir(), "measurements.csv")
    num = 1
    with open(mf, 'w') as fd:
        fd.write("mid,measure_time,measure,asset,metric,value,unit\n")
        for t in mtimes():
            for k in ekeys:
                # Draw first, filter second — preserves the RNG draw order.
                fv = next(evts[k])
                if k in metrics:
                    fd.write("%d,%s,%s,%s,%s,%s,%s\n" % (num, t, k, metrics[k]["asset"], metrics[k]["metric"], fv, metrics[k]["unit"]))
                    num += 1
            data = next(datach)
            for d in data:
                for n, v in d.items():
                    if n in metrics:
                        fd.write("%d,%s,%s,%s,%s,%s,%s\n" % (num, t, n, metrics[n]["asset"], metrics[n]["metric"], v, metrics[n]["unit"]))
                        num += 1
def run():
    """Load config, build the rotating data channel and node file, then start the UA server."""
    logger.info("Start running.........")

    confs = get_config()
    serverpath = confs["serverpath"]
    description = confs["description"]
    interval = confs["interval"]  # was assigned twice in the original; once is enough
    count = int(confs["count"])
    df = confs["datafile"]
    serverport = confs["serverport"]
    nodefile = confs["nodefile"]
    namespace = confs["namespace"]
    endpoint = "opc.tcp://0.0.0.0:%s%s" % (serverport, serverpath)

    # Rotating channel over the recorded data file feeds the server values.
    cf = os.path.join(uaserver.data_dir(), df)
    with open(cf, 'rU') as fd:
        odr = uaserver.OrderedDictReader(fd, dialect=csv.excel_tab)
        ch = uaserver.DataChannel([o for o in odr], count=count, interval=int(interval))

    logger.info("Data File is loaded.")

    nf = os.path.join(uaserver.data_dir(), nodefile)
    with open(nf, 'rU') as fd:
        nodejson = json.load(fd)

    logger.info("Address space is loaded.")

    s, v = server.run(nodejson, endpoint, namespace, description)

    logger.info("UA server is running...")

    post_run(s, v, ch, int(interval))
Example #8
0
def run():
    """Start the simulated UA server: config, data channel, address space, post-run loop."""
    logger.info("Start running.........")

    confs = get_config()
    serverpath = confs["serverpath"]
    description = confs["description"]
    count = int(confs["count"])
    df = confs["datafile"]
    serverport = confs["serverport"]
    nodefile = confs["nodefile"]
    namespace = confs["namespace"]
    # The original read "interval" twice; a single read suffices.
    interval = confs["interval"]
    endpoint = "opc.tcp://0.0.0.0:%s%s" % (serverport, serverpath)

    cf = os.path.join(uaserver.data_dir(), df)

    # 'with' closes the files; the explicit close() calls were redundant.
    with open(cf, 'rU') as fd:
        odr = uaserver.OrderedDictReader(fd, dialect=csv.excel_tab)
        data = [o for o in odr]
        ch = uaserver.DataChannel(data, count=count, interval=int(interval))

    logger.info("Data File is loaded.")

    nf = os.path.join(uaserver.data_dir(), nodefile)
    with open(nf, 'rU') as fd:
        nodejson = json.load(fd)

    logger.info("Address space is loaded.")

    s, v = server.run(nodejson, endpoint, namespace, description)

    logger.info("UA server is running...")

    post_run(s, v, ch, int(interval))
Example #9
0
def gen():
    """Parse data.xml and print collected measurements as CSV lines on stdout."""
    data = os.path.join(ua.data_dir(), "data.xml")
    # 'with' closes the file; the unused dict and explicit close() were removed.
    with open(data, 'rU') as data_file:
        tree = ElementTree.parse(data_file)

    print("Timestamp,Source,Asset,Metric,Value,Unit")
    for node in tree.iter():
        for n in node:
            for nn in n:
                if nn.tag == "data":
                    # The payload starts after a fixed 27-character envelope
                    # prefix — presumably a header; confirm against the writer.
                    c = convert(nn.text[27:])
                    if "asset" in c:
                        print("%s,%s,%s,%s,%s,%s" % (c["collect_time"], "OPC", c["asset"], c["metric"], c["value"], c["unit"]))
def gen_opc_alert():
    """Return 10-29 rows sampled (with replacement) from the recorded event log."""
    f_alert = os.path.join(ua.data_dir(), "EventLog_2016_01.dat")

    # 'with' closes the file; the explicit close() was redundant.
    with open(f_alert, 'rU') as fd:
        odr = uaserver.OrderedDictReader(fd, dialect=csv.excel_tab)
        data = [o for o in odr]

    dl = len(data)

    # Each pick is independent, so the same row may appear more than once.
    cc = random.randrange(10, 30)
    return [data[random.randrange(0, dl)] for _ in range(cc)]
def gen_opc_alert():
    """Draw a random-sized sample (10-29 rows, duplicates allowed) of the recorded event log."""
    f_alert = os.path.join(ua.data_dir(), "EventLog_2016_01.dat")

    d = []
    with open(f_alert, 'rU') as fd:
        # 'with' handles closing; the explicit close() was redundant.
        odr = uaserver.OrderedDictReader(fd, dialect=csv.excel_tab)
        data = [o for o in odr]

    # Sample with replacement: each row is chosen independently.
    cc = random.randrange(10, 30)
    for _ in range(cc):
        d.append(random.choice(data))

    return d
def run():
    """Entry point for the mi_opcua modular input (simulation mode).

    With CLI arguments, dispatches to the scheme/validation/test helpers.
    Otherwise builds synthetic asset data and alert events from the JSON
    config files and streams them to stdout in modular-input KV format.
    """
    logger.info("Modular Input mi_opcua command: %s" % sys.argv)
    if len(sys.argv) > 1:
        try:
            if sys.argv[1] == "--scheme":
                do_scheme()
            elif sys.argv[1] == "--validate-arguments":
                validate_arguments()
            elif sys.argv[1] == "--test":
                test()
            else:
                usage()
        except Exception as ex:
            logger.critical(ex)
    else:
        all_events = []

        configs = get_config()
        stanza = configs["name"]
        f_asset = os.path.join(ua.data_dir(), configs["assets"])
        f_metric = os.path.join(ua.data_dir(), configs["metrics"])
        f_condition = os.path.join(ua.data_dir(), configs["conditions"])
        f_downtime = os.path.join(ua.data_dir(), "downtime.json")

        # 'with' closes each file; the explicit close() calls were redundant.
        with open(f_metric, 'rU') as fp_m:
            j_metric = json.load(fp_m)

        with open(f_condition, 'rU') as fp_c:
            j_condition = json.load(fp_c)

        with open(f_downtime, 'rU') as fp_d:
            j_downtime = json.load(fp_d)

        evts = mi_ua.random_events()
        t = datetime.datetime.today()

        # Replayed OPC alerts are mixed into the synthetic event stream.
        for opc in gen_opc_alert():
            all_events.append(convert_opc_alert(opc))

        with open(f_asset, 'rU') as fp_a:
            j_asset = json.load(fp_a)

        for a, v in j_asset.items():
            o = odict()
            o["DateTime"] = t
            o["Asset"] = a
            o["Type"] = "Data"
            collected = False
            for m in v["metrics"]:
                if m in evts:
                    collected = True
                    fv = next(evts[m])
                    nm = j_metric[m]["metric"]
                    o[nm] = dict(measure=m,
                                 unit=j_metric[m]["unit"],
                                 optimum=j_metric[m]["optimum"],
                                 value=fv)

            # Only emit events for assets that actually collected data.
            if collected:
                health_score(o, j_asset, j_downtime)
                t = o["DateTime"]

                all_events.append(convert_data(o))

                for alert in gen_alert(o, j_condition, j_metric):
                    all_events.append(convert_data(alert))

        mi.init_stream(sys.stdout)

        # Shuffle so data and alert events interleave realistically.
        random.shuffle(all_events)
        for evt in all_events:
            mi.print_kv_event(stanza, t, evt, sys.stdout)

        mi.fini_stream(sys.stdout)
Example #13
0
def run():
    """Entry point for the mi_opcua modular input (live collection mode).

    With CLI arguments, dispatches to the scheme/validation/test helpers.
    Otherwise connects to the configured OPC UA server, walks the address
    space for measures matching the configured patterns and streams them.
    """
    logger.info("Modular Input mi_opcua command: %s" % sys.argv)
    if len(sys.argv) > 1:
        try:
            if sys.argv[1] == "--scheme":
                do_scheme()
            elif sys.argv[1] == "--validate-arguments":
                validate_arguments()
            elif sys.argv[1] == "--test":
                test()
            else:
                usage()
        except Exception as ex:
            logger.critical(ex)
    else:
        logger.info("Modular Input mi_opcua Starts data collection.")

        configs = get_config()
        stanza = configs["name"]
        patterns = configs["measures"].split(":")
        tout = configs["connection_timeout"].strip()
        spec = configs.get("metrics_spec", "n.a.").strip()
        # Default to a 1-second timeout when none is configured.
        timeout = 1 if len(tout) <= 0 else int(tout)

        conn = configs["connection"]  ## e.g. "opc.tcp://host:49320"

        # Embed credentials in the endpoint URL only when a username is set.
        if "username" in configs:
            username = configs["username"].strip()
            if len(username) > 0:
                password = configs["password"].strip()
                conn = "%s?username=%s&password=%s" % (conn, username,
                                                       password)

        client = Client(conn, timeout=timeout)

        mi.init_stream(sys.stdout)
        try:
            logger.info("Start connecting OPC Server [%s]." % conn)
            client.connect()
            logger.info("OPC Server [%s] is connected." % conn)
            measures = []
            root = client.get_root_node()

            node.collect_measures(measures, patterns, root)

            # Optional per-metric spec file; missing or invalid spec is
            # tolerated on purpose (best-effort load).
            md = {}
            try:
                jm = os.path.join(ua.data_dir(), spec)
                with open(jm, 'r') as mfp:
                    md = json.load(mfp)
            except Exception:
                pass

            for m in measures:
                collect_data(stanza, m[len(m) - 1], spec=md)

        except Exception as ex:
            logger.critical(ex)
        finally:
            mi.fini_stream(sys.stdout)
            logger.info("---- end of opc ua ----")
            client.disconnect()
def run():
    """Entry point for the mi_opcua modular input (simulation mode).

    With CLI arguments, dispatches to the scheme/validation/test helpers.
    Otherwise generates simulated asset data plus replayed OPC alerts and
    streams the shuffled events to stdout in modular-input KV format.
    """
    logger.info("Modular Input mi_opcua command: %s" % sys.argv)
    if len(sys.argv) > 1:
        try:
            if sys.argv[1] == "--scheme":
                do_scheme()
            elif sys.argv[1] == "--validate-arguments":
                validate_arguments()
            elif sys.argv[1] == "--test":
                test()
            else:
                usage()
        except Exception as ex:
            logger.critical(ex)
    else:
        all_events = []

        configs = get_config()
        stanza = configs["name"]
        c_asset = configs["assets"]
        c_metric = configs["metrics"]
        c_condition = configs["conditions"]
        f_asset = os.path.join(ua.data_dir(), c_asset)
        f_metric = os.path.join(ua.data_dir(), c_metric)
        f_condition = os.path.join(ua.data_dir(), c_condition)
        f_downtime = os.path.join(ua.data_dir(), "downtime.json")

        # 'with' closes each file; the explicit close() calls were redundant.
        with open(f_metric, 'rU') as fp_m:
            j_metric = json.load(fp_m)

        with open(f_condition, 'rU') as fp_c:
            j_condition = json.load(fp_c)

        with open(f_downtime, 'rU') as fp_d:
            j_downtime = json.load(fp_d)

        evts = mi_ua.random_events()
        t = datetime.datetime.today()

        # Replayed OPC alerts join the synthetic event stream.
        opc_alerts = gen_opc_alert()
        for opc in opc_alerts:
            all_events.append(convert_opc_alert(opc))

        with open(f_asset, 'rU') as fp_a:
            j_asset = json.load(fp_a)

        for a, v in j_asset.items():
            o = odict()
            o["DateTime"] = t
            o["Asset"] = a
            o["Type"] = "Data"
            collected = False
            for m in v["metrics"]:
                if m in evts:
                    collected = True
                    fv = next(evts[m])
                    nm = j_metric[m]["metric"]
                    o[nm] = dict(measure=m, unit=j_metric[m]["unit"], optimum=j_metric[m]["optimum"], value=fv)

            # Skip assets for which no metric value was collected.
            if collected:
                health_score(o, j_asset, j_downtime)
                t = o["DateTime"]

                all_events.append(convert_data(o))

                alerts = gen_alert(o, j_condition, j_metric)
                for alert in alerts:
                    all_events.append(convert_data(alert))

        mi.init_stream(sys.stdout)

        # Shuffle so data and alert events interleave realistically.
        random.shuffle(all_events)
        for evt in all_events:
            mi.print_kv_event(stanza, t, evt, sys.stdout)

        mi.fini_stream(sys.stdout)
Example #15
0
def run():
    """Entry point for the mi_opcua modular input (live collection, verbose debug).

    With CLI arguments, dispatches to the scheme/validation/test helpers.
    Otherwise connects to the configured OPC UA server (falling back to a
    discovered endpoint on failure) and streams matching measure values.
    """
    logger.debug("Modular Input mi_opcua command: %s" % sys.argv)
    if len(sys.argv) > 1:
        try:
            if sys.argv[1] == "--scheme":
                do_scheme()
            elif sys.argv[1] == "--validate-arguments":
                validate_arguments()
            elif sys.argv[1] == "--test":
                test()
            else:
                usage()
        except Exception as ex:
            logger.critical(ex)
    else:
        logger.debug("Modular Input mi_opcua Starts data collection.")

        configs = get_config()
        logger.debug("Configuration: %s" % configs)
        stanza = configs["name"]
        SP = configs.get("separator", ":")
        patterns = configs["measures"].split(SP)
        tout = configs["connection_timeout"].strip()
        spec = configs.get("metrics_spec", "n.a.").strip()
        # Default to a 1-second timeout when none is configured.
        timeout = 1 if len(tout) <= 0 else int(tout)

        conn = configs["connection"]  ## e.g. "opc.tcp://host:49320"

        if "username" in configs:
            username = configs["username"].strip()
            if len(username) > 0:
                password = configs["password"].strip()
                conn = "%s?username=%s&password=%s" % (conn, username,
                                                       password)

        client = Client(conn, timeout=timeout)

        # BUG FIX: 'endpoints' used to be bound only inside the DEBUG-only
        # block below, so the reconnect fallback raised NameError whenever
        # DEBUG logging was disabled and the first connect failed.
        endpoints = None

        mi.init_stream(sys.stdout)
        try:
            if logger.isEnabledFor(logging.DEBUG):
                # Best-effort discovery diagnostics; any failure is ignored.
                # NOTE: the stray 'print s' was removed — writing to stdout
                # here would corrupt the modular-input event stream.
                try:
                    servers = client.connect_and_find_servers()
                    logger.debug("Servers are found: ")
                    for s in servers:
                        logger.debug("\tServer: %s" % s)
                except Exception:
                    pass

                try:
                    nservers = client.connect_and_find_servers_on_network()
                    logger.debug("Network Servers are found: ")
                    for n in nservers:
                        logger.debug("\tNetwork Server: %s" % n)
                except Exception:
                    pass

                try:
                    endpoints = client.connect_and_get_server_endpoints()
                    logger.debug("Server Endpoints are found: ")
                    for e in endpoints:
                        logger.debug("\tServer Endpoint: %s" % e.EndpointUrl)
                        logger.debug("\t\tServer Details: %s" % e)
                except Exception:
                    pass

            try:
                logger.info("Start connecting OPC Server [%s]." % conn)
                client.connect()
                logger.info("OPC Server [%s] is connected." % conn)
            except Exception as ex:
                logger.error("Connecting to [%s] failed." % conn)
                if endpoints and len(endpoints) > 0:
                    # Fall back to discovered endpoints.
                    for ep in endpoints:
                        try:
                            conn = ep.EndpointUrl
                            logger.info(
                                "Try connect to another OPC Server [%s]." %
                                conn)
                            client = Client(conn, timeout=timeout)
                            client.connect()
                            logger.info("OPC Server [%s] is connected." % conn)
                        except Exception:
                            # NOTE(review): stops at the first failing
                            # endpoint instead of trying the rest —
                            # original behavior preserved.
                            break
                else:
                    raise ex

            measures = []
            root = client.get_root_node()

            node.collect_measures(measures, patterns, root)

            # Optional per-metric spec file; missing or invalid spec is
            # tolerated on purpose (best-effort load).
            md = {}
            try:
                jm = os.path.join(ua.data_dir(), spec)
                with open(jm, 'r') as mfp:
                    md = json.load(mfp)
            except Exception:
                pass

            for m in measures:
                collect_data(stanza, m[len(m) - 1], spec=md)

        except Exception as ex:
            logger.critical(ex)
        finally:
            mi.fini_stream(sys.stdout)
            logger.info("---- end of opc ua ----")
            client.disconnect()