Example #1
    def __del__(self):
        logging.debug("SIPSMsgParser __del__")
        self.in_shutdown = True
        if (self.in_cs_msg):
            self.submit_cs_message()
        # go through all caches and submit unprocessed messages as-is
        # clients/requests cache
        for clientid in self.d_cs_clients_msgs:
            for refid in self.d_cs_clients_msgs[clientid]:
                for typeid in self.d_cs_clients_msgs[clientid][refid]:
                    self.d_cs_msg = self.d_cs_clients_msgs[clientid][refid][typeid]
                    self.d_cs_msg['is_failed'] = -1  # unknown; the log carries no information
                    self.submit_cs_message()
        #external auth messages
        for endpoint in self.d_cs_exta_intops:
            for typeid in self.d_cs_exta_intops[endpoint]:
                self.d_cs_msg = self.d_cs_exta_intops[endpoint][typeid]
                self.d_cs_msg['is_failed'] = -1  # unknown; the log carries no information
                self.submit_cs_message()

        LogParser.__del__(self)
        return
Example #2
    def seachHtmlFile(self,curpath=None,filename=None,methodlist=None,devices=None):

        listcontent=[]
        _failCount=0
        _successCount=0
        failCase=[]
        content=None
        dictcontent={}
        if not os.path.exists(curpath):
            print "%s路径不存在"%curpath  # "%s: path does not exist"
            return
        for root,dirs,files in os.walk(curpath,True):
            if -1!=root.find(filename):
                print root
            for item in files:
                path=os.path.join(root,item)
                if -1!=path.find(filename):
                    if os.path.splitext(item)[1]=='.html':
                        testcase=os.path.split(path)[0].split('\\')[-1]
                        listcontent=self.parseHtml(path)
                        log=LogParser()
                        _str=log.parser('%s\\%s\\log.html'%(curpath,testcase))
                        if _str=='crash':
                            _failCount=_failCount+1
                            dictcontent['%s'%testcase]='crash'
                        else:
                            if len(listcontent)>0:
                                _failCount=_failCount+1
                                content=listcontent[0]
                                dictcontent['%s'%testcase]=content
                            else:
                                content='Success'
                                dictcontent['%s'%testcase]=content
                                _successCount=_successCount+1
                            
        self.headHtml('%s\sumHtml.html'%curpath)
        for device in devices:
            total=0
            _fail=0
            _success=0
            for key in dictcontent.keys():
                if device in key:
                    total=total+1
                    if dictcontent[key]!='Success':
                        _fail=_fail+1
                        failCase.append(key)
                    else:
                        _success=_success+1
            self.titleHtml('%s\sumHtml.html'%curpath,device)
            self.getSummaryHtml(curpath,'%s\sumHtml.html'%curpath,total,_success,_fail,failCase,device)
        for device in devices:
            dic={}
            for key in dictcontent.keys():
                if device in key:
                    dic['%s'%key]=dictcontent[key]
                    if dictcontent[key]!='Success':
                        self.failDetailHtml(curpath,'%s\\%s_FailHtml.html'%(curpath,device),dic,device)
            self.titleHtml('%s\sumHtml.html'%curpath,device)
            self.getDetailHtml(curpath,'%s\sumHtml.html'%curpath,dic,device)
        self.endHtml('%s\sumHtml.html'%curpath)
Example #3
 def ParsePQATAndPrintTable(self, logName):
     metrics = ["HPWL", "TNS", "WNS"]
     parser = LogParser(logName, TableType.PQAT, self.cfgParser)
     table = parser.ParsePQAT(metrics)
     table = MakeTableInPercents(table)
     pqatName = r"%s.csv" % (os.path.basename(logName))
     PQATFileName = os.path.join(os.path.dirname(logName), pqatName)
     PrintTableToFile(PQATFileName, table, metrics)
Example #4
 def __init__(self, submitter, tags={}):
     logging.debug("SIPSMsgParser __init__")
     LogParser.__init__(self, submitter, tags)
     # buffer
     self.sip_msg = ''
     # dictionary for SIP msg
     self.d_sip_msg = {}
     # bool we are in sip msg
     self.in_sip_msg = 0
Example #5
 def __init__(self,submitter,tags={}):
     logging.debug("TLibMsgParser __init__")
     LogParser.__init__(self, submitter,tags)
     # buffer
     self.tlib_msg = ''
     # dictionary for TLib msg
     self.d_tlib_msg = {}
     # bool we are in tlib msg
     self.in_tlib_msg = 0
Example #6
 def __init__(self, submitter, tags={}):
     logging.debug("TLibMsgParser __init__")
     LogParser.__init__(self, submitter, tags)
     # buffer
     self.tlib_msg = ''
     # dictionary for TLib msg
     self.d_tlib_msg = {}
     # bool we are in tlib msg
     self.in_tlib_msg = 0
     self.has_ConnID = False
     self.has_ThisDN = False
Example #7
def PrintAndPlotPFST(logName, metrics, stages):
    parser = LogParser(logName)
    table = parser.ParsePFST(metrics, stages)

    if not table:
        print("Error: table is empty")
        return

    table = MakeTableInPercents(table)
    PrintTableToFile(r"%s.csv" % logName, table, metrics, stages)

    del table[0]  # don't use values from iteration 0
    [xValues, yValues] = ExtractXYFromTable(table)
    PlotChartForBenchmark(logName, xValues, "TNS", yValues, "HPWL")
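A hedged usage sketch for the function above; the log path, the metric list and the stage labels are placeholders, not values taken from the project:

# Hypothetical call; "placer_run.log" and the stage labels are placeholders,
# the metric names mirror those used in Example #3.
PrintAndPlotPFST("placer_run.log", ["HPWL", "TNS", "WNS"], ["GP", "LEG", "DP"])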
Example #8
    def failDetailHtml(self,curpath,filename,dictcontent,device):
        log=LogParser()
        f=open(filename,'w')
        str='<html>\
                    <head>\
                    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />\
                    <title>Test</title>\
                    <style>img{float:left;margin:5px;}</style>'
        str+='</head>\
                    <body align=center>'
        # Chinese labels below: "失败测试报告" = "Failed Test Report", "用例名称" = "Test Case", "结果" = "Result"
        str+='<center><h1>失败测试报告</h1>'
        str+= '<table border=1 borderColor=#EAEAEA width=100% style=border-collapse:collapse>\
                    <tr>\
                        <th width=350>用例名称</th>\
                        <th>结果</th>\
                    </tr>' 
        for key in dictcontent.keys():
##            _str=log.parser('%s\\%s\\log.html'%(curpath,key))
            case=key[0:len(key)-len(device)-1]
            str += '<tr>\
                        <th><a href="./%s/report.html">%s</a></th>'%(key,case)
            if dictcontent[key]!="Success":
                if dictcontent[key]=="crash":
                     str+='<th style=color:#FF0033 id="e">%s:<a href="./%s/log.html">System.out</a></th></tr>'%(dictcontent[key],key)
                else:
                     str+='<th style=color:#FF0033 id="e">Exception:%s</th></tr>'%(dictcontent[key])
            else:
                str+='<th id="e">%s</th></tr>'%(dictcontent[key])
Example #9
    def __init__(self, submitter, tags={}):
        logging.debug("CSMsgParser __init__")
        LogParser.__init__(self, submitter, tags)
        # buffer
        self.cs_msg = ''
        # dictionary for current CS msg
        self.d_cs_msg = {}
        # previous CS messages (TODO: do we need a full stack?)
        self.d_cs_msg_stack = []

        # bool we are in CS msg
        self.in_cs_msg = 0
        # we are handling termination (cleanup caches)
        self.in_shutdown = False
        # dictionary of per-client messages that are pending further processing
        self.d_cs_clients_msgs = {}
        # aux dictionary for ext auth module requests by internal reqid_exta (linked to d_cs_clients_msgs)
        self.d_cs_exta_msgs = {}
        # dictionary for ext auth operation messages by ext auth endpoint
        self.d_cs_exta_intops = {}
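The subclass snippets only show the arguments they forward to the base class; a minimal sketch of the LogParser base they appear to assume (it stores the submitter and tags and exposes a __del__ hook) could look like this. The attribute names are assumptions, not taken from the project:

# Assumed minimal base class; only the two entry points the subclasses call
# (__init__ with submitter/tags and __del__) are sketched, attribute names are guesses.
class LogParser(object):
    def __init__(self, submitter, tags=None):
        self.submitter = submitter
        self.tags = tags if tags is not None else {}

    def __del__(self):
        # subclasses flush any buffered messages before delegating here
        pass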
Example #10
    def openDirectory(self):

        folder = str(
            QFileDialog.getExistingDirectory(self, "Select Directory", "./"))

        if not folder:
            return
        logmerger = LogMerger(folder)
        logparser = LogParser(folder)
        self.events = logparser.getEvents()
        self.logs = logparser.getLogs()

        # for event in self.events:
        #     print(event.getEventName())
        #     print(event.getEventSymbol())
        #     print(event.getEventLogs())

        # Event Checkbox init
        self.eventCheckboxInit()

        # device MAC Label init
        self.ui.deviceMACLabel.setText(logparser.getDeviceMAC())
Example #11
    def getDetailHtml(self,curpath,filename,dictcontent,device):
        log=LogParser()
        f=open(filename,'a')
        # Chinese table headers below: "用例名称" = "Test Case", "结果" = "Result"
        str = '<table border=1 borderColor=#EAEAEA width=100% style=border-collapse:collapse>\
                    <tr>\
                        <th width=350>用例名称</th>\
                        <th>结果</th>\
                    </tr>' 
        for key in dictcontent.keys():
##            _str=log.parser('%s\\%s\\log.html'%(curpath,key))
            case=key[0:len(key)-len(device)-1]
            str += '<tr>\
                        <th><a href="./%s/report.html">%s</a></th>'%(key,case)
            if dictcontent[key]!="Success":
                if dictcontent[key]=="crash":
                     str+='<th style=color:#FF0033 id="e">%s:<a href="./%s/log.html">System.out</a></th></tr>'%(dictcontent[key],key)
                else:
                     str+='<th style=color:#FF0033 id="e">Exception:%s</th></tr>'%(dictcontent[key])
            else:
                str+='<th id="e">%s</th></tr>'%(dictcontent[key])
Example #12
from utilsCommon import sys_oper
from utilsCommon import sys_base
from utilsCommon import Config_agent
from auditOper import auditOper
import os
import pyinotify
import threading
import time
import ctypes
import sys
from datetime import datetime
import commands
import hashlib
from lib_dec import dec_lib

# LogParser, ConfigInfor, OperParser and StateInfo come from project modules
# that are not part of this excerpt (no imports for them are shown).
logparser = LogParser()
config = ConfigInfor()
operpaser = OperParser()
stateInfo = StateInfo()


class OnIOHandler(pyinotify.ProcessEvent):
    def process_IN_MODIFY(self, event):
        global g_EventListSet, mutex
        if event.maskname == 'IN_MODIFY':
            if mutex.acquire(5):
                g_EventListSet.add(event.pathname)
                mutex.release()


class inotify_log(threading.Thread):
Example #13
 def __init__(self,submitter,tags={}):
     LogParser.__init__(self, submitter,tags)
     # dictionary for std msg
     self.d_std_msg = {}
Example #14
 def ParseLog(self, logName):
     parser = LogParser(logName, TableType.PFST, self.cfgParser)
     return parser.ParsePFST(self.metrics, self.stages)
Example #15
    def seachHtmlFile(self,curpath=None,filename=None,methodlist=None,devices=None):

        listcontent=[]
        _failCount=0
        _successCount=0
        failCase=[]
        content=None
        dictcontent={}
        if not os.path.exists(curpath):
            print "%s路径不存在"%curpath  # "%s: path does not exist"
            return
        for root,dirs,files in os.walk(curpath,True):
            if -1!=root.find(filename):
                print root
            for item in files:
                path=os.path.join(root,item)
                if -1!=path.find(filename):
                    if os.path.splitext(item)[1]=='.html':
                        testcase=os.path.split(path)[0].split('\\')[-1]
                        listcontent=self.parseHtml(path)
                        log=LogParser()
                        _str=log.parser('%s\\%s\\log.html'%(curpath,testcase))
                        if _str=='crash':
                            _failCount=_failCount+1
                            dictcontent['%s'%testcase]='crash'
                        else:
                            if len(listcontent)>0:
                                _failCount=_failCount+1
                                content=listcontent[0]
                                dictcontent['%s'%testcase]=content
                            else:
                                content='Success'
                                dictcontent['%s'%testcase]=content
                                _successCount=_successCount+1
                            
        self.headHtml('%s\sumHtml.html'%curpath)
        for device in devices:
            total=0
            _fail=0
            _success=0
            for key in dictcontent.keys():
                if device in key:
                    total=total+1
                    if dictcontent[key]!='Success':
                        _fail=_fail+1
                        failCase.append(key)
                    else:
                        _success=_success+1
            self.titleHtml('%s\sumHtml.html'%curpath,device)
            self.getSummaryHtml(curpath,'%s\sumHtml.html'%curpath,total,_success,_fail,failCase,device)
        for device in devices:
            dic={}
            dicf={}
            for key in dictcontent.keys():
                if device in key:
                    dic['%s'%key]=dictcontent[key]
                    if dictcontent[key]!='Success':
                        dicf['%s'%key]=dictcontent[key]
            self.failDetailHtml(curpath,'%s\\%s_FailHtml.html'%(curpath,device),dicf,device)
            self.titleHtml('%s\sumHtml.html'%curpath,device)
            self.getDetailHtml(curpath,'%s\sumHtml.html'%curpath,dic,device)
        self.endHtml('%s\sumHtml.html'%curpath)
Example #16
 def __del__(self):
     logging.debug("SIPSMsgParser __del__")
     if (self.in_sip_msg):
         self.submit_sip_message()
     LogParser.__del__(self)
     return
Example #17
from InputFileValidator import InputFileValidator
from LogParser import LogParser
import argparse
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='This is a Python version of grep function.')
    parser.add_argument('name_of_file', type=str,
                    help='the name of the file to check')
    parser.add_argument('string_to_search', type=str,
                    help='full phrase to search in the file')
    args = parser.parse_args()
    name_of_file = args.name_of_file
    string_to_search = args.string_to_search
    file_validator = InputFileValidator(name_of_file)
    if file_validator.validate():
        log_parser = LogParser(file_validator.get_list_of_files(), string_to_search)
        print(log_parser.parselog())
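A programmatic usage sketch of the same wiring done in the __main__ block above; the file name and the search phrase are placeholder values:

# Placeholder file name and search phrase.
validator = InputFileValidator("server.log")
if validator.validate():
    print(LogParser(validator.get_list_of_files(), "connection refused").parselog())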
Example #18
 def __del__(self):
     logging.debug("TLibSMsgParser __del__")
     if (self.in_tlib_msg):
         self.submit_tlib_message()
     LogParser.__del__(self)
     return
Example #19
 def __del__(self):
     logging.debug("SIPSMsgParser __del__")
     if(self.in_sip_msg):
         self.submit_sip_message()
     LogParser.__del__(self)
     return
Example #20
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib import style
from LogParser import LogParser

style.use('fivethirtyeight')

fig = plt.figure()
ax1 = fig.add_subplot(1, 1, 1)
old_f = []
lp = LogParser('config.ini')


def animate(i):
    global old_f
    with open('history.log', 'r') as f:
        new_f = f.readlines()
        if new_f != old_f:
            old_f = new_f[:]
            lp.add_log(old_f)
            lp.watson_report_cumulative()
            # Isolate x axis
            x = lp.axes['x']
            ax1.clear()
            for name, things in lp.axes.items():
                if name == 'x':
                    continue
                print(lp.axes)
                plt.plot(x, things, label=name)
                plt.legend(loc=2, prop={'size': 7})
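The excerpt ends before animate is wired into matplotlib's event loop; a typical hookup, assuming a one-second refresh interval, would be:

# Assumed continuation (not part of the original excerpt):
# re-run animate() every 1000 ms and keep a reference so it is not garbage-collected.
ani = animation.FuncAnimation(fig, animate, interval=1000)
plt.show()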
Example #21
File: main.py Project: h4ck1t/PitE
import os
import matplotlib.pyplot as plt

from LogParser import LogParser
from InputFileValidator import InputFileValidator

InputFileValidator.validate(LogParser.get_parser())

data = LogParser.parse_log(LogParser.get_parser(), "PrChecker.Downs")[1:]
data_to_strings = []
iterator = 0
x_buffer = []
y_buffer = []
for buffer in data:
    part = buffer.split()
    for atom in part:
        if atom.isdigit():
            data_to_strings.append(atom)
for string in data_to_strings:
    if iterator == 0:
        x_buffer.append(string)
        iterator += 1
    elif iterator == 1:
        y_buffer.append(string)
        iterator += 1
    else:
        iterator = 0

LogParser.save_result(
    "./results/results",
    LogParser.parse_log(LogParser.get_parser(), "PrChecker.Downs"))
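matplotlib.pyplot is imported above but the excerpt never plots; a minimal continuation, assuming x_buffer and y_buffer hold matching pairs of digit strings, would be:

# Assumed continuation: plot the parsed "PrChecker.Downs" values.
# Truncate to the shorter buffer and convert the digit strings to integers.
n = min(len(x_buffer), len(y_buffer))
plt.plot([int(x) for x in x_buffer[:n]], [int(y) for y in y_buffer[:n]])
plt.show()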
Example #22
 def __del__(self):
     logging.debug("TLibSMsgParser __del__")
     if(self.in_tlib_msg):
         self.submit_tlib_message()
     LogParser.__del__(self)
     return
Example #23
from MsgWrap import OperMsgParser
from LogParser import LogParser
from utilsCommon import pf_oper
from utilsCommon import pf_base
from utilsCommon import PrntLog
from utilsCommon import judge_ip_localhost
from utilsCommon import get_netcard
import os
import crypt
import random, string
import threading
import traceback

opermsgpaser = OperMsgParser()
logparser = LogParser()
gCreateAccountrSet = set()


class OperParser(object):
    def init_gCreateAccountrSet(self):
        ret = os.path.exists('AddUserList.conf')
        if ret:
            global gCreateAccountrSet
            file = open('AddUserList.conf', 'r')
            for line in file.readlines():
                gCreateAccountrSet.add(line.rstrip('\n'))
            file.close()

    def OperParserMsg(self, str):
        try:
Example #24
def main(arguments: Optional[List[str]] = None) -> None:
    def parse_ignored_ips(x: str) -> List[ipaddress.IPv4Network]:
        return [
            ipaddress.ip_network(address, strict=False)
            for address in x.split(',')
        ]

    parser = argparse.ArgumentParser(description='Process log files.')

    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--realtime',
                       '--interactive',
                       '-i',
                       '-r',
                       action='store_true',
                       help='Watch a single log file in realtime')
    group.add_argument(
        '--batch',
        '-b',
        action='store_true',
        help='Print a report on one or more completed log files')
    group.add_argument('--slug-summary',
                       action='store_true',
                       dest='show_slugs',
                       help="Show the slugs that have been used in a log file")
    group.add_argument('--xxfake',
                       action='store_true',
                       help=argparse.SUPPRESS,
                       dest='fake')  # For testing only

    group2 = parser.add_mutually_exclusive_group()
    group2.add_argument('--by-ip',
                        action='store_true',
                        dest='by_ip',
                        help='Sorts batched logs by host ip')
    group2.add_argument('--by-time',
                        action='store_false',
                        dest='by_ip',
                        help='Sorts batched logs by session start time')

    parser.add_argument('--html',
                        action='store_true',
                        dest='uses_html',
                        help='Generate html output rather than text output')

    parser.add_argument('--api-host-url',
                        default=DEFAULT_FIELDS_PREFIX,
                        metavar='URL',
                        dest='api_host_url',
                        help='base url to access the information')
    parser.add_argument('--reverse-dns',
                        '--dns',
                        action='store_true',
                        dest='uses_reverse_dns',
                        help='Attempt to resolve the real host name')
    parser.add_argument(
        '--ignore-ip',
        '-x',
        default=[],
        action="append",
        metavar='cidrlist',
        dest='ignore_ip',
        type=parse_ignored_ips,
        help='list of IPs to ignore; may be specified multiple times')
    parser.add_argument(
        '--session-timeout',
        default=60,
        type=int,
        metavar="minutes",
        dest='session_timeout_minutes',
        help='a session ends after this period (minutes) of inactivity')

    parser.add_argument('--output',
                        '-o',
                        type=argparse.FileType('w'),
                        default=sys.stdout,
                        dest='output',
                        help="output file.  default is stdout")

    # TODO(fy): Temporary hack for when I don't have internet access
    parser.add_argument('--xxlocal',
                        action="store_true",
                        dest="uses_local",
                        help=argparse.SUPPRESS)

    parser.add_argument('log_files',
                        nargs=argparse.REMAINDER,
                        help='log files')
    args = parser.parse_args(arguments)

    slugs = Slug.ToInfoMap(
        LOCAL_SLUGS_PREFIX if args.uses_local else args.api_host_url)
    # args.ignored_ip comes out as a list of lists, and it needs to be flattened.
    ignored_ips = [ip for arg_list in args.ignore_ip for ip in arg_list]
    session_info_generator = SessionInfoGenerator(slugs, ignored_ips)
    log_parser = LogParser(session_info_generator, **vars(args))

    if args.realtime:
        if len(args.log_files) != 1:
            raise Exception("Must specify exactly one file for batch mode.")
        log_entries_realtime = LogReader.read_logs_from_tailed_file(
            args.log_files[0])
        log_parser.run_realtime(log_entries_realtime)
    else:
        if len(args.log_files) < 1:
            raise Exception("Must specify at least one log file.")
        log_entries_list = LogReader.read_logs(args.log_files)
        if args.batch:
            log_parser.run_batch(log_entries_list)
        elif args.show_slugs:
            log_parser.show_slugs(log_entries_list)
        elif args.fake:
            log_entries_list.sort(key=operator.attrgetter('time'))
            log_parser.run_realtime(iter(log_entries_list))
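A usage sketch for batch mode; the flags are the ones defined above, while the log file names and the ignored network are placeholders:

# Hypothetical invocation; file names and the ignored CIDR are placeholders.
main(['--batch', '--by-ip', '--ignore-ip', '10.0.0.0/8',
      'access1.log', 'access2.log'])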
Example #25
when argument "port" is not included, the default port would be 3306\n
But other arguments "host", "user", "passwd", "db" are required.''', action='append')
parser.add_argument('-s', help='the OS where the chat log came from. "android" and "iOS" is available.')
parser.add_argument('-v', '--version', action='version', version='%(prog)s 0.11')

startOptions = parser.parse_args()
dbConfig = {}

if startOptions.db:
    for db_arg in startOptions.e:
        leftside, rightside = parseExpression(db_arg)
        dbConfig[leftside] = rightside

    if 'PORT' not in dbConfig.keys():
        dbConfig['PORT'] = 3306

    for necessary in ['HOST', 'USER', 'PASSWD', 'DB']:
        if necessary not in dbConfig.keys():
            print('[Error]Invalid Argument:: "%s" variable not in Database Config' % necessary)
            exit(-1)

parser = LogParser(system=startOptions.s if startOptions.s else 'android', topKw=100)
data = parser.process(startOptions.source)

if startOptions.db:
    saveToDB(data, dbConfig)
else:
    print(json.dumps(data))


Example #26
 def test_parse(self):
     LogParser("./test_log.txt", ".", "@ERROR_GROUP")
     with open("./test_log.txt_err_timestamps.txt") as f:
         lines = f.readlines()
         self.assertEqual(len(lines), 1)
         self.assertEqual(lines[0], "1502443757149")
Example #27
 def __init__(self, submitter, tags={}):
     LogParser.__init__(self, submitter, tags)
     # dictionary for std msg
     self.d_std_msg = {}