Example #1
    def _export(self, with_line_str=True):
        fields = [
            '_id', 'datetime', 'operation', 'thread', 'namespace', 'nscanned',
            'nreturned', 'duration', 'numYields', 'w', 'r'
        ]
        if with_line_str:
            fields.append('line_str')

        first_row = True
        result_str = ''
        out_count = 0
        for line_no, line in enumerate(self.args['logfile']):
            logline = LogLine(line)
            # only export lines that have a datetime and duration
            if logline.datetime and logline.duration:
                out_count += 1
                # if too many lines include a line_str, the page won't load
                if with_line_str and out_count > 10000:
                    print "Warning: more than 10,000 data points detected. Skipping actual log line strings for faster plotting."
                    return False
                # write log line out as json
                if not first_row:
                    # prepend comma and newline
                    result_str += ',\n'
                else:
                    first_row = False
                # hack to include _id for log lines from file
                logline._id = line_no
                result_str += logline.to_json(fields)
        return result_str
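
The first_row flag above implements comma-joining by hand: every row after the first gets a ',\n' prefix, so the caller only has to wrap the result in '[' and ']' to obtain a valid JSON array. Below is a minimal, self-contained sketch of the same pattern; the export and to_json helpers and the sample records are illustrative stand-ins, not part of mtools.

    import json

    def to_json(record, fields):
        # project the record down to the requested fields,
        # standing in for LogLine.to_json(fields)
        return json.dumps(dict((f, record.get(f)) for f in fields))

    def export(records, fields):
        first_row = True
        result_str = ''
        for record in records:
            if not first_row:
                result_str += ',\n'  # comma-join every row after the first
            else:
                first_row = False
            result_str += to_json(record, fields)
        return result_str

    rows = export([{'duration': 12, 'operation': 'query'},
                   {'duration': 48, 'operation': 'update'}],
                  ['duration', 'operation'])
    data = json.loads('[' + rows + ']')  # wrapping yields a valid JSON array
    assert len(data) == 2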
Example #2
File: mlogvis.py  Project: rgsingh/mtools
    def run(self):
        LogFileTool.run(self)

        # store in current local folder
        mlogvis_dir = "."

        # change stdin logfile name and remove the < >
        logname = self.args["logfile"].name
        if logname == "<stdin>":
            logname = "stdin"

        os.chdir(mlogvis_dir)

        data_path = os.path.join(os.path.dirname(mtools.__file__), "data")
        srcfilelocation = os.path.join(data_path, "index.html")
        outf = '{"type": "duration", "logfilename": "' + logname + '", "data":['

        first_row = True
        for line in self.args["logfile"]:
            logline = LogLine(line)
            # group regular connections together
            if logline.datetime and logline.duration:
                if logline.thread and logline.thread.startswith("conn"):
                    logline._thread = "conn####"
                # write log line out as json
                if not first_row:
                    # prepend comma and newline
                    outf += ",\n"
                else:
                    first_row = False
                outf += logline.to_json(
                    ["line_str", "datetime", "operation", "thread", "namespace", "nscanned", "nreturned", "duration"]
                )
        outf += "]}"

        dstfilelocation = os.path.join(os.getcwd(), "%s.html" % logname)

        print "copying %s to %s" % (srcfilelocation, dstfilelocation)

        with open(srcfilelocation) as srcfile:
            contents = srcfile.read()

        replaced_contents = contents.replace("##REPLACE##", outf)
        with open(dstfilelocation, "wt") as dstfile:
            dstfile.write(replaced_contents)

        print "serving visualization on file://" + dstfilelocation

        webbrowser.open("file://" + dstfilelocation)
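
One caveat with the example above: the JSON envelope is assembled by raw string concatenation, so a logname containing a double quote or backslash would produce invalid JSON. Below is a sketch of a safer construction using the json module; build_payload and rows are hypothetical names for illustration, not mtools API.

    import json

    def build_payload(logname, rows):
        # rows: one dict per log line, already reduced to the exported
        # fields; json.dumps escapes any quotes in logname correctly
        return json.dumps({"type": "duration",
                           "logfilename": logname,
                           "data": rows})

    payload = build_payload('my "odd" log', [{"duration": 12}])
    assert json.loads(payload)["logfilename"] == 'my "odd" log'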
Example #3
    outf = open('events.json', 'w')
    outf.write('{"type": "duration", "data":[')
    first_row = True
    for line in logfile:
        logline = LogLine(line)
        # group regular connections together
        if logline.datetime and logline.duration:
            if logline.thread and logline.thread.startswith("conn"):
                logline._thread = "conn####"
            # write log line out as json
            if not first_row:
                # prepend comma and newline
                outf.write(',\n')
            else:
                first_row = False
            outf.write(logline.to_json([
                'line_str', 'datetime', 'operation', 'thread', 'namespace',
                'nscanned', 'nreturned', 'duration'
            ]))
    outf.write(']}')
    outf.close()

    data_path = os.path.join(os.path.dirname(mtools.__file__), 'data')
    src = os.path.join(data_path, 'index.html')
    dst = os.path.join(os.getcwd(), 'index.html')
    
    print "trying to copy %s to %s" % (src, dst)
    shutil.copyfile(src, dst)

    Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
    
    for i in range(100):
        try:
            httpd = SocketServer.TCPServer(("", PORT + i), Handler)
            break
        except socket.error:
            # assumed completion: the port is taken, so probe the next one
            # ('socket' assumed imported at module level)
            continue
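
SimpleHTTPServer and SocketServer are Python 2 modules; in Python 3 they became http.server and socketserver. Below is a sketch of the same probe-for-a-free-port loop under Python 3, assuming a starting port of 8888 (the snippet's PORT is defined outside the excerpt).

    import http.server
    import socketserver

    PORT = 8888  # assumed starting port; the original defines PORT elsewhere
    handler = http.server.SimpleHTTPRequestHandler

    httpd = None
    for i in range(100):
        try:
            httpd = socketserver.TCPServer(("", PORT + i), handler)
            break
        except OSError:
            continue  # port busy, probe the next one

    if httpd:
        print("serving on http://localhost:%d" % httpd.server_address[1])
        httpd.serve_forever()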