Example #1
def main():
    log_format = parse_log_format()

    result_files = [
        result_file.replace('.html', '')
        for result_file in get_dir_files('../result/report/')
    ]
    target_files = sorted([
        data_file for data_file in get_dir_files('../data')
        if data_file not in result_files
    ])

    for target_file in target_files:
        try:
            print datetime.datetime.now(), ' Start parse file : ' + target_file

            parse_log_file(target_file, log_format)
            if config.goaccess_flag:
                parse_log_file_with_goaccess(target_file)

            print datetime.datetime.now(), ' End parse file: ' + target_file
        except Exception:
            exstr = traceback.format_exc()
            print exstr
    update_index_html()
Example #2
def main():

    log_format = parse_log_format()

    result_files = [result_file.replace('.html', '') for result_file in get_dir_files('../result/report/')]
    target_files = sorted([data_file for data_file in get_dir_files('../data') if data_file not in result_files])

    for target_file in target_files:
        print datetime.datetime.now(), ' Start parse file : '+target_file

        parse_log_file(target_file, log_format)
        if config.goaccess_flag:
            parse_log_file_with_goaccess(target_file)

        print datetime.datetime.now(), ' End parse file: '+target_file

    update_index_html()
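Both main() variants above rely on a helper get_dir_files() that lists the entries of a directory; the helper itself is not part of these snippets. A minimal sketch of what such a function might look like, using only the standard library, is given below (the exact behavior of the project's util.get_dir_files is an assumption).

import os

def get_dir_files(dir_path):
    # Return the names of regular files directly inside dir_path.
    # Hypothetical stand-in; the project's real helper may filter or sort differently.
    return [name for name in os.listdir(dir_path)
            if os.path.isfile(os.path.join(dir_path, name))]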
Example #3
def update_index_html():
    html = index_html_header
    for report_file in sorted(get_dir_files('../result/report/')):
        if report_file.find('GoAccess') != -1 or report_file.find('.git') != -1 :
            pass
        else:
            html += index_html_li % {'web_log_parser_file': report_file}
    html += index_html_end

    with open('../result/index.html', 'w') as f:
        f.write(html)
Example #4
def update_index_html():
    html = index_template.render(
        files=sorted(get_dir_files('../result/report/')))

    with open('../result/index.html', 'wb') as f:
        f.write(html.encode('utf-8'))
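Examples #4 and #6 render an index_template object instead of concatenating HTML strings as Example #3 does. The template itself is defined elsewhere; a minimal Jinja2 sketch that would satisfy these render() calls is shown below (the markup and the link target are assumptions; only the files variable comes from the calls above).

from jinja2 import Template

# Hypothetical template markup; the project's real index_template is defined elsewhere.
index_template = Template(
    "<html><body><ul>"
    "{% for f in files %}"
    "<li><a href='report/{{ f }}'>{{ f }}</a></li>"
    "{% endfor %}"
    "</ul></body></html>"
)

html = index_template.render(files=["a.html", "b.html"])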
Example #5
            ftp.mkd(self.datafile_prefix.split("-")[1])
        except Exception, e:
            print ("[INFO]ftp directory: %s existed" %
                   self.datafile_prefix.split("-")[1])
            print e
        ftp.cwd(self.datafile_prefix.split("-")[1])
        try:
            ftp.mkd("monitor")
        except Exception, e:
            print "[INFO]ftp directory: monitor existed"
            print e
        ftp.cwd("monitor")
        ftp.mkd(os.path.basename(self.report_file).split(".html")[0])
        ftp.cwd(os.path.basename(self.report_file).split(".html")[0])
        buffer_size = 1024
        for datafile in util.get_dir_files(self.result_dir):
            file_handler = open(self.result_dir + "/" + datafile, "rb")
            ftp.storbinary("STOR %s" % datafile, file_handler, buffer_size)
        ftp.set_debuglevel(0)
        file_handler.close()
        ftp.quit()

    def api_upload(self):
        self.summary_data['line_data'] = self.line_data
        r = requests.post(self.api_url, json=json.dumps(self.summary_data))
        if r.text == "200":
            print "[INFO]api upload success"
        else:
            print "[ERROR]api upload failed"

    def work(self):
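One note on the api_upload method above: requests serializes whatever is passed through the json= keyword itself, so handing it an already json.dumps-ed string makes the request body a JSON-encoded string rather than a JSON object. If the receiving API expects an object, the call would more conventionally look like the sketch below (whether that API signals success via the body text or the HTTP status code is an assumption here).

import requests

def api_upload(api_url, summary_data):
    # Pass the dict directly; requests serializes it and sets the
    # Content-Type: application/json header on its own.
    r = requests.post(api_url, json=summary_data)
    return r.status_code == 200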
Example #6
def update_index_html():
    html = index_template.render(files=sorted(get_dir_files('../result/report/')))

    with open('../result/index.html', 'w') as f:
        f.write(html.encode('utf-8'))
Example #7
        ftp.mkd(os.path.basename(self.report_file).split(".html")[0])
        ftp.cwd(os.path.basename(self.report_file).split(".html")[0])
        buffer_size = 1024

        file_handler = open(self.package_file, "rb")
        ftp.storbinary("STOR %s" % os.path.basename(self.package_file),
                       file_handler, buffer_size)
        file_handler = open(self.report_file, "rb")
        ftp.storbinary("STOR %s" % os.path.basename(self.report_file),
                       file_handler, buffer_size)
        file_handler = open(self.result_dir + "/" + "An_Report1.html", "rb")
        ftp.storbinary("STOR %s" % "An_Report1.html", file_handler, buffer_size)

        ftp.mkd("An_Report1")
        ftp.cwd("An_Report1")
        for report_file in util.get_dir_files(self.result_dir + "/" + "An_Report1"):
            file_handler = open(self.result_dir + "/" +
                                "An_Report1" + "/" + report_file, "rb")
            ftp.storbinary("STOR %s" % report_file, file_handler, buffer_size)
        ftp.set_debuglevel(0)
        file_handler.close()
        ftp.quit()

    def work(self):
        self.get_conf()
        self.set_file_name()
        self.generate_html_report()
        self.package_files()
        if self.ftp_conf.get('flag'):
            self.ftp_upload()
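The FTP snippets in Examples #5 and #7 reassign a single file_handler variable for every upload and only close the last handle explicitly. A compact Python 3 sketch of the same upload pattern using context managers, which closes each file and the connection deterministically, is shown below (host, credentials, and directory layout are placeholders, not values taken from the original code).

import os
from ftplib import FTP

def upload_dir(host, user, password, remote_dir, local_dir):
    # Upload every regular file in local_dir into remote_dir on the FTP server.
    with FTP(host) as ftp:
        ftp.login(user, password)
        try:
            ftp.mkd(remote_dir)
        except Exception as exc:
            # Mirrors the originals: treat "directory already exists" as informational.
            print("[INFO]ftp directory %s existed (%s)" % (remote_dir, exc))
        ftp.cwd(remote_dir)
        for name in os.listdir(local_dir):
            path = os.path.join(local_dir, name)
            if os.path.isfile(path):
                with open(path, "rb") as handle:
                    ftp.storbinary("STOR %s" % name, handle, 1024)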
Example #8
            ftp.mkd(self.datafile_prefix.split("-")[1])
        except Exception, e:
            print("[INFO]ftp directory: %s existed" %
                  self.datafile_prefix.split("-")[1])
            print e
        ftp.cwd(self.datafile_prefix.split("-")[1])
        try:
            ftp.mkd("monitor")
        except Exception, e:
            print "[INFO]ftp directory: monitor existed"
            print e
        ftp.cwd("monitor")
        ftp.mkd(os.path.basename(self.report_file).split(".html")[0])
        ftp.cwd(os.path.basename(self.report_file).split(".html")[0])
        buffer_size = 1024
        for datafile in util.get_dir_files(self.result_dir):
            file_handler = open(self.result_dir + "/" + datafile, "rb")
            ftp.storbinary("STOR %s" % datafile, file_handler, buffer_size)
        ftp.set_debuglevel(0)
        file_handler.close()
        ftp.quit()

    def api_upload(self):
        self.summary_data['line_data'] = self.line_data
        r = requests.post(self.api_url, json=json.dumps(self.summary_data))
        if r.text == "200":
            print "[INFO]api upload success"
        else:
            print "[ERROR]api upload failed"

    def work(self):
Example #9
def main():
    #get the avg
    parameter_lists = util.get_parameter_lists(sys.argv)
    if len(parameter_lists):
        result_prefix = parameter_lists[0]
    else:
        result_prefix = "test"
    if len(parameter_lists) > 1:
        end_time = parameter_lists[1]
    else:
        end_time = "N/A"

    #read the monitor config
    config = MonitorConfig("../conf/report.ini")

    #analyse file
    files = util.get_dir_files(config.res_dir)
    for datafile in files:
        if datafile.find("txt") != -1:
            if datafile.find("server_cpu") != -1:
                cpu_resource = plot.CPUResource(config.server_cpu_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                cpu_resource.work()
            if datafile.find("server_memory") != -1:
                memory_resource = plot.MemoryResource(config.server_memory_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                memory_resource.work()
            if datafile.find("server_io_rate") != -1:
                io_rate_resource = plot.IOResource(config.server_io_rate_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                io_rate_resource.work()
            if datafile.find("server_eth0") != -1:
                eth0_resource = plot.EthResource(config.server_eth_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                eth0_resource.work()
            if datafile.find("server_eth1") != -1:
                eth1_resource = plot.EthResource(config.server_eth_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                eth1_resource.work()
            if datafile.find("server_queue_load") != -1:
                load_resource = plot.LoadResource(config.server_load_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                load_resource.work()
            if datafile.find("server_socket") != -1:
                sock_resource = plot.SockResource(config.server_sock_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                sock_resource.work()
            if datafile.find("mysql") != -1:
                mysql_resource = plot.MySQLResource(config.mysql_connections_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                mysql_resource.work()
            if datafile.find("TCPPort") != -1:
                tcp_port_resource = plot.TCPPortResource(config.tcp_port_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                tcp_port_resource.work()
            if datafile.find("process") != -1:
                process_resource = plot.ProcessResource(config.process_types,
                        result_prefix, config.res_dir+"/"+datafile, config.granularity)
                process_resource.work()
            if datafile.find("redis") != -1:
                if datafile.find('process') == -1 and datafile.find('thread') == -1:
                    redis_resource = plot.RedisResource(config.redis_types,
                            result_prefix, config.res_dir+"/"+datafile, config.granularity)
                    redis_resource.work()
            if datafile.find('memcached') != -1:
                if datafile.find('process') == -1 and datafile.find('thread') == -1:
                    memcached_resource = plot.MemcachedResource(config.memcached_types,
                            result_prefix, config.res_dir+"/"+datafile, config.granularity)
                    memcached_resource.work()
            if datafile.find('mongodb') != -1:
                if datafile.find('process') == -1 and datafile.find('thread') == -1:
                    mongodb_resource = plot.MongodbResource(config.mongodb_types,
                            result_prefix, config.res_dir+"/"+datafile, config.granularity)
                    mongodb_resource.work()
            if datafile.find('apache') != -1:
                if datafile.find('process') == -1 and datafile.find('thread') == -1:
                    apache_resource = plot.ApacheResource(config.apache_types,
                            result_prefix, config.res_dir+"/"+datafile, config.granularity)
                    apache_resource.work()
            if datafile.find('tomcat6') != -1:
                if datafile.find('process') == -1 and datafile.find('thread') == -1:
                    tomcat_resource = plot.TomcatResource(config.tomcat6_types,
                            result_prefix, config.res_dir+"/"+datafile, config.granularity)
                    tomcat_resource.work()
            if datafile.find('tomcat7') != -1:
                if datafile.find('process') == -1 and datafile.find('thread') == -1:
                    tomcat_resource = plot.TomcatResource(config.tomcat7_types,
                            result_prefix, config.res_dir+"/"+datafile, config.granularity)
                    tomcat_resource.work()
            if datafile.find('nginx') != -1:
                if datafile.find('process') == -1 and datafile.find('thread') == -1:
                    nginx_resource = plot.TomcatResource(config.nginx_types,
                            result_prefix, config.res_dir+"/"+datafile, config.granularity)
                    nginx_resource.work()

    #generate sum report
    report.Report.end_time = end_time
    resource_sum_report = report.Report(config.res_dir)
    resource_sum_report.work()
Example #10
def main():
    # get the avg
    parameter_lists = util.get_parameter_lists(sys.argv)
    if len(parameter_lists):
        result_prefix = parameter_lists[0]
    else:
        result_prefix = "test"
    if len(parameter_lists) > 1:
        end_time = parameter_lists[1]
    else:
        end_time = "N/A"

    # read the monitor config
    config = MonitorConfig("../conf/report.ini")

    # analyse file
    files = util.get_dir_files(config.res_dir)
    for datafile in files:
        if datafile.find("txt") != -1:
            if datafile.find("server_cpu") != -1:
                cpu_resource = plot.CPUResource(
                    config.server_cpu_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                cpu_resource.work()
            if datafile.find("server_memory") != -1:
                memory_resource = plot.MemoryResource(
                    config.server_memory_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                memory_resource.work()
            if datafile.find("server_io_rate") != -1:
                io_rate_resource = plot.IOResource(
                    config.server_io_rate_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                io_rate_resource.work()
            if datafile.find("server_eth0") != -1:
                eth0_resource = plot.EthResource(
                    config.server_eth_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                eth0_resource.work()
            if datafile.find("server_eth1") != -1:
                eth1_resource = plot.EthResource(
                    config.server_eth_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                eth1_resource.work()
            if datafile.find("server_queue_load") != -1:
                load_resource = plot.LoadResource(
                    config.server_load_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                load_resource.work()
            if datafile.find("server_socket") != -1:
                sock_resource = plot.SockResource(
                    config.server_sock_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                sock_resource.work()
            if datafile.find("mysql") != -1 and datafile.find('threads') != -1:
                mysql_resource = plot.MySQLResource(
                    config.mysql_connections_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                mysql_resource.work()
            if datafile.find("TCPPort") != -1:
                tcp_port_resource = plot.TCPPortResource(
                    config.tcp_port_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                tcp_port_resource.work()
            if datafile.find("process") != -1 and datafile.find('mysql') == -1:
                process_resource = plot.ProcessResource(
                    config.process_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                process_resource.work()
            if datafile.find("redis") != -1:
                if datafile.find('process') == -1 and datafile.find(
                        'thread') == -1:
                    redis_resource = plot.RedisResource(
                        config.redis_types, result_prefix,
                        config.res_dir + "/" + datafile, config.granularity)
                    redis_resource.work()
            if datafile.find('memcached') != -1:
                if datafile.find('process') == -1 and datafile.find(
                        'thread') == -1:
                    memcached_resource = plot.MemcachedResource(
                        config.memcached_types, result_prefix,
                        config.res_dir + "/" + datafile, config.granularity)
                    memcached_resource.work()
            if datafile.find('mongodb') != -1:
                if datafile.find('process') == -1 and datafile.find(
                        'thread') == -1:
                    mongodb_resource = plot.MongodbResource(
                        config.mongodb_types, result_prefix,
                        config.res_dir + "/" + datafile, config.granularity)
                    mongodb_resource.work()
            if datafile.find('apache') != -1:
                if datafile.find('process') == -1 and datafile.find(
                        'thread') == -1:
                    apache_resource = plot.ApacheResource(
                        config.apache_types, result_prefix,
                        config.res_dir + "/" + datafile, config.granularity)
                    apache_resource.work()
            if datafile.find('tomcat6') != -1:
                if datafile.find('process') == -1 and datafile.find(
                        'thread') == -1:
                    tomcat_resource = plot.TomcatResource(
                        config.tomcat6_types, result_prefix,
                        config.res_dir + "/" + datafile, config.granularity)
                    tomcat_resource.work()
            if datafile.find('tomcat7') != -1:
                if datafile.find('process') == -1 and datafile.find(
                        'thread') == -1:
                    tomcat_resource = plot.TomcatResource(
                        config.tomcat7_types, result_prefix,
                        config.res_dir + "/" + datafile, config.granularity)
                    tomcat_resource.work()
            if datafile.find('nginx') != -1:
                if datafile.find('process') == -1 and datafile.find(
                        'thread') == -1:
                    nginx_resource = plot.TomcatResource(
                        config.nginx_types, result_prefix,
                        config.res_dir + "/" + datafile, config.granularity)
                    nginx_resource.work()
            if datafile.find('SocketStat') != -1:
                socket_stat_resource = plot.SocketStatResource(
                    config.socket_stat_types, result_prefix,
                    config.res_dir + "/" + datafile, config.granularity)
                socket_stat_resource.work()

    # generate sum report
    report.Report.end_time = end_time
    resource_sum_report = report.Report(config.res_dir)
    resource_sum_report.work()
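The long if-chains in Examples #9 and #10 map filename keywords to plot resource classes that all take the same four constructor arguments. A table-driven sketch of that dispatch is shown below; the keyword/class pairs are taken from the examples, but expressing the mapping as data is a refactoring suggestion, not the project's actual structure.

import plot  # the project's plotting module, as used in the examples above

# Hypothetical dispatch table: only a few of the pairs from the examples are
# listed here; the remaining ones follow the same shape.
DISPATCH = [
    ("server_cpu", plot.CPUResource, "server_cpu_types"),
    ("server_memory", plot.MemoryResource, "server_memory_types"),
    ("server_io_rate", plot.IOResource, "server_io_rate_types"),
    ("redis", plot.RedisResource, "redis_types"),
]

def dispatch_datafile(datafile, config, result_prefix):
    # Run every matching resource, mirroring the chain of independent ifs above;
    # the extra process/thread guards from the originals are omitted for brevity.
    for keyword, resource_cls, types_attr in DISPATCH:
        if keyword in datafile:
            resource_cls(getattr(config, types_attr), result_prefix,
                         config.res_dir + "/" + datafile,
                         config.granularity).work()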
Example #11
        ftp.cwd(os.path.basename(self.report_file).split(".html")[0])
        buffer_size = 1024

        file_handler = open(self.package_file, "rb")
        ftp.storbinary("STOR %s" % os.path.basename(self.package_file),
                       file_handler, buffer_size)
        file_handler = open(self.report_file, "rb")
        ftp.storbinary("STOR %s" % os.path.basename(self.report_file),
                       file_handler, buffer_size)
        file_handler = open(self.result_dir + "/" + "An_Report1.html", "rb")
        ftp.storbinary("STOR %s" % "An_Report1.html", file_handler,
                       buffer_size)

        ftp.mkd("An_Report1")
        ftp.cwd("An_Report1")
        for report_file in util.get_dir_files(self.result_dir + "/" +
                                              "An_Report1"):
            file_handler = open(
                self.result_dir + "/" + "An_Report1" + "/" + report_file, "rb")
            ftp.storbinary("STOR %s" % report_file, file_handler, buffer_size)
        ftp.set_debuglevel(0)
        file_handler.close()
        ftp.quit()

    def api_upload(self):
        r = requests.post(self.api_url, json=json.dumps(self.summary_data))
        if r.text == "200":
            print "[INFO]api upload success"
        else:
            print "[ERROR]api upload failed: " + r.text

    def work(self):