Example #1
0
def report():
    """Render the HTML report for one access log selected via ?filename=.

    Returns the rendered report page, or a refusal string when the
    requested filename is missing or looks like a traversal attempt.
    """
    # Create LogParser instance for this report
    logparser = LogParser(gi, KEYWORDS)

    filename = request.args.get("filename")
    # request.args.get returns None when the parameter is absent; the
    # original `"/" in filename` crashed on that.  Also reject path
    # separators and a bare ".." to prevent directory traversal.
    if not filename or "/" in filename or filename == "..":
        return "Go away!"

    path = os.path.join("/home/malyhass/log-parser", filename)
    # Transparently handle gzip-compressed logs; `with` closes the
    # handle, which the original leaked.
    opener = gzip.open if path.endswith(".gz") else open
    with opener(path) as fh:
        logparser.parse_file(fh)

    # Render the SVG map first so its file handle is closed promptly too.
    with open(os.path.join(PROJECT_ROOT, "templates", "map.svg")) as svg:
        map_svg = render_map(svg, logparser.countries)

    return env.get_template("report.html").render({
        "map_svg": map_svg,
        "humanize": humanize.naturalsize,
        # Hit tables sorted by count, most frequent first.
        "keyword_hits": sorted(logparser.d.items(),
                               key=lambda i: i[1], reverse=True),
        "url_hits": sorted(logparser.urls.items(),
                           key=lambda i: i[1], reverse=True),
        "user_bytes": sorted(logparser.user_bytes.items(),
                             key=lambda i: i[1], reverse=True),
    })
Example #2
0
  def parse(self):
    """Worker loop: pull (logfile, harnessType) jobs from the queue and
    parse each with LogParser until an 'exit' sentinel arrives or the
    parent process goes away.
    """
    while True:
      try:
        # Signal 0 only probes the parent process; os.kill raises
        # OSError once the parent is gone.
        os.kill(self.parent_pid, 0)
        logfile, harnessType = self.job_queue.get_nowait()
      except Empty:
        # No job queued yet -- poll again in a few seconds.
        time.sleep(5)
        continue
      except OSError:
        # Parent died; shut this worker down cleanly.
        sys.exit(0)

      self.logger.info('parsing %s' % logfile)
      if logfile == 'exit':
        break
      try:
        lp = LogParser([logfile],
                       es=self.es,
                       es_servers=self.es_servers,
                       includePass=self.include_pass,
                       output_dir=self.output_dir,
                       logger=self.logger,
                       harnessType=harnessType,
                      )
        lp.parseFiles()
      except Exception, inst:
        # Log the failure but keep the worker alive for the next job.
        # (Python 2 except syntax.)
        self.logger.exception(inst)
Example #3
0
def main():
    parser = OptionParser()
    parser.add_option('--harness', dest='harness', action='store',
                      default='mochitest',
                      help='test harness log being parsed. ' +
                           'one of build, mochitest, reftest, jsreftest, crashtest or xpcshell')
    parser.add_option('--logfile', dest='logfile', action='store',
                      default=None,
                      help='path to log file')
    parser.add_option('--revision', dest='commit', action='store',
                      help='repo revision')
    parser.add_option('--autolog', dest='autolog', action='store_true',
                      help='post results to autolog')
    parser.add_option('--testgroup', dest='testgroup', action='store',
                      help='testgroup name for autolog')

    options, args = parser.parse_args()

    # set default log file
    if not options.logfile:
        options.logfile = options.harness + '.log'
    options.logfile = os.path.abspath(options.logfile)

    if options.autolog and not options.commit:
        raise Exception('must specify --revision if --autolog is used')

    if options.autolog and not options.testgroup:
        raise Exception('must specify --testgroup if --autolog is used')

    # parse the logfile, which will give us a nice dict of results
    parser = LogParser([options.logfile], harnessType=options.harness)
    results = parser.parseFiles()
    results['id'] = str(uuid.uuid1())
    print json.dumps(results, indent=2)

    # post the results to autolog
    if options.autolog:
        post_to_autolog(results,
                        options.testgroup,
                        logfile=options.logfile,
                        revision=options.commit,
                        harness=options.harness)
Example #4
0
 def __init__(self, maker):
     """Initialise LaTeX build state: file names, compiler selection,
     helper objects and per-run bookkeeping flags."""
     Depend.__init__(self, maker)
     # Files involved in the compilation -- presumably resolved from the
     # source document later (TODO confirm against the rest of the class).
     self.logfile = None
     self.auxfile = None
     self.srcfile = None
     self.srcbase = None
     self.outfile = None
     # Which compiler drives the build.
     self.program = "latex"
     self.engine = "TeX"
     self.paper = ""
     # Build products and progress flags.
     self.prods = []
     self.must_compile = 0
     self.something_done = 0
     self.failed_module = None
     # Helper objects: file watcher, log parser, module registry,
     # and the TeX source parser.
     self.watcher = Watcher()
     self.log = LogParser()
     self.modules = Modules(self)
     self.parser = TexParser(self)
     self.date = None
     # Is the final output expected?
     self.draft_only = False
     self.draft_support = False
     self.batch = 1
     self.opts = []
Example #5
0
                    action="store_true")
parser.add_argument('--skip-compressed',
                    help="Skip compressed files",
                    action="store_true")
args = parser.parse_args()

# Load the GeoIP database into memory; exit with a distinctive code
# when the database file is missing.  (Python 2 print statements.)
try:
    gi = GeoIP.open(args.geoip, GeoIP.GEOIP_MEMORY_CACHE)
except GeoIP.error:
    print "Failed to open up GeoIP database, it seems %s does not exist!" % os.path.realpath(
        args.geoip)
    exit(254)

import gzip

logparser = LogParser(gi, keywords=("Windows", "Linux", "OS X"))

# Feed every rotated "access.*" log in the target directory to the parser.
for filename in os.listdir(args.path):
    if not filename.startswith("access."):
        continue

    # Transparently open gzip-rotated logs unless asked to skip them.
    if filename.endswith(".gz"):
        if args.skip_compressed:
            continue
        fh = gzip.open(os.path.join(args.path, filename))
    else:
        fh = open(os.path.join(args.path, filename))

    if args.verbose:
        print "Parsing:", filename
Example #6
0
    def process_test_log(self, test_parameters, logfilehandle):
        """Parse a finished test log and, when both an ES server and a
        REST server are configured, submit the results to autolog.

        test_parameters: dict of run settings; keys read here include
            'use_newparser', 'include_pass', 'harness_type', 'es_server',
            'rest_server', 'submit_log', 'total_chunks', 'this_chunk',
            'test_name', 'index', 'tree', 'buildid', 'revision', 'cmdline'.
        logfilehandle: open file object for the log being processed.
        """

        test_log = None
        test_runs = []

        if test_parameters['use_newparser']:
            # Reopen the file so the new parser reads it from the start.
            logfilehandle.close()
            logfilehandle = open(logfilehandle.name)
            try:
                # Turn off verbose logging for the log parser
                logger = logging.getLogger('logparser')
                logger_effectiveLevel = logger.getEffectiveLevel()
                logger.setLevel(logging.WARN)
                test_log = newlogparser.parse_log(logfilehandle)
                test_runs = test_log.convert(test_parameters['include_pass'])
            finally:
                # Restore the previous log level and release the file.
                logger.setLevel(logger_effectiveLevel)
                logfilehandle.close()
        else:
            lp = LogParser([logfilehandle.name],
                           es=False,
                           es_server=None,
                           includePass=True,
                           output_dir=None,
                           logger=self.logger,
                           harnessType=test_parameters['harness_type'])

            # Use logparser's parsers, but do not allow it to
            # submit data directly to elasticsearch.
            test_runs.append(lp.parseFiles())

        # Without both servers there is nowhere to submit the results.
        if test_parameters['es_server'] is None or test_parameters[
                'rest_server'] is None:
            return

        # testgroup must match entry in autolog/js/Config.js:testNames
        # os        must match entry in autolog/js/Config.js:OSNames
        # platform  must match entry in autolog/js/Config.js:OSNames

        logfilename = None
        if test_parameters['submit_log']:
            logfilename = logfilehandle.name

        # e.g. testgroup "<test_name>s-3" when this is chunk 3 of many.
        chunk_descriptor = ''
        if test_parameters['total_chunks'] > 1:
            chunk_descriptor = 's-%d' % test_parameters['this_chunk']

        testgroup_name = '%s%s' % (test_parameters['test_name'],
                                   chunk_descriptor)

        platform_name = self.phone_cfg['machinetype']

        self.loggerdeco.debug('testgroup_name = %s' % testgroup_name)

        testgroup = RESTfulAutologTestGroup(
            index=test_parameters['index'],
            testgroup=testgroup_name,
            os='android',
            platform=platform_name,
            harness=test_parameters['harness_type'],
            server=test_parameters['es_server'],
            restserver=test_parameters['rest_server'],
            machine=self.phone_cfg['phoneid'],
            logfile=logfilename)

        testgroup.set_primary_product(tree=test_parameters['tree'],
                                      buildtype='opt',
                                      buildid=test_parameters['buildid'],
                                      revision=test_parameters['revision'])

        for testdata in test_runs:

            if self.logger.getEffectiveLevel() == logging.DEBUG:
                self.loggerdeco.debug('Begin testdata')
                self.loggerdeco.debug(json.dumps(testdata, indent=4))
                self.loggerdeco.debug('End testdata')

            # Suite-level pass/fail/todo counts.
            testgroup.add_test_suite(testsuite=testgroup_name,
                                     cmdline=test_parameters['cmdline'],
                                     passed=testdata.get('passed', None),
                                     failed=testdata.get('failed', None),
                                     todo=testdata.get('todo', None))

            # One failure entry per failing assertion in each failed test.
            for t in testdata.get('failures', {}):
                test = t["test"]
                for f in t["failures"]:
                    text = f["text"]
                    status = f["status"]
                    testgroup.add_test_failure(test=test,
                                               text=text,
                                               status=status)

            # Submitting passing tests not supported via REST API
            if test_parameters['include_pass']:
                for t in testdata.get('passes', {}):
                    test = t["test"]
                    duration = None
                    if "duration" in t:
                        duration = t["duration"]
                    testgroup.add_test_pass(test=test, duration=duration)

        testgroup.submit()
Example #7
0
def run(manifests, output_dir, args, post_to_autolog=False):
    """Run B2G reftest manifests chunk by chunk, collect each run's log
    into output_dir, and optionally post results to autolog.

    manifests: path to a file listing manifest paths, one per line.
    output_dir: directory to collect per-chunk log files into.
    args: argument list forwarded to runreftestb2g (copied, not mutated).
    post_to_autolog: when True, parse each log and submit it to autolog.
    """
    args = args[:]  # work on a copy; we mutate the argument list below
    log = mozlog.getLogger('REFTEST')

    # Set up chunks in args list.  list.index and int() raise ValueError,
    # and the +1 lookup IndexError, when the flag is absent or malformed;
    # the original bare `except:` also swallowed KeyboardInterrupt.
    try:
        this_index = args.index("--this-chunk")
        this_chunk = int(args[this_index + 1])
        # NOTE(review): total_chunks is taken from --this-chunk here, not
        # from --total-chunks -- preserved as-is, but worth confirming.
        total_chunks = this_chunk
    except (ValueError, IndexError):
        this_index = len(args)
        this_chunk = 1
        args.append("--this-chunk")
        args.append("1")
        try:
            total_index = args.index("--total-chunks")
        except ValueError:
            total_index = len(args)
            args.append("--total-chunks")
            args.append(str(this_chunk))
        total_chunks = int(args[total_index + 1])

    b2g_path = args[args.index("--b2gpath") + 1]
    # symlink reftests so reftest server can serve them
    if not os.path.exists('tests'):
        gecko_path = os.path.join(b2g_path, 'gecko')
        os.symlink(gecko_path, 'tests')

    # Read the mozilla-central revision from the B2G repo manifest; the
    # `with` closes the handle, which the original leaked.
    with open(os.path.join(b2g_path, 'default.xml'), 'r') as default:
        soup = BeautifulSoup(default.read())
    mc = soup.find_all('project', attrs={'name': 'mozilla-central'})[0]
    revision = mc['revision']

    with open(manifests, "r") as manifest_file:
        manifests = manifest_file.readlines()

    args.append('')  # placeholder slot for the per-manifest path
    for manifest in manifests:
        manifest = manifest.strip()
        # Skip blank lines and comments (the original indexed manifest[0]
        # and crashed on blank lines).
        if not manifest or manifest.startswith('#'):
            continue
        manifest_path = os.path.join('tests', 'layout', 'reftests', manifest)
        args[-1] = manifest_path

        for chunk in range(this_chunk, total_chunks + 1):
            args[this_index + 1] = str(chunk)
            log.info("Running with manifest '%s' and chunk '%s' of '%s'" %
                     (manifest_path, chunk, total_chunks))
            ret = runreftestb2g.main(args)
            log.info("Run finished with return value '%s'" % ret)
            sleep(5)

            if os.path.exists('reftest.log'):
                if not os.path.exists(output_dir):
                    os.makedirs(output_dir)
                output_file = manifest.replace('/', '_').replace(
                    '.list', '%s_of_%s.log' % (chunk, total_chunks))
                log_file = os.path.join(output_dir, output_file)
                shutil.move('reftest.log', log_file)

                # send log file to autolog
                if post_to_autolog:
                    parser = LogParser([log_file], harnessType='reftest')
                    results = parser.parseFiles()
                    results['id'] = str(uuid.uuid1())
                    try:
                        autolog.post_to_autolog(results, 'reftests-%s' % chunk,
                                                revision, log_file, 'reftest')
                    except urllib2.HTTPError:
                        # autolog times out sometimes, try again
                        autolog.post_to_autolog(results, 'reftests-%s' % chunk,
                                                revision, log_file, 'reftest')

            else:
                log.error("No reftest.log! :(")

    log.info("Test Runs Completed")