Example #1
0
def report():
    """Render the HTML report for one access-log file.

    Reads the target filename from the query string, parses the
    (optionally gzip-compressed) log and renders report.html with the
    collected statistics.
    """
    # Create LogParser instance for this report
    logparser = LogParser(gi, KEYWORDS)

    # request.args.get() returns None when the parameter is missing;
    # reject that as well as any path containing a separator.
    filename = request.args.get("filename")
    if not filename or "/" in filename:  # Prevent directory traversal attacks
        return "Go away!"

    path = os.path.join("/home/malyhass/log-parser", filename)
    # Close the log file deterministically instead of leaking the handle.
    opener = gzip.open if path.endswith(".gz") else open
    with opener(path) as fh:
        logparser.parse_file(fh)

    # Same for the SVG template consumed by render_map().
    with open(os.path.join(PROJECT_ROOT, "templates", "map.svg")) as svg:
        map_svg = render_map(svg, logparser.countries)

    return env.get_template("report.html").render({
        "map_svg": map_svg,
        "humanize": humanize.naturalsize,
        "keyword_hits":
        sorted(logparser.d.items(), key=lambda i: i[1], reverse=True),
        "url_hits":
        sorted(logparser.urls.items(), key=lambda i: i[1], reverse=True),
        "user_bytes":
        sorted(logparser.user_bytes.items(),
               key=lambda item: item[1],
               reverse=True)
    })
Example #2
0
  def parse(self):
    """Worker loop: pull (logfile, harnessType) jobs off the queue and
    parse each one until an 'exit' sentinel arrives or the parent
    process disappears."""
    while True:
      try:
        # Signal 0 only probes the parent pid; the OSError branch below
        # means the parent is gone and this worker should stop too.
        os.kill(self.parent_pid, 0)
        logfile, harnessType = self.job_queue.get_nowait()
      except Empty:
        # No work queued yet -- poll again in a few seconds.
        time.sleep(5)
        continue
      except OSError:
        # Parent process no longer exists; exit cleanly.
        sys.exit(0)

      self.logger.info('parsing %s' % logfile)
      if logfile == 'exit':
        break
      try:
        lp = LogParser([logfile],
                       es=self.es,
                       es_servers=self.es_servers,
                       includePass=self.include_pass,
                       output_dir=self.output_dir,
                       logger=self.logger,
                       harnessType=harnessType,
                      )
        lp.parseFiles()
      except Exception, inst:
        # Log the failure and keep the worker alive for the next job.
        self.logger.exception(inst)
Example #3
0
    def process_test_log(self, logfilehandle):
        """Parse the unit-test log and record results on self.test_result.

        Closes the caller's write handle, rewrites the log so embedded
        literal "\\n" sequences become real newlines, runs LogParser over
        it, and copies todo/pass/failure data into self.test_result.
        """
        # The handle passed in is finished; everything below operates on
        # self.unittest_logpath directly (the original code re-derived
        # the same path from a closed handle's .name attribute).
        logfilehandle.close()

        # convert embedded \n into real newlines; context managers make
        # sure the handles are closed even if read/write raises.
        with open(self.unittest_logpath) as infile:
            logcontents = infile.read()
        logcontents = re.sub(r'\\n', '\n', logcontents)
        with open(self.unittest_logpath, 'wb') as outfile:
            outfile.write(logcontents)

        lp = LogParser([self.unittest_logpath],
                       includePass=True,
                       output_dir=None,
                       logger=self.loggerdeco,
                       harnessType=self.parms['harness_type'])
        parsed_log = lp.parseFiles()
        if self.options.verbose:
            self.loggerdeco.debug('process_test_log: LogParser parsed log : %s' %
                                  json.dumps(parsed_log, indent=2))

        # Copy parsed counts into the shared result object.
        self.test_result.todo = parsed_log.get('todo', 0)
        self.test_result.passes = parsed_log.get('passes', [])
        failures = parsed_log.get('failures', [])
        if failures:
            for failure in failures:
                for test_failure in failure['failures']:
                    self.test_failure(failure['test'],
                                      test_failure['status'],
                                      test_failure['text'],
                                      PhoneTestResult.TESTFAILED)
        self.loggerdeco.debug('process_test_log: test_result: %s' %
                              json.dumps(self.test_result.__dict__, indent=2))
Example #4
0
def report():
    """Render the HTML report for one access-log file.

    Reads the target filename from the query string, parses the
    (optionally gzip-compressed) log and renders report.html with the
    collected statistics.
    """
    # Create LogParser instance for this report
    logparser = LogParser(gi, KEYWORDS)

    # request.args.get() returns None when the parameter is missing;
    # reject that as well as any path containing a separator.
    filename = request.args.get("filename")
    if not filename or "/" in filename: # Prevent directory traversal attacks
        return "Go away!"

    path = os.path.join("/home/ebarrier/Documents/Python/logs", filename)
    # Close the log file deterministically instead of leaking the handle.
    opener = gzip.open if path.endswith(".gz") else open
    with opener(path) as fh:
        logparser.parse_file(fh)

    # Same for the SVG template consumed by render_map().
    with open(os.path.join(PROJECT_ROOT, "templates", "map.svg")) as svg:
        map_svg = render_map(svg, logparser.countries)

    return env.get_template("report.html").render({
            "map_svg": map_svg,
            "humanize": humanize.naturalsize,
            "keyword_hits": sorted(logparser.d.items(), key=lambda i:i[1], reverse=True),
            "url_hits": sorted(logparser.urls.items(), key=lambda i:i[1], reverse=True),
            "user_bytes": sorted(logparser.user_bytes.items(), key = lambda item:item[1], reverse=True)
        })
Example #5
0
 def __init__(self, maker):
     """Initialize compilation state for a LaTeX document build.

     maker: the dependency manager this node registers with (forwarded
     to Depend.__init__).
     """
     Depend.__init__(self, maker)
     # File paths -- filled in later (see the class's set_source-style
     # setters elsewhere in the file; not visible in this fragment).
     self.logfile = None
     self.auxfile = None
     self.srcfile = None
     self.srcbase = None
     self.outfile = None
     # Compiler executable name and engine label.
     self.program = "latex"
     self.engine = "TeX"
     self.paper = ""
     self.prods = []
     # Flags consumed by the compile/recompile decision logic.
     self.must_compile = 0
     self.something_done = 0
     self.failed_module = None
     self.watcher = Watcher()
     self.log = LogParser()
     self.modules = Modules(self)
     self.parser = TexParser(self)
     self.date = None
     # Is the final output expected?
     self.draft_only = False
     self.draft_support = False
     self.batch = 1
     self.opts = []
Example #6
0
def main():
    """Command-line entry point: parse one test-harness log file and
    optionally post the parsed results to autolog."""
    parser = OptionParser()
    parser.add_option('--harness', dest='harness', action='store',
                      default='mochitest',
                      help='test harness log being parsed. ' +
                           'one of build, mochitest, reftest, jsreftest, crashtest or xpcshell')
    parser.add_option('--logfile', dest='logfile', action='store',
                      default=None,
                      help='path to log file')
    parser.add_option('--revision', dest='commit', action='store',
                      help='repo revision')
    parser.add_option('--autolog', dest='autolog', action='store_true',
                      help='post results to autolog')
    parser.add_option('--testgroup', dest='testgroup', action='store',
                      help='testgroup name for autolog')

    options, args = parser.parse_args()

    # set default log file ("<harness>.log" in the current directory)
    if not options.logfile:
        options.logfile = options.harness + '.log'
    options.logfile = os.path.abspath(options.logfile)

    # --autolog requires both a revision and a testgroup name.
    if options.autolog and not options.commit:
        raise Exception('must specify --revision if --autolog is used')

    if options.autolog and not options.testgroup:
        raise Exception('must specify --testgroup if --autolog is used')

    # parse the logfile, which will give us a nice dict of results
    parser = LogParser([options.logfile], harnessType=options.harness)
    results = parser.parseFiles()
    # Tag the result set with a unique id for submission.
    results['id'] = str(uuid.uuid1())
    print json.dumps(results, indent=2)

    # post the results to autolog
    if options.autolog:
        post_to_autolog(results,
                        options.testgroup,
                        logfile=options.logfile,
                        revision=options.commit,
                        harness=options.harness)
Example #7
0
 def __init__(self, maker):
     """Set up the initial state for a LaTeX compilation node.

     maker: the dependency manager, forwarded to Depend.__init__.
     """
     Depend.__init__(self, maker)
     # File paths; assigned later by the source/format setters (outside
     # this fragment).
     self.logfile = None
     self.auxfile = None
     self.srcfile = None
     self.srcbase = None
     self.outfile = None
     # Default compiler executable and engine label.
     self.program = "latex"
     self.engine = "TeX"
     self.paper = ""
     self.prods = []
     # State flags for the compile/recompile decision logic.
     self.must_compile = 0
     self.something_done = 0
     self.failed_module = None
     self.watcher = Watcher()
     self.log = LogParser()
     self.modules = Modules(self)
     self.parser = TexParser(self)
     self.date = None
     # Is the final output expected?
     self.draft_only = False
     self.draft_support = False
     self.batch = 1
     self.opts = []
Example #8
0
                    action="store_true")
parser.add_argument('--skip-compressed',
                    help="Skip compressed files",
                    action="store_true")
args = parser.parse_args()

# Load the GeoIP database into memory; exit with a distinctive code if
# the database file cannot be opened.
try:
    gi = GeoIP.open(args.geoip, GeoIP.GEOIP_MEMORY_CACHE)
except GeoIP.error:
    print "Failed to open up GeoIP database, it seems %s does not exist!" % os.path.realpath(
        args.geoip)
    exit(254)

# Here we create an instance of the LogParser class
# this object shall contain statistics for one run
logparser = LogParser(gi, keywords=("Windows", "Linux", "OS X"))
file_handles = []

# Walk the log directory and open every access.log* file, optionally
# skipping gzip-compressed rotations. (Fragment: the opened handles are
# consumed further down, outside this view.)
for filename in os.listdir(args.path):
    if not filename.startswith("access.log"):
        continue

    if filename.endswith(".gz"):
        if args.skip_compressed:
            continue
        fh = gzip.open(os.path.join(args.path, filename))
    else:
        fh = open(os.path.join(args.path, filename))

    if args.verbose:
        print "Parsing:", filename
Example #9
0
    help="Resolve IP-s to country codes", default="/usr/share/GeoIP/GeoIP.dat")
parser.add_argument('--verbose',
    help="Increase verbosity", action="store_true")
parser.add_argument('--skip-compressed',
    help="Skip compressed files", action="store_true")
args = parser.parse_args()

# Load the GeoIP database into memory; exit with a distinctive code if
# the database file cannot be opened.
try:
    gi = GeoIP.open(args.geoip, GeoIP.GEOIP_MEMORY_CACHE)
except GeoIP.error:
    print "Failed to open up GeoIP database, it seems %s does not exist!" % os.path.realpath(args.geoip)
    exit(254)

# NOTE(review): mid-file import; conventionally this belongs at the top
# of the file with the other imports.
import gzip

logparser = LogParser(gi, keywords = ("Windows", "Linux", "OS X"))

# Walk the log directory and open every access.* file, optionally
# skipping gzip-compressed rotations. (Fragment: the opened handles are
# consumed further down, outside this view.)
for filename in os.listdir(args.path):
    if not filename.startswith("access."):
        continue

    if filename.endswith(".gz"):
        if args.skip_compressed:
            continue
        fh = gzip.open(os.path.join(args.path, filename))
    else:
        fh = open(os.path.join(args.path, filename))

    if args.verbose:
        print "Parsing:", filename
Example #10
0
    def process_test_log(self, test_parameters, logfilehandle):
        """Parse a finished test log and submit results to autolog.

        Chooses between the newer streaming parser and the legacy
        LogParser based on test_parameters['use_newparser'], then --
        when both an ES server and a REST server are configured --
        submits the parsed runs via RESTfulAutologTestGroup.
        """

        test_log = None
        test_runs = []

        if test_parameters['use_newparser']:
            # Reopen the log read-only; the caller's write handle is done.
            logfilehandle.close()
            logfilehandle = open(logfilehandle.name)
            try:
                # Turn off verbose logging for the log parser
                logger = logging.getLogger('logparser')
                logger_effectiveLevel = logger.getEffectiveLevel()
                logger.setLevel(logging.WARN)
                test_log = newlogparser.parse_log(logfilehandle)
                test_runs = test_log.convert(test_parameters['include_pass'])
            finally:
                # Always restore the logger level and release the handle.
                logger.setLevel(logger_effectiveLevel)
                logfilehandle.close()
        else:
            lp = LogParser([logfilehandle.name],
                           es=False,
                           es_server=None,
                           includePass=True,
                           output_dir=None,
                           logger=self.logger,
                           harnessType=test_parameters['harness_type'])

            # Use logparser's parsers, but do not allow it to
            # submit data directly to elasticsearch.
            test_runs.append(lp.parseFiles())

        # Without both servers there is nowhere to submit results.
        if test_parameters['es_server'] is None or test_parameters[
                'rest_server'] is None:
            return

        # testgroup must match entry in autolog/js/Config.js:testNames
        # os        must match entry in autolog/js/Config.js:OSNames
        # platform  must match entry in autolog/js/Config.js:OSNames

        logfilename = None
        if test_parameters['submit_log']:
            logfilename = logfilehandle.name

        # Chunked runs get an "s-<n>" suffix so each chunk reports as
        # its own testgroup.
        chunk_descriptor = ''
        if test_parameters['total_chunks'] > 1:
            chunk_descriptor = 's-%d' % test_parameters['this_chunk']

        testgroup_name = '%s%s' % (test_parameters['test_name'],
                                   chunk_descriptor)

        platform_name = self.phone_cfg['machinetype']

        self.loggerdeco.debug('testgroup_name = %s' % testgroup_name)

        testgroup = RESTfulAutologTestGroup(
            index=test_parameters['index'],
            testgroup=testgroup_name,
            os='android',
            platform=platform_name,
            harness=test_parameters['harness_type'],
            server=test_parameters['es_server'],
            restserver=test_parameters['rest_server'],
            machine=self.phone_cfg['phoneid'],
            logfile=logfilename)

        testgroup.set_primary_product(tree=test_parameters['tree'],
                                      buildtype='opt',
                                      buildid=test_parameters['buildid'],
                                      revision=test_parameters['revision'])

        for testdata in test_runs:

            if self.logger.getEffectiveLevel() == logging.DEBUG:
                self.loggerdeco.debug('Begin testdata')
                self.loggerdeco.debug(json.dumps(testdata, indent=4))
                self.loggerdeco.debug('End testdata')

            testgroup.add_test_suite(testsuite=testgroup_name,
                                     cmdline=test_parameters['cmdline'],
                                     passed=testdata.get('passed', None),
                                     failed=testdata.get('failed', None),
                                     todo=testdata.get('todo', None))

            # Each failure entry carries one test name with one or more
            # individual failure records.
            for t in testdata.get('failures', {}):
                test = t["test"]
                for f in t["failures"]:
                    text = f["text"]
                    status = f["status"]
                    testgroup.add_test_failure(test=test,
                                               text=text,
                                               status=status)

            # Submitting passing tests not supported via REST API
            if test_parameters['include_pass']:
                for t in testdata.get('passes', {}):
                    test = t["test"]
                    duration = None
                    if "duration" in t:
                        duration = t["duration"]
                    testgroup.add_test_pass(test=test, duration=duration)

        testgroup.submit()
Example #11
0
def run(manifests, output_dir, args, post_to_autolog=False):
    """Run reftest manifests chunk by chunk on B2G and collect logs.

    manifests: path to a file listing reftest manifests, one per line.
    output_dir: directory where per-chunk log files are moved.
    args: argv-style list forwarded to runreftestb2g (a copy is mutated).
    post_to_autolog: when True, parsed results are posted to autolog.
    """
    args = args[:]
    log = mozlog.getLogger('REFTEST')

    # set up chunks in args list
    try:
        this_index = args.index("--this-chunk")
        this_chunk = int(args[this_index + 1])
        # NOTE(review): total_chunks mirrors --this-chunk here rather
        # than reading the --total-chunks value -- confirm intentional.
        total_chunks = this_chunk
    except:
        # NOTE(review): bare except -- ValueError (flag absent) is the
        # expected case, but unrelated errors are also swallowed.
        this_index = len(args)
        this_chunk = 1
        args.append("--this-chunk")
        args.append("1")
        try:
            total_index = args.index("--total-chunks")
        except:
            total_index = len(args)
            args.append("--total-chunks")
            args.append(str(this_chunk))
        total_chunks = int(args[total_index + 1])

    b2g_path = args[args.index("--b2gpath") + 1]
    # symlink reftests so reftest server can serve them
    if not os.path.exists('tests'):
        gecko_path = os.path.join(b2g_path, 'gecko')
        os.symlink(gecko_path, 'tests')

    # get revision of mozilla-central from the B2G repo manifest
    default = open(os.path.join(b2g_path, 'default.xml'), 'r')
    soup = BeautifulSoup(default.read())
    mc = soup.find_all('project', attrs={'name': 'mozilla-central'})[0]
    revision = mc['revision']

    with open(manifests, "r") as manifest_file:
        manifests = manifest_file.readlines()

    # The last args slot is rewritten with each manifest path below.
    args.append('')
    for manifest in manifests:
        manifest = manifest.strip()
        # NOTE(review): a blank line in the manifest list would raise
        # IndexError here.
        if manifest[0] == '#':
            continue
        manifest_path = os.path.join('tests', 'layout', 'reftests', manifest)
        args[-1] = manifest_path

        for chunk in range(this_chunk, total_chunks + 1):
            args[this_index + 1] = str(chunk)
            log.info("Running with manifest '%s' and chunk '%s' of '%s'" %
                     (manifest_path, chunk, total_chunks))
            ret = runreftestb2g.main(args)
            log.info("Run finished with return value '%s'" % ret)
            sleep(5)

            if os.path.exists('reftest.log'):
                if not os.path.exists(output_dir):
                    os.makedirs(output_dir)
                # e.g. foo/bar.list -> foo_bar<chunk>_of_<total>.log
                output_file = manifest.replace('/', '_').replace(
                    '.list', '%s_of_%s.log' % (chunk, total_chunks))
                log_file = os.path.join(output_dir, output_file)
                shutil.move('reftest.log', log_file)

                # send log file to autolog
                if post_to_autolog:
                    parser = LogParser([log_file], harnessType='reftest')
                    results = parser.parseFiles()
                    results['id'] = str(uuid.uuid1())
                    try:
                        autolog.post_to_autolog(results, 'reftests-%s' % chunk,
                                                revision, log_file, 'reftest')
                    except urllib2.HTTPError:
                        # autolog times out sometimes, try again
                        autolog.post_to_autolog(results, 'reftests-%s' % chunk,
                                                revision, log_file, 'reftest')

            else:
                log.error("No reftest.log! :(")

    log.info("Test Runs Completed")
Example #12
0
def run(manifests, output_dir, args, post_to_autolog=False):
    """Run reftest manifests chunk by chunk on B2G and collect logs.

    manifests: path to a file listing reftest manifests, one per line.
    output_dir: directory where per-chunk log files are moved.
    args: argv-style list forwarded to runreftestb2g (a copy is mutated).
    post_to_autolog: when True, parsed results are posted to autolog.
    """
    args = args[:]
    log = mozlog.getLogger('REFTEST')

    # set up chunks in args list
    try:
        this_index = args.index("--this-chunk")
        this_chunk = int(args[this_index+1])
        # NOTE(review): total_chunks mirrors --this-chunk here rather
        # than reading the --total-chunks value -- confirm intentional.
        total_chunks = this_chunk
    except:
        # NOTE(review): bare except -- ValueError (flag absent) is the
        # expected case, but unrelated errors are also swallowed.
        this_index = len(args)
        this_chunk = 1
        args.append("--this-chunk")
        args.append("1")
        try:
            total_index = args.index("--total-chunks")
        except:
            total_index = len(args)
            args.append("--total-chunks")
            args.append(str(this_chunk))
        total_chunks = int(args[total_index+1])

    b2g_path = args[args.index("--b2gpath")+1]
    # symlink reftests so reftest server can serve them
    if not os.path.exists('tests'):
        gecko_path = os.path.join(b2g_path, 'gecko')
        os.symlink(gecko_path, 'tests')

    # get revision of mozilla-central from the B2G repo manifest
    default = open(os.path.join(b2g_path, 'default.xml'), 'r')
    soup = BeautifulSoup(default.read())
    mc = soup.find_all('project', attrs={'name':'mozilla-central'})[0]
    revision = mc['revision']

    with open(manifests, "r") as manifest_file:
        manifests = manifest_file.readlines()

    # The last args slot is rewritten with each manifest path below.
    args.append('')
    for manifest in manifests:
        manifest = manifest.strip()
        # NOTE(review): a blank line in the manifest list would raise
        # IndexError here.
        if manifest[0] == '#':
            continue
        manifest_path = os.path.join('tests', 'layout', 'reftests', manifest)
        args[-1] = manifest_path

        for chunk in range(this_chunk, total_chunks + 1):
            args[this_index + 1] = str(chunk)
            log.info("Running with manifest '%s' and chunk '%s' of '%s'" % (manifest_path, chunk, total_chunks))
            ret = runreftestb2g.main(args)
            log.info("Run finished with return value '%s'" % ret)
            sleep(5)

            if os.path.exists('reftest.log'):
                if not os.path.exists(output_dir):
                    os.makedirs(output_dir)
                # e.g. foo/bar.list -> foo_bar<chunk>_of_<total>.log
                output_file = manifest.replace('/', '_').replace('.list', '%s_of_%s.log' % (chunk, total_chunks))
                log_file = os.path.join(output_dir, output_file)
                shutil.move('reftest.log', log_file)

                # send log file to autolog
                if post_to_autolog:
                    parser = LogParser([log_file], harnessType='reftest')
                    results = parser.parseFiles()
                    results['id'] = str(uuid.uuid1())
                    try:
                        autolog.post_to_autolog(results,
                                                'reftests-%s' % chunk,
                                                revision,
                                                log_file,
                                                'reftest')
                    except urllib2.HTTPError:
                        # autolog times out sometimes, try again
                        autolog.post_to_autolog(results,
                                                'reftests-%s' % chunk,
                                                revision,
                                                log_file,
                                                'reftest')

            else:
                log.error("No reftest.log! :(")

    log.info("Test Runs Completed")
Example #13
0
class Latex(Depend):
    """Dependency node that drives the LaTeX compile/recompile cycle.

    Decides whether a (re)run of the TeX engine is needed by inspecting
    the output file, the TeX log (via LogParser) and watched auxiliary
    files; runs package modules' pre/post/last-compile hooks; and
    exposes the errors collected during the run.
    """

    def __init__(self, maker):
        Depend.__init__(self, maker)
        # File paths, filled in by set_source()/set_format().
        self.logfile = None
        self.auxfile = None
        self.srcfile = None
        self.srcbase = None
        self.outfile = None
        self.program = "latex"
        self.engine = "TeX"
        self.paper = ""
        self.prods = []
        # Flags consumed by compile_needed()/recompile_needed().
        self.must_compile = 0
        self.something_done = 0
        self.failed_module = None
        self.watcher = Watcher()
        self.log = LogParser()
        self.modules = Modules(self)
        self.parser = TexParser(self)
        self.date = None
        # Is the final output expected?
        self.draft_only = False
        self.draft_support = False
        self.batch = 1
        self.opts = []

    def reinit(self):
        # Restart with a clean module set, parser and logger
        self.__init__(self.env)

    def set_source(self, input):
        """Set the main source file and derive log/aux file names."""
        self.srcfile = os.path.realpath(input)
        self.srcbase = os.path.splitext(self.srcfile)[0]
        # Alias kept in sync with srcbase; remove_suffixes() reads it.
        self.src_base = self.srcbase
        self.logfile = self.srcbase + ".log"
        self.auxfile = self.srcbase + ".aux"
        self.set_format("dvi")

    def set_format(self, format):
        """Select the output format and register it as the product."""
        self.outfile = self.srcbase + "." + format
        self.prods = [self.outfile]

    def compile_needed (self):
        """
        Returns true if a first compilation is needed. This method supposes
        that no compilation was done (by the script) yet.
        """
        if self.must_compile:
            return 1
        msg.log(_("checking if compiling is necessary..."))
        if not self.draft_support and not os.path.exists(self.outfile):
            msg.debug(_("the output file doesn't exist"))
            return 1
        if not os.path.exists(self.logfile):
            msg.debug(_("the log file does not exist"))
            return 1
        if (not self.draft_support and
            (os.path.getmtime(self.outfile) < os.path.getmtime(self.srcfile))):
            msg.debug(_("the source is younger than the output file"))
            return 1
        if self.log.read(self.logfile):
            msg.debug(_("the log file is not produced by TeX"))
            return 1
        return self.recompile_needed()

    def recompile_needed (self):
        """
        Returns true if another compilation is needed. This method is used
        when a compilation has already been done.
        """
        changed = self.watcher.update()
        if self.must_compile:
            return 1
        if self.log.errors():
            msg.debug(_("last compilation failed"))
            return 1
#        if self.deps_modified(os.path.getmtime(self.outfile)):
#            msg.debug(_("dependencies were modified"))
#            return 1
        if changed and (len(changed) > 1 or changed[0] != self.auxfile):
            msg.debug(_("the %s file has changed") % changed[0])
            return 1
        if self.log.run_needed():
            msg.debug(_("LaTeX asks to run again"))
            if (not(changed)):
                msg.debug(_("but the aux files are unchanged"))
                return 0
            return 1
        if changed:
            msg.debug(_("the %s file has changed but no re-run required?") \
                      % changed[0])
            if self.program == "xelatex":
                msg.debug(_("force recompilation (XeTeX engine)"))
                return 1

        msg.debug(_("no new compilation is needed"))
        return 0

    def prepare(self, exclude_mods=None):
        """
        Prepare the compilation by parsing the source file. The parsing
        loads all the necessary modules required by the packages used, etc.
        """
        f = open(self.srcfile)
        self.parser.parse(f, exclude_mods=exclude_mods)
        f.close()

    def force_run(self):
        # Unconditional run; see run().
        self.run(force=1)

    def run(self, force=0):
        """
        Run the building process until the last compilation, or stop on error.
        This method supposes that the inputs were parsed to register packages
        and that the LaTeX source is ready. If the second (optional) argument
        is true, then at least one compilation is done. As specified by the
        class Depend, the method returns 0 on success and 1 on failure.
        """
        if self.pre_compile(force):
            return 1

        # If an error occurs after this point, it will be while LaTeXing.
        self.failed_dep = self
        self.failed_module = None

        if self.batch:
            self.opts.append("-interaction=batchmode")

        need_compile = force or self.compile_needed()
        while need_compile:
            if self.compile(): return 1
            if self.post_compile(): return 1
            need_compile = self.recompile_needed()

        # Finally there was no error.
        self.failed_dep = None

        if self.last_compile():
            return 1

        if self.something_done:
            self.date = int(time.time())
        return 0

    def pre_compile(self, force):
        """
        Prepare the source for compilation using package-specific functions.
        This function must return true on failure. This function sets
        `must_compile' to 1 if we already know that a compilation is needed,
        because it may avoid some unnecessary preprocessing (e.g. BibTeXing).
        """
        # Watch for the changes of these working files
        for ext in ("aux", "toc", "lot", "lof"):
            self.watcher.watch(self.srcbase + "." + ext)

        msg.log(_("building additional files..."))
        for mod in self.modules.objects.values():
            if mod.pre_compile():
                self.failed_module = mod
                return 1
        return 0

    def post_compile(self):
        """
        Run the package-specific operations that are to be performed after
        each compilation of the main source. Returns true on failure.
        """
        msg.log(_("running post-compilation scripts..."))

        for mod in self.modules.objects.values():
            if mod.post_compile():
                self.failed_module = mod
                return 1
        return 0

    def last_compile(self):
        """
        Run the module-specific operations that are to be performed after
        the last compilation of the main source. Returns true on failure.
        """
        msg.log(_("running last-compilation scripts..."))

        for mod in self.modules.objects.values():
            if mod.last_compile():
                self.failed_module = mod
                return 1
        return 0

    def compile(self):
        """Invoke the TeX engine once; return nonzero on failure."""
        self.must_compile = 0
        cmd = [self.program] + self.opts + [os.path.basename(self.srcfile)]
        msg.log(" ".join(cmd))
        rc = subprocess.call(cmd, stdout=msg.stdout)
        if rc != 0:
            msg.error(_("%s failed") % self.program)
        # Whatever the result is, read the log file
        if self.log.read(self.logfile):
            msg.error(_("Could not run %s.") % self.program)
            return 1
        if self.log.errors():
            return 1
        return rc

    def clean(self):
        """
        Remove all files that are produced by compilation.
        """
        self.remove_suffixes([".log", ".aux", ".toc", ".lof", ".lot",
                              ".out", ".glo", ".cb"])

        msg.log(_("cleaning additional files..."))
        # for dep in self.sources.values():
        #     dep.clean()

        for mod in self.modules.objects.values():
            mod.clean()

    def remove_suffixes (self, list):
        """
        Remove all files derived from the main source with one of the
        specified suffixes.
        """
        for suffix in list:
            file = self.src_base + suffix
            if os.path.exists(file):
                msg.log(_("removing %s") % file)
                os.unlink(file)

    def get_errors (self):
        # Report the module's errors if one failed, else the log's.
        if not(self.failed_module):
            return self.log.get_errors()
        else:
            return self.failed_module.get_errors()

    def print_misschars(self):
        """
        Sort the characters not handled by the selected font,
        and print them as a warning.
        """
        missed_chars = []
        for c in self.log.get_misschars():
            missed_chars.append((c["uchar"], c["font"]))
        # Strip redundant missed chars
        missed_chars = list(set(missed_chars))
        missed_chars.sort()
        for m in missed_chars:
            # The log file is encoded in UTF8 (xetex) or in latin1 (pdftex)
            try:
                uchar = m[0].decode("utf8")
            except:
                uchar = m[0].decode("latin1")
            # Check we have a real char (e.g. not something like '^^a3')
            if len(uchar) == 1:
                msg.warn("Character U+%X (%s) not in font '%s'" % \
                         (ord(uchar), m[0], m[1]))
            else:
                msg.warn("Character '%s' not in font '%s'" % (m[0], m[1]))
Example #14
0
def deligateParserObject(input_dir_day,tmp_dir,log_dir,date_str,hour):
    """Delegate to a LogParser that processes one hour of HA log files."""
    hourly_parser = LogParser(input_dir_day, tmp_dir, log_dir, date_str, hour)
    hourly_parser.handleHaLogsByHour()
Example #15
0
                    action="store_true")
parser.add_argument('--skip-compressed',
                    help="Skip compressed files",
                    action="store_true")
args = parser.parse_args()

# Load the GeoIP database into memory; exit with a distinctive code if
# the database file cannot be opened.
try:
    gi = GeoIP.open(args.geoip, GeoIP.GEOIP_MEMORY_CACHE)
except GeoIP.error:
    print "Failed to open up GeoIP database, it seems %s does not exist!" % os.path.realpath(
        args.geoip)
    exit(254)

# NOTE(review): mid-file import; conventionally this belongs at the top
# of the file with the other imports.
import gzip

logparser = LogParser(gi, keywords=("Windows", "Linux", "OS X"))

# Walk the log directory and open every access.* file, optionally
# skipping gzip-compressed rotations. (Fragment: the opened handles are
# consumed further down, outside this view.)
for filename in os.listdir(args.path):
    if not filename.startswith("access."):
        continue

    if filename.endswith(".gz"):
        if args.skip_compressed:
            continue
        fh = gzip.open(os.path.join(args.path, filename))
    else:
        fh = open(os.path.join(args.path, filename))

    if args.verbose:
        print "Parsing:", filename
Example #16
0
    def process_test_log(self, test_parameters, logfilehandle):
        """Parse a finished test log and submit results to autolog.

        Chooses between the newer streaming parser and the legacy
        LogParser based on test_parameters['use_newparser'], then --
        when both an ES server and a REST server are configured --
        submits the parsed runs via RESTfulAutologTestGroup.
        """

        test_log = None
        test_runs = []

        if test_parameters['use_newparser']:
            # Reopen the log read-only; the caller's write handle is done.
            logfilehandle.close()
            logfilehandle = open(logfilehandle.name)
            try:
                # Turn off verbose logging for the log parser
                logger = logging.getLogger('logparser')
                logger_effectiveLevel = logger.getEffectiveLevel()
                logger.setLevel(logging.WARN)
                test_log = newlogparser.parse_log(logfilehandle)
                test_runs = test_log.convert(test_parameters['include_pass'])
            finally:
                # Always restore the logger level and release the handle.
                logger.setLevel(logger_effectiveLevel)
                logfilehandle.close()
        else:
            lp = LogParser([logfilehandle.name],
                           es=False,
                           es_server=None,
                           includePass=True,
                           output_dir=None,
                           logger=self.logger,
                           harnessType=test_parameters['harness_type'])

            # Use logparser's parsers, but do not allow it to
            # submit data directly to elasticsearch.
            test_runs.append(lp.parseFiles())

        # Without both servers there is nowhere to submit results.
        if test_parameters['es_server'] is None or test_parameters['rest_server'] is None:
            return

        # testgroup must match entry in autolog/js/Config.js:testNames
        # os        must match entry in autolog/js/Config.js:OSNames
        # platform  must match entry in autolog/js/Config.js:OSNames

        logfilename = None
        if test_parameters['submit_log']:
            logfilename = logfilehandle.name

        # Chunked runs get an "s-<n>" suffix so each chunk reports as
        # its own testgroup.
        chunk_descriptor = ''
        if test_parameters['total_chunks'] > 1:
            chunk_descriptor = 's-%d' % test_parameters['this_chunk']

        testgroup_name = '%s%s' % (test_parameters['test_name'],
                                   chunk_descriptor)

        platform_name = self.phone_cfg['machinetype']

        self.loggerdeco.debug('testgroup_name = %s' % testgroup_name)

        testgroup = RESTfulAutologTestGroup(
            index=test_parameters['index'],
            testgroup=testgroup_name,
            os='android',
            platform=platform_name,
            harness=test_parameters['harness_type'],
            server=test_parameters['es_server'],
            restserver=test_parameters['rest_server'],
            machine=self.phone_cfg['phoneid'],
            logfile=logfilename)

        testgroup.set_primary_product(
            tree=test_parameters['tree'],
            buildtype='opt',
            buildid=test_parameters['buildid'],
            revision=test_parameters['revision'])

        for testdata in test_runs:

            if self.logger.getEffectiveLevel() == logging.DEBUG:
                self.loggerdeco.debug('Begin testdata')
                self.loggerdeco.debug(json.dumps(testdata, indent=4))
                self.loggerdeco.debug('End testdata')

            testgroup.add_test_suite(
                testsuite=testgroup_name,
                cmdline=test_parameters['cmdline'],
                passed=testdata.get('passed', None),
                failed=testdata.get('failed', None),
                todo=testdata.get('todo', None))

            # Each failure entry carries one test name with one or more
            # individual failure records.
            for t in testdata.get('failures', {}):
                test = t["test"]
                for f in t["failures"]:
                    text = f["text"]
                    status = f["status"]
                    testgroup.add_test_failure(test=test,
                                               text=text,
                                               status=status)

            # Submitting passing tests not supported via REST API
            if test_parameters['include_pass']:
                for t in testdata.get('passes', {}):
                    test = t["test"]
                    duration = None
                    if "duration" in t:
                        duration = t["duration"]
                    testgroup.add_test_pass(test=test,
                                            duration=duration)

        testgroup.submit()
Example #17
0
class Latex(Depend):
    def __init__(self, maker):
        """
        Initialize the LaTeX compilation node.

        `maker' is the build driver this node belongs to; it is forwarded
        to the Depend base class constructor.
        """
        Depend.__init__(self, maker)
        # Paths derived from the main source; filled in by set_source().
        self.logfile = None
        self.auxfile = None
        self.srcfile = None
        self.srcbase = None
        self.outfile = None
        # Compiler configuration.
        self.program = "latex"  # executable invoked by compile()
        self.engine = "TeX"
        self.paper = ""
        self.prods = []  # files produced by the compilation (see set_format)
        self.must_compile = 0  # set to 1 when a compilation is known to be needed
        self.something_done = 0  # set when some build step actually ran
        self.failed_module = None  # module whose pre/post/last hook failed, if any
        self.watcher = Watcher()  # presumably tracks changes of aux/toc/... files — see recompile_needed()
        self.log = LogParser()  # parses the TeX log file
        self.modules = Modules(self)  # package-specific helper modules
        self.parser = TexParser(self)  # parses the LaTeX source
        self.date = None  # timestamp of the last successful run (see run())
        # Is the final output expected?
        self.draft_only = False
        self.draft_support = False
        self.batch = 1  # when true, pass -interaction=batchmode to TeX
        self.opts = []  # extra command-line options for the compiler

    def reinit(self):
        """Reset this node to a pristine state.

        Re-runs the constructor so that the module set, the source parser
        and the log parser are all recreated from scratch.
        """
        # Equivalent to self.__init__(self.env); __class__ keeps the
        # dynamic dispatch for subclasses.
        self.__class__.__init__(self, self.env)

    def set_source(self, input):
        """Register the main LaTeX source file and derive related paths.

        Resolves the path, records the extension-less base name (under two
        attribute names used by different parts of the code), and sets the
        default output format to DVI.
        """
        source = os.path.realpath(input)
        base = os.path.splitext(source)[0]
        self.srcfile = source
        self.srcbase = base
        self.src_base = base
        self.logfile = "%s.log" % base
        self.auxfile = "%s.aux" % base
        self.set_format("dvi")

    def set_format(self, format):
        """Declare the expected output format (e.g. "dvi" or "pdf").

        Updates the output file name and the list of produced files.
        """
        self.outfile = "%s.%s" % (self.srcbase, format)
        self.prods = [self.outfile]

    def compile_needed(self):
        """
        Returns true if a first compilation is needed. This method supposes
        that no compilation was done (by the script) yet.

        The checks run in a fixed order and `self.log.read' is called as a
        side effect, so the evaluation order matters.
        """
        if self.must_compile:
            return 1
        msg.log(_("checking if compiling is necessary..."))
        # No output file yet (unless only a draft is produced, in which
        # case the final output is not expected to exist).
        if not self.draft_support and not os.path.exists(self.outfile):
            msg.debug(_("the output file doesn't exist"))
            return 1
        if not os.path.exists(self.logfile):
            msg.debug(_("the log file does not exist"))
            return 1
        # The output is older than the source: the source was edited since.
        if (not self.draft_support and
            (os.path.getmtime(self.outfile) < os.path.getmtime(self.srcfile))):
            msg.debug(_("the source is younger than the output file"))
            return 1
        # read() returning truthy means the file is not a TeX log;
        # presumably it also caches the parsed log for the checks done in
        # recompile_needed() below — confirm in LogParser.
        if self.log.read(self.logfile):
            msg.debug(_("the log file is not produced by TeX"))
            return 1
        return self.recompile_needed()

    def recompile_needed(self):
        """
        Returns true if another compilation is needed. This method is used
        when a compilation has already been done.
        """
        # NOTE(review): update() presumably returns the list of watched
        # files that changed since the previous call and refreshes the
        # recorded state — confirm in Watcher. It is deliberately called
        # exactly once per check.
        changed = self.watcher.update()
        if self.must_compile:
            return 1
        if self.log.errors():
            msg.debug(_("last compilation failed"))
            return 1
#        if self.deps_modified(os.path.getmtime(self.outfile)):
#            msg.debug(_("dependencies were modified"))
#            return 1
        # Any changed watched file other than the aux file forces a rerun.
        if changed and (len(changed) > 1 or changed[0] != self.auxfile):
            msg.debug(_("the %s file has changed") % changed[0])
            return 1
        if self.log.run_needed():
            msg.debug(_("LaTeX asks to run again"))
            if (not (changed)):
                # Rerun requested but nothing actually changed: stop here
                # rather than loop forever on a stable "rerun" message.
                msg.debug(_("but the aux files are unchanged"))
                return 0
            return 1
        if changed:
            msg.debug(_("the %s file has changed but no re-run required?") \
                      % changed[0])
            if self.program == "xelatex":
                # Be conservative with the XeTeX engine and recompile anyway.
                msg.debug(_("force recompilation (XeTeX engine)"))
                return 1

        msg.debug(_("no new compilation is needed"))
        return 0

    def prepare(self, exclude_mods=None):
        """
        Prepare the compilation by parsing the source file. The parsing
        loads all the necessary modules required by the packages used, etc.

        `exclude_mods' is forwarded to the source parser. The file handle
        is now closed even when parsing raises (the previous code leaked it
        on error because close() was only reached on success).
        """
        with open(self.srcfile) as f:
            self.parser.parse(f, exclude_mods=exclude_mods)

    def force_run(self):
        """Build unconditionally: perform at least one compilation."""
        self.run(1)

    def run(self, force=0):
        """
        Run the building process until the last compilation, or stop on error.
        This method supposes that the inputs were parsed to register packages
        and that the LaTeX source is ready. If the second (optional) argument
        is true, then at least one compilation is done. As specified by the
        class Depend, the method returns 0 on success and 1 on failure.
        """
        if self.pre_compile(force):
            return 1

        # If an error occurs after this point, it will be while LaTeXing.
        self.failed_dep = self
        self.failed_module = None

        # Guard against accumulating duplicate flags when run() is invoked
        # more than once on the same instance (the previous code appended
        # "-interaction=batchmode" unconditionally on every call).
        if self.batch and "-interaction=batchmode" not in self.opts:
            self.opts.append("-interaction=batchmode")

        need_compile = force or self.compile_needed()
        while need_compile:
            if self.compile(): return 1
            if self.post_compile(): return 1
            need_compile = self.recompile_needed()

        # Finally there was no error.
        self.failed_dep = None

        if self.last_compile():
            return 1

        if self.something_done:
            self.date = int(time.time())
        return 0

    def pre_compile(self, force):
        """
        Prepare the source for compilation using package-specific functions.
        This function must return true on failure. It may set `must_compile'
        to 1 if a compilation is already known to be needed, which can avoid
        some unnecessary preprocessing (e.g. BibTeXing).
        """
        # Register the working files whose changes trigger recompilation.
        for extension in ("aux", "toc", "lot", "lof"):
            self.watcher.watch("%s.%s" % (self.srcbase, extension))

        msg.log(_("building additional files..."))
        for module in self.modules.objects.values():
            if module.pre_compile():
                # Remember which module failed so get_errors() can report it.
                self.failed_module = module
                return 1
        return 0

    def post_compile(self):
        """
        Run the package-specific operations that are to be performed after
        each compilation of the main source. Returns true on failure.
        """
        msg.log(_("running post-compilation scripts..."))

        for module in self.modules.objects.values():
            if module.post_compile():
                # Remember which module failed so get_errors() can report it.
                self.failed_module = module
                return 1
        return 0

    def last_compile(self):
        """
        Run the module-specific operations that are to be performed after
        the last compilation of the main source. Returns true on failure.
        """
        msg.log(_("running last-compilation scripts..."))

        for module in self.modules.objects.values():
            if module.last_compile():
                # Remember which module failed so get_errors() can report it.
                self.failed_module = module
                return 1
        return 0

    def compile(self):
        """
        Run one compilation of the main source with the configured program
        and options. Returns 0 on success and a non-zero value on failure;
        the log file is parsed in both cases.
        """
        self.must_compile = 0
        command = [self.program] + self.opts + [os.path.basename(self.srcfile)]
        msg.log(" ".join(command))
        status = subprocess.call(command, stdout=msg.stdout)
        if status != 0:
            msg.error(_("%s failed") % self.program)
        # Whatever the result is, read the log file
        if self.log.read(self.logfile):
            msg.error(_("Could not run %s.") % self.program)
            return 1
        if self.log.errors():
            return 1
        return status

    def clean(self):
        """
        Remove all files that are produced by compilation.
        """
        self.remove_suffixes(
            [".log", ".aux", ".toc", ".lof", ".lot", ".out", ".glo", ".cb"])

        msg.log(_("cleaning additional files..."))
        # NOTE: per-dependency cleanup (iterating self.sources) is
        # intentionally disabled; only module-specific cleanup runs.
        for module in self.modules.objects.values():
            module.clean()

    def remove_suffixes(self, list):
        """
        Remove all files derived from the main source with one of the
        specified suffixes.
        """
        # (The parameter name shadows the builtin `list' but is kept for
        # interface compatibility with existing callers.)
        candidates = [self.src_base + suffix for suffix in list]
        for path in candidates:
            if os.path.exists(path):
                msg.log(_("removing %s") % path)
                os.unlink(path)

    def get_errors(self):
        """Return the error records of the failed module if one is set,
        otherwise those collected by the log parser."""
        if self.failed_module:
            return self.failed_module.get_errors()
        return self.log.get_errors()

    def print_misschars(self):
        """
        Sort the characters not handled by the selected font,
        and print them as a warning.

        Duplicate (char, font) pairs are reported only once.
        """
        # Deduplicate and sort in one pass.
        missed_chars = sorted(set(
            (c["uchar"], c["font"]) for c in self.log.get_misschars()))
        for m in missed_chars:
            # The log file is encoded in UTF8 (xetex) or in latin1 (pdftex)
            try:
                uchar = m[0].decode("utf8")
            except UnicodeError:
                # Narrowed from a bare `except:', which also swallowed
                # KeyboardInterrupt/SystemExit. latin1 decoding of a byte
                # string cannot fail, so this fallback always succeeds.
                uchar = m[0].decode("latin1")
            # Check we have a real char (e.g. not something like '^^a3')
            if len(uchar) == 1:
                msg.warn("Character U+%X (%s) not in font '%s'" % \
                         (ord(uchar), m[0], m[1]))
            else:
                msg.warn("Character '%s' not in font '%s'" % (m[0], m[1]))