Example 1
    def log_is_unstable(self, log_f):
        log_f.seek(0)

        statuses = defaultdict(set)
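        # statuses maps each subtest name (None for the test itself) to the set of result statuses observed.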

        def handle_status(item):
            if item["test"] == self.target:
                statuses[item["subtest"]].add(item["status"])

        def handle_end(item):
            if item["test"] == self.target:
                statuses[None].add(item["status"])

        reader.each_log(reader.read(log_f),
                        {"test_status": handle_status,
                         "test_end": handle_end})

        logger.debug(str(statuses))

        if not statuses:
            logger.error("Didn't get any useful output from wptrunner")
            log_f.seek(0)
            for item in reader.read(log_f):
                logger.debug(item)
            return None

        return any(len(item) > 1 for item in statuses.values())
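
All of these examples drive mozlog's structured-log reader. The core pattern, as a minimal sketch (the file name is an assumption; older code imported the module as mozlog.structured.reader):

from mozlog import reader

with open("wpt_raw.log") as f:
    # reader.read yields one dict per line of newline-delimited JSON.
    for entry in reader.read(f):
        print(entry["action"])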
Example 2
def to_json(*log_files):
    handler = LogHandler()
    for f in log_files:
        reader.handle_log(reader.read(f),
                          handler)

    return handler.rv
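
The LogHandler used here is project-specific. A minimal sketch of what such a handler might look like (collecting test_status entries into rv is an assumption about its behavior):

from mozlog import reader

class LogHandler(reader.LogHandler):
    def __init__(self):
        self.rv = []

    def test_status(self, data):
        # reader.handle_log dispatches each entry to the method named
        # after its "action" field; collect every test_status entry.
        self.rv.append(data)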
Example 3
def process_test_job(data):
    global logger
    logger = logger or structuredlog.get_default_logger()

    build_name = "{}-{} {}".format(data['platform'], data['buildtype'], data['test'])
    logger.debug("now processing a '{}' job".format(build_name))

    log_url = None
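    # Pick the first blobber file whose name matches a known structured-log name.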
    for name, url in data['blobber_files'].items():
        if name in settings['structured_log_names']:
            log_url = url
            break
    log_path = _download_log(log_url)

    try:
        backend = settings['datastore']
        db_args = config.database
        store = get_storage_backend(backend, **db_args)

        # TODO commit metadata about the test run

        handler = StoreResultsHandler(store)
        with open(log_path, 'r') as log:
            iterator = reader.read(log)
            reader.handle_log(iterator, handler)
    finally:
        mozfile.remove(log_path)
Example 4
    def test_handler(self):
        data = [{"action": "action_0", "data": "data_0"},
                {"action": "action_1", "data": "data_1"}]

        f = self.to_file_like(data)

        test = self
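        # Keep a reference to the test case so the nested handler can use its assertions.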
        class ReaderTestHandler(reader.LogHandler):
            def __init__(self):
                self.action_0_count = 0
                self.action_1_count = 0

            def action_0(self, item):
                test.assertEquals(item["action"], "action_0")
                self.action_0_count += 1

            def action_1(self, item):
                test.assertEquals(item["action"], "action_1")
                self.action_1_count += 1

        handler = ReaderTestHandler()
        reader.handle_log(reader.read(f), handler)

        self.assertEqual(handler.action_0_count, 1)
        self.assertEqual(handler.action_1_count, 1)
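
These tests rely on a to_file_like helper defined on the test case. It presumably serializes each entry as one JSON object per line, the format reader.read expects; a sketch under that assumption:

import json
from io import StringIO

def to_file_like(data):
    # One JSON-encoded entry per line, matching the structured-log format.
    return StringIO("".join(json.dumps(item) + "\n" for item in data))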
Example 5
def get_statuses(filenames):
    handler = StatusHandler()

    for filename in filenames:
        with open(filename) as f:
            reader.handle_log(reader.read(f), handler)

    return handler.statuses
Example 6
    def test_imap_log(self):
        data = [{"action": "action_0", "data": "data_0"}, {"action": "action_1", "data": "data_1"}]

        f = self.to_file_like(data)

        def f_action_0(item):
            return ("action_0", item["data"])

        def f_action_1(item):
            return ("action_1", item["data"])

        res_iter = reader.imap_log(reader.read(f), {"action_0": f_action_0, "action_1": f_action_1})
        self.assertEquals([("action_0", "data_0"), ("action_1", "data_1")], list(res_iter))
Example 7
def get_test_failures(raw_log):
    """
    Return the list of test failures contained within a structured log file.
    """
    failures = []
    def test_status(data):
        if data['status'] == 'FAIL':
            failures.append(data)
    with open(raw_log, 'r') as f:
        #XXX: bug 985606: map_action is a generator
        list(reader.map_action(reader.read(f),
                               {"test_status": test_status}))
    return failures
Example 8
    def test_each_log(self):
        data = [{"action": "action_0", "data": "data_0"}, {"action": "action_1", "data": "data_1"}]

        f = self.to_file_like(data)

        count = {"action_0": 0, "action_1": 0}

        def f_action_0(item):
            count[item["action"]] += 1

        def f_action_1(item):
            count[item["action"]] += 2

        reader.each_log(reader.read(f), {"action_0": f_action_0, "action_1": f_action_1})

        self.assertEquals({"action_0": 1, "action_1": 2}, count)
Example 9
def main(**kwargs):
    if kwargs["output"] is None:
        output = sys.stdout
    else:
        output = open(kwargs["output"], "w")
    readers = [read(open(filename, 'r')) for filename in kwargs["files"]]
    start_events = [process_until_suite_start(reader, output) for reader in readers]
    validate_start_events(start_events)
    merged_start_event = merge_start_events(start_events)
    dump_entry(fill_process_info(merged_start_event), output)

    end_events = [process_until_suite_end(reader, output) for reader in readers]
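    # Only the first file's suite_end entry is written to the merged log.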
    dump_entry(fill_process_info(end_events[0]), output)

    for reader in readers:
        for entry in reader:
            dump_entry(entry, output)
Example 10
    def parse_log(self):
        """
        Parse the entire log with ``mozlog``.

        This presumes that the log at ``log_url`` is a gzipped structured
        log generated by ``mozlog``.
        """
        handler = self.SummaryHandler()

        with self.get_log_handle(self.url) as in_f:
            try:
                reader.handle_log(reader.read(in_f), handler)
                self.artifact["errors_truncated"] = False
            except StopIteration:
                # cap out the number of lines we store in the artifact.
                self.artifact["errors_truncated"] = True

        self.artifact["all_errors"] = handler.lines
Example 11
def record_results(*log_files):
    handler = LogHandler()

    products = []
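    # Each argument has the form "<product>:<path to log file>".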
    for name in log_files:
        product, filename = name.split(":", 1)
        products.append((product, filename))

    handler.set_products([item[0] for item in products])
    for product, logfile in products:
        handler.set_product(product)
        with open(logfile) as f:
            reader.handle_log(reader.read(f),
                              handler)

    data = handler.data

    data["results"] = data["results"].values()

    return data
Example 12
def process_test_job(data):
    global logger
    logger = logger or utils.get_logger(name='catalog-worker')

    build_name = "{}-{} {}".format(data['platform'], data['buildtype'], data['test'])
    logger.info("now processing a '{}' job".format(build_name))

    log_url = utils.get_structured_log(data['blobber_files'])
    log_path = _download_log(log_url)

    try:
        backend = settings['datastore']
        db_args = config.database
        store = get_storage_backend(backend, **db_args)

        # TODO commit metadata about the test run

        handler = StoreResultsHandler(store)
        with open(log_path, 'r') as log:
            iterator = reader.read(log)
            reader.handle_log(iterator, handler)
    finally:
        mozfile.remove(log_path)
Example 13
    def process_suite(self, data):
        platform = '{}-{}'.format(data['platform'], data['buildtype'])
        build_str = "{}-{}".format(data['buildid'], platform)

        suite_name = self.get_suite_name(data['test'], platform)
        if not suite_name:
            return

        logs = [
            url for fn, url in data['blobber_files'].items()
            if fn.endswith('_raw.log')
        ]
        # return if there are no _raw.log files
        if not logs:
            return

        logger.debug("now processing build '{}'".format(build_str))
        handler = RuntimeHandler()
        for url in logs:
            log_path = self._prepare_mozlog(url)
            with open(log_path, 'r') as log:
                iterator = reader.read(log)
                reader.handle_log(iterator, handler)
            mozfile.remove(log_path)
        runtimes = handler.runtimes

        with lock:
            # create an entry for this build in the db
            suite, is_new = Suite.objects.get_or_create(
                name=suite_name,
                buildid=data['buildid'],
                platform=platform,
                timestamp=data['builddate'],
                revision=data['revision'],
            )
            suite.runtimes.update(runtimes)
            suite.save()
Example 14
def worker_thread(action_map, context):
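    # DataStream is assumed to expose a file-like interface over the
    # context's socket, yielding newline-delimited JSON log entries.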
    stream = DataStream(context)
    reader.each_log(reader.read(stream), action_map)
    stream.socket.close()
Example 15
    def test_read(self):
        data = [{"action": "action_0", "data": "data_0"},
                {"action": "action_1", "data": "data_1"}]

        f = self.to_file_like(data)
        self.assertEqual(data, list(reader.read(f)))
Example 16
    def update_from_log(self, log_file):
        self.run_info = None
        log_reader = reader.read(log_file)
        reader.each_log(log_reader, self.action_map)
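
Here self.action_map is presumably a dict mapping action names (e.g. "test_status") to bound methods; reader.each_log dispatches each parsed entry to the callback registered for its action.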
Example 17
def parse_log(path):
    with open(path) as f:
        regression_handler = results.LogHandler()
        reader.handle_log(reader.read(f),
                          regression_handler)
        return regression_handler.results