Example #1
    def test_handler(self):
        data = [{"action": "action_0", "data": "data_0"},
                {"action": "action_1", "data": "data_1"}]

        f = self.to_file_like(data)

        test = self
        class ReaderTestHandler(reader.LogHandler):
            def __init__(self):
                self.action_0_count = 0
                self.action_1_count = 0

            def action_0(self, item):
                test.assertEqual(item["action"], "action_0")
                self.action_0_count += 1

            def action_1(self, item):
                test.assertEqual(item["action"], "action_1")
                self.action_1_count += 1

        handler = ReaderTestHandler()
        reader.handle_log(reader.read(f), handler)

        self.assertEqual(handler.action_0_count, 1)
        self.assertEqual(handler.action_1_count, 1)
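
For context, a minimal self-contained sketch of the dispatch mechanism this test exercises, assuming only mozlog's documented reader behaviour: reader.read yields one dict per JSON log line, and handle_log calls the handler method named by each entry's "action" field (to_file_like above is a helper from the surrounding test suite). The sample entry below is hypothetical.

import io
import json

from mozlog import reader

class CountingHandler(reader.LogHandler):
    """Counts how often each known action is dispatched."""
    def __init__(self):
        self.counts = {}

    def test_start(self, item):
        # handle_log routes {"action": "test_start", ...} entries here.
        self.counts["test_start"] = self.counts.get("test_start", 0) + 1

# Hypothetical structured-log content: one JSON object per line.
entries = [{"action": "test_start", "test": "example-test"}]
f = io.StringIO("\n".join(json.dumps(e) for e in entries))

handler = CountingHandler()
reader.handle_log(reader.read(f), handler)
assert handler.counts["test_start"] == 1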
Example #2
    def test_handler(self):
        data = [{
            "action": "action_0",
            "data": "data_0"
        }, {
            "action": "action_1",
            "data": "data_1"
        }]

        f = self.to_file_like(data)

        test = self

        class ReaderTestHandler(reader.LogHandler):
            def __init__(self):
                self.action_0_count = 0
                self.action_1_count = 0

            def action_0(self, item):
                test.assertEqual(item["action"], "action_0")
                self.action_0_count += 1

            def action_1(self, item):
                test.assertEqual(item["action"], "action_1")
                self.action_1_count += 1

        handler = ReaderTestHandler()
        reader.handle_log(reader.read(f), handler)

        self.assertEqual(handler.action_0_count, 1)
        self.assertEqual(handler.action_1_count, 1)
Example #3
def to_json(*log_files):
    handler = LogHandler()
    for f in log_files:
        reader.handle_log(reader.read(f),
                          handler)

    return handler.rv
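
to_json presumes a project-local LogHandler subclass that exposes its accumulated output as rv; that class is not shown here. A hypothetical sketch of what it might look like (the attribute name rv comes from the example; the body is purely illustrative):

from mozlog import reader

class LogHandler(reader.LogHandler):
    def __init__(self):
        self.rv = []

    def __call__(self, item):
        # Override the base class's per-action dispatch and simply
        # accumulate every parsed entry.
        self.rv.append(item)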
Example #4
def process_test_job(data):
    global logger
    logger = logger or structuredlog.get_default_logger()

    build_name = "{}-{} {}".format(data['platform'], data['buildtype'],
                                   data['test'])
    logger.debug("now processing a '{}' job".format(build_name))

    log_url = None
    for name, url in data['blobber_files'].items():
        if name in settings['structured_log_names']:
            log_url = url
            break
    log_path = _download_log(log_url)

    try:
        backend = settings['datastore']
        db_args = config.database
        store = get_storage_backend(backend, **db_args)

        # TODO commit metadata about the test run

        handler = StoreResultsHandler(store)
        with open(log_path, 'r') as log:
            iterator = reader.read(log)
            reader.handle_log(iterator, handler)
    finally:
        mozfile.remove(log_path)
Example #5
def get_statuses(filenames):
    handler = StatusHandler()

    for filename in filenames:
        with open(filename) as f:
            reader.handle_log(reader.read(f), handler)

    return handler.statuses
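
StatusHandler is likewise assumed to come from the surrounding module. A hypothetical sketch that tallies result statuses (mozlog also ships a ready-made StatusHandler in mozlog.handlers, which may be what the original call site used):

from collections import defaultdict

from mozlog import reader

class StatusHandler(reader.LogHandler):
    def __init__(self):
        self.statuses = defaultdict(int)

    def test_status(self, item):
        # Subtest results carry a "status" field.
        self.statuses[item["status"]] += 1

    def test_end(self, item):
        # Overall result status; guarded in case an entry omits it.
        if "status" in item:
            self.statuses[item["status"]] += 1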
Example #6
    def parse_log(self):
        """
        Parse the entire log with ``mozlog``.

        This presumes that the log at ``log_url`` is a gzipped structured
        log generated by ``mozlog``.
        """
        handler = self.SummaryHandler()

        with self.get_log_handle(self.url) as in_f:
            try:
                reader.handle_log(reader.read(in_f), handler)
                self.artifact["errors_truncated"] = False
            except StopIteration:
                # cap out the number of lines we store in the artifact.
                self.artifact["errors_truncated"] = True

        self.artifact["all_errors"] = handler.lines
Example #7
def record_results(*log_files):
    handler = LogHandler()

    products = []
    for name in log_files:
        product, filename = name.split(":", 1)
        products.append((product, filename))

    handler.set_products([item[0] for item in products])
    for product, logfile in products:
        handler.set_product(product)
        with open(logfile) as f:
            reader.handle_log(reader.read(f),
                              handler)

    data = handler.data

    data["results"] = data["results"].values()

    return data
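
record_results takes each argument as a product:filename pair; set_products and set_product are methods of the project-local LogHandler, not of mozlog's base class. A hypothetical invocation, with made-up file names:

data = record_results("firefox:firefox.log", "servo:servo.log")
for result in data["results"]:
    print(result)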
Example #8
def process_test_job(data):
    global logger
    logger = logger or utils.get_logger(name='catalog-worker')

    build_name = "{}-{} {}".format(data['platform'], data['buildtype'], data['test'])
    logger.info("now processing a '{}' job".format(build_name))

    log_url = utils.get_structured_log(data['blobber_files'])
    log_path = _download_log(log_url)

    try:
        backend = settings['datastore']
        db_args = config.database
        store = get_storage_backend(backend, **db_args)

        # TODO commit metadata about the test run

        handler = StoreResultsHandler(store)
        with open(log_path, 'r') as log:
            iterator = reader.read(log)
            reader.handle_log(iterator, handler)
    finally:
        mozfile.remove(log_path)
Example #9
    def process_suite(self, data):
        platform = '{}-{}'.format(data['platform'], data['buildtype'])
        build_str = "{}-{}".format(data['buildid'], platform)

        suite_name = self.get_suite_name(data['test'], platform)
        if not suite_name:
            return

        logs = [
            url for fn, url in data['blobber_files'].items()
            if fn.endswith('_raw.log')
        ]
        # return if there are no _raw.log files
        if not logs:
            return

        logger.debug("now processing build '{}'".format(build_str))
        handler = RuntimeHandler()
        for url in logs:
            log_path = self._prepare_mozlog(url)
            with open(log_path, 'r') as log:
                iterator = reader.read(log)
                reader.handle_log(iterator, handler)
            mozfile.remove(log_path)
        runtimes = handler.runtimes

        with lock:
            # create an entry for this build in the db
            suite, is_new = Suite.objects.get_or_create(
                name=suite_name,
                buildid=data['buildid'],
                platform=platform,
                timestamp=data['builddate'],
                revision=data['revision'],
            )
            suite.runtimes.update(runtimes)
            suite.save()
Example #10
def parse_log(path):
    with open(path) as f:
        regression_handler = results.LogHandler()
        reader.handle_log(reader.read(f),
                          regression_handler)
        return regression_handler.results
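
Taken together, every example above reduces to the same shape. A minimal template, assuming only mozlog's documented reader API (MyHandler and the action it handles are placeholders):

from mozlog import reader

class MyHandler(reader.LogHandler):
    def test_end(self, item):
        # Called once per {"action": "test_end", ...} entry; entries whose
        # action has no matching method are ignored by the base class.
        print(item.get("test"), item.get("status"))

def process(path):
    handler = MyHandler()
    with open(path) as f:
        reader.handle_log(reader.read(f), handler)
    return handler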