def test_reader_buffering(self):
    first_part = '{"a": 1, "b": 2}\n{"a": 2,'
    second_part = '"b": 3}\n{"a": 3, "b": 4}\n'
    reader = LDJSONReader("yip", logging.getLogger())
    buffer = StringIO(first_part)
    reader.fds = buffer

    items = list(reader.read(last_pass=False))
    self.assertEqual(len(items), 1)

    buffer.write(second_part)
    items = list(reader.read(last_pass=False))
    self.assertEqual(len(items), 2)
def test_reader_buffering(self):
    first_part = b'{"a": 1, "b": 2}\n{"a": 2,'
    second_part = b'"b": 3}\n{"a": 3, "b": 4}\n'
    reader = LDJSONReader("yip", ROOT_LOGGER)
    buffer = BytesIO(first_part)
    reader.file.fds = buffer
    reader.file.fds.name = "yip"

    items = list(reader.read(last_pass=False))
    self.assertEqual(len(items), 1)

    buffer.write(second_part)
    items = list(reader.read(last_pass=False))
    self.assertEqual(len(items), 2)
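# A minimal standalone sketch (not part of the test suite) of what the two read()
# calls above are expected to yield, assuming LDJSONReader emits one dict per
# complete line and buffers the trailing partial line until more data arrives:
import json

first_part = b'{"a": 1, "b": 2}\n{"a": 2,'
second_part = b'"b": 3}\n{"a": 3, "b": 4}\n'

complete, _, pending = first_part.partition(b'\n')   # only one full line so far
assert json.loads(complete) == {"a": 1, "b": 2}      # first read(): 1 item

rest = pending + second_part                         # buffered tail + new data
assert [json.loads(line) for line in rest.splitlines()] == [{"a": 2, "b": 3},
                                                            {"a": 3, "b": 4}]  # second read(): 2 items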
class MolotovReportReader(ResultsReader):
    def __init__(self, filename, parent_logger):
        super(MolotovReportReader, self).__init__()
        self.is_distributed = False
        self.log = parent_logger.getChild(self.__class__.__name__)
        self.ldjson_reader = LDJSONReader(filename, self.log)
        self.read_records = 0
        self._concurrency = 0

    def _read(self, final_pass=False):
        # yields samples as (timestamp, label, concurrency, response time, connect time,
        # latency, response code, error, transaction name, byte count)
        for row in self.ldjson_reader.read(final_pass):
            self.read_records += 1
            if row.get("type") == "workers":
                self._concurrency = row.get("value", self._concurrency)
            elif row.get("type") == "scenario_success":
                label = unicode_decode(row["name"])
                tstmp = int(float(row["ts"]))
                rtm = float(row["duration"])
                rcd = "200"
                error = None
                cnn = ltc = byte_count = 0
                trname = ''
                yield tstmp, label, self._concurrency, rtm, cnn, ltc, rcd, error, trname, byte_count
            elif row.get("type") == "scenario_failure":
                label = unicode_decode(row["name"])
                tstmp = int(float(row["ts"]))
                rtm = float(row["duration"])
                rcd = row["exception"]
                error = row["errorMessage"]
                cnn = ltc = byte_count = 0
                trname = ''
                yield tstmp, label, self._concurrency, rtm, cnn, ltc, rcd, error, trname, byte_count
class TestReportReader(object):
    REPORT_ITEM_KEYS = ["test_case", "test_suite", "status", "start_time", "duration",
                        "error_msg", "error_trace", "extras", "subsamples"]
    TEST_STATUSES = ("PASSED", "FAILED", "BROKEN", "SKIPPED")
    FAILING_TESTS_STATUSES = ("FAILED", "BROKEN")

    def __init__(self, filename, parent_logger, translation_table=None):
        super(TestReportReader, self).__init__()
        self.log = parent_logger.getChild(self.__class__.__name__)
        self.json_reader = LDJSONReader(filename, self.log)
        self.translation_table = translation_table or {}

    def process_label(self, label):
        # explicit translations win; otherwise strip a generated 'test_<5 digits>_' prefix
        if label in self.translation_table:
            return self.translation_table[label]

        if isinstance(label, string_types):
            if label.startswith('test_') and label[5:10].isdigit():
                return label[11:]

        return label

    def read(self, last_pass=False):
        for row in self.json_reader.read(last_pass):
            # skip records that don't carry every expected field
            missing = [key for key in self.REPORT_ITEM_KEYS if key not in row]
            if missing:
                self.log.debug("Unexpected test record: %s", row)
                self.log.warning("Test record doesn't conform to schema, skipping, %s", missing)
                continue

            row["test_case"] = self.process_label(row["test_case"])
            yield row
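# Hypothetical example (made-up labels) of how the translation_table interacts with
# the digit-prefix stripping in process_label() above: an exact match is translated,
# anything else falls back to the 'test_<5 digits>_' rule.
translation_table = {"test_00001_login": "login flow"}

label = "test_00001_login"
assert translation_table.get(label, label) == "login flow"   # explicit translation wins

label = "test_00002_checkout"
assert label.startswith('test_') and label[5:10].isdigit()
assert label[11:] == "checkout"                              # generated prefix stripped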
class TestReportReader(object):
    SAMPLE_KEYS = [
        "test_case",    # str
        "test_suite",   # str
        "status",       # str
        "start_time",   # float, epoch
        "duration",     # float, in seconds
        "error_msg",    # short string
        "error_trace",  # multiline string
        "extras",       # dict
        "subsamples",   # list of samples
        "assertions",   # list of dicts, {"name": str, "failed": bool, "error_msg": str, "error_trace": str}
        "path",         # list of components, e.g. [{"value": "test_Something", "type": "module"},
                        #                           {"value": "TestAPI", "type": "class"},
                        #                           {"value": "test_heartbeat", "type": "method"},
                        #                           {"value": "index page", "type": "transaction"},
                        #                           {"value": "http://blazedemo.com/", "type": "request"}]
    ]
    TEST_STATUSES = ("PASSED", "FAILED", "BROKEN", "SKIPPED")
    FAILING_TESTS_STATUSES = ("FAILED", "BROKEN")

    def __init__(self, filename, parent_logger):
        super(TestReportReader, self).__init__()
        self.log = parent_logger.getChild(self.__class__.__name__)
        self.json_reader = LDJSONReader(filename, self.log)

    @staticmethod
    def process_label(label):
        # 'test_01_feeling_good' -> 'feeling_good'
        if isinstance(label, string_types):
            parts = label.split('_', 2)
            if len(parts) == 3 and parts[0] == 'test' and parts[1].isdigit():
                return parts[2]
        return label

    def process_path(self, path):
        # the last path component is the test case, the rest form the dotted suite name
        if isinstance(path, list):
            test_suite = ".".join(part["value"] for part in path[:-1])
            test_case = path[-1]["value"]
            return test_suite, test_case
        return None

    def read(self, last_pass=False):
        for row in self.json_reader.read(last_pass):
            if "path" in row:
                processed_path = self.process_path(row["path"])
                if processed_path is not None:
                    row["test_suite"], row["test_case"] = processed_path
            row["test_case"] = self.process_label(row["test_case"])
            yield row
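# Illustrative walk-through (sample values taken from the SAMPLE_KEYS comment above)
# of how process_path() and process_label() reshape one record:
path = [
    {"value": "test_Something", "type": "module"},
    {"value": "TestAPI", "type": "class"},
    {"value": "test_heartbeat", "type": "method"},
]
test_suite = ".".join(part["value"] for part in path[:-1])
test_case = path[-1]["value"]
assert (test_suite, test_case) == ("test_Something.TestAPI", "test_heartbeat")

# the label rule only strips 'test_<digits>_' prefixes:
assert "test_01_feeling_good".split('_', 2)[2] == "feeling_good"
assert "test_heartbeat".split('_', 2) == ["test", "heartbeat"]   # only 2 parts, left unchanged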
class MolotovReportReader(ResultsReader):
    def __init__(self, filename, parent_logger):
        super(MolotovReportReader, self).__init__()
        self.is_distributed = False
        self.log = parent_logger.getChild(self.__class__.__name__)
        self.ldjson_reader = LDJSONReader(filename, self.log)
        self.read_records = 0
        self._concurrency = 0

    def _read(self, final_pass=False):
        # yields samples as (timestamp, label, concurrency, response time, connect time,
        # latency, response code, error, transaction name, byte count)
        for row in self.ldjson_reader.read(final_pass):
            self.read_records += 1
            if row.get("type") == "workers":
                self._concurrency = row.get("value", self._concurrency)
            elif row.get("type") == "scenario_success":
                label = unicode_decode(row["name"])
                tstmp = int(float(row["ts"]))
                rtm = float(row["duration"])
                rcd = "200"
                error = None
                cnn = ltc = byte_count = 0
                trname = ''
                yield tstmp, label, self._concurrency, rtm, cnn, ltc, rcd, error, trname, byte_count
            elif row.get("type") == "scenario_failure":
                label = unicode_decode(row["name"])
                tstmp = int(float(row["ts"]))
                rtm = float(row["duration"])
                rcd = row["exception"]
                error = row["errorMessage"]
                cnn = ltc = byte_count = 0
                trname = ''
                yield tstmp, label, self._concurrency, rtm, cnn, ltc, rcd, error, trname, byte_count
            elif row.get("type") == "request":
                label = unicode_decode(row["label"])
                tstmp = int(float(row["ts"]))
                rtm = float(row["elapsed"])
                rcd = row["responseCode"]
                error = None
                if int(rcd) >= 400:
                    error = row["responseMessage"]
                cnn = 0
                ltc = 0
                trname = ''
                byte_count = 0
                yield tstmp, label, self._concurrency, rtm, cnn, ltc, rcd, error, trname, byte_count
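# A rough sketch of the newline-delimited JSON records MolotovReportReader consumes;
# the field names come from the reader above, the values here are made up for illustration:
import json

records = [
    {"type": "workers", "value": 10},
    {"type": "scenario_success", "name": "scenario_one", "ts": 1550000000.1, "duration": 0.23},
    {"type": "scenario_failure", "name": "scenario_two", "ts": 1550000000.2, "duration": 0.45,
     "exception": "AssertionError", "errorMessage": "assertion failed"},
    {"type": "request", "label": "/", "ts": 1550000000.3, "elapsed": 0.12,
     "responseCode": "200", "responseMessage": "OK"},
]
ldjson = "".join(json.dumps(record) + "\n" for record in records)
print(ldjson)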
class TestReportReader(object):
    REPORT_ITEM_KEYS = ["test_case", "test_suite", "status", "start_time", "duration",
                        "error_msg", "error_trace", "extras", "subsamples"]
    TEST_STATUSES = ("PASSED", "FAILED", "BROKEN", "SKIPPED")
    FAILING_TESTS_STATUSES = ("FAILED", "BROKEN")

    def __init__(self, filename, parent_logger):
        super(TestReportReader, self).__init__()
        self.log = parent_logger.getChild(self.__class__.__name__)
        self.json_reader = LDJSONReader(filename, self.log)

    def process_label(self, label):
        # strip a generated 'test_<5 digits>_' prefix from the test case name
        if isinstance(label, string_types):
            if label.startswith('test_') and label[5:10].isdigit():
                return label[11:]
        return label

    def read(self, last_pass=False):
        for row in self.json_reader.read(last_pass):
            row["test_case"] = self.process_label(row["test_case"])
            yield row