def test_05_optional_configuration():
    """Verifies that the tokenizer accepts optional filter configuration.

    tests: R-PARSER-TOKENIZER-001
    tests: R-PARSER-TOKENIZER-005
    """
    tokenizer = Tokenizer({}, None)
    # Configuring an arbitrary keyword must be accepted without raising.
    tokenizer.configure(foo="bar")
def test_02_optional_configuration():
    """Verifies that the parser accepts optional filter configuration.

    tests: R-PARSER-ELOG-008
    """
    parser = Tokenizer({}, None)
    # An arbitrary configuration keyword must be tolerated without error.
    parser.configure(foo="bar")
def test_01b_missing_handler_raises():
    """Verifies that the parser raises if an entry handler was not given.

    tests: R-PARSER-ELOG-001
    tests: R-PARSER-ELOG-003
    """
    parser = Tokenizer({}, None)
    log_path = join(dirname(__file__), "good_emerge.log")
    with open(log_path) as log_file:
        # Tokenizing without a handler must fail, even on valid input.
        with pytest.raises(TokenizerError):
            parser.tokenize(log_file)
def test_06_entry_handler_property():
    """Verifies the getter and setter of the entry_handler property.

    tests: R-PARSER-TOKENIZER-001
    tests: R-PARSER-TOKENIZER-002
    """
    initial_handler = MockEntryHandler([])
    tokenizer = Tokenizer({}, initial_handler)
    # The handler passed at construction time must be readable back.
    assert tokenizer.entry_handler == initial_handler
    # Assigning a replacement handler must be reflected by the getter.
    replacement_handler = MockEntryHandler(["a"])
    tokenizer.entry_handler = replacement_handler
    assert tokenizer.entry_handler == replacement_handler
def test_01_missing_entry_handler_raises():
    """Verifies that a missing entry handler raises.

    tests: R-PARSER-TOKENIZER-001
    tests: R-PARSER-TOKENIZER-002
    tests: R-PARSER-TOKENIZER-003
    tests: R-PARSER-TOKENIZER-004
    """
    tokenizer = Tokenizer({})
    with pytest.raises(TokenizerError) as exc_info:
        tokenizer.tokenize(StringIO())
    # The exception must carry the exact diagnostic message.
    assert exc_info.value.args[0] == "Entry entry_handler not given!"
def test_03_registering_unknown_entry_type_raises():
    """Verifies that registering for an unknown entry type raises an exception.

    tests: R-PARSER-ENTRY-HANDLER-001
    tests: R-PARSER-ENTRY-HANDLER-002
    tests: R-PARSER-ENTRY-HANDLER-003
    """
    handler = EntryHandler()
    # "void" is not among the entry types known to the tokenizer below.
    handler.register_listener(lambda x: x, "void")
    parser = Tokenizer({}, handler)
    with pytest.raises(TokenizerError):
        parser.tokenize(StringIO(""))
def test_02_unknown_entry_raises():
    """Verifies that an unknown entry raises.

    tests: R-PARSER-TOKENIZER-001
    tests: R-PARSER-TOKENIZER-002
    tests: R-PARSER-TOKENIZER-003
    tests: R-PARSER-TOKENIZER-004
    """
    # The handler knows entry type "a", but the tokenizer has no patterns.
    handler = MockEntryHandler(["a"])
    tokenizer = Tokenizer({}, handler)
    with pytest.raises(TokenizerError) as exc_info:
        tokenizer.tokenize(StringIO())
    assert exc_info.value.args[0] == "Unknown registered entry type"
def test_03_simple_tokenization_succeeds(capsys):
    """Verifies that a simple tokenization succeeds.

    tests: R-PARSER-TOKENIZER-001
    tests: R-PARSER-TOKENIZER-002
    tests: R-PARSER-TOKENIZER-003
    tests: R-PARSER-TOKENIZER-004
    """
    handler = MockEntryHandler(["a"])
    tokenizer = Tokenizer(dict(a=re.compile("^a$")), handler)
    tokenizer.tokenize(StringIO("a"))
    captured = capsys.readouterr()
    # Exactly one entry of type "a" with no captured groups is expected.
    assert len(handler.entries) == 1
    assert handler.entries[0] == ("a", {})
    # Echo is off by default, so nothing must appear on stdout.
    assert captured.out == ""
def test_04_complex_tokenization_succeeds(capsys):
    """Verifies that a complex tokenization succeeds.

    tests: R-PARSER-TOKENIZER-001
    tests: R-PARSER-TOKENIZER-002
    tests: R-PARSER-TOKENIZER-003
    tests: R-PARSER-TOKENIZER-004
    """
    handler = MockEntryHandler(["a"])
    entry_types = dict(
        a=re.compile("^(?P<number>[0-9]+)a$"),
        b=re.compile("^(?P<number>[0-9]+)b$"),
    )
    tokenizer = Tokenizer(entry_types, handler, echo=True)
    tokenizer.tokenize(StringIO("1337a\n42b"))
    captured = capsys.readouterr()
    # Only the "a" entry is registered with the handler, so the "b" line
    # must not be delivered.
    assert len(handler.entries) == 1
    assert handler.entries[0] == ("a", dict(number="1337"))
    # With echo enabled, the handled line is written to stdout.
    assert captured.out == "1337a\n"
def main(argv=None):
    """Program entry point: assemble the runtime components and run Main.

    :param argv: command line argument list; defaults to ``sys.argv``
        when not supplied (e.g. when invoked as a script).
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv
    # Explicit dependency injection: Main receives all collaborators by name.
    runtime = dict(
        configurator=CommandLineConfigurator(argv[1:]),
        elog_tokenizer=Tokenizer(EMERGE_LOG_ENTRY_TYPES, EntryHandler()),
        output=Output()
    )
    m = Main(**runtime)
    try:
        m.run()
    except BaseException as e:
        # NOTE(review): catching BaseException also traps SystemExit and
        # KeyboardInterrupt — presumably intentional so that argparse-style
        # exits still produce the help text below; confirm before narrowing
        # this to Exception.
        print(f"Error: {e}", file=sys.stderr)
        runtime["configurator"].print_help()
        # NOTE(review): sys.exit() without an argument exits with status 0
        # even on the error path — verify whether a non-zero exit code was
        # intended here.
        sys.exit()
def _parse_pretended_packages(self):
    """Tokenize the pretend stream and collect the atoms of pretended packages."""
    pretend_tokenizer = Tokenizer(
        EMERGE_PRETEND_ENTRY_TYPES, entry_handler=EntryHandler(), echo=True)
    # Every "pretended_package" entry contributes its atom base to the list.
    pretend_tokenizer.entry_handler.register_listener(
        lambda properties: self.pretended_packages.append(properties["atom_base"]),
        "pretended_package")
    pretend_tokenizer.tokenize(self.pretend_stream)
def test_01a_good_elog_parses_successful():
    """Verifies that the parser matches the entry types of a proper emerge
    log and delegates each entry to the given entry_handler.

    tests: R-PARSER-ELOG-001
    tests: R-PARSER-ELOG-002
    tests: R-PARSER-ELOG-003
    tests: R-PARSER-ELOG-004
    tests: R-PARSER-ELOG-005
    tests: R-PARSER-ELOG-006
    tests: R-PARSER-ELOG-007
    """
    parser = Tokenizer(EMERGE_LOG_ENTRY_TYPES, MockedEntryHandler())
    with open(join(dirname(__file__), "good_emerge.log")) as log_file:
        parser.tokenize(log_file)

    # Expected entry type and the property subset each entry must carry,
    # in log order.
    expected = [
        ("sync", dict(timestamp="1507734360", repo_name="gentoo")),
        ("merge_begin", dict(timestamp="1507735226",
                             atom_base="sys-devel/gcc-config",
                             atom_version="1.8-r1",
                             count_n="1", count_m="2")),
        ("unmerge", dict(timestamp="1507735236",
                         atom_base="sys-devel/gcc-config",
                         atom_version="1.7.3")),
        ("merge_end", dict(timestamp="1507735239",
                           atom_base="sys-devel/gcc-config",
                           atom_version="1.8-r1",
                           count_n="1", count_m="2")),
        ("merge_begin", dict(timestamp="1507735239",
                             atom_base="app-laptop/laptop-mode-tools",
                             atom_version="1.71",
                             count_n="2", count_m="2")),
        ("unmerge", dict(timestamp="1507735248",
                         atom_base="app-laptop/laptop-mode-tools",
                         atom_version="1.70")),
        ("merge_end", dict(timestamp="1507735250",
                           atom_base="app-laptop/laptop-mode-tools",
                           atom_version="1.71",
                           count_n="2", count_m="2")),
        ("sync", dict(timestamp="1508345663", repo_name="gentoo")),
    ]

    entries = parser.entry_handler.entries
    assert len(entries) == 8
    for (entry_type, properties), (want_type, want_props) in zip(entries, expected):
        assert entry_type == want_type
        # Check only the asserted subset; entries may carry further keys.
        for key, value in want_props.items():
            assert properties[key] == value