def test_integration(tokenizer, stream_parser):
    """End-to-end: tokenize a mixed telnet byte stream one byte at a time,
    then feed the tokens to the stream parser one token at a time, and check
    the full ordered event sequence."""
    raw = (
        b"Hel"
        + B.IAC.byte
        + B.NOP.byte
        + b"lo,\r"
        # start a subneg
        + B.IAC.byte
        + B.SB.byte
        + bytes([42])
        + b"abc"
        # literal IAC SE as subneg data
        + B.IAC.byte
        + B.IAC.byte
        + B.SE.byte
        + b"def"
        # finish the subneg
        + B.IAC.byte
        + B.SE.byte
        + b"\0wor"
        + B.IAC.byte
        + B.DO.byte
        + bytes([42])
        + b"ld!"
    )

    # process it one byte at a time
    tokens = []
    for value in raw:
        tokens.extend(tokenizer.tokens(bytes([value])))

    # and one token at a time
    events = []
    for token in tokens:
        events.extend(stream_parser.stream_updates([token]))

    assert events == [
        StreamParser.UserData("H"),
        StreamParser.UserData("e"),
        StreamParser.UserData("l"),
        StreamParser.Command(B.NOP, B.NOP.value),
        StreamParser.UserData("l"),
        StreamParser.UserData("o"),
        StreamParser.UserData(","),
        StreamParser.OptionSubnegotiation(None, 42),
        StreamParser.UserData("\r"),
        StreamParser.UserData("w"),
        StreamParser.UserData("o"),
        StreamParser.UserData("r"),
        StreamParser.OptionNegotiation(None, 42, StreamParser.Host.LOCAL, True),
        StreamParser.UserData("l"),
        StreamParser.UserData("d"),
        StreamParser.UserData("!"),
    ]
def test_stream_command_in_crlf(stream_parser):
    """A command token arriving between CR and LF is emitted between the
    surrounding user-data events."""
    incoming = [
        Tokenizer.StreamData(b"abc\r"),
        Tokenizer.Command(B.NOP, B.NOP.value),
        Tokenizer.StreamData(b"\ndef"),
    ]
    expected = [
        StreamParser.UserData("abc"),
        StreamParser.Command(B.NOP, B.NOP.value),
        StreamParser.UserData("\n"),
        StreamParser.UserData("def"),
    ]
    assert stream_parser.stream_updates(incoming) == expected
def test_stream_command(stream_parser):
    """A lone command token yields exactly one Command event."""
    token = Tokenizer.Command(B.NOP, B.NOP.value)
    assert stream_parser.stream_updates([token]) == [
        StreamParser.Command(B.NOP, B.NOP.value)
    ]