def test_stream_command_in_crlf(stream_parser):
    # A command arriving between CR and LF must not break CR LF -> "\n" folding.
    events = stream_parser.stream_updates(
        [
            Tokenizer.StreamData(b"abc\r"),
            Tokenizer.Command(B.NOP, B.NOP.value),
            Tokenizer.StreamData(b"\ndef"),
        ]
    )
    assert events == [
        StreamParser.UserData("abc"),
        StreamParser.Command(B.NOP, B.NOP.value),
        StreamParser.UserData("\n"),
        StreamParser.UserData("def"),
    ]


def test_stream_user_data_crlf(stream_parser):
    events = stream_parser.stream_updates([Tokenizer.StreamData(b"Hello,\r\nworld!")])
    assert events == [
        StreamParser.UserData("Hello,"),
        StreamParser.UserData("\n"),
        StreamParser.UserData("world!"),
    ]


def test_stream_sb(stream_parser):
    # Everything between SB and SE is folded into one subnegotiation event.
    events = stream_parser.stream_updates(
        [
            Tokenizer.Option(B.SB, None, 42),
            Tokenizer.StreamData(b"1234"),
            Tokenizer.Command(B.SE, B.SE.value),
        ]
    )
    assert events == [StreamParser.OptionSubnegotiation(None, 42)]


def test_stream_user_data_nonascii(stream_parser):
    # Non-ASCII bytes (here 0xAB) are dropped from user data.
    events = stream_parser.stream_updates([Tokenizer.StreamData(b"abc\xabdef")])
    assert events == [StreamParser.UserData("abcdef")]


def test_stream_user_data(stream_parser):
    events = stream_parser.stream_updates([Tokenizer.StreamData(b"Hello, world!")])
    assert events == [StreamParser.UserData("Hello, world!")]


def test_split_option(tokenizer):
    # An option split across reads: IAC WONT in one chunk, the option byte (42)
    # in the next. The tokenizer must buffer until the option is complete.
    toks = tokenizer.tokens(b"abc" + B.IAC.byte + B.WONT.byte)
    assert toks == [Tokenizer.StreamData(b"abc")]
    toks = tokenizer.tokens(bytes([42]) + b"def")
    assert toks == [
        Tokenizer.Option(B.WONT, None, 42),
        Tokenizer.StreamData(b"def"),
    ]


def test_split_command(tokenizer):
    # A command split across reads: IAC at the end of one chunk, the command
    # byte (NOP) at the start of the next.
    toks = tokenizer.tokens(b"abc" + B.IAC.byte)
    assert toks == [Tokenizer.StreamData(b"abc")]
    toks = tokenizer.tokens(B.NOP.byte + b"def")
    assert toks == [
        Tokenizer.Command(B.NOP, B.NOP.value),
        Tokenizer.StreamData(b"def"),
    ]


def test_tokenizer_data(tokenizer):
    TEST_DATA = b"abcde"
    toks = tokenizer.tokens(TEST_DATA)
    assert toks == [Tokenizer.StreamData(TEST_DATA)]
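

# A minimal sketch of the pytest fixtures these tests assume: a fresh Tokenizer
# and StreamParser per test. The import path ("telnet") and the no-argument
# constructors are assumptions; the real module defines its own fixtures and
# imports for Tokenizer, StreamParser, and B elsewhere.
#
# import pytest
#
# from telnet import B, StreamParser, Tokenizer
#
#
# @pytest.fixture
# def tokenizer():
#     # Assumed: Tokenizer() takes no constructor arguments.
#     return Tokenizer()
#
#
# @pytest.fixture
# def stream_parser():
#     # Assumed: StreamParser() takes no constructor arguments.
#     return StreamParser()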