def test_stream_sb(stream_parser):
    """A subnegotiation (SB ... SE) collapses into one OptionSubnegotiation event."""
    tokens = [
        Tokenizer.Option(B.SB, None, 42),
        Tokenizer.StreamData(b"1234"),
        Tokenizer.Command(B.SE, B.SE.value),
    ]
    produced = stream_parser.stream_updates(tokens)
    assert produced == [StreamParser.OptionSubnegotiation(None, 42)]
def test_stream_command_in_crlf(stream_parser):
    """A command interrupting a CR LF pair splits the user data around it."""
    tokens = [
        Tokenizer.StreamData(b"abc\r"),
        Tokenizer.Command(B.NOP, B.NOP.value),
        Tokenizer.StreamData(b"\ndef"),
    ]
    produced = stream_parser.stream_updates(tokens)
    expected = [
        StreamParser.UserData("abc"),
        StreamParser.Command(B.NOP, B.NOP.value),
        StreamParser.UserData("\n"),
        StreamParser.UserData("def"),
    ]
    assert produced == expected
def test_stream_iac(stream_parser):
    """IAC IAC produces a single IAC on the user stream.

    That byte is not a valid ASCII character, so decoding filters it out
    and no events are emitted.
    """
    produced = stream_parser.stream_updates(
        [Tokenizer.Command(B.IAC, B.IAC.value)]
    )
    assert produced == []
def test_stream_command(stream_parser):
    """A lone NOP command token passes through as a Command event."""
    produced = stream_parser.stream_updates(
        [Tokenizer.Command(B.NOP, B.NOP.value)]
    )
    assert produced == [StreamParser.Command(B.NOP, B.NOP.value)]
def test_split_command(tokenizer):
    """An IAC split across two feeds is buffered until its command byte arrives."""
    # First chunk ends mid-command: only the plain data is tokenized.
    first = tokenizer.tokens(b"abc" + B.IAC.byte)
    assert first == [Tokenizer.StreamData(b"abc")]
    # Second chunk completes the command, then carries on with data.
    second = tokenizer.tokens(B.NOP.byte + b"def")
    assert second == [
        Tokenizer.Command(B.NOP, B.NOP.value),
        Tokenizer.StreamData(b"def"),
    ]
def test_tokenizer_command(tokenizer):
    """IAC followed by NOP in one feed tokenizes as a single Command."""
    produced = tokenizer.tokens(B.IAC.byte + B.NOP.byte)
    assert produced == [Tokenizer.Command(B.NOP, B.NOP.value)]