Example #1
def test_stream_command_in_crlf(stream_parser):
    events = stream_parser.stream_updates(
        [
            Tokenizer.StreamData(b"abc\r"),
            Tokenizer.Command(B.NOP, B.NOP.value),
            Tokenizer.StreamData(b"\ndef"),
        ]
    )
    assert events == [
        StreamParser.UserData("abc"),
        StreamParser.Command(B.NOP, B.NOP.value),
        StreamParser.UserData("\n"),
        StreamParser.UserData("def"),
    ]
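Read alongside example #2 below, this shows that a bare "\r" is held until the next byte arrives: the NOP command that lands between "\r" and "\n" is emitted in order, and the pair still collapses to a single "\n" of user data.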
Example #2
def test_stream_user_data_crlf(stream_parser):
    events = stream_parser.stream_updates([Tokenizer.StreamData(b"Hello,\r\nworld!")])
    assert events == [
        StreamParser.UserData("Hello,"),
        StreamParser.UserData("\n"),
        StreamParser.UserData("world!"),
    ]
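The CRLF pair is normalized to a lone "\n", so the text splits into three UserData events rather than arriving as one string.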
Example #3
def test_stream_sb(stream_parser):
    events = stream_parser.stream_updates(
        [
            Tokenizer.Option(B.SB, None, 42),
            Tokenizer.StreamData(b"1234"),
            Tokenizer.Command(B.SE, B.SE.value),
        ]
    )
    assert events == [StreamParser.OptionSubnegotiation(None, 42)]
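A telnet subnegotiation is bracketed by IAC SB ... IAC SE. The parser reduces the whole bracket to one OptionSubnegotiation event; the b"1234" payload never appears as UserData, so it is presumably carried inside the event (or deliberately withheld) rather than leaked into the user stream.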
Example #4
def test_stream_user_data_nonascii(stream_parser):
    events = stream_parser.stream_updates([Tokenizer.StreamData(b"abc\xabdef")])
    assert events == [StreamParser.UserData("abcdef")]
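The stray 0xAB byte is dropped: user data is evidently decoded as ASCII (or a similarly strict codec) with undecodable bytes skipped rather than raising.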
Example #5
def test_stream_user_data(stream_parser):
    events = stream_parser.stream_updates([Tokenizer.StreamData(b"Hello, world!")])
    assert events == [StreamParser.UserData("Hello, world!")]
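The baseline case: plain ASCII with no IAC bytes passes through unchanged as a single UserData event.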
Example #6
def test_split_option(tokenizer):
    toks = tokenizer.tokens(b"abc" + B.IAC.byte + B.WONT.byte)
    assert toks == [Tokenizer.StreamData(b"abc")]

    toks = tokenizer.tokens(bytes([42]) + b"def")
    assert toks == [Tokenizer.Option(B.WONT, None, 42), Tokenizer.StreamData(b"def")]
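The tokenizer is stateful across calls: the trailing IAC WONT in the first chunk is buffered, and the option byte 42 at the start of the second chunk completes it into one Tokenizer.Option token ahead of the remaining data.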
Example #7
def test_split_command(tokenizer):
    toks = tokenizer.tokens(b"abc" + B.IAC.byte)
    assert toks == [Tokenizer.StreamData(b"abc")]

    toks = tokenizer.tokens(B.NOP.byte + b"def")
    assert toks == [Tokenizer.Command(B.NOP, B.NOP.value), Tokenizer.StreamData(b"def")]
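The same buffering applies to two-byte commands: a chunk ending in a bare IAC yields only the preceding data, and the NOP in the next chunk completes the command before b"def" is tokenized.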
Example #8
def test_tokenizer_data(tokenizer):
    TEST_DATA = b"abcde"
    toks = tokenizer.tokens(TEST_DATA)
    assert toks == [Tokenizer.StreamData(TEST_DATA)]
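Example #8 is the tokenizer-level baseline: IAC-free input comes back as a single StreamData token.

All of these tests assume pytest fixtures named tokenizer and stream_parser that are not shown here. A minimal conftest.py sketch follows, assuming Tokenizer and StreamParser take no constructor arguments; the import path telnet_impl is a placeholder, not the project's real module name:

import pytest

from telnet_impl import StreamParser, Tokenizer  # placeholder import path; adjust to the real module

@pytest.fixture
def tokenizer():
    # Fresh tokenizer per test so partial IAC sequences buffered in
    # test_split_command / test_split_option do not leak between tests.
    return Tokenizer()

@pytest.fixture
def stream_parser():
    # Fresh parser per test; zero-argument construction is an assumption.
    return StreamParser()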