def test_skip_block_not_found(self):
    """Test skip_block where token is not found."""
    cs = TokenisedStream()
    cs.write(b'\x0b\x01\x00ghja')
    cs.seek(0)
    cs.skip_block(b'\x91', b'\x90')
    # token not found: position ends up at the end of the 7-byte stream
    assert cs.tell() == 7
def test_pickle_tokenisedstream(self):
    """Pickle TokenisedStream object."""
    ts = TokenisedStream()
    ts.write(b'123')
    ts.seek(0)
    ps = pickle.dumps(ts)
    ts2 = pickle.loads(ps)
    assert ts2.read() == b'123'
def test_skip_to_token_not_found(self):
    """Test skip_to_token where token is not found."""
    cs = TokenisedStream()
    cs.write(b'\x0b\x01\x00ghja')
    cs.seek(0)
    assert cs.skip_to_token(b'\x91') is None
def test_read_number_token(self):
    """Test read_number_token."""
    cs = TokenisedStream()
    cs.write(b'\x0b\x01\x00ghja')
    cs.seek(0)
    assert cs.read_number_token() == b'\x0b\x01\x00'
    cs.seek(0)
    cs.write(b'\x0c\x01\x00ghja')
    cs.seek(0)
    assert cs.read_number_token() == b'\x0c\x01\x00'
    cs.seek(0)
    cs.write(b'\x11\x01\x00ghja')
    cs.seek(0)
    assert cs.read_number_token() == b'\x11'
    cs.seek(0)
    cs.write(b'\x0f\xff\x00ghja')
    cs.seek(0)
    assert cs.read_number_token() == b'\x0f\xff'
    cs.seek(0)
    cs.write(b'\x1c\xff\x00ghja')
    cs.seek(0)
    assert cs.read_number_token() == b'\x1c\xff\x00'
    cs.seek(0)
    cs.write(b'\x1d\xff\x00ghja')
    cs.seek(0)
    assert cs.read_number_token() == b'\x1d\xff\x00gh'
    cs.seek(0)
    cs.write(b'\x1f\xff\x00ghjagh007')
    cs.seek(0)
    assert cs.read_number_token() == b'\x1f\xff\x00ghjagh'
    cs.seek(0)
    cs.write(b'\x00\xff\x00ghja')
    cs.seek(0)
    assert cs.read_number_token() == b''
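# Operand sizes exercised by test_read_number_token above, inferred from the
# assertions; the lead bytes appear to follow the GW-BASIC tokenised number
# format that TokenisedStream reads:
#   0x0b (octal int), 0x0c (hex int), 0x1c (two-byte int)  -> token + 2 bytes
#   0x0f (one-byte int)                                    -> token + 1 byte
#   0x11-0x1b (small constants)                            -> token only
#   0x1d (single-precision float)                          -> token + 4 bytes
#   0x1f (double-precision float)                          -> token + 8 bytes
#   any other lead byte                                    -> not a number token, b'' returned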