Example #1
def test_tokenize_no_collisions():
    points = [
        Point(x, y)
        for x in range(100)
        for y in range(100)
    ]
    tokens = [
        tokenize(point, pickle.dumps)
        for point in points
    ]
    assert len(set(tokens)) == len(points)
Example #2
def test_tokenize_straight_translation():
    assert tokenize(1) == '1'
    assert tokenize(1.0) == '1.0'
    assert tokenize('hello') == 'hello'
Example #3
def test_tokenize_complex_type():
    token = tokenize(Point(1, 2), pickle.dumps)
    assert isinstance(token, string_types)
    assert len(token) == 10
Example #4
def test_ensure_token_length_is_capped():
    assert len(tokenize('a' * 1000)) < 50
Example #5
def test_avoid_initial_period():
    assert tokenize('.test').startswith('_.test')
    assert tokenize('\x00\x00').startswith('_..')
Example #6
def test_tokenize_simple_cleaning():
    assert tokenize('Hello').startswith('hello_')
    assert tokenize(True).startswith('true_')
    assert tokenize('test\x00').startswith('test._')