Exemplo n.º 1
0
def test_atomic_element():
    """Atomic parsing consumes one value and leaves the rest of the stream pending."""
    element, pending = parser.atomic_element(TokenStream(tokenize('42 not')))
    assert isinstance(element, AtomicElement)
    assert element.value == 42
    assert len(pending) == 2

    element, pending = parser.atomic_element(TokenStream(tokenize('not 42')))
    assert isinstance(element, AtomicElement)
    assert element.value == 'not'
    assert len(pending) == 2
Exemplo n.º 2
0
def dummy_file_elements():
    """Return the parsed element sequence for a small hand-written TOML document.

    The slice boundaries below are tied to the exact token stream produced by
    the embedded source text and partition it into alternating table bodies
    and table headers.
    """
    tokens_ = tuple(lexer.tokenize("""
name = fawzy
another_name=another_fawzy

[details]
id= 42
section =fourth

[[person]]
personname= lefawzy
dest=north

[[person]]
dest=south
personname=lafawzy

[details.extended]
number = 313
type =complex"""))

    def body(start, stop):
        # A table body is built from primitive elements, not raw tokens.
        return TableElement(
            primitive_tokens_to_primitive_elements(tokens_[start:stop]))

    def header(start, stop):
        return TableHeaderElement(tokens_[start:stop])

    return [
        body(0, 12),
        header(12, 16),
        body(16, 25),
        header(25, 31),
        body(31, 39),
        header(39, 45),
        body(45, 53),
        header(53, 60),
        body(60, None),
    ]
Exemplo n.º 3
0
def test_array_element():
    """Exercise ArrayElement: indexing, mutation, append, delete, serialization."""
    tokens = tuple(lexer.tokenize('[4, 8, 42, \n 23, 15]'))
    assert len(tokens) == 17

    # One element class per token, in stream order.
    kinds = (
        PunctuationElement,
        AtomicElement, PunctuationElement, WhitespaceElement,
        AtomicElement, PunctuationElement, WhitespaceElement,
        AtomicElement, PunctuationElement, WhitespaceElement,
        NewlineElement, WhitespaceElement,
        AtomicElement, PunctuationElement,
        WhitespaceElement, AtomicElement, PunctuationElement,
    )
    sub_elements = tuple(
        kind(tokens[i:i + 1]) for i, kind in enumerate(kinds))

    array_element = ArrayElement(sub_elements)

    # Length counts only the values, not punctuation/whitespace.
    assert len(array_element) == 5

    # Value access by positive and negative index.
    for index, expected in ((0, 4), (1, 8), (2, 42), (3, 23), (-1, 15)):
        assert array_element[index] == expected

    # Assignment through a negative index.
    array_element[-1] = 12

    # The original whitespace/newline formatting survives the edit.
    assert array_element.serialized() == '[4, 8, 42, \n 23, 12]'

    # Out-of-range access raises IndexError.
    with pytest.raises(IndexError):
        array_element[5]

    # Appending a new value.
    array_element.append(77)
    assert array_element.serialized() == '[4, 8, 42, \n 23, 12, 77]'

    # Deleting a value.
    del array_element[3]
    assert array_element.serialized() == '[4, 8, 42, 12, 77]'

    # primitive_value exposes the plain Python list.
    assert array_element.primitive_value == [4, 8, 42, 12, 77]
Exemplo n.º 4
0
def test_inline_table():
    """Parse an inline table and check key/value access plus leftover tokens."""
    stream = TokenStream(tokenize('{ "id"= 42,test = name} vroom'))
    table, remainder = parser.inline_table_element(stream)

    assert set(table.keys()) == {'id', 'test'}
    assert len(remainder) == 2
    assert table['id'] == 42
    assert table['test'] == 'name'
Exemplo n.º 5
0
def test_empty_array():
    """An empty array literal parses fully, leaving no pending tokens."""
    element, remainder = parser.array_element(TokenStream(tokenize('[]')))

    assert isinstance(element, ArrayElement)
    assert remainder.at_end
Exemplo n.º 6
0
def test_line_terminator_1():
    """A comment followed by a newline terminates a line as a CommentElement."""
    stream = TokenStream(tokenize('# Sup\n'))
    element, remainder = parser.line_terminator_element(stream)

    assert isinstance(element, CommentElement)
    assert remainder.offset == 2
    # The original stream object is not advanced by parsing.
    assert stream.offset == 0
Exemplo n.º 7
0
def test_tableheader():
    """A table-array header exposes its dotted names and supports prefix queries."""
    header = TableHeaderElement(
        tuple(lexer.tokenize('\n\t [[personal. information.details]] \n')))

    assert header.is_array_of_tables
    assert header.names == ('personal', 'information', 'details')
    assert header.has_name_prefix(('personal', 'information'))
Exemplo n.º 8
0
def test_entry_extraction():
    """Identify the top-level entries parsed out of ``sample.toml``.

    Expects ``sample.toml`` in the working directory; the first identified
    entry is the anonymous (headerless) top-level table.
    """
    # Close the file deterministically instead of leaking the handle
    # (the original relied on garbage collection to close it).
    with open('sample.toml') as toml_file:
        text = toml_file.read()
    elements = parser.parse_tokens(lexer.tokenize(text))

    entries = tuple(toplevels.identify(elements))

    assert len(entries) == 13
    assert isinstance(entries[0], toplevels.AnonymousTable)
Exemplo n.º 9
0
def test_space_3():
    """With no leading whitespace, an empty WhitespaceElement is produced."""
    stream = TokenStream(tokenize('noo'))
    element, remainder = parser.space_element(stream)

    assert isinstance(element, WhitespaceElement)
    # Nothing was consumed: the element is empty and both streams sit at 0.
    assert len(element.tokens) == 0
    assert remainder.offset == 0
    assert stream.offset == 0
Exemplo n.º 10
0
def test_line_terminator_2():
    """A bare newline terminates a line as a NewlineElement."""
    stream = TokenStream(tokenize('\n'))
    element, remainder = parser.line_terminator_element(stream)

    assert isinstance(element, NewlineElement)
    assert remainder.offset == 1
    # The original stream object is not advanced by parsing.
    assert stream.offset == 0
Exemplo n.º 11
0
def test_array_2():
    """Line breaks inside an array literal are permitted."""
    text = """[
  "alpha",
  "omega"
]"""
    element, _ = parser.array_element(TokenStream(tokenize(text)))

    assert element[0] == 'alpha'
    assert element[1] == 'omega'
Exemplo n.º 12
0
def test_key_value_pair():
    """A key/value pair with an array value yields the expected sub-elements."""
    text = """hosts = [
  "alpha",
  "omega"
]
"""
    elements, _ = parser.key_value_pair(TokenStream(tokenize(text)))

    # Index 1 is the key; index 5 is the array value.
    assert isinstance(elements[1], AtomicElement)
    assert isinstance(elements[5], ArrayElement)
Exemplo n.º 13
0
def test_table():
    """TableElement supports lookup, update, insertion and format-preserving output."""
    initial_toml = """name = "first"
id=42 # My id


"""
    tokens = tuple(lexer.tokenize(initial_toml))

    # (element class, start token, stop token) for each piece of the table.
    spec = (
        (AtomicElement, 0, 1),
        (WhitespaceElement, 1, 2),
        (PunctuationElement, 2, 3),
        (WhitespaceElement, 3, 4),
        (AtomicElement, 4, 5),
        (NewlineElement, 5, 6),
        (AtomicElement, 6, 7),
        (PunctuationElement, 7, 8),
        (AtomicElement, 8, 9),
        (WhitespaceElement, 9, 10),
        (CommentElement, 10, 12),
        (NewlineElement, 12, 13),
        (NewlineElement, 13, 14),
    )
    table = TableElement(
        tuple(kind(tokens[start:stop]) for kind, start, stop in spec))

    assert set(table.items()) == {('name', 'first'), ('id', 42)}
    assert table['name'] == 'first'
    assert table['id'] == 42

    # Inserting a new key appends a formatted pair.
    table['relation'] = 'another'
    assert set(table.items()) == {('name', 'first'), ('id', 42),
                                  ('relation', 'another')}

    # Updating an existing key rewrites its value in place.
    table['name'] = 'fawzy'
    assert set(table.items()) == {('name', 'fawzy'), ('id', 42),
                                  ('relation', 'another')}

    expected_toml = """name = "fawzy"
id=42 # My id
relation = "another"


"""
    assert table.serialized() == expected_toml
Exemplo n.º 14
0
def test_table():
    """Round-trip a two-entry table through mutation and serialization."""
    source = """name = "first"
id=42 # My id


"""
    toks = tuple(lexer.tokenize(source))

    # Parallel sequences: the element class and its token-slice bounds.
    kinds = (AtomicElement, WhitespaceElement, PunctuationElement,
             WhitespaceElement, AtomicElement, NewlineElement,
             AtomicElement, PunctuationElement, AtomicElement,
             WhitespaceElement, CommentElement, NewlineElement,
             NewlineElement)
    bounds = ((0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6),
              (6, 7), (7, 8), (8, 9), (9, 10), (10, 12),
              (12, 13), (13, 14))
    table = TableElement(tuple(
        kind(toks[lo:hi]) for kind, (lo, hi) in zip(kinds, bounds)))

    assert set(table.items()) == {('name', 'first'), ('id', 42)}
    assert table['name'] == 'first'
    assert table['id'] == 42

    # New key: appended as a formatted pair.
    table['relation'] = 'another'
    assert set(table.items()) == {('name', 'first'), ('id', 42), ('relation', 'another')}

    # Existing key: value rewritten in place.
    table['name'] = 'fawzy'
    assert set(table.items()) == {('name', 'fawzy'), ('id', 42), ('relation', 'another')}

    assert table.serialized() == """name = "fawzy"
id=42 # My id
relation = "another"


"""
Exemplo n.º 15
0
def test_array_element():
    """ArrayElement behaves like a mutable sequence while keeping its formatting."""
    toks = tuple(lexer.tokenize('[4, 8, 42, \n 23, 15]'))
    assert len(toks) == 17

    # Each token maps to exactly one element class, in order.
    classes = [
        PunctuationElement,
        AtomicElement, PunctuationElement, WhitespaceElement,
        AtomicElement, PunctuationElement, WhitespaceElement,
        AtomicElement, PunctuationElement, WhitespaceElement,
        NewlineElement, WhitespaceElement,
        AtomicElement, PunctuationElement,
        WhitespaceElement, AtomicElement, PunctuationElement,
    ]
    pieces = []
    for position, cls in enumerate(classes):
        pieces.append(cls(toks[position:position + 1]))

    arr = ArrayElement(tuple(pieces))

    # Only the atomic values count toward the length.
    assert len(arr) == 5

    # Reads via positive and negative indices.
    assert arr[0] == 4
    assert arr[1] == 8
    assert arr[2] == 42
    assert arr[3] == 23
    assert arr[-1] == 15

    # Write through a negative index.
    arr[-1] = 12

    # Formatting (spaces and the embedded newline) is preserved.
    assert arr.serialized() == '[4, 8, 42, \n 23, 12]'

    # Invalid index raises IndexError.
    with pytest.raises(IndexError):
        arr[5]

    # Append, then delete, then check the primitive view.
    arr.append(77)
    assert arr.serialized() == '[4, 8, 42, \n 23, 12, 77]'

    del arr[3]
    assert arr.serialized() == '[4, 8, 42, 12, 77]'

    assert arr.primitive_value == [4, 8, 42, 12, 77]
Exemplo n.º 16
0
def test_table():
    """Entries can be removed from a TableElement with ``del``."""
    toks = tuple(lexer.tokenize('id=42 # My id\nage=14'))

    # (element class, start token, stop token) for each table piece.
    spec = ((AtomicElement, 0, 1), (PunctuationElement, 1, 2),
            (AtomicElement, 2, 3), (WhitespaceElement, 3, 4),
            (CommentElement, 4, 6), (AtomicElement, 6, 7),
            (PunctuationElement, 7, 8), (AtomicElement, 8, 9))
    table = TableElement([cls(toks[lo:hi]) for cls, lo, hi in spec])

    assert set(table.items()) == {('id', 42), ('age', 14)}
    del table['id']
    assert set(table.items()) == {('age', 14)}
Exemplo n.º 17
0
def test_table_body_2():
    """A table body with nested arrays and comments consumes the whole stream."""
    text = """
data = [ ["gamma", "delta"], [1, 2] ]

# Line breaks are OK when inside arrays
hosts = [
  "alpha",
  "omega"
]

str_multiline = wohoo
"""
    _, remainder = parser.table_body_element(TokenStream(tokenize(text)))

    assert len(remainder) == 0
Exemplo n.º 18
0
def test_inline_table():
    """InlineTableElement: lookup, update, insert, delete and serialization."""
    toks = tuple(lexer.tokenize('{ name= "first", id=42}'))

    # One element class per token, in stream order.
    kinds = (PunctuationElement, WhitespaceElement, AtomicElement,
             PunctuationElement, WhitespaceElement, AtomicElement,
             PunctuationElement, WhitespaceElement, AtomicElement,
             PunctuationElement, AtomicElement, PunctuationElement)
    table = InlineTableElement(
        tuple(cls(toks[i:i + 1]) for i, cls in enumerate(kinds)))

    assert table["name"] == "first"
    assert table["id"] == 42

    # Update one key, insert another.
    table["name"] = "fawzy"
    table["nickname"] = "nickfawzy"
    assert set(table.items()) == {("name", "fawzy"), ("id", 42), ("nickname", "nickfawzy")}
    assert table.serialized() == '{ name= "fawzy", id=42, nickname = "nickfawzy"}'

    # Deletions keep the remaining formatting intact.
    del table["name"]
    assert table.serialized() == '{ id=42, nickname = "nickfawzy"}'

    del table["nickname"]
    assert table.serialized() == "{ id=42}"

    del table["id"]
    assert table.serialized() == "{ }"

    # Insertion into an emptied table uses canonical spacing.
    table["item1"] = 11
    table["item2"] = 22
    assert table.serialized() == "{ item1 = 11, item2 = 22}"
Exemplo n.º 19
0
def test_inline_table():
    """Exercise the full mutate/serialize cycle of an inline table."""
    toks = tuple(lexer.tokenize('{ name= "first", id=42}'))

    element_types = [PunctuationElement, WhitespaceElement, AtomicElement,
                     PunctuationElement, WhitespaceElement, AtomicElement,
                     PunctuationElement, WhitespaceElement, AtomicElement,
                     PunctuationElement, AtomicElement, PunctuationElement]
    pieces = []
    for index, element_type in enumerate(element_types):
        pieces.append(element_type(toks[index:index + 1]))

    table = InlineTableElement(tuple(pieces))

    assert table['name'] == 'first'
    assert table['id'] == 42

    # Overwrite one key and add a brand-new one.
    table['name'] = 'fawzy'
    table['nickname'] = 'nickfawzy'
    assert set(table.items()) == {('name', 'fawzy'), ('id', 42),
                                  ('nickname', 'nickfawzy')}
    assert table.serialized() == '{ name= "fawzy", id=42, nickname = "nickfawzy"}'

    # Remove keys one at a time, checking the serialized text after each.
    del table['name']
    assert table.serialized() == '{ id=42, nickname = "nickfawzy"}'

    del table['nickname']
    assert table.serialized() == '{ id=42}'

    del table['id']
    assert table.serialized() == '{ }'

    # Fresh insertions after emptying the table.
    table['item1'] = 11
    table['item2'] = 22
    assert table.serialized() == '{ item1 = 11, item2 = 22}'
Exemplo n.º 20
0
def test_structure():
    """Build the nested structure from ``sample.toml`` and spot-check values.

    Expects ``sample.toml`` in the working directory.
    """
    # Read the file inside a context manager so the handle is closed
    # deterministically (the original left it to the garbage collector).
    with open('sample.toml') as toml_file:
        tokens = lexer.tokenize(toml_file.read())
    elements = elementsanitizer.sanitize(parser.parse_tokens(tokens))
    entries_ = tuple(toplevels.identify(elements))

    s = structure(entries_)

    # Top-level and named tables.
    assert s['']['title'] == 'TOML Example'
    assert s['owner']['name'] == 'Tom Preston-Werner'
    assert s['database']['ports'][1] == 8001
    assert s['servers']['alpha']['dc'] == 'eqdc10'
    assert s['clients']['data'][1][0] == 1
    assert s['clients']['key3'] == 'The quick brown fox jumps over the lazy dog.'

    # Arrays of tables, including nested ones.
    assert s['fruit'][0]['name'] == 'apple'
    assert s['fruit'][0]['physical']['color'] == 'red'
    assert s['fruit'][0]['physical']['shape'] == 'round'
    assert s['fruit'][0]['variety'][0]['name'] == 'red delicious'
    assert s['fruit'][0]['variety'][1]['name'] == 'granny smith'

    assert s['fruit'][1]['name'] == 'banana'
    assert s['fruit'][1]['variety'][0]['name'] == 'plantain'
    assert s['fruit'][1]['variety'][0]['points'][2]['y'] == 4
Exemplo n.º 21
0
def test_whitespace_element():
    """Whitespace serializes back to the exact original text."""
    ws = WhitespaceElement(tuple(lexer.tokenize(' \t   ')))
    assert ws.serialized() == ' \t   '
Exemplo n.º 22
0
def test_punctuation_element():
    """Every punctuation token kind can back a PunctuationElement."""
    for symbol in ('[', '[[', '.', ']', ']]'):
        PunctuationElement(tuple(lexer.tokenize(symbol)))
Exemplo n.º 23
0
def test_comment_element():
    """A comment element round-trips through serialization."""
    source = '# This is my insightful remark\n'
    assert CommentElement(tuple(lexer.tokenize(source))).serialized() == source
Exemplo n.º 24
0
def test_comment_element():
    """Serialization preserves the comment text and its trailing newline."""
    text = '# This is my insightful remark\n'
    comment = CommentElement(tuple(lexer.tokenize(text)))
    assert comment.serialized() == text
Exemplo n.º 25
0
def test_array():
    """A five-value array parses, leaving the trailing space pending."""
    element, remainder = parser.array_element(
        TokenStream(tokenize('[ 3, 4, 5,6,7] ')))

    assert isinstance(element, ArrayElement)
    assert len(element) == 5
    assert len(remainder) == 1
Exemplo n.º 26
0
def test_newline_element():
    """Consecutive newlines serialize back unchanged."""
    newline = NewlineElement(tuple(lexer.tokenize('\n\n\n')))
    assert newline.serialized() == '\n\n\n'
Exemplo n.º 27
0
def test_table_body():
    """A table body parses its key/value pairs and leaves trailing tokens pending."""
    body, remainder = parser.table_body_element(
        TokenStream(tokenize(' name= "test" # No way man!\nid =42\n vvv')))

    assert set(body.keys()) == {'name', 'id'}
    assert len(remainder) == 2
    assert body['name'] == 'test'
    assert body['id'] == 42
Exemplo n.º 28
0
def test_newline_element():
    """A run of newline tokens round-trips through serialized()."""
    text = '\n\n\n'
    assert NewlineElement(tuple(lexer.tokenize(text))).serialized() == text
Exemplo n.º 29
0
def test_punctuation_element():
    """Brackets and dots each construct a PunctuationElement without error."""
    symbols = ['[', '[[', '.', ']', ']]']
    for sym in symbols:
        PunctuationElement(tuple(lexer.tokenize(sym)))
Exemplo n.º 30
0
def test_table_header():
    """A dotted table header parses; the tokens after it stay pending."""
    stream = TokenStream(tokenize(" [ namez    . namey . namex ] \n other things"))
    header, pending = parser.table_header_element(stream)

    assert isinstance(header, TableHeaderElement)
    assert len(pending) == 4
Exemplo n.º 31
0
def test_atomic_element():
    """set() replaces the value while the surrounding whitespace is preserved."""
    atomic = AtomicElement(tuple(lexer.tokenize(' \t 42 ')))
    assert atomic.value == 42
    atomic.set(23)
    assert atomic.serialized() == ' \t 23 '
Exemplo n.º 32
0
def test_whitespace_element():
    """serialized() returns the original whitespace run verbatim."""
    text = ' \t   '
    assert WhitespaceElement(tuple(lexer.tokenize(text))).serialized() == text