Example No. 1
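Note: these snippets reference names defined elsewhere in the parselglossy test suite (the `reference` dict and the pytest fixtures `keywords`, `data_only_section`, `flat_sections`, `nested_sections`, ...). A minimal import block that makes the remaining names resolvable is sketched below; the module paths are assumptions about the parselglossy package layout, not verified against a particular release.

import json
from io import StringIO

import pytest

# Assumed parselglossy module paths; adjust to the installed layout.
from parselglossy.exceptions import ParselglossyError
from parselglossy.grammars import getkw, lexer
from parselglossy.utils import ComplexEncoder, as_complex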
def test_repeated_keyword():
    """Test that we catch an accidentally repeated keyword."""
    grammar = getkw.grammar(has_complex=True)
    keywords = """
int_array = [42]
bool_array = [on, true, yes, False, True, false]
str_array = [foo, bar, "lorem", "IpSuM"]
str_array = [oops, repeated, "lorem", "IpSuM"]
"""
    with pytest.raises(
            ParselglossyError,
            match="A keyword is repeated. Please check your input."):
        _ = lexer.parse_string_to_dict(grammar, keywords)

    keywords = """
int_array = [42]
bool_array = [on, true, yes, False, True, false]
str_array = [foo, bar, "lorem", "IpSuM"]

Something {
  Foo {
    bar = true
  }

  Foo {
    bar = false
  }
}
"""
    with pytest.raises(
            ParselglossyError,
            match="A keyword is repeated. Please check your input."):
        _ = lexer.parse_string_to_dict(grammar, keywords)
Example No. 2
def test_data_only_section(data_only_section):
    ref = {
        "molecule": {"coords": "H  0.0000  0.0000 -0.7000\nH  0.0000  0.0000  0.7000\n"}
    }
    grammar = getkw.grammar()
    tokens = lexer.parse_string_to_dict(grammar, data_only_section)

    assert tokens == ref
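The `data_only_section` fixture is not part of this listing. A plausible definition that would produce the `ref` above, assuming getkw marks verbatim data blocks with `$name ... $end` delimiters (the delimiter syntax is an assumption reconstructed from the expected output):

@pytest.fixture
def data_only_section():
    # Hypothetical fixture: a single section whose only content is a
    # raw-data keyword; everything between $coords and $end would be
    # kept verbatim, trailing newline included.
    return """molecule {
$coords
H  0.0000  0.0000 -0.7000
H  0.0000  0.0000  0.7000
$end
}"""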
Example No. 3
def test_keyword(keywords):
    """Test an input made only of keywords."""
    grammar = getkw.grammar(has_complex=True)
    tokens = lexer.parse_string_to_dict(grammar, keywords)

    assert tokens == reference
    # dump to JSON
    getkw_json = StringIO()
    json.dump(tokens, getkw_json, cls=ComplexEncoder, indent=4)
    del tokens

    # load from JSON
    tokens = json.loads(getkw_json.getvalue(), object_hook=as_complex)

    assert tokens == reference
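`ComplexEncoder` and `as_complex` exist because plain JSON has no complex number type: the encoder tags complex values on the way out, and the `object_hook` rebuilds them on the way in. A minimal stand-in pair with the same round-trip behavior (the tagging scheme here is illustrative, not necessarily parselglossy's exact wire format):

class ComplexEncoder(json.JSONEncoder):
    # Serialize complex numbers as a tagged pair of floats.
    def default(self, obj):
        if isinstance(obj, complex):
            return {"__complex__": [obj.real, obj.imag]}
        return super().default(obj)


def as_complex(dct):
    # json.loads object_hook: rebuild complex values from the tag.
    if "__complex__" in dct:
        real, imag = dct["__complex__"]
        return complex(real, imag)
    return dct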
Example No. 4
def test_keyword(keywords):
    """Test an input made only of keywords."""
    grammar = getkw.grammar()
    tokens = lexer.parse_string_to_dict(grammar, keywords)

    assert tokens == reference
    # dump to JSON
    getkw_json = StringIO()
    json.dump(tokens, getkw_json, indent=4)
    del tokens

    # load from JSON
    tokens = json.loads(getkw_json.getvalue())

    assert tokens == reference
Example No. 5
def test_flat_sections(flat_sections):
    """Test an input made of two unnested sections, tagged or untagged."""
    ref_dict = {"topsect": dict(reference), "foo<bar>": dict(reference)}
    grammar = getkw.grammar(has_complex=True)
    tokens = lexer.parse_string_to_dict(grammar, flat_sections)

    assert tokens == ref_dict
    # dump to JSON
    getkw_json = StringIO()
    json.dump(tokens, getkw_json, cls=ComplexEncoder, indent=4)
    del tokens

    # load from JSON
    tokens = json.loads(getkw_json.getvalue(), object_hook=as_complex)

    assert tokens == ref_dict
Example No. 6
def test_section(name):
    """Test an input made of one section, tagged or untagged."""
    ref_dict = {name: dict(reference)}
    grammar = getkw.grammar(has_complex=True)
    tokens = lexer.parse_string_to_dict(grammar, section(name))

    assert tokens == ref_dict
    # dump to JSON
    getkw_json = StringIO()
    json.dump(tokens, getkw_json, cls=ComplexEncoder, indent=4)
    del tokens

    # load from JSON
    tokens = json.loads(getkw_json.getvalue(), object_hook=as_complex)

    assert tokens == ref_dict
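Examples 6 and 7 call `section(...)` and `keywords_and_section(...)` helpers that the listing omits. A hypothetical sketch of the first, assuming a module-level `keywords` string holding the keyword block used throughout (both the helper body and that assumption are illustrative):

def section(name):
    # Hypothetical helper: wrap the shared keyword block in a section
    # named `name`, e.g. "topsect" or the tagged form "foo<bar>".
    return "{:s} {{\n{:s}\n}}".format(name, keywords)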
Example No. 7
def test_keywords_and_section(name):
    """Test an input made of keywords, one section, tagged or untagged, and more keywords."""
    ref_dict = dict(reference)
    ref_dict[name] = dict(reference)
    grammar = getkw.grammar()
    tokens = lexer.parse_string_to_dict(grammar, keywords_and_section(name))

    assert tokens == ref_dict
    # dump to JSON
    getkw_json = StringIO()
    json.dump(tokens, getkw_json, indent=4)
    del tokens

    # load from JSON
    tokens = json.loads(getkw_json.getvalue())

    assert tokens == ref_dict
Example No. 8
def test_nested_sections(nested_sections):
    """Test an input made of two nested sections, tagged or untagged."""
    ref_dict = {"topsect": dict(reference)}
    ref_dict["topsect"]["foo<bar>"] = dict(reference)
    grammar = getkw.grammar()
    tokens = lexer.parse_string_to_dict(grammar, nested_sections)

    assert tokens == ref_dict
    # dump to JSON
    getkw_json = StringIO()
    json.dump(tokens, getkw_json, indent=4)
    del tokens

    # load from JSON
    tokens = json.loads(getkw_json.getvalue())

    assert tokens == ref_dict
Example No. 9
def test_keywords_and_nested_sections(keywords_and_nested_sections):
    """Test an input made of keywords and two nested sections, interspersed."""
    ref_dict = dict(reference)
    ref_dict["topsect"] = dict(reference)
    ref_dict["topsect"]["foo<bar>"] = dict(reference)
    grammar = getkw.grammar(has_complex=True)
    tokens = lexer.parse_string_to_dict(grammar, keywords_and_nested_sections)

    assert tokens == ref_dict
    # dump to JSON
    getkw_json = StringIO()
    json.dump(tokens, getkw_json, cls=ComplexEncoder, indent=4)
    del tokens

    # load from JSON
    tokens = json.loads(getkw_json.getvalue(), object_hook=as_complex)

    assert tokens == ref_dict
Example No. 10
def test_keywords_and_flat_sections(keywords_and_flat_sections):
    """Test an input made of keywords and two unnested sections, interspersed."""
    ref_dict = dict(reference)
    ref_dict["topsect"] = dict(reference)
    ref_dict["foo<bar>"] = dict(reference)
    grammar = getkw.grammar()
    tokens = lexer.parse_string_to_dict(grammar, keywords_and_flat_sections)

    assert tokens == ref_dict
    # dump to JSON
    getkw_json = StringIO()
    json.dump(tokens, getkw_json, indent=4)
    del tokens

    # load from JSON
    tokens = json.loads(getkw_json.getvalue())

    assert tokens == ref_dict
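Every round-trip example above follows the same dump/load shape; folded into one helper it reads (a sketch, reusing the encoder/decoder assumptions from earlier):

def json_roundtrip(tokens, has_complex=False):
    # Serialize the parsed dict to JSON and read it straight back,
    # preserving complex values when the grammar produced any.
    buf = StringIO()
    json.dump(tokens, buf, cls=ComplexEncoder if has_complex else None, indent=4)
    return json.loads(buf.getvalue(), object_hook=as_complex if has_complex else None)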