Example #1
def exercise_basic():
  tests = [
    ["a", ['a']],
    ["a and b", ['a', 'b', 'and']],
    ["a or b", ['a', 'b', 'or']],
    ["not a or b", ['a', 'not', 'b', 'or']],
    ["not a or b and c", ['a', 'not', 'b', 'c', 'and', 'or']],
    ["not (a or b) and c", ['a', 'b', 'or', 'not', 'c', 'and']],
    ["(not (a or b) and c)", ['a', 'b', 'or', 'not', 'c', 'and']],
    ["not ((a or b) and c)", ['a', 'b', 'or', 'c', 'and', 'not']],
  ]
  verbose = "--verbose" in sys.argv[1:]
  for input_string, expected_result in tests:
    infix = tokenizer.word_iterator(input_string=input_string)
    if (verbose): print(input_string)
    postfix = [word
      for word, word_iterator in simple_parser.infix_as_postfix(infix)]
    if (verbose): print([word.value for word in postfix])
    assert [word.value for word in postfix] == expected_result
    if (verbose): print()
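The expected postfix lists encode the usual boolean precedence: "not" binds tighter than "and", which binds tighter than "or". A minimal, self-contained shunting-yard sketch reproduces these outputs (illustrative only; this is not the implementation behind simple_parser.infix_as_postfix, and infix_to_postfix is a made-up name):

PRECEDENCE = {"not": 3, "and": 2, "or": 1}

def infix_to_postfix(expression):
  # Pad parentheses with spaces so "(a or b)" splits cleanly on whitespace.
  tokens = expression.replace("(", " ( ").replace(")", " ) ").split()
  output, stack = [], []
  for tok in tokens:
    if tok == "(":
      stack.append(tok)
    elif tok == ")":
      while stack[-1] != "(":
        output.append(stack.pop())
      stack.pop()  # discard the "("
    elif tok in PRECEDENCE:
      # Pop operators of higher precedence; "not" is right-associative,
      # so it never pops another operator of equal precedence.
      while (stack and stack[-1] != "("
             and (PRECEDENCE[stack[-1]] > PRECEDENCE[tok]
                  or (PRECEDENCE[stack[-1]] == PRECEDENCE[tok]
                      and tok != "not"))):
        output.append(stack.pop())
      stack.append(tok)
    else:
      output.append(tok)  # operand
  while stack:
    output.append(stack.pop())
  return output

assert infix_to_postfix("not a or b and c") == ['a', 'not', 'b', 'c', 'and', 'or']
assert infix_to_postfix("not ((a or b) and c)") == ['a', 'b', 'or', 'c', 'and', 'not']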
Example #2
def parse_strings(value):
  try:
    if (value != "") and (not "\"" in value) and (not "'" in value):
      value = "\"" + "\" \"".join(value.split()) + "\""
    words = list(tokenizer.word_iterator(value))
    string_list = strings_from_words(words)
  except ValueError as e:
    raise Sorry(str(e))
  return string_list
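The quoting step is the heart of parse_strings: if the incoming value contains no quote characters, every whitespace-separated field is wrapped in double quotes before tokenizing, so the tokenizer yields exactly one word per field. The transformation in isolation (the value "CA CB CG" is an arbitrary example):

value = "CA CB CG"
quoted = "\"" + "\" \"".join(value.split()) + "\""
print(quoted)  # "CA" "CB" "CG"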
Example #3
def parse_strings(value):
    try:
        if (value != "") and (not "\"" in value) and (not "'" in value):
            value = "\"" + "\" \"".join(value.split()) + "\""
        words = list(tokenizer.word_iterator(value))
        string_list = strings_from_words(words)
    except ValueError as e:
        raise Sorry(str(e))
    return string_list
Example #4
def exercise_basic(verbose):
    tests = [
        ["", []],
        [
            "resname=a and chain=b",
            ['resname', '=', 'a', 'and', 'chain', '=', 'b']
        ],
        ["resname a and chain b", ['resname', 'a', 'and', 'chain', 'b']],
        [
            "resname resname and chain chain",
            ['resname', 'resname', 'and', 'chain', 'chain']
        ],
        ["resname \"a b\"", ['resname', 'a b']],
        ["resname a", ['resname', 'a']],
        ["resname ala and backbone", ['resname', 'ala', 'and', 'backbone']],
        ["resname ala or backbone", ['resname', 'ala', 'or', 'backbone']],
        ["name x and x > 10", ['name', 'x', 'and', 'x', '>', '10']],
        [
            "((expr or expr) and expr)",
            ['(', '(', 'expr', 'or', 'expr', ')', 'and', 'expr', ')']
        ],
        ["resname and and chain b", ['resname', 'and', 'and', 'chain', 'b']],
        ["resname ( and chain b", ['resname', '(', 'and', 'chain', 'b']],
        ["resname \"(\" and chain b", ['resname', '(', 'and', 'chain', 'b']],
        [
            "all_hydrophobic_within(5) and resname ALA",
            ['all_hydrophobic_within', '(', '5', ')', 'and', 'resname', 'ALA']
        ],
        ["something(a, b)", ['something', '(', 'a', ',', 'b', ')']],
        ["something(a b)", ['something', '(', 'a', 'b', ')']],
        ["something(\"a\"\"b\")", ['something', '(', 'a', 'b', ')']],
        ["resname 'a \\\\'", ['resname', 'a \\']],
        ["resname 'a'", ['resname', 'a']],
        ["resname '\"'", ['resname', '"']],
        ["resname '\"\\''", ['resname', '"\'']],
        ["resname \"'\\\"\"", ['resname', '\'"']],
        ["name o1'", ['name', 'o1\'']],
        ['name """o1\'"""', ['name', 'o1\'']],
        ['name """o1\n  o2\'"""', ['name', "o1\n  o2'"]],
        ['name """o1\\\n  o2\'"""', ['name', "o1  o2'"]],
    ]
    for input_string, expected_result in tests:
        show = verbose or expected_result is None
        if (show): print(input_string)
        result = [
            word.value
            for word in tokenizer.word_iterator(input_string=input_string)
        ]
        if (show): print(result)
        if (expected_result is not None):
            assert result == expected_result
        if (show): print()
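The test table above doubles as documentation of the tokenizer's quoting rules: bare words, single and double quotes, backslash escapes, and triple-quoted strings with backslash-newline continuation. A minimal driver, assuming the cctbx import path (the import line is an assumption, not part of the snippet):

from libtbx.phil import tokenizer  # assumed import path

for word in tokenizer.word_iterator(input_string='resname "a b"'):
  print(word.value)
# prints: resname
# prints: a b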
Example #5
def exercise_pickle():
    # TODO: verify this is intended change for py2/3 compat
    from six.moves import cPickle as pickle
    for p in [pickle]:
        o = tokenizer.word(value="hello")
        l = p.loads(p.dumps(o))
        assert l.value == "hello"
        o = tokenizer.settings(meta_comment="%")
        l = p.loads(p.dumps(o))
        assert l.meta_comment == "%"
        o = tokenizer.word_iterator(input_string="all")
        l = p.loads(p.dumps(o))
        assert l.char_iter.input_string == "all"
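six.moves.cPickle resolves to cPickle on Python 2 and to the plain pickle module on Python 3, which is why the loop body is unchanged from the pre-port version in Example #6 below. The dumps/loads round trip is a useful pattern on its own (a sketch; roundtrip is a made-up helper name):

from six.moves import cPickle as pickle

def roundtrip(obj):
  # Serialize and immediately deserialize; asserting on the result checks
  # that pickling preserves the object's state.
  return pickle.loads(pickle.dumps(obj))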
Example #6
def exercise_pickle():
  # Python 2 variant: cPickle is the C implementation of the pickle module.
  # cPickle was removed in Python 3; see the six.moves version in Example #5.
  import pickle
  import cPickle
  for p in [pickle, cPickle]:
    o = tokenizer.word(value="hello")
    l = p.loads(p.dumps(o))
    assert l.value == "hello"
    o = tokenizer.settings(meta_comment="%")
    l = p.loads(p.dumps(o))
    assert l.meta_comment == "%"
    o = tokenizer.word_iterator(input_string="all")
    l = p.loads(p.dumps(o))
    assert l.char_iter.input_string == "all"
Example #7
def exercise_basic():
    tests = [
        ["a", ['a']],
        ["a and b", ['a', 'b', 'and']],
        ["a or b", ['a', 'b', 'or']],
        ["not a or b", ['a', 'not', 'b', 'or']],
        ["not a or b and c", ['a', 'not', 'b', 'c', 'and', 'or']],
        ["not (a or b) and c", ['a', 'b', 'or', 'not', 'c', 'and']],
        ["(not (a or b) and c)", ['a', 'b', 'or', 'not', 'c', 'and']],
        ["not ((a or b) and c)", ['a', 'b', 'or', 'c', 'and', 'not']],
    ]
    verbose = "--verbose" in sys.argv[1:]
    for input_string, expected_result in tests:
        infix = tokenizer.word_iterator(input_string=input_string)
        if (verbose): print(input_string)
        postfix = [
            word
            for word, word_iterator in simple_parser.infix_as_postfix(infix)
        ]
        if (verbose): print([word.value for word in postfix])
        assert [word.value for word in postfix] == expected_result
        if (verbose): print()
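To run this test standalone, the module needs sys plus the tokenizer and parser; the import paths below are assumptions based on cctbx's layout and are not shown in the snippet:

import sys
from libtbx.phil import tokenizer  # assumed import path
from libtbx import simple_parser   # assumed import path

if __name__ == "__main__":
  exercise_basic()
  print("OK")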
Example #8
def rewrite(input_string):
  word_iterator = tokenizer.word_iterator(input_string=input_string)
  return rewrite_parser(word_iterator=word_iterator)
Example #9
def rewrite(input_string):
    word_iterator = tokenizer.word_iterator(input_string=input_string)
    return rewrite_parser(word_iterator=word_iterator)
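rewrite_parser is defined elsewhere in the module. For exercising the wrapper shape in isolation, a trivial stand-in that re-joins token values is enough (entirely hypothetical; the real parser rewrites the expression):

def rewrite_parser(word_iterator):
  # Stand-in only: echo the token stream back as a whitespace-normalized string.
  return " ".join(word.value for word in word_iterator)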
Example #10
def exercise_basic(verbose):
  tests = [
  ["",
    []],
  ["resname=a and chain=b",
    ['resname', '=', 'a', 'and', 'chain', '=', 'b']],
  ["resname a and chain b",
    ['resname', 'a', 'and', 'chain', 'b']],
  ["resname resname and chain chain",
    ['resname', 'resname', 'and', 'chain', 'chain']],
  ["resname \"a b\"",
    ['resname', 'a b']],
  ["resname a",
    ['resname', 'a']],
  ["resname ala and backbone",
    ['resname', 'ala', 'and', 'backbone']],
  ["resname ala or backbone",
    ['resname', 'ala', 'or', 'backbone']],
  ["name x and x > 10",
    ['name', 'x', 'and', 'x', '>', '10']],
  ["((expr or expr) and expr)",
    ['(', '(', 'expr', 'or', 'expr', ')', 'and', 'expr', ')']],
  ["resname and and chain b",
    ['resname', 'and', 'and', 'chain', 'b']],
  ["resname ( and chain b",
    ['resname', '(', 'and', 'chain', 'b']],
  ["resname \"(\" and chain b",
    ['resname', '(', 'and', 'chain', 'b']],
  ["all_hydrophobic_within(5) and resname ALA",
    ['all_hydrophobic_within', '(', '5', ')', 'and', 'resname', 'ALA']],
  ["something(a, b)",
    ['something', '(', 'a', ',', 'b', ')']],
  ["something(a b)",
    ['something', '(', 'a', 'b', ')']],
  ["something(\"a\"\"b\")",
    ['something', '(', 'a', 'b', ')']],
  ["resname 'a \\\\'",
    ['resname', 'a \\']],
  ["resname 'a'",
    ['resname', 'a']],
  ["resname '\"'",
    ['resname', '"']],
  ["resname '\"\\''",
    ['resname', '"\'']],
  ["resname \"'\\\"\"",
    ['resname', '\'"']],
  ["name o1'",
    ['name', 'o1\'']],
  ['name """o1\'"""',
    ['name', 'o1\'']],
  ['name """o1\n  o2\'"""',
    ['name', "o1\n  o2'"]],
  ['name """o1\\\n  o2\'"""',
    ['name', "o1  o2'"]],
  ]
  for input_string, expected_result in tests:
    show = verbose or expected_result is None
    if (show): print(input_string)
    result = [word.value
      for word in tokenizer.word_iterator(input_string=input_string)]
    if (show): print(result)
    if (expected_result is not None):
      assert result == expected_result
    if (show): print()
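The doubled escaping in the table (Python string-literal escapes layered over the tokenizer's own backslash escapes) is easy to misread; printing one case separates the two levels:

s = "resname 'a \\\\'"
print(s)       # resname 'a \\'  <- two literal backslashes reach the tokenizer
print('a \\')  # a \             <- the single backslash in the expected word value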