Example #1
def canned():
    """parser for canned proof statements"""
    return (next_phrase("we proceed as follows") |
            (next_word('the') +
             first_word('result lemma theorem proposition corollary') +
             next_word('now').possibly() + next_word('follows'))
            | next_phrase('the other cases are similar') |
            (next_phrase('the proof is') +
             first_word('obvious trivial easy routine'))
            ).nil().expect('canned')
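
For orientation, a minimal sketch of how canned() might be exercised, assuming the mk_item_stream helper and the process/ParseError interface shown in Example #5 are importable alongside it:

# Sketch only: mk_item_stream and pc.ParseError are taken from Example #5;
# their exact import paths are not shown in these excerpts.
canned().process(mk_item_stream('the proof is trivial'))      # accepted
canned().process(mk_item_stream('the result now follows'))    # accepted
try:
    canned().process(mk_item_stream('by a long computation'))
except pc.ParseError:
    pass  # anything non-canned is rejected via the 'canned' expectation
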
Example #2
def let_annotation():
    """Parser for let_annotations. Terminating punctuation not included.
    
    Sample parser inputs:
        Let G be a group
        Let G be a fixed group
        Let (H G : group)
        Fix (x : R)
        
    Issues: No treatment for now, but return to this later.   
    """
    return ((first_word('fix let') + c.comma_nonempty_list(annotated_sort_vars))
            | (let_annotation_prefix() + post_colon_balanced()))
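
A hedged usage sketch, feeding one of the docstring's sample inputs through the parser; it assumes the same tokenizer helper seen in Example #5:

# Sketch only: assumes mk_item_stream (Example #5) tokenizes raw text.
its = mk_item_stream('Let G be a group')
outcome = let_annotation().process(its)
# outcome.acc carries the parsed annotation tokens; any terminating
# punctuation is left in the stream for the caller, per the docstring.
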
Example #3
def lit(s):
    """parser generator for 's'-like words or phrases"""
    if s == 'record':
        return (Parse.word('we').possibly() +
                first_word('record register') +
                Parse.word('identification').possibly() +
                Parse.word('that').possibly())
    if s == 'doc':
        return lit_dict['document'] | lit_dict['end-document']
    if s == 'location':
        return Parse.first(
            [lit_dict['document'], lit_dict['theorem'], lit_dict['axiom']])
    return lit_dict[s]
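
The dispatch is mostly a plain lookup into lit_dict (see Example #7); only 'record', 'doc', and 'location' are computed. A small sketch of what the call returns:

# Sketch only: illustrates the dispatch, not a full usage example.
is_parser = lit('is')            # same parser object as lit_dict['is']
location_parser = lit('location')  # alternative: document | theorem | axiom
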
Example #4
    def instruction():
        """parsing and processing of synonyms and other instructions"""
        def treat_syn(acc):
            # register every slashdash-expanded synonym group
            for ac in acc:
                vs = [t.value for t in ac]
                v_expand = Instruction._expand_slashdash(vs)
                c.synonym_add(v_expand)
            return ()

        def treat_instruct(acc):
            # store the instruction parameter under its keyword
            keyword, ls = acc
            instruct[keyword.value] = Instruction._param_value(ls)
            return ()

        keyword_instruct = (first_word("""exit timelimit printgoal dump
                         ontored read library error warning""") +
                            Parse.next_token().possibly())
        return (c.bracket(next_word('synonym') + Instruction._syn().treat(treat_syn))
                | c.bracket(keyword_instruct.treat(treat_instruct)))
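
A hedged sketch of how an instruction might be consumed. It assumes instruction() is exposed on the Instruction class and that bracketed source such as [timelimit 60] tokenizes via the helper from Example #5; the concrete bracket and slashdash forms below are inferred from c.bracket(...) and _expand_slashdash, not taken from documentation.

# Sketch only: forms inferred from the parser definitions above.
Instruction.instruction().process(mk_item_stream('[timelimit 60]'))
# side effect: treat_instruct stores the parameter under instruct['timelimit']
Instruction.instruction().process(mk_item_stream('[synonym group/-s]'))
# side effect: treat_syn registers the slashdash-expanded synonyms
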
Example #5
def test_next_word():
    its = mk_item_stream('Hello X journey there   now  and then')
    p = pc.next_word('hello')
    its1 = p.process(its)
    assert its1.acc.value == 'hello'

    #next_any_word
    p = pc.next_any_word()
    its2 = p.process(its1)
    assert its2.acc.value == 'x'

    #next_any_word_except
    p = pc.next_any_word_except(['star'])  # 'star' is a synonym of 'journey'
    try:
        p.process(its2)
        assert False, 'expected ParseError'
    except pc.ParseError:
        pass

    #first_word
    p = pc.first_word('X journey there hello now')
    itp = p.process(its)
    assert itp.acc.value == 'hello'

    #next_phrase
    p = pc.next_phrase('star there now')
    its3 = p.process(its2)
    val = [t.value for t in its3.acc]
    # synonym canonicalization maps 'star' to the group 'journey roundtrip star'
    assert val == ['journey roundtrip star', 'there', 'now']

    #first_phrase
    p = pc.first_phrase(['yes', 'not ever', 'and even', 'and then'])
    its4 = p.process(its3)
    val = [t.value for t in its4.acc]
    assert val == ['and', 'then']
Example #6
def phrase_list_filler():
    """parser for filler words"""
    return (Parse.word('we').possibly() +
            first_word('put write have know see') +
            Parse.word('that').possibly()).nil()
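
Like canned() in Example #1, this parser ends in .nil(), which presumably discards the matched filler tokens. A minimal sketch, again assuming the Example #5 helpers:

# Sketch only: accepts optional 'we', one of the listed verbs, optional 'that'.
phrase_list_filler().process(mk_item_stream('we know that'))
phrase_list_filler().process(mk_item_stream('see'))
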
Example #7
def hierarchical_identifier():
    """Parser output is a single token."""
    return Parse.next_token().if_type(['HIERARCHICAL_IDENTIFIER'])


def identifier():
    """parser for hierarchical or atomic identifier.
    Parser output is a single token"""
    return (atomic() | hierarchical_identifier()).expect('identifier')
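
A sketch under the assumption that the lexer tags dotted names such as 'Group.mul' with the 'HIERARCHICAL_IDENTIFIER' token type; the lexer itself is not part of these excerpts.

# Sketch only: 'Group.mul' is a hypothetical identifier for illustration.
tok = identifier().process(mk_item_stream('Group.mul')).acc
# per the docstring, the parser output is a single token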


# canned phrases that have small variants
# lit(w) gives a parser for w-like words or phrases

lit_dict = {
    'a': first_word('a an'),  # indefinite
    'article': first_word('a an the'),
    'defined-as': first_phrase(['said to be', 'defined as', 'defined to be']),
    'is': first_phrase(['is', 'are', 'be', 'to be']),
    'iff': (first_phrase(['iff', 'if and only if']) |
            (first_phrase(['is', 'are', 'be', 'to be']) +
             next_word('the').possibly() + next_word('predicate'))),
    'denote': first_phrase(['denote', 'stand for']),
    'do': first_word('do does'),
    'equal': next_phrase('equal to'),