Example no. 1
def __init__(self, input, gramfile=None, background=None):
    """
    Initialize a C{DiscourseTester}.

    @parameter input: the discourse sentences
    @type input: C{list} of C{str}
    @parameter gramfile: name of file where grammar can be loaded
    @type gramfile: C{str}
    @parameter background: Formulas which express background assumptions
    @type background: C{list} of L{logic.Expression}.
    """
    self._input = input
    self._sentences = dict([('s%s' % i, sent)
                            for i, sent in enumerate(input)])
    self._models = None
    self._readings = {}
    if gramfile is None:
        self._gramfile = 'grammars/sem4.fcfg'
    else:
        self._gramfile = gramfile
    self._threads = {}
    self._filtered_threads = {}
    self._parser = parse.load_earley(self._gramfile)
    if background is not None:
        for e in background:
            assert isinstance(e, Expression)
        self._background = background
    else:
        self._background = []
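
A minimal construction sketch based only on the initializer above, assuming it belongs to the DiscourseTester class named in its docstring (the class's import path is not shown in the snippet); the sentences are illustrative:

# Hedged sketch: DiscourseTester is assumed to be in scope.
sentences = ['a boxer walks', 'every boxer chases a girl']
dt = DiscourseTester(sentences)
# The initializer keys the sentences as 's0', 's1', ... in dt._sentences and,
# with no gramfile argument, loads the default 'grammars/sem4.fcfg' with the
# Earley parser; background defaults to an empty list.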
Example no. 2
def __init__(self, input, gramfile=None, background=None):
    """
    Initialize a C{DiscourseTester}.

    @parameter input: the discourse sentences
    @type input: C{list} of C{str}
    @parameter gramfile: name of file where grammar can be loaded
    @type gramfile: C{str}
    @parameter background: Formulas which express background assumptions
    @type background: C{list} of L{logic.Expression}.
    """
    self._input = input
    self._sentences = dict([('s%s' % i, sent) for i, sent in enumerate(input)])
    self._models = None
    self._readings = {}
    if gramfile is None:
        self._gramfile = 'grammars/sem4.fcfg'
    else:
        self._gramfile = gramfile
    self._threads = {}
    self._filtered_threads = {}
    self._parser = parse.load_earley(self._gramfile)
    if background is not None:
        for e in background:
            assert isinstance(e, Expression)
        self._background = background
    else:
        self._background = []
Example no. 3
def earley_parse(sentence="every cat leaves"):
    from nltk.parse import load_earley

    cp = load_earley(r"grammars/gluesemantics.fcfg")
    tokens = sentence.split()
    trees = cp.nbest_parse(tokens)
    return trees
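
A short usage sketch for the earley_parse helper above; it simply calls the function with its own default sentence, so nothing is assumed beyond the grammar path already hard-coded in the snippet:

# Hedged sketch: parse the default sentence and print each resulting tree.
trees = earley_parse()
for tree in trees:
    print(tree)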
Example no. 4
def __init__(self, gramfile=None):
    """
    @parameter gramfile: name of file where grammar can be loaded
    @type gramfile: C{str}
    """
    if gramfile is None:
        self._gramfile = 'grammars/sample_grammars/sem4.fcfg'
    else:
        self._gramfile = gramfile
    self._parser = parse.load_earley(self._gramfile)
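
A brief usage sketch for the parser created by this initializer, assuming (as in the other examples in this collection) that the object returned by parse.load_earley exposes nbest_parse; the grammar path matches the default above and the sentence is illustrative:

# Hedged sketch: the sentence must be covered by the grammar's lexicon.
parser = parse.load_earley('grammars/sample_grammars/sem4.fcfg')
trees = parser.nbest_parse('a boxer walks'.split())
for tree in trees:
    print(tree)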
Example no. 5
def parse_with_bindops(sentence, grammar=None, trace=0):
    """
    Use a grammar with Binding Operators to parse a sentence.
    """
    if not grammar:
        grammar = 'grammars/book_grammars/storage.fcfg'
    parser = load_earley(grammar,
                         trace=trace,
                         chart_class=InstantiateVarsChart)
    # Parse the sentence.
    tokens = sentence.split()
    return parser.nbest_parse(tokens)
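
A hedged usage sketch for parse_with_bindops; the sentence is illustrative and has to be covered by the lexicon of the default grammar ('grammars/book_grammars/storage.fcfg', per the code above):

# Hedged sketch: call the helper defined above and inspect the resulting trees.
trees = parse_with_bindops('every girl chases a dog', trace=0)
for tree in trees:
    # The semantics assembled with the binding operators lives in the trees'
    # feature structures; here we simply print each parse.
    print(tree)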
Example no. 6
def main():
    import sys
    from optparse import OptionParser, OptionGroup
    usage = """%%prog [options] [grammar_file]""" % globals()

    opts = OptionParser(usage=usage)
    opts.add_option("-c", "--components",
        action="store_true", dest="show_components", default=0,
        help="show hole semantics components")
    opts.add_option("-r", "--raw",
        action="store_true", dest="show_raw", default=0,
        help="show the raw hole semantics expression")
    opts.add_option("-d", "--drawtrees",
        action="store_true", dest="draw_trees", default=0,
        help="show formula trees in a GUI window")
    opts.add_option("-v", "--verbose",
        action="count", dest="verbosity", default=0,
        help="show more information during parse")

    (options, args) = opts.parse_args()

    if len(args) > 0:
        filename = args[0]
    else:
        filename = 'grammars/hole.fcfg'

    print 'Reading grammar file', filename
    #grammar = data.load(filename)
    parser = load_earley(filename, trace=options.verbosity)

    # Prompt the user for a sentence.
    print 'Sentence: ',
    line = sys.stdin.readline()[:-1]

    # Parse the sentence.
    tokens = line.split()
    trees = parser.nbest_parse(tokens)
    print 'Got %d different parses' % len(trees)

    for tree in trees:
        # Get the semantic feature from the top of the parse tree.
        sem = tree[0].node['sem'].simplify()

        # Skolemise away all quantifiers.  All variables become unique.
        sem = sem.skolemise()

        # Reparse the semantic representation from its bracketed string format.
        # I find this uniform structure easier to handle.  It also makes the
        # code mostly independent of the lambda calculus classes.
        usr = bracket_parse(str(sem))

        # Break the hole semantics representation down into its components
        # i.e. holes, labels, formula fragments and constraints.
        hole_sem = HoleSemantics(usr)

        # Maybe print the raw semantic representation.
        if options.show_raw:
            print
            print 'Raw expression'
            print usr

        # Maybe show the details of the semantic representation.
        if options.show_components:
            print
            print 'Holes:       ', hole_sem.holes
            print 'Labels:      ', hole_sem.labels
            print 'Constraints: ', hole_sem.constraints
            print 'Top hole:    ', hole_sem.top_hole
            print 'Top labels:  ', hole_sem.top_most_labels
            print 'Fragments:'
            for (l,f) in hole_sem.fragments.items():
                print '\t%s: %s' % (l, f)

        # Find all the possible ways to plug the formulas together.
        pluggings = hole_sem.pluggings()

        # Build FOL formula trees using the pluggings.  Distinct names are
        # used here so the parse-loop variables `trees` and `tree` above are
        # not shadowed.
        formula_trees = map(hole_sem.formula_tree, pluggings)

        # Print out the formulas in a textual format.
        n = 1
        for formula in formula_trees:
            print
            print '%d. %s' % (n, formula)
            n += 1

        # Maybe draw the formulas as trees.
        if options.draw_trees:
            draw_trees(*formula_trees)

        print
        print 'Done.'
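
A compact, non-interactive sketch of the same pipeline that main() runs above, assuming the module-level names it relies on (load_earley, bracket_parse, HoleSemantics) are in scope; the grammar path and sentence are illustrative:

# Hedged sketch: parse, simplify and skolemise the semantics, then enumerate
# the pluggings of the hole-semantics representation, as main() does.
parser = load_earley('grammars/hole.fcfg')
for tree in parser.nbest_parse('every dog barks'.split()):
    sem = tree[0].node['sem'].simplify()
    usr = bracket_parse(str(sem.skolemise()))
    hole_sem = HoleSemantics(usr)
    for plugging in hole_sem.pluggings():
        print(hole_sem.formula_tree(plugging))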
Example no. 7
def earley_parse(sentence='every cat leaves'):
    from nltk.parse import load_earley
    cp = load_earley(r'grammars/gluesemantics.fcfg')
    tokens = sentence.split()
    trees = cp.nbest_parse(tokens)
    return trees