# Example #1
# 0
def parse():
    """Parse the submitted command and return a JSON-serializable dict.

    Reads the command from the request form field ``command``, tokenizes it,
    extends the grammar with a production for every integer token found, and
    parses with an Earley chart parser.

    Returns:
        dict with keys:
          - 'status': True when exactly one parse tree was found, else False.
          - 'errors': list of HTML-formatted error messages (empty on success).
          - 'data': on success, the original sentence, its parse tree, and the
            evaluated drawing actions; otherwise an empty dict.
    """
    status = False
    data = {}
    errors = []

    command = request.forms.get('command')

    # Tokenize the raw command string.
    tokens = preprocess(command)

    # Work on a local copy so the shared production list is not mutated
    # across requests.
    lproductions = list(productions)

    # Find all distinct integer tokens and add a production for each one.
    ints = set(filter(RE_INT.match, tokens))
    lproductions.extend(map(num_production, ints))

    # Per-request grammar that includes the integer productions.
    lgrammar = FeatureGrammar(grammar.start(), lproductions)

    # Load grammar into a parser.
    parser = FeatureEarleyChartParser(lgrammar, trace=0)

    try:
        trees = parser.nbest_parse(tokens)
        if not trees:
            errors = ['I could not parse this sentence.']
        elif len(trees) > 1:
            errors = ['This sentence had multiple interpretations.']
        else:
            status = True
            try:
                # Set up per-session command history (beaker middleware).
                session = bottle.request.environ.get('beaker.session')
                history.history = session.get('history', [])
                session['history'] = history.history

                # SECURITY NOTE: eval() of the tree's SEM feature executes an
                # arbitrary expression in the draw namespace. The grammar
                # constrains what can be produced, but review before exposing
                # this endpoint to untrusted input.
                commands = eval(str(trees[0].node['SEM']), draw.functions)
                data = {
                    # Reuse the value already fetched above instead of
                    # re-reading the form.
                    'sentence': command,
                    'tree': trees[0],
                    # Evaluated semantics (draw actions).
                    'actions': commands,
                    }

            except AssertionError as e:
                status = False
                errors = ['I got the following semantic error: <br /><pre>' + str(e) + '</pre>']

    except ValueError as e:
        status = False
        errors = ['I got the following error: <br /><pre>' + str(e) + '</pre>']
    return {'status':status, 'errors':errors, 'data':data}
    def parse(self, command):
        """Parse *command* and return the n-best parse trees.

        NOTE(review): this ``def`` is nested inside the module-level
        ``parse()`` above and appears AFTER its ``return`` statement, so it
        is unreachable dead code — it looks like a class method pasted here
        by mistake. Confirm whether it should be removed or moved into a
        class.
        """
        tokens = preprocess(command)
        # Distinct integer tokens found in the input.
        ints = set(filter(RE_INT.match, tokens))
        lproductions = list(grammar.productions())
        # Add a production for every integer
        lproductions.extend(map(num_production, ints))

        # Make a local copy of the grammar with extra productions
        # NOTE(review): the module-level parse() calls grammar.start() here;
        # type(self).start (an unparenthesized class attribute) may be a
        # bug — confirm against the owning class.
        lgrammar = FeatureGrammar(type(self).start, lproductions)

        # Load grammar into a parser
        parser = FeatureEarleyChartParser(lgrammar, trace=0)
        return parser.nbest_parse(tokens)