Example #1
0
 def special_local(self, greedy):
     """
     The special LOCAL form (with unevaluated variables).  (Should
     this be generally greedy?)

     :param greedy: when true, consume every following word as a
         variable name until a non-word token is seen; otherwise
         take exactly one name.
     :returns: None (LOCAL produces no value).
     """
     # Renamed from ``vars`` so the builtin of that name is not
     # shadowed.
     names = []
     if greedy:
         while True:  # idiomatic form of the original ``while 1``
             tok = self.tokenizer.peek()
             if tok in (':', '"'):
                 # ':' and '"' are variable-name prefixes; skip them
                 # and keep reading.
                 self.tokenizer.next()
                 continue
             elif not reader.is_word(tok):
                 # The first non-word token ends the name list.
                 break
             names.append(tok)
             self.tokenizer.next()
     else:
         # Non-greedy: one optional prefix, then exactly one name.
         if self.tokenizer.peek() in (':', '"'):
             self.tokenizer.next()
         names = [self.tokenizer.next()]
     for name in names:
         self.make_local(name)
     return None
Example #2
0
 def special_local(self, greedy):
     """
     Implement the special LOCAL form, whose variable names are not
     evaluated.  (Should this be generally greedy?)
     """
     if not greedy:
         # Single-variable form: an optional ':' or '"' prefix,
         # then exactly one token taken as the variable name.
         if self.tokenizer.peek() in (':', '"'):
             self.tokenizer.next()
         self.make_local(self.tokenizer.next())
         return None
     # Greedy form: keep collecting words until something that is
     # neither a word nor a ':'/'"' prefix shows up.
     collected = []
     while True:
         ahead = self.tokenizer.peek()
         if ahead in (':', '"'):
             # Prefix tokens are discarded; the name follows.
             self.tokenizer.next()
         elif reader.is_word(ahead):
             collected.append(ahead)
             self.tokenizer.next()
         else:
             break
     for name in collected:
         self.make_local(name)
     return None
Example #3
0
    def expr_inner(self, apply=None, get_function=None, get_variable=None):
        """
        An 'inner' expression, an expression that does not include
        infix operators.

        ::

          exprInner ::= <literal int or float>
                    ::= '-' expr
                    ::= '+' expr
                    ::= ('\"' or 'QUOTE') <word>
                    ::= ':' <word>
                    ::= MAKE (':' or '\"') <word> expr
                    ::= MAKE <word> expr
                    ::= TO <to expression>
                    ::= '[' <list expression> ']'
                    ::= '(' <word> <expr> ... <expr> ')'
                    ::= <word> <expr> ... <expr>

        Things to note:

        * ``MAKE :x 10``, ``MAKE \"x 10``, and ``MAKE x 10`` all work
          equivalently (make is a special form, unlike in UCBLogo).
        * <list expression> is a nested list of tokens.
        * <to expression> is TO func_name var1 var2 <int>, where <int>
          is the default arity (number of variables).  Variables, like
          with make, can be prefixed with : or \", but need not be.
        * () is not used to force precedence, but to force execution
          with a specific arity.  In other words, () works like Lisp.
        """
        tok = self.tokenizer.next()
        # Fall back to the interpreter's own hooks when the caller did
        # not supply overrides.
        if apply is None:
            apply = self.apply
        if get_function is None:
            get_function = self.get_function
        if get_variable is None:
            get_variable = self.get_variable

        if tok == '\n':
            # A bare newline means the expression ended too soon.  (An
            # unreachable ``return self.expr_inner()`` used to follow
            # this raise; it has been removed.)
            raise LogoEndOfLine("The end of the line was not expected")

        elif tok is EOF:
            raise LogoEndOfCode("The end of the code block was not expected")

        elif not isinstance(tok, basestring):
            # Some other fundamental type (usually int or float)
            return tok

        elif tok == '-':
            # This works really poorly in practice, because "-" usually
            # gets interpreted as an infix operator.
            return -self.expr()

        elif tok == '+':
            # Unary plus is a no-op.
            return self.expr()

        elif tok in ('/', '*'):
            raise LogoError("Operator not expected: %s" % tok)

        elif tok == '"' or tok.lower() == 'quote':
            # Quoted word: return the next token unevaluated.
            tok = self.tokenizer.next()
            return tok

        elif tok == ':':
            # Variable reference: ':' followed by the variable name.
            tok = self.tokenizer.next()
            return get_variable(tok)

        elif tok == '[':
            # Bracketed list: parsed as a nested list of raw tokens.
            self.tokenizer.push_context('[')
            result = self.expr_list()
            self.tokenizer.pop_context()
            return result

        elif tok == ';':
            # Comment: discard everything up to end of line (or EOF).
            # This branch deliberately falls off the end and implicitly
            # returns None.
            while True:
                tok = self.tokenizer.next()
                if tok == '\n' or tok is EOF:
                    break

        elif tok == '(':
            # Parenthesized call: like Lisp, '(' forces execution of
            # <word> with an explicit argument list.
            self.tokenizer.push_context('(')
            try:
                func = self.tokenizer.peek()
                if not reader.is_word(func):
                    # We don't actually have a function call then, but
                    # just a sub-expression.
                    val = self.expr()
                    if not self.tokenizer.next() == ')':
                        raise LogoSyntaxError("')' expected")
                    return val
                else:
                    self.tokenizer.next()
                if func.lower() in self.special_forms:
                    # Special forms parse their own arguments; inside
                    # parens they run greedily.
                    special_form = self.special_forms[func.lower()]
                    val = special_form(self, greedy=True)
                    next_tok = self.tokenizer.next()
                    if next_tok != ')':
                        raise LogoSyntaxError("')' expected")
                    return val
                else:
                    # Ordinary call: gather argument expressions until
                    # the closing ')'.
                    args = []
                    while True:
                        tok = self.tokenizer.peek()
                        if tok == ')':
                            break
                        elif tok == '\n':
                            self.tokenizer.next()
                            continue
                        elif tok is EOF:
                            raise LogoEndOfCode(
                                "Unexpected end of code (')' expected)")
                        args.append(self.expr())
                    val = apply(get_function(func), args)
                if not self.tokenizer.next() == ')':
                    raise LogoSyntaxError("')' was expected.")
            finally:
                self.tokenizer.pop_context()
            return val

        else:
            if not reader.is_word(tok):
                raise LogoSyntaxError("Unknown token: %r" % tok)
            if tok.lower() in self.special_forms:
                # Bare special form (outside parens) runs non-greedily.
                special_form = self.special_forms[tok.lower()]
                val = special_form(self, greedy=False)
                return val
            else:
                func_name = tok
                func = get_function(func_name)
                n = arity(func)
                self.tokenizer.push_context('func')
                try:
                    args = []
                    # -1 arity means the function is greedy
                    if n == -1:
                        # Greedy: consume expressions to end of line.
                        while True:
                            tok = self.tokenizer.peek()
                            if tok == '\n' or tok is EOF:
                                self.tokenizer.next()
                                break
                            args.append(self.expr())
                    else:
                        # Fixed arity: parse exactly n argument
                        # expressions.
                        for i in range(n):
                            try:
                                args.append(self.expr())
                            except (LogoEndOfCode, LogoEndOfLine):
                                raise LogoEndOfCode(
                                    "Not enough arguments provided to %s: got %i and need %i"
                                    % (func_name, i, n))
                finally:
                    self.tokenizer.pop_context()
                return apply(func, args)
Example #4
0
    def expr_inner(self, apply=None, get_function=None,
                   get_variable=None):
        """
        An 'inner' expression, an expression that does not include
        infix operators.

        ::

          exprInner ::= <literal int or float>
                    ::= '-' expr
                    ::= '+' expr
                    ::= ('\"' or 'QUOTE') <word>
                    ::= ':' <word>
                    ::= MAKE (':' or '\"') <word> expr
                    ::= MAKE <word> expr
                    ::= TO <to expression>
                    ::= '[' <list expression> ']'
                    ::= '(' <word> <expr> ... <expr> ')'
                    ::= <word> <expr> ... <expr>

        Things to note:

        * ``MAKE :x 10``, ``MAKE \"x 10``, and ``MAKE x 10`` all work
          equivalently (make is a special form, unlike in UCBLogo).
        * <list expression> is a nested list of tokens.
        * <to expression> is TO func_name var1 var2 <int>, where <int>
          is the default arity (number of variables).  Variables, like
          with make, can be prefixed with : or \", but need not be.
        * () is not used to force precedence, but to force execution
          with a specific arity.  In other words, () works like Lisp.
        """
        tok = self.tokenizer.next()
        # Fall back to the interpreter's own hooks when the caller did
        # not supply overrides.
        if apply is None:
            apply = self.apply
        if get_function is None:
            get_function = self.get_function
        if get_variable is None:
            get_variable = self.get_variable

        if tok == '\n':
            # A bare newline means the expression ended too soon.  (An
            # unreachable ``return self.expr_inner()`` used to follow
            # this raise; it has been removed.)
            raise LogoEndOfLine("The end of the line was not expected")

        elif tok is EOF:
            raise LogoEndOfCode("The end of the code block was not expected")

        elif not isinstance(tok, basestring):
            # Some other fundamental type (usually int or float)
            return tok

        elif tok == '-':
            # This works really poorly in practice, because "-" usually
            # gets interpreted as an infix operator.
            return -self.expr()

        elif tok == '+':
            # Unary plus is a no-op.
            return self.expr()

        elif tok in ('/', '*'):
            raise LogoError("Operator not expected: %s" % tok)

        elif tok == '"' or tok == 'quote':
            # Quoted word: return the next token unevaluated.
            # NOTE(review): 'quote' is matched case-sensitively here —
            # presumably the tokenizer lowercases words; confirm.
            tok = self.tokenizer.next()
            return tok

        elif tok == ':':
            # Variable reference: ':' followed by the variable name.
            tok = self.tokenizer.next()
            return get_variable(tok)

        elif tok == '[':
            # Bracketed list: parsed as a nested list of raw tokens.
            self.tokenizer.push_context('[')
            result = self.expr_list()
            self.tokenizer.pop_context()
            return result

        elif tok == ';':
            # Comment: discard everything up to end of line (or EOF).
            # This branch deliberately falls off the end and implicitly
            # returns None.
            while True:
                tok = self.tokenizer.next()
                if tok == '\n' or tok is EOF:
                    break

        elif tok == '(':
            # Parenthesized call: like Lisp, '(' forces execution of
            # <word> with an explicit argument list.
            self.tokenizer.push_context('(')
            try:
                func = self.tokenizer.peek()
                if not reader.is_word(func):
                    # We don't actually have a function call then, but
                    # just a sub-expression.
                    val = self.expr()
                    if not self.tokenizer.next() == ')':
                        raise LogoSyntaxError("')' expected")
                    return val
                else:
                    self.tokenizer.next()
                # ``has_key`` (Python-2-only, removed in Python 3)
                # replaced with the ``in`` operator; the lookup is
                # unchanged (still case-sensitive here).
                if func in self.special_forms:
                    special_form = self.special_forms[func]
                    val = special_form(self, greedy=True)
                    next_tok = self.tokenizer.next()
                    if next_tok != ')':
                        raise LogoSyntaxError("')' expected")
                    return val
                else:
                    # Ordinary call: gather argument expressions until
                    # the closing ')'.
                    args = []
                    while True:
                        tok = self.tokenizer.peek()
                        if tok == ')':
                            break
                        elif tok == '\n':
                            self.tokenizer.next()
                            continue
                        elif tok is EOF:
                            raise LogoEndOfCode("Unexpected end of code (')' expected)")
                        args.append(self.expr())
                    val = apply(get_function(func), args)
                if not self.tokenizer.next() == ')':
                    raise LogoSyntaxError("')' was expected.")
            finally:
                self.tokenizer.pop_context()
            return val

        else:
            if not reader.is_word(tok):
                raise LogoSyntaxError("Unknown token: %r" % tok)
            if tok in self.special_forms:
                # Bare special form (outside parens) runs non-greedily.
                special_form = self.special_forms[tok]
                val = special_form(self, greedy=False)
                return val
            else:
                func_name = tok
                func = get_function(func_name)
                n = arity(func)
                self.tokenizer.push_context('func')
                try:
                    args = []
                    # -1 arity means the function is greedy
                    if n == -1:
                        # Greedy: consume expressions to end of line.
                        while True:
                            tok = self.tokenizer.peek()
                            if tok == '\n' or tok is EOF:
                                self.tokenizer.next()
                                break
                            args.append(self.expr())
                    else:
                        # Fixed arity: parse exactly n argument
                        # expressions.
                        for i in range(n):
                            try:
                                args.append(self.expr())
                            except (LogoEndOfCode, LogoEndOfLine):
                                raise LogoEndOfCode(
                                    "Not enough arguments provided to %s: got %i and need %i" % (func_name, i, n))
                finally:
                    self.tokenizer.pop_context()
                return apply(func, args)