def __init__(self, strict=0):
    self.strict = strict
    if strict:
        sdict = _getStrictScannerDict()
    else:
        sdict = _getScannerDict()
    ContextSensitiveScanner.__init__(self, sdict)
Example #2
def tokenize(self, input, default_mode=_COMMAND_MODE):
    self.rv = []
    self.lineno = 1
    # default mode when leaving _START_LINE_MODE
    self.default_mode = default_mode
    # argsep is used to insert commas as argument separators
    # in command mode
    self.argsep = None
    self.parencount = 0
    ContextSensitiveScanner.tokenize(self, input)
    self.addToken(type='NEWLINE')
    return self.rv
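
The pattern in this method is: reset the per-call state, delegate the actual scanning to the base class, then append a terminating NEWLINE token and return the accumulated list. Below is a minimal self-contained sketch of that same pattern; BaseScanner, SketchScanner, and Token are illustrative stand-ins and not the real ContextSensitiveScanner API.

from collections import namedtuple

Token = namedtuple('Token', ['type', 'attr'])

class BaseScanner:
    # stand-in for ContextSensitiveScanner: splits input on whitespace
    def tokenize(self, input):
        for word in input.split():
            self.addToken(type='WORD', attr=word)

class SketchScanner(BaseScanner):
    def tokenize(self, input, default_mode='COMMAND'):
        # reset per-call state, delegate to the base class,
        # then terminate the token stream explicitly
        self.rv = []
        self.lineno = 1
        self.default_mode = default_mode
        BaseScanner.tokenize(self, input)
        self.addToken(type='NEWLINE')
        return self.rv

    def addToken(self, type, attr=None):
        self.rv.append(Token(type, attr))

# SketchScanner().tokenize("print hello") returns
# [Token('WORD', 'print'), Token('WORD', 'hello'), Token('NEWLINE', None)]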
Example #4
    def __init__(self, strict=0):

        # ECL mode recognizes a few additional keywords
        if pyrafglobals._use_ecl:
            _keywordDict["iferr"] = 1
            _keywordDict["ifnoerr"] = 1
            _keywordDict["then"] = 1

        self.strict = strict
        # choose the strict or relaxed scanner dictionary
        if strict:
            sdict = _getStrictScannerDict()
        else:
            sdict = _getScannerDict()
        ContextSensitiveScanner.__init__(self, sdict)
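
A keyword dictionary like _keywordDict is typically consulted when an identifier is scanned, so adding entries is enough to make the scanner treat new words as keywords. A small sketch of that idea, assuming a hypothetical classify_word helper and a local _use_ecl flag standing in for pyrafglobals._use_ecl:

# keyword table: maps reserved words to a truthy flag
_keywordDict = {"if": 1, "else": 1, "while": 1}

_use_ecl = True      # hypothetical switch standing in for pyrafglobals._use_ecl
if _use_ecl:
    _keywordDict["iferr"] = 1
    _keywordDict["ifnoerr"] = 1
    _keywordDict["then"] = 1

def classify_word(word):
    # keywords and plain identifiers become different token types
    return "KEYWORD" if word in _keywordDict else "IDENT"

print(classify_word("iferr"))   # KEYWORD (only when the ECL switch is on)
print(classify_word("imstat"))  # IDENT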
Example #6
    def enterComputeEqnMode(self):
        # Nasty hack to work around weird CL syntax
        # In compute-start mode, tokens are strings or identifiers
        # or numbers depending on what follows them, and the mode
        # once switched to compute-mode stays there until a
        # terminating comma.  Ugly stuff.
        #
        # This is called when a token is received that triggers the
        # transition to the compute-eqn mode from compute-start mode.
        # It may be necessary to change tokens already on the
        # list when this is called...

        self.current.append(_COMPUTE_EQN_MODE)
        if self.rv and self.rv[-1].type == "STRING":
            # if last token was a string, we must remove it and
            # rescan it using the compute-mode scanner
            # Hope this works!
            last = self.rv[-1].attr
            del self.rv[-1]
            ContextSensitiveScanner.tokenize(self, last)
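
The key trick here is popping an already-emitted STRING token and re-tokenizing its text once the scanner knows it has entered equation mode. A self-contained sketch of that idea follows; the two-mode MiniScanner, its regex, and its token names are made up for illustration and are not the PyRAF grammar.

import re
from collections import namedtuple

Token = namedtuple('Token', ['type', 'attr'])

class MiniScanner:
    def __init__(self):
        self.rv = []
        self.mode = 'START'

    def addToken(self, type, attr=None):
        self.rv.append(Token(type, attr))

    def scan(self, text):
        # START mode: bare words are emitted as STRING tokens
        # EQN mode: words become IDENT and digits become NUMBER tokens
        for piece in re.findall(r'[A-Za-z_]\w*|\d+|[=+*/-]', text):
            if piece == '=':
                self.enterEqnMode()
                self.addToken('OP', piece)
            elif piece in '+-*/':
                self.addToken('OP', piece)
            elif self.mode == 'START':
                self.addToken('STRING', piece)
            elif piece.isdigit():
                self.addToken('NUMBER', int(piece))
            else:
                self.addToken('IDENT', piece)
        return self.rv

    def enterEqnMode(self):
        self.mode = 'EQN'
        if self.rv and self.rv[-1].type == 'STRING':
            # the word before '=' was scanned too eagerly as a STRING;
            # pop it and rescan its text under the new mode
            last = self.rv[-1].attr
            del self.rv[-1]
            self.scan(last)

# MiniScanner().scan("x = y + 1") yields
# IDENT 'x', OP '=', IDENT 'y', OP '+', NUMBER 1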