def split_cmd(cmd):
    """
    Split command line into a list of token values.

    @param cmd: Command line string
    @return: List of token values produced by the lexer
    """

    # Escaping must be enabled so quoted/escaped arguments survive splitting
    lexer = Lexer(ustring(cmd), [], comment=None, macro=None)
    lexer.escaping_on()

    values = []

    res = lexer.lexer()

    while res is not None:
        # Each lexer() result is a (lexeme-type, value) pair;
        # only the value is kept
        values.append(res[1])
        res = lexer.lexer()

    return values
def parse(self, source, default_data=None):
    """
    Begin parsing.

    @param source: Source to parse
    @param default_data: Initial data dictionary handed to sub-parsers
    @return: Dictionary with all data accumulated from sub-parsers
    """

    def _get_parser(val):
        # Locate the registered parser whose key matches keyword val.
        # Registry keys may be: an exact string, a container of
        # keywords, or a compiled regular expression.
        _p = None

        for _key in self.parsers:
            # First string — exact equality only.  Strings also
            # support __contains__, so they must be fully handled
            # here to avoid accidental substring matches below.
            if isinstance(_key, basestring):
                if _key == val:
                    _p = self.parsers[_key]
                    break
            # Next sequence (tuple/list/set of keywords)
            elif hasattr(_key, "__contains__") and val in _key:
                _p = self.parsers[_key]
                break
            # Otherwise regexp (same type as a compiled pattern)
            elif type(_key) is type(self._rx) and _key.match(val):
                _p = self.parsers[_key]
                break

        return _p

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    self._result = {}
    self._lexer = Lexer(source, self.tokens, self.comment)
    _data = default_data or {}

    try:
        while True:
            _res = self._lexer.lexer()

            if _res is None:
                break

            _lex, _val = _res
            _parser = _get_parser(_val)

            if _parser:
                # Push read token back so the sub-parser sees it
                self._lexer.unget(_val)
                _data = _parser.parse(self._lexer.sdata, _data)
                self._result.update(_data)
            else:
                self.error(_(u"Unknown keyword: %s") % _val)
    except LexerError as e:
        self.error(str(e))

    # Was missing here: without it callers get None instead of
    # the accumulated result (the class-level copy of this method
    # does return it).
    return self._result
class MultiParser(BaseParser):
    """
    MultiParser is a simple container for any other parsers.

    Usually parsers, such as BlockParser, are designed to parse
    homogeneous blocks in a single source: file or string.
    But still it might be useful sometimes to parse multiple blocks of
    different syntax in a single source. Thus, one can register some
    necessary parsers into MultiParser and go on.
    """

    DEFAULT_OPT = {
        u"comment": u"#",
        u"tokens": (),
    }

    def __init__(self, parsers, opt=None):
        """
        @param parsers: dictionary containing string, list or compiled
                        regexp of keywords as key and *Parser objects
                        as values.
        @type parsers: dictionary with string or sequence keys

        @param opt: Options
        Available options:
            - comment: Comment character.
              Everything else ignored until EOL.
              Type: I{string (single char)}
            - tokens: List of tokens.
              Type: I{sequence}

        @raise XYZValueError: if parsers is neither a dict nor false-y
        """

        super(MultiParser, self).__init__()

        # Template pattern object: used only to type-check regexp keys
        # in _get_parser (type(_key) is type(self._rx))
        self._rx = re.compile("")

        if parsers:
            if not isinstance(parsers, dict):
                raise XYZValueError(_(u"Invalid argument type %s. "\
                                    u"Dictionary expected") % str(parsers))
            else:
                self.parsers = parsers
        else:
            self.parsers = {}

        # NOTE(review): when opt is None, self.opt aliases the class-level
        # DEFAULT_OPT dict — mutating it would affect all instances.
        self.opt = opt or self.DEFAULT_OPT
        self.set_opt(self.DEFAULT_OPT, self.opt)

        self._lexer = None
        self._result = {}

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def parse(self, source, default_data=None):
        """
        Begin parsing.

        @param source: Source to parse
        @param default_data: Initial data dictionary handed to sub-parsers
        @return: Dictionary with all data accumulated from sub-parsers
        """

        def _get_parser(val):
            # Locate the registered parser whose key matches keyword val.
            # Registry keys may be: an exact string, a container of
            # keywords, or a compiled regular expression.
            _p = None

            for _key in self.parsers:
                # First string — exact equality only.  Strings also
                # support __contains__, so they must be fully handled
                # here to avoid accidental substring matches below
                # (e.g. val "foo" matching key "foobar").
                if isinstance(_key, basestring):
                    if _key == val:
                        _p = self.parsers[_key]
                        break
                # Next sequence (tuple/list/set of keywords)
                elif hasattr(_key, "__contains__") and val in _key:
                    _p = self.parsers[_key]
                    break
                # Otherwise regexp (same type as a compiled pattern)
                elif type(_key) is type(self._rx) and _key.match(val):
                    _p = self.parsers[_key]
                    break

            return _p

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        self._result = {}
        self._lexer = Lexer(source, self.tokens, self.comment)
        _data = default_data or {}

        try:
            while True:
                _res = self._lexer.lexer()

                if _res is None:
                    break

                _lex, _val = _res
                _parser = _get_parser(_val)

                if _parser:
                    # Push read token back so the sub-parser sees it
                    self._lexer.unget(_val)
                    _data = _parser.parse(self._lexer.sdata, _data)
                    self._result.update(_data)
                else:
                    self.error(_(u"Unknown keyword: %s") % _val)
        except LexerError as e:
            self.error(str(e))

        return self._result