def parse_start_step(request, args):
    """ Parses common Colorizer parameters start, step, numbers.
        Uses L{wikiutil.parseAttributes} and sanitizes the results.

        Start and step must be a non negative number and default to 1,
        numbers might be on, off, or none and defaults to on. On or off
        means that numbers are switchable via JavaScript (html formatter),
        disabled means that numbers are disabled completely.

        attrdict is returned as last element in the tuple, to enable the
        calling parser to extract further arguments.

        @param request: a request instance
        @param args: the argument string
        @returns: numbers, start, step, attrdict
    """
    # defaults: numbers on (1), start line 1, step 1
    nums, start, step = 1, 1, 1
    attrs, msg = wikiutil.parseAttributes(request, args)
    if not msg:
        # parseAttributes returns values with surrounding quotes;
        # [1:-1] strips them before conversion
        try:
            start = int(attrs.get('start', '"1"')[1:-1])
        except ValueError:
            pass
        try:
            step = int(attrs.get('step', '"1"')[1:-1])
        except ValueError:
            pass
        # nums: 1 = on, 0 = off (JS-switchable), -1 = disabled entirely
        if attrs.get('numbers', '"on"')[1:-1].lower() in ('off', 'false', 'no'):
            nums = 0
        elif attrs.get('numbers', '"on"')[1:-1].lower() in ('none', 'disable'):
            nums = -1
    return nums, start, step, attrs
def __init__(self, raw, request, **kw):
    """Keep the raw source and request, and build the wrapped wiki parser."""
    self.request = request
    self.raw = raw
    # output stream defaults to the request itself
    self.out = kw.get('out', request)
    self.moin_parser = WikiParser(raw, request, **kw)
    # parse (and currently discard) any format arguments
    attrs, msg = wikiutil.parseAttributes(request, kw.get('format_args', ''))
def __init__(self, raw, request, **kw):
    """Read optional bibtex tool paths from the wiki config and parse
    the parser's format arguments (style, database, boolean switches)."""
    cfg = request.cfg
    # Config attributes are optional; when absent, the instance
    # attributes are simply left unset (same as the original intent).
    try:
        self.bibtex2html = cfg.bibtex_bibtex2html
        self.bibtex = cfg.bibtex_bibtex
    except AttributeError:
        pass
    try:
        self.bstfiles = cfg.bibtex_bstfiles
    except AttributeError:
        pass

    self.request = request
    self.form = request.form
    self._ = request.getText
    self.raw = raw.encode('utf-8')
    self.out = kw.get('out', request)

    attrs, msg = wikiutil.parseAttributes(request, kw.get('format_args', ''))

    def text_of(key, default):
        # parseAttributes quotes values; strip the surrounding quotes
        return attrs.get(key, '"%s"' % default)[1:-1]

    self.style = text_of('style', 'plain')
    self.hlinsdb = text_of('hlinsdb', 'HlinsDatabase')

    # boolean switches default to on; any of these words turns them off
    off_words = ('off', 'no', 'none', 'false')
    self.abstract = text_of('abstract', 'on').lower() not in off_words
    self.keywords = text_of('keywords', 'on').lower() not in off_words
    self.usekeys = text_of('usekeys', 'on').lower() not in off_words
def __init__(self, raw, request, **kw):
    """Keep the raw text and extract the optional 'delim' format argument."""
    self.raw = raw
    self.request = request
    self.form = request.form
    self._ = request.getText
    self.args = kw.get('format_args', '')
    parsed, err = wikiutil.parseAttributes(request, self.args)
    # value comes back quoted, so slice off the quotes
    # (the '' default also slices harmlessly to '')
    self.delim = parsed.get('delim', '')[1:-1]
def __init__(self, raw, request, **kw):
    """Save call arguments for later use in format() and choose the
    graph layout engine (default 'neato', overridable via 'engine')."""
    self.request = request
    self.raw = raw.encode('utf-8')
    # fixed defaults; the 'engine' format argument may override below
    self.layoutformat = 'png'
    self.graphengine = 'neato'
    attrs, msg = wikiutil.parseAttributes(request, kw.get('format_args', ''))
    try:
        engine = attrs['engine']
    except KeyError:
        pass
    else:
        # quoted value: encode, then strip the surrounding quote bytes
        self.graphengine = engine.encode('utf-8')[1:-1]
def parseDefinition(request, fielddef, fieldlist):
    """ Parse a form field definition and return the HTML markup for it.

    @param request: the request instance (used for i18n and attribute parsing)
    @param fielddef: one field definition string, e.g. name="x" type="text"
    @param fieldlist: list collecting form field names; the parsed field's
                      name (without quotes) is appended to it
    @returns: HTML table-row markup for the field, or a visible error row
              when the definition is invalid
    """
    _ = request.getText
    row = '<tr><td nowrap="nowrap" valign="top"> %s </td><td>%s</td></tr>\n'
    fields, msg = wikiutil.parseAttributes(request, fielddef)
    if not msg:
        # all required attributes must be present
        # (`has_key` replaced by the `in` operator: same semantics,
        # works on both Python 2 and 3)
        for required in _required_attributes:
            if required not in fields:
                msg = _('Required attribute "%(attrname)s" missing') % {
                    'attrname': required}
                break
    if msg:
        # create visible error
        result = row % (msg, fielddef)
    elif fields['type'] == '"caption"':
        # create a centered, bold italic caption
        result = '<tr><td colspan="2" align="center"><em><strong>%s</strong></em></td></tr>\n' % (
            fields['label'][1:-1])
    else:
        # for submit buttons, use `label` as the value
        if fields['type'] == '"submit"':
            fields['value'] = fields['label']
            fields['label'] = ''
        # make sure user cannot use a system name
        fields['name'] = '"form_' + fields['name'][1:]
        fieldlist.append(fields['name'][1:-1])
        wrapper = ('<input', '>\n')
        if fields['type'] == '"textarea"':
            wrapper = ('<textarea', '></textarea>\n')
        # emit every attribute except the (already consumed) label;
        # values are still quoted, so key=val needs no extra quoting
        result = wrapper[0]
        for key, val in fields.items():
            if key == 'label':
                continue
            result = '%s %s=%s' % (result, key, val)
        result = result + wrapper[1]
        if fields['type'] == '"submit"':
            result = '<tr><td colspan="2" align="center">%s</td></tr>\n' % result
        else:
            result = row % (fields['label'][1:-1], result)
    return result
def __init__(self, raw, request, **kw):
    """Split the format arguments into macro name + options and prefix
    the raw body with a matching '#if' directive for later parsing."""
    self.request = request
    self.raw = raw
    format_args = kw.get('format_args', '')
    # first word is the macro name, the remainder are its options
    words = format_args.split()
    self.macro_name = words[0]
    self.attrs, msg = wikiutil.parseAttributes(request, ' '.join(words[1:]))
    # re-inject the whole argument line as an '#if' directive
    self.raw = "#if " + format_args + "\n" + self.raw
def __init__(self, raw, request, **kw):
    """Save call arguments for later use in format() and precompile the
    regexes that recognize shapefiles, URLs and pseudo-macros."""
    self.raw = raw  ##.encode('utf-8')
    self.request = request
    self.attrs, msg = wikiutil.parseAttributes(request, kw.get('format_args', ''))

    # building blocks shared by several patterns below
    param1 = "(?P<p1>.*?)"
    param2 = "(?P<p2>.*?)"
    trailer = "( *//.*)?"

    # a shapefile
    self.sfi_re = re.compile(
        r'\[ *shapefile=(?P<quote>[\'"])(?P<shapefile>.+?)(?P=quote) *]',
        re.IGNORECASE)
    # an URL
    self.url_re = re.compile(
        r'\[ *URL=(?P<quote>[\'"])(?P<url>.+?)(?P=quote) *]',
        re.IGNORECASE)
    # non-wiki URLs
    self.notwiki_re = re.compile(r'[a-z0-9_]*:.*', re.IGNORECASE)
    # include pseudo-macro
    self.inc_re = re.compile(
        r'\[\[ *Include *\( *%s( *, *%s)? *\) *\]\]%s' % (param1, param2, trailer))
    # set pseudo-macro
    self.set_re = re.compile(
        r'\[\[ *Set *\( *%s *, *(?P<quote>[\'"])%s(?P=quote) *\) *\]\]%s' % (
            param1, param2, trailer))
    # get pseudo-macro
    self.get_re = re.compile(r'\[\[ *Get *\( *%s *\) *\]\]' % (param1))
def _getTableAttrs(self, attrdef):
    # Parse table attribute markup (``||<...>``) into an attribute dict.
    # Returns (attrs, msg): msg is '' on success, else an HTML error string.
    # skip "|" and initial "<"
    while attrdef and attrdef[0] == "|":
        attrdef = attrdef[1:]
    if not attrdef or attrdef[0] != "<":
        return {}, ''
    attrdef = attrdef[1:]

    # extension for special table markup
    def table_extension(key, parser, attrs, wiki_parser=self):
        """ returns: tuple (found_flag, msg)
            found_flag:
                whether we found something and were able to process it here
                true for special stuff like 100% or - or #AABBCC
                false for style xxx="yyy" attributes
            msg:
                "" or an error msg
        """
        _ = wiki_parser._
        found = False
        msg = ''
        if key[0] in "0123456789":
            # "NN%" --> column width percentage
            token = parser.get_token()
            if token != '%':
                wanted = '%'
                msg = _('Expected "%(wanted)s" after "%(key)s", got "%(token)s"') % {
                    'wanted': wanted, 'key': key, 'token': token}
            else:
                try:
                    dummy = int(key)
                except ValueError:
                    msg = _('Expected an integer "%(key)s" before "%(token)s"') % {
                        'key': key, 'token': token}
                else:
                    found = True
                    attrs['width'] = '"%s%%"' % key
        elif key == '-':
            # "-N" --> colspan N
            arg = parser.get_token()
            try:
                dummy = int(arg)
            except ValueError:
                msg = _('Expected an integer "%(arg)s" after "%(key)s"') % {
                    'arg': arg, 'key': key}
            else:
                found = True
                attrs['colspan'] = '"%s"' % arg
        elif key == '|':
            # "|N" --> rowspan N
            arg = parser.get_token()
            try:
                dummy = int(arg)
            except ValueError:
                msg = _('Expected an integer "%(arg)s" after "%(key)s"') % {
                    'arg': arg, 'key': key}
            else:
                found = True
                attrs['rowspan'] = '"%s"' % arg
        elif key == '(':
            found = True
            attrs['align'] = '"left"'
        elif key == ':':
            found = True
            attrs['align'] = '"center"'
        elif key == ')':
            found = True
            attrs['align'] = '"right"'
        elif key == '^':
            found = True
            attrs['valign'] = '"top"'
        elif key == 'v':
            found = True
            attrs['valign'] = '"bottom"'
        elif key == '#':
            # "#RRGGBB" --> background color (exactly 6 hex digits)
            arg = parser.get_token()
            try:
                if len(arg) != 6:
                    raise ValueError
                dummy = int(arg, 16)
            except ValueError:
                msg = _('Expected a color value "%(arg)s" after "%(key)s"') % {
                    'arg': arg, 'key': key}
            else:
                found = True
                attrs['bgcolor'] = '"#%s"' % arg
        return found, self.formatter.rawHTML(msg)

    # scan attributes
    attr, msg = wikiutil.parseAttributes(self.request, attrdef, '>', table_extension)
    if msg:
        msg = '<strong class="highlight">%s</strong>' % msg
    #self.request.log("parseAttributes returned %r" % attr)
    return attr, msg
def _getTableAttrs(self, attrdef):
    # Parse table attribute markup (``||<...>``) into an attribute dict.
    # Returns (attrs, msg): msg is '' on success, else an HTML error string.
    # skip "|" and initial "<"
    while attrdef and attrdef[0] == "|":
        attrdef = attrdef[1:]
    if not attrdef or attrdef[0] != "<":
        return {}, ''
    attrdef = attrdef[1:]

    # extension for special table markup;
    # returns '' on success, an error string on failure, and None when
    # the key was not special markup at all (not handled here)
    def table_extension(key, parser, attrs, wiki_parser=self):
        _ = wiki_parser._
        msg = ''
        if key[0] in "0123456789":
            # "NN%" --> column width percentage
            token = parser.get_token()
            if token != '%':
                wanted = '%'
                msg = _('Expected "%(wanted)s" after "%(key)s", got "%(token)s"') % {
                    'wanted': wanted, 'key': key, 'token': token}
            else:
                try:
                    dummy = int(key)
                except ValueError:
                    msg = _('Expected an integer "%(key)s" before "%(token)s"') % {
                        'key': key, 'token': token}
                else:
                    attrs['width'] = '"%s%%"' % key
        elif key == '-':
            # "-N" --> colspan N
            arg = parser.get_token()
            try:
                dummy = int(arg)
            except ValueError:
                msg = _('Expected an integer "%(arg)s" after "%(key)s"') % {
                    'arg': arg, 'key': key}
            else:
                attrs['colspan'] = '"%s"' % arg
        elif key == '|':
            # "|N" --> rowspan N
            arg = parser.get_token()
            try:
                dummy = int(arg)
            except ValueError:
                msg = _('Expected an integer "%(arg)s" after "%(key)s"') % {
                    'arg': arg, 'key': key}
            else:
                attrs['rowspan'] = '"%s"' % arg
        elif key == '(':
            attrs['align'] = '"left"'
        elif key == ':':
            attrs['align'] = '"center"'
        elif key == ')':
            attrs['align'] = '"right"'
        elif key == '^':
            attrs['valign'] = '"top"'
        elif key == 'v':
            attrs['valign'] = '"bottom"'
        elif key == '#':
            # "#RRGGBB" --> background color (exactly 6 hex digits)
            arg = parser.get_token()
            try:
                if len(arg) != 6:
                    raise ValueError
                dummy = int(arg, 16)
            except ValueError:
                msg = _('Expected a color value "%(arg)s" after "%(key)s"') % {
                    'arg': arg, 'key': key}
            else:
                attrs['bgcolor'] = '"#%s"' % arg
        else:
            msg = None
        #print "key: %s\nattrs: %s" % (key, str(attrs))
        return msg

    # scan attributes
    attr, msg = wikiutil.parseAttributes(self.request, attrdef, '>', table_extension)
    if msg:
        msg = '<strong class="highlight">%s</strong>' % msg
    #print attr
    return attr, msg
def _metatable_parseargs(request, args, cat_re, temp_re):
    """Parse a MetaTable-style comma-separated argument string.

    Each argument is classified in order: ``||key||`` column specs,
    operator comparisons (eg. key>3), key=regexp value limits,
    <<//>> order specs, and finally plain page names or page regexps.

    @returns: tuple (argset, pageargs, keyspec, excluded_keys, orderspec,
              limitregexps, limitops, indirection_keys, styles)
    """
    # Arg placeholders
    argset = set([])
    keyspec = list()
    excluded_keys = list()
    orderspec = list()
    limitregexps = dict()
    limitops = dict()

    # Capacity for storing indirection keys in metadata comparisons
    # and regexps, eg. k->c=/.+/
    indirection_keys = list()

    # list styles
    styles = dict()

    # Flag: were there page arguments?
    pageargs = False

    # Regex preprocessing
    for arg in (x.strip() for x in args.split(',') if x.strip()):
        # metadata key spec, move on
        if arg.startswith('||') and arg.endswith('||'):
            # take order, strip empty ones, look at styles
            for key in arg.split('||'):
                if not key:
                    continue
                # Grab styles
                if key.startswith('<') and '>' in key:
                    style = parseAttributes(request, key[1:], '>')
                    key = key[key.index('>') + 1:].strip()
                    if style:
                        styles[key] = style[0]
                # Grab key exclusions
                if key.startswith('!'):
                    excluded_keys.append(key.lstrip('!'))
                    continue
                keyspec.append(key.strip())
            continue

        op_match = False
        # Check for Python operator comparisons
        for op in OPERATORS:
            if op in arg:
                data = arg.rsplit(op)
                # If this is not a comparison but indirection,
                # continue. Good: k->s>3, bad: k->s=/.+/
                if op == '>' and data[0].endswith('-'):
                    continue
                # Must have real comparison
                if not len(data) == 2:
                    if op == '==':
                        data.append('')
                    else:
                        continue
                key, comp = map(string.strip, data)
                # Add indirection key
                if '->' in key:
                    indirection_keys.append(key)
                limitops.setdefault(key, list()).append((comp, op))
                op_match = True
            # One of the operators matched, no need to go forward
            if op_match:
                break
        # One of the operators matched, process next arg
        if op_match:
            continue

        # Metadata regexp, move on
        if '=' in arg:
            data = arg.split("=")
            key = data[0]
            # Add indirection key
            if '->' in key:
                indirection_keys.append(key)
            val = '='.join(data[1:])
            # Assume that value limits are regexps, if
            # not, escape them into exact regexp matches
            if not REGEX_RE.match(val):
                from MoinMoin.parser.text_moin_wiki import Parser
                # If the value is a page, make it a non-matching
                # regexp so that all link variations will generate a
                # match. An alternative would be to match from links
                # also, but in this case old-style metalinks, which
                # cannot be edited, would appear in metatables, which
                # is not wanted (old-style eg. [[Page| key: Page]])

                # Only allow non-matching regexp for values if they
                # are WikiWords. Eg. 'WikiWord some text' would match
                # 'WikiWord', emulating ye olde matching behaviour,
                # but 'nonwikiword some text' would not match
                # 'nonwikiword'
                if re.match(Parser.word_rule_js, val):
                    re_val = "(%s|" % (re.escape(val))
                else:
                    re_val = "(^%s$|" % (re.escape(val))
                # or as bracketed link
                re_val += "(?P<sta>\[\[)%s(?(sta)\]\])|" % (re.escape(val))
                # or as commented bracketed link
                re_val += "(?P<stb>\[\[)%s(?(stb)\|[^\]]*\]\]))" % \
                    (re.escape(val))
                limitregexps.setdefault(key, set()).add(
                    re.compile(re_val, re.UNICODE))
            # else strip the //:s
            else:
                if len(val) > 1:
                    val = val[1:-1]
                limitregexps.setdefault(key, set()).add(
                    re.compile(val, re.IGNORECASE | re.UNICODE))
            continue

        # order spec
        if arg.startswith('>>') or arg.startswith('<<'):
            # eg. [('<<', 'koo'), ('>>', 'kk')]
            orderspec = re.findall('(?:(<<|>>)([^<>]+))', arg)
            continue

        # Ok, we have a page arg, i.e. a page or page regexp in args
        pageargs = True

        # Normal pages, check perms, encode and move on
        if not REGEX_RE.match(arg):
            # Fix relative links
            if (arg.startswith('/') or arg.startswith('./') or
                    arg.startswith('../')):
                arg = AbsPageName(request.page.page_name, arg)
            argset.add(arg)
            continue

        # Ok, it's a page regexp
        # if there's something wrong with the regexp, ignore it and move on
        try:
            arg = arg[1:-1]
            # Fix relative links
            if (arg.startswith('/') or arg.startswith('./') or
                    arg.startswith('../')):
                arg = AbsPageName(request.page.page_name, arg)
            page_re = re.compile("%s" % arg)
        except:
            continue

        # Get all pages, check which of them match to the supplied regexp
        for page in request.graphdata:
            if page_re.match(page):
                argset.add(page)

    return (argset, pageargs, keyspec, excluded_keys, orderspec,
            limitregexps, limitops, indirection_keys, styles)
def _getTableAttrs(self, attrdef):
    # Parse table attribute markup (``||<...>``) into an attribute dict.
    # Returns (attrs, msg): msg is '' on success, else an HTML error string.
    # skip "|" and initial "<"
    while attrdef and attrdef[0] == "|":
        attrdef = attrdef[1:]
    if not attrdef or attrdef[0] != "<":
        return {}, ''
    attrdef = attrdef[1:]

    # extension for special table markup
    def table_extension(key, parser, attrs, wiki_parser=self):
        """ returns: tuple (found_flag, msg)
            found_flag:
                whether we found something and were able to process it here
                true for special stuff like 100% or - or #AABBCC
                false for style xxx="yyy" attributes
            msg:
                "" or an error msg
        """
        _ = wiki_parser._
        found = False
        msg = ''
        if key[0] in "0123456789":
            # "NN%" --> column width percentage
            token = parser.get_token()
            if token != '%':
                wanted = '%'
                msg = _(
                    'Expected "%(wanted)s" after "%(key)s", got "%(token)s"'
                ) % {
                    'wanted': wanted,
                    'key': key,
                    'token': token
                }
            else:
                try:
                    dummy = int(key)
                except ValueError:
                    msg = _(
                        'Expected an integer "%(key)s" before "%(token)s"'
                    ) % {
                        'key': key,
                        'token': token
                    }
                else:
                    found = True
                    attrs['width'] = '"%s%%"' % key
        elif key == '-':
            # "-N" --> colspan N
            arg = parser.get_token()
            try:
                dummy = int(arg)
            except ValueError:
                msg = _(
                    'Expected an integer "%(arg)s" after "%(key)s"') % {
                        'arg': arg,
                        'key': key
                    }
            else:
                found = True
                attrs['colspan'] = '"%s"' % arg
        elif key == '|':
            # "|N" --> rowspan N
            arg = parser.get_token()
            try:
                dummy = int(arg)
            except ValueError:
                msg = _(
                    'Expected an integer "%(arg)s" after "%(key)s"') % {
                        'arg': arg,
                        'key': key
                    }
            else:
                found = True
                attrs['rowspan'] = '"%s"' % arg
        elif key == '(':
            found = True
            attrs['align'] = '"left"'
        elif key == ':':
            found = True
            attrs['align'] = '"center"'
        elif key == ')':
            found = True
            attrs['align'] = '"right"'
        elif key == '^':
            found = True
            attrs['valign'] = '"top"'
        elif key == 'v':
            found = True
            attrs['valign'] = '"bottom"'
        elif key == '#':
            # "#RRGGBB" --> background color (exactly 6 hex digits)
            arg = parser.get_token()
            try:
                if len(arg) != 6:
                    raise ValueError
                dummy = int(arg, 16)
            except ValueError:
                msg = _(
                    'Expected a color value "%(arg)s" after "%(key)s"') % {
                        'arg': arg,
                        'key': key
                    }
            else:
                found = True
                attrs['bgcolor'] = '"#%s"' % arg
        return found, self.formatter.rawHTML(msg)

    # scan attributes
    attr, msg = wikiutil.parseAttributes(self.request, attrdef, '>', table_extension)
    if msg:
        msg = '<strong class="highlight">%s</strong>' % msg
    #self.request.log("parseAttributes returned %r" % attr)
    return attr, msg
def _metatable_parseargs(request, args, cat_re, temp_re):
    """Parse a MetaTable-style comma-separated argument string.

    Each argument is classified in order: ``||key||`` column specs,
    operator comparisons (eg. key>3), key=regexp value limits,
    <<//>> order specs, and finally plain page names or page regexps.

    @returns: tuple (argset, pageargs, keyspec, excluded_keys, orderspec,
              limitregexps, limitops, indirection_keys, styles)
    """
    # Arg placeholders
    argset = set([])
    keyspec = list()
    excluded_keys = list()
    orderspec = list()
    limitregexps = dict()
    limitops = dict()

    # Capacity for storing indirection keys in metadata comparisons
    # and regexps, eg. k->c=/.+/
    indirection_keys = list()

    # list styles
    styles = dict()

    # Flag: were there page arguments?
    pageargs = False

    # Regex preprocessing
    for arg in (x.strip() for x in args.split(',') if x.strip()):
        # metadata key spec, move on
        if arg.startswith('||') and arg.endswith('||'):
            # take order, strip empty ones, look at styles
            for key in arg.split('||'):
                if not key:
                    continue
                # Grab styles
                if key.startswith('<') and '>' in key:
                    style = parseAttributes(request, key[1:], '>')
                    key = key[key.index('>') + 1:].strip()
                    if style:
                        styles[key] = style[0]
                # Grab key exclusions
                if key.startswith('!'):
                    excluded_keys.append(key.lstrip('!'))
                    continue
                keyspec.append(key.strip())
            continue

        op_match = False
        # Check for Python operator comparisons
        for op in OPERATORS:
            if op in arg:
                data = arg.rsplit(op)
                # If this is not a comparison but indirection,
                # continue. Good: k->s>3, bad: k->s=/.+/
                if op == '>' and data[0].endswith('-'):
                    continue
                # Must have real comparison
                if not len(data) == 2:
                    if op == '==':
                        data.append('')
                    else:
                        continue
                key, comp = map(string.strip, data)
                # Add indirection key
                if '->' in key:
                    indirection_keys.append(key)
                limitops.setdefault(key, list()).append((comp, op))
                op_match = True
            # One of the operators matched, no need to go forward
            if op_match:
                break
        # One of the operators matched, process next arg
        if op_match:
            continue

        # Metadata regexp, move on
        if '=' in arg:
            data = arg.split("=")
            key = data[0]
            # Add indirection key
            if '->' in key:
                indirection_keys.append(key)
            val = '='.join(data[1:])
            # Assume that value limits are regexps, if
            # not, escape them into exact regexp matches
            if not REGEX_RE.match(val):
                from MoinMoin.parser.text_moin_wiki import Parser
                # If the value is a page, make it a non-matching
                # regexp so that all link variations will generate a
                # match. An alternative would be to match from links
                # also, but in this case old-style metalinks, which
                # cannot be edited, would appear in metatables, which
                # is not wanted (old-style eg. [[Page| key: Page]])

                # Only allow non-matching regexp for values if they
                # are WikiWords. Eg. 'WikiWord some text' would match
                # 'WikiWord', emulating ye olde matching behaviour,
                # but 'nonwikiword some text' would not match
                # 'nonwikiword'
                if re.match(Parser.word_rule_js, val):
                    re_val = "(%s|" % (re.escape(val))
                else:
                    re_val = "(^%s$|" % (re.escape(val))
                # or as bracketed link
                re_val += "(?P<sta>\[\[)%s(?(sta)\]\])|" % (re.escape(val))
                # or as commented bracketed link
                re_val += "(?P<stb>\[\[)%s(?(stb)\|[^\]]*\]\]))" % \
                    (re.escape(val))
                limitregexps.setdefault(
                    key, set()).add(re.compile(re_val, re.UNICODE))
            # else strip the //:s
            else:
                if len(val) > 1:
                    val = val[1:-1]
                limitregexps.setdefault(
                    key, set()).add(re.compile(val,
                                               re.IGNORECASE | re.UNICODE))
            continue

        # order spec
        if arg.startswith('>>') or arg.startswith('<<'):
            # eg. [('<<', 'koo'), ('>>', 'kk')]
            orderspec = re.findall('(?:(<<|>>)([^<>]+))', arg)
            continue

        # Ok, we have a page arg, i.e. a page or page regexp in args
        pageargs = True

        # Normal pages, check perms, encode and move on
        if not REGEX_RE.match(arg):
            # Fix relative links
            if (arg.startswith('/') or arg.startswith('./') or
                    arg.startswith('../')):
                arg = AbsPageName(request.page.page_name, arg)
            argset.add(arg)
            continue

        # Ok, it's a page regexp
        # if there's something wrong with the regexp, ignore it and move on
        try:
            arg = arg[1:-1]
            # Fix relative links
            if (arg.startswith('/') or arg.startswith('./') or
                    arg.startswith('../')):
                arg = AbsPageName(request.page.page_name, arg)
            page_re = re.compile("%s" % arg)
        except:
            continue

        # Get all pages, check which of them match to the supplied regexp
        for page in request.graphdata:
            if page_re.match(page):
                argset.add(page)

    return (argset, pageargs, keyspec, excluded_keys, orderspec,
            limitregexps, limitops, indirection_keys, styles)
def _getTableAttrs(self, attrdef):
    # Parse table attribute markup (``||<...>``) into an attribute dict.
    # Returns (attrs, msg): msg is "" on success, else an HTML error string.
    # skip "|" and initial "<"
    while attrdef and attrdef[0] == "|":
        attrdef = attrdef[1:]
    if not attrdef or attrdef[0] != "<":
        return {}, ""
    attrdef = attrdef[1:]

    # extension for special table markup
    def table_extension(key, parser, attrs, wiki_parser=self):
        """ returns: tuple (found_flag, msg)
            found_flag:
                whether we found something and were able to process it here
                true for special stuff like 100% or - or #AABBCC
                false for style xxx="yyy" attributes
            msg:
                "" or an error msg
        """
        _ = wiki_parser._
        found = False
        msg = ""
        if key[0] in "0123456789":
            # "NN%" --> column width percentage
            token = parser.get_token()
            if token != "%":
                wanted = "%"
                msg = _('Expected "%(wanted)s" after "%(key)s", got "%(token)s"') % {
                    "wanted": wanted,
                    "key": key,
                    "token": token,
                }
            else:
                try:
                    dummy = int(key)
                except ValueError:
                    msg = _('Expected an integer "%(key)s" before "%(token)s"') % {"key": key, "token": token}
                else:
                    found = True
                    attrs["width"] = '"%s%%"' % key
        elif key == "-":
            # "-N" --> colspan N
            arg = parser.get_token()
            try:
                dummy = int(arg)
            except ValueError:
                msg = _('Expected an integer "%(arg)s" after "%(key)s"') % {"arg": arg, "key": key}
            else:
                found = True
                attrs["colspan"] = '"%s"' % arg
        elif key == "|":
            # "|N" --> rowspan N
            arg = parser.get_token()
            try:
                dummy = int(arg)
            except ValueError:
                msg = _('Expected an integer "%(arg)s" after "%(key)s"') % {"arg": arg, "key": key}
            else:
                found = True
                attrs["rowspan"] = '"%s"' % arg
        elif key == "(":
            found = True
            attrs["align"] = '"left"'
        elif key == ":":
            found = True
            attrs["align"] = '"center"'
        elif key == ")":
            found = True
            attrs["align"] = '"right"'
        elif key == "^":
            found = True
            attrs["valign"] = '"top"'
        elif key == "v":
            found = True
            attrs["valign"] = '"bottom"'
        elif key == "#":
            # "#RRGGBB" --> background color (exactly 6 hex digits)
            arg = parser.get_token()
            try:
                if len(arg) != 6:
                    raise ValueError
                dummy = int(arg, 16)
            except ValueError:
                msg = _('Expected a color value "%(arg)s" after "%(key)s"') % {"arg": arg, "key": key}
            else:
                found = True
                attrs["bgcolor"] = '"#%s"' % arg
        return found, self.formatter.rawHTML(msg)

    # scan attributes
    attr, msg = wikiutil.parseAttributes(self.request, attrdef, ">", table_extension)
    if msg:
        msg = '<strong class="highlight">%s</strong>' % msg
    return attr, msg