Ejemplo n.º 1
0
def metas_to_abs_links(request, page, values):
    """Convert relative meta link values to absolute ones.

    Values that are not links (as classified by is_meta_link) are
    passed through untouched. For link values, attachment/inline/
    drawing schemes and relative page references are rewritten to be
    absolute with respect to the given page.

    :param request: current request object (kept for interface
        compatibility with callers; not used directly here).
    :param page: name of the page the meta values belong to.
    :param values: iterable of meta value strings.
    :returns: list of values with links made absolute.
    """
    new_values = list()
    for value in values:
        if is_meta_link(value) != 'link':
            new_values.append(value)
            continue
        # Track per value whether the surrounding '[['/'{{' was
        # stripped so it can be restored below. (Previously the flag
        # was set once outside the loop and leaked across iterations:
        # after any bracketed value, a later unbracketed value ending
        # in ']' or '}' would get a spurious '[['/'{{' prefix.)
        stripped = False
        if ((value.startswith('[[') and value.endswith(']]')) or
            (value.startswith('{{') and value.endswith('}}'))):
            stripped = True
            value = value.lstrip('[')
            value = value.lstrip('{')
        attachment = ''
        for scheme in ('attachment:', 'inline:', 'drawing:'):
            if value.startswith(scheme):
                if len(value.split('/')) == 1:
                    # Attachment on this very page: qualify it with
                    # the page name.
                    value = ':'.join(value.split(':')[1:])
                    if '|' not in value:
                        # If page does not have descriptive text, try
                        # to shorten the link to the attachment name.
                        value = "%s|%s" % (value.rstrip(']').rstrip('}'), value)
                    value = "%s%s/%s" % (scheme, page, value)
                else:
                    # Attachment on another page: if that page is a
                    # relative reference, strip the scheme here and
                    # let the relative-link fixup below absolutize it.
                    att_page = value.split(':')[1]
                    if (att_page.startswith('./') or
                        att_page.startswith('/') or
                        att_page.startswith('../')):
                        attachment = scheme
                        value = ':'.join(value.split(':')[1:])
        if (value.startswith('./') or
            value.startswith('/') or
            value.startswith('../')):
            value = AbsPageName(page, value)
        if value.startswith('#'):
            # Bare anchor refers to the current page.
            value = page + value

        value = attachment + value
        if stripped:
            # Restore whichever bracket type was stripped above.
            if value.endswith(']'):
                value = '[[' + value
            elif value.endswith('}'):
                value = '{{' + value
        new_values.append(value)

    return new_values
Ejemplo n.º 2
0
def metas_to_abs_links(request, page, values):
    """Make the link values among the given meta values absolute.

    Non-link values (per is_meta_link) are appended unchanged.
    Link values get attachment/inline/drawing schemes and relative
    page names rewritten so they are absolute relative to page.

    :param request: current request object (interface compatibility;
        unused in the body).
    :param page: name of the page owning the metas.
    :param values: iterable of meta value strings.
    :returns: new list of values with absolute links.
    """
    new_values = list()
    for value in values:
        if is_meta_link(value) != 'link':
            new_values.append(value)
            continue
        # Reset for every value: remember whether this particular
        # value had its '[['/'{{' wrapper stripped, to restore later.
        # (The flag used to live outside the loop and, once set, made
        # later unbracketed values ending in ']'/'}' gain a bogus
        # '[['/'{{' prefix.)
        stripped = False
        if ((value.startswith('[[') and value.endswith(']]'))
                or (value.startswith('{{') and value.endswith('}}'))):
            stripped = True
            value = value.lstrip('[')
            value = value.lstrip('{')
        attachment = ''
        for scheme in ('attachment:', 'inline:', 'drawing:'):
            if value.startswith(scheme):
                if len(value.split('/')) == 1:
                    # Attachment on this page: prefix with page name.
                    value = ':'.join(value.split(':')[1:])
                    if '|' not in value:
                        # If page does not have descriptive text, try
                        # to shorten the link to the attachment name.
                        value = "%s|%s" % (value.rstrip(']').rstrip('}'),
                                           value)
                    value = "%s%s/%s" % (scheme, page, value)
                else:
                    # Attachment on a relatively-referenced page:
                    # drop the scheme for now so the relative-link
                    # fixup below can absolutize the page part.
                    att_page = value.split(':')[1]
                    if (att_page.startswith('./') or att_page.startswith('/')
                            or att_page.startswith('../')):
                        attachment = scheme
                        value = ':'.join(value.split(':')[1:])
        if (value.startswith('./') or value.startswith('/')
                or value.startswith('../')):
            value = AbsPageName(page, value)
        if value.startswith('#'):
            # Bare anchors point into the current page.
            value = page + value

        value = attachment + value
        if stripped:
            # Put back the bracket style that was stripped above.
            if value.endswith(']'):
                value = '[[' + value
            elif value.endswith('}'):
                value = '{{' + value
        new_values.append(value)

    return new_values
Ejemplo n.º 3
0
def _metatable_parseargs(request, args, cat_re, temp_re):
    """Parse a MetaTable-style argument string into its components.

    The comma-separated args may contain page names/regexps, key
    specifications (``||key1||key2||``), operator comparisons
    (``key>3``), value regexps (``key=/.+/``) and order specs
    (``>>key`` / ``<<key``).

    :param request: current request object (used for page names,
        attribute parsing and the page universe ``request.graphdata``).
    :param args: raw argument string.
    :param cat_re: category regexp (kept for interface compatibility;
        not used in this body).
    :param temp_re: template regexp (kept for interface compatibility;
        not used in this body).
    :returns: tuple ``(argset, pageargs, keyspec, excluded_keys,
        orderspec, limitregexps, limitops, indirection_keys, styles)``.
    """
    # Arg placeholders
    argset = set()
    keyspec = list()
    excluded_keys = list()
    orderspec = list()
    limitregexps = dict()
    limitops = dict()

    # Capacity for storing indirection keys in metadata comparisons
    # and regexps, eg. k->c=/.+/
    indirection_keys = list()

    # list styles
    styles = dict()

    # Flag: were there page arguments?
    pageargs = False

    # Regex preprocessing
    for arg in (x.strip() for x in args.split(',') if x.strip()):
        # metadata key spec, move on
        if arg.startswith('||') and arg.endswith('||'):
            # take order, strip empty ones, look at styles
            for key in arg.split('||'):
                if not key:
                    continue
                # Grab styles
                if key.startswith('<') and '>' in key:
                    style = parseAttributes(request, key[1:], '>')
                    key = key[key.index('>') + 1:].strip()

                    if style:
                        styles[key] = style[0]

                # Grab key exclusions
                if key.startswith('!'):
                    excluded_keys.append(key.lstrip('!'))
                    continue

                keyspec.append(key.strip())

            continue

        op_match = False
        # Check for Python operator comparisons
        for op in OPERATORS:
            if op in arg:
                data = arg.rsplit(op)

                # If this is not a comparison but indirection,
                # continue. Good: k->s>3, bad: k->s=/.+/
                if op == '>' and data[0].endswith('-'):
                    continue

                # Must have real comparison
                if not len(data) == 2:
                    if op == '==':
                        data.append('')
                    else:
                        continue

                # Use the str method instead of the py3-removed
                # string.strip function; identical behavior.
                key, comp = [x.strip() for x in data]

                # Add indirection key
                if '->' in key:
                    indirection_keys.append(key)

                limitops.setdefault(key, list()).append((comp, op))
                op_match = True

            # One of the operators matched, no need to go forward
            if op_match:
                break

        # One of the operators matched, process next arg
        if op_match:
            continue

        # Metadata regexp, move on
        if '=' in arg:
            data = arg.split("=")
            key = data[0]

            # Add indirection key
            if '->' in key:
                indirection_keys.append(key)

            val = '='.join(data[1:])

            # Assume that value limits are regexps, if
            # not, escape them into exact regexp matches
            if not REGEX_RE.match(val):
                from MoinMoin.parser.text_moin_wiki import Parser

                # If the value is a page, make it a non-matching
                # regexp so that all link variations will generate a
                # match. An alternative would be to match from links
                # also, but in this case old-style metalinks, which
                # cannot be edited, would appear in metatables, which
                # is not wanted (old-style eg. [[Page| key: Page]])

                # Only allow non-matching regexp for values if they
                # are WikiWords. Eg. 'WikiWord some text' would match
                # 'WikiWord', emulating ye olde matching behaviour,
                # but 'nonwikiword some text' would not match
                # 'nonwikiword'
                if re.match(Parser.word_rule_js, val):
                    re_val = "(%s|" % (re.escape(val))
                else:
                    re_val = "(^%s$|" % (re.escape(val))
                # or as bracketed link (raw strings: the bare '\['
                # escapes are invalid escape sequences in str
                # literals and break on Python 3)
                re_val += r"(?P<sta>\[\[)%s(?(sta)\]\])|" % (re.escape(val))

                # or as commented bracketed link
                re_val += r"(?P<stb>\[\[)%s(?(stb)\|[^\]]*\]\]))" % \
                    (re.escape(val))

                limitregexps.setdefault(key, set()).add(
                    re.compile(re_val, re.UNICODE))

            # else strip the //:s
            else:
                if len(val) > 1:
                    val = val[1:-1]

                limitregexps.setdefault(key, set()).add(
                    re.compile(val, re.IGNORECASE | re.UNICODE))
            continue

        # order spec
        if arg.startswith('>>') or arg.startswith('<<'):
            # eg. [('<<', 'koo'), ('>>', 'kk')]
            orderspec = re.findall(r'(?:(<<|>>)([^<>]+))', arg)
            continue

        # Ok, we have a page arg, i.e. a page or page regexp in args
        pageargs = True

        # Normal pages, check perms, encode and move on
        if not REGEX_RE.match(arg):
            # Fix relative links
            if (arg.startswith('/') or arg.startswith('./')
                    or arg.startswith('../')):
                arg = AbsPageName(request.page.page_name, arg)

            argset.add(arg)
            continue

        # Ok, it's a page regexp

        # if there's something wrong with the regexp, ignore it and move on
        try:
            arg = arg[1:-1]
            # Fix relative links
            if (arg.startswith('/') or arg.startswith('./')
                    or arg.startswith('../')):
                arg = AbsPageName(request.page.page_name, arg)

            page_re = re.compile("%s" % arg)
        except Exception:
            # Narrowed from a bare except: do not swallow
            # KeyboardInterrupt/SystemExit.
            continue

        # Get all pages, check which of them match to the supplied regexp
        for page in request.graphdata:
            if page_re.match(page):
                argset.add(page)

    return (argset, pageargs, keyspec, excluded_keys, orderspec, limitregexps,
            limitops, indirection_keys, styles)
Ejemplo n.º 4
0
def _metatable_parseargs(request, args, cat_re, temp_re):
    """Parse MetaTable arguments into page, key, order and limit data.

    Splits the comma-separated args into page names/regexps, key
    specifications (``||key||``), operator comparisons (``key>3``),
    value regexps (``key=/.+/``) and order specifications
    (``>>key``/``<<key``).

    :param request: current request object; supplies the current page
        name and the page universe ``request.graphdata``.
    :param args: raw argument string.
    :param cat_re: category regexp (interface compatibility; unused
        in this body).
    :param temp_re: template regexp (interface compatibility; unused
        in this body).
    :returns: tuple ``(argset, pageargs, keyspec, excluded_keys,
        orderspec, limitregexps, limitops, indirection_keys, styles)``.
    """
    # Arg placeholders
    argset = set()
    keyspec = list()
    excluded_keys = list()
    orderspec = list()
    limitregexps = dict()
    limitops = dict()

    # Capacity for storing indirection keys in metadata comparisons
    # and regexps, eg. k->c=/.+/
    indirection_keys = list()

    # list styles
    styles = dict()

    # Flag: were there page arguments?
    pageargs = False

    # Regex preprocessing
    for arg in (x.strip() for x in args.split(',') if x.strip()):
        # metadata key spec, move on
        if arg.startswith('||') and arg.endswith('||'):
            # take order, strip empty ones, look at styles
            for key in arg.split('||'):
                if not key:
                    continue
                # Grab styles
                if key.startswith('<') and '>' in key:
                    style = parseAttributes(request, key[1:], '>')
                    key = key[key.index('>') + 1:].strip()

                    if style:
                        styles[key] = style[0]

                # Grab key exclusions
                if key.startswith('!'):
                    excluded_keys.append(key.lstrip('!'))
                    continue

                keyspec.append(key.strip())

            continue

        op_match = False
        # Check for Python operator comparisons
        for op in OPERATORS:
            if op in arg:
                data = arg.rsplit(op)

                # If this is not a comparison but indirection,
                # continue. Good: k->s>3, bad: k->s=/.+/
                if op == '>' and data[0].endswith('-'):
                    continue

                # Must have real comparison
                if not len(data) == 2:
                    if op == '==':
                        data.append('')
                    else:
                        continue

                # str.strip method instead of the py3-removed
                # string.strip function; same behavior.
                key, comp = [x.strip() for x in data]

                # Add indirection key
                if '->' in key:
                    indirection_keys.append(key)

                limitops.setdefault(key, list()).append((comp, op))
                op_match = True

            # One of the operators matched, no need to go forward
            if op_match:
                break

        # One of the operators matched, process next arg
        if op_match:
            continue

        # Metadata regexp, move on
        if '=' in arg:
            data = arg.split("=")
            key = data[0]

            # Add indirection key
            if '->' in key:
                indirection_keys.append(key)

            val = '='.join(data[1:])

            # Assume that value limits are regexps, if
            # not, escape them into exact regexp matches
            if not REGEX_RE.match(val):
                from MoinMoin.parser.text_moin_wiki import Parser

                # If the value is a page, make it a non-matching
                # regexp so that all link variations will generate a
                # match. An alternative would be to match from links
                # also, but in this case old-style metalinks, which
                # cannot be edited, would appear in metatables, which
                # is not wanted (old-style eg. [[Page| key: Page]])

                # Only allow non-matching regexp for values if they
                # are WikiWords. Eg. 'WikiWord some text' would match
                # 'WikiWord', emulating ye olde matching behaviour,
                # but 'nonwikiword some text' would not match
                # 'nonwikiword'
                if re.match(Parser.word_rule_js, val):
                    re_val = "(%s|" % (re.escape(val))
                else:
                    re_val = "(^%s$|" % (re.escape(val))
                # or as bracketed link (raw strings: bare '\[' is an
                # invalid escape sequence in normal str literals and
                # fails on Python 3)
                re_val += r"(?P<sta>\[\[)%s(?(sta)\]\])|" % (re.escape(val))

                # or as commented bracketed link
                re_val += r"(?P<stb>\[\[)%s(?(stb)\|[^\]]*\]\]))" % \
                    (re.escape(val))

                limitregexps.setdefault(
                    key, set()).add(re.compile(re_val, re.UNICODE))

            # else strip the //:s
            else:
                if len(val) > 1:
                    val = val[1:-1]

                limitregexps.setdefault(
                    key, set()).add(re.compile(val,
                                               re.IGNORECASE | re.UNICODE))
            continue

        # order spec
        if arg.startswith('>>') or arg.startswith('<<'):
            # eg. [('<<', 'koo'), ('>>', 'kk')]
            orderspec = re.findall(r'(?:(<<|>>)([^<>]+))', arg)
            continue

        # Ok, we have a page arg, i.e. a page or page regexp in args
        pageargs = True

        # Normal pages, check perms, encode and move on
        if not REGEX_RE.match(arg):
            # Fix relative links
            if (arg.startswith('/') or arg.startswith('./') or
                arg.startswith('../')):
                arg = AbsPageName(request.page.page_name, arg)

            argset.add(arg)
            continue

        # Ok, it's a page regexp

        # if there's something wrong with the regexp, ignore it and move on
        try:
            arg = arg[1:-1]
            # Fix relative links
            if (arg.startswith('/') or arg.startswith('./') or
                arg.startswith('../')):
                arg = AbsPageName(request.page.page_name, arg)

            page_re = re.compile("%s" % arg)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt and
            # SystemExit still propagate.
            continue

        # Get all pages, check which of them match to the supplied regexp
        for page in request.graphdata:
            if page_re.match(page):
                argset.add(page)

    return (argset, pageargs, keyspec, excluded_keys, orderspec,
            limitregexps, limitops, indirection_keys, styles)