Example #1
    def _sendNotification(self, comment, emails, email_lang, revisions, trivial):
        """
        Send notification email for a single language.
        @param comment: editor's comment given when saving the page
        @param emails: list of email addresses
        @param email_lang: language of emails
        @param revisions: revisions of this page
        @param trivial: the change is marked as trivial
        @rtype: int
        @return: sendmail result
        """
        _ = lambda s, formatted=True, r=self.request, l=email_lang: r.getText(s, formatted=formatted, lang=l)

        mailBody = _("Dear Wiki user,\n\n"
            'You have subscribed to a wiki page or wiki category on "%(sitename)s" for change notification.\n\n'
            "The following page has been changed by %(editor)s:\n"
            "%(pagelink)s\n\n", formatted=False) % {
                'editor': self.uid_override or user.getUserIdentification(self.request),
                'pagelink': self.request.getQualifiedURL(self.url(self.request)),
                'sitename': self.cfg.sitename or self.request.getBaseURL(),
        }

        if comment:
            mailBody = mailBody + \
                _("The comment on the change is:\n%(comment)s\n\n", formatted=False) % {'comment': comment}

        # append a diff (or append full page text if there is no diff)
        if len(revisions) < 2:
            mailBody = mailBody + \
                _("New page:\n", formatted=False) + \
                self.get_raw_body()
        else:
            lines = wikiutil.pagediff(self.request, self.page_name, revisions[1],
                                      self.page_name, revisions[0])
            
            if lines:
                mailBody = mailBody + "%s\n%s\n" % (("-" * 78), '\n'.join(lines))
            else:
                mailBody = mailBody + _("No differences found!\n", formatted=False)
        
        return util.mail.sendmail(self.request, emails,
            _('[%(sitename)s] %(trivial)sUpdate of "%(pagename)s" by %(username)s', formatted=False) % {
                'trivial' : (trivial and _("Trivial ", formatted=False)) or "",
                'sitename': self.cfg.sitename or "Wiki",
                'pagename': self.page_name,
                'username': self.uid_override or user.getUserIdentification(self.request),
            },
            mailBody, mail_from=self.cfg.mail_from)
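The method above assembles the mail body in two steps: the editor's comment (if any), then either the full page text for a brand-new page or a diff between the two newest revisions. Below is a minimal standalone sketch of that decision; get_raw_body and pagediff are stand-in callables for this sketch, not MoinMoin's Page.get_raw_body() and wikiutil.pagediff().

def build_mail_body(intro, comment, revisions, get_raw_body, pagediff):
    """Mirror _sendNotification's body assembly: optional comment, then
    full text for a first revision or a diff between the newest two."""
    body = intro
    if comment:
        body += "The comment on the change is:\n%s\n\n" % comment
    if len(revisions) < 2:
        # a new page has no older revision to diff against
        body += "New page:\n" + get_raw_body()
    else:
        lines = pagediff(revisions[1], revisions[0])
        if lines:
            body += "%s\n%s\n" % ("-" * 78, "\n".join(lines))
        else:
            body += "No differences found!\n"
    return body

# toy usage with stand-in callables (not MoinMoin objects)
print(build_mail_body("The following page has been changed:\n\n", "typo fix",
                      [2, 1],
                      lambda: "full page text",
                      lambda old, new: ["- old line", "+ new line"]))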
Example #2
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    cfg = request.cfg

    # get params
    items_limit = 100
    try:
        max_items = int(request.values["items"])
        max_items = min(max_items, items_limit)  # not more than `items_limit`
    except (KeyError, ValueError):
        # not more than 15 items in a RSS file by default
        max_items = 15
    try:
        unique = int(request.values.get("unique", 0))
    except ValueError:
        unique = 0
    try:
        diffs = int(request.values.get("diffs", 0))
    except ValueError:
        diffs = 0
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get("ddiffs", 0))
    except ValueError:
        ddiffs = 0

    urlfilter = request.values.get("filter")
    if urlfilter:
        urlfilter = re.compile(urlfilter)
    else:
        urlfilter = None

    # get data
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if urlfilter and not (urlfilter.match(line.pagename)):
            continue
        if not request.user.may.read(line.pagename):
            continue
        if not line.action.startswith("SAVE") or ((line.pagename in pages) and unique):
            continue
        # if log.dayChanged() and log.daycount > _MAX_DAYS: break
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(wikiutil.version2timestamp(line.ed_time_usecs))  # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = "application/rss+xml"
        request.expires = expires
        request.last_modified = lastmod
        request.headers["Etag"] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        output = structencoder(indent=u"yes")

        FEED_HEADER_COMMENT = """
<!--
    Add an "items=nnn" URL parameter to get more than the default 15 items.
    You cannot get more than %d items though.
    
    Add "unique=1" to get a list of changes where page names are unique,
    i.e. where only the latest change of each page is reflected.
    Add "diffs=1" to add change diffs to the description of each items.
    
    Add "ddiffs=1" to link directly to the diff (good for FeedReader).
    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i
-->
        """ % (
            items_limit,
            max_items,
            unique,
            diffs,
            ddiffs,
        )

        # Feed envelope
        page = Page(request, pagename)
        f = output.cofeed(
            ROOT(NS(u"", ATOM_NAMESPACE), NS(u"wiki", RSSWIKI_NAMESPACE), E_CURSOR((ATOM_NAMESPACE, u"feed")))
        )
        f.send(E((ATOM_NAMESPACE, u"id"), full_url(request, page).encode(config.charset))),
        f.send(E((ATOM_NAMESPACE, u"title"), cfg.sitename.encode(config.charset))),
        f.send(E((ATOM_NAMESPACE, u"link"), {u"href": request.url_root.encode(config.charset)})),
        f.send(E((ATOM_NAMESPACE, u"summary"), ("RecentChanges at %s" % cfg.sitename).encode(config.charset))),
        # Icon
        # E((ATOM_NAMESPACE, u'link'), {u'href': logo.encode(config.charset)}),

        # if cfg.interwikiname:
        #    handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            # link = full_url(request, page, anchor=anchor)
            if ddiffs:
                link = full_url(request, page, querystr={"action": "diff"})
            else:
                link = full_url(request, page)

            # description
            desc_text = item.comment
            if diffs:
                # TODO: rewrite / extend wikiutil.pagediff
                # searching for the matching pages doesn't really belong here
                revisions = page.getRevList()

                rl = len(revisions)
                for idx in range(rl):
                    rev = revisions[idx]
                    if rev <= item.rev:
                        if idx + 1 < rl:
                            lines = wikiutil.pagediff(
                                request, item.pagename, revisions[idx + 1], item.pagename, 0, ignorews=1
                            )
                            if len(lines) > 20:
                                lines = lines[:20] + ["...\n"]
                            lines = "\n".join(lines)
                            lines = wikiutil.escape(lines)
                            desc_text = "%s\n<pre>\n%s\n</pre>\n" % (desc_text, lines)
                        break
            # if desc_text:
            #    handler.simpleNode('description', desc_text)

            # contributor
            edattr = {}
            # if cfg.show_hosts:
            #    edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
            if item.editor[0] == "interwiki":
                edname = "%s:%s" % item.editor[1]
                ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
            else:  # 'ip'
                edname = item.editor[1]
                ##edattr[(None, 'link')] = link + "?action=info"

            history_link = full_url(request, page, querystr={"action": "info"})

            f.send(
                E(
                    (ATOM_NAMESPACE, u"entry"),
                    E((ATOM_NAMESPACE, u"id"), link.encode(config.charset)),
                    E((ATOM_NAMESPACE, u"title"), item.pagename.encode(config.charset)),
                    E((ATOM_NAMESPACE, u"updated"), timefuncs.W3CDate(item.time).encode(config.charset)),
                    E((ATOM_NAMESPACE, u"link"), {u"href": link.encode(config.charset)}),
                    E((ATOM_NAMESPACE, u"summary"), desc_text.encode(config.charset)),
                    E((ATOM_NAMESPACE, u"author"), E((ATOM_NAMESPACE, u"name"), edname.encode(config.charset))),
                    # E((ATOM_NAMESPACE, u'title'), item.pagename.encode(config.charset)),
                    # wiki extensions
                    E((RSSWIKI_NAMESPACE, u"wiki:version"), ("%i" % (item.ed_time_usecs)).encode(config.charset)),
                    E((RSSWIKI_NAMESPACE, u"wiki:status"), (u"deleted", u"updated")[page.exists()]),
                    E((RSSWIKI_NAMESPACE, u"wiki:diff"), link.encode(config.charset)),
                    E((RSSWIKI_NAMESPACE, u"wiki:history"), history_link.encode(config.charset)),
                    # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
                    # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )
                )
            )

        # emit logo data
        # if logo:
        #    handler.startNode('image', attr={
        #        (handler.xmlns['rdf'], 'about'): logo,
        #        })
        #    handler.simpleNode('title', cfg.sitename)
        #    handler.simpleNode('link', baseurl)
        #    handler.simpleNode('url', logo)
        #    handler.endNode('image')

        f.close()
        request.write(output.read())
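This action (and the later rss_rc variants below) guards feed generation with the same conditional-request check: a 304 is sent when at least one of If-Modified-Since / If-None-Match matches and the other is either absent or also matches. A small sketch of that rule as a pure function over plain strings (the request attributes are passed in as arguments here):

def not_modified(if_modified_since, if_none_match, timestamp, etag):
    """Return True when a 304 Not Modified response is appropriate."""
    ims_ok = if_modified_since == timestamp
    inm_ok = if_none_match == etag
    if ims_ok:
        return (not if_none_match) or inm_ok
    if inm_ok:
        return (not if_modified_since) or ims_ok
    return False

# both validators match -> 304
assert not_modified("Mon, 01 Dec 2003 00:00:00 GMT", "42-15-0-0-0",
                    "Mon, 01 Dec 2003 00:00:00 GMT", "42-15-0-0-0")
# only the ETag is sent, and it matches -> 304
assert not_modified(None, "42-15-0-0-0",
                    "Mon, 01 Dec 2003 00:00:00 GMT", "42-15-0-0-0")
# ETag matches but If-Modified-Since differs -> full response
assert not not_modified("Tue, 02 Dec 2003 00:00:00 GMT", "42-15-0-0-0",
                        "Mon, 01 Dec 2003 00:00:00 GMT", "42-15-0-0-0")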
Example #3
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    if not wikixml.ok:
        #XXX send error message
        pass

    cfg = request.cfg

    # get params
    items_limit = 100
    try:
        max_items = int(request.form['items'][0])
        max_items = min(max_items, items_limit) # not more than `items_limit`
    except (KeyError, ValueError):
        # not more than 15 items in a RSS file by default
        max_items = 15
    try:
        unique = int(request.form.get('unique', [0])[0])
    except ValueError:
        unique = 0
    try:
        diffs = int(request.form.get('diffs', [0])[0])
    except ValueError:
        diffs = 0
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.form.get('ddiffs', [0])[0])
    except ValueError:
        ddiffs = 0

    # prepare output
    out = StringIO.StringIO()
    handler = RssGenerator(out)

    # get data
    interwiki = request.getBaseURL()
    if interwiki[-1] != "/": interwiki = interwiki + "/"

    logo = re.search(r'src="([^"]*)"', cfg.logo_string)
    if logo: logo = request.getQualifiedURL(logo.group(1))
    
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        if ((line.action[:4] != 'SAVE') or
            ((line.pagename in pages) and unique)): continue
        #if log.dayChanged() and log.daycount > _MAX_DAYS: break
        line.editor = line.getEditorData(request)[1]
        line.time = util.datetime.tmtuple(wikiutil.version2timestamp(line.ed_time_usecs)) # UTC
        logdata.append(line)
        pages[line.pagename] = None
        counter += 1
        if counter >= max_items:
            break
    del log

    # start SAX stream
    handler.startDocument()
    handler._out.write(
        '<!--\n'
        '    Add an "items=nnn" URL parameter to get more than the default 15 items.\n'
        '    You cannot get more than %d items though.\n'
        '    \n'
        '    Add "unique=1" to get a list of changes where page names are unique,\n'
        '    i.e. where only the latest change of each page is reflected.\n'
        '    \n'
        '    Add "diffs=1" to add change diffs to the description of each items.\n'
        '    \n'
        '    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
        '    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i'
        '-->\n' % (items_limit, max_items, unique, diffs, ddiffs)
        )

    # emit channel description
    handler.startNode('channel', {
        (handler.xmlns['rdf'], 'about'): request.getBaseURL(),
        })
    handler.simpleNode('title', cfg.sitename)
    handler.simpleNode('link', interwiki + wikiutil.quoteWikinameURL(pagename))
    handler.simpleNode('description', 'RecentChanges at %s' % cfg.sitename)
    if logo:
        handler.simpleNode('image', None, {
            (handler.xmlns['rdf'], 'resource'): logo,
            })
    if cfg.interwikiname:
        handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

    handler.startNode('items')
    handler.startNode(('rdf', 'Seq'))
    for item in logdata:
        link = "%s%s#%04d%02d%02d%02d%02d%02d" % ((interwiki,
                wikiutil.quoteWikinameURL(item.pagename),) + item.time[:6])
        handler.simpleNode(('rdf', 'li'), None, attr={
            (handler.xmlns['rdf'], 'resource'): link,
        })
    handler.endNode(('rdf', 'Seq'))
    handler.endNode('items')
    handler.endNode('channel')

    # emit logo data
    if logo:
        handler.startNode('image', attr={
            (handler.xmlns['rdf'], 'about'): logo,
            })
        handler.simpleNode('title', cfg.sitename)
        handler.simpleNode('link', interwiki)
        handler.simpleNode('url', logo)
        handler.endNode('image')

    # emit items
    for item in logdata:
        page = Page(request, item.pagename)
        link = interwiki + wikiutil.quoteWikinameURL(item.pagename)
        rdflink = "%s#%04d%02d%02d%02d%02d%02d" % ((link,) + item.time[:6])
        handler.startNode('item', attr={
            (handler.xmlns['rdf'], 'about'): rdflink,
        })

        # general attributes
        handler.simpleNode('title', item.pagename)
        if ddiffs:
            handler.simpleNode('link', link+"?action=diff")
        else:
            handler.simpleNode('link', link)
            
        handler.simpleNode(('dc', 'date'), util.W3CDate(item.time))

        # description
        desc_text = item.comment
        if diffs:
            # TODO: rewrite / extend wikiutil.pagediff
            # searching for the matching pages doesn't really belong here
            revisions = page.getRevList()

            rl = len(revisions)
            for idx in range(rl):
                rev = revisions[idx]
                if rev <= item.rev:
                    if idx+1 < rl:
                        lines = wikiutil.pagediff(request, item.pagename, revisions[idx+1], item.pagename, 0, ignorews=1)
                        if len(lines) > 20: lines = lines[:20] + ['...\n']
                        desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text, '\n'.join(lines))
                    break
        if desc_text:
            handler.simpleNode('description', desc_text)

        # contributor
        edattr = {}
        if cfg.show_hosts:
            edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
        if isinstance(item.editor, Page):
            edname = item.editor.page_name
            ##edattr[(None, 'link')] = interwiki + wikiutil.quoteWikiname(edname)
        else:
            edname = item.editor
            ##edattr[(None, 'link')] = link + "?action=info"
        
        # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
        # if you know how to do this right, please send us a patch
        
        handler.startNode(('dc', 'contributor'))
        handler.startNode(('rdf', 'Description'), attr=edattr)
        handler.simpleNode(('rdf', 'value'), edname)
        handler.endNode(('rdf', 'Description'))
        handler.endNode(('dc', 'contributor'))

        # wiki extensions
        handler.simpleNode(('wiki', 'version'), "%i" % (item.ed_time_usecs))
        handler.simpleNode(('wiki', 'status'), ('deleted', 'updated')[page.exists()])
        handler.simpleNode(('wiki', 'diff'), link + "?action=diff")
        handler.simpleNode(('wiki', 'history'), link + "?action=info")
        # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor ) 
        # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA ) 

        handler.endNode('item')

    # end SAX stream
    handler.endDocument()

    # send the generated XML document
    request.http_headers(["Content-Type: text/xml; charset=%s" % config.charset] + request.nocache)
    request.write(out.getvalue())
    request.finish()
    request.no_closing_html_code = 1
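The rdf:li resources in this variant embed a YYYYMMDDHHMMSS anchor built from the first six fields of the edit's UTC time tuple. A quick standalone illustration of that formatting:

import time

tm = time.gmtime(1070236800)                  # 2003-12-01 00:00:00 UTC
anchor = "%04d%02d%02d%02d%02d%02d" % tm[:6]
print(anchor)                                 # '20031201000000'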
Example #4
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    if not wikixml.ok:
        request.mimetype = 'text/plain'
        request.write(
            "rss_rc action is not supported because of missing pyxml module.")
        return
    if request.isSpiderAgent:  # reduce bot cpu usage
        return ''

    cfg = request.cfg
    _ = request.getText

    # get params
    def_max_items = max_items = cfg.rss_items_default
    items_limit = cfg.rss_items_limit
    unique = cfg.rss_unique
    diffs = cfg.rss_diffs
    ddiffs = cfg.rss_ddiffs
    max_lines = cfg.rss_lines_default
    lines_limit = cfg.rss_lines_limit
    show_att = cfg.rss_show_attachment_entries
    page_pattern = cfg.rss_page_filter_pattern

    try:
        max_items = min(int(request.values.get('items', max_items)),
                        items_limit)
    except ValueError:
        pass
    try:
        unique = int(request.values.get('unique', unique))
    except ValueError:
        pass
    try:
        diffs = int(request.values.get('diffs', diffs))
    except ValueError:
        pass
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get('ddiffs', ddiffs))
    except ValueError:
        pass
    try:
        max_lines = min(int(request.values.get('lines', max_lines)),
                        lines_limit)
    except ValueError:
        pass
    try:
        show_att = int(request.values.get('show_att', show_att))
    except ValueError:
        pass
    try:
        page_pattern = request.values.get('page', page_pattern)
    except ValueError:
        pass

    # if we are just interested in a specific page, using the local edit-log
    # of that page is much faster than the global one - esp. if the page was
    # NOT recently changed and the global edit-log is rather big.
    kw = dict(rootpagename=page_pattern) if is_single_page_match(
        page_pattern) else {}
    log = editlog.EditLog(request, **kw)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        if ((not show_att and not line.action.startswith('SAVE'))
                or ((line.pagename in pages) and unique)
                or not match_page(line.pagename, page_pattern)):
            continue
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(
            wikiutil.version2timestamp(line.ed_time_usecs))  # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique,
                                     max_lines, show_att)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = 'application/rss+xml'
        request.expires = expires
        request.last_modified = lastmod
        request.headers['Etag'] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        out = StringIO.StringIO()
        handler = RssGenerator(out)

        # start SAX stream
        handler.startDocument()
        handler.write(
            u'<!--\n'
            u'    Add an "items=nnn" URL parameter to get more than the \n'
            u'    default %(def_max_items)d items. You cannot get more than \n'
            u'    %(items_limit)d items though.\n'
            u'    \n'
            u'    Add "unique=1" to get a list of changes where page names are unique,\n'
            u'    i.e. where only the latest change of each page is reflected.\n'
            u'    \n'
            u'    Add "diffs=1" to add change diffs to the description of each items.\n'
            u'    \n'
            u'    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
            u'    \n'
            u'    Add "lines=nnn" to change maximum number of diff/body lines \n'
            u'    to show. Cannot be more than %(lines_limit)d.\n'
            u'    \n'
            u'    Add "show_att=1" to show items related to attachments.\n'
            u'    \n'
            u'    Add "page=pattern" to show feed only for specific pages.\n'
            u'    Pattern can be empty (it would match to all pages), \n'
            u'    can start with circumflex (it would be interpreted as \n'
            u'    regular expression in this case), end with slash (for \n'
            u'    getting feed for page tree) or point to specific page (if \n'
            u'    none of the above can be applied).\n'
            u'    \n'
            u'    Current settings: items=%(max_items)i, unique=%(unique)i, \n'
            u'    diffs=%(diffs)i, ddiffs=%(ddiffs)i, lines=%(max_lines)i, \n'
            u'    show_att=%(show_att)i\n'
            u'-->\n' % locals())

        # emit channel description
        handler.startNode('channel', {
            (handler.xmlns['rdf'], 'about'): request.url_root,
        })
        handler.simpleNode('title', cfg.sitename)
        page = Page(request, pagename)
        handler.simpleNode('link', full_url(request, page))
        handler.simpleNode('description',
                           u'RecentChanges at %s' % cfg.sitename)
        if logo:
            handler.simpleNode('image', None, {
                (handler.xmlns['rdf'], 'resource'): logo,
            })
        if cfg.interwikiname:
            handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        handler.startNode('items')
        handler.startNode(('rdf', 'Seq'))
        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            link = full_url(request, page, anchor=anchor)
            handler.simpleNode(('rdf', 'li'),
                               None,
                               attr={
                                   (handler.xmlns['rdf'], 'resource'): link,
                               })
        handler.endNode(('rdf', 'Seq'))
        handler.endNode('items')
        handler.endNode('channel')

        # emit logo data
        if logo:
            handler.startNode('image',
                              attr={
                                  (handler.xmlns['rdf'], 'about'): logo,
                              })
            handler.simpleNode('title', cfg.sitename)
            handler.simpleNode('link', baseurl)
            handler.simpleNode('url', logo)
            handler.endNode('image')

        # Mapping { oldname: curname } for maintaining page renames
        pagename_map = {}

        # emit items
        for item in logdata:
            if item.pagename in pagename_map:
                cur_pagename = pagename_map[item.pagename]
            else:
                cur_pagename = item.pagename
            page = Page(request, cur_pagename)
            action = item.action
            comment = item.comment
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            rdflink = full_url(request, page, anchor=anchor)
            handler.startNode('item',
                              attr={
                                  (handler.xmlns['rdf'], 'about'): rdflink,
                              })

            # general attributes
            handler.simpleNode('title', item.pagename)
            handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))

            show_diff = diffs

            if action.startswith('ATT'):  # Attachment
                show_diff = 0
                filename = wikiutil.url_unquote(item.extra)
                att_exists = AttachFile.exists(request, cur_pagename, filename)

                if action == 'ATTNEW':
                    # Once attachment deleted this link becomes invalid but we
                    # preserve it to prevent appearance of new RSS entries in
                    # RSS readers.
                    if ddiffs:
                        handler.simpleNode(
                            'link',
                            attach_url(request,
                                       cur_pagename,
                                       filename,
                                       do='view'))

                    comment = _(u"Upload of attachment '%(filename)s'.") % {
                        'filename': filename
                    }

                elif action == 'ATTDEL':
                    if ddiffs:
                        handler.simpleNode(
                            'link',
                            full_url(request,
                                     page,
                                     querystr={'action': 'AttachFile'}))

                    comment = _(u"Attachment '%(filename)s' deleted.") % {
                        'filename': filename
                    }

                elif action == 'ATTDRW':
                    if ddiffs:
                        handler.simpleNode(
                            'link',
                            attach_url(request,
                                       cur_pagename,
                                       filename,
                                       do='view'))

                    comment = _(u"Drawing '%(filename)s' saved.") % {
                        'filename': filename
                    }

            elif action.startswith('SAVE'):
                if action == 'SAVE/REVERT':
                    to_rev = int(item.extra)
                    comment = (_(u"Revert to revision %(rev)d.") % {
                        'rev': to_rev}) + "<br />" \
                        + _("Comment:") + " " + comment

                elif action == 'SAVE/RENAME':
                    show_diff = 0
                    comment = (_(u"Renamed from '%(oldpagename)s'.") % {
                        'oldpagename': item.extra}) + "<br />" \
                        + _("Comment:") + " " + comment
                    if item.pagename in pagename_map:
                        newpage = pagename_map[item.pagename]
                        del pagename_map[item.pagename]
                        pagename_map[item.extra] = newpage
                    else:
                        pagename_map[item.extra] = item.pagename

                elif action == 'SAVENEW':
                    comment = _(u"New page:\n") + comment

                item_rev = int(item.rev)

                # If we use diffs/ddiffs, we should calculate proper links and
                # content
                if ddiffs:
                    # first revision can't have older revisions to diff with
                    if item_rev == 1:
                        handler.simpleNode(
                            'link',
                            full_url(request,
                                     page,
                                     querystr={
                                         'action': 'recall',
                                         'rev': str(item_rev)
                                     }))
                    else:
                        handler.simpleNode(
                            'link',
                            full_url(request,
                                     page,
                                     querystr={
                                         'action': 'diff',
                                         'rev1': str(item_rev),
                                         'rev2': str(item_rev - 1)
                                     }))

                if show_diff:
                    if item_rev == 1:
                        lines = Page(request, cur_pagename,
                                     rev=item_rev).getlines()
                    else:
                        lines = wikiutil.pagediff(request,
                                                  cur_pagename,
                                                  item_rev - 1,
                                                  cur_pagename,
                                                  item_rev,
                                                  ignorews=1)

                    if len(lines) > max_lines:
                        lines = lines[:max_lines] + ['...\n']

                    lines = '\n'.join(lines)
                    lines = wikiutil.escape(lines)

                    comment = u'%s\n<pre>\n%s\n</pre>\n' % (comment, lines)

                if not ddiffs:
                    handler.simpleNode('link', full_url(request, page))

            if comment:
                handler.simpleNode('description', comment)

            # contributor
            if cfg.show_names:
                edattr = {}
                if cfg.show_hosts:
                    edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
                if item.editor[0] == 'interwiki':
                    edname = "%s:%s" % item.editor[1]
                    ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
                else:  # 'ip'
                    edname = item.editor[1]
                    ##edattr[(None, 'link')] = link + "?action=info"

                # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
                # if you know how to do this right, please send us a patch

                handler.startNode(('dc', 'contributor'))
                handler.startNode(('rdf', 'Description'), attr=edattr)
                handler.simpleNode(('rdf', 'value'), edname)
                handler.endNode(('rdf', 'Description'))
                handler.endNode(('dc', 'contributor'))

            # wiki extensions
            handler.simpleNode(('wiki', 'version'),
                               "%i" % (item.ed_time_usecs))
            handler.simpleNode(('wiki', 'status'),
                               ('deleted', 'updated')[page.exists()])
            handler.simpleNode(('wiki', 'diff'),
                               full_url(request,
                                        page,
                                        querystr={'action': 'diff'}))
            handler.simpleNode(('wiki', 'history'),
                               full_url(request,
                                        page,
                                        querystr={'action': 'info'}))
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode('item')

        # end SAX stream
        handler.endDocument()

        request.write(out.getvalue())
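The pagename_map bookkeeping in Example #4 walks the edit log from newest to oldest, so a SAVE/RENAME entry maps the old name (item.extra) to whatever the page is called today, chaining through later renames. A minimal sketch of just that logic, with a hypothetical list of (pagename, action, extra) tuples standing in for edit-log lines:

def current_names(log_newest_first):
    """Build {old_name: current_name} from rename entries, mirroring the
    pagename_map handling in the SAVE/RENAME branch above."""
    pagename_map = {}
    for pagename, action, extra in log_newest_first:
        if action == 'SAVE/RENAME':
            old_name = extra
            if pagename in pagename_map:
                # this page was renamed again later; carry the newest name back
                pagename_map[old_name] = pagename_map.pop(pagename)
            else:
                pagename_map[old_name] = pagename
    return pagename_map

# chronologically A was renamed to B, then B to C; the log is newest-first
log = [('C', 'SAVE/RENAME', 'B'),
       ('B', 'SAVE/RENAME', 'A')]
print(current_names(log))   # {'A': 'C'} -- older entries for A resolve to C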
Example #5
def page_change_message(msgtype, request, page, lang, **kwargs):
    """Prepare a notification text for a page change of given type

    @param msgtype: a type of message to send (page_changed, page_renamed, ...)
    @type msgtype: str or unicode
    @param **kwargs: a dictionary of additional parameters, which depend on msgtype

    @return: dictionary containing data about the changed page
    @rtype: dict

    """
    _ = lambda text: request.getText(text, lang=lang)
    changes = {
        'page_name': page.page_name,
        'revision': str(page.getRevList()[0])
    }

    if msgtype == "page_changed":
        revisions = kwargs['revisions']

    if msgtype == "page_changed":
        changes['text'] = _(
            "Dear Wiki user,\n\n"
            'You have subscribed to a wiki page or wiki category on "%(sitename)s" for change notification.\n\n'
            'The "%(pagename)s" page has been changed by %(editor)s:\n') % {
                'pagename': page.page_name,
                'editor': page.uid_override
                or user.getUserIdentification(request),
                'sitename': page.cfg.sitename or request.url_root,
            }

        # append a diff (or append full page text if there is no diff)
        if len(revisions) < 2:
            changes['diff'] = _("New page:\n") + page.get_raw_body()
        else:
            lines = wikiutil.pagediff(request, page.page_name, revisions[1],
                                      page.page_name, revisions[0])
            if lines:
                changes['diff'] = '\n'.join(lines)
            else:
                changes['diff'] = _("No differences found!\n")

    elif msgtype == "page_deleted":
        changes['text'] = _(
            "Dear wiki user,\n\n"
            'You have subscribed to a wiki page "%(sitename)s" for change notification.\n\n'
            'The page "%(pagename)s" has been deleted by %(editor)s:\n\n') % {
                'pagename': page.page_name,
                'editor': page.uid_override
                or user.getUserIdentification(request),
                'sitename': page.cfg.sitename or request.url_root,
            }

    elif msgtype == "page_renamed":
        changes['text'] = _(
            "Dear wiki user,\n\n"
            'You have subscribed to a wiki page "%(sitename)s" for change notification.\n\n'
            'The page "%(pagename)s" has been renamed from "%(oldname)s" by %(editor)s:\n'
        ) % {
            'editor': page.uid_override or user.getUserIdentification(request),
            'pagename': page.page_name,
            'sitename': page.cfg.sitename or request.url_root,
            'oldname': kwargs['old_name']
        }

        changes['old_name'] = kwargs['old_name']

    else:
        raise UnknownChangeType()

    changes['editor'] = page.uid_override or user.getUserIdentification(
        request)
    if 'comment' in kwargs and kwargs['comment']:
        changes['comment'] = kwargs['comment']

    return changes
Example #6
def page_change_message(msgtype, request, page, lang, **kwargs):
    """Prepare a notification text for a page change of given type

    @param msgtype: a type of message to send (page_changed, page_renamed, ...)
    @type msgtype: str or unicode
    @param **kwargs: a dictionary of additional parameters, which depend on msgtype

    @return: dictionary containing data about the changed page
    @rtype: dict

    """
    _ = lambda text: request.getText(text, lang=lang)
    changes = {"page_name": page.page_name, "revision": str(page.getRevList()[0])}

    if msgtype == "page_changed":
        revisions = kwargs["revisions"]

    if msgtype == "page_changed":
        changes["text"] = _(
            "Dear Wiki user,\n\n"
            'You have subscribed to a wiki page or wiki category on "%(sitename)s" for change notification.\n\n'
            'The "%(pagename)s" page has been changed by %(editor)s:\n'
        ) % {
            "pagename": page.page_name,
            "editor": page.uid_override or user.getUserIdentification(request),
            "sitename": page.cfg.sitename or request.url_root,
        }

        # append a diff (or append full page text if there is no diff)
        if len(revisions) < 2:
            changes["diff"] = _("New page:\n") + page.get_raw_body()
        else:
            lines = wikiutil.pagediff(request, page.page_name, revisions[1], page.page_name, revisions[0])
            if lines:
                changes["diff"] = "\n".join(lines)
            else:
                changes["diff"] = _("No differences found!\n")

    elif msgtype == "page_deleted":
        changes["text"] = _(
            "Dear wiki user,\n\n"
            'You have subscribed to a wiki page "%(sitename)s" for change notification.\n\n'
            'The page "%(pagename)s" has been deleted by %(editor)s:\n\n'
        ) % {
            "pagename": page.page_name,
            "editor": page.uid_override or user.getUserIdentification(request),
            "sitename": page.cfg.sitename or request.url_root,
        }

    elif msgtype == "page_renamed":
        changes["text"] = _(
            "Dear wiki user,\n\n"
            'You have subscribed to a wiki page "%(sitename)s" for change notification.\n\n'
            'The page "%(pagename)s" has been renamed from "%(oldname)s" by %(editor)s:\n'
        ) % {
            "editor": page.uid_override or user.getUserIdentification(request),
            "pagename": page.page_name,
            "sitename": page.cfg.sitename or request.url_root,
            "oldname": kwargs["old_name"],
        }

        changes["old_name"] = kwargs["old_name"]

    else:
        raise UnknownChangeType()

    changes["editor"] = page.uid_override or user.getUserIdentification(request)
    if "comment" in kwargs and kwargs["comment"]:
        changes["comment"] = kwargs["comment"]

    return changes
Example #7
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    if not wikixml.ok:
        request.mimetype = 'text/plain'
        request.write(
            "rss_rc action is not supported because of missing pyxml module.")
        return

    cfg = request.cfg

    # get params
    items_limit = 100
    try:
        max_items = int(request.values['items'])
        max_items = min(max_items, items_limit)  # not more than `items_limit`
    except (KeyError, ValueError):
        # not more than 15 items in a RSS file by default
        max_items = 15
    try:
        unique = int(request.values.get('unique', 0))
    except ValueError:
        unique = 0
    try:
        diffs = int(request.values.get('diffs', 0))
    except ValueError:
        diffs = 0
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get('ddiffs', 0))
    except ValueError:
        ddiffs = 0

    # get data
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        if (not line.action.startswith('SAVE')
                or ((line.pagename in pages) and unique)):
            continue
        #if log.dayChanged() and log.daycount > _MAX_DAYS: break
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(
            wikiutil.version2timestamp(line.ed_time_usecs))  # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = 'application/rss+xml'
        request.expires = expires
        request.last_modified = lastmod
        request.headers['Etag'] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        out = StringIO.StringIO()
        handler = RssGenerator(out)

        # start SAX stream
        handler.startDocument()
        handler._out.write(
            '<!--\n'
            '    Add an "items=nnn" URL parameter to get more than the default 15 items.\n'
            '    You cannot get more than %d items though.\n'
            '    \n'
            '    Add "unique=1" to get a list of changes where page names are unique,\n'
            '    i.e. where only the latest change of each page is reflected.\n'
            '    \n'
            '    Add "diffs=1" to add change diffs to the description of each items.\n'
            '    \n'
            '    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
            '    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i'
            '-->\n' % (items_limit, max_items, unique, diffs, ddiffs))

        # emit channel description
        handler.startNode('channel', {
            (handler.xmlns['rdf'], 'about'): request.url_root,
        })
        handler.simpleNode('title', cfg.sitename)
        page = Page(request, pagename)
        handler.simpleNode('link', full_url(request, page))
        handler.simpleNode('description', 'RecentChanges at %s' % cfg.sitename)
        if logo:
            handler.simpleNode('image', None, {
                (handler.xmlns['rdf'], 'resource'): logo,
            })
        if cfg.interwikiname:
            handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        handler.startNode('items')
        handler.startNode(('rdf', 'Seq'))
        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            link = full_url(request, page, anchor=anchor)
            handler.simpleNode(('rdf', 'li'),
                               None,
                               attr={
                                   (handler.xmlns['rdf'], 'resource'): link,
                               })
        handler.endNode(('rdf', 'Seq'))
        handler.endNode('items')
        handler.endNode('channel')

        # emit logo data
        if logo:
            handler.startNode('image',
                              attr={
                                  (handler.xmlns['rdf'], 'about'): logo,
                              })
            handler.simpleNode('title', cfg.sitename)
            handler.simpleNode('link', baseurl)
            handler.simpleNode('url', logo)
            handler.endNode('image')

        # emit items
        for item in logdata:
            page = Page(request, item.pagename)
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            rdflink = full_url(request, page, anchor=anchor)
            handler.startNode('item',
                              attr={
                                  (handler.xmlns['rdf'], 'about'): rdflink,
                              })

            # general attributes
            handler.simpleNode('title', item.pagename)
            if ddiffs:
                handler.simpleNode(
                    'link', full_url(request,
                                     page,
                                     querystr={'action': 'diff'}))
            else:
                handler.simpleNode('link', full_url(request, page))

            handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))

            # description
            desc_text = item.comment
            if diffs:
                # TODO: rewrite / extend wikiutil.pagediff
                # searching for the matching pages doesn't really belong here
                revisions = page.getRevList()

                rl = len(revisions)
                for idx in range(rl):
                    rev = revisions[idx]
                    if rev <= item.rev:
                        if idx + 1 < rl:
                            lines = wikiutil.pagediff(request,
                                                      item.pagename,
                                                      revisions[idx + 1],
                                                      item.pagename,
                                                      0,
                                                      ignorews=1)
                            if len(lines) > 20:
                                lines = lines[:20] + ['...\n']
                            lines = '\n'.join(lines)
                            lines = wikiutil.escape(lines)
                            desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text,
                                                                     lines)
                        break
            if desc_text:
                handler.simpleNode('description', desc_text)

            # contributor
            edattr = {}
            if cfg.show_hosts:
                edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
            if item.editor[0] == 'interwiki':
                edname = "%s:%s" % item.editor[1]
                ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
            else:  # 'ip'
                edname = item.editor[1]
                ##edattr[(None, 'link')] = link + "?action=info"

            # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
            # if you know how to do this right, please send us a patch

            handler.startNode(('dc', 'contributor'))
            handler.startNode(('rdf', 'Description'), attr=edattr)
            handler.simpleNode(('rdf', 'value'), edname)
            handler.endNode(('rdf', 'Description'))
            handler.endNode(('dc', 'contributor'))

            # wiki extensions
            handler.simpleNode(('wiki', 'version'),
                               "%i" % (item.ed_time_usecs))
            handler.simpleNode(('wiki', 'status'),
                               ('deleted', 'updated')[page.exists()])
            handler.simpleNode(('wiki', 'diff'),
                               full_url(request,
                                        page,
                                        querystr={'action': 'diff'}))
            handler.simpleNode(('wiki', 'history'),
                               full_url(request,
                                        page,
                                        querystr={'action': 'info'}))
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode('item')

        # end SAX stream
        handler.endDocument()

        request.write(out.getvalue())
Example #8
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    if not wikixml.ok:
        httpheaders = ["Content-Type: text/plain; charset=%s" % config.charset]
        request.emit_http_headers(httpheaders)
        request.write("rss_rc action is not supported because of missing pyxml module.")
        return

    cfg = request.cfg

    # get params
    items_limit = 100
    try:
        max_items = int(request.form['items'][0])
        max_items = min(max_items, items_limit) # not more than `items_limit`
    except (KeyError, ValueError):
        # not more than 15 items in a RSS file by default
        max_items = 15
    try:
        unique = int(request.form.get('unique', [0])[0])
    except ValueError:
        unique = 0
    try:
        diffs = int(request.form.get('diffs', [1])[0])
    except ValueError:
        diffs = 1 
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.form.get('ddiffs', [1])[0])
    except ValueError:
        ddiffs = 1 

    # get data
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        if (not line.action.startswith('SAVE') or
            ((line.pagename in pages) and unique)): continue
        #if log.dayChanged() and log.daycount > _MAX_DAYS: break
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(wikiutil.version2timestamp(line.ed_time_usecs)) # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.emit_http_headers(["Status: 304 Not modified"])
        else:
            request.emit_http_headers(["Status: 304 Not modified"])
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.emit_http_headers(["Status: 304 Not modified"])
        else:
            request.emit_http_headers(["Status: 304 Not modified"])
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = timefuncs.formathttpdate(time.time() + cfg.rss_cache)

        httpheaders = ["Content-Type: text/xml; charset=%s" % config.charset,
                       "Expires: %s" % expires,
                       "Last-Modified: %s" % timestamp,
                       "Etag: %s" % etag, ]

        # send the generated XML document
        request.emit_http_headers(httpheaders)

        baseurl = request.getBaseURL()
        if not baseurl.endswith('/'):
            baseurl += '/'

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        out = StringIO.StringIO()
        handler = RssGenerator(out)

        # start SAX stream
        handler.startDocument()
        handler._out.write(
            '<!--\n'
            '    Add an "items=nnn" URL parameter to get more than the default 15 items.\n'
            '    You cannot get more than %d items though.\n'
            '    \n'
            '    Add "unique=1" to get a list of changes where page names are unique,\n'
            '    i.e. where only the latest change of each page is reflected.\n'
            '    \n'
            '    Add "diffs=0" to remove change diffs to the description of each items.\n'
            '    \n'
            '    Add "ddiffs=0" to link directly to the wikipage\n'
            '    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i'
            '-->\n' % (items_limit, max_items, unique, diffs, ddiffs)
            )

        # emit channel description
        handler.startNode('channel', {
            (handler.xmlns['rdf'], 'about'): request.getBaseURL(),
            })
        handler.simpleNode('title', cfg.sitename)
        page = Page(request, pagename)
        handler.simpleNode('link', full_url(request, page))
        handler.simpleNode('description', 'RecentChanges at %s' % cfg.sitename)
        if logo:
            handler.simpleNode('image', None, {
                (handler.xmlns['rdf'], 'resource'): logo,
                })
        if cfg.interwikiname:
            handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        handler.startNode('items')
        handler.startNode(('rdf', 'Seq'))
        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            link = full_url(request, page, anchor=anchor)
            handler.simpleNode(('rdf', 'li'), None, attr={(handler.xmlns['rdf'], 'resource'): link, })
        handler.endNode(('rdf', 'Seq'))
        handler.endNode('items')
        handler.endNode('channel')

        # emit logo data
        if logo:
            handler.startNode('image', attr={
                (handler.xmlns['rdf'], 'about'): logo,
                })
            handler.simpleNode('title', cfg.sitename)
            handler.simpleNode('link', baseurl)
            handler.simpleNode('url', logo)
            handler.endNode('image')

        # emit items
        for item in logdata:
            page = Page(request, item.pagename)
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            rdflink = full_url(request, page, anchor=anchor)
            handler.startNode('item', attr={(handler.xmlns['rdf'], 'about'): rdflink, })

            # general attributes
            handler.simpleNode('title', item.pagename)
            if ddiffs:
                handler.simpleNode('link', full_url(request, page, querystr={'action': 'diff'}))
            else:
                handler.simpleNode('link', full_url(request, page))

            handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))

            # description
            if item.editor[0] == 'interwiki':
                edname = "%s:%s" % item.editor[1]
                ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
            else: # 'ip'
                edname = item.editor[1]
                ##edattr[(None, 'link')] = link + "?action=info"

            # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
            # if you know how to do this right, please send us a patch
            user = edname.split(':')[-1]
            user_link = full_url(request, Page(request, user))

            desc_text = 'Cambio por <a href="%s">%s</a> -- "%s"' % (user_link, user, item.comment)
            if diffs:
                # TODO: rewrite / extend wikiutil.pagediff
                # searching for the matching pages doesn't really belong here
                revisions = page.getRevList()

                rl = len(revisions)
                for idx in range(rl):
                    rev = revisions[idx]
                    if rev <= item.rev:
                        if idx + 1 < rl:
                            lines = wikiutil.pagediff(request, item.pagename, revisions[idx+1], item.pagename, 0, ignorews=1)
                            if len(lines) > 20:
                                lines = lines[:20] + ['... (continued)\n']

                            # colorize the diff lines for the HTML description
                            fixed_lines = []
                            for line in lines:
                                line = wikiutil.escape(line)
                                if line.startswith('+'):
                                    line = "<font color='green'>%s</font>" % line
                                elif line.startswith('-'):
                                    line = "<font color='red'>%s</font>" % line
                                fixed_lines.append(line)
                            lines = fixed_lines    
                            lines = '\n'.join(lines)
                            desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text, lines)
                        break
            if desc_text:
                handler.simpleNode('description', desc_text)

            # contributor
            edattr = {}
            if cfg.show_hosts:
                edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
           
            handler.startNode(('dc', 'contributor'))
            handler.startNode(('rdf', 'Description'), attr=edattr)
            handler.simpleNode(('rdf', 'value'), edname)
            handler.endNode(('rdf', 'Description'))
            handler.endNode(('dc', 'contributor'))

            # wiki extensions
            handler.simpleNode(('wiki', 'version'), "%i" % (item.ed_time_usecs))
            handler.simpleNode(('wiki', 'status'), ('deleted', 'updated')[page.exists()])
            handler.simpleNode(('wiki', 'diff'), full_url(request, page, querystr={'action': 'diff'}))
            handler.simpleNode(('wiki', 'history'), full_url(request, page, querystr={'action': 'info'}))
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode('item')

        # end SAX stream
        handler.endDocument()

        request.write(out.getvalue())
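
The item anchors and dc:date values above are both derived from the UTC time tuple that each edit-log line carries. A minimal sketch of the two formats, assuming timefuncs.W3CDate emits a W3C / RFC 3339 style timestamp (the helper's exact output may differ):

import time

# UTC time tuple, as produced by timefuncs.tmtuple(...) on each log line.
tm = time.gmtime(1070539200)                         # 2003-12-04 12:00:00 UTC

# Anchor appended to the page URL in rdf:about / rdf:resource: YYYYMMDDhhmmss.
anchor = "%04d%02d%02d%02d%02d%02d" % tm[:6]         # '20031204120000'

# dc:date value in W3C datetime form (what W3CDate is assumed to produce).
w3c_date = time.strftime("%Y-%m-%dT%H:%M:%SZ", tm)   # '2003-12-04T12:00:00Z'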
Exemplo n.º 9
0
def page_change_message(msgtype, request, page, lang, **kwargs):
    """Prepare a notification text for a page change of given type

    @param msgtype: a type of message to send (page_changed, page_renamed, ...)
    @type msgtype: str or unicode
    @param **kwargs: a dictionary of additional parameters, which depend on msgtype

    @return: dictionary containing data about the changed page
    @rtype: dict

    """
    _ = lambda text: request.getText(text, lang=lang)
    cfg = request.cfg
    data = {}
    data['revision'] = str(page.getRevList()[0])
    data['page_name'] = pagename = page.page_name
    sitename = page.cfg.sitename or request.url_root
    data['editor'] = editor = username = page.uid_override or user.getUserIdentification(request)

    trivial = (kwargs.get('trivial') and _("Trivial ")) or ""

    if msgtype == "page_changed":
        data['subject'] = _(cfg.mail_notify_page_changed_subject) % locals()
        data['text'] = _(cfg.mail_notify_page_changed_intro) % locals()

        revisions = kwargs['revisions']
        # append a diff (or append full page text if there is no diff)
        if len(revisions) < 2:
            data['diff'] = _("New page:\n") + page.get_raw_body()
        else:
            lines = wikiutil.pagediff(request, page.page_name, revisions[1],
                                      page.page_name, revisions[0])
            if lines:
                data['diff'] = '\n'.join(lines)
            else:
                data['diff'] = _("No differences found!\n")

    elif msgtype == "page_deleted":
        data['subject'] = _(cfg.mail_notify_page_deleted_subject) % locals()
        data['text'] = _(cfg.mail_notify_page_deleted_intro) % locals()

        revisions = kwargs['revisions']
        latest_existing = revisions[0]
        lines = wikiutil.pagediff(request, page.page_name, latest_existing,
                                  page.page_name, latest_existing + 1)
        if lines:
            data['diff'] = '\n'.join(lines)
        else:
            data['diff'] = _("No differences found!\n")

    elif msgtype == "page_renamed":
        data['old_name'] = oldname = kwargs['old_name']
        data['subject'] = _(cfg.mail_notify_page_renamed_subject) % locals()
        data['text'] = _(cfg.mail_notify_page_renamed_intro) % locals()
        data['diff'] = ''

    else:
        raise UnknownChangeType()

    if 'comment' in kwargs and kwargs['comment']:
        data['comment'] = kwargs['comment']

    return data
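
The dictionary returned by page_change_message is meant to be assembled into the actual notification by the caller. A minimal usage sketch, assuming a hypothetical caller (the msgtype, language and final transport shown here are illustrative assumptions, not part of the example above):

# Hypothetical caller: build a mail body from the returned dictionary.
data = page_change_message("page_changed", request, page, "en",
                           revisions=page.getRevList(),
                           comment="fixed a typo", trivial=False)

subject = data['subject']            # rendered from cfg.mail_notify_page_changed_subject
body = data['text'] + data['diff']
if 'comment' in data:
    body += "\n" + data['comment']
# subject and body would then be handed to the wiki's mail/notification layer.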
Exemplo n.º 10
0
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    if not wikixml.ok:
        request.mimetype = "text/plain"
        request.write("rss_rc action is not supported because of missing pyxml module.")
        return

    cfg = request.cfg
    _ = request.getText

    # get params
    def_max_items = max_items = cfg.rss_items_default
    items_limit = cfg.rss_items_limit
    unique = cfg.rss_unique
    diffs = cfg.rss_diffs
    ddiffs = cfg.rss_ddiffs
    max_lines = cfg.rss_lines_default
    lines_limit = cfg.rss_lines_limit
    show_att = cfg.rss_show_attachment_entries
    page_pattern = cfg.rss_page_filter_pattern

    try:
        max_items = min(int(request.values.get("items", max_items)), items_limit)
    except ValueError:
        pass
    try:
        unique = int(request.values.get("unique", unique))
    except ValueError:
        pass
    try:
        diffs = int(request.values.get("diffs", diffs))
    except ValueError:
        pass
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get("ddiffs", ddiffs))
    except ValueError:
        pass
    try:
        max_lines = min(int(request.values.get("lines", max_lines)), lines_limit)
    except ValueError:
        pass
    try:
        show_att = int(request.values.get("show_att", show_att))
    except ValueError:
        pass
    try:
        page_pattern = request.values.get("page", page_pattern)
    except ValueError:
        pass

    # get data
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        if (
            (not show_att and not line.action.startswith("SAVE"))
            or ((line.pagename in pages) and unique)
            or not match_page(line.pagename, page_pattern)
        ):
            continue
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(wikiutil.version2timestamp(line.ed_time_usecs))  # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique, max_lines, show_att)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
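    # Put differently: a 304 is sent only when every conditional header the
    # client supplied still validates (If-Modified-Since against `timestamp`,
    # If-None-Match against `etag`) and at least one of them was supplied.
    # If neither validates, the else branch below regenerates the feed; a
    # request where one header validates while the other is present but stale
    # falls through without a 304 and without a freshly generated body.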
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = "application/rss+xml"
        request.expires = expires
        request.last_modified = lastmod
        request.headers["Etag"] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        out = StringIO.StringIO()
        handler = RssGenerator(out)

        # start SAX stream
        handler.startDocument()
        handler._out.write(
            unicode(
                "<!--\n"
                '    Add an "items=nnn" URL parameter to get more than the \n'
                "    default %(def_max_items)d items. You cannot get more than \n"
                "    %(items_limit)d items though.\n"
                "    \n"
                '    Add "unique=1" to get a list of changes where page names are unique,\n'
                "    i.e. where only the latest change of each page is reflected.\n"
                "    \n"
                '    Add "diffs=1" to add change diffs to the description of each items.\n'
                "    \n"
                '    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
                "    \n"
                '    Add "lines=nnn" to change maximum number of diff/body lines \n'
                "    to show. Cannot be more than %(lines_limit)d.\n"
                "    \n"
                '    Add "show_att=1" to show items related to attachments.\n'
                "    \n"
                '    Add "page=pattern" to show feed only for specific pages.\n'
                "    Pattern can be empty (it would match to all pages), \n"
                "    can start with circumflex (it would be interpreted as \n"
                "    regular expression in this case), end with slash (for \n"
                "    getting feed for page tree) or point to specific page (if \n"
                "    none of the above can be applied).\n"
                "    \n"
                "    Current settings: items=%(max_items)i, unique=%(unique)i, \n"
                "    diffs=%(diffs)i, ddiffs=%(ddiffs)i, lines=%(max_lines)i, \n"
                "    show_att=%(show_att)i\n"
                "-->\n" % locals()
            ).encode(config.charset)
        )

        # emit channel description
        handler.startNode("channel", {(handler.xmlns["rdf"], "about"): request.url_root})
        handler.simpleNode("title", cfg.sitename)
        page = Page(request, pagename)
        handler.simpleNode("link", full_url(request, page))
        handler.simpleNode("description", u"RecentChanges at %s" % cfg.sitename)
        if logo:
            handler.simpleNode("image", None, {(handler.xmlns["rdf"], "resource"): logo})
        if cfg.interwikiname:
            handler.simpleNode(("wiki", "interwiki"), cfg.interwikiname)

        handler.startNode("items")
        handler.startNode(("rdf", "Seq"))
        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            link = full_url(request, page, anchor=anchor)
            handler.simpleNode(("rdf", "li"), None, attr={(handler.xmlns["rdf"], "resource"): link})
        handler.endNode(("rdf", "Seq"))
        handler.endNode("items")
        handler.endNode("channel")

        # emit logo data
        if logo:
            handler.startNode("image", attr={(handler.xmlns["rdf"], "about"): logo})
            handler.simpleNode("title", cfg.sitename)
            handler.simpleNode("link", baseurl)
            handler.simpleNode("url", logo)
            handler.endNode("image")

        # Mapping { oldname: curname } for maintaining page renames
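        # The edit log is walked newest-first, so when a SAVE/RENAME entry is
        # handled below, all *older* entries still carry the previous page
        # name; this mapping resolves them to the page's current name.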
        pagename_map = {}

        # emit items
        for item in logdata:
            if item.pagename in pagename_map:
                cur_pagename = pagename_map[item.pagename]
            else:
                cur_pagename = item.pagename
            page = Page(request, cur_pagename)
            action = item.action
            comment = item.comment
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            rdflink = full_url(request, page, anchor=anchor)
            handler.startNode("item", attr={(handler.xmlns["rdf"], "about"): rdflink})

            # general attributes
            handler.simpleNode("title", item.pagename)
            handler.simpleNode(("dc", "date"), timefuncs.W3CDate(item.time))

            show_diff = diffs

            if action.startswith("ATT"):  # Attachment
                show_diff = 0
                filename = wikiutil.url_unquote(item.extra)
                att_exists = AttachFile.exists(request, cur_pagename, filename)

                if action == "ATTNEW":
                    # Once the attachment is deleted this link becomes invalid,
                    # but we preserve it so that RSS readers do not treat the
                    # entry as new.
                    if ddiffs:
                        handler.simpleNode("link", attach_url(request, cur_pagename, filename, do="view"))

                    comment = _(u"Upload of attachment '%(filename)s'.") % {"filename": filename}

                elif action == "ATTDEL":
                    if ddiffs:
                        handler.simpleNode("link", full_url(request, page, querystr={"action": "AttachFile"}))

                    comment = _(u"Attachment '%(filename)s' deleted.") % {"filename": filename}

                elif action == "ATTDRW":
                    if ddiffs:
                        handler.simpleNode("link", attach_url(request, cur_pagename, filename, do="view"))

                    comment = _(u"Drawing '%(filename)s' saved.") % {"filename": filename}

            elif action.startswith("SAVE"):
                if action == "SAVE/REVERT":
                    to_rev = int(item.extra)
                    comment = (
                        (_(u"Revert to revision %(rev)d.") % {"rev": to_rev}) + "<br />" + _("Comment:") + " " + comment
                    )

                elif action == "SAVE/RENAME":
                    show_diff = 0
                    comment = (
                        (_(u"Renamed from '%(oldpagename)s'.") % {"oldpagename": item.extra})
                        + "<br />"
                        + _("Comment:")
                        + " "
                        + comment
                    )
                    if item.pagename in pagename_map:
                        newpage = pagename_map[item.pagename]
                        del pagename_map[item.pagename]
                        pagename_map[item.extra] = newpage
                    else:
                        pagename_map[item.extra] = item.pagename

                elif action == "SAVENEW":
                    comment = _(u"New page:\n") + comment

                item_rev = int(item.rev)

                # If we use diffs/ddiffs, we should calculate proper links and
                # content
                if ddiffs:
                    # first revision can't have older revisions to diff with
                    if item_rev == 1:
                        handler.simpleNode(
                            "link", full_url(request, page, querystr={"action": "recall", "rev": str(item_rev)})
                        )
                    else:
                        handler.simpleNode(
                            "link",
                            full_url(
                                request,
                                page,
                                querystr={"action": "diff", "rev1": str(item_rev), "rev2": str(item_rev - 1)},
                            ),
                        )

                if show_diff:
                    if item_rev == 1:
                        lines = Page(request, cur_pagename, rev=item_rev).getlines()
                    else:
                        lines = wikiutil.pagediff(
                            request, cur_pagename, item_rev - 1, cur_pagename, item_rev, ignorews=1
                        )

                    if len(lines) > max_lines:
                        lines = lines[:max_lines] + ["...\n"]

                    lines = "\n".join(lines)
                    lines = wikiutil.escape(lines)

                    comment = u"%s\n<pre>\n%s\n</pre>\n" % (comment, lines)

                if not ddiffs:
                    handler.simpleNode("link", full_url(request, page))

            if comment:
                handler.simpleNode("description", comment)

            # contributor
            if cfg.show_names:
                edattr = {}
                if cfg.show_hosts:
                    edattr[(handler.xmlns["wiki"], "host")] = item.hostname
                if item.editor[0] == "interwiki":
                    edname = "%s:%s" % item.editor[1]
                    ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
                else:  # 'ip'
                    edname = item.editor[1]
                    ##edattr[(None, 'link')] = link + "?action=info"

                # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
                # if you know how to do this right, please send us a patch

                handler.startNode(("dc", "contributor"))
                handler.startNode(("rdf", "Description"), attr=edattr)
                handler.simpleNode(("rdf", "value"), edname)
                handler.endNode(("rdf", "Description"))
                handler.endNode(("dc", "contributor"))

            # wiki extensions
            handler.simpleNode(("wiki", "version"), "%i" % (item.ed_time_usecs))
            handler.simpleNode(("wiki", "status"), ("deleted", "updated")[page.exists()])
            handler.simpleNode(("wiki", "diff"), full_url(request, page, querystr={"action": "diff"}))
            handler.simpleNode(("wiki", "history"), full_url(request, page, querystr={"action": "info"}))
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode("item")

        # end SAX stream
        handler.endDocument()

        request.write(out.getvalue())
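
The feed header comment above documents how the "page" parameter is matched against page names. A minimal sketch of a match_page helper with exactly those semantics (an assumption for illustration; the real helper used by this action may be implemented differently):

import re

def match_page(pagename, pattern):
    # Hypothetical helper, following only the documented semantics.
    if not pattern:                           # empty pattern: match every page
        return True
    if pattern.startswith('^'):               # circumflex: regular expression
        return re.match(pattern, pagename) is not None
    if pattern.endswith('/'):                 # trailing slash: whole page tree
        return pagename == pattern.rstrip('/') or pagename.startswith(pattern)
    return pagename == pattern                # otherwise: exact page name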
Exemplo n.º 11
0
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    cfg = request.cfg

    # get params
    items_limit = 100
    try:
        max_items = int(request.values['items'])
        max_items = min(max_items, items_limit)  # not more than `items_limit`
    except (KeyError, ValueError):
        # not more than 15 items in a RSS file by default
        max_items = 15
    try:
        unique = int(request.values.get('unique', 0))
    except ValueError:
        unique = 0
    try:
        diffs = int(request.values.get('diffs', 0))
    except ValueError:
        diffs = 0
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get('ddiffs', 0))
    except ValueError:
        ddiffs = 0

    urlfilter = request.values.get('filter')
    if urlfilter:
        urlfilter = re.compile(urlfilter)
    else:
        urlfilter = None

    # get data
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if urlfilter and not (urlfilter.match(line.pagename)):
            continue
        if not request.user.may.read(line.pagename):
            continue
        if (not line.action.startswith('SAVE')
                or ((line.pagename in pages) and unique)):
            continue
        #if log.dayChanged() and log.daycount > _MAX_DAYS: break
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(
            wikiutil.version2timestamp(line.ed_time_usecs))  # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = 'application/rss+xml'
        request.expires = expires
        request.last_modified = lastmod
        request.headers['Etag'] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        output = structencoder(indent=u"yes")

        FEED_HEADER_COMMENT = '''
<!--
    Add an "items=nnn" URL parameter to get more than the default 15 items.
    You cannot get more than %d items though.
    
    Add "unique=1" to get a list of changes where page names are unique,
    i.e. where only the latest change of each page is reflected.
    Add "diffs=1" to add change diffs to the description of each items.
    
    Add "ddiffs=1" to link directly to the diff (good for FeedReader).
    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i
-->
        ''' % (items_limit, max_items, unique, diffs, ddiffs)

        # Feed envelope
        page = Page(request, pagename)
        f = output.cofeed(
            ROOT(NS(u'', ATOM_NAMESPACE), NS(u'wiki', RSSWIKI_NAMESPACE),
                 E_CURSOR((ATOM_NAMESPACE, u'feed'), )))
        f.send(
            E((ATOM_NAMESPACE, u'id'),
              full_url(request, page).encode(config.charset))),
        f.send(
            E((ATOM_NAMESPACE, u'title'),
              cfg.sitename.encode(config.charset))),
        f.send(
            E((ATOM_NAMESPACE, u'link'),
              {u'href': request.url_root.encode(config.charset)})),
        f.send(
            E((ATOM_NAMESPACE, u'summary'),
              ('RecentChanges at %s' % cfg.sitename).encode(config.charset))),
        #Icon
        #E((ATOM_NAMESPACE, u'link'), {u'href': logo.encode(config.charset)}),

        #if cfg.interwikiname:
        #    handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            #link = full_url(request, page, anchor=anchor)
            if ddiffs:
                link = full_url(request, page, querystr={'action': 'diff'})
            else:
                link = full_url(request, page)

            # description
            desc_text = item.comment
            if diffs:
                # TODO: rewrite / extend wikiutil.pagediff
                # searching for the matching pages doesn't really belong here
                revisions = page.getRevList()

                rl = len(revisions)
                for idx in range(rl):
                    rev = revisions[idx]
                    if rev <= item.rev:
                        if idx + 1 < rl:
                            lines = wikiutil.pagediff(request,
                                                      item.pagename,
                                                      revisions[idx + 1],
                                                      item.pagename,
                                                      0,
                                                      ignorews=1)
                            if len(lines) > 20:
                                lines = lines[:20] + ['...\n']
                            lines = '\n'.join(lines)
                            lines = wikiutil.escape(lines)
                            desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text,
                                                                     lines)
                        break
            #if desc_text:
            #    handler.simpleNode('description', desc_text)

            # contributor
            edattr = {}
            #if cfg.show_hosts:
            #    edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
            if item.editor[0] == 'interwiki':
                edname = "%s:%s" % item.editor[1]
                ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
            else:  # 'ip'
                edname = item.editor[1]
                ##edattr[(None, 'link')] = link + "?action=info"

            history_link = full_url(request, page, querystr={'action': 'info'})

            f.send(
                E(
                    (ATOM_NAMESPACE, u'entry'),
                    E((ATOM_NAMESPACE, u'id'), link.encode(config.charset)),
                    E((ATOM_NAMESPACE, u'title'),
                      item.pagename.encode(config.charset)),
                    E((ATOM_NAMESPACE, u'updated'),
                      timefuncs.W3CDate(item.time).encode(config.charset)),
                    E((ATOM_NAMESPACE, u'link'),
                      {u'href': link.encode(config.charset)}),
                    E((ATOM_NAMESPACE, u'summary'),
                      desc_text.encode(config.charset)),
                    E((ATOM_NAMESPACE, u'author'),
                      E((ATOM_NAMESPACE, u'name'),
                        edname.encode(config.charset))),
                    #E((ATOM_NAMESPACE, u'title'), item.pagename.encode(config.charset)),
                    # wiki extensions
                    E((RSSWIKI_NAMESPACE, u'wiki:version'),
                      ("%i" % (item.ed_time_usecs)).encode(config.charset)),
                    E((RSSWIKI_NAMESPACE, u'wiki:status'),
                      (u'deleted', u'updated')[page.exists()]),
                    E((RSSWIKI_NAMESPACE, u'wiki:diff'),
                      link.encode(config.charset)),
                    E((RSSWIKI_NAMESPACE, u'wiki:history'),
                      history_link.encode(config.charset)),
                    # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
                    # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )
                ))

        # emit logo data
        #if logo:
        #    handler.startNode('image', attr={
        #        (handler.xmlns['rdf'], 'about'): logo,
        #        })
        #    handler.simpleNode('title', cfg.sitename)
        #    handler.simpleNode('link', baseurl)
        #    handler.simpleNode('url', logo)
        #    handler.endNode('image')

        f.close()
        request.write(output.read())
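
The Etag and Last-Modified handling in these actions exists so that feed readers can revalidate cheaply instead of re-downloading the feed. A sketch of the client side of that exchange, assuming a plain httplib client and a hypothetical wiki URL (none of this is part of the wiki code itself, and the header values shown are made up):

import httplib

# Hypothetical host, path and cached validators from a previous fetch.
conn = httplib.HTTPConnection("wiki.example.org")
conn.request("GET", "/FrontPage?action=rss_rc&unique=1",
             headers={"If-None-Match": "1070539200-15-0-0-1",
                      "If-Modified-Since": "Thu, 04 Dec 2003 12:00:00 GMT"})
resp = conn.getresponse()

if resp.status == 304:
    pass                                     # unchanged: reuse the cached feed
else:
    feed = resp.read()                       # fresh RSS document
    etag = resp.getheader("Etag")            # remember for the next poll
    last_modified = resp.getheader("Last-Modified")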
Exemplo n.º 12
0
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    if not wikixml.ok:
        request.mimetype = 'text/plain'
        request.write("rss_rc action is not supported because of missing pyxml module.")
        return
    if request.isSpiderAgent: # reduce bot cpu usage
        return ''

    cfg = request.cfg
    _ = request.getText

    # get params
    def_max_items = max_items = cfg.rss_items_default
    items_limit = cfg.rss_items_limit
    unique = cfg.rss_unique
    diffs = cfg.rss_diffs
    ddiffs = cfg.rss_ddiffs
    max_lines = cfg.rss_lines_default
    lines_limit = cfg.rss_lines_limit
    show_att = cfg.rss_show_attachment_entries
    page_pattern = cfg.rss_page_filter_pattern

    try:
        max_items = min(int(request.values.get('items', max_items)),
                        items_limit)
    except ValueError:
        pass
    try:
        unique = int(request.values.get('unique', unique))
    except ValueError:
        pass
    try:
        diffs = int(request.values.get('diffs', diffs))
    except ValueError:
        pass
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get('ddiffs', ddiffs))
    except ValueError:
        pass
    try:
        max_lines = min(int(request.values.get('lines', max_lines)),
                        lines_limit)
    except ValueError:
        pass
    try:
        show_att = int(request.values.get('show_att', show_att))
    except ValueError:
        pass
    try:
        page_pattern = request.values.get('page', page_pattern)
    except ValueError:
        pass

    # if we are just interested in a specific page, using the local edit-log
    # of that page is much faster than the global one - esp. if the page was
    # NOT recently changed and the global edit-log is rather big.
    kw = dict(rootpagename=page_pattern) if is_single_page_match(page_pattern) else {}
    log = editlog.EditLog(request, **kw)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        if ((not show_att and not line.action.startswith('SAVE')) or
            ((line.pagename in pages) and unique) or
            not match_page(line.pagename, page_pattern)):
            continue
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(wikiutil.version2timestamp(line.ed_time_usecs)) # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique,
        max_lines, show_att)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = 'application/rss+xml'
        request.expires = expires
        request.last_modified = lastmod
        request.headers['Etag'] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        out = StringIO.StringIO()
        handler = RssGenerator(out)

        # start SAX stream
        handler.startDocument()
        handler._write(
            u'<!--\n'
            u'    Add an "items=nnn" URL parameter to get more than the \n'
            u'    default %(def_max_items)d items. You cannot get more than \n'
            u'    %(items_limit)d items though.\n'
            u'    \n'
            u'    Add "unique=1" to get a list of changes where page names are unique,\n'
            u'    i.e. where only the latest change of each page is reflected.\n'
            u'    \n'
            u'    Add "diffs=1" to add change diffs to the description of each items.\n'
            u'    \n'
            u'    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
            u'    \n'
            u'    Add "lines=nnn" to change maximum number of diff/body lines \n'
            u'    to show. Cannot be more than %(lines_limit)d.\n'
            u'    \n'
            u'    Add "show_att=1" to show items related to attachments.\n'
            u'    \n'
            u'    Add "page=pattern" to show feed only for specific pages.\n'
            u'    Pattern can be empty (it would match to all pages), \n'
            u'    can start with circumflex (it would be interpreted as \n'
            u'    regular expression in this case), end with slash (for \n'
            u'    getting feed for page tree) or point to specific page (if \n'
            u'    none of the above can be applied).\n'
            u'    \n'
            u'    Current settings: items=%(max_items)i, unique=%(unique)i, \n'
            u'    diffs=%(diffs)i, ddiffs=%(ddiffs)i, lines=%(max_lines)i, \n'
            u'    show_att=%(show_att)i\n'
            u'-->\n' % locals()
            )

        # emit channel description
        handler.startNode('channel', {
            (handler.xmlns['rdf'], 'about'): request.url_root,
            })
        handler.simpleNode('title', cfg.sitename)
        page = Page(request, pagename)
        handler.simpleNode('link', full_url(request, page))
        handler.simpleNode('description', u'RecentChanges at %s' % cfg.sitename)
        if logo:
            handler.simpleNode('image', None, {
                (handler.xmlns['rdf'], 'resource'): logo,
                })
        if cfg.interwikiname:
            handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        handler.startNode('items')
        handler.startNode(('rdf', 'Seq'))
        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            link = full_url(request, page, anchor=anchor)
            handler.simpleNode(('rdf', 'li'), None, attr={(handler.xmlns['rdf'], 'resource'): link, })
        handler.endNode(('rdf', 'Seq'))
        handler.endNode('items')
        handler.endNode('channel')

        # emit logo data
        if logo:
            handler.startNode('image', attr={
                (handler.xmlns['rdf'], 'about'): logo,
                })
            handler.simpleNode('title', cfg.sitename)
            handler.simpleNode('link', baseurl)
            handler.simpleNode('url', logo)
            handler.endNode('image')

        # Mapping { oldname: curname } for maintaining page renames
        pagename_map = {}

        # emit items
        for item in logdata:
            if item.pagename in pagename_map:
                cur_pagename = pagename_map[item.pagename]
            else:
                cur_pagename = item.pagename
            page = Page(request, cur_pagename)
            action = item.action
            comment = item.comment
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            rdflink = full_url(request, page, anchor=anchor)
            handler.startNode('item', attr={(handler.xmlns['rdf'], 'about'): rdflink, })

            # general attributes
            handler.simpleNode('title', item.pagename)
            handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))

            show_diff = diffs

            if action.startswith('ATT'): # Attachment
                show_diff = 0
                filename = wikiutil.url_unquote(item.extra)
                att_exists = AttachFile.exists(request, cur_pagename, filename)

                if action == 'ATTNEW':
                    # Once the attachment is deleted this link becomes invalid,
                    # but we preserve it so that RSS readers do not treat the
                    # entry as new.
                    if ddiffs:
                        handler.simpleNode('link', attach_url(request,
                            cur_pagename, filename, do='view'))

                    comment = _(u"Upload of attachment '%(filename)s'.") % {
                        'filename': filename}

                elif action == 'ATTDEL':
                    if ddiffs:
                        handler.simpleNode('link', full_url(request, page,
                            querystr={'action': 'AttachFile'}))

                    comment = _(u"Attachment '%(filename)s' deleted.") % {
                        'filename': filename}

                elif action == 'ATTDRW':
                    if ddiffs:
                        handler.simpleNode('link', attach_url(request,
                            cur_pagename, filename, do='view'))

                    comment = _(u"Drawing '%(filename)s' saved.") % {
                        'filename': filename}

            elif action.startswith('SAVE'):
                if action == 'SAVE/REVERT':
                    to_rev = int(item.extra)
                    comment = (_(u"Revert to revision %(rev)d.") % {
                        'rev': to_rev}) + "<br />" \
                        + _("Comment:") + " " + comment

                elif action == 'SAVE/RENAME':
                    show_diff = 0
                    comment = (_(u"Renamed from '%(oldpagename)s'.") % {
                        'oldpagename': item.extra}) + "<br />" \
                        + _("Comment:") + " " + comment
                    if item.pagename in pagename_map:
                        newpage = pagename_map[item.pagename]
                        del pagename_map[item.pagename]
                        pagename_map[item.extra] = newpage
                    else:
                        pagename_map[item.extra] = item.pagename

                elif action == 'SAVENEW':
                    comment = _(u"New page:\n") + comment

                item_rev = int(item.rev)

                # If we use diffs/ddiffs, we should calculate proper links and
                # content
                if ddiffs:
                    # first revision can't have older revisions to diff with
                    if item_rev == 1:
                        handler.simpleNode('link', full_url(request, page,
                            querystr={'action': 'recall',
                                      'rev': str(item_rev)}))
                    else:
                        handler.simpleNode('link', full_url(request, page,
                            querystr={'action': 'diff',
                                      'rev1': str(item_rev),
                                      'rev2': str(item_rev - 1)}))

                if show_diff:
                    if item_rev == 1:
                        lines = Page(request, cur_pagename,
                            rev=item_rev).getlines()
                    else:
                        lines = wikiutil.pagediff(request, cur_pagename,
                            item_rev - 1, cur_pagename, item_rev, ignorews=1)

                    if len(lines) > max_lines:
                        lines = lines[:max_lines] + ['...\n']

                    lines = '\n'.join(lines)
                    lines = wikiutil.escape(lines)

                    comment = u'%s\n<pre>\n%s\n</pre>\n' % (comment, lines)

                if not ddiffs:
                    handler.simpleNode('link', full_url(request, page))

            if comment:
                handler.simpleNode('description', comment)

            # contributor
            if cfg.show_names:
                edattr = {}
                if cfg.show_hosts:
                    edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
                if item.editor[0] == 'interwiki':
                    edname = "%s:%s" % item.editor[1]
                    ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
                else: # 'ip'
                    edname = item.editor[1]
                    ##edattr[(None, 'link')] = link + "?action=info"

                # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
                # if you know how to do this right, please send us a patch

                handler.startNode(('dc', 'contributor'))
                handler.startNode(('rdf', 'Description'), attr=edattr)
                handler.simpleNode(('rdf', 'value'), edname)
                handler.endNode(('rdf', 'Description'))
                handler.endNode(('dc', 'contributor'))

            # wiki extensions
            handler.simpleNode(('wiki', 'version'), "%i" % (item.ed_time_usecs))
            handler.simpleNode(('wiki', 'status'), ('deleted', 'updated')[page.exists()])
            handler.simpleNode(('wiki', 'diff'), full_url(request, page, querystr={'action': 'diff'}))
            handler.simpleNode(('wiki', 'history'), full_url(request, page, querystr={'action': 'info'}))
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode('item')

        # end SAX stream
        handler.endDocument()

        request.write(out.getvalue())
def page_change_message(msgtype, request, page, lang, **kwargs):
    """Prepare a notification text for a page change of given type

    @param msgtype: a type of message to send (page_changed, page_renamed, ...)
    @type msgtype: str or unicode
    @param **kwargs: a dictionary of additional parameters, which depend on msgtype

    @return: dictionary containing data about the changed page
    @rtype: dict

    """
    _ = lambda text: request.getText(text, lang=lang)
    cfg = request.cfg
    data = {}
    data['revision'] = str(page.getRevList()[0])
    data['page_name'] = pagename = page.page_name
    sitename = page.cfg.sitename or request.url_root
    data['editor'] = editor = username = page.uid_override or user.getUserIdentification(request)

    trivial = (kwargs.get('trivial') and _("Trivial ")) or ""

    if msgtype == "page_changed":
        data['subject'] = _(cfg.mail_notify_page_changed_subject) % locals()
        data['text'] = _(cfg.mail_notify_page_changed_intro) % locals()

        revisions = kwargs['revisions']
        # append a diff (or append full page text if there is no diff)
        if len(revisions) < 2:
            data['diff'] = _("New page:\n") + page.get_raw_body()
        else:
            lines = wikiutil.pagediff(request, page.page_name, revisions[1],
                                      page.page_name, revisions[0])
            if lines:
                data['diff'] = '\n'.join(lines)
            else:
                data['diff'] = _("No differences found!\n")

    elif msgtype == "page_deleted":
        data['subject'] = _(cfg.mail_notify_page_deleted_subject) % locals()
        data['text'] = _(cfg.mail_notify_page_deleted_intro) % locals()

        revisions = kwargs['revisions']
        latest_existing = revisions[0]
        lines = wikiutil.pagediff(request, page.page_name, latest_existing,
                                  page.page_name, latest_existing + 1)
        if lines:
            data['diff'] = '\n'.join(lines)
        else:
            data['diff'] = _("No differences found!\n")

    elif msgtype == "page_renamed":
        data['old_name'] = oldname = kwargs['old_name']
        data['subject'] = _(cfg.mail_notify_page_renamed_subject) % locals()
        data['text'] = _(cfg.mail_notify_page_renamed_intro) % locals()
        data['diff'] = ''

    else:
        raise UnknownChangeType()

    if 'comment' in kwargs and kwargs['comment']:
        data['comment'] = kwargs['comment']

    return data