Example #1
def logs(request):
    """ generator for log objects """
    # global edit-log
    yield editlog.EditLog(request)
    # local edit-log of every page
    for pn in request.rootpage.getPageList(exists=False):
        yield editlog.EditLog(request, rootpagename=pn)
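A minimal consumption sketch (an addition for illustration, not project code): walking every entry of every log yielded by the generator above, assuming the standard MoinMoin LogFile API in which each EditLog supports reverse():

def all_entries(request):
    # hypothetical helper: yield every edit-log entry, newest first per log
    for log in logs(request):
        for line in log.reverse():
            yield line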
Example #2
File: AttachFile.py Project: aahlad/soar
def _addLogEntry(request, action, pagename, filename):
    """ Add an entry to the edit log on uploads and deletes.

        `action` should be "ATTNEW" or "ATTDEL"
    """
    from MoinMoin.logfile import editlog
    t = wikiutil.timestamp2version(time.time())
    fname = wikiutil.url_quote(filename)

    # Write to global log
    log = editlog.EditLog(request)
    log.add(request, t, 99999999, action, pagename, request.remote_addr, fname)

    # Write to local log
    log = editlog.EditLog(request, rootpagename=pagename)
    log.add(request, t, 99999999, action, pagename, request.remote_addr, fname)
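A hypothetical caller sketch for the helper above, e.g. from attachment upload and delete handlers; the action names come straight from the docstring:

def _after_upload(request, pagename, filename):
    # hypothetical: record a fresh upload in both edit logs
    _addLogEntry(request, 'ATTNEW', pagename, filename)

def _after_delete(request, pagename, filename):
    # hypothetical: record an attachment deletion in both edit logs
    _addLogEntry(request, 'ATTDEL', pagename, filename)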
Example #3
File: Despam.py Project: aahlad/soar
def revert_page(request, pagename, editor):
    if not request.user.may.revert(pagename):
        return

    log = editlog.EditLog(request, rootpagename=pagename)

    first = True
    rev = u"00000000"
    for line in log.reverse():
        if first:
            first = False
            if repr(line.getInterwikiEditorData(request)) != editor:
                return
        else:
            if repr(line.getInterwikiEditorData(request)) != editor:
                rev = line.rev
                break

    if rev == u"00000000": # page created by spammer
        comment = u"Page deleted by Despam action"
        pg = PageEditor.PageEditor(request, pagename, do_editor_backup=0)
        try:
            savemsg = pg.deletePage(comment)
        except pg.SaveError, msg:
            savemsg = unicode(msg)
Example #4
File: Despam.py Project: aahlad/soar
def revert_pages(request, editor, timestamp):
    _ = request.getText

    editor = wikiutil.url_unquote(editor)
    timestamp = int(timestamp * 1000000)
    log = editlog.EditLog(request)
    pages = {}
    revertpages = []
    for line in log.reverse():
        if line.ed_time_usecs < timestamp:
            break

        if not request.user.may.read(line.pagename):
            continue

        if line.pagename not in pages:
            pages[line.pagename] = 1
            if repr(line.getInterwikiEditorData(request)) == editor:
                revertpages.append(line.pagename)

    request.write("Pages to revert:<br>%s" % "<br>".join([wikiutil.escape(p) for p in revertpages]))
    for pagename in revertpages:
        request.write("Begin reverting %s ...<br>" % wikiutil.escape(pagename))
        msg = revert_page(request, pagename, editor)
        if msg:
            request.write("<p>%s: %s</p>" % (
                Page.Page(request, pagename).link_to(request), msg))
        request.write("Finished reverting %s.<br>" % wikiutil.escape(pagename))
Example #5
def addLogEntry(request, action, pagename, msg):
    # Add an entry to the edit log on adding comments.
    from MoinMoin.logfile import editlog
    t = wikiutil.timestamp2version(time.time())
    msg = unicode(msg)

    pg = Page(request, pagename)
    #rev = pg.current_rev()
    rev = 99999999

    # TODO: for now we simply write 2 logs, maybe better use some multilog stuff
    # Write to global log
    log = editlog.EditLog(request)
    log.add(request, t, rev, action, pagename, request.remote_addr, '', msg)

    # Write to local log
    log = editlog.EditLog(request, rootpagename=pagename)
    log.add(request, t, rev, action, pagename, request.remote_addr, '', msg)
Example #6
def latest_edit(request):
    log = editlog.EditLog(request)
    entry = ''

    # reverse() yields the newest entry first, so one iteration is enough
    for x in log.reverse():
        entry = x
        break

    return entry.ed_time_usecs
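The function above assumes at least one log entry exists; on an empty edit log, entry stays '' and the attribute access raises AttributeError. A defensive variant (a sketch, not MoinMoin code):

def latest_edit_safe(request):
    # hypothetical variant: return 0 instead of failing on an empty log
    log = editlog.EditLog(request)
    for entry in log.reverse():
        return entry.ed_time_usecs
    return 0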
Example #7
    def xmlrpc_getRecentChanges(self, date):
        """
        Get RecentChanges since date

        @param date: date since when rc will be listed
        @rtype: list
        @return: a list of changed pages since date, which should be in
            UTC. The result is a list, where each element is a struct:
            * name (string) :
                Name of the page. The name is in UTF-8.
            * lastModified (date) :
                Date of last modification, in UTC.
            * author (string) :
                Name of the author (if available). UTF-8.
            * version (int) :
                Current version.
        """

        return_items = []

        edit_log = editlog.EditLog(self.request)
        for log in edit_log.reverse():
            # get last-modified UTC (DateTime) from log
            gmtuple = tuple(
                time.gmtime(wikiutil.version2timestamp(log.ed_time_usecs)))
            lastModified_date = xmlrpclib.DateTime(gmtuple)

            # skip if older than "date"
            if lastModified_date < date:
                break

            # skip if knowledge not permitted
            if not self.request.user.may.read(log.pagename):
                continue

            # get page name (str) from log
            pagename_str = self._outstr(log.pagename)

            # get user name (str) from log
            author_str = log.hostname
            if log.userid:
                userdata = user.User(self.request, log.userid)
                if userdata.name:
                    author_str = userdata.name
            author_str = self._outstr(author_str)

            return_item = {
                'name': pagename_str,
                'lastModified': lastModified_date,
                'author': author_str,
                'version': int(log.rev)
            }
            return_items.append(return_item)

        return return_items
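A client-side sketch for the method above, assuming the wiki exposes the usual MoinMoin XML-RPC v2 endpoint (the URL and port are assumptions):

import xmlrpclib

# hypothetical endpoint URL; adjust to the target wiki
srv = xmlrpclib.ServerProxy("http://localhost:8080/?action=xmlrpc2")
changes = srv.getRecentChanges(xmlrpclib.DateTime("20240101T00:00:00"))
for change in changes:
    print change['name'], change['version'], change['author']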
Example #8
def __getattr__(self, name):
    if name == 'editlog':
        if "editlog" not in self.__dict__:
            from MoinMoin.logfile import editlog
            self.request.rootpage = self.rootpage
            self.editlog = editlog.EditLog(self.request)
        return self.editlog
    try:
        return getattr(self.request, name)
    except AttributeError, e:
        return super(HTTPContext, self).__getattribute__(name)
Example #9
def edit_logfile_append(self,
                        pagename,
                        pagefile,
                        rev,
                        action,
                        logname='edit-log',
                        comment=u'',
                        author=u"Scripting Subsystem"):
    glog = editlog.EditLog(self.request, uid_override=author)
    pagelog = Page(self.request, pagename).getPagePath(logname,
                                                       use_underlay=0,
                                                       isfile=1)
    llog = editlog.EditLog(self.request, filename=pagelog, uid_override=author)
    mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
    host = '::1'
    extra = u''
    glog.add(self.request, mtime_usecs, rev, action, pagename, host, comment)
    llog.add(self.request, mtime_usecs, rev, action, pagename, host, extra,
             comment)
    event_logfile(self, pagename, pagefile)
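A hypothetical invocation of the method above, where installer stands for the MoinMoin package/script object this method is bound to (an assumption) and pagefile points at an existing revision file on disk:

# hypothetical: log a scripted page creation in both the global and the page-local log
installer.edit_logfile_append(u'SomePage', pagefile, 1, 'SAVENEW',
                              comment=u'Created by a package script')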
Example #10
def execute(pagename, request):
    log = editlog.EditLog(request)
    try:
        lastmod = wikiutil.version2timestamp(log.date())
    except:
        lastmod = 0

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d" % lastmod

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using a 1-day cache lifetime for the sisterpages list
        expires = time.time() + 24 * 3600

        request.mimetype = 'text/plain'
        request.expires = expires
        request.last_modified = timestamp
        request.headers['Etag'] = etag

        # send the generated XML document
        # Get list of user readable pages
        pages = request.rootpage.getPageList()
        pages.sort()
        for pn in pages:
            p = Page(request, pn)
            entry = u"%s %s\r\n" % (request.getQualifiedURL(
                p.url(request)), p.page_name)
            request.write(entry.encode('utf-8'))
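The 304 branching above answers "not modified" only when every conditional header the client actually sent matches. A compact equivalent predicate (a hypothetical refactor, not MoinMoin API):

def is_not_modified(request, timestamp, etag):
    # hypothetical: True iff at least one conditional header is present
    # and every header that is present matches
    ims, inm = request.if_modified_since, request.if_none_match
    if not ims and not inm:
        return False
    return (not ims or ims == timestamp) and (not inm or inm == etag)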
Example #11
File: Despam.py Project: aahlad/soar
def show_pages(request, pagename, editor, timestamp):
    _ = request.getText

    timestamp = int(timestamp * 1000000)
    log = editlog.EditLog(request)
    pages = {}
    # mimic a macro object for use by RecentChanges subfunctions
    macro = tmp()
    macro.request = request
    macro.formatter = request.html_formatter

    request.write("<table>")
    for line in log.reverse():
        if line.ed_time_usecs < timestamp:
            break

        if not request.user.may.read(line.pagename):
            continue

        if line.pagename not in pages:
            pages[line.pagename] = 1
            if repr(line.getInterwikiEditorData(request)) == editor:
                line.time_tuple = request.user.getTime(wikiutil.version2timestamp(line.ed_time_usecs))
                request.write(RecentChanges.format_page_edits(macro, [line], timestamp))

    request.write('''
</table>
<p>
<form method="post" action="%(url)s">
<input type="hidden" name="action" value="Despam">
<input type="hidden" name="ticket" value="%(ticket)s">
<input type="hidden" name="editor" value="%(editor)s">
<input type="submit" name="ok" value="%(label)s">
</form>
</p>
''' % dict(
        url=request.href(pagename),
        ticket=wikiutil.createTicket(request),
        editor=wikiutil.url_quote(editor),
        label=_("Revert all!"),
    ))
Example #12
    def _indexingRequest(self, request):
        """ Return a new request that can be used for index building.

        This request uses a security policy that lets the current user
        read any page. Without this policy some pages will not render,
        which would create a broken pagelinks index.

        @param request: current request
        """
        import copy
        from MoinMoin.security import Permissions
        from MoinMoin.logfile import editlog

        class SecurityPolicy(Permissions):
            def read(self, *args, **kw):
                return True

        r = copy.copy(request)
        r.user.may = SecurityPolicy(r.user)
        r.editlog = editlog.EditLog(r)
        return r
Example #13
File: Despam.py Project: aahlad/soar
def show_editors(request, pagename, timestamp):
    _ = request.getText

    timestamp = int(timestamp * 1000000)
    log = editlog.EditLog(request)
    editors = {}
    pages = {}
    for line in log.reverse():
        if line.ed_time_usecs < timestamp:
            break

        if not request.user.may.read(line.pagename):
            continue

        editor = line.getInterwikiEditorData(request)
        if line.pagename not in pages:
            pages[line.pagename] = 1
            editors[editor] = editors.get(editor, 0) + 1

    editors = [(nr, editor) for editor, nr in editors.iteritems()]
    editors.sort()
    editors.reverse()

    pg = Page.Page(request, pagename)

    dataset = TupleDataset()
    dataset.columns = [Column('editor', label=_("Editor"), align='left'),
                       Column('pages', label=_("Pages"), align='right'),
                       Column('link', label='', align='left')]
    for nr, editor in editors:
        dataset.addRow((render(editor), unicode(nr),
            pg.link_to(request, text=_("Select Author"),
                querystr={
                    'action': 'Despam',
                    'editor': repr(editor),
                })))

    table = DataBrowserWidget(request)
    table.setData(dataset)
    return table.render(method="GET")
Example #14
    def history(page, pagename, request):
        # show history as default
        _ = request.getText
        default_count, limit_max_count = request.cfg.history_count[0:2]
        paging = request.cfg.history_paging

        try:
            max_count = int(request.values.get('max_count', default_count))
        except ValueError:
            max_count = default_count
        max_count = max(1, min(max_count, limit_max_count))

        # read in the complete log of this page
        log = editlog.EditLog(request, rootpagename=pagename)

        offset = 0
        paging_info_html = ""
        paging_nav_html = ""
        count_select_html = ""

        f = request.formatter

        if paging:
            log_size = log.lines()

            try:
                offset = int(request.values.get('offset', 0))
            except ValueError:
                offset = 0
            offset = max(min(offset, log_size - 1), 0)

            paging_info_html += f.paragraph(1, css_class="searchstats info-paging-info") + _("Showing page edit history entries from '''%(start_offset)d''' to '''%(end_offset)d''' out of '''%(total_count)d''' entries total.", wiki=True) % {
                'start_offset': log_size - min(log_size, offset + max_count) + 1,
                'end_offset': log_size - offset,
                'total_count': log_size,
            } + f.paragraph(0)

            # generating offset navigating links
            if max_count < log_size or offset != 0:
                offset_links = []
                cur_offset = max_count
                near_count = 5 # request.cfg.pagination_size

                min_offset = max(0, (offset + max_count - 1) / max_count - near_count)
                max_offset = min((log_size - 1) / max_count, offset / max_count + near_count)
                offset_added = False

                def add_offset_link(offset, caption=None):
                    offset_links.append(f.table_cell(1, css_class="info-offset-item") +
                        page.link_to(request, on=1, querystr={
                            'action': 'info',
                            'offset': str(offset),
                            'max_count': str(max_count),
                            }, css_class="info-offset-nav-link", rel="nofollow") + f.text(caption or str(log_size - offset)) + page.link_to(request, on=0) +
                        f.table_cell(0)
                    )

                # link to previous page - only if not at start
                if offset > 0:
                    add_offset_link(((offset - 1) / max_count) * max_count, _("Newer"))

                # link to beginning of event log - if min_offset is not minimal
                if min_offset > 0:
                    add_offset_link(0)
                    # adding gap only if min_offset not explicitly following beginning
                    if min_offset > 1:
                        offset_links.append(f.table_cell(1, css_class="info-offset-gap") + f.text(u'\u2026') + f.table_cell(0))

                # generating near pages links
                for cur_offset in range(min_offset, max_offset + 1):
                    # note that the current offset may not be a multiple of max_count,
                    # so we check whether we should add the current offset marker like this
                    if not offset_added and offset <= cur_offset * max_count:
                        # current info history view offset
                        offset_links.append(f.table_cell(1, css_class="info-offset-item info-cur-offset") + f.text(str(log_size - offset)) + f.table_cell(0))
                        offset_added = True

                    # add link, if not at this offset
                    if offset != cur_offset * max_count:
                        add_offset_link(cur_offset * max_count)

                # link to the last page of event log
                if max_offset < (log_size - 1) / max_count:
                    if max_offset < (log_size - 1) / max_count - 1:
                        offset_links.append(f.table_cell(1, css_class="info-offset-gap") + f.text(u'\u2026') + f.table_cell(0))
                    add_offset_link(((log_size - 1) / max_count) * max_count)

                # special case - if offset is greater than max_offset * max_count
                if offset > max_offset * max_count:
                    offset_links.append(f.table_cell(1, css_class="info-offset-item info-cur-offset") + f.text(str(log_size - offset)) + f.table_cell(0))

                # link to next page
                if offset < (log_size - max_count):
                    add_offset_link(((offset + max_count) / max_count) * max_count, _("Older"))

                # generating html
                paging_nav_html += "".join([
                    f.table(1, css_class="searchpages"),
                    f.table_row(1),
                    "".join(offset_links),
                    f.table_row(0),
                    f.table(0),
                ])

        # generating max_count switcher
        # we do it only in case history_count has additional values
        if len(request.cfg.history_count) > 2:
            max_count_possibilities = list(set(request.cfg.history_count))
            max_count_possibilities.sort()
            max_count_html = []
            cur_count_added = False


            for count in max_count_possibilities:
                # the max count value may not be in the list of predefined values
                if max_count <= count and not cur_count_added:
                    max_count_html.append("".join([
                        f.span(1, css_class="info-count-item info-cur-count"),
                        f.text(str(max_count)),
                        f.span(0),
                    ]))
                    cur_count_added = True

                # checking for limit_max_count to prevent showing unavailable options
                if max_count != count and count <= limit_max_count:
                    max_count_html.append("".join([
                        f.span(1, css_class="info-count-item"),
                        page.link_to(request, on=1, querystr={
                            'action': 'info',
                            'offset': str(offset),
                            'max_count': str(count),
                            }, css_class="info-count-link", rel="nofollow"),
                        f.text(str(count)),
                        page.link_to(request, on=0),
                        f.span(0),
                    ]))

            count_select_html += "".join([
                f.span(1, css_class="info-count-selector"),
                    f.text(" ("),
                    f.text(_("%s items per page")) % (f.span(1, css_class="info-count-selector info-count-selector-divider") + f.text(" | ") + f.span(0)).join(max_count_html),
                    f.text(")"),
                f.span(0),
            ])

        # open log for this page
        from MoinMoin.util.dataset import TupleDataset, Column

        history = TupleDataset()
        history.columns = [
            Column('rev', label='#', align='right'),
            Column('mtime', label=_('Date'), align='right'),
            Column('size', label=_('Size'), align='right'),
            Column('diff', label='<input type="submit" value="%s">' % (_("Diff"))),
            Column('editor', label=_('Editor'), hidden=not request.cfg.show_names),
            Column('comment', label=_('Comment')),
            Column('action', label=_('Action')),
            ]

        # generate history list

        def render_action(text, query, **kw):
            kw.update(dict(rel='nofollow'))
            return page.link_to(request, text, querystr=query, **kw)

        def render_file_action(text, pagename, filename, request, do):
            url = AttachFile.getAttachUrl(pagename, filename, request, do=do)
            if url:
                f = request.formatter
                link = f.url(1, url) + f.text(text) + f.url(0)
                return link

        may_write = request.user.may.write(pagename)
        may_delete = request.user.may.delete(pagename)

        count = 0
        pgactioncount = 0
        for line in log.reverse():
            count += 1

            if paging and count <= offset:
                continue

            rev = int(line.rev)
            actions = []
            if line.action in ('SAVE', 'SAVENEW', 'SAVE/REVERT', 'SAVE/RENAME', ):
                size = page.size(rev=rev)
                actions.append(render_action(_('view'), {'action': 'recall', 'rev': '%d' % rev}))
                if pgactioncount == 0:
                    rchecked = ' checked="checked"'
                    lchecked = ''
                elif pgactioncount == 1:
                    lchecked = ' checked="checked"'
                    rchecked = ''
                else:
                    lchecked = rchecked = ''
                diff = '<input type="radio" name="rev1" value="%d"%s><input type="radio" name="rev2" value="%d"%s>' % (rev, lchecked, rev, rchecked)
                if rev > 1:
                    diff += render_action(' ' + _('to previous'), {'action': 'diff', 'rev1': rev-1, 'rev2': rev})
                comment = line.comment
                if not comment:
                    if '/REVERT' in line.action:
                        comment = _("Revert to revision %(rev)d.") % {'rev': int(line.extra)}
                    elif '/RENAME' in line.action:
                        comment = _("Renamed from '%(oldpagename)s'.") % {'oldpagename': line.extra}
                pgactioncount += 1
            else: # ATT*
                rev = '-'
                diff = '-'

                filename = wikiutil.url_unquote(line.extra)
                comment = "%s: %s %s" % (line.action, filename, line.comment)
                if AttachFile.exists(request, pagename, filename):
                    size = AttachFile.size(request, pagename, filename)
                    actions.append(render_file_action(_('view'), pagename, filename, request, do='view'))
                    actions.append(render_file_action(_('get'), pagename, filename, request, do='get'))
                    if may_delete:
                        actions.append(render_file_action(_('del'), pagename, filename, request, do='del'))
                    if may_write:
                        actions.append(render_file_action(_('edit'), pagename, filename, request, do='modify'))
                else:
                    size = 0

            history.addRow((
                rev,
                request.user.getFormattedDateTime(wikiutil.version2timestamp(line.ed_time_usecs)),
                str(size),
                diff,
                line.getEditor(request) or _("N/A"),
                wikiutil.escape(comment) or '&nbsp;',
                "&nbsp;".join(a for a in actions if a),
            ))
            if (count >= max_count + offset) or (paging and count >= log_size):
                break

        # print version history
        from MoinMoin.widget.browser import DataBrowserWidget

        request.write(unicode(html.H2().append(_('Revision History'))))

        if not count: # there was no entry in logfile
            request.write(_('No log entries found.'))
            return

        history_table = DataBrowserWidget(request)
        history_table.setData(history)

        div = html.DIV(id="page-history")
        div.append(html.INPUT(type="hidden", name="action", value="diff"))
        div.append(history_table.render(method="GET"))

        form = html.FORM(method="GET", action="")
        if paging:
            form.append(f.div(1, css_class="info-paging-info") + paging_info_html + count_select_html + f.div(0))
            form.append("".join([
                f.div(1, css_class="info-paging-nav info-paging-nav-top"),
                paging_nav_html,
                f.div(0),
            ]))
        form.append(div)
        if paging:
            form.append("".join([
                f.div(1, css_class="info-paging-nav info-paging-nav-bottom"),
                paging_nav_html,
                f.div(0)
            ]))
        request.write(unicode(form))
Example #15
File: rss_rc.py Project: aahlad/soar
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    if not wikixml.ok:
        request.mimetype = 'text/plain'
        request.write(
            "rss_rc action is not supported because of missing pyxml module.")
        return

    cfg = request.cfg

    # get params
    items_limit = 100
    try:
        max_items = int(request.values['items'])
        max_items = min(max_items, items_limit)  # not more than `items_limit`
    except (KeyError, ValueError):
        # not more than 15 items in an RSS file by default
        max_items = 15
    try:
        unique = int(request.values.get('unique', 0))
    except ValueError:
        unique = 0
    try:
        diffs = int(request.values.get('diffs', 0))
    except ValueError:
        diffs = 0
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get('ddiffs', 0))
    except ValueError:
        ddiffs = 0

    # get data
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        if (not line.action.startswith('SAVE')
                or ((line.pagename in pages) and unique)):
            continue
        #if log.dayChanged() and log.daycount > _MAX_DAYS: break
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(
            wikiutil.version2timestamp(line.ed_time_usecs))  # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = 'application/rss+xml'
        request.expires = expires
        request.last_modified = lastmod
        request.headers['Etag'] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        out = StringIO.StringIO()
        handler = RssGenerator(out)

        # start SAX stream
        handler.startDocument()
        handler._out.write(
            '<!--\n'
            '    Add an "items=nnn" URL parameter to get more than the default 15 items.\n'
            '    You cannot get more than %d items though.\n'
            '    \n'
            '    Add "unique=1" to get a list of changes where page names are unique,\n'
            '    i.e. where only the latest change of each page is reflected.\n'
            '    \n'
            '    Add "diffs=1" to add change diffs to the description of each items.\n'
            '    \n'
            '    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
            '    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i'
            '-->\n' % (items_limit, max_items, unique, diffs, ddiffs))

        # emit channel description
        handler.startNode('channel', {
            (handler.xmlns['rdf'], 'about'): request.url_root,
        })
        handler.simpleNode('title', cfg.sitename)
        page = Page(request, pagename)
        handler.simpleNode('link', full_url(request, page))
        handler.simpleNode('description', 'RecentChanges at %s' % cfg.sitename)
        if logo:
            handler.simpleNode('image', None, {
                (handler.xmlns['rdf'], 'resource'): logo,
            })
        if cfg.interwikiname:
            handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        handler.startNode('items')
        handler.startNode(('rdf', 'Seq'))
        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            link = full_url(request, page, anchor=anchor)
            handler.simpleNode(('rdf', 'li'),
                               None,
                               attr={
                                   (handler.xmlns['rdf'], 'resource'): link,
                               })
        handler.endNode(('rdf', 'Seq'))
        handler.endNode('items')
        handler.endNode('channel')

        # emit logo data
        if logo:
            handler.startNode('image',
                              attr={
                                  (handler.xmlns['rdf'], 'about'): logo,
                              })
            handler.simpleNode('title', cfg.sitename)
            handler.simpleNode('link', baseurl)
            handler.simpleNode('url', logo)
            handler.endNode('image')

        # emit items
        for item in logdata:
            page = Page(request, item.pagename)
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            rdflink = full_url(request, page, anchor=anchor)
            handler.startNode('item',
                              attr={
                                  (handler.xmlns['rdf'], 'about'): rdflink,
                              })

            # general attributes
            handler.simpleNode('title', item.pagename)
            if ddiffs:
                handler.simpleNode(
                    'link', full_url(request,
                                     page,
                                     querystr={'action': 'diff'}))
            else:
                handler.simpleNode('link', full_url(request, page))

            handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))

            # description
            desc_text = item.comment
            if diffs:
                # TODO: rewrite / extend wikiutil.pagediff
                # searching for the matching pages doesn't really belong here
                revisions = page.getRevList()

                rl = len(revisions)
                for idx in range(rl):
                    rev = revisions[idx]
                    if rev <= item.rev:
                        if idx + 1 < rl:
                            lines = wikiutil.pagediff(request,
                                                      item.pagename,
                                                      revisions[idx + 1],
                                                      item.pagename,
                                                      0,
                                                      ignorews=1)
                            if len(lines) > 20:
                                lines = lines[:20] + ['...\n']
                            lines = '\n'.join(lines)
                            lines = wikiutil.escape(lines)
                            desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text,
                                                                     lines)
                        break
            if desc_text:
                handler.simpleNode('description', desc_text)

            # contributor
            edattr = {}
            if cfg.show_hosts:
                edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
            if item.editor[0] == 'interwiki':
                edname = "%s:%s" % item.editor[1]
                ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
            else:  # 'ip'
                edname = item.editor[1]
                ##edattr[(None, 'link')] = link + "?action=info"

            # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
            # if you know how to do this right, please send us a patch

            handler.startNode(('dc', 'contributor'))
            handler.startNode(('rdf', 'Description'), attr=edattr)
            handler.simpleNode(('rdf', 'value'), edname)
            handler.endNode(('rdf', 'Description'))
            handler.endNode(('dc', 'contributor'))

            # wiki extensions
            handler.simpleNode(('wiki', 'version'),
                               "%i" % (item.ed_time_usecs))
            handler.simpleNode(('wiki', 'status'),
                               ('deleted', 'updated')[page.exists()])
            handler.simpleNode(('wiki', 'diff'),
                               full_url(request,
                                        page,
                                        querystr={'action': 'diff'}))
            handler.simpleNode(('wiki', 'history'),
                               full_url(request,
                                        page,
                                        querystr={'action': 'info'}))
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode('item')

        # end SAX stream
        handler.endDocument()

        request.write(out.getvalue())
Example #16
File: diff.py Project: aahlad/soar
def execute(pagename, request):
    """ Handle "action=diff"
        checking for either a "rev=formerrevision" parameter
        or rev1 and rev2 parameters
    """
    if not request.user.may.read(pagename):
        Page(request, pagename).send_page()
        return

    try:
        date = request.values['date']
        try:
            date = long(date)  # must be long for py 2.2.x
        except StandardError:
            date = 0
    except KeyError:
        date = 0

    try:
        rev1 = int(request.values.get('rev1', -1))
    except StandardError:
        rev1 = 0
    try:
        rev2 = int(request.values.get('rev2', 0))
    except StandardError:
        rev2 = 0

    if rev1 == -1 and rev2 == 0:
        rev1 = request.rev
        if rev1 is None:
            rev1 = -1

    # spacing flag?
    ignorews = int(request.values.get('ignorews', 0))

    _ = request.getText

    # get a list of old revisions, and back out if none are available
    currentpage = Page(request, pagename)
    currentrev = currentpage.current_rev()
    if currentrev < 2:
        request.theme.add_msg(_("No older revisions available!"), "error")
        currentpage.send_page()
        return

    if date:  # this is how we get called from RecentChanges
        rev1 = 0
        log = editlog.EditLog(request, rootpagename=pagename)
        for line in log.reverse():
            if date >= line.ed_time_usecs and int(line.rev) != 99999999:
                rev1 = int(line.rev)
                break
        else:
            rev1 = 1
        rev2 = 0

    if (rev1 > 0 and rev2 > 0 and rev1 > rev2) or (rev1 == 0 and rev2 > 0):
        rev1, rev2 = rev2, rev1

    if rev1 == -1:
        oldrev = currentrev - 1
        oldpage = Page(request, pagename, rev=oldrev)
    elif rev1 == 0:
        oldrev = currentrev
        oldpage = currentpage
    else:
        oldrev = rev1
        oldpage = Page(request, pagename, rev=oldrev)

    if rev2 == 0:
        newrev = currentrev
        newpage = currentpage
    else:
        newrev = rev2
        newpage = Page(request, pagename, rev=newrev)

    oldlog = oldpage.editlog_entry()
    newlog = newpage.editlog_entry()

    if not oldlog or not newlog:
        # We use "No log entries found." msg because we already have i18n
        # for that. Better would "At least one log entry was not found.".
        request.theme.add_msg(_("No log entries found."), "error")
        currentpage.send_page()
        return

    edit_count = abs(newrev - oldrev)

    # Start output
    # This action generates content in the user language
    request.setContentLanguage(request.lang)

    request.theme.send_title(_('Diff for "%s"') % (pagename, ),
                             pagename=pagename,
                             allow_doubleclick=1)

    f = request.formatter
    request.write(f.div(1, id="content"))

    oldrev = oldpage.get_real_rev()
    newrev = newpage.get_real_rev()

    title = _('Differences between revisions %d and %d') % (oldrev, newrev)
    if edit_count > 1:
        title += ' ' + _('(spanning %d versions)') % (edit_count, )
    title = f.text(title)

    page_url = wikiutil.escape(currentpage.url(request), True)

    def enabled(val):
        return not val and u' disabled="disabled"' or u''

    revert_html = ""
    if request.user.may.revert(pagename):
        revert_html = """
  <form action="%s" method="get">
   <div style="text-align:center">
    <input name="action" value="revert" type="hidden">
    <input name="rev" value="%d" type="hidden">
    <input value="%s" type="submit"%s>
   </div>
  </form>
 """ % (page_url, rev2, _("Revert to this revision"),
        enabled(newrev < currentrev))

    other_diff_button_html = """
 <td style="border:0;">
  <form action="%s" method="get">
   <div style="text-align:%s">
    <input name="action" value="diff" type="hidden">
    <input name="rev1" value="%d" type="hidden">
    <input name="rev2" value="%d" type="hidden">
    <input value="%s" type="submit"%s>
   </div>
  </form>
 </td>
"""

    navigation_html = """
<span class="diff-header">%%s</span>
<table class="diff">
<tr>
 %(button)s
 <td style="border:0">
   %%s
 </td>
 %(button)s
</tr>
</table>
""" % {
        'button': other_diff_button_html
    }

    prev_oldrev = (oldrev > 1) and (oldrev - 1) or 1
    next_oldrev = (oldrev < currentrev) and (oldrev + 1) or currentrev

    prev_newrev = (newrev > 1) and (newrev - 1) or 1
    next_newrev = (newrev < currentrev) and (newrev + 1) or currentrev

    navigation_html = navigation_html % (
        title,
        page_url,
        "left",
        prev_oldrev,
        oldrev,
        _("Previous change"),
        enabled(oldrev > 1),
        revert_html,
        page_url,
        "right",
        newrev,
        next_newrev,
        _("Next change"),
        enabled(newrev < currentrev),
    )

    request.write(f.rawHTML(navigation_html))

    def rev_nav_link(enabled, old_rev, new_rev, caption, css_classes,
                     enabled_title, disabled_title):
        if enabled:
            return currentpage.link_to(
                request,
                on=1,
                querystr={
                    'action': 'diff',
                    'rev1': old_rev,
                    'rev2': new_rev,
                },
                css_class="diff-nav-link %s" % css_classes,
                title=enabled_title) + request.formatter.text(
                    caption) + currentpage.link_to(request, on=0)
        else:
            return '<span class="diff-no-nav-link %(css_classes)s" title="%(disabled_title)s">%(caption)s</span>' % {
                'css_classes': css_classes,
                'disabled_title': disabled_title,
                'caption': caption,
            }

    rev_info_html = """
  <div class="diff-info diff-info-header">%%(rev_first_link)s %%(rev_prev_link)s %(rev_header)s %%(rev_next_link)s %%(rev_last_link)s</div>
  <div class="diff-info diff-info-rev-size"><span class="diff-info-caption">%(rev_size_caption)s:</span> <span class="diff-info-value">%%(rev_size)d</span></div>
  <div class="diff-info diff-info-rev-author"><span class="diff-info-caption">%(rev_author_caption)s:</span> <span class="diff-info-value">%%(rev_author)s</span></div>
  <div class="diff-info diff-info-rev-comment"><span class="diff-info-caption">%(rev_comment_caption)s:</span> <span class="diff-info-value">%%(rev_comment)s</span></div>
""" % {
        'rev_header': _('Revision %(rev)d as of %(date)s'),
        'rev_size_caption': _('Size'),
        'rev_author_caption': _('Editor'),
        'rev_ts_caption': _('Date'),
        'rev_comment_caption': _('Comment'),
    }

    rev_info_old_html = rev_info_html % {
        'rev_first_link':
        rev_nav_link(oldrev > 1, 1, newrev, u'\u21e4',
                     'diff-first-link diff-old-rev',
                     _('Diff with oldest revision in left pane'),
                     _("No older revision available for diff")),
        'rev_prev_link':
        rev_nav_link(oldrev > 1, prev_oldrev, newrev, u'\u2190',
                     'diff-prev-link diff-old-rev',
                     _('Diff with older revision in left pane'),
                     _("No older revision available for diff")),
        'rev_next_link':
        rev_nav_link(
            (oldrev < currentrev) and (next_oldrev < newrev), next_oldrev,
            newrev, u'\u2192', 'diff-next-link diff-old-rev',
            _('Diff with newer revision in left pane'),
            _("Can't change to revision newer than in right pane")),
        'rev_last_link':
        '',
        'rev':
        oldrev,
        'rev_size':
        oldpage.size(),
        'rev_author':
        oldlog.getEditor(request) or _('N/A'),
        'date':
        request.user.getFormattedDateTime(
            wikiutil.version2timestamp(oldlog.ed_time_usecs)) or _('N/A'),
        'rev_comment':
        wikiutil.escape(oldlog.comment) or '',
    }

    rev_info_new_html = rev_info_html % {
        'rev_first_link':
        '',
        'rev_prev_link':
        rev_nav_link(
            (newrev > 1) and (oldrev < prev_newrev), oldrev, prev_newrev,
            u'\u2190', 'diff-prev-link diff-new-rev',
            _('Diff with older revision in right pane'),
            _("Can't change to revision older than revision in left pane")),
        'rev_next_link':
        rev_nav_link(newrev < currentrev, oldrev, next_newrev, u'\u2192',
                     'diff-next-link diff-new-rev',
                     _('Diff with newer revision in right pane'),
                     _("No newer revision available for diff")),
        'rev_last_link':
        rev_nav_link(newrev < currentrev, oldrev, currentrev, u'\u21e5',
                     'diff-last-link diff-old-rev',
                     _('Diff with newest revision in right pane'),
                     _("No newer revision available for diff")),
        'rev':
        newrev,
        'rev_size':
        newpage.size(),
        'rev_author':
        newlog.getEditor(request) or _('N/A'),
        'date':
        request.user.getFormattedDateTime(
            wikiutil.version2timestamp(newlog.ed_time_usecs)) or _('N/A'),
        'rev_comment':
        wikiutil.escape(newlog.comment) or '',
    }

    if request.user.show_fancy_diff:
        from MoinMoin.util import diff_html
        request.write(
            f.rawHTML(
                diff_html.diff(request,
                               oldpage.get_raw_body(),
                               newpage.get_raw_body(),
                               old_top=rev_info_old_html,
                               new_top=rev_info_new_html,
                               old_top_class="diff-info",
                               new_top_class="diff-info")))
        newpage.send_page(count_hit=0,
                          content_only=1,
                          content_id="content-below-diff")
    else:
        request.write(
            f.rawHTML(
                '<table class="diff"><tr><td class="diff-info">%s</td><td class="diff-info">%s</td></tr></table>'
                % (rev_info_old_html, rev_info_new_html)))

        from MoinMoin.util import diff_text
        lines = diff_text.diff(oldpage.getlines(), newpage.getlines())
        if not lines:
            msg = f.text(" - " + _("No differences found!"))
            if edit_count > 1:
                msg = msg + f.paragraph(1) + f.text(
                    _('The page was saved %(count)d times, though!') %
                    {'count': edit_count}) + f.paragraph(0)
            request.write(msg)
        else:
            if ignorews:
                request.write(f.text(_('(ignoring whitespace)')),
                              f.linebreak())
            else:
                qstr = {
                    'action': 'diff',
                    'ignorews': '1',
                }
                if rev1:
                    qstr['rev1'] = str(rev1)
                if rev2:
                    qstr['rev2'] = str(rev2)
                request.write(
                    f.paragraph(1),
                    Page(request, pagename).link_to(
                        request,
                        text=_('Ignore changes in the amount of whitespace'),
                        querystr=qstr,
                        rel='nofollow'), f.paragraph(0))

            request.write(f.preformatted(1))
            for line in lines:
                if line[0] == "@":
                    request.write(f.rule(1))
                request.write(f.text(line + '\n'))
            request.write(f.preformatted(0))

    request.write(f.div(0))  # end content div
    request.theme.send_footer(pagename)
    request.theme.send_closing_html()
Example #17
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    if not wikixml.ok:
        request.mimetype = 'text/plain'
        request.write(
            "rss_rc action is not supported because of missing pyxml module.")
        return
    if request.isSpiderAgent:  # reduce bot cpu usage
        return ''

    cfg = request.cfg
    _ = request.getText

    # get params
    def_max_items = max_items = cfg.rss_items_default
    items_limit = cfg.rss_items_limit
    unique = cfg.rss_unique
    diffs = cfg.rss_diffs
    ddiffs = cfg.rss_ddiffs
    max_lines = cfg.rss_lines_default
    lines_limit = cfg.rss_lines_limit
    show_att = cfg.rss_show_attachment_entries
    page_pattern = cfg.rss_page_filter_pattern

    try:
        max_items = min(int(request.values.get('items', max_items)),
                        items_limit)
    except ValueError:
        pass
    try:
        unique = int(request.values.get('unique', unique))
    except ValueError:
        pass
    try:
        diffs = int(request.values.get('diffs', diffs))
    except ValueError:
        pass
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get('ddiffs', ddiffs))
    except ValueError:
        pass
    try:
        max_lines = min(int(request.values.get('lines', max_lines)),
                        lines_limit)
    except ValueError:
        pass
    try:
        show_att = int(request.values.get('show_att', show_att))
    except ValueError:
        pass
    try:
        page_pattern = request.values.get('page', page_pattern)
    except ValueError:
        pass

    # if we are just interested in a specific page, using the local edit-log
    # of that page is much faster than the global one - esp. if the page was
    # NOT recently changed and the global edit-log is rather big.
    kw = dict(rootpagename=page_pattern) if is_single_page_match(
        page_pattern) else {}
    log = editlog.EditLog(request, **kw)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        if ((not show_att and not line.action.startswith('SAVE'))
                or ((line.pagename in pages) and unique)
                or not match_page(line.pagename, page_pattern)):
            continue
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(
            wikiutil.version2timestamp(line.ed_time_usecs))  # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique,
                                     max_lines, show_att)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = 'application/rss+xml'
        request.expires = expires
        request.last_modified = lastmod
        request.headers['Etag'] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        out = StringIO.StringIO()
        handler = RssGenerator(out)

        # start SAX stream
        handler.startDocument()
        handler.write(
            u'<!--\n'
            u'    Add an "items=nnn" URL parameter to get more than the \n'
            u'    default %(def_max_items)d items. You cannot get more than \n'
            u'    %(items_limit)d items though.\n'
            u'    \n'
            u'    Add "unique=1" to get a list of changes where page names are unique,\n'
            u'    i.e. where only the latest change of each page is reflected.\n'
            u'    \n'
            u'    Add "diffs=1" to add change diffs to the description of each items.\n'
            u'    \n'
            u'    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
            u'    \n'
            u'    Add "lines=nnn" to change maximum number of diff/body lines \n'
            u'    to show. Cannot be more than %(lines_limit)d.\n'
            u'    \n'
            u'    Add "show_att=1" to show items related to attachments.\n'
            u'    \n'
            u'    Add "page=pattern" to show feed only for specific pages.\n'
            u'    Pattern can be empty (it would match to all pages), \n'
            u'    can start with circumflex (it would be interpreted as \n'
            u'    regular expression in this case), end with slash (for \n'
            u'    getting feed for page tree) or point to specific page (if \n'
            u'    none of the above can be applied).\n'
            u'    \n'
            u'    Current settings: items=%(max_items)i, unique=%(unique)i, \n'
            u'    diffs=%(diffs)i, ddiffs=%(ddiffs)i, lines=%(max_lines)i, \n'
            u'    show_att=%(show_att)i\n'
            u'-->\n' % locals())

        # emit channel description
        handler.startNode('channel', {
            (handler.xmlns['rdf'], 'about'): request.url_root,
        })
        handler.simpleNode('title', cfg.sitename)
        page = Page(request, pagename)
        handler.simpleNode('link', full_url(request, page))
        handler.simpleNode('description',
                           u'RecentChanges at %s' % cfg.sitename)
        if logo:
            handler.simpleNode('image', None, {
                (handler.xmlns['rdf'], 'resource'): logo,
            })
        if cfg.interwikiname:
            handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        handler.startNode('items')
        handler.startNode(('rdf', 'Seq'))
        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            link = full_url(request, page, anchor=anchor)
            handler.simpleNode(('rdf', 'li'),
                               None,
                               attr={
                                   (handler.xmlns['rdf'], 'resource'): link,
                               })
        handler.endNode(('rdf', 'Seq'))
        handler.endNode('items')
        handler.endNode('channel')

        # emit logo data
        if logo:
            handler.startNode('image',
                              attr={
                                  (handler.xmlns['rdf'], 'about'): logo,
                              })
            handler.simpleNode('title', cfg.sitename)
            handler.simpleNode('link', baseurl)
            handler.simpleNode('url', logo)
            handler.endNode('image')

        # Mapping { oldname: curname } for maintaining page renames
        pagename_map = {}

        # emit items
        for item in logdata:
            if item.pagename in pagename_map:
                cur_pagename = pagename_map[item.pagename]
            else:
                cur_pagename = item.pagename
            page = Page(request, cur_pagename)
            action = item.action
            comment = item.comment
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            rdflink = full_url(request, page, anchor=anchor)
            handler.startNode('item',
                              attr={
                                  (handler.xmlns['rdf'], 'about'): rdflink,
                              })

            # general attributes
            handler.simpleNode('title', item.pagename)
            handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))

            show_diff = diffs

            if action.startswith('ATT'):  # Attachment
                show_diff = 0
                filename = wikiutil.url_unquote(item.extra)
                att_exists = AttachFile.exists(request, cur_pagename, filename)

                if action == 'ATTNEW':
                    # Once the attachment is deleted this link becomes invalid,
                    # but we preserve it to prevent new RSS entries from
                    # appearing in RSS readers.
                    if ddiffs:
                        handler.simpleNode(
                            'link',
                            attach_url(request,
                                       cur_pagename,
                                       filename,
                                       do='view'))

                    comment = _(u"Upload of attachment '%(filename)s'.") % {
                        'filename': filename
                    }

                elif action == 'ATTDEL':
                    if ddiffs:
                        handler.simpleNode(
                            'link',
                            full_url(request,
                                     page,
                                     querystr={'action': 'AttachFile'}))

                    comment = _(u"Attachment '%(filename)s' deleted.") % {
                        'filename': filename
                    }

                elif action == 'ATTDRW':
                    if ddiffs:
                        handler.simpleNode(
                            'link',
                            attach_url(request,
                                       cur_pagename,
                                       filename,
                                       do='view'))

                    comment = _(u"Drawing '%(filename)s' saved.") % {
                        'filename': filename
                    }

            elif action.startswith('SAVE'):
                if action == 'SAVE/REVERT':
                    to_rev = int(item.extra)
                    comment = (_(u"Revert to revision %(rev)d.") % {
                        'rev': to_rev}) + "<br />" \
                        + _("Comment:") + " " + comment

                elif action == 'SAVE/RENAME':
                    show_diff = 0
                    comment = (_(u"Renamed from '%(oldpagename)s'.") % {
                        'oldpagename': item.extra}) + "<br />" \
                        + _("Comment:") + " " + comment
                    if item.pagename in pagename_map:
                        newpage = pagename_map[item.pagename]
                        del pagename_map[item.pagename]
                        pagename_map[item.extra] = newpage
                    else:
                        pagename_map[item.extra] = item.pagename

                elif action == 'SAVENEW':
                    comment = _(u"New page:\n") + comment

                item_rev = int(item.rev)

                # If we use diffs/ddiffs, we should calculate proper links and
                # content
                if ddiffs:
                    # first revision can't have older revisions to diff with
                    if item_rev == 1:
                        handler.simpleNode(
                            'link',
                            full_url(request,
                                     page,
                                     querystr={
                                         'action': 'recall',
                                         'rev': str(item_rev)
                                     }))
                    else:
                        handler.simpleNode(
                            'link',
                            full_url(request,
                                     page,
                                     querystr={
                                         'action': 'diff',
                                         'rev1': str(item_rev),
                                         'rev2': str(item_rev - 1)
                                     }))

                if show_diff:
                    if item_rev == 1:
                        lines = Page(request, cur_pagename,
                                     rev=item_rev).getlines()
                    else:
                        lines = wikiutil.pagediff(request,
                                                  cur_pagename,
                                                  item_rev - 1,
                                                  cur_pagename,
                                                  item_rev,
                                                  ignorews=1)

                    if len(lines) > max_lines:
                        lines = lines[:max_lines] + ['...\n']

                    lines = '\n'.join(lines)
                    lines = wikiutil.escape(lines)

                    comment = u'%s\n<pre>\n%s\n</pre>\n' % (comment, lines)

                if not ddiffs:
                    handler.simpleNode('link', full_url(request, page))

            if comment:
                handler.simpleNode('description', comment)

            # contributor
            if cfg.show_names:
                edattr = {}
                if cfg.show_hosts:
                    edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
                if item.editor[0] == 'interwiki':
                    edname = "%s:%s" % item.editor[1]
                    ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
                else:  # 'ip'
                    edname = item.editor[1]
                    ##edattr[(None, 'link')] = link + "?action=info"

                # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
                # if you know how to do this right, please send us a patch

                handler.startNode(('dc', 'contributor'))
                handler.startNode(('rdf', 'Description'), attr=edattr)
                handler.simpleNode(('rdf', 'value'), edname)
                handler.endNode(('rdf', 'Description'))
                handler.endNode(('dc', 'contributor'))

            # wiki extensions
            handler.simpleNode(('wiki', 'version'),
                               "%i" % (item.ed_time_usecs))
            handler.simpleNode(('wiki', 'status'),
                               ('deleted', 'updated')[page.exists()])
            handler.simpleNode(('wiki', 'diff'),
                               full_url(request,
                                        page,
                                        querystr={'action': 'diff'}))
            handler.simpleNode(('wiki', 'history'),
                               full_url(request,
                                        page,
                                        querystr={'action': 'info'}))
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode('item')

        # end SAX stream
        handler.endDocument()

        request.write(out.getvalue())
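
The pagename_map bookkeeping at the top of this example is the key trick: the log is walked newest-to-oldest, so a SAVE/RENAME entry means the old name must from then on resolve to whatever the page is called today. A minimal standalone sketch of that technique (the entry records are hypothetical stand-ins with .action, .pagename and, for renames, .extra holding the old name):

def current_names(entries_newest_first):
    """ Map each historical pagename to its current name (sketch). """
    pagename_map = {}
    for entry in entries_newest_first:
        # what this entry's page is called today
        cur = pagename_map.get(entry.pagename, entry.pagename)
        if entry.action == 'SAVE/RENAME':
            # the page known as entry.extra back then is `cur` today
            pagename_map.pop(entry.pagename, None)
            pagename_map[entry.extra] = cur
    return pagename_map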
Example #18
    def _addRevisionHistory(self, targetNode):
        """
        Generate a revhistory element and populate it with revision
        nodes. Each revision gets revnumber, date and authorinitials
        elements, and, if a comment was supplied, a revremark element.

        The date element's format depends on the user's settings, so it
        matches the revision history as viewed in the page info on the
        wiki.

        The authorinitials element holds the UserName, or, for an
        anonymous edit, the hostname/IP address.

        The revision history of included documents is NOT included at
        the moment due to technical difficulties.
        """
        _ = self.request.getText
        log = editlog.EditLog(self.request, rootpagename=self.title)
        user_cache = {}

        history = self.doc.createElement("revhistory")

        # read in the complete log of this page
        for line in log.reverse():
            if line.action not in ('SAVE', 'SAVENEW',
                                   'SAVE/REVERT', 'SAVE/RENAME'):
                # ignore attachment and other non-save actions
                continue
            revision = self.doc.createElement("revision")

            # Revision number (without leading zeros)
            self._addTextElem(revision, "revnumber", line.rev.lstrip('0'))

            # Date of revision
            date_text = self.request.user.getFormattedDateTime(
                wikiutil.version2timestamp(line.ed_time_usecs))
            self._addTextElem(revision, "date", date_text)

            # Author of the revision
            if line.userid not in user_cache:
                user_cache[line.userid] = user.User(
                    self.request, line.userid, auth_method="text_docbook:740")
            author = user_cache[line.userid]
            if author and author.name:
                self._addTextElem(revision, "authorinitials", author.name)
            else:
                self._addTextElem(revision, "authorinitials", line.hostname)

            # Comment from author of revision
            comment = line.comment
            if not comment:
                if '/REVERT' in line.action:
                    comment = _("Revert to revision %(rev)d.") % {
                        'rev': int(line.extra)
                    }
                elif '/RENAME' in line.action:
                    comment = _("Renamed from '%(oldpagename)s'.") % {
                        'oldpagename': line.extra
                    }
            if comment:
                self._addTextElem(revision, "revremark", comment)

            history.appendChild(revision)

        if history.firstChild:
            # only add the revision history if there is history to add
            targetNode.appendChild(history)
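
This example leans on an _addTextElem helper that is not shown here; a minimal sketch of what it presumably looks like, assuming self.doc is an xml.dom.minidom Document as above:

    def _addTextElem(self, parent, name, text):
        """ Append a <name> child element containing text (sketch). """
        elem = self.doc.createElement(name)
        elem.appendChild(self.doc.createTextNode(text))
        parent.appendChild(elem)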
Example #19
def macro_RecentChanges(macro, abandoned=False):
    # handle abandoned keyword
    if abandoned:
        return print_abandoned(macro)

    request = macro.request
    _ = request.getText
    output = []
    user = request.user
    page = macro.formatter.page
    pagename = page.page_name

    d = {}
    d['page'] = page
    d['q_page_name'] = wikiutil.quoteWikinameURL(pagename)

    log = editlog.EditLog(request)

    tnow = time.time()
    msg = ""

    # get bookmark from valid user
    bookmark_usecs = request.user.getBookmark() or 0

    # add bookmark link if valid user
    d['rc_curr_bookmark'] = None
    d['rc_update_bookmark'] = None
    if request.user.valid:
        d['rc_curr_bookmark'] = _('(no bookmark set)')
        if bookmark_usecs:
            currentBookmark = wikiutil.version2timestamp(bookmark_usecs)
            currentBookmark = user.getFormattedDateTime(currentBookmark)
            currentBookmark = _('(currently set to %s)') % currentBookmark
            deleteBookmark = page.link_to(request, _("Delete bookmark"), querystr={'action': 'bookmark', 'time': 'del'}, rel='nofollow')
            d['rc_curr_bookmark'] = currentBookmark + ' ' + deleteBookmark

        version = wikiutil.timestamp2version(tnow)
        d['rc_update_bookmark'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % version}, rel='nofollow')

    # set max size in days
    max_days = min(int(request.values.get('max_days', 0)), _DAYS_SELECTION[-1])
    # default to _MAX_DAYS for users without a bookmark
    if not max_days and not bookmark_usecs:
        max_days = _MAX_DAYS
    d['rc_max_days'] = max_days

    # give known user the option to extend the normal display
    if request.user.valid:
        d['rc_days'] = _DAYS_SELECTION
    else:
        d['rc_days'] = []

    output.append(request.theme.recentchanges_header(d))

    pages = {}
    ignore_pages = {}

    today = request.user.getTime(tnow)[0:3]
    this_day = today
    day_count = 0

    for line in log.reverse():

        if not request.user.may.read(line.pagename):
            continue

        line.time_tuple = request.user.getTime(wikiutil.version2timestamp(line.ed_time_usecs))
        day = line.time_tuple[0:3]
        hilite = line.ed_time_usecs > (bookmark_usecs or line.ed_time_usecs)

        if (this_day != day or (not hilite and not max_days)) and len(pages) > 0:
            # new day or bookmark reached: print out stuff
            this_day = day
            for p in pages:
                ignore_pages[p] = None
            pages = pages.values()
            pages.sort(cmp_lines)
            pages.reverse()

            if request.user.valid:
                bmtime = pages[0][0].ed_time_usecs
                d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
            else:
                d['bookmark_link_html'] = None
            d['date'] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
            output.append(request.theme.recentchanges_daybreak(d))

            for p in pages:
                output.append(format_page_edits(macro, p, bookmark_usecs))
            pages = {}
            day_count += 1
            if max_days and (day_count >= max_days):
                break

        elif this_day != day:
            # new day but no changes
            this_day = day

        if line.pagename in ignore_pages:
            continue

        # end listing by default if user has a bookmark and we reached it
        if not max_days and not hilite:
            msg = _('[Bookmark reached]')
            break

        if line.pagename in pages:
            pages[line.pagename].append(line)
        else:
            pages[line.pagename] = [line]
    else:
        if len(pages) > 0:
            # end of loop reached: print out stuff
            # XXX duplicated code from above, but the block above does not
            # trigger for the oldest day in the wiki's history
            for p in pages:
                ignore_pages[p] = None
            pages = pages.values()
            pages.sort(cmp_lines)
            pages.reverse()

            if request.user.valid:
                bmtime = pages[0][0].ed_time_usecs
                d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
            else:
                d['bookmark_link_html'] = None
            d['date'] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
            output.append(request.theme.recentchanges_daybreak(d))

            for p in pages:
                output.append(format_page_edits(macro, p, bookmark_usecs))


    d['rc_msg'] = msg
    output.append(request.theme.recentchanges_footer(d))

    return ''.join(output)
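
The two sort calls above rely on a cmp_lines comparator defined elsewhere in the macro module; a plausible sketch, assuming each value in pages is the list of log lines for one page, newest first:

def cmp_lines(first, second):
    # order page groups by the edit time of their newest entry (sketch)
    return cmp(first[0].ed_time_usecs, second[0].ed_time_usecs)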
Example #20
def execute(pagename, request):
    """ Send recent changes as an RSS document
    """
    cfg = request.cfg

    # get params
    items_limit = 100
    try:
        max_items = int(request.values['items'])
        max_items = min(max_items, items_limit)  # not more than `items_limit`
    except (KeyError, ValueError):
        # not more than 15 items in an RSS file by default
        max_items = 15
    try:
        unique = int(request.values.get('unique', 0))
    except ValueError:
        unique = 0
    try:
        diffs = int(request.values.get('diffs', 0))
    except ValueError:
        diffs = 0
    ## ddiffs inserted by Ralf Zosel <*****@*****.**>, 04.12.2003
    try:
        ddiffs = int(request.values.get('ddiffs', 0))
    except ValueError:
        ddiffs = 0

    urlfilter = request.values.get('filter')
    if urlfilter:
        urlfilter = re.compile(urlfilter)
    else:
        urlfilter = None

    # get data
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}
    lastmod = 0
    for line in log.reverse():
        if urlfilter and not (urlfilter.match(line.pagename)):
            continue
        if not request.user.may.read(line.pagename):
            continue
        if (not line.action.startswith('SAVE')
                or ((line.pagename in pages) and unique)):
            continue
        #if log.dayChanged() and log.daycount > _MAX_DAYS: break
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(
            wikiutil.version2timestamp(line.ed_time_usecs))  # UTC
        logdata.append(line)
        pages[line.pagename] = None

        if not lastmod:
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)

        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    etag = "%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.status_code = 304
        else:
            request.status_code = 304
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.status_code = 304
        else:
            request.status_code = 304
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = time.time() + cfg.rss_cache

        request.mimetype = 'application/rss+xml'
        request.expires = expires
        request.last_modified = lastmod
        request.headers['Etag'] = etag

        # send the generated XML document
        baseurl = request.url_root

        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        output = structencoder(indent=u"yes")

        FEED_HEADER_COMMENT = '''
<!--
    Add an "items=nnn" URL parameter to get more than the default 15 items.
    You cannot get more than %d items though.

    Add "unique=1" to get a list of changes where page names are unique,
    i.e. where only the latest change of each page is reflected.
    Add "diffs=1" to add change diffs to the description of each item.

    Add "ddiffs=1" to link directly to the diff (good for FeedReader).
    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i
-->
        ''' % (items_limit, max_items, unique, diffs, ddiffs)

        # Feed envelope
        page = Page(request, pagename)
        f = output.cofeed(
            ROOT(NS(u'', ATOM_NAMESPACE), NS(u'wiki', RSSWIKI_NAMESPACE),
                 E_CURSOR((ATOM_NAMESPACE, u'feed'), )))
        f.send(
            E((ATOM_NAMESPACE, u'id'),
              full_url(request, page).encode(config.charset)))
        f.send(
            E((ATOM_NAMESPACE, u'title'),
              cfg.sitename.encode(config.charset)))
        f.send(
            E((ATOM_NAMESPACE, u'link'),
              {u'href': request.url_root.encode(config.charset)}))
        f.send(
            E((ATOM_NAMESPACE, u'summary'),
              ('RecentChanges at %s' % cfg.sitename).encode(config.charset)))
        #Icon
        #E((ATOM_NAMESPACE, u'link'), {u'href': logo.encode(config.charset)}),

        #if cfg.interwikiname:
        #    handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        for item in logdata:
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            #link = full_url(request, page, anchor=anchor)
            if ddiffs:
                link = full_url(request, page, querystr={'action': 'diff'})
            else:
                link = full_url(request, page)

            # description
            desc_text = item.comment
            if diffs:
                # TODO: rewrite / extend wikiutil.pagediff
                # searching for the matching pages doesn't really belong here
                revisions = page.getRevList()

                rl = len(revisions)
                for idx in range(rl):
                    rev = revisions[idx]
                    if rev <= item.rev:
                        if idx + 1 < rl:
                            lines = wikiutil.pagediff(request,
                                                      item.pagename,
                                                      revisions[idx + 1],
                                                      item.pagename,
                                                      0,
                                                      ignorews=1)
                            if len(lines) > 20:
                                lines = lines[:20] + ['...\n']
                            lines = '\n'.join(lines)
                            lines = wikiutil.escape(lines)
                            desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text,
                                                                     lines)
                        break
            #if desc_text:
            #    handler.simpleNode('description', desc_text)

            # contributor
            edattr = {}
            #if cfg.show_hosts:
            #    edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
            if item.editor[0] == 'interwiki':
                edname = "%s:%s" % item.editor[1]
                ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
            else:  # 'ip'
                edname = item.editor[1]
                ##edattr[(None, 'link')] = link + "?action=info"

            history_link = full_url(request, page, querystr={'action': 'info'})

            f.send(
                E(
                    (ATOM_NAMESPACE, u'entry'),
                    E((ATOM_NAMESPACE, u'id'), link.encode(config.charset)),
                    E((ATOM_NAMESPACE, u'title'),
                      item.pagename.encode(config.charset)),
                    E((ATOM_NAMESPACE, u'updated'),
                      timefuncs.W3CDate(item.time).encode(config.charset)),
                    E((ATOM_NAMESPACE, u'link'),
                      {u'href': link.encode(config.charset)}),
                    E((ATOM_NAMESPACE, u'summary'),
                      desc_text.encode(config.charset)),
                    E((ATOM_NAMESPACE, u'author'),
                      E((ATOM_NAMESPACE, u'name'),
                        edname.encode(config.charset))),
                    #E((ATOM_NAMESPACE, u'title'), item.pagename.encode(config.charset)),
                    # wiki extensions
                    E((RSSWIKI_NAMESPACE, u'wiki:version'),
                      ("%i" % (item.ed_time_usecs)).encode(config.charset)),
                    E((RSSWIKI_NAMESPACE, u'wiki:status'),
                      (u'deleted', u'updated')[page.exists()]),
                    E((RSSWIKI_NAMESPACE, u'wiki:diff'),
                      link.encode(config.charset)),
                    E((RSSWIKI_NAMESPACE, u'wiki:history'),
                      history_link.encode(config.charset)),
                    # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
                    # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )
                ))

        # emit logo data
        #if logo:
        #    handler.startNode('image', attr={
        #        (handler.xmlns['rdf'], 'about'): logo,
        #        })
        #    handler.simpleNode('title', cfg.sitename)
        #    handler.simpleNode('link', baseurl)
        #    handler.simpleNode('url', logo)
        #    handler.endNode('image')

        f.close()
        request.write(output.read())
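
The 304 handling above implements the rule stated in its comment: one validator must match, and the other must either be absent or match as well. Factored into a standalone predicate (a sketch, not part of the original module), the decision reads:

def not_modified(if_modified_since, if_none_match, timestamp, etag):
    """ True if the client's cached copy is still fresh (sketch). """
    ims_match = (if_modified_since == timestamp)
    inm_match = (if_none_match == etag)
    if ims_match:
        return not if_none_match or inm_match
    if inm_match:
        return not if_modified_since or ims_match
    return False

With that rule, a client resending both matching validators gets an empty 304 instead of a regenerated feed; and since the URL parameters (items, unique, diffs, ddiffs) are baked into the ETag, changing any of them forces a fresh document.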
Example #21
def convert_editlog(page, output=None, overwrite=False):
    pagedir = page.getPagePath()
    pagename = wikiname(pagedir)
    if not output:
        output = pagename
    edit_log = editlog.EditLog(request, page.getPagePath('edit-log'))

    changes = {}
    for log in edit_log:
        # not supported. perhaps add anyway?
        if log.action in ('ATTNEW', 'ATTDEL', 'ATTDRW'):
            continue

        # 1201095949  192.168.2.23    E   start   [email protected]
        author = log.hostname
        if log.userid:
            userdata = user.User(request, log.userid)
            if userdata.name:
                author = userdata.name

        try:
            action = {
                'SAVE': 'E',
                'SAVENEW': 'C',
                'SAVE/REVERT': 'R',
            }[log.action]
        except KeyError:
            action = log.action

        mtime = str(log.ed_time_usecs / USEC)
        changes[mtime] = u"\t".join([
            mtime, log.addr, action,
            dw.cleanID(log.pagename), author, log.comment
        ])

    # see if we have missing entries, try to recover
    page = Page(request, pagename)
    if len(page.getRevList()) != len(changes):
        print "RECOVERING edit-log, missing %d entries" % (
            len(page.getRevList()) - len(changes))
        for rev in page.getRevList():
            page = Page(request, pagename, rev=rev)
            mtime = page.mtime_usecs() / USEC

            if not mtime:
                pagefile, realrev, exists = page.get_rev(rev=rev)
                if os.path.exists(pagefile):
                    mtime = int(os.path.getmtime(pagefile))
                    print "Recovered %s: %s" % (rev, mtime)

            mtime = str(mtime)
            if mtime not in changes:
                changes[mtime] = u"\t".join([
                    mtime, '127.0.0.1', '?',
                    dw.cleanID(pagename), 'root', 'recovered entry'
                ])
                print "ADDING %s" % mtime

    changes = sorted(changes.values())
    out_file = os.path.join(output_dir, 'meta', dw.metaFN(output, '.changes'))
    writefile(out_file, changes, overwrite=overwrite)
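
The final call goes through a writefile helper (and an output_dir global) defined elsewhere in the conversion script; a minimal sketch, assuming it writes one UTF-8 line per change record:

import codecs
import os

def writefile(path, lines, overwrite=False):
    """ Write the sorted .changes lines, one per entry (sketch). """
    if os.path.exists(path) and not overwrite:
        raise IOError("refusing to overwrite %s" % path)
    f = codecs.open(path, 'w', 'utf-8')
    try:
        f.write(u"\n".join(lines) + u"\n")
    finally:
        f.close()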
Example #22
File: SystemInfo.py Project: aahlad/soar
    def getInfo(self):
        _ = self.request.getText
        request = self.request

        buf = StringIO()

        row = lambda label, value, buf=buf: buf.write(u'<dt>%s</dt><dd>%s</dd>'
                                                      % (label, value))

        buf.write(u'<dl>')
        row(_('Python Version'), sys.version)
        row(
            _('MoinMoin Version'),
            _('Release %s [Revision %s]') %
            (version.release, version.revision))

        if not request.user.valid:
            # for an anonymous user it ends here.
            buf.write(u'</dl>')
            return buf.getvalue()

        if request.user.isSuperUser():
            # only the superuser gets the page-dependent information below
            try:
                import Ft
                ftversion = Ft.__version__
            except ImportError:
                ftversion = None
            except AttributeError:
                ftversion = 'N/A'

            if ftversion:
                row(_('4Suite Version'), ftversion)

            # TODO add python-xml check and display it

            # Get the full pagelist of the wiki
            pagelist = request.rootpage.getPageList(user='')
            systemPages = []
            totalsize = 0
            for page in pagelist:
                if wikiutil.isSystemPage(request, page):
                    systemPages.append(page)
                totalsize += Page(request, page).size()

            row(_('Number of pages'), str(len(pagelist) - len(systemPages)))
            row(_('Number of system pages'), str(len(systemPages)))

            row(_('Accumulated page sizes'),
                self.formatInReadableUnits(totalsize))
            data_dir = request.cfg.data_dir
            row(
                _('Disk usage of %(data_dir)s/pages/') %
                {'data_dir': data_dir},
                self.formatInReadableUnits(
                    self.getDirectorySize(os.path.join(data_dir, 'pages'))))
            row(
                _('Disk usage of %(data_dir)s/') % {'data_dir': data_dir},
                self.formatInReadableUnits(self.getDirectorySize(data_dir)))

            edlog = editlog.EditLog(request)
            row(
                _('Entries in edit log'), "%s (%s)" %
                (edlog.lines(), self.formatInReadableUnits(edlog.size())))

            # This puts a heavy load on the server when the log is large
            eventlogger = eventlog.EventLog(request)
            row('Event log', self.formatInReadableUnits(eventlogger.size()))

        nonestr = _("NONE")
        # a valid user gets info about all installed extensions
        row(_('Global extension macros'), ', '.join(macro.modules) or nonestr)
        row(
            _('Local extension macros'),
            ', '.join(wikiutil.wikiPlugins('macro', self.macro.cfg))
            or nonestr)

        glob_actions = [
            x for x in action.modules if x not in request.cfg.actions_excluded
        ]
        row(_('Global extension actions'), ', '.join(glob_actions) or nonestr)
        loc_actions = [
            x for x in wikiutil.wikiPlugins('action', self.macro.cfg)
            if x not in request.cfg.actions_excluded
        ]
        row(_('Local extension actions'), ', '.join(loc_actions) or nonestr)

        row(_('Global parsers'), ', '.join(parser.modules) or nonestr)
        row(
            _('Local extension parsers'),
            ', '.join(wikiutil.wikiPlugins('parser', self.macro.cfg))
            or nonestr)

        try:
            import xapian
            xapVersion = 'Xapian %s' % xapian.version_string()
        except ImportError:
            xapian = None
            xapVersion = _(
                'Xapian and/or Python Xapian bindings not installed')

        xapian_enabled = request.cfg.xapian_search
        xapState = (_('Disabled'), _('Enabled'))
        xapRow = '%s, %s' % (xapState[xapian_enabled], xapVersion)

        if xapian and xapian_enabled:
            from MoinMoin.search.Xapian.indexing import XapianIndex
            idx = XapianIndex(request)
            idxState = (_('index unavailable'), _('index available'))
            idx_exists = idx.exists()
            xapRow += ', %s' % idxState[idx_exists]
            if idx_exists:
                xapRow += ', %s' % (
                    _('last modified: %s') %
                    request.user.getFormattedDateTime(idx.mtime()))

        row(_('Xapian search'), xapRow)

        if xapian and xapian_enabled:
            stems = xapian.Stem.get_available_languages()
            row(
                _('Stemming for Xapian'),
                xapState[request.cfg.xapian_stemming] + " (%s)" %
                (stems or nonestr))

        try:
            from threading import activeCount
            t_count = activeCount()
        except ImportError:
            t_count = None

        row(_('Active threads'), t_count or _('N/A'))
        buf.write(u'</dl>')

        return buf.getvalue()
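
getInfo repeatedly calls a formatInReadableUnits helper from the same macro; it is not shown here, but a minimal sketch of the usual scaling logic (binary units assumed) would be:

    def formatInReadableUnits(self, size):
        # scale a byte count into a human-readable string (sketch)
        size = float(size)
        unit = u'B'
        for unit in (u'B', u'KiB', u'MiB', u'GiB'):
            if size < 1024:
                break
            size /= 1024.0
        return u"%.1f %s" % (size, unit)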