Example #1
File: bookmark.py Project: aahlad/soar
def execute(pagename, request):
    """ set bookmarks (in time) for RecentChanges or delete them """
    _ = request.getText
    if not request.user.valid:
        actname = __name__.split('.')[-1]
        request.theme.add_msg(
            _("You must login to use this action: %(action)s.") %
            {"action": actname}, "error")
        return Page(request, pagename).send_page()

    timestamp = request.values.get('time')
    if timestamp is not None:
        if timestamp == 'del':
            tm = None
        else:
            try:
                tm = int(timestamp)
            except StandardError:
                tm = wikiutil.timestamp2version(time.time())
    else:
        tm = wikiutil.timestamp2version(time.time())

    if tm is None:
        request.user.delBookmark()
    else:
        request.user.setBookmark(tm)
    request.page.send_page()
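Note: every example on this page feeds a UNIX timestamp (seconds since the epoch, int or float) through wikiutil.timestamp2version to obtain MoinMoin's internal microsecond "version" value, which the edit logs, event logs and bookmarks store. As a minimal sketch of what the conversion pair does (an assumption based on how the helpers are used in these snippets, not a verbatim copy of MoinMoin 1.x wikiutil):

def timestamp2version(ts):
    # scale seconds (int or float) to an integer microsecond count;
    # sketch only -- MoinMoin 1.x returns a Python 2 long here
    return int(ts * 1000000)

def version2timestamp(v):
    # inverse: microsecond "version" value back to float seconds
    return v / 1000000.0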
Example #2
def execute(pagename, request):
    """ set bookmarks (in time) for RecentChanges or delete them """
    _ = request.getText
    if not request.user.valid:
        actname = __name__.split('.')[-1]
        request.theme.add_msg(_("You must login to use this action: %(action)s.") % {"action": actname}, "error")
        return Page(request, pagename).send_page()

    timestamp = request.form.get('time', [None])[0]
    if timestamp is not None:
        if timestamp == 'del':
            tm = None
        else:
            try:
                tm = int(timestamp)
            except StandardError:
                tm = wikiutil.timestamp2version(time.time())
    else:
        tm = wikiutil.timestamp2version(time.time())

    if tm is None:
        request.user.delBookmark()
    else:
        request.user.setBookmark(tm)
    request.page.send_page()
Example #3
    def add(self, request, eventtype, values=None, add_http_info=1,
            mtime_usecs=None):
        """ Write an event of type `eventtype, with optional key/value
            pairs appended (i.e. you have to pass a dict).
        """
        cfg = request.cfg
        if cfg.log_events_format == 0 or request.isSpiderAgent:
            # no event logging enabled or user agent is a bot / spider
            return

        if mtime_usecs is None:
            mtime_usecs = wikiutil.timestamp2version(time.time())

        if values is None:
            values = {}
        if cfg.log_remote_addr and add_http_info:
            # if cfg.log_remote_addr is False (usually for privacy reasons),
            # we likely do not want to log user agent and http referer either.
            for key in ['remote_addr', 'http_user_agent', 'http_referer']:
                value = getattr(request, key, '')
                if value:
                    # Save those http headers in UPPERcase
                    values[key.upper()] = value

        if cfg.log_events_format == 2:
            values['username'] = request.user.name
            values['wikiname'] = cfg.interwikiname
            values['url'] = request.url

        # Encode values in a query string TODO: use more readable format
        values = wikiutil.makeQueryString(values)
        self._add(u"%d\t%s\t%s\n" % (mtime_usecs, eventtype, values))
Example #4
def gather_editlog(dir_from, el_from):
    """ this gathers everything that is in edit-log into internal
        data structures, converting to the future format
    """
    if not os.path.exists(el_from): 
        return
    for l in open(el_from):
        data = l.rstrip('\n').split('\t')
        origlen = len(data)
        while len(data) < 7: data.append('')
        (pagename,ip,timestamp,host,id,comment,action) = data
        if origlen == 6:
            action = comment
            comment = ''
        
        extra = ''
        if action == 'SAVE/REVERT': # we forgot to convert that in mig4
            ts = long(comment) # must be long for py 2.2.x
            if ts < 4000000000: # UNIX timestamp (secs)
                extra = str(wikiutil.timestamp2version(ts))
            else: # usecs timestamp
                extra = str(ts)
            # later we convert this timestamp to a revision number
            comment = ''
        if action in ['ATTNEW','ATTDRW','ATTDEL',]:
            extra = comment # filename
            comment = '' # so we can use comments on ATT* in future

        timestamp = long(timestamp) # must be long for py 2.2.x
        data = [timestamp,'',action,pagename,ip,host,id,extra,comment]
        
        entry = info.get(pagename, {})
        entry[timestamp] = [None, data]
        info[pagename] = entry
Example #5
File: lupy.py Project: imosts/flume
 def _index_file(self, request, writer, filename, update):
     """ index a file as it were a page named pagename
         Assumes that the write lock is acquired
     """
     fs_rootpage = 'FS' # XXX FS hardcoded
     try:
         mtime = os.path.getmtime(filename)
         mtime = wikiutil.timestamp2version(mtime)
         if update:
             query = BooleanQuery()
             query.add(TermQuery(Term("pagename", fs_rootpage)), True, False)
             query.add(TermQuery(Term("attachment", filename)), True, False)
             docs = self._search(query)
             updated = len(docs) == 0 or mtime > int(docs[0].get('mtime'))
         else:
             updated = True
         request.log("%s %r" % (filename, updated))
         if updated:
             file_content = self.contentfilter(filename)
             d = document.Document()
             d.add(document.Keyword('pagename', fs_rootpage))
             d.add(document.Keyword('mtime', str(mtime)))
             d.add(document.Keyword('attachment', filename)) # XXX we should treat files like real pages, not attachments
             pagename = " ".join(os.path.join(fs_rootpage, filename).split("/"))
             d.add(document.Text('title', pagename, store=False))        
             d.add(document.Text('text', file_content, store=False))
             writer.addDocument(d)
     except (OSError, IOError), err:
         pass
Example #6
File: storage.py Project: happytk/moin
    def history(self, request):
        # files = self._list_files()
        pages = []
        for root, dirs, files in os.walk(self.basepath):
            for name in dirs: pages.append(os.path.join(root, name))
            for name in files: pages.append(os.path.join(root, name))

        # pages = sorted(pages, lambda x,y: os.path.getmtime(x) < os.path.getmtime(y), reverse=True)
        # logging.warning(str(pages))
        pages = sorted(pages, key=lambda(x): os.path.getmtime(x), reverse=True)
        
        _usercache = {}
        for filename in pages:
            result = editlog.EditLogLine(_usercache)
            result.ed_time_usecs = wikiutil.timestamp2version(os.path.getmtime(filename))
            result.rev = 0
            result.action = 'SAVE'
            filename = filename[len(self.basepath)+1:].replace(os.sep, '/')
            if filename.endswith(request.cfg.fs_extension):
                filename = filename[:-len(request.cfg.fs_extension)]
            result.pagename = filename.decode(request.cfg.fs_encoding)
            result.addr = ''
            result.hostname = ''
            result.userid = ''
            result.extra = None
            result.comment = ''
            yield result
Example #7
File: eventlog.py Project: aahlad/soar
    def add(self,
            request,
            eventtype,
            values=None,
            add_http_info=1,
            mtime_usecs=None):
        """ Write an event of type `eventtype, with optional key/value
            pairs appended (i.e. you have to pass a dict).
        """
        if request.isSpiderAgent:
            return

        if mtime_usecs is None:
            mtime_usecs = wikiutil.timestamp2version(time.time())

        if values is None:
            values = {}
        if request.cfg.log_remote_addr and add_http_info:
            # if cfg.log_remote_addr is False (usually for privacy reasons),
            # we likely do not want to log user agent and http referer either.
            for key in ['remote_addr', 'http_user_agent', 'http_referer']:
                value = getattr(request, key, '')
                if value:
                    # Save those http headers in UPPERcase
                    values[key.upper()] = value
        # Encode values in a query string TODO: use more readable format
        values = wikiutil.makeQueryString(values)
        self._add(u"%d\t%s\t%s\n" % (mtime_usecs, eventtype, values))
Example #8
def do_bookmark(pagename, request):
    if request.form.has_key('time'):
        if request.form['time'][0] == 'del':
            tm = None
        else:
            try:
                tm = long(request.form["time"][0]) # must be long for py 2.2.x
            except StandardError:
                tm = wikiutil.timestamp2version(time.time())
    else:
        tm = wikiutil.timestamp2version(time.time())
  
    if tm is None:
        request.user.delBookmark()
    else:
        request.user.setBookmark(tm)
    Page(request, pagename).send_page(request)
Example #9
 def _writeLockFile(self):
     """Write new lock file."""
     self._deleteLockFile()
     try:
         editlog.EditLog(self.request, filename=self._filename()).add(
            self.request, wikiutil.timestamp2version(self.now), 0, "LOCK", self.page_name)
     except IOError:
         pass
Example #10
File: lupy.py Project: imosts/flume
    def _index_page(self, writer, page, update):
        """ Index a page - assumes that the write lock is acquired
            @arg writer: the index writer object
            @arg page: a page object
            @arg update: False = index in any case, True = index only when changed
        """
        pagename = page.page_name
        request = page.request
        mtime = page.mtime_usecs()
        if update:
            query = BooleanQuery()
            query.add(TermQuery(Term("pagename", pagename)), True, False)
            query.add(TermQuery(Term("attachment", "")), True, False)
            docs = self._search(query)
            updated = len(docs) == 0 or mtime > int(docs[0].get('mtime'))
        else:
            updated = True
        request.log("%s %r" % (pagename, updated))
        if updated:
            d = document.Document()
            d.add(document.Keyword('pagename', pagename))
            d.add(document.Keyword('mtime', str(mtime)))
            d.add(document.Keyword('attachment', '')) # this is a real page, not an attachment
            d.add(document.Text('title', pagename, store=False))        
            d.add(document.Text('text', page.get_raw_body(), store=False))
            
            links = page.getPageLinks(request)
            t = document.Text('links', '', store=False)
            t.stringVal = links
            d.add(t)
            d.add(document.Text('link_text', ' '.join(links), store=False))

            writer.addDocument(d)
        
        from MoinMoin.action import AttachFile

        attachments = AttachFile._get_files(request, pagename)
        for att in attachments:
            filename = AttachFile.getFilename(request, pagename, att)
            mtime = wikiutil.timestamp2version(os.path.getmtime(filename))
            if update:
                query = BooleanQuery()
                query.add(TermQuery(Term("pagename", pagename)), True, False)
                query.add(TermQuery(Term("attachment", att)), True, False)
                docs = self._search(query)
                updated = len(docs) == 0 or mtime > int(docs[0].get('mtime'))
            else:
                updated = True
            request.log("%s %s %r" % (pagename, att, updated))
            if updated:
                att_content = self.contentfilter(filename)
                d = document.Document()
                d.add(document.Keyword('pagename', pagename))
                d.add(document.Keyword('mtime', str(mtime)))
                d.add(document.Keyword('attachment', att)) # this is an attachment, store its filename
                d.add(document.Text('title', att, store=False)) # the filename is the "title" of an attachment
                d.add(document.Text('text', att_content, store=False))
                writer.addDocument(d)
Example #11
    def _index_attachment(self,
                          request,
                          connection,
                          pagename,
                          attachmentname,
                          mode='update'):
        """ Index an attachment

        @param request: request suitable for indexing
        @param connection: the Indexer connection object
        @param pagename: the page name
        @param attachmentname: the attachment's name
        @param mode: 'add' = just add, no checks
                     'update' = check if already in index and update if needed (mtime)
        """
        from MoinMoin.action import AttachFile
        wikiname = request.cfg.interwikiname or u"Self"
        itemid = "%s:%s//%s" % (wikiname, pagename, attachmentname)

        filename = AttachFile.getFilename(request, pagename, attachmentname)
        # check if the file is still there. as we might be doing queued index updates,
        # the file could be gone meanwhile...
        if os.path.exists(filename):
            mtime = wikiutil.timestamp2version(os.path.getmtime(filename))
            doc = self._get_document(connection, itemid, mtime, mode)
            logging.debug("%r %r %r" % (pagename, attachmentname, doc))
            if doc:
                page = Page(request, pagename)
                mimetype, att_content = self.contentfilter(filename)

                fields = {}
                fields['wikiname'] = wikiname
                fields['pagename'] = pagename
                fields['attachment'] = attachmentname
                fields['mtime'] = str(mtime)
                fields['revision'] = '0'
                fields['title'] = '%s/%s' % (pagename, attachmentname)
                fields['content'] = att_content
                fields['lang'], fields['stem_lang'] = self._get_languages(page)

                multivalued_fields = {}
                multivalued_fields['mimetype'] = [
                    mt for mt in [mimetype] + mimetype.split('/')
                ]
                multivalued_fields['domain'] = self._get_domains(page)

                self._add_fields_to_document(request, doc, fields,
                                             multivalued_fields)

                try:
                    connection.replace(doc)
                except xapian.Error, err:
                    logging.error(
                        'attachment %r (page %r) could not be updated in index: %s'
                        % (attachmentname, pagename, str(err)))
                else:
                    logging.debug('attachment %r (page %r) updated in index' %
                                  (attachmentname, pagename))
Example #12
def edit_logfile_append(self, pagename, pagefile, rev, action, logname='edit-log', comment=u'', author=u"Scripting Subsystem"):
    glog = editlog.EditLog(self.request, uid_override=author)
    pagelog = Page(self.request, pagename).getPagePath(logname, use_underlay=0, isfile=1)
    llog = editlog.EditLog(self.request, filename=pagelog,
                               uid_override=author)
    mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
    host = '::1'
    extra = u''
    glog.add(self.request, mtime_usecs, rev, action, pagename, host, comment)
    llog.add(self.request, mtime_usecs, rev, action, pagename, host, extra, comment)
    event_logfile(self, pagename, pagefile)
Example #13
def convert_userdir(dir_from, dir_to):
    os.mkdir(dir_to)
    for fname in listdir(dir_from):
        if fname.endswith('.bookmark'):
            bm = open(opj(dir_from, fname)).read().strip()
            bm = str(wikiutil.timestamp2version(float(bm)))
            f = open(opj(dir_to, fname), 'w')
            f.write(bm)
            f.close()
        else:
            copy_file(opj(dir_from, fname), opj(dir_to, fname))
Example #14
def convert_userdir(dir_from, dir_to):
    os.mkdir(dir_to)
    for fname in listdir(dir_from):
        if fname.endswith('.bookmark'):
            bm = open(opj(dir_from, fname)).read().strip()
            bm = str(wikiutil.timestamp2version(float(bm)))
            f = open(opj(dir_to, fname), 'w')
            f.write(bm)
            f.close()
        else:
            copy_file(opj(dir_from, fname), opj(dir_to, fname))
Example #15
File: Xapian.py Project: steveyen/moingo
    def _index_file(self, request, writer, filename, mode='update'):
        """ index a file as it were a page named pagename
            Assumes that the write lock is acquired
        """
        fs_rootpage = 'FS' # XXX FS hardcoded

        try:
            wikiname = request.cfg.interwikiname or 'Self'
            itemid = "%s:%s" % (wikiname, os.path.join(fs_rootpage, filename))
            mtime = os.path.getmtime(filename)
            mtime = wikiutil.timestamp2version(mtime)
            if mode == 'update':
                query = xapidx.RawQuery(xapdoc.makePairForWrite('itemid', itemid))
                enq, mset, docs = writer.search(query, valuesWanted=['pagename', 'attachment', 'mtime', 'wikiname', ])
                if docs:
                    doc = docs[0] # there should be only one
                    uid = doc['uid']
                    docmtime = long(doc['values']['mtime'])
                    updated = mtime > docmtime
                    logging.debug("uid %r: mtime %r > docmtime %r == updated %r" % (uid, mtime, docmtime, updated))
                else:
                    uid = None
                    updated = True
            elif mode == 'add':
                updated = True
            logging.debug("%s %r" % (filename, updated))
            if updated:
                xitemid = xapdoc.Keyword('itemid', itemid)
                mimetype, file_content = self.contentfilter(filename)
                xwname = xapdoc.SortKey('wikiname', request.cfg.interwikiname or "Self")
                xpname = xapdoc.SortKey('pagename', fs_rootpage)
                xattachment = xapdoc.SortKey('attachment', filename) # XXX we should treat files like real pages, not attachments
                xmtime = xapdoc.SortKey('mtime', mtime)
                xrev = xapdoc.SortKey('revision', '0')
                title = " ".join(os.path.join(fs_rootpage, filename).split("/"))
                xtitle = xapdoc.Keyword('title', title)
                xmimetypes = [xapdoc.Keyword('mimetype', mt) for mt in [mimetype, ] + mimetype.split('/')]
                xcontent = xapdoc.TextField('content', file_content)
                doc = xapdoc.Document(textFields=(xcontent, ),
                                      keywords=xmimetypes + [xtitle, xitemid, ],
                                      sortFields=(xpname, xattachment,
                                          xmtime, xwname, xrev, ),
                                     )
                doc.analyzerFactory = getWikiAnalyzerFactory()
                if mode == 'update':
                    logging.debug("%s (replace %r)" % (filename, uid))
                    doc.uid = uid
                    id = writer.index(doc)
                elif mode == 'add':
                    logging.debug("%s (add)" % (filename, ))
                    id = writer.index(doc)
        except (OSError, IOError):
            pass
Example #16
def update_page(page, db):
    id_file = page.getPagePath("xapian.id", check_create=0, isfile=1)
    try:
        f = open(id_file, 'r')
        id = f.read()
        f.close()
        docid = int(id)
    except:
        add_page(page, db)
        return
    print "update id: %s <br>" % docid
    if wikiutil.timestamp2version(os.path.getmtime(id_file)) < page.mtime_usecs():
        db.replace_document(docid, _index_page(page))
Example #17
def convert_textdir(dir_from, dir_to, enc_from, enc_to, is_backupdir=0):
    os.mkdir(dir_to)
    for fname_from in listdir(dir_from):
        if is_backupdir:
            fname, timestamp = fname_from.split('.',1)
            timestamp = str(wikiutil.timestamp2version(float(timestamp)))
        else:
            fname = fname_from
        fname = qf_convert_string(fname, enc_from, enc_to)
        if is_backupdir:
            fname_to = '.'.join([fname, timestamp])
        else:
            fname_to = fname
        convert_file(opj(dir_from, fname_from), opj( dir_to, fname_to),
                     enc_from, enc_to)
Example #18
def convert_textdir(dir_from, dir_to, enc_from, enc_to, is_backupdir=0):
    os.mkdir(dir_to)
    for fname_from in listdir(dir_from):
        if is_backupdir:
            fname, timestamp = fname_from.split('.', 1)
            timestamp = str(wikiutil.timestamp2version(float(timestamp)))
        else:
            fname = fname_from
        fname = qf_convert_string(fname, enc_from, enc_to)
        if is_backupdir:
            fname_to = '.'.join([fname, timestamp])
        else:
            fname_to = fname
        convert_file(opj(dir_from, fname_from), opj(dir_to, fname_to),
                     enc_from, enc_to)
Example #19
    def _index_attachment(self, request, connection, pagename, attachmentname, mode="update"):
        """ Index an attachment

        @param request: request suitable for indexing
        @param connection: the Indexer connection object
        @param pagename: the page name
        @param attachmentname: the attachment's name
        @param mode: 'add' = just add, no checks
                     'update' = check if already in index and update if needed (mtime)
        """
        from MoinMoin.action import AttachFile

        wikiname = request.cfg.interwikiname or u"Self"
        itemid = "%s:%s//%s" % (wikiname, pagename, attachmentname)

        filename = AttachFile.getFilename(request, pagename, attachmentname)
        # check if the file is still there. as we might be doing queued index updates,
        # the file could be gone meanwhile...
        if os.path.exists(filename):
            mtime = wikiutil.timestamp2version(os.path.getmtime(filename))
            doc = self._get_document(connection, itemid, mtime, mode)
            logging.debug("%s %s %r" % (pagename, attachmentname, doc))
            if doc:
                page = Page(request, pagename)
                mimetype, att_content = self.contentfilter(filename)

                fields = {}
                fields["wikiname"] = wikiname
                fields["pagename"] = pagename
                fields["attachment"] = attachmentname
                fields["mtime"] = str(mtime)
                fields["revision"] = "0"
                fields["title"] = "%s/%s" % (pagename, attachmentname)
                fields["content"] = att_content
                fields["lang"], fields["stem_lang"] = self._get_languages(page)

                multivalued_fields = {}
                multivalued_fields["mimetype"] = [mt for mt in [mimetype] + mimetype.split("/")]
                multivalued_fields["domain"] = self._get_domains(page)

                self._add_fields_to_document(request, doc, fields, multivalued_fields)

                connection.replace(doc)
                logging.debug("attachment %s (page %s) updated in index" % (attachmentname, pagename))
        else:
            # attachment file was deleted, remove it from index also
            connection.delete(itemid)
            logging.debug("attachment %s (page %s) removed from index" % (attachmentname, pagename))
Example #20
File: storage.py Project: happytk/moin
 def history(self, request):
     files = self._list_files(request)
     files = sorted(files, key=lambda x: os.path.getmtime(x), reverse=True)
     _usercache = {}
     for filename in files:
         result = editlog.EditLogLine(_usercache)
         result.ed_time_usecs = wikiutil.timestamp2version(os.path.getmtime(filename))
         result.rev = 0
         result.action = 'SAVE'
         result.pagename = wikiutil.quoteWikinameFS(os.path.splitext(os.path.basename(filename))[0].decode(request.cfg.fs_encoding))
         result.addr = ''
         result.hostname = ''
         result.userid = ''
         result.extra = None
         result.comment = ''
         yield result
Example #21
def convert_editlog(log_from, log_to, enc_from, enc_to):
        file_from = open(log_from)
        file_to = open(log_to, "w")
        for line in file_from:
            line = line.replace('\r','')
            line = line.replace('\n','')
            if not line.strip(): # skip empty lines
                continue
            fields = line.split('\t')
            fields[0] = qf_convert_string(fields[0], enc_from, enc_to)
            fields[2] = str(wikiutil.timestamp2version(float(fields[2])))
            if len(fields) < 6:
                fields.append('SAVE')
            fields[5] = convert_string(fields[5], enc_from, enc_to)
            line = '\t'.join(fields) + '\n'
            file_to.write(line)
Example #22
File: storage.py Project: happytk/moin
 def history(self, request):
     _usercache = {}
     for title, rev, date, author, comment in self.hgdb.history():
         if comment.startswith('HgHidden:'):
             continue
         result = editlog.EditLogLine(_usercache)
         result.ed_time_usecs = wikiutil.timestamp2version((date - datetime.datetime(1970,1,1)).total_seconds())
         result.rev = rev
         result.action = 'SAVE'
         result.pagename = wikiutil.unquoteWikiname(title)
         result.addr = ''
         result.hostname = ''
         result.userid = author
         result.extra = None
         result.comment = '' if comment == 'comment' or comment.startswith('MoinEdited:') else comment
         yield result
Example #23
def _addLogEntry(request, action, pagename, filename):
    """ Add an entry to the edit log on uploads and deletes.

        `action` should be "ATTNEW" or "ATTDEL"
    """
    from MoinMoin.logfile import editlog
    t = wikiutil.timestamp2version(time.time())
    fname = wikiutil.url_quote(filename)

    # Write to global log
    log = editlog.EditLog(request)
    log.add(request, t, 99999999, action, pagename, request.remote_addr, fname)

    # Write to local log
    log = editlog.EditLog(request, rootpagename=pagename)
    log.add(request, t, 99999999, action, pagename, request.remote_addr, fname)
Example #24
File: AttachFile.py Project: aahlad/soar
def _addLogEntry(request, action, pagename, filename):
    """ Add an entry to the edit log on uploads and deletes.

        `action` should be "ATTNEW" or "ATTDEL"
    """
    from MoinMoin.logfile import editlog
    t = wikiutil.timestamp2version(time.time())
    fname = wikiutil.url_quote(filename)

    # Write to global log
    log = editlog.EditLog(request)
    log.add(request, t, 99999999, action, pagename, request.remote_addr, fname)

    # Write to local log
    log = editlog.EditLog(request, rootpagename=pagename)
    log.add(request, t, 99999999, action, pagename, request.remote_addr, fname)
Example #25
def _addLogEntry(request, action, pagename, filename):
    """ Add an entry to the edit log on uploads and deletes.

        `action` should be "ATTNEW" or "ATTDEL"
    """
    from MoinMoin.logfile import editlog
    t = wikiutil.timestamp2version(time.time())
    fname = wikiutil.url_quote(filename, want_unicode=True)

    # TODO: for now we simply write 2 logs, maybe better use some multilog stuff
    # Write to global log
    log = editlog.EditLog(request)
    log.add(request, t, 99999999, action, pagename, request.remote_addr, fname)

    # Write to local log
    log = editlog.EditLog(request, rootpagename=pagename)
    log.add(request, t, 99999999, action, pagename, request.remote_addr, fname)
Example #26
def addLogEntry(request, action, pagename, msg):
    # Add an entry to the edit log on adding comments.
    from MoinMoin.logfile import editlog
    t = wikiutil.timestamp2version(time.time())
    msg = unicode(msg)

    pg = Page(request, pagename)
    #rev = pg.current_rev()
    rev = 99999999

    # TODO: for now we simply write 2 logs, maybe better use some multilog stuff
    # Write to global log
    log = editlog.EditLog(request)
    log.add(request, t, rev, action, pagename, request.remote_addr, '', msg)

    # Write to local log
    log = editlog.EditLog(request, rootpagename=pagename)
    log.add(request, t, rev, action, pagename, request.remote_addr, '', msg)
Example #27
def convert_editlog(log_from, log_to, enc_from, enc_to):
    file_from = open(log_from)
    file_to = open(log_to, "w")
    for line in file_from:
        line = line.replace('\r', '')
        line = line.replace('\n', '')
        if not line.strip():  # skip empty lines
            continue
        fields = line.split('\t')
        fields[0] = qf_convert_string(fields[0], enc_from, enc_to)
        fields[2] = str(wikiutil.timestamp2version(float(fields[2])))
        if len(fields) < 6:
            fields.append('')  # comment
        if len(fields) < 7:
            fields.append('SAVE')  # action
        fields[5] = convert_string(fields[5], enc_from, enc_to)
        line = '\t'.join(fields) + '\n'
        file_to.write(line)
Example #28
def addLogEntry(request, action, pagename, msg):
    # Add an entry to the edit log on adding comments.
    from MoinMoin.logfile import editlog
    t = wikiutil.timestamp2version(time.time())
    msg = unicode(msg)

    pg = Page( request, pagename )
    #rev = pg.current_rev()
    rev = 99999999

    # TODO: for now we simply write 2 logs, maybe better use some multilog stuff
    # Write to global log
    log = editlog.EditLog(request)
    log.add(request, t, rev, action, pagename, request.remote_addr, '', msg)

    # Write to local log
    log = editlog.EditLog(request, rootpagename=pagename)
    log.add(request, t, rev, action, pagename, request.remote_addr, '', msg)
Example #29
    def _index_file(self, request, connection, filename, mode='update'):
        """ index files (that are NOT attachments, just arbitrary files)

        @param request: request suitable for indexing
        @param connection: the Indexer connection object
        @param filename: a filesystem file name
        @param mode: 'add' = just add, no checks
                     'update' = check if already in index and update if needed (mtime)
        """
        wikiname = request.cfg.interwikiname or u"Self"
        fs_rootpage = 'FS'  # XXX FS hardcoded

        try:
            itemid = "%s:%s" % (wikiname, os.path.join(fs_rootpage, filename))
            mtime = wikiutil.timestamp2version(os.path.getmtime(filename))

            doc = self._get_document(connection, itemid, mtime, mode)
            logging.debug("%s %r" % (filename, doc))
            if doc:
                mimetype, file_content = self.contentfilter(filename)

                fields = {}
                fields['wikiname'] = wikiname
                fields['pagename'] = fs_rootpage
                fields[
                    'attachment'] = filename  # XXX we should treat files like real pages, not attachments
                fields['mtime'] = str(mtime)
                fields['revision'] = '0'
                fields['title'] = " ".join(
                    os.path.join(fs_rootpage, filename).split("/"))
                fields['content'] = file_content

                multivalued_fields = {}
                multivalued_fields['mimetype'] = [
                    mt for mt in [mimetype] + mimetype.split('/')
                ]

                self._add_fields_to_document(request, doc, fields,
                                             multivalued_fields)

                connection.replace(doc)

        except (OSError, IOError, UnicodeError):
            logging.exception("_index_file crashed:")
Example #30
 def write(self, fname, deleted=False):
     """ write complete edit-log to disk """
     if self.data:
         editlog = self.data.items()
         editlog.sort()
         f = file(
             fname, 'wb'
         )  # write in binary mode, so it stays exactly as we write it, even on windows.
         # the code in MoinMoin.logfile also uses binary mode and writes \n only.
         max_rev = 0
         for key, fields in editlog:
             timestamp, rev, action, pagename, ip, hostname, userid, extra, comment = fields
             if action.startswith('ATT'):
                 try:
                     fname = urllib.unquote(extra).decode('utf-8')
                 except UnicodeDecodeError:
                     fname = urllib.unquote(extra).decode('iso-8859-1')
                 if ('FILE', pagename, fname) in self.renames:
                     fname = self.renames[('FILE', pagename, fname)]
                 extra = urllib.quote(fname.encode('utf-8'))
             if ('PAGE', pagename) in self.renames:
                 pagename = self.renames[('PAGE', pagename)]
             timestamp = str(timestamp)
             if rev != 99999999:
                 max_rev = max(rev, max_rev)
             revstr = '%08d' % rev
             pagename = wikiutil.quoteWikinameFS(pagename)
             fields = timestamp, revstr, action, pagename, ip, hostname, userid, extra, comment
             log_str = '\t'.join(fields) + '\n'
             f.write(log_str)
         if create_rev and not deleted:
             timestamp = str(wikiutil.timestamp2version(time.time()))
             revstr = '%08d' % (max_rev + 1)
             action = 'SAVE'
             ip = '127.0.0.1'
             hostname = 'localhost'
             userid = ''
             extra = ''
             comment = "converted to 1.6 markup"
             fields = timestamp, revstr, action, pagename, ip, hostname, userid, extra, comment
             log_str = '\t'.join(fields) + '\n'
             f.write(log_str)
         f.close()
Example #31
def edit_logfile_append(self,
                        pagename,
                        pagefile,
                        rev,
                        action,
                        logname='edit-log',
                        comment=u'',
                        author=u"Scripting Subsystem"):
    glog = editlog.EditLog(self.request, uid_override=author)
    pagelog = Page(self.request, pagename).getPagePath(logname,
                                                       use_underlay=0,
                                                       isfile=1)
    llog = editlog.EditLog(self.request, filename=pagelog, uid_override=author)
    mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
    host = '::1'
    extra = u''
    glog.add(self.request, mtime_usecs, rev, action, pagename, host, comment)
    llog.add(self.request, mtime_usecs, rev, action, pagename, host, extra,
             comment)
    event_logfile(self, pagename, pagefile)
Example #32
 def write(self, fname, deleted=False):
     """ write complete edit-log to disk """
     if self.data:
         editlog = self.data.items()
         editlog.sort()
         f = file(fname, 'wb') # write in binary mode, so it stays exactly as we write it, even on windows.
                               # the code in MoinMoin.logfile also uses binary mode and writes \n only.
         max_rev = 0
         for key, fields in editlog:
             timestamp, rev, action, pagename, ip, hostname, userid, extra, comment = fields
             if action.startswith('ATT'):
                 try:
                     fname = urllib.unquote(extra).decode('utf-8')
                 except UnicodeDecodeError:
                     fname = urllib.unquote(extra).decode('iso-8859-1')
                 if ('FILE', pagename, fname) in self.renames:
                     fname = self.renames[('FILE', pagename, fname)]
                 extra = urllib.quote(fname.encode('utf-8'))
             if ('PAGE', pagename) in self.renames:
                 pagename = self.renames[('PAGE', pagename)]
             timestamp = str(timestamp)
             if rev != 99999999:
                 max_rev = max(rev, max_rev)
             revstr = '%08d' % rev
             pagename = wikiutil.quoteWikinameFS(pagename)
             fields = timestamp, revstr, action, pagename, ip, hostname, userid, extra, comment
             log_str = '\t'.join(fields) + '\n'
             f.write(log_str)
         if create_rev and not deleted:
             timestamp = str(wikiutil.timestamp2version(time.time()))
             revstr = '%08d' % (max_rev + 1)
             action = 'SAVE'
             ip = '127.0.0.1'
             hostname = 'localhost'
             userid = ''
             extra = ''
             comment = "converted to 1.6 markup"
             fields = timestamp, revstr, action, pagename, ip, hostname, userid, extra, comment
             log_str = '\t'.join(fields) + '\n'
             f.write(log_str)
         f.close()
Example #33
 def history(self, request):
     if not self.user or (request.user.valid and request.user.name == self.user):
         files = self._list_files(request)
         # files = sorted(files, lambda x,y:os.path.getmtime(x) < os.path.getmtime(y), reverse=True)
         files = sorted(files, key=lambda x:os.path.getmtime(x), reverse=True)
         _usercache = {}
         for filename in files:
             result = editlog.EditLogLine(_usercache)
             result.ed_time_usecs = wikiutil.timestamp2version(os.path.getmtime(filename))
             result.rev = 0
             result.action = 'SAVE'
             result.pagename = wikiutil.unquoteWikiname(self.prefix + os.path.splitext(os.path.basename(filename))[0])
             result.addr = ''
             result.hostname = ''
             if self.user:
                 result.userid = request.user.id #restrict_user is self
             else:
                 result.userid = ''
             result.extra = None
             result.comment = ''
             yield result
Example #34
 def write(self, fname, deleted=False):
     """ write complete edit-log to disk """
     if self.data:
         editlog = self.data.items()
         editlog.sort()
         f = file(fname, "w")
         max_rev = 0
         for key, fields in editlog:
             timestamp, rev, action, pagename, ip, hostname, userid, extra, comment = fields
             if action.startswith("ATT"):
                 try:
                     fname = urllib.unquote(extra).decode("utf-8")
                 except UnicodeDecodeError:
                     fname = urllib.unquote(extra).decode("iso-8859-1")
                 if ("FILE", pagename, fname) in self.renames:
                     fname = self.renames[("FILE", pagename, fname)]
                 extra = urllib.quote(fname.encode("utf-8"))
             if ("PAGE", pagename) in self.renames:
                 pagename = self.renames[("PAGE", pagename)]
             timestamp = str(timestamp)
             if rev != 99999999:
                 max_rev = max(rev, max_rev)
             revstr = "%08d" % rev
             pagename = wikiutil.quoteWikinameFS(pagename)
             fields = timestamp, revstr, action, pagename, ip, hostname, userid, extra, comment
             log_str = "\t".join(fields) + "\n"
             f.write(log_str)
         if create_rev and not deleted:
             timestamp = str(wikiutil.timestamp2version(time.time()))
             revstr = "%08d" % (max_rev + 1)
             action = "SAVE"
             ip = "127.0.0.1"
             hostname = "localhost"
             userid = ""
             extra = ""
             comment = "converted to 1.6 markup"
             fields = timestamp, revstr, action, pagename, ip, hostname, userid, extra, comment
             log_str = "\t".join(fields) + "\n"
             f.write(log_str)
         f.close()
Example #35
    def add(self, request, eventtype, values=None, add_http_info=1, mtime_usecs=None):
        """ Write an event of type `eventtype, with optional key/value
            pairs appended (i.e. you have to pass a dict).
        """
        if request.isSpiderAgent:
            return

        if mtime_usecs is None:
            mtime_usecs = wikiutil.timestamp2version(time.time())

        if values is None:
            values = {}
        if add_http_info:
            # All these are ascii
            for key in ["remote_addr", "http_user_agent", "http_referer"]:
                value = getattr(request, key, "")
                if value:
                    # Save those http headers in UPPERcase
                    values[key.upper()] = value
        # Encode values in a query string TODO: use more readable format
        values = wikiutil.makeQueryString(values, want_unicode=True)
        self._add(u"%d\t%s\t%s\n" % (mtime_usecs, eventtype, values))
Example #36
def gather_editlog(dir_from, el_from):
    """ this gathers everything that is in edit-log into internal
        data structures, converting to the future format
    """
    if not os.path.exists(el_from):
        return
    for l in open(el_from):
        data = l.rstrip('\n').split('\t')
        origlen = len(data)
        while len(data) < 7:
            data.append('')
        (pagename, ip, timestamp, host, id, comment, action) = data
        if origlen == 6:
            action = comment
            comment = ''

        extra = ''
        if action == 'SAVE/REVERT':  # we forgot to convert that in mig4
            ts = long(comment)  # must be long for py 2.2.x
            if ts < 4000000000:  # UNIX timestamp (secs)
                extra = str(wikiutil.timestamp2version(ts))
            else:  # usecs timestamp
                extra = str(ts)
            # later we convert this timestamp to a revision number
            comment = ''
        if action in [
                'ATTNEW',
                'ATTDRW',
                'ATTDEL',
        ]:
            extra = comment  # filename
            comment = ''  # so we can use comments on ATT* in future

        timestamp = long(timestamp)  # must be long for py 2.2.x
        data = [timestamp, '', action, pagename, ip, host, id, extra, comment]

        entry = info.get(pagename, {})
        entry[timestamp] = [None, data]
        info[pagename] = entry
Example #37
    def _index_file(self, request, connection, filename, mode='update'):
        """ index files (that are NOT attachments, just arbitrary files)

        @param request: request suitable for indexing
        @param connection: the Indexer connection object
        @param filename: a filesystem file name
        @param mode: 'add' = just add, no checks
                     'update' = check if already in index and update if needed (mtime)
        """
        wikiname = request.cfg.interwikiname or u"Self"
        fs_rootpage = 'FS' # XXX FS hardcoded

        try:
            itemid = "%s:%s" % (wikiname, os.path.join(fs_rootpage, filename))
            mtime = wikiutil.timestamp2version(os.path.getmtime(filename))

            doc = self._get_document(connection, itemid, mtime, mode)
            logging.debug("%s %r" % (filename, doc))
            if doc:
                mimetype, file_content = self.contentfilter(filename)

                fields = {}
                fields['wikiname'] = wikiname
                fields['pagename'] = fs_rootpage
                fields['attachment'] = filename # XXX we should treat files like real pages, not attachments
                fields['mtime'] = str(mtime)
                fields['revision'] = '0'
                fields['title'] = " ".join(os.path.join(fs_rootpage, filename).split("/"))
                fields['content'] = file_content

                multivalued_fields = {}
                multivalued_fields['mimetype'] = [mt for mt in [mimetype] + mimetype.split('/')]

                self._add_fields_to_document(request, doc, fields, multivalued_fields)

                connection.replace(doc)

        except (OSError, IOError, UnicodeError):
            logging.exception("_index_file crashed:")
Example #38
    def add(self, request, eventtype, values=None, add_http_info=1, mtime_usecs=None):
        """ Write an event of type `eventtype, with optional key/value
        pairs appended (i.e. you have to pass a dict).
        """
        # Don't log spiders XXX TODO: does it make sense?
        if web.isSpiderAgent(request):
            return
        
        if mtime_usecs is None:
            mtime_usecs = wikiutil.timestamp2version(time.time())

        if values is None:
            values = {}
        if add_http_info:
            # All these are ascii
            for key in ['remote_addr', 'http_user_agent', 'http_referer']:
                value = getattr(request, key, '')
                if value:
                    # Save those http headers in UPPERcase
                    values[key.upper()] = value
        # Encode values in a query string TODO: use more readable format
        values = web.makeQueryString(values)
        self._add(u"%d\t%s\t%s\n" % (mtime_usecs, eventtype, values))
Example #39
class PluginScript(MoinScript):
    """Purpose:
========
This script imports the wiki page from given file into the wiki.

Detailed Instructions:
======================
General syntax: moin [options] import wikipage [wikipage-options]

[options] usually should be:
    --config-dir=/path/to/cfg --wiki-url=http://wiki.example.org/ --page=Page
"""
    def __init__(self, argv, def_values):
        MoinScript.__init__(self, argv, def_values)
        self.parser.add_option('--acl',
                               dest='acl',
                               default='',
                               metavar='ACL',
                               help='Set a specific ACL for the wiki page')
        self.parser.add_option(
            '--author',
            dest='author',
            metavar='AUTHOR',
            default='PageImporter',
            help='Use AUTHOR for edit history / RecentChanges')
        self.parser.add_option(
            '--mtime',
            dest='mtime',
            metavar='mtime',
            default=None,
            help=
            'Use TIME (YYYY-MM-DD HH:MM:SS) in UTC for edit history / RecentChanges. Default value is the current UTC time'
        )
        self.parser.add_option('--comment',
                               dest='comment',
                               metavar='COMMENT',
                               default='',
                               help='COMMENT for edit history / RecentChanges')
        self.parser.add_option('--file',
                               dest='file',
                               default='',
                               metavar='FILE',
                               help='Read the wiki page from the given file')
        self.parser.add_option(
            '--no-backup',
            dest='revision_backup',
            default=True,
            action='store_false',
            help="Suppress making a page backup per revision")
        self._update_option_help(
            '--page', 'Name of the wiki page which should be imported')

    def mainloop(self):
        self.init_request()
        request = self.request
        request.user.may = IAmRoot()

        if not self.options.page:
            fatal('You must specify a wiki page name (--page=Page)!')
        if not self.options.file:
            fatal('You must specify a FILE to read from (--file=FILE)!')

        try:
            fileObj = open(self.options.file, 'rb')
        except IOError, err:
            fatal(str(err))
        page_content = decodeUnknownInput(fileObj.read()).rstrip()
        fileObj.close()

        if not self.options.acl:
            acl = ''
        else:
            acl = '#acl %s\n' % self.options.acl
        comment = clean_input(self.options.comment)

        if self.options.mtime:
            mtime = timestamp2version(
                calendar.timegm(
                    time.strptime(self.options.mtime, "%Y-%m-%d %H:%M:%S")))
        else:
            mtime = timestamp2version(time.time())

        pe = PageEditor(request,
                        self.options.page,
                        do_editor_backup=0,
                        uid_override=self.options.author,
                        mtime=mtime,
                        do_revision_backup=int(self.options.revision_backup))
        try:
            pe.saveText(acl + page_content, 0, comment=comment)
        except PageEditor.Unchanged:
            log("info: wikipage was not modified - ignored update.")
        except PageEditor.SaveError, err:
            log("error: %r" % err)
Example #40
def event_logfile(self, pagename, pagefile):
    # add event log entry
    eventtype = 'SAVENEW'
    mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
    elog = eventlog.EventLog(self.request)
    elog.add(self.request, eventtype, {'pagename': pagename}, 1, mtime_usecs)
Example #41
File: Xapian.py Project: steveyen/moingo
    def _index_page(self, writer, page, mode='update'):
        """ Index a page - assumes that the write lock is acquired

        @arg writer: the index writer object
        @arg page: a page object
        @arg mode: 'add' = just add, no checks
                   'update' = check if already in index and update if needed (mtime)
        """
        request = page.request
        wikiname = request.cfg.interwikiname or "Self"
        pagename = page.page_name
        mtime = page.mtime_usecs()
        revision = str(page.get_real_rev())
        itemid = "%s:%s:%s" % (wikiname, pagename, revision)
        author = page.edit_info().get('editor', '?')
        # XXX: Hack until we get proper metadata
        language, stem_language = self._get_languages(page)
        categories = self._get_categories(page)
        domains = tuple(self._get_domains(page))
        updated = False

        if mode == 'update':
            # from #xapian: if you generate a special "unique id" term,
            # you can just call database.replace_document(uid_term, doc)
            # -> done in xapwrap.index.Index.index()
            query = xapidx.RawQuery(xapdoc.makePairForWrite('itemid', itemid))
            enq, mset, docs = writer.search(query, valuesWanted=['pagename', 'attachment', 'mtime', 'wikiname', ])
            if docs:
                doc = docs[0] # there should be only one
                uid = doc['uid']
                docmtime = long(doc['values']['mtime'])
                updated = mtime > docmtime
                logging.debug("uid %r: mtime %r > docmtime %r == updated %r" % (uid, mtime, docmtime, updated))
            else:
                uid = None
                updated = True
        elif mode == 'add':
            updated = True
        logging.debug("%s %r" % (pagename, updated))
        if updated:
            xwname = xapdoc.SortKey('wikiname', wikiname)
            xpname = xapdoc.SortKey('pagename', pagename)
            xattachment = xapdoc.SortKey('attachment', '') # this is a real page, not an attachment
            xmtime = xapdoc.SortKey('mtime', str(mtime))
            xrev = xapdoc.SortKey('revision', revision)
            xtitle = xapdoc.TextField('title', pagename, True) # prefixed
            mimetype = 'text/%s' % page.pi['format']  # XXX improve this
            xkeywords = [xapdoc.Keyword('itemid', itemid),
                    xapdoc.Keyword('lang', language),
                    xapdoc.Keyword('stem_lang', stem_language),
                    xapdoc.Keyword('fulltitle', pagename),
                    xapdoc.Keyword('revision', revision),
                    xapdoc.Keyword('author', author),
                ] + \
                [xapdoc.Keyword('mimetype', mt) for mt in [mimetype, ] + mimetype.split('/')]

            for pagelink in page.getPageLinks(request):
                xkeywords.append(xapdoc.Keyword('linkto', pagelink))
            for category in categories:
                xkeywords.append(xapdoc.Keyword('category', category))
            for domain in domains:
                xkeywords.append(xapdoc.Keyword('domain', domain))
            xcontent = xapdoc.TextField('content', page.get_raw_body())
            doc = xapdoc.Document(textFields=(xcontent, xtitle),
                                  keywords=xkeywords,
                                  sortFields=(xpname, xattachment,
                                      xmtime, xwname, xrev),
                                 )
            doc.analyzerFactory = getWikiAnalyzerFactory(request,
                    stem_language)

            if mode == 'update':
                logging.debug("%s (replace %r)" % (pagename, uid))
                doc.uid = uid
                id = writer.index(doc)
            elif mode == 'add':
                logging.debug("%s (add)" % (pagename, ))
                id = writer.index(doc)

        from MoinMoin.action import AttachFile

        attachments = AttachFile._get_files(request, pagename)
        for att in attachments:
            filename = AttachFile.getFilename(request, pagename, att)
            att_itemid = "%s:%s//%s" % (wikiname, pagename, att)
            mtime = wikiutil.timestamp2version(os.path.getmtime(filename))
            if mode == 'update':
                query = xapidx.RawQuery(xapdoc.makePairForWrite('itemid', att_itemid))
                enq, mset, docs = writer.search(query, valuesWanted=['pagename', 'attachment', 'mtime', ])
                logging.debug("##%r %r" % (filename, docs))
                if docs:
                    doc = docs[0] # there should be only one
                    uid = doc['uid']
                    docmtime = long(doc['values']['mtime'])
                    updated = mtime > docmtime
                    logging.debug("uid %r: mtime %r > docmtime %r == updated %r" % (uid, mtime, docmtime, updated))
                else:
                    uid = None
                    updated = True
            elif mode == 'add':
                updated = True
            logging.debug("%s %s %r" % (pagename, att, updated))
            if updated:
                xatt_itemid = xapdoc.Keyword('itemid', att_itemid)
                xpname = xapdoc.SortKey('pagename', pagename)
                xwname = xapdoc.SortKey('wikiname', request.cfg.interwikiname or "Self")
                xattachment = xapdoc.SortKey('attachment', att) # this is an attachment, store its filename
                xmtime = xapdoc.SortKey('mtime', mtime)
                xrev = xapdoc.SortKey('revision', '0')
                xtitle = xapdoc.Keyword('title', '%s/%s' % (pagename, att))
                xlanguage = xapdoc.Keyword('lang', language)
                xstem_language = xapdoc.Keyword('stem_lang', stem_language)
                mimetype, att_content = self.contentfilter(filename)
                xmimetypes = [xapdoc.Keyword('mimetype', mt) for mt in [mimetype, ] + mimetype.split('/')]
                xcontent = xapdoc.TextField('content', att_content)
                xtitle_txt = xapdoc.TextField('title',
                        '%s/%s' % (pagename, att), True)
                xfulltitle = xapdoc.Keyword('fulltitle', pagename)
                xdomains = [xapdoc.Keyword('domain', domain)
                        for domain in domains]
                doc = xapdoc.Document(textFields=(xcontent, xtitle_txt),
                                      keywords=xdomains + xmimetypes + [xatt_itemid,
                                          xtitle, xlanguage, xstem_language,
                                          xfulltitle, ],
                                      sortFields=(xpname, xattachment, xmtime,
                                          xwname, xrev, ),
                                     )
                doc.analyzerFactory = getWikiAnalyzerFactory(request,
                        stem_language)
                if mode == 'update':
                    logging.debug("%s (replace %r)" % (pagename, uid))
                    doc.uid = uid
                    id = writer.index(doc)
                elif mode == 'add':
                    logging.debug("%s (add)" % (pagename, ))
                    id = writer.index(doc)
Example #42
def macro_RecentChanges(macro, abandoned=False):
    # handle abandoned keyword
    if abandoned:
        return print_abandoned(macro)

    request = macro.request
    _ = request.getText
    output = []
    user = request.user
    page = macro.formatter.page
    pagename = page.page_name

    d = {}
    d["page"] = page
    d["q_page_name"] = wikiutil.quoteWikinameURL(pagename)

    log = editlog.EditLog(request)

    tnow = time.time()
    msg = ""

    # get bookmark from valid user
    bookmark_usecs = request.user.getBookmark() or 0

    # add bookmark link if valid user
    d["rc_curr_bookmark"] = None
    d["rc_update_bookmark"] = None
    if request.user.valid:
        d["rc_curr_bookmark"] = _("(no bookmark set)")
        if bookmark_usecs:
            currentBookmark = wikiutil.version2timestamp(bookmark_usecs)
            currentBookmark = user.getFormattedDateTime(currentBookmark)
            currentBookmark = _("(currently set to %s)") % currentBookmark
            deleteBookmark = page.link_to(
                request, _("Delete bookmark"), querystr={"action": "bookmark", "time": "del"}, rel="nofollow"
            )
            d["rc_curr_bookmark"] = currentBookmark + " " + deleteBookmark

        version = wikiutil.timestamp2version(tnow)
        d["rc_update_bookmark"] = page.link_to(
            request, _("Set bookmark"), querystr={"action": "bookmark", "time": "%d" % version}, rel="nofollow"
        )

    # set max size in days
    max_days = min(int(request.values.get("max_days", 0)), _DAYS_SELECTION[-1])
    # default to _MAX_DAYS for users without a bookmark
    if not max_days and not bookmark_usecs:
        max_days = _MAX_DAYS
    d["rc_max_days"] = max_days

    # give known user the option to extend the normal display
    if request.user.valid:
        d["rc_days"] = _DAYS_SELECTION
    else:
        d["rc_days"] = []

    output.append(request.theme.recentchanges_header(d))

    pages = {}
    ignore_pages = {}

    today = request.user.getTime(tnow)[0:3]
    this_day = today
    day_count = 0

    for line in log.reverse():
        line.time_tuple = request.user.getTime(wikiutil.version2timestamp(line.ed_time_usecs))
        day = line.time_tuple[0:3]
        hilite = line.ed_time_usecs > (bookmark_usecs or line.ed_time_usecs)

        if this_day != day or (not hilite and not max_days):
            # new day or bookmark reached: print out stuff
            this_day = day
            for p in pages:
                ignore_pages[p] = None
            pages = filter_pages(request, pages.values())
            pages.sort(cmp_lines)
            pages.reverse()

            if len(pages) > 0:
                if request.user.valid:
                    bmtime = pages[0][0].ed_time_usecs
                    d["bookmark_link_html"] = page.link_to(
                        request,
                        _("Set bookmark"),
                        querystr={"action": "bookmark", "time": "%d" % bmtime},
                        rel="nofollow",
                    )
                else:
                    d["bookmark_link_html"] = None
                d["date"] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
                output.append(request.theme.recentchanges_daybreak(d))

                for p in pages:
                    output.append(format_page_edits(macro, p, bookmark_usecs))

                day_count += 1
                if max_days and (day_count >= max_days):
                    break

            pages = {}

        elif this_day != day:
            # new day but no changes
            this_day = day

        if line.pagename in ignore_pages:
            continue

        # end listing by default if user has a bookmark and we reached it
        if not max_days and not hilite:
            msg = _("[Bookmark reached]")
            break

        if line.pagename in pages:
            pages[line.pagename].append(line)
        else:
            pages[line.pagename] = [line]
    else:
        # end of loop reached: print out stuff
        # XXX duplicated code from above
        # but above does not trigger if we have the first day in wiki history
        for p in pages:
            ignore_pages[p] = None
        pages = filter_pages(request, pages.values())
        pages.sort(cmp_lines)
        pages.reverse()

        if len(pages) > 0:
            if request.user.valid:
                bmtime = pages[0][0].ed_time_usecs
                d["bookmark_link_html"] = page.link_to(
                    request, _("Set bookmark"), querystr={"action": "bookmark", "time": "%d" % bmtime}, rel="nofollow"
                )
            else:
                d["bookmark_link_html"] = None
            d["date"] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
            output.append(request.theme.recentchanges_daybreak(d))

            for p in pages:
                output.append(format_page_edits(macro, p, bookmark_usecs))

    d["rc_msg"] = msg
    output.append(request.theme.recentchanges_footer(d))

    return "".join(output)
Example #43
0
def event_logfile(self, pagename, pagefile):
    # add event log entry
    eventtype = 'SAVENEW'
    mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
    elog = eventlog.EventLog(self.request)
    elog.add(self.request, eventtype, {'pagename': pagename}, 1, mtime_usecs)
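
All of these examples move between wall-clock seconds and MoinMoin's microsecond "version" timestamps. A sketch of the conversion pair as it is used here (assumed to match wikiutil's behaviour, not copied from it):

def timestamp2version(ts):
    return long(ts * 1000000)   # seconds (int or float) -> microsecond "version"

def version2timestamp(v):
    return v / 1000000.0        # microsecond "version" -> seconds (float)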
Example #44
0
def gather_pagedirs(dir_from, is_backupdir=0):
    """ this gathers information from the pagedirs, i.e. text and backup
        files (and also the local editlog) and tries to merge/synchronize
        with the information gathered from editlog
    """
    global pagelist
    pagelist = listdir(dir_from)
    for pagename in pagelist:
        editlog_from = opj(dir_from, pagename, 'edit-log')
        gather_editlog(dir_from, editlog_from)

        entry = info.get(pagename, {})

        loglist = []  # editlog timestamps of page revisions
        for ts, data in entry.items():
            if data[1][2] in ['SAVE', 'SAVENEW', 'SAVE/REVERT', ]:
                loglist.append(ts)
        loglist.sort()
        lleftover = loglist[:]

        # remember the latest log entry
        if lleftover:
            llatest = lleftover[-1]
        else:
            llatest = None

        backupdir_from = opj(dir_from, pagename, 'backup')
        if os.path.exists(backupdir_from):
            backuplist = listdir(backupdir_from)
            bleftover = backuplist[:]
            for bfile in backuplist:
                backup_from = opj(backupdir_from, bfile)
                ts = long(bfile)
                if ts in loglist:  # we have an editlog entry, exact match
                    entry[ts][0] = backup_from
                    lleftover.remove(ts)
                    bleftover.remove(bfile)

        text_from = opj(dir_from, pagename, 'text')
        found_text = False
        if os.path.exists(text_from):  # we have a text file, it should match latest log entry
            exists[pagename] = True
            mtime = os.path.getmtime(text_from)
            if llatest and llatest in lleftover:
                ts = llatest
                if abs(wikiutil.timestamp2version(mtime) - ts) < 2000000:  # less than two seconds diff
                    entry[ts][0] = text_from
                    lleftover.remove(ts)
                    found_text = True
            else:  # we have no log entries left 8(
                ts = wikiutil.timestamp2version(mtime)
                data = [
                    ts, '', 'SAVE', pagename, '', '', '', '',
                    'missing editlog entry for this page version'
                ]
                entry[ts] = [text_from, data]
        else:
            # this page was maybe deleted, so we remember for later:
            exists[pagename] = False
            if llatest in lleftover:  # if a page is deleted, the last log entry has no file
                entry[llatest][0] = None
                lleftover.remove(llatest)

        if os.path.exists(backupdir_from):
            backuplist = listdir(backupdir_from)
            for bfile in backuplist:
                if not bfile in bleftover: continue
                backup_from = opj(backupdir_from, bfile)
                bts = long(bfile)  # must be long for py 2.2.x
                for ts in lleftover:
                    tdiff = abs(bts - ts)
                    if tdiff < 2000000:  # editlog, inexact match
                        entry[ts][0] = backup_from
                        lleftover.remove(ts)
                        bleftover.remove(bfile)
                    elif 3599000000 <= tdiff <= 3601000000:  # editlog, win32 daylight saving bug
                        entry[ts][0] = backup_from
                        lleftover.remove(ts)
                        bleftover.remove(bfile)
                        print "Warning: Win32 daylight saving bug encountered & fixed!"

            if len(bleftover) == 1 and len(lleftover) == 1:  # only 1 left, must be this
                backup_from = opj(backupdir_from, bleftover[0])
                entry[lleftover[0]][0] = backup_from
                lleftover = []
                bleftover = []

            # fake some log entries
            for bfile in bleftover:
                backup_from = opj(backupdir_from, bfile)
                bts = long(bfile)  # must be long for py 2.2.x
                data = [
                    bts, '', 'SAVE', pagename, '', '', '', '',
                    'missing editlog entry for this page version'
                ]
                entry[bts] = [backup_from, data]

        # check if we still haven't matched the "text" file
        if not found_text and os.path.exists(text_from):
            if llatest in lleftover:  # latest log entry still free
                entry[llatest][0] = text_from  # take it. do not care about mtime of file.
                lleftover.remove(llatest)
            else:  # log for "text" file is missing or latest was taken by other rev 8(
                mtime = os.path.getmtime(text_from)
                ts = wikiutil.timestamp2version(mtime)  # take mtime, we have nothing better
                data = [
                    ts, '', 'SAVE', pagename, '', '', '', '',
                    'missing editlog entry for this page version'
                ]
                entry[ts] = [text_from, data]

        # delete unmatching log entries
        for ts in lleftover:
            #print "XXX Deleting leftover log entry: %r" % entry[ts]
            del entry[ts]

        info[pagename] = entry
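
The matching above relies on two fixed tolerances, both taken from the code: a backup filename within 2 seconds (2,000,000 µs) of an edit-log timestamp counts as the same revision, and a difference of roughly one hour is attributed to the win32 daylight-saving bug. A small sketch of that classification (the helper itself is illustrative, not part of the migration script):

USEC_PER_SEC = 1000000

def classify_backup_tdiff(tdiff_usecs):
    if tdiff_usecs < 2 * USEC_PER_SEC:
        return 'match'            # same revision, clocks within ~2 seconds
    if 3599 * USEC_PER_SEC <= tdiff_usecs <= 3601 * USEC_PER_SEC:
        return 'dst-bug match'    # off by about one hour: win32 DST bug
    return 'no match'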
Example #45
0
def gather_pagedirs(dir_from, is_backupdir=0):
    """ this gathers information from the pagedirs, i.e. text and backup
        files (and also the local editlog) and tries to merge/synchronize
        with the information gathered from editlog
    """
    global pagelist
    pagelist = listdir(dir_from)
    for pagename in pagelist:
        editlog_from = opj(dir_from, pagename, 'edit-log')
        gather_editlog(dir_from, editlog_from)
         
        entry = info.get(pagename, {})

        loglist = [] # editlog timestamps of page revisions
        for ts,data in entry.items():
            if data[1][2] in ['SAVE','SAVENEW','SAVE/REVERT',]:
                loglist.append(ts)
        loglist.sort()
        lleftover = loglist[:]
        
        # remember the latest log entry
        if lleftover:
            llatest = lleftover[-1]
        else:
            llatest = None
            
        backupdir_from = opj(dir_from, pagename, 'backup')
        if os.path.exists(backupdir_from):
            backuplist = listdir(backupdir_from)
            bleftover = backuplist[:]
            for bfile in backuplist:
                backup_from = opj(backupdir_from, bfile)
                ts = long(bfile)
                if ts in loglist: # we have an editlog entry, exact match
                    entry[ts][0] = backup_from
                    lleftover.remove(ts)
                    bleftover.remove(bfile)
            
        text_from = opj(dir_from, pagename, 'text')
        found_text = False
        if os.path.exists(text_from): # we have a text file, it should match latest log entry
            exists[pagename] = True
            mtime = os.path.getmtime(text_from)
            if llatest and llatest in lleftover:
                ts = llatest
            if abs(wikiutil.timestamp2version(mtime) - ts) < 2000000: # less than two seconds diff
                    entry[ts][0] = text_from
                    lleftover.remove(ts)
                    found_text = True
            else: # we have no log entries left 8(
                ts = wikiutil.timestamp2version(mtime)
                data = [ts,'','SAVE', pagename,'','','','','missing editlog entry for this page version']
                entry[ts] = [text_from, data]
        else:
            # this page was maybe deleted, so we remember for later:
            exists[pagename] = False
            if llatest in lleftover: # if a page is deleted, the last log entry has no file
                entry[llatest][0] = None
                lleftover.remove(llatest)
                        
        if os.path.exists(backupdir_from):
            backuplist = listdir(backupdir_from)
            for bfile in backuplist:
                if not bfile in bleftover: continue
                backup_from = opj(backupdir_from, bfile)
                bts = long(bfile) # must be long for py 2.2.x
                for ts in lleftover:
                    tdiff = abs(bts-ts)
                    if tdiff < 2000000: # editlog, inexact match
                        entry[ts][0] = backup_from
                        lleftover.remove(ts)
                        bleftover.remove(bfile)
                    elif 3599000000 <= tdiff <= 3601000000: # editlog, win32 daylight saving bug
                        entry[ts][0] = backup_from
                        lleftover.remove(ts)
                        bleftover.remove(bfile)
                        print "Warning: Win32 daylight saving bug encountered & fixed!"
                        
            if len(bleftover) == 1 and len(lleftover) == 1: # only 1 left, must be this
                backup_from = opj(backupdir_from, bleftover[0])
                entry[lleftover[0]][0] = backup_from
                lleftover = []
                bleftover = []
            
            # fake some log entries
            for bfile in bleftover:
                backup_from = opj(backupdir_from, bfile)
                bts = long(bfile) # must be long for py 2.2.x
                data = [bts,'','SAVE',pagename,'','','','','missing editlog entry for this page version']
                entry[bts] = [backup_from, data]
                
        # check if we still haven't matched the "text" file
        if not found_text and os.path.exists(text_from):
            if llatest in lleftover: # latest log entry still free
                entry[llatest][0] = text_from # take it. do not care about mtime of file.
                lleftover.remove(llatest)
            else: # log for "text" file is missing or latest was taken by other rev 8(
                mtime = os.path.getmtime(text_from)
                ts = wikiutil.timestamp2version(mtime) # take mtime, we have nothing better
                data = [ts,'','SAVE', pagename,'','','','','missing editlog entry for this page version']
                entry[ts] = [text_from, data]
                
        # delete unmatching log entries
        for ts in lleftover:
            #print "XXX Deleting leftover log entry: %r" % entry[ts]
            del entry[ts]
        
        info[pagename] = entry
Example #46
0
def convert_ts(ts_from):
    if ts_from > 5000000000:  # far more than 32bits?
        ts_to = ts_from  # we already have usec kind of timestamp
    else:
        ts_to = wikiutil.timestamp2version(ts_from)
    return long(ts_to)  # must be long for py 2.2.x
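
Usage sketch: convert_ts() accepts either an old seconds-based timestamp or one that is already in microseconds, and always returns the microsecond form (the values below are illustrative):

print convert_ts(1199145600)          # seconds -> 1199145600000000
print convert_ts(1199145600000000L)   # already microseconds -> returned as-is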
Example #47
0
def macro_RecentChanges(macro, abandoned=False):
    # handle abandoned keyword
    if abandoned:
        return print_abandoned(macro)

    request = macro.request
    _ = request.getText
    output = []
    user = request.user
    page = macro.formatter.page
    pagename = page.page_name

    d = {}
    d['page'] = page
    d['q_page_name'] = wikiutil.quoteWikinameURL(pagename)

    log = editlog.EditLog(request)

    tnow = time.time()
    msg = ""

    # get bookmark from valid user
    bookmark_usecs = request.user.getBookmark() or 0

    # add bookmark link if valid user
    d['rc_curr_bookmark'] = None
    d['rc_update_bookmark'] = None
    if request.user.valid:
        d['rc_curr_bookmark'] = _('(no bookmark set)')
        if bookmark_usecs:
            currentBookmark = wikiutil.version2timestamp(bookmark_usecs)
            currentBookmark = user.getFormattedDateTime(currentBookmark)
            currentBookmark = _('(currently set to %s)') % currentBookmark
            deleteBookmark = page.link_to(request, _("Delete bookmark"), querystr={'action': 'bookmark', 'time': 'del'}, rel='nofollow')
            d['rc_curr_bookmark'] = currentBookmark + ' ' + deleteBookmark

        version = wikiutil.timestamp2version(tnow)
        d['rc_update_bookmark'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % version}, rel='nofollow')

    # set max size in days
    max_days = min(int(request.values.get('max_days', 0)), _DAYS_SELECTION[-1])
    # default to _MAX_DAYS for users without a bookmark
    if not max_days and not bookmark_usecs:
        max_days = _MAX_DAYS
    d['rc_max_days'] = max_days

    # give known user the option to extend the normal display
    if request.user.valid:
        d['rc_days'] = _DAYS_SELECTION
    else:
        d['rc_days'] = []

    output.append(request.theme.recentchanges_header(d))

    pages = {}
    ignore_pages = {}

    today = request.user.getTime(tnow)[0:3]
    this_day = today
    day_count = 0

    for line in log.reverse():

        if not request.user.may.read(line.pagename):
            continue

        line.time_tuple = request.user.getTime(wikiutil.version2timestamp(line.ed_time_usecs))
        day = line.time_tuple[0:3]
        hilite = line.ed_time_usecs > (bookmark_usecs or line.ed_time_usecs)

        if ((this_day != day or (not hilite and not max_days))) and len(pages) > 0:
            # new day or bookmark reached: print out stuff
            this_day = day
            for p in pages:
                ignore_pages[p] = None
            pages = pages.values()
            pages.sort(cmp_lines)
            pages.reverse()

            if request.user.valid:
                bmtime = pages[0][0].ed_time_usecs
                d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
            else:
                d['bookmark_link_html'] = None
            d['date'] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
            output.append(request.theme.recentchanges_daybreak(d))

            for p in pages:
                output.append(format_page_edits(macro, p, bookmark_usecs))
            pages = {}
            day_count += 1
            if max_days and (day_count >= max_days):
                break

        elif this_day != day:
            # new day but no changes
            this_day = day

        if line.pagename in ignore_pages:
            continue

        # end listing by default if user has a bookmark and we reached it
        if not max_days and not hilite:
            msg = _('[Bookmark reached]')
            break

        if line.pagename in pages:
            pages[line.pagename].append(line)
        else:
            pages[line.pagename] = [line]
    else:
        if len(pages) > 0:
            # end of loop reached: print out stuff
            # XXX duplicated code from above
            # but above does not trigger if we have the first day in wiki history
            for p in pages:
                ignore_pages[p] = None
            pages = pages.values()
            pages.sort(cmp_lines)
            pages.reverse()

            if request.user.valid:
                bmtime = pages[0][0].ed_time_usecs
                d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
            else:
                d['bookmark_link_html'] = None
            d['date'] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
            output.append(request.theme.recentchanges_daybreak(d))

            for p in pages:
                output.append(format_page_edits(macro, p, bookmark_usecs))


    d['rc_msg'] = msg
    output.append(request.theme.recentchanges_footer(d))

    return ''.join(output)
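
A side note on the control flow shared by both RecentChanges variants: the trailing else: belongs to the for loop and runs only when the loop finishes without hitting break, i.e. when the whole edit log was consumed before the bookmark or the day limit stopped the listing. A tiny illustration:

for item in ['a', 'b', 'c']:
    if item == 'stop':
        break
else:
    print "loop exhausted without break: flush the last day's collected pages"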