Example No. 1
 def testHitsForAllAndFilter(self):
     """ macro test: 'all=True, event_type=SAVEPAGE' for Hits (all pages are counted for SAVEPAGE)"""
     eventlog.EventLog(self.request).add(self.request, 'SAVEPAGE',
                                         {'pagename': 'WikiSandBox'})
     eventlog.EventLog(self.request).add(self.request, 'SAVEPAGE',
                                         {'pagename': self.pagename})
     result = self._test_macro(u'Hits', u'all=True, event_type=SAVEPAGE')
     self._cleanStats()
     assert result == "2"
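
Examples 1-4 rely on two helpers defined elsewhere in the test class: _test_macro, which renders a macro and returns its output, and _cleanStats, which resets the collected statistics between tests. A minimal sketch of the render helper, assuming a _make_macro fixture like the one in the MoinMoin macro test suite (names here are illustrative, not a definitive implementation):

 def _test_macro(self, name, args):
     # assumed fixture: builds a macro object bound to a test request,
     # then renders the named macro with the given argument string
     m = self._make_macro()
     return m.execute(name, args)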
Example No. 2
 def testHitsForFilter(self):
     """ macro Hits test: 'event_type=SAVEPAGE' for Hits (SAVEPAGE counted for current page)"""
     eventlog.EventLog(self.request).add(self.request, 'SAVEPAGE',
                                         {'pagename': self.pagename})
     # simulate a SAVEPAGE log entry for WikiSandBox to distinguish it from the current page
     eventlog.EventLog(self.request).add(self.request, 'SAVEPAGE',
                                         {'pagename': 'WikiSandBox'})
     result = self._test_macro(u'Hits', u'event_type=SAVEPAGE')
     self._cleanStats()
     assert result == "1"
Example No. 3
 def testHitsNoArg(self):
     """ macro Hits test: 'no args for Hits (Hits is executed on current page) """
     # <count> log entries for the current page and one for WikiSandBox to simulate page views
     count = 3
     eventlog.EventLog(self.request).add(self.request, 'VIEWPAGE',
                                         {'pagename': 'WikiSandBox'})
     for i in range(count):
         eventlog.EventLog(self.request).add(self.request, 'VIEWPAGE',
                                             {'pagename': self.pagename})
     result = self._test_macro(u'Hits', u'')
     self._cleanStats()
     assert result == str(count)
Example No. 4
 def testHitsForAll(self):
     """ macro Hits test: 'all=True' for Hits (all pages are counted for VIEWPAGE) """
     # <count> * <num_pages> log entries to simulate page views
     pagenames = ['WikiSandBox', self.pagename]
     num_pages = len(pagenames)
     count = 2
     for i in range(count):
         for pagename in pagenames:
             eventlog.EventLog(self.request).add(self.request, 'VIEWPAGE',
                                                 {'pagename': pagename})
     result = self._test_macro(u'Hits', u'all=True')
     self._cleanStats()
     assert result == str(count * num_pages)
Example No. 5
 def testPageHits(self):
     """ macro PageHits test: updating of cache from event-log for multiple call of PageHits"""
     count = 20
     for counter in range(count):
         eventlog.EventLog(self.request).add(self.request, 'VIEWPAGE',
                                             {'pagename': 'PageHits'})
         result = self._test_macro(u'PageHits', u'')  # repeated call: each one must fold new log entries into the cache
     cache = caching.CacheEntry(self.request,
                                'charts',
                                'pagehits',
                                scope='wiki',
                                use_pickle=True)
     date, hits = 0, {}
     if cache.exists():
         try:
             date, hits = cache.content()
         except caching.CacheError:
             cache.remove()
     assert hits['PageHits'] == count
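
The 'pagehits' chart cache stores one pickled (date, hits) tuple, which is why cache.content() unpacks into two values above. The write side mirrors the read; a minimal sketch:

    # persist the (date, hits) tuple back into the wiki-scoped
    # 'pagehits' cache; use_pickle=True serializes the tuple whole
    cache.update((date, hits))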
Example No. 6
def get_data(request):
    # get results from cache
    cache = caching.CacheEntry(request,
                               'charts',
                               'useragents',
                               scope='wiki',
                               use_pickle=True)
    cache_date, data = 0, {}
    if cache.exists():
        try:
            cache_date, data = cache.content()
        except caching.CacheError:
            cache.remove()  # cache gone bad

    log = eventlog.EventLog(request)
    try:
        new_date = log.date()
    except logfile.LogMissing:
        new_date = None

    if new_date is not None:
        log.set_filter(['VIEWPAGE', 'SAVEPAGE'])
        for event in log.reverse():
            if event[0] <= cache_date:
                break
            ua = event[2].get('HTTP_USER_AGENT')
            if ua:
                try:
                    pos = ua.index(" (compatible; ")
                    ua = ua[pos:].split(';')[1].strip()
                except ValueError:
                    ua = ua.split()[0]
                #ua = ua.replace(';', '\n')
                data[ua] = data.get(ua, 0) + 1

        # write results to cache
        cache.update((new_date, data))

    data = [(cnt, ua) for ua, cnt in data.items()]
    data.sort()
    data.reverse()
    return data
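
get_data returns (count, user_agent) pairs sorted by count, descending. A minimal usage sketch, assuming a request object from the surrounding macro or action code:

    # print the user-agent statistics, most frequent first
    for count, user_agent in get_data(request):
        print '%6d  %s' % (count, user_agent)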
Example No. 7
    def addHitsFromLog(self, hits, cacheDate):
        """ Parse the log, add hits after cacheDate and update the cache """
        event_log = eventlog.EventLog(self.request)
        event_log.set_filter(['VIEWPAGE'])

        changed = False
        # don't use event_log.date()
        latest = None
        for event in event_log.reverse():
            if latest is None:
                latest = event[0]
            if event[0] <= cacheDate:
                break
            page = event[2].get('pagename', None)
            if page:
                hits[page] = hits.get(page, 0) + 1
                changed = True

        if changed:
            self.updateCache(latest, hits)
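
Examples 6, 7, and 9 share the same incremental-update idiom: scan the event log newest-first and stop at the first entry whose timestamp is at or below the cached one. The bare pattern, pulled out for clarity (illustrative, not part of MoinMoin):

    def new_events_since(log, cache_date):
        # log.reverse() yields the newest entries first, so the first
        # event already covered by the cache ends the scan
        for event in log.reverse():
            if event[0] <= cache_date:
                break
            yield event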
Example No. 8
def event_logfile(self, pagename, pagefile):
    # add event log entry
    eventtype = 'SAVENEW'
    mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
    elog = eventlog.EventLog(self.request)
    elog.add(self.request, eventtype, {'pagename': pagename}, 1, mtime_usecs)
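
A minimal usage sketch, assuming self.request is available as in the import scripts this helper comes from; the page name and file path are illustrative:

    # log a SAVENEW event for an imported page, stamped with the
    # modification time of its revision file
    self.event_logfile(u'FrontPage',
                       '/wiki/data/pages/FrontPage/revisions/00000001')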
Example No. 9
def get_data(pagename, request, filterpage=None):
    cache_days, cache_views, cache_edits = [], [], []
    cache_date = 0

    # Get results from cache
    if filterpage:
        arena = Page(request, pagename)
        cache = caching.CacheEntry(request, arena, 'hitcounts', scope='item', use_pickle=True)
    else:
        arena = 'charts'
        cache = caching.CacheEntry(request, arena, 'hitcounts', scope='wiki', use_pickle=True)

    if cache.exists():
        try:
            cache_date, cache_days, cache_views, cache_edits = cache.content()
        except caching.CacheError:
            cache.remove()  # cache gone bad

    # Get new results from the log
    log = eventlog.EventLog(request)
    try:
        new_date = log.date()
    except logfile.LogMissing:
        new_date = None

    # prepare data
    days = []
    views = []
    edits = []
    ratchet_day = None
    ratchet_time = None
    if new_date is not None:
        log.set_filter(['VIEWPAGE', 'SAVEPAGE'])
        latest = None
        for event in log.reverse():
            # don't use event_log.date()
            if latest is None:
                latest = event[0]
            event_usecs = event[0]
            if event_usecs <= cache_date:
                break
            eventpage = event[2].get('pagename', '')
            if filterpage and eventpage != filterpage:
                continue
            event_secs = wikiutil.version2timestamp(event_usecs)
            time_tuple = time.gmtime(event_secs) # must be UTC
            day = tuple(time_tuple[0:3])
            if day != ratchet_day:
                # new day
                while ratchet_time:
                    ratchet_time -= 86400 # seconds per day
                    rday = tuple(time.gmtime(ratchet_time)[0:3]) # must be UTC
                    if rday <= day:
                        break
                    days.append(DATE_FMT % rday)
                    views.append(0)
                    edits.append(0)
                days.append(DATE_FMT % day)
                views.append(0)
                edits.append(0)
                ratchet_day = day
                ratchet_time = event_secs
            if event[1] == 'VIEWPAGE':
                views[-1] += 1
            elif event[1] == 'SAVEPAGE':
                edits[-1] += 1

        days.reverse()
        views.reverse()
        edits.reverse()

    # merge the day on the end of the cache
    if cache_days and days and days[0] == cache_days[-1]:
        cache_edits[-1] += edits[0]
        cache_views[-1] += views[0]
        days, views, edits = days[1:], views[1:], edits[1:]

    # Update and save the cache
    cache_days.extend(days)
    cache_views.extend(views)
    cache_edits.extend(edits)
    if new_date is not None:
        cache.update((latest, cache_days, cache_views, cache_edits))

    return cache_days, cache_views, cache_edits
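
The three returned lists are parallel, one slot per day. A minimal sketch that pairs them back up (assuming pagename and request as above):

    days, views, edits = get_data(pagename, request)
    for day, view_count, edit_count in zip(days, views, edits):
        print '%s: %d views, %d edits' % (day, view_count, edit_count)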
Example No. 10
    def getInfo(self):
        _ = self.request.getText
        request = self.request

        buf = StringIO()

        row = lambda label, value, buf=buf: buf.write(u'<dt>%s</dt><dd>%s</dd>'
                                                      % (label, value))

        buf.write(u'<dl>')
        row(_('Python Version'), sys.version)
        row(
            _('MoinMoin Version'),
            _('Release %s [Revision %s]') %
            (version.release, version.revision))

        if not request.user.valid:
            # for an anonymous user it ends here.
            buf.write(u'</dl>')
            return buf.getvalue()

        if request.user.isSuperUser():
            # only the superuser gets the page dependent information below
            try:
                import Ft
                ftversion = Ft.__version__
            except ImportError:
                ftversion = None
            except AttributeError:
                ftversion = 'N/A'

            if ftversion:
                row(_('4Suite Version'), ftversion)

            # TODO add python-xml check and display it

            # Get the full pagelist of the wiki
            pagelist = request.rootpage.getPageList(user='')
            systemPages = []
            totalsize = 0
            for page in pagelist:
                if wikiutil.isSystemPage(request, page):
                    systemPages.append(page)
                totalsize += Page(request, page).size()

            row(_('Number of pages'), str(len(pagelist) - len(systemPages)))
            row(_('Number of system pages'), str(len(systemPages)))

            row(_('Accumulated page sizes'),
                self.formatInReadableUnits(totalsize))
            data_dir = request.cfg.data_dir
            row(
                _('Disk usage of %(data_dir)s/pages/') %
                {'data_dir': data_dir},
                self.formatInReadableUnits(
                    self.getDirectorySize(os.path.join(data_dir, 'pages'))))
            row(
                _('Disk usage of %(data_dir)s/') % {'data_dir': data_dir},
                self.formatInReadableUnits(self.getDirectorySize(data_dir)))

            edlog = editlog.EditLog(request)
            row(
                _('Entries in edit log'), "%s (%s)" %
                (edlog.lines(), self.formatInReadableUnits(edlog.size())))

            # This puts a heavy load on the server when the log is large
            eventlogger = eventlog.EventLog(request)
            row('Event log', self.formatInReadableUnits(eventlogger.size()))

        nonestr = _("NONE")
        # a valid user gets info about all installed extensions
        row(_('Global extension macros'), ', '.join(macro.modules) or nonestr)
        row(
            _('Local extension macros'),
            ', '.join(wikiutil.wikiPlugins('macro', self.macro.cfg))
            or nonestr)

        glob_actions = [
            x for x in action.modules if x not in request.cfg.actions_excluded
        ]
        row(_('Global extension actions'), ', '.join(glob_actions) or nonestr)
        loc_actions = [
            x for x in wikiutil.wikiPlugins('action', self.macro.cfg)
            if x not in request.cfg.actions_excluded
        ]
        row(_('Local extension actions'), ', '.join(loc_actions) or nonestr)

        row(_('Global parsers'), ', '.join(parser.modules) or nonestr)
        row(
            _('Local extension parsers'),
            ', '.join(wikiutil.wikiPlugins('parser', self.macro.cfg))
            or nonestr)

        try:
            import xapian
            xapVersion = 'Xapian %s' % xapian.version_string()
        except ImportError:
            xapian = None
            xapVersion = _(
                'Xapian and/or Python Xapian bindings not installed')

        xapian_enabled = request.cfg.xapian_search
        xapState = (_('Disabled'), _('Enabled'))
        xapRow = '%s, %s' % (xapState[xapian_enabled], xapVersion)

        if xapian and xapian_enabled:
            from MoinMoin.search.Xapian.indexing import XapianIndex
            idx = XapianIndex(request)
            idxState = (_('index unavailable'), _('index available'))
            idx_exists = idx.exists()
            xapRow += ', %s' % idxState[idx_exists]
            if idx_exists:
                xapRow += ', %s' % (
                    _('last modified: %s') %
                    request.user.getFormattedDateTime(idx.mtime()))

        row(_('Xapian search'), xapRow)

        if xapian and xapian_enabled:
            stems = xapian.Stem.get_available_languages()
            row(
                _('Stemming for Xapian'),
                xapState[request.cfg.xapian_stemming] + " (%s)" %
                (stems or nonestr))

        try:
            from threading import activeCount
            t_count = activeCount()
        except ImportError:
            t_count = None

        row(_('Active threads'), t_count or _('N/A'))
        buf.write(u'</dl>')

        return buf.getvalue()