コード例 #1
0
ファイル: models.py プロジェクト: zby1234/NewsBlur
    def collect_statistics_feeds_fetched(cls):
        """Record the number of feed fetches over the last 24 hours.

        Reads the rolling counter from Redis via ``RStats``, upserts the
        ``feeds_fetched`` statistics document, and returns the count.
        """
        fetch_count = RStats.count('feed_fetch', hours=24)
        # upsert=True creates the stat document the first time this runs.
        cls.objects(key='feeds_fetched').update_one(upsert=True,
                                                    set__key='feeds_fetched',
                                                    set__value=fetch_count)
        return fetch_count
コード例 #2
0
ファイル: models.py プロジェクト: AndrewJHart/NewsBlur
 def collect_statistics_feeds_fetched(cls):
     """Count feed fetches from the past day and persist the total.

     The value is stored under the ``feeds_fetched`` key with an upsert
     (so the document is created on first use) and returned to the caller.
     """
     total = RStats.count('feed_fetch', hours=24)
     cls.objects(key='feeds_fetched').update_one(
         upsert=True, set__key='feeds_fetched', set__value=total)
     return total
コード例 #3
0
ファイル: log.py プロジェクト: 4iji/NewsBlur
def user(u, msg, request=None):
    """Log *msg* attributed to user *u*, with platform and timing info.

    ``u`` may be a user object or a ``WSGIRequest`` (in which case the user
    is pulled off the request). When a request carries ``start_time``, the
    elapsed time is colorized and, for known page-load paths, recorded in
    ``MAnalyticsPageLoad`` and ``RStats``. Falls back to ``debug(msg)``
    when no user is available.
    """
    from apps.statistics.models import MAnalyticsPageLoad

    if not u:
        return debug(msg)

    platform = '------'
    time_elapsed = ""
    # BUG FIX: ``seconds`` was previously assigned only inside the
    # ``hasattr(request, 'start_time')`` branch, so a request without
    # ``start_time`` raised a NameError in the page-load recording below.
    seconds = None
    if isinstance(u, WSGIRequest) or request:
        if not request:
            request = u
            u = request.user
        platform = extract_user_agent(request)

        if hasattr(request, 'start_time'):
            seconds = time.time() - request.start_time
            # Color the elapsed time by severity: red >= 1s, blue <= 0.2s,
            # dim bold otherwise.
            color = '~FK~SB'
            if seconds >= 1:
                color = '~FR'
            elif seconds <= .2:
                color = '~FB'
            time_elapsed = "[%s%.4ss~SB] " % (
                color,
                seconds,
            )
    is_premium = u.is_authenticated() and u.profile.is_premium
    premium = '*' if is_premium else ''
    # Optionally obfuscate usernames in logs (privacy setting).
    username = cipher(unicode(u)) if settings.CIPHER_USERNAMES else unicode(u)
    info(' ---> [~FB~SN%-6s~SB] %s[%s%s] %s' % (platform, time_elapsed, username, premium, msg))
    page_load_paths = [
        "/reader/feed/",
        "/social/stories/",
        "/reader/river_stories/",
        "/social/river_stories/"
    ]
    if request:
        path = MAnalyticsPageLoad.clean_path(request.path)
        # Only record timing when we actually measured it (see fix above).
        if path in page_load_paths and seconds is not None:
            MAnalyticsPageLoad.add(user=u, is_premium=is_premium, platform=platform, path=path,
                                   duration=seconds)
            RStats.add('page_load', duration=seconds)
コード例 #4
0
    def collect_statistics_sites_loaded(cls):
        """Aggregate page-load counters from Redis into 24 hourly buckets.

        For each of the last 24 hours, sums the per-minute load counts and
        averages the per-minute durations, then upserts the series (JSON)
        and several scalar summaries as statistics documents.
        """
        now = round_time(datetime.datetime.now(), round_to=60)
        r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)

        sites_loaded = []
        avg_time_taken = []
        last_5_min_time_taken = 0

        for hours_ago in range(24):
            bucket_start = now - datetime.timedelta(hours=hours_ago + 1)

            # Fetch the per-minute sum (:s) and average (:a) counters for
            # this whole hour in a single round trip.
            pipe = r.pipeline()
            for offset in range(60):
                minute = bucket_start + datetime.timedelta(minutes=offset)
                key = "%s:%s" % (RStats.stats_type('page_load'),
                                 minute.strftime('%s'))
                pipe.get("%s:s" % key)
                pipe.get("%s:a" % key)
            raw = pipe.execute()

            # Even slots are counts, odd slots are averages; drop misses.
            counts = [int(c) for c in raw[::2] if c]
            avgs = [float(a) for a in raw[1::2] if a]

            if hours_ago == 0:
                # NOTE(review): despite the name, this samples only the
                # first minute of the most recent hour ([:1]), not the last
                # five minutes -- confirm whether [-5:] was intended.
                last_5_min_time_taken = round(
                    sum(avgs[:1]) / max(1, sum(counts[:1])), 2)

            if counts and avgs:
                count = max(1, sum(counts))
                avg = round(sum(avgs) / count, 3)
            else:
                count, avg = 0, 0

            sites_loaded.append(count)
            avg_time_taken.append(avg)

        # Oldest hour first for charting.
        sites_loaded.reverse()
        avg_time_taken.reverse()

        values = (
            ('sites_loaded', json.encode(sites_loaded)),
            ('avg_time_taken', json.encode(avg_time_taken)),
            ('latest_sites_loaded', sites_loaded[-1]),
            ('latest_avg_time_taken', avg_time_taken[-1]),
            ('max_sites_loaded', max(sites_loaded)),
            ('max_avg_time_taken', max(1, max(avg_time_taken))),
            ('last_5_min_time_taken', last_5_min_time_taken),
        )
        for key, value in values:
            cls.objects(key=key).update_one(upsert=True,
                                            set__key=key,
                                            set__value=value)
コード例 #5
0
ファイル: log.py プロジェクト: stfenjobs/PyTune3
def user(u, msg, request=None, warn_color=True):
    """Log *msg* attributed to user *u*, with platform and timing info.

    ``u`` may be a user object or a ``WSGIRequest`` (in which case the user
    is pulled off the request). When ``warn_color`` is true, slow requests
    are colorized by elapsed time. Page loads on known paths are recorded
    in ``RStats``. Falls back to ``debug(msg)`` when no user is available.
    """
    msg = smart_unicode(msg)
    if not u:
        return debug(msg)

    platform = '------'
    time_elapsed = ""
    # BUG FIX: ``seconds`` was previously assigned only inside the
    # ``hasattr(request, 'start_time')`` branch, so a request without
    # ``start_time`` raised a NameError in the RStats call below.
    seconds = None
    if isinstance(u, WSGIRequest) or request:
        if not request:
            request = u
            u = request.user
        platform = extract_user_agent(request)

        if hasattr(request, 'start_time'):
            seconds = time.time() - request.start_time
            color = '~FB'
            if warn_color:
                # Red for >= 1s, dim bold for > 0.2s.
                if seconds >= 1:
                    color = '~FR'
                elif seconds > .2:
                    color = '~SB~FK'
            time_elapsed = "[%s%.4ss~SB] " % (
                color,
                seconds,
            )
    is_premium = u.is_authenticated() and u.profile.is_premium
    premium = '*' if is_premium else ''
    # Optionally obfuscate usernames in logs (privacy setting).
    username = cipher(unicode(u)) if settings.CIPHER_USERNAMES else unicode(u)
    info(' ---> [~FB~SN%-6s~SB] %s[%s%s] %s' % (platform, time_elapsed, username, premium, msg))
    page_load_paths = [
        "/reader/feed/",
        "/social/stories/",
        "/reader/river_stories/",
        "/social/river_stories/"
    ]
    if request:
        path = RStats.clean_path(request.path)
        # Only record timing when we actually measured it (see fix above).
        if path in page_load_paths and seconds is not None:
            RStats.add('page_load', duration=seconds)
コード例 #6
0
ファイル: models.py プロジェクト: manderson23/NewsBlur
    def collect_statistics_sites_loaded(cls):
        """Aggregate page-load counters from Redis into 24 hourly buckets.

        For each of the last 24 hours, sums the per-minute load counts and
        averages the per-minute durations, then upserts the series (JSON)
        and several scalar summaries as statistics documents.
        """
        now = round_time(datetime.datetime.now(), round_to=60)
        r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)

        def hourly_samples(hours_back):
            # Pipeline the 60 per-minute sum (:s) and average (:a) counters
            # for the hour starting ``hours_back + 1`` hours ago.
            start = now - datetime.timedelta(hours=hours_back + 1)
            pipe = r.pipeline()
            for m in range(60):
                minute = start + datetime.timedelta(minutes=m)
                key = "%s:%s" % (RStats.stats_type('page_load'), minute.strftime('%s'))
                pipe.get("%s:s" % key)
                pipe.get("%s:a" % key)
            results = pipe.execute()
            # Even slots are counts, odd slots are averages; drop misses.
            return ([int(c) for c in results[::2] if c],
                    [float(a) for a in results[1::2] if a])

        sites_loaded = []
        avg_time_taken = []
        last_5_min_time_taken = 0

        for hour in range(24):
            counts, avgs = hourly_samples(hour)

            if hour == 0:
                # NOTE(review): only the first minute of the latest hour is
                # sampled ([:1]) despite the "5 min" name -- verify intent.
                last_5_min_time_taken = round(sum(avgs[:1]) / max(1, sum(counts[:1])), 2)

            if counts and avgs:
                count = max(1, sum(counts))
                avg = round(sum(avgs) / count, 3)
            else:
                count = 0
                avg = 0

            sites_loaded.append(count)
            avg_time_taken.append(avg)

        # Reverse so index 0 is the oldest hour (for charting).
        sites_loaded.reverse()
        avg_time_taken.reverse()

        values = (
            ('sites_loaded', json.encode(sites_loaded)),
            ('avg_time_taken', json.encode(avg_time_taken)),
            ('latest_sites_loaded', sites_loaded[-1]),
            ('latest_avg_time_taken', avg_time_taken[-1]),
            ('max_sites_loaded', max(sites_loaded)),
            ('max_avg_time_taken', max(1, max(avg_time_taken))),
            ('last_5_min_time_taken', last_5_min_time_taken),
        )
        for key, value in values:
            cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)