def render(self, context):
    # Get the member's number of unread private messages
    prof = self.profile.resolve(context)
    cnt = cache.get('mps_%i' % prof.id, False)

    if not cnt:
        cnt = 0
        usertopics = UserTopic.objects \
            .select_related('topic') \
            .filter(user=prof, has_deleted=False)

        for ut in usertopics:
            if ut.last_read_post_id != ut.topic.last_message_id:
                cnt += 1

        # Update the cache (+1 because a cached 0 would look like a cache miss)
        cache.set('mps_%i' % prof.id, cnt + 1, 2 * 60)
    else:
        cnt = cnt - 1  # The value was stored with +1, remove it here

    if cnt != 0:
        return '<strong>(%i)</strong>' % cnt
    else:
        return ''
def render(self, context):
    anon, users, ok = cache.get('connected_users_count', [0, 0, False])

    if not ok:
        # Delete activities older than 5 minutes
        cursor = connection.cursor()
        cursor.execute("DELETE FROM general_activity WHERE date < FROM_UNIXTIME(UNIX_TIMESTAMP(NOW()) - (5 * 60))")
        transaction.commit_unless_managed()

        # Fetch the remaining activities
        activities = Activity.objects \
            .select_related('user') \
            .only('user', 'user__user') \
            .order_by('-date')

        # Count the users
        anon = 0
        users = 0
        connected_users = []

        for act in activities:
            if act.user_id:
                users += 1
                connected_users.append(act.user.user_id)
            else:
                anon += 1

        # Populate the cache
        cache.set('connected_users_count', [anon, users, True], 5 * 60)
        cache.set('connected_users', connected_users, 30 * 60)  # 30 min, refreshed when needed

    # Display
    return '%i (%i)' % (users, anon)
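# For reference, the raw SQL cleanup above can also be expressed through the ORM.
# This is only a sketch, assuming Activity.date is a DateTimeField; the helper name
# purge_old_activities is illustrative, not from the original code.
import datetime

from pyv4.general.models import Activity


def purge_old_activities(max_age_minutes=5):
    # Delete activity rows older than max_age_minutes, like the raw DELETE above
    cutoff = datetime.datetime.now() - datetime.timedelta(minutes=max_age_minutes)
    Activity.objects.filter(date__lt=cutoff).delete()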
def items(self):
    # Use the RSS cache if it exists
    PList = cache.get('feeds_packages', False)

    if not PList:
        PList = Package.objects.select_related('arch').order_by(
            '-date')[:10]

        # Write the packages RSS cache for 30 minutes
        cache.set('feeds_packages', list(PList), 30 * 60)

    return PList
def items(self):
    # Use the RSS cache if it exists
    DList = cache.get('feeds_ask', False)

    if not DList:
        DList = Demand.objects.select_related(
            'author', 'd_type').order_by('-created_at')[:10]

        # Write the demands RSS cache for 30 minutes
        cache.set('feeds_ask', list(DList), 30 * 60)

    return DList
def items(self):
    # Use the RSS cache if it exists
    NList = cache.get('feeds_news', False)

    if not NList:
        NList = News.objects.select_related('category', 'author') \
            .order_by('-date_published') \
            .filter(published=True, is_private=0)[:10]

        # Write the news RSS cache for 30 minutes
        cache.set('feeds_news', list(NList), 30 * 60)

    return NList
def inner(request, *args, **kwargs):
    if request.method in methods:
        remote_addr = request.META.get('HTTP_X_FORWARDED_FOR') or request.META.get('REMOTE_ADDR')

        if cache.get(remote_addr) == limit:
            return HttpResponseForbidden('Try slowing down a little.')
        elif not cache.get(remote_addr):
            cache.set(remote_addr, 1, duration)
        else:
            cache.incr(remote_addr)

    return func(request, *args, **kwargs)
def items(self):
    # Use the RSS cache if it exists
    JList = cache.get('feeds_journal', False)

    if not JList:
        JList = News.objects.select_related('category', 'author') \
            .order_by('-date_published') \
            .filter(published=True, is_private=1)[:10]

        # Write the journal RSS cache for 30 minutes
        cache.set('feeds_journal', list(JList), 30 * 60)

    return JList
def items(self):
    # Use the RSS cache if it exists
    MList = cache.get('feeds_msg', False)

    if not MList:
        MList = Topic.objects.select_related('last_post', 'last_post__author') \
            .extra(select={'date_created': 'forum_post.date_created',
                           'contents': 'forum_post.contents'}) \
            .order_by('-last_post__date_created')[:10]

        # Write the forum messages RSS cache for 30 minutes
        cache.set('feeds_msg', list(MList), 30 * 60)

    return MList
def inner(request, *args, **kwargs):
    if request.method in methods:
        remote_addr = request.META.get(
            "HTTP_X_FORWARDED_FOR") or request.META.get("REMOTE_ADDR")

        if cache.get(remote_addr) == limit:
            return HttpResponseForbidden("Try slowing down a little.")
        elif not cache.get(remote_addr):
            cache.set(remote_addr, 1, duration)
        else:
            cache.incr(remote_addr)

    return func(request, *args, **kwargs)
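# The inner() functions above are the wrapped views of a rate-limiting decorator;
# only inner() appears in the original, so the enclosing factory below is a sketch.
# The name ratelimit and its default arguments are assumptions, not the original API.
from functools import wraps

from django.core.cache import cache
from django.http import HttpResponseForbidden


def ratelimit(methods=('POST',), limit=10, duration=60):
    # Allow at most `limit` requests per `duration` seconds and per client IP
    def decorator(func):
        @wraps(func)
        def inner(request, *args, **kwargs):
            if request.method in methods:
                remote_addr = request.META.get('HTTP_X_FORWARDED_FOR') or \
                    request.META.get('REMOTE_ADDR')

                if cache.get(remote_addr) == limit:
                    return HttpResponseForbidden('Try slowing down a little.')
                elif not cache.get(remote_addr):
                    cache.set(remote_addr, 1, duration)
                else:
                    cache.incr(remote_addr)

            return func(request, *args, **kwargs)
        return inner
    return decorator

# Usage sketch:
# @ratelimit(methods=('POST',), limit=5, duration=60)
# def my_view(request):
#     ...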
def items(self):
    # Use the RSS cache if it exists
    WList = cache.get('feeds_wiki', False)

    if not WList:
        WList = LogEntry.objects \
            .select_related('page', 'author_user') \
            .filter(page__is_private=False) \
            .order_by('-date')[:10]

        # Write the wiki RSS cache for 30 minutes
        cache.set('feeds_wiki', list(WList), 30 * 60)

    return WList
def tpl(name, args, request):
    # Stylesheet to use
    if request.user.is_anonymous():
        # Default style
        style = '/style/default'
    else:
        style = request.session.get('style', False)

        if not style:
            style = request.user.get_profile().style
            request.session['style'] = style

    # Record the activity
    from pyv4.general.models import Activity, GlobalMessage

    act = Activity(ip=request.META.get('REMOTE_ADDR'), template=name)
    act.date = datetime.datetime.now()

    if not request.user.is_anonymous():
        act.user = request.user.get_profile()

    act.save()

    # Fetch a possible global message
    lang = request.LANGUAGE_CODE.split('_')[0]
    globalmessages = cache.get('global_messages_%s' % lang, None)

    if not globalmessages:
        globalmessages = GlobalMessage.objects.filter(
            Q(lang=lang) | Q(lang__isnull=True) | Q(lang=''))
        globalmessages = list(globalmessages)
        cache.set('global_messages_%s' % lang, globalmessages, 10 * 60)

    # Render the template
    return render_to_response(name, args, context_instance=RequestContext(
        request, {
            'style': style,
            'globalmessages': globalmessages,
            'settings': settings
        }))
def lcode(text):
    h = hash(text)
    rs = cache.get('lcode_%i' % h, False)

    markdown.TAB_LENGTH = 4  # Normal tab width

    if rs is False:
        rs = markdown.markdown(force_unicode(text.replace('\\', '\\\\')), [
            'toc', 'def_list', 'tables', 'codehilite',
            'wikilinks(base_url=/wiki-,end_url=.html)'
        ], True)

        # Handle smileys
        for smiley in SMILEYS:
            rs = rs.replace(
                smiley[0],
                ' <img src="%s" alt="%s" class="smiley" /> ' % (smiley[1], smiley[0]))

        cache.set('lcode_%i' % h, rs)

    return rs
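# lcode() keys its cache on Python's built-in hash(), which differs between 32-bit
# and 64-bit builds (and between processes when hash randomisation is enabled), so a
# shared cache such as memcached may miss across servers. A possible alternative is a
# digest-based key; this is only a sketch, and lcode_cache_key is a hypothetical helper.
import hashlib


def lcode_cache_key(text):
    # Stable digest of the source text, identical on every machine and process
    digest = hashlib.md5(text.encode('utf-8')).hexdigest()
    return 'lcode_%s' % digest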
def index(request):
    # News
    latest_news = cache.get('index_last_news', None)

    if not latest_news:
        latest_news = News.objects \
            .select_related('author') \
            .filter(published=True, is_private=False) \
            .order_by('-date_published')[:5]

        latest_news = list(latest_news)
        cache.set('index_last_news', latest_news, 60)

    # Journals
    latest_journals = cache.get('index_last_journals', None)

    if not latest_journals:
        latest_journals = News.objects \
            .select_related('author') \
            .filter(published=True, is_private=True) \
            .order_by('-date_published')[:5]

        latest_journals = list(latest_journals)
        cache.set('index_last_journals', latest_journals, 60)

    # Forum messages
    latest_topics = cache.get('index_last_topics', None)

    if not latest_topics:
        latest_topics = Topic.objects \
            .select_related('last_post', 'last_post__author') \
            .order_by('-last_post__date_created')[:5]

        latest_topics = list(latest_topics)
        cache.set('index_last_topics', latest_topics, 30)

    # Recently modified wiki pages
    latest_wiki_changes = cache.get('index_last_wiki', None)

    if not latest_wiki_changes:
        latest_wiki_changes = LogEntry.objects \
            .select_related('page', 'author_user') \
            .filter(page__is_private=False) \
            .order_by('-date')[:5]

        latest_wiki_changes = list(latest_wiki_changes)
        cache.set('index_last_wiki', latest_wiki_changes, 60)

    # Latest packages
    latest_packages = cache.get('index_last_packages', None)

    if not latest_packages:
        latest_packages = Package.objects \
            .select_related('arch') \
            .order_by('-date')[:5]

        latest_packages = list(latest_packages)
        cache.set('index_last_packages', latest_packages, 300)

    # Latest demands
    latest_demands = cache.get('index_last_demands', None)

    if not latest_demands:
        latest_demands = Demand.objects \
            .select_related('reporter') \
            .order_by('-updated_at')[:5]

        latest_demands = list(latest_demands)
        cache.set('index_last_demands', latest_demands, 60)

    # Current poll
    mpoll = cache.get('index_last_poll', None)

    if not mpoll:
        mpoll = Poll.objects \
            .select_related('topic') \
            .order_by('-pub_date')[:1]

        if len(mpoll) == 1:
            mpoll = get_poll(request, mpoll[0])
        else:
            mpoll = None

        cache.set('index_last_poll', mpoll, 60)

    # Find out whether the user can vote
    if mpoll:
        if request.user.is_anonymous():
            mpoll['can_vote'] = False
        else:
            user_choices = UserChoice.objects \
                .filter(user=request.user.get_profile(),
                        choice__poll=mpoll['object'])

            mpoll['can_vote'] = (user_choices.count() == 0)

    # Statistics
    stats = cache.get('index_stats', False)

    if not stats:
        # Gather the statistics
        stats = {}

        # stats['logram_users'] (TODO, much later)
        stats['users'] = Profile.objects.count()
        stats['last_user'] = Profile.objects.order_by('-id')[0]
        stats['open_demands'] = Demand.objects.filter(
            status__closed=False).count()
        stats['demands'] = Demand.objects.count()
        stats['forums'] = Forum.objects.count()
        stats['topics'] = Topic.objects.filter(p_type=0).count()
        stats['messages'] = Post.objects.count()
        stats['packages'] = Package.objects.count()

        # Cache for 20 minutes
        cache.set('index_stats', stats, 20 * 60)

    # If the user is allowed to, show the news awaiting validation and the moderator alerts
    latest_validate_news = False
    moderator_alerts = False

    if request.user.has_perm('news.change_news'):
        latest_validate_news = News.objects \
            .select_related('author') \
            .filter(to_validate=True, published=False, is_private=False) \
            .order_by('-date_modified')

    if request.user.has_perm('forum.view_alerts') or request.user.has_perm(
            'pastebin.view_alerts'):
        moderator_alerts = Alert.objects \
            .select_related('topic', 'author', 'paste')

    return tpl('global/index.html', {
        'latest_news': latest_news,
        'latest_journals': latest_journals,
        'latest_topics': latest_topics,
        'latest_packages': latest_packages,
        'latest_wiki': latest_wiki_changes,
        'latest_demands': latest_demands,
        'latest_validate_news': latest_validate_news,
        'moderator_alerts': moderator_alerts,
        'poll': mpoll,
        'stats': stats
    }, request)