def get_related_channels(self, user):
    resolution_nouns = (
        Resolution.objects.filter(
            declarations__user=user,
            nouns__isnull=False,
            language=normalize_language_code(get_language()),
        )
        .order_by("-declarations__weight")
        .values_list("nouns", flat=True)
    )
    supported_nouns = (
        Resolution.objects.filter(
            declarations__supporters=user,
            nouns__isnull=False,
            language=normalize_language_code(get_language()),
        )
        .order_by("-declarations__weight")
        .values_list("nouns", flat=True)
    )
    noun_ids = list(resolution_nouns) + list(supported_nouns)
    noun_set = set(noun_ids)
    channels = Channel.objects.filter(
        nouns__in=noun_ids,
        language=normalize_language_code(get_language()),
    ).distinct()
    bundle = []
    total_score = 0
    for channel in channels:
        channel_dict = {"channel": channel.serialize(), "score": 0}
        for noun in channel.nouns.all():
            if noun.pk in noun_set:
                channel_dict["score"] += noun_ids.count(noun.pk)
        total_score += channel_dict["score"]
        bundle.append(channel_dict)
    return sorted(bundle, key=lambda c: c["score"], reverse=True)
def build_graph(self, user):
    resolution_nouns = (
        Resolution.objects.filter(
            declarations__user=user,
            nouns__isnull=False,
            language=normalize_language_code(get_language()),
        )
        .order_by("-declarations__weight")
        .values_list("nouns", flat=True)
    )
    supported_nouns = (
        Resolution.objects.filter(
            declarations__supporters=user,
            nouns__isnull=False,
            language=normalize_language_code(get_language()),
        )
        .order_by("-declarations__weight")
        .values_list("nouns", flat=True)
    )
    # Symmetric difference: nouns that appear in authored or supported
    # resolutions, but not both.
    noun_ids = set(resolution_nouns) ^ set(supported_nouns)
    channels = Channel.objects.filter(
        nouns__in=noun_ids,
        language=normalize_language_code(get_language()),
    ).distinct()
    graph = DiGraph()
    last_channel = None
    first_channel = None
    label = lambda x: x.title()
    for channel in channels:
        graph.add_node(channel.title, {
            "label": label(channel.title),
            "type": "channel"
        })
        if last_channel:
            graph.add_edge(last_channel.title, channel.title)
        channel_nouns = channel.nouns.all()
        for channel_noun in channel_nouns:
            if channel_noun.id in noun_ids:
                graph.add_edge(channel_noun.text, channel.title)
                graph.add_node(channel_noun.text, {
                    "label": label(channel_noun.text),
                    "type": "noun"
                })
        last_channel = channel
        if not first_channel:
            first_channel = channel
    # Close the channel chain into a cycle; guard against the case where no
    # channels matched (first_channel would still be None).
    if first_channel is not None:
        graph.add_edge(first_channel.title, last_channel.title)
    return graph
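# --- Hedged usage sketch (not part of the original code) --------------------
# Assuming DiGraph above is networkx's directed graph, the result of
# build_graph() can be dumped into the node-link JSON shape that most
# front-end graph libraries accept. The method name below is illustrative only.
def build_graph_data(self, user):
    from networkx.readwrite import json_graph  # assumed dependency

    graph = self.build_graph(user)
    # Nodes keep the "label"/"type" attributes assigned in build_graph().
    return json_graph.node_link_data(graph)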
def get_related_channels(self, speaker):
    resolution_nouns = Resolution.objects.filter(
        declarations__speaker=speaker,
        nouns__isnull=False,
        language=normalize_language_code(get_language())
    ).order_by(
        '-declarations__weight'
    ).values_list(
        'nouns',
        flat=True
    )
    # supported_nouns = Resolution.objects.filter(
    #     declarations__supporters=speaker,
    #     nouns__isnull=False,
    #     language=normalize_language_code(get_language())
    # ).order_by(
    #     '-declarations__weight'
    # ).values_list(
    #     'nouns',
    #     flat=True
    # )
    supported_nouns = []
    noun_ids = list(resolution_nouns) + list(supported_nouns)
    noun_set = set(noun_ids)
    channels = Channel.objects.filter(
        nouns__in=noun_ids,
        language=normalize_language_code(get_language())
    ).distinct()
    bundle = []
    total_score = 0
    for channel in channels:
        channel_dict = {
            'channel': channel.serialize(),
            'score': 0,
        }
        for noun in channel.nouns.all():
            if noun.pk in noun_set:
                channel_dict['score'] += noun_ids.count(noun.pk)
        total_score += channel_dict['score']
        bundle.append(channel_dict)
    return sorted(bundle, key=lambda c: c['score'], reverse=True)
def get_related_channels(self, user):
    contention_nouns = Contention.objects.filter(
        premises__user=user,
        nouns__isnull=False,
        language=normalize_language_code(get_language())
    ).order_by(
        '-premises__weight'
    ).values_list(
        'nouns',
        flat=True
    )
    supported_nouns = Contention.objects.filter(
        premises__supporters=user,
        nouns__isnull=False,
        language=normalize_language_code(get_language())
    ).order_by(
        '-premises__weight'
    ).values_list(
        'nouns',
        flat=True
    )
    noun_ids = list(contention_nouns) + list(supported_nouns)
    noun_set = set(noun_ids)
    channels = Channel.objects.filter(
        nouns__in=noun_ids,
        language=normalize_language_code(get_language())
    ).distinct()
    bundle = []
    total_score = 0
    for channel in channels:
        channel_dict = {
            'channel': channel.serialize(),
            'score': 0,
        }
        for noun in channel.nouns.all():
            if noun.pk in noun_set:
                channel_dict['score'] += noun_ids.count(noun.pk)
        total_score += channel_dict['score']
        bundle.append(channel_dict)
    return sorted(bundle, key=lambda c: c['score'], reverse=True)
def get_target(self):
    target_noun = self.cleaned_data['target_noun']
    try:
        noun = Noun.objects.get(
            text=target_noun,
            language=normalize_language_code(get_language())
        )
    except Noun.DoesNotExist:
        noun = Noun.objects.create(
            text=target_noun,
            is_active=False,
            language=normalize_language_code(get_language())
        )
    return noun
def related_contentions(self):
    if self.related_nouns.exists():
        source = self.related_nouns
    else:
        source = self.nouns
    nouns = source.prefetch_related("out_relations")
    noun_ids = set(nouns.values_list("pk", flat=True))
    for noun in nouns.all():
        relations = set(noun.out_relations.values_list("target", flat=True))
        noun_ids = noun_ids.union(relations)
    available_nouns = (
        Noun.objects.filter(
            language=normalize_language_code(get_language()),
            id__in=noun_ids,
        )
        .annotate(contention_count=Count("contentions"))
        .filter(contentions__is_published=True)
        .prefetch_related("contentions")
    )
    serialized = [
        {
            "noun": noun,
            "contentions": (
                noun.contentions.exclude(pk=self.pk)
                .values("title", "slug")
                .order_by("?")[:7]  # find a proper way to randomize suggestions
            ),
        }
        for noun in available_nouns
    ]
    return filter(itemgetter("contentions"), serialized)
def get(self, request, *args, **kwargs):
    self.object = self.get_object()
    host = request.META['HTTP_HOST']

    if not host.startswith(settings.AVAILABLE_LANGUAGES):
        return redirect(self.object.get_full_url(), permanent=True)

    if not normalize_language_code(get_language()) == self.object.language:
        return redirect(self.object.get_full_url(), permanent=True)

    partial = request.GET.get('partial')
    level = request.GET.get('level')

    if partial:
        contention = self.object
        try:
            serialized = contention.partial_serialize(
                int(partial), self.request.user)
        except (StopIteration, ValueError):
            raise Http404

        return render(
            request,
            'premises/tree.html',
            {
                'premises': serialized['premises'],
                'serialized': serialized,
                'level': int(level)
            })

    return super(ContentionDetailView, self).get(request, *args, **kwargs)
def related_resolutions(self):
    if self.related_nouns.exists():
        source = self.related_nouns
    else:
        source = self.nouns
    nouns = source.prefetch_related("out_relations")
    noun_ids = set(nouns.values_list("pk", flat=True))
    for noun in nouns.all():
        relations = set(noun.out_relations.values_list("target", flat=True))
        noun_ids = noun_ids.union(relations)
    available_nouns = (
        Noun.objects.filter(
            language=normalize_language_code(get_language()),
            id__in=noun_ids,
        )
        .annotate(resolution_count=Count("resolutions"))
        .filter(resolutions__is_published=True)
        .prefetch_related("resolutions")
    )
    serialized = [
        {
            "noun": noun,
            "resolutions": (
                noun.resolutions.exclude(pk=self.pk)
                .values("title", "slug")
                .order_by("?")[:7]  # find a proper way to randomize suggestions
            ),
        }
        for noun in available_nouns
    ]
    return filter(itemgetter("resolutions"), serialized)
def formatted_title(self, tag='a'):
    language = normalize_language_code(get_language())
    title = strip_tags(self.title)
    select = {'length': 'Length(nouns_noun.text)'}
    nouns = (self.nouns
             .extra(select=select)
             .filter(language=language)
             .prefetch_related('keywords')
             .order_by('-length'))
    for noun in nouns:
        keywords = noun.active_keywords().values_list('text', flat=True)
        sorted_keywords = sorted(keywords, key=len, reverse=True)
        for keyword in sorted_keywords:
            replaced = replace_with_link(title, keyword,
                                         noun.get_absolute_url(), tag)
            if replaced is not None:
                title = replaced
                continue
        replaced = replace_with_link(title, noun.text,
                                     noun.get_absolute_url(), tag)
        if replaced is not None:
            title = replaced
    return title
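# --- Hedged sketch (not the project's actual helper) -------------------------
# formatted_title() relies on replace_with_link() returning the rewritten title
# when the phrase occurs and None otherwise. A minimal stand-in illustrating
# that assumed contract:
def replace_with_link_sketch(title, phrase, url, tag='a'):
    import re
    pattern = re.compile(r'\b%s\b' % re.escape(phrase), re.IGNORECASE)
    match = pattern.search(title)
    if match is None:
        return None  # caller falls back to the noun text itself
    link = '<%(tag)s href="%(url)s">%(text)s</%(tag)s>' % {
        'tag': tag, 'url': url, 'text': match.group(0),
    }
    return title[:match.start()] + link + title[match.end():]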
def get_context_data(self, **kwargs):
    language = normalize_language_code(get_language())
    fallacies = (Report.objects
                 .filter(reason__isnull=False,
                         contention__language=language)
                 .order_by('-id')[self.get_offset():self.get_limit()])
    return super(FallaciesView, self).get_context_data(fallacies=fallacies,
                                                       **kwargs)
def get_supported_declarations(self, user):
    return (
        Declaration.objects.filter(
            is_approved=True,
            user=user,
            resolution__language=normalize_language_code(get_language()),
        )
        .annotate(supporter_count=Count("supporters", distinct=True))
        .filter(supporter_count__gt=0)
        .order_by("-supporter_count")[:10]
    )
def get_target(self):
    target_noun = self.cleaned_data['target_noun']
    try:
        noun = Noun.objects.get(
            text=target_noun,
            language=normalize_language_code(get_language())
        )
    except Noun.DoesNotExist:
        noun = Noun.objects.create(
            text=target_noun,
            is_active=False,
            language=normalize_language_code(get_language())
        )
    return noun
def get_redirect_url(self, *args, **kwargs):
    resolution = (
        Resolution.objects.annotate(declaration_count=Count("declarations"))
        .filter(
            declaration_count__gt=2,
            language=normalize_language_code(get_language()),
        )
        .order_by("?")[0]
    )
    return resolution.get_absolute_url()
def get_crowded_resolutions(self):
    return (
        Resolution.objects.annotate(declaration_count=Sum("declarations"))
        .filter(
            language=normalize_language_code(get_language()),
            declaration_count__gt=0,
            **self.build_time_filters(date_field="date_creation")
        )
        .order_by("-declaration_count")[:10]
    )
def get(self, request, *args, **kwargs):
    self.object = self.get_object()
    host = request.META["HTTP_HOST"]

    if not host.startswith(settings.AVAILABLE_LANGUAGES):
        return redirect(self.object.get_full_url(), permanent=True)

    if not normalize_language_code(get_language()) == self.object.language:
        return redirect(self.object.get_full_url(), permanent=True)

    partial = request.GET.get("partial")
    level = request.GET.get("level")

    if partial:
        resolution = self.object
        try:
            serialized = resolution.partial_serialize(
                int(partial), self.request.user)
        except (StopIteration, ValueError):
            raise Http404

        return render(
            request,
            "declarations/tree.html",
            {
                "declarations": serialized["declarations"],
                "serialized": serialized,
                "level": int(level),
            },
        )

    return super().get(request, *args, **kwargs)
def get_supported_premises(self, user):
    return Premise.objects.filter(
        is_approved=True,
        user=user,
        argument__language=normalize_language_code(get_language())
    ).annotate(
        supporter_count=Count('supporters', distinct=True)
    ).filter(
        supporter_count__gt=0
    ).order_by('-supporter_count')[:10]
def get(self, request, *args, **kwargs):
    self.object = self.get_object()
    host = request.META['HTTP_HOST']

    if not host.startswith(settings.AVAILABLE_LANGUAGES):
        return redirect(self.object.get_full_url(), permanent=True)

    if not normalize_language_code(get_language()) == self.object.language:
        return redirect(self.object.get_full_url(), permanent=True)

    partial = request.GET.get('partial')
    level = request.GET.get('level')

    if partial:
        contention = self.object
        try:
            serialized = contention.partial_serialize(int(partial),
                                                      self.request.user)
        except (StopIteration, ValueError):
            raise Http404

        return render(request, 'premises/tree.html', {
            'premises': serialized['premises'],
            'serialized': serialized,
            'level': int(level)
        })

    return super(ContentionDetailView, self).get(request, *args, **kwargs)
def formatted_title(self, tag="a"): language = normalize_language_code(get_language()) title = strip_tags(self.title) select = {"length": "Length(nouns_noun.text)"} nouns = ( self.nouns.extra(select=select).filter(language=language).prefetch_related("keywords").order_by("-length") ) for noun in nouns: keywords = noun.active_keywords().values_list("text", flat=True) sorted_keywords = sorted(keywords, key=len, reverse=True) for keyword in sorted_keywords: replaced = replace_with_link(title, keyword, noun.get_absolute_url(), tag) if replaced is not None: title = replaced continue replaced = replace_with_link(title, noun.text, noun.get_absolute_url(), tag) if replaced is not None: title = replaced return title
def get_redirect_url(self, *args, **kwargs):
    argument = Contention.objects.annotate(
        premise_count=Count('premises')
    ).filter(
        premise_count__gt=2,
        language=normalize_language_code(get_language())
    ).order_by('?')[0]
    return argument.get_absolute_url()
def indirect_contentions(self):
    from premises.models import Contention  # to avoid circular import
    language = normalize_language_code(get_language())
    nouns = self.out_relations.values_list('target', flat=True)
    return Contention.objects.filter(
        language=language,
        is_published=True,
        nouns__in=nouns
    ).order_by('?')
def related_contentions(self):
    if self.related_nouns.exists():
        source = self.related_nouns
    else:
        source = self.nouns
    nouns = source.prefetch_related('out_relations')
    noun_ids = set(nouns.values_list('pk', flat=True))
    for noun in nouns.all():
        relations = set(noun.out_relations.values_list('target', flat=True))
        noun_ids = noun_ids.union(relations)
    available_nouns = (Noun.objects.filter(
        language=normalize_language_code(get_language()),
        id__in=noun_ids
    ).annotate(
        contention_count=Count('contentions'),
    ).filter(
        contentions__is_published=True
    ).prefetch_related(
        'contentions'
    ))
    serialized = [
        {
            'noun': noun,
            'contentions': (
                noun.contentions.exclude(pk=self.pk)
                .values('title', 'slug')
                .order_by('?')[:7]  # find a proper way to randomize suggestions
            ),
        }
        for noun in available_nouns
    ]
    return filter(itemgetter('contentions'), serialized)
def get_crowded_contentions(self):
    return Contention.objects.annotate(
        premise_count=Sum("premises"),
    ).filter(
        language=normalize_language_code(get_language()),
        premise_count__gt=0,
        **self.build_time_filters(date_field="date_creation")
    ).order_by("-premise_count")[:10]
def get_crowded_resolutions(self):
    return Resolution.objects.annotate(
        declaration_count=Sum("declarations"),
    ).filter(
        language=normalize_language_code(get_language()),
        declaration_count__gt=0,
        **self.build_time_filters(date_field="date_creation")
    ).order_by("-declaration_count")[:10]
def get_crowded_contentions(self):
    return Contention.objects.annotate(
        premise_count=Sum("premises"),
    ).filter(
        language=normalize_language_code(get_language()),
        premise_count__gt=0,
        **self.build_time_filters(date_field="date_creation")
    ).order_by("-premise_count")[:10]
def indirect_resolutions(self):
    from declarations.models import Resolution  # to avoid circular import
    language = normalize_language_code(get_language())
    nouns = self.out_relations.values_list("target", flat=True)
    return Resolution.objects.filter(
        language=language,
        is_published=True,
        nouns__in=nouns
    ).order_by("?")
def indirect_resolutions(self):
    from declarations.models import Resolution  # to avoid circular import
    language = normalize_language_code(get_language())
    nouns = self.out_relations.values_list('target', flat=True)
    return Resolution.objects.filter(
        language=language,
        is_published=True,
        nouns__in=nouns
    ).order_by('?')
def get_redirect_url(self, *args, **kwargs):
    argument = Contention.objects.annotate(
        premise_count=Count('premises')
    ).filter(
        premise_count__gt=2,
        language=normalize_language_code(get_language())
    ).order_by(
        '?'
    )[0]
    return argument.get_absolute_url()
def get_redirect_url(self, *args, **kwargs):
    resolution = Resolution.objects.annotate(
        declaration_count=Count('declarations')
    ).filter(
        declaration_count__gt=2,
        language=normalize_language_code(get_language())
    ).order_by(
        '?'
    )[0]
    return resolution.get_absolute_url()
def form_valid(self, form):
    form.instance.user = self.request.user
    form.instance.ip_address = get_ip_address(self.request)
    form.instance.language = normalize_language_code(get_language())
    form.instance.is_published = True
    response = super(ArgumentCreationView, self).form_valid(form)
    form.instance.update_sibling_counts()
    form.instance.save_nouns()
    form.instance.save()
    return response
def get_premises(self, paginate=True):
    keywords = self.request.GET.get('keywords')
    if not keywords or len(keywords) < 3:
        result = Premise.objects.none()
    else:
        result = Premise.objects.filter(
            argument__language=normalize_language_code(get_language()),
            text__contains=keywords)
    if paginate:
        result = result[self.get_offset():self.get_limit()]
    return result
def get_declarations(self, paginate=True):
    keywords = self.request.GET.get('keywords')
    if not keywords or len(keywords) < 3:
        result = Declaration.objects.none()
    else:
        result = Declaration.objects.filter(
            resolution__language=normalize_language_code(get_language()),
            text__contains=keywords)
    if paginate:
        result = result[self.get_offset():self.get_limit()]
    return result
def get_context_data(self, **kwargs):
    language = normalize_language_code(get_language())
    fallacies = (Report
                 .objects
                 .filter(reason__isnull=False,
                         contention__language=language)
                 .order_by('-id')
                 [self.get_offset():self.get_limit()])
    return super(FallaciesView, self).get_context_data(
        fallacies=fallacies, **kwargs)
def get_declarations(self, paginate=True): keywords = self.request.GET.get("keywords") if not keywords or len(keywords) < 3: result = Declaration.objects.none() else: result = Declaration.objects.filter( resolution__language=normalize_language_code(get_language()), text__contains=keywords, ) if paginate: result = result[self.get_offset():self.get_limit()] return result
def get_supported_declarations(self, user):
    return Declaration.objects.filter(
        is_approved=True,
        user=user,
        resolution__language=normalize_language_code(get_language())
    ).annotate(
        supporter_count=Count('supporters', distinct=True)
    ).filter(
        supporter_count__gt=0
    ).order_by(
        '-supporter_count'
    )[:10]
def get_supported_premises(self, user):
    return Premise.objects.filter(
        is_approved=True,
        user=user,
        argument__language=normalize_language_code(get_language())
    ).annotate(
        supporter_count=Count('supporters', distinct=True)
    ).filter(
        supporter_count__gt=0
    ).order_by(
        '-supporter_count'
    )[:10]
def get_related_channels(self, user):
    contention_nouns = Contention.objects.filter(
        premises__user=user,
        nouns__isnull=False,
        language=normalize_language_code(get_language())
    ).order_by(
        '-premises__weight'
    ).values_list('nouns', flat=True)
    supported_nouns = Contention.objects.filter(
        premises__supporters=user,
        nouns__isnull=False,
        language=normalize_language_code(get_language())
    ).order_by(
        '-premises__weight'
    ).values_list('nouns', flat=True)
    noun_ids = list(contention_nouns) + list(supported_nouns)
    noun_set = set(noun_ids)
    channels = Channel.objects.filter(
        nouns__in=noun_ids,
        language=normalize_language_code(get_language())
    ).distinct()
    bundle = []
    total_score = 0
    for channel in channels:
        channel_dict = {
            'channel': channel.serialize(),
            'score': 0,
        }
        for noun in channel.nouns.all():
            if noun.pk in noun_set:
                channel_dict['score'] += noun_ids.count(noun.pk)
        total_score += channel_dict['score']
        bundle.append(channel_dict)
    return sorted(bundle, key=lambda c: c['score'], reverse=True)
def from_synset(cls, synset):
    lemmas = synset.lemma_names()
    text = lemmas[0]
    keywords = lemmas[1:]
    noun, created = cls.objects.get_or_create(
        text=from_lemma(text),
        defaults={
            'is_active': False,
            'language': normalize_language_code(get_language())
        })
    for keyword in keywords:
        noun.add_keyword(from_lemma(keyword))
    return noun
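# --- Hedged usage sketch (not part of the original code) --------------------
# Assuming the classmethod above lives on the Noun model and that nltk with
# the WordNet corpus is available, it could be fed like this; the function
# name below is illustrative only.
def noun_from_wordnet(word):
    from nltk.corpus import wordnet  # assumed dependency

    synsets = wordnet.synsets(word, pos=wordnet.NOUN)
    if not synsets:
        return None
    # First lemma becomes the noun text, remaining lemmas become keywords.
    return Noun.from_synset(synsets[0])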
def get_contentions(self, paginate=True):
    keywords = self.request.GET.get('keywords')
    if not keywords or len(keywords) < 2:
        result = Contention.objects.none()
    else:
        result = Contention.objects.filter(
            title__icontains=keywords,
            language=normalize_language_code(get_language()))
    if paginate:
        result = result[self.get_offset():self.get_limit()]
    return result
def channel(self):
    from nouns.models import Channel
    if self.related_nouns.exists():
        nouns = self.related_nouns.all()
    else:
        nouns = self.nouns.all()
    if not nouns:
        return
    channel = Channel.objects.filter(
        nouns__in=nouns,
        language=normalize_language_code(get_language())
    ).first()
    return channel
def get_contentions(self, paginate=True):
    keywords = self.request.GET.get('keywords')
    if not keywords or len(keywords) < 2:
        result = Contention.objects.none()
    else:
        result = (Contention
                  .objects
                  .filter(title__icontains=keywords,
                          language=normalize_language_code(get_language())))
    if paginate:
        result = result[self.get_offset():self.get_limit()]
    return result
def channel(self):
    from nouns.models import Channel
    if self.related_nouns.exists():
        nouns = self.related_nouns.all()
    else:
        nouns = self.nouns.all()
    if not nouns:
        return
    channel = Channel.objects.filter(
        nouns__in=nouns,
        language=normalize_language_code(get_language())
    ).first()
    return channel
def related_resolutions(self):
    if self.related_nouns.exists():
        source = self.related_nouns
    else:
        source = self.nouns
    nouns = source.prefetch_related('out_relations')
    noun_ids = set(nouns.values_list('pk', flat=True))
    for noun in nouns.all():
        relations = set(noun.out_relations.values_list('target', flat=True))
        noun_ids = noun_ids.union(relations)
    available_nouns = (
        Noun.objects.filter(
            language=normalize_language_code(get_language()),
            id__in=noun_ids
        ).annotate(
            resolution_count=Count('resolutions'),
        ).filter(
            resolutions__is_published=True
        ).prefetch_related(
            'resolutions'
        )
    )
    serialized = [{
        'noun': noun,
        'resolutions': (
            noun.resolutions
            .exclude(pk=self.pk)
            .values('title', 'slug')
            .order_by('?')[:7]  # find a proper way to randomize suggestions
        ),
    } for noun in available_nouns]
    return filter(itemgetter('resolutions'), serialized)
def items(self):
    language = normalize_language_code(get_language())
    return Declaration.objects.filter(
        resolution__language=language,
        is_approved=True
    )
def get_noun(self):
    return get_object_or_404(
        Noun,
        slug=self.kwargs.get('slug'),
        language=normalize_language_code(get_language())
    )
def get_channel(self):
    language = normalize_language_code(get_language())
    return get_object_or_404(Channel,
                             slug=self.kwargs['slug'],
                             language=language)
def get_channels(self):
    return Channel.objects.filter(
        language=normalize_language_code(get_language())
    ).order_by('order')
def get_object(self, queryset=None):
    return get_object_or_404(
        self.queryset,
        slug=self.kwargs['slug'],
        language=normalize_language_code(get_language())
    )
def language(self, language_code=None):
    if language_code is None:
        language_code = get_language()
    language_code = normalize_language_code(language_code)
    return self.filter(language=language_code)
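# --- Hedged usage sketch (not part of the original code) --------------------
# Assuming the queryset method above is exposed through a model manager
# (e.g. Contention.objects), the per-language filter would read as:
#
#     Contention.objects.language()         # uses the active request language
#     Contention.objects.language('en-us')  # run through normalize_language_code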
def items(self):
    language = normalize_language_code(get_language())
    return Resolution.objects.filter(
        language=language,
        is_published=True
    )
def build_graph(self, user):
    contention_nouns = Contention.objects.filter(
        premises__user=user,
        nouns__isnull=False,
        language=normalize_language_code(get_language())
    ).order_by(
        '-premises__weight'
    ).values_list(
        'nouns',
        flat=True
    )
    supported_nouns = Contention.objects.filter(
        premises__supporters=user,
        nouns__isnull=False,
        language=normalize_language_code(get_language())
    ).order_by(
        '-premises__weight'
    ).values_list(
        'nouns',
        flat=True
    )
    # Symmetric difference: nouns that appear in authored or supported
    # contentions, but not both.
    noun_ids = set(contention_nouns) ^ set(supported_nouns)
    channels = Channel.objects.filter(
        nouns__in=noun_ids,
        language=normalize_language_code(get_language())
    ).distinct()
    graph = DiGraph()
    last_channel = None
    first_channel = None
    label = lambda x: x.title()
    for channel in channels:
        graph.add_node(channel.title, {
            "label": label(channel.title),
            "type": "channel"
        })
        if last_channel:
            graph.add_edge(last_channel.title, channel.title)
        channel_nouns = channel.nouns.all()
        for channel_noun in channel_nouns:
            if channel_noun.id in noun_ids:
                graph.add_edge(channel_noun.text, channel.title)
                graph.add_node(channel_noun.text, {
                    "label": label(channel_noun.text),
                    "type": "noun"
                })
        last_channel = channel
        if not first_channel:
            first_channel = channel
    # Close the channel chain into a cycle; guard against the case where no
    # channels matched (first_channel would still be None).
    if first_channel is not None:
        graph.add_edge(first_channel.title, last_channel.title)
    return graph
def active_resolutions(self):
    language = normalize_language_code(get_language())
    return self.resolutions.filter(
        is_published=True,
        language=language
    ).order_by('?')
def get_form(self, request, obj=None, **kwargs):
    form = super(NounAdmin, self).get_form(request, obj, **kwargs)
    form.base_fields['language'].initial = normalize_language_code(get_language())
    return form
def active_contentions(self):
    language = normalize_language_code(get_language())
    return self.contentions.filter(
        is_published=True,
        language=language
    ).order_by('?')
def get_channels(self):
    return Channel.objects.filter(
        language=normalize_language_code(get_language())
    ).order_by('order')