Example #1
def can_view_event(event, user):
    """return True if the current user has right to view this event"""
    if event.privacy == Event.PRIVACY_PUBLIC:
        return True
    elif not user.is_active:
        return False

    # you're logged in
    if event.privacy == Event.PRIVACY_COMPANY:
        # but then it's not good enough to be a contributor
        if is_contributor(user):
            return False
    else:
        if not is_contributor(user):
            # staff can always see it
            return True

        curated_groups = [
            x[0] for x in
            CuratedGroup.objects.filter(event=event).values_list('name')
        ]
        if curated_groups:
            return any(
                [mozillians.in_group(user.email, x) for x in curated_groups]
            )

    return True
Example #2
def can_view_event(event, user):
    """return True if the current user has right to view this event"""
    if event.privacy == Event.PRIVACY_PUBLIC:
        return True
    elif not user.is_active:
        return False

    # you're logged in
    if event.privacy == Event.PRIVACY_COMPANY:
        # but then it's not good enough to be a contributor
        if is_contributor(user):
            return False
    else:
        if not is_contributor(user):
            # staff can always see it
            return True

        curated_groups = [
            x[0] for x in CuratedGroup.objects.filter(
                event=event).values_list('name')
        ]
        if curated_groups:
            return any(
                [mozillians.in_group(user.email, x) for x in curated_groups])

    return True
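
The two variants above are the gatekeeping helper only; the event view that calls it is not reproduced on this page. A hedged sketch of how such a view might use can_view_event, sending denied viewers to the main:permission_denied view shown in Examples 26 and 27 (the view body, template name, and URL usage are assumptions, not the actual airmozilla source):

from django.shortcuts import get_object_or_404, redirect, render

def event(request, slug):
    # Hypothetical sketch, not the actual airmozilla view.
    event = get_object_or_404(Event, slug=slug)
    if not can_view_event(event, request.user):
        # main:permission_denied takes the event slug (see Examples 26/27).
        return redirect('main:permission_denied', event.slug)
    return render(request, 'main/event.html', {'event': event})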
Example #3
    def test_is_contributor(self):
        from airmozilla.main.views import is_contributor
        anonymous = AnonymousUser()
        ok_(not is_contributor(anonymous))

        employee_wo_profile = User.objects.create_user(
            'worker', '*****@*****.**', 'secret'
        )
        ok_(not is_contributor(employee_wo_profile))
        employee_w_profile = User.objects.create_user(
            'worker2', '*****@*****.**', 'secret'
        )
        assert not UserProfile.objects.filter(user=employee_wo_profile)
        up = UserProfile.objects.create(
            user=employee_w_profile,
            contributor=False
        )
        ok_(not is_contributor(employee_w_profile))
        up.contributor = True
        up.save()
        # re-fetch to avoid internal django cache on profile fetching
        employee_w_profile = User.objects.get(pk=employee_w_profile.pk)
        ok_(is_contributor(employee_w_profile))

        contributor = User.objects.create_user(
            'nigel', '*****@*****.**', 'secret'
        )
        UserProfile.objects.create(
            user=contributor,
            contributor=True
        )
        ok_(is_contributor(contributor))
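
The test above pins down the behaviour of is_contributor, which every example on this page imports from airmozilla.main.views but never defines: anonymous users and users without a UserProfile are not contributors, and only a profile with contributor=True makes one. A minimal sketch consistent with that test (an assumption, not the actual implementation):

def is_contributor(user):
    # Anonymous or inactive users are never contributors.
    if not user.is_active:
        return False
    # No profile, or a profile with contributor=False, means employee.
    # (UserProfile is assumed to live in airmozilla.main.models.)
    return UserProfile.objects.filter(user=user, contributor=True).exists()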
Example #4
    def test_can_view_staticpage(self):
        from airmozilla.main.views import is_contributor
        anon = AnonymousUser()
        assert not is_contributor(anon)
        leonard = User.objects.create(username='******')
        UserProfile.objects.create(user=leonard, contributor=True)
        assert is_contributor(leonard)
        peter = User.objects.create(username='******')
        assert not is_contributor(peter)

        page1 = StaticPage.objects.create(title="Title 1", )
        ok_(can_view_staticpage(page1, anon))
        ok_(can_view_staticpage(page1, leonard))
        ok_(can_view_staticpage(page1, peter))

        page2 = StaticPage.objects.create(
            title="Title 2",
            privacy=Event.PRIVACY_CONTRIBUTORS,
        )
        ok_(not can_view_staticpage(page2, anon))
        ok_(can_view_staticpage(page2, leonard))
        ok_(can_view_staticpage(page2, peter))

        page3 = StaticPage.objects.create(
            title="Title 3",
            privacy=Event.PRIVACY_COMPANY,
        )
        ok_(not can_view_staticpage(page3, anon))
        ok_(not can_view_staticpage(page3, leonard))
        ok_(can_view_staticpage(page3, peter))
Example #5
def can_edit_event(event, user, default="manage:events"):
    if not user.has_perm("main.change_event_others") and user != event.creator:
        return redirect(default)
    if event.privacy == Event.PRIVACY_COMPANY and is_contributor(user):
        return redirect(default)
    elif CuratedGroup.objects.filter(event=event) and is_contributor(user):
        # Editing this event requires that you're also part of that curated
        # group.
        curated_group_names = [x[0] for x in CuratedGroup.objects.filter(event=event).values_list("name")]
        any_ = any([mozillians.in_group(user.email, x) for x in curated_group_names])
        if not any_:
            return redirect(default)
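
can_edit_event returns a redirect response when the user may not edit the event and falls through (returning None) when editing is allowed, so callers branch on its return value. A hedged usage sketch (the view name, body, and template are illustrative, not taken from the source):

from django.shortcuts import get_object_or_404, render

def event_edit(request, id):
    event = get_object_or_404(Event, id=id)
    # None means editing is allowed; anything else is the redirect to return.
    response = can_edit_event(event, request.user)
    if response:
        return response
    return render(request, 'manage/event_edit.html', {'event': event})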
Example #6
    def test_can_view_event(self):
        event = Event.objects.get(title='Test event')
        assert event.privacy == Event.PRIVACY_PUBLIC  # default

        anonymous = AnonymousUser()
        employee_wo_profile = User.objects.create_user(
            'worker', '*****@*****.**', 'secret'
        )
        employee_w_profile = User.objects.create_user(
            'worker2', '*****@*****.**', 'secret'
        )
        assert not UserProfile.objects.filter(user=employee_wo_profile)
        UserProfile.objects.create(
            user=employee_w_profile,
            contributor=False
        )
        contributor = User.objects.create_user(
            'nigel', '*****@*****.**', 'secret'
        )
        UserProfile.objects.create(
            user=contributor,
            contributor=True
        )

        from airmozilla.main.views import can_view_event, is_contributor
        ok_(can_view_event(event, anonymous))
        assert not is_contributor(anonymous)

        ok_(can_view_event(event, contributor))
        assert is_contributor(contributor)

        ok_(can_view_event(event, employee_wo_profile))
        assert not is_contributor(employee_wo_profile)

        ok_(can_view_event(event, employee_w_profile))
        assert not is_contributor(employee_w_profile)

        event.privacy = Event.PRIVACY_COMPANY
        event.save()
        ok_(not can_view_event(event, anonymous))
        ok_(not can_view_event(event, contributor))
        ok_(can_view_event(event, employee_wo_profile))
        ok_(can_view_event(event, employee_w_profile))

        event.privacy = Event.PRIVACY_CONTRIBUTORS
        event.save()
        ok_(not can_view_event(event, anonymous))
        ok_(can_view_event(event, contributor))
        ok_(can_view_event(event, employee_wo_profile))
        ok_(can_view_event(event, employee_w_profile))
Example #7
def can_edit_event(event, user, default='manage:events'):
    if (not user.has_perm('main.change_event_others')
            and user != event.creator):
        return redirect(default)
    if event.privacy == Event.PRIVACY_COMPANY and is_contributor(user):
        return redirect(default)
    elif (CuratedGroup.objects.filter(event=event) and is_contributor(user)):
        # Editing this event requires that you're also part of that curated
        # group.
        curated_group_names = [
            x[0] for x in CuratedGroup.objects.filter(
                event=event).values_list('name')
        ]
        if not mozillians.in_groups(user.email, curated_group_names):
            return redirect(default)
Example #8
def get_featured_events(
    channels,
    user,
    length=settings.FEATURED_SIDEBAR_COUNT
):
    """return a list of events that are sorted by their score"""
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True

    cache_key = 'featured_events_%s_%s' % (int(anonymous), int(contributor))
    if channels:
        cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        cache_key += str(event.modified.microsecond)
    featured = cache.get(cache_key)
    if featured is None:
        featured = _get_featured_events(channels, anonymous, contributor)
        featured = featured[:length]
        cache.set(cache_key, featured, 60 * 60)
    return [x.event for x in featured]
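
Note the cache key layout above: the viewer class (anonymous/contributor flags), the channel ids, and the microsecond component of the most recently modified event, which acts as a cheap cache-buster whenever any event changes. A quick illustration of how the key is assembled:

anonymous, contributor = False, True
cache_key = 'featured_events_%s_%s' % (int(anonymous), int(contributor))
cache_key += ','.join(str(x) for x in (3, 7))  # channel ids
cache_key += str(123456)  # most recent event's .modified microsecond
# cache_key == 'featured_events_0_13,7123456'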
Example #9
def tag_cloud(request, THRESHOLD=1):
    context = {}
    qs = (
        Event.tags.through.objects
        .values('tag_id')
        .annotate(Count('tag__id'))
    )
    if request.user.is_active:
        if is_contributor(request.user):
            # because of a bug in Django we can't use qs.exclude()
            qs = qs.filter(
                Q(event__privacy=Event.PRIVACY_CONTRIBUTORS) |
                Q(event__privacy=Event.PRIVACY_PUBLIC)
            )
    else:
        qs = qs.filter(event__privacy=Event.PRIVACY_PUBLIC)
    tags_map = dict(
        (x['id'], x['name'])
        for x in
        Tag.objects.all()
        .values('id', 'name')
    )
    tags = []
    for each in qs.values('tag__id__count', 'tag_id'):
        count = each['tag__id__count']
        if count > THRESHOLD:
            tags.append(_Tag(tags_map[each['tag_id']], count))

    context['tags'] = cloud.calculate_cloud(
        tags,
        steps=10
    )
    return render(request, 'main/tag_cloud.html', context)
Example #10
def unpicked_pictures(request):
    """returns a report of all events that have pictures in the picture
    gallery but none has been picked yet. """
    pictures = Picture.objects.filter(event__isnull=False)
    events = Event.objects.archived()
    assert request.user.is_active
    if is_contributor(request.user):
        events = events.exclude(privacy=Event.PRIVACY_COMPANY)

    events = events.filter(id__in=pictures.values('event'))
    events = events.exclude(picture__in=pictures)
    count = events.count()
    events = events.order_by('?')[:20]
    pictures_counts = {}
    grouped_pictures = (Picture.objects.filter(
        event__in=events).values('event').annotate(Count('event')))
    for each in grouped_pictures:
        pictures_counts[each['event']] = each['event__count']

    context = {
        'count': count,
        'events': events,
        'pictures_counts': pictures_counts,
    }
    return render(request, 'main/unpicked_pictures.html', context)
Example #11
def channel_feed(request, slug):
    context = {}

    # this slug might be the slug of a parent
    channels = Channel.objects.filter(
        Q(slug=slug) |
        Q(parent__slug=slug)
    )

    privacy_filter = {}
    privacy_exclude = {}
    if request.user.is_active:
        if is_contributor(request.user):
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}

    archived_events = Event.objects.archived()
    if privacy_filter:
        archived_events = archived_events.filter(**privacy_filter)
    elif privacy_exclude:
        archived_events = archived_events.exclude(**privacy_exclude)
    archived_events = archived_events.order_by('-start_time')
    archived_events = archived_events.filter(channels__in=channels)
    page = 1
    archived_paged = paginate(archived_events, page, 100)

    context['events'] = archived_paged

    context['get_media_info'] = get_media_info

    response = render(request, 'roku/channel.xml', context)
    response['Content-Type'] = 'text/xml'
    return response
Example #12
def channels(request):
    channels = []

    privacy_filter = {}
    privacy_exclude = {}
    if request.user.is_active:
        if is_contributor(request.user):
            feed_privacy = 'contributors'
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        else:
            feed_privacy = 'company'
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
        feed_privacy = 'public'
    events = Event.objects.filter(status=Event.STATUS_SCHEDULED)
    if privacy_filter:
        events = events.filter(**privacy_filter)
    elif privacy_exclude:
        events = events.exclude(**privacy_exclude)

    children_channels = Channel.objects.filter(
        parent__parent__isnull=True,
        parent__isnull=False,
    )
    parents = collections.defaultdict(list)
    for channel in children_channels:
        parents[channel.parent_id].append(channel)

    channels_qs = (Channel.objects.filter(parent__isnull=True).exclude(
        slug=settings.DEFAULT_CHANNEL_SLUG))

    # make a dict of parental counts
    subchannel_counts = {}
    qs = (
        Channel.objects.filter(parent__isnull=False).values('parent_id').
        order_by()  # necessary because the model has a default ordering
        .annotate(Count('parent')))
    for each in qs:
        subchannel_counts[each['parent_id']] = each['parent__count']

    # make a dict of events counts by channel
    event_counts = {}
    qs = (Event.channels.through.objects.filter(
        event__in=events).values('channel_id').annotate(Count('channel')))
    for each in qs:
        event_counts[each['channel_id']] = each['channel__count']

    for channel in channels_qs:
        event_count = event_counts.get(channel.id, 0)
        subchannel_count = subchannel_counts.get(channel.id, 0)
        subchannels = parents.get(channel.id, [])
        if event_count or subchannel_count:
            channels.append(
                (channel, event_count, subchannel_count, subchannels))
    data = {
        'channels': channels,
        'feed_privacy': feed_privacy,
    }
    return render(request, 'main/channels.html', data)
Example #13
def channels(request):
    channels = []

    privacy_filter = {}
    privacy_exclude = {}
    if request.user.is_active:
        if is_contributor(request.user):
            feed_privacy = 'contributors'
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        else:
            feed_privacy = 'company'
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
        feed_privacy = 'public'
    events = Event.objects.filter(status=Event.STATUS_SCHEDULED)
    if privacy_filter:
        events = events.filter(**privacy_filter)
    elif privacy_exclude:
        events = events.exclude(**privacy_exclude)

    channels_qs = (
        Channel.objects
        .filter(parent__isnull=True)
        .exclude(slug=settings.DEFAULT_CHANNEL_SLUG)
    )

    # make a dict of parental counts
    subchannel_counts = {}
    qs = (
        Channel.objects
        .filter(parent__isnull=False)
        .values('parent_id')
        .order_by()  # necessary because the model has a default ordering
        .annotate(Count('parent'))
    )
    for each in qs:
        subchannel_counts[each['parent_id']] = each['parent__count']

    # make a dict of events counts by channel
    event_counts = {}
    qs = (
        Event.channels.through.objects.filter(event__in=events)
        .values('channel_id')
        .annotate(Count('channel'))
    )
    for each in qs:
        event_counts[each['channel_id']] = each['channel__count']

    for channel in channels_qs:
        event_count = event_counts.get(channel.id, 0)
        subchannel_count = subchannel_counts.get(channel.id, 0)
        if event_count or subchannel_count:
            channels.append((channel, event_count, subchannel_count))
    data = {
        'channels': channels,
        'feed_privacy': feed_privacy,
    }
    return render(request, 'main/channels.html', data)
Example #14
 def test_users_data_contributor(self):
     user, = User.objects.filter(username='******')
     UserProfile.objects.create(user=user, contributor=True)
     assert is_contributor(user)
     url = reverse('manage:users_data')
     response = self.client.get(url)
     eq_(response.status_code, 200)
     struct = json.loads(response.content)
     row = [x for x in struct['users'] if x['email'] == user.email][0]
     ok_(row['is_contributor'])
Example #15
def can_edit_event(event, user, default='manage:events'):
    if (not user.has_perm('main.change_event_others') and
            user != event.creator):
        return redirect(default)
    if event.privacy == Event.PRIVACY_COMPANY and is_contributor(user):
        return redirect(default)
    elif (
        CuratedGroup.objects.filter(event=event) and is_contributor(user)
    ):
        # Editing this event requires that you're also part of that curated
        # group.
        curated_group_names = [
            x[0] for x in
            CuratedGroup.objects.filter(event=event).values_list('name')
        ]
        if not mozillians.in_groups(
            user.email,
            curated_group_names
        ):
            return redirect(default)
Example #16
def _get_feed_privacy(user):
    """return 'public', 'contributors' or 'company' depending on the user
    profile.
    Because this is used very frequently and because it's expensive to
    pull out the entire user profile every time, we use cache to remember
    if the user is a contributor or not (applicable only if logged in)
    """
    if user.is_active:
        if is_contributor(user):
            return 'contributors'
        return 'company'
    return 'public'
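
The docstring mentions using a cache so the contributor check does not pull the user profile on every call, but the caching itself is not visible in this snippet. A sketch of the kind of per-user memoization it alludes to (the key format and timeout are assumptions):

from django.core.cache import cache

def _is_contributor_cached(user):
    # Hypothetical helper: remember the contributor flag per user id so
    # repeated _get_feed_privacy calls can skip the profile lookup.
    cache_key = 'is-contributor-%s' % user.pk
    result = cache.get(cache_key)
    if result is None:
        result = is_contributor(user)
        cache.set(cache_key, result, 60 * 60)
    return result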
Example #17
def _get_feed_privacy(user):
    """return 'public', 'contributors' or 'company' depending on the user
    profile.
    Because this is used very frequently and because it's expensive to
    pull out the entire user profile every time, we use cache to remember
    if the user is a contributor or not (applicable only if logged in)
    """
    if user.is_active:
        if is_contributor(user):
            return 'contributors'
        return 'company'
    return 'public'
Example #18
    def test_can_view_staticpage(self):
        from airmozilla.main.views import is_contributor
        anon = AnonymousUser()
        assert not is_contributor(anon)
        leonard = User.objects.create(
            username='******'
        )
        UserProfile.objects.create(
            user=leonard,
            contributor=True
        )
        assert is_contributor(leonard)
        peter = User.objects.create(
            username='******'
        )
        assert not is_contributor(peter)

        page1 = StaticPage.objects.create(
            title="Title 1",
        )
        ok_(can_view_staticpage(page1, anon))
        ok_(can_view_staticpage(page1, leonard))
        ok_(can_view_staticpage(page1, peter))

        page2 = StaticPage.objects.create(
            title="Title 2",
            privacy=Event.PRIVACY_CONTRIBUTORS,
        )
        ok_(not can_view_staticpage(page2, anon))
        ok_(can_view_staticpage(page2, leonard))
        ok_(can_view_staticpage(page2, peter))

        page3 = StaticPage.objects.create(
            title="Title 3",
            privacy=Event.PRIVACY_COMPANY,
        )
        ok_(not can_view_staticpage(page3, anon))
        ok_(not can_view_staticpage(page3, leonard))
        ok_(can_view_staticpage(page3, peter))
Example #19
def can_view_staticpage(page, user):
    if page.privacy == Event.PRIVACY_PUBLIC:
        return True

    if not user.is_active:
        return False

    from airmozilla.main.views import is_contributor
    if page.privacy == Event.PRIVACY_COMPANY:
        if is_contributor(user):
            return False

    return True
Example #20
def can_view_staticpage(page, user):
    if page.privacy == Event.PRIVACY_PUBLIC:
        return True

    if not user.is_active:
        return False

    from airmozilla.main.views import is_contributor
    if page.privacy == Event.PRIVACY_COMPANY:
        if is_contributor(user):
            return False

    return True
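
As with can_view_event, the view that serves static pages is not shown on this page. A hedged sketch of how it might apply can_view_staticpage (the lookup field, view name, and template are assumptions):

from django import http
from django.shortcuts import get_object_or_404, render

def staticpage(request, url):
    # Hypothetical sketch, not the actual airmozilla view.
    page = get_object_or_404(StaticPage, url=url)
    if not can_view_staticpage(page, request.user):
        return http.HttpResponseForbidden('You are not allowed to view this page')
    return render(request, 'main/staticpage.html', {'page': page})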
Example #21
 def test_users_data_contributor(self):
     user, = User.objects.filter(username='******')
     UserProfile.objects.create(
         user=user,
         contributor=True
     )
     assert is_contributor(user)
     url = reverse('manage:users_data')
     response = self.client.get(url)
     eq_(response.status_code, 200)
     struct = json.loads(response.content)
     row = [x for x in struct['users'] if x['email'] == user.email][0]
     ok_(row['is_contributor'])
Example #22
def get_featured_events(
    channels,
    user,
    length=settings.FEATURED_SIDEBAR_COUNT
):
    """return a list of events that are sorted by their score"""
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True

    cache_key = 'featured_events_%s_%s' % (int(anonymous), int(contributor))
    if channels:
        cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        cache_key += str(event.modified.microsecond)
    featured = cache.get(cache_key)
    if featured is None:
        featured = _get_featured_events(channels, anonymous, contributor)
        featured = featured[:length]
        cache.set(cache_key, featured, 60 * 60)

    # Sadly, in Django when you do a left outer join on a many-to-many
    # table you get repeats and you can't fix that by adding a simple
    # `distinct` on the first field.
    # In django, if you do `myqueryset.distinct('id')` it requires
    # that that's also something you order by.
    # In pure Postgresql you can do this:
    #   SELECT
    #     DISTINCT main_eventhitstats.id as id,
    #     (some formula) AS score,
    #     ...
    #   FROM ...
    #   INNER JOIN ...
    #   INNER JOIN ...
    #   ORDER BY score DESC
    #   LIMIT 5;
    #
    # But you can't do that with Django.
    # So we have to manually de-dupe. Hopefully we can alleviate this
    # problem altogether when we start doing aggregates where you have
    # many repeated EventHitStats *per* event and you need to look at
    # their total score across multiple vidly shortcodes.
    events = []
    for each in featured:
        if each.event not in events:
            events.append(each.event)
    return events
Example #23
def get_featured_events(
    channels,
    user,
    length=settings.FEATURED_SIDEBAR_COUNT
):
    """return a list of events that are sorted by their score"""
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True

    cache_key = 'featured_events_%s_%s' % (int(anonymous), int(contributor))
    if channels:
        cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        cache_key += str(event.modified.microsecond)
    featured = cache.get(cache_key)
    if featured is None:
        featured = _get_featured_events(channels, anonymous, contributor)
        featured = featured[:length]
        cache.set(cache_key, featured, 60 * 60)

    # Sadly, in Django when you do a left outer join on a many-to-many
    # table you get repeats and you can't fix that by adding a simple
    # `distinct` on the first field.
    # In django, if you do `myqueryset.distinct('id')` it requires
    # that that's also something you order by.
    # In pure Postgresql you can do this:
    #   SELECT
    #     DISTINCT main_eventhitstats.id as id,
    #     (some formula) AS score,
    #     ...
    #   FROM ...
    #   INNER JOIN ...
    #   INNER JOIN ...
    #   ORDER BY score DESC
    #   LIMIT 5;
    #
    # But you can't do that with Django.
    # So we have to manually de-dupe. Hopefully we can alleviate this
    # problem altogether when we start doing aggregates where you have
    # many repeated EventHitStats *per* event and you need to look at
    # their total score across multiple vidly shortcodes.
    events = []
    for each in featured:
        if each.event not in events:
            events.append(each.event)
    return events
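
The "each.event not in events" membership test above is quadratic in the number of featured rows, which is harmless here because the list is already capped at length. For larger result sets, an order-preserving de-dupe keyed on the event id stays linear (a sketch, assuming the rows expose a plain event foreign key and therefore an event_id attribute):

def _dedupe_events(featured):
    # Order-preserving de-dupe using a set of seen event ids.
    seen = set()
    events = []
    for each in featured:
        if each.event_id not in seen:
            seen.add(each.event_id)
            events.append(each.event)
    return events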
Example #24
    def test_users_data_contributor(self):
        # Because the default user, created from the fixtures,
        # was created without a last_login.
        User.objects.filter(last_login__isnull=True).update(
            last_login=timezone.now())

        user, = User.objects.filter(username='******')
        UserProfile.objects.create(user=user, contributor=True)
        assert is_contributor(user)
        url = reverse('manage:users_data')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        row = [x for x in struct['users'] if x['email'] == user.email][0]
        ok_(row['is_contributor'])
Example #25
def calendar_data(request):
    form = forms.CalendarDataForm(request.GET)
    if not form.is_valid():
        return http.HttpResponseBadRequest(str(form.errors))

    start = form.cleaned_data['start']
    end = form.cleaned_data['end']

    start = start.replace(tzinfo=utc)
    end = end.replace(tzinfo=utc)

    privacy_filter = {}
    privacy_exclude = {}
    events = Event.objects.scheduled_or_processing()
    if request.user.is_active:
        if is_contributor(request.user):
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
        events = events.approved()

    if privacy_filter:
        events = events.filter(**privacy_filter)
    elif privacy_exclude:
        events = events.exclude(**privacy_exclude)

    events = events.filter(
        start_time__gte=start,
        start_time__lt=end
    )
    event_objects = []
    for event in events.select_related('location'):
        start_time = event.start_time
        end_time = start_time + datetime.timedelta(
            seconds=max(event.duration or event.estimated_duration, 60 * 20)
        )
        # We don't need 'end' because we don't yet know how long the event
        # was or will be.
        event_objects.append({
            'title': event.title,
            'start': start_time.isoformat(),
            'end': end_time.isoformat(),
            'url': reverse('main:event', args=(event.slug,)),
            'description': short_desc(event),
            'allDay': False,
        })

    return event_objects
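
The end time computed above is a display heuristic: the known or estimated duration, but never less than 20 minutes. A quick worked example of that floor:

import datetime

# An event with no recorded duration and a 10-minute estimate (600 seconds)
# still gets a 20-minute slot, because the 60 * 20 floor wins.
seconds = max(600, 60 * 20)                 # 1200
slot = datetime.timedelta(seconds=seconds)  # 0:20:00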
Example #26
def permission_denied(request, slug):
    context = {}
    event = get_object_or_404(Event, slug=slug)
    context['event'] = event
    context['is_contributor'] = is_contributor(request.user)
    context['is_company_only'] = event.privacy == Event.PRIVACY_COMPANY

    curated_groups = CuratedGroup.objects.filter(event=event).order_by('name')
    context['curated_groups'] = []
    for group in curated_groups:
        context['curated_groups'].append({
            'name': group.name,
            'url': group.url
        })

    return render(request, 'main/permission_denied.html', context)
Example #27
def permission_denied(request, slug):
    context = {}
    event = get_object_or_404(Event, slug=slug)
    context['event'] = event
    context['is_contributor'] = is_contributor(request.user)
    context['is_company_only'] = event.privacy == Event.PRIVACY_COMPANY

    curated_groups = CuratedGroup.objects.filter(event=event).order_by('name')
    context['curated_groups'] = []
    for group in curated_groups:
        context['curated_groups'].append({
            'name': group.name,
            'url': group.url
        })

    return render(request, 'main/permission_denied.html', context)
Example #28
def calendar_data(request):
    form = forms.CalendarDataForm(request.GET)
    if not form.is_valid():
        return http.HttpResponseBadRequest(str(form.errors))

    start = form.cleaned_data['start']
    end = form.cleaned_data['end']

    start = start.replace(tzinfo=utc)
    end = end.replace(tzinfo=utc)

    privacy_filter = {}
    privacy_exclude = {}
    events = Event.objects.scheduled_or_processing()
    if request.user.is_active:
        if is_contributor(request.user):
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
        events = events.approved()

    if privacy_filter:
        events = events.filter(**privacy_filter)
    elif privacy_exclude:
        events = events.exclude(**privacy_exclude)

    events = events.filter(start_time__gte=start, start_time__lt=end)
    event_objects = []
    for event in events.select_related('location'):
        start_time = event.start_time
        end_time = start_time + datetime.timedelta(
            seconds=max(event.duration or event.estimated_duration, 60 * 20))
        # We don't need 'end' because we don't yet know how long the event
        # was or will be.
        event_objects.append({
            'title': event.title,
            'start': start_time.isoformat(),
            'end': end_time.isoformat(),
            'url': reverse('main:event', args=(event.slug, )),
            'description': short_desc(event),
            'allDay': False,
        })

    return event_objects
Example #29
    def test_users_data_contributor(self):
        # Because the default user, created from the fixtures,
        # was created without a last_login.
        User.objects.filter(last_login__isnull=True).update(
            last_login=timezone.now()
        )

        user, = User.objects.filter(username='******')
        UserProfile.objects.create(
            user=user,
            contributor=True
        )
        assert is_contributor(user)
        url = reverse('manage:users_data')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        row = [x for x in struct['users'] if x['email'] == user.email][0]
        ok_(row['is_contributor'])
Example #30
    def test_users_data(self):
        # Because the default user, created from the fixtures,
        # was created without a last_login.
        User.objects.filter(last_login__isnull=True).update(
            last_login=timezone.now()
        )
        assert self.user.last_login
        url = reverse('manage:users_data')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        eq_(len(struct['users']), User.objects.all().count())
        ok_('manage:user_edit' in struct['urls'])
        user, = User.objects.filter(is_staff=False)
        assert not is_contributor(user)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        ok_(not same_user.get('is_contributor'))
        ok_(not same_user.get('is_superuser'))
        ok_(not same_user.get('is_staff'))
        ok_(not same_user.get('is_inactive'))

        user.is_superuser = True
        user.is_staff = True
        user.is_active = False
        user.save()

        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        ok_(same_user.get('is_superuser'))
        ok_(same_user.get('is_staff'))
        ok_(same_user.get('is_inactive'))
        ok_(not same_user.get('groups'))

        testgroup = Group.objects.create(name='testapprover')
        user.groups.add(testgroup)
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        eq_(same_user['groups'], [testgroup.name])
Example #31
    def test_users_data(self):
        # Because the default user, created from the fixtures,
        # was created without a last_login.
        User.objects.filter(last_login__isnull=True).update(
            last_login=timezone.now()
        )
        assert self.user.last_login
        url = reverse('manage:users_data')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        eq_(len(struct['users']), User.objects.all().count())
        ok_('manage:user_edit' in struct['urls'])
        user, = User.objects.filter(is_staff=False)
        assert not is_contributor(user)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        ok_(not same_user.get('is_contributor'))
        ok_(not same_user.get('is_superuser'))
        ok_(not same_user.get('is_staff'))
        ok_(not same_user.get('is_inactive'))

        user.is_superuser = True
        user.is_staff = True
        user.is_active = False
        user.save()

        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        ok_(same_user.get('is_superuser'))
        ok_(same_user.get('is_staff'))
        ok_(same_user.get('is_inactive'))
        ok_(not same_user.get('groups'))

        testgroup = Group.objects.create(name='testapprover')
        user.groups.add(testgroup)
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        eq_(same_user['groups'], [testgroup.name])
Example #32
def autocompeter(request):
    """We need to tell the Autocompeter service which groups the current
    user should be able to view."""
    key = getattr(settings, 'AUTOCOMPETER_KEY', None)
    if not key:
        return {}

    groups = []
    if request.user and request.user.is_active:
        groups.append(Event.PRIVACY_CONTRIBUTORS)
        if not is_contributor(request.user):
            groups.append(Event.PRIVACY_COMPANY)
    url = getattr(settings, 'AUTOCOMPETER_URL', '')
    domain = getattr(settings, 'AUTOCOMPETER_DOMAIN', '')
    return {
        'include_autocompeter': True,
        'autocompeter_domain': domain,
        'autocompeter_groups': ','.join(groups),
        'autocompeter_url': url,
    }
Example #33
def unpicked_pictures(request):
    """returns a report of all events that have pictures in the picture
    gallery but none has been picked yet. """
    pictures = Picture.objects.filter(event__isnull=False)
    events = Event.objects.archived()
    assert request.user.is_active
    if is_contributor(request.user):
        events = events.exclude(privacy=Event.PRIVACY_COMPANY)

    events = events.filter(id__in=pictures.values("event"))
    events = events.exclude(picture__in=pictures)
    count = events.count()
    events = events.order_by("?")[:20]
    pictures_counts = {}
    grouped_pictures = Picture.objects.filter(event__in=events).values("event").annotate(Count("event"))
    for each in grouped_pictures:
        pictures_counts[each["event"]] = each["event__count"]

    context = {"count": count, "events": events, "pictures_counts": pictures_counts}
    return render(request, "main/unpicked_pictures.html", context)
Example #34
def autocompeter(request):
    """We need to tell the Autocompeter service which groups the current
    user should be able to view."""
    key = getattr(settings, 'AUTOCOMPETER_KEY', None)
    if not key:
        return {}

    groups = []
    if request.user and request.user.is_active:
        groups.append(Event.PRIVACY_CONTRIBUTORS)
        if not is_contributor(request.user):
            groups.append(Event.PRIVACY_COMPANY)
    url = getattr(settings, 'AUTOCOMPETER_URL', '')
    domain = getattr(settings, 'AUTOCOMPETER_DOMAIN', '')
    enabled = getattr(settings, 'AUTOCOMPETER_ENABLED', True)
    return {
        'include_autocompeter': enabled,
        'autocompeter_domain': domain,
        'autocompeter_groups': ','.join(groups),
        'autocompeter_url': url,
    }
Example #35
def get_upcoming_events(channels, user,
                        length=settings.UPCOMING_SIDEBAR_COUNT):
    """return a queryset of upcoming events"""
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True

    cache_key = 'upcoming_events_%s_%s' % (int(anonymous), int(contributor))
    cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        cache_key += str(event.modified.microsecond)
    upcoming = cache.get(cache_key)
    if upcoming is None:
        upcoming = _get_upcoming_events(channels, anonymous, contributor)
        upcoming = upcoming[:length]
        cache.set(cache_key, upcoming, 60 * 60)
    return upcoming
Example #36
def channel_feed(request, slug):
    # this slug might be the slug of a parent
    channels = Channel.objects.filter(Q(slug=slug) | Q(parent__slug=slug))
    events = Event.objects.archived().approved()
    events = events.filter(channels__in=channels)
    privacy_filter = {}
    privacy_exclude = {}
    if request.user.is_active:
        if is_contributor(request.user):
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}

    if privacy_filter:
        events = events.filter(**privacy_filter)
    elif privacy_exclude:
        events = events.exclude(**privacy_exclude)
    events = events.order_by('-start_time')

    paged = paginate(events, 1, 100)
    return render_channel_events(paged, request)
Example #37
def get_upcoming_events(channels, user,
                        length=settings.UPCOMING_SIDEBAR_COUNT):
    """return a queryset of upcoming events"""
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True

    cache_key = 'upcoming_events_%s_%s' % (int(anonymous), int(contributor))
    cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        cache_key += str(event.modified.microsecond)
    upcoming = cache.get(cache_key)
    if upcoming is None:
        upcoming = _get_upcoming_events(channels, anonymous, contributor)
        upcoming = upcoming[:length]
        cache.set(cache_key, upcoming, 60 * 60)
    return upcoming
Example #38
def categories_feed(request):
    context = {}

    privacy_filter = {}
    privacy_exclude = {}
    if request.user.is_active:
        if is_contributor(request.user):
            # feed_privacy = 'contributors'
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        # else:
            # feed_privacy = 'company'
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
        # feed_privacy = 'public'
    events = Event.objects.filter(status=Event.STATUS_SCHEDULED)
    live_events = Event.objects.live()
    if privacy_filter:
        events = events.filter(**privacy_filter)
        live_events = live_events.filter(**privacy_filter)
    elif privacy_exclude:
        events = events.exclude(**privacy_exclude)
        live_events = live_events.exclude(**privacy_exclude)

    channels = get_channels(events)
    context['channels'] = channels

    context['live_events'] = live_events

    prefix = request.is_secure() and 'https' or 'http'
    root_url = '%s://%s' % (prefix, RequestSite(request).domain)

    def abs_url_maker(viewname, *args, **kwargs):
        return root_url + reverse(viewname, args=args, kwargs=kwargs)

    context['abs_url'] = abs_url_maker
    context['get_media_info'] = get_media_info

    response = render(request, 'roku/categories.xml', context)
    response['Content-Type'] = 'text/xml'
    return response
Example #39
def tag_cloud(request, THRESHOLD=1):
    context = {}
    qs = (Event.tags.through.objects.values('tag_id').annotate(
        Count('tag__id')))
    if request.user.is_active:
        if is_contributor(request.user):
            # because of a bug in Django we can't use qs.exclude()
            qs = qs.filter(
                Q(event__privacy=Event.PRIVACY_CONTRIBUTORS)
                | Q(event__privacy=Event.PRIVACY_PUBLIC))
    else:
        qs = qs.filter(event__privacy=Event.PRIVACY_PUBLIC)
    tags_map = dict(
        (x['id'], x['name']) for x in Tag.objects.all().values('id', 'name'))
    tags = []
    for each in qs.values('tag__id__count', 'tag_id'):
        count = each['tag__id__count']
        if count > THRESHOLD:
            tags.append(_Tag(tags_map[each['tag_id']], count))

    context['tags'] = cloud.calculate_cloud(tags, steps=10)
    return render(request, 'main/tag_cloud.html', context)
Example #40
def get_featured_events(channels,
                        user,
                        length=settings.FEATURED_SIDEBAR_COUNT):
    """return a list of events that are sorted by their score"""
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True

    cache_key = 'featured_events_%s_%s' % (int(anonymous), int(contributor))
    cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        cache_key += str(event.modified.microsecond)
    featured = cache.get(cache_key)
    if featured is None:
        featured = _get_featured_events(channels, anonymous, contributor)
        featured = featured[:length]
        cache.set(cache_key, featured, 60 * 60)
    return [x.event for x in featured]
Example #41
def channel_feed(request, slug):
    # this slug might be the slug of a parent
    channels = Channel.objects.filter(
        Q(slug=slug) |
        Q(parent__slug=slug)
    )
    events = Event.objects.archived().approved()
    events = events.filter(channels__in=channels)
    privacy_filter = {}
    privacy_exclude = {}
    if request.user.is_active:
        if is_contributor(request.user):
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}

    if privacy_filter:
        events = events.filter(**privacy_filter)
    elif privacy_exclude:
        events = events.exclude(**privacy_exclude)
    events = events.order_by('-start_time')

    paged = paginate(events, 1, 100)
    return render_channel_events(paged, request)
Example #42
    def test_users_data(self):
        url = reverse('manage:users_data')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        eq_(len(struct['users']), User.objects.all().count())
        ok_('manage:user_edit' in struct['urls'])
        user, = User.objects.filter(is_staff=False)
        assert not is_contributor(user)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        ok_(not same_user.get('is_contributor'))
        ok_(not same_user.get('is_superuser'))
        ok_(not same_user.get('is_staff'))
        ok_(not same_user.get('is_inactive'))

        user.is_superuser = True
        user.is_staff = True
        user.is_active = False
        user.save()

        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        ok_(same_user.get('is_superuser'))
        ok_(same_user.get('is_staff'))
        ok_(same_user.get('is_inactive'))
        ok_(not same_user.get('groups'))

        testgroup, = Group.objects.all()
        user.groups.add(testgroup)
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        eq_(same_user['groups'], [testgroup.name])
Example #43
    def test_users_data(self):
        url = reverse('manage:users_data')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        eq_(len(struct['users']), User.objects.all().count())
        ok_('manage:user_edit' in struct['urls'])
        user, = User.objects.filter(is_staff=False)
        assert not is_contributor(user)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        ok_(not same_user.get('is_contributor'))
        ok_(not same_user.get('is_superuser'))
        ok_(not same_user.get('is_staff'))
        ok_(not same_user.get('is_inactive'))

        user.is_superuser = True
        user.is_staff = True
        user.is_active = False
        user.save()

        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        ok_(same_user.get('is_superuser'))
        ok_(same_user.get('is_staff'))
        ok_(same_user.get('is_inactive'))
        ok_(not same_user.get('groups'))

        testgroup = Group.objects.create(name='testapprover')
        user.groups.add(testgroup)
        response = self.client.get(url)
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        same_user, = [x for x in struct['users'] if x['id'] == user.id]
        eq_(same_user['groups'], [testgroup.name])
Example #44
def too_few_tags(request):
    """returns a report of all events that very few tags"""
    if request.method == "POST":
        form = forms.EventEditTagsForm(request.POST)
        if form.is_valid():
            event = get_object_or_404(Event, id=form.cleaned_data["event_id"])
            assert request.user.is_active
            if is_contributor(request.user):
                assert event.privacy != Event.PRIVACY_COMPANY

            if not EventRevision.objects.filter(event=event).count():
                EventRevision.objects.create_from_event(event)

            value = set([x.strip() for x in form.cleaned_data["tags"].split(",") if x.strip()])
            prev = set([x.name for x in event.tags.all()])
            for tag in prev - value:
                tag_obj = Tag.objects.get(name=tag)
                event.tags.remove(tag_obj)
            added = []
            for tag in value - prev:
                try:
                    tag_obj = Tag.objects.get(name__iexact=tag)
                except Tag.DoesNotExist:
                    tag_obj = Tag.objects.create(name=tag)
                except Tag.MultipleObjectsReturned:
                    tag_obj, = Tag.objects.filter(name__iexact=tag)[:1]
                event.tags.add(tag_obj)
                added.append(tag_obj)
            EventRevision.objects.create_from_event(event, user=request.user)
            messages.success(request, "Thank you for adding: %s" % ", ".join(x.name for x in added))
            return redirect("main:too_few_tags")

    zero_tags = Event.objects.scheduled_or_processing().exclude(id__in=Event.tags.through.objects.values("event_id"))
    few_tags = (
        Event.tags.through.objects.filter(event__status=Event.STATUS_SCHEDULED)
        .values("event_id")
        .annotate(count=Count("event"))
        .filter(count__lt=2)
    )
    assert request.user.is_active
    if is_contributor(request.user):
        few_tags = few_tags.exclude(event__privacy=Event.PRIVACY_COMPANY)
        zero_tags = zero_tags.exclude(privacy=Event.PRIVACY_COMPANY)

    count = zero_tags.count()
    count += few_tags.count()
    try:
        event, = zero_tags.order_by("?")[:1]
    except ValueError:
        try:
            first, = few_tags.order_by("?")[:1]
            event = Event.objects.get(id=first["event_id"])
        except ValueError:
            # there's nothing!
            event = None
            assert count == 0

    context = {"count": count, "event": event}
    if event:
        initial = {"tags": ", ".join(x.name for x in event.tags.all()), "event_id": event.id}
        context["form"] = forms.EventEditTagsForm(initial=initial, instance=event)

    return render(request, "main/too_few_tags.html", context)
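
The tag update above is a plain set difference: names present before but missing from the submitted value are removed, and names newly submitted are added. A tiny illustration:

prev = {'django', 'python'}
value = {'python', 'video'}

prev - value   # {'django'} -> tags to remove from the event
value - prev   # {'video'}  -> tags to add to the event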
Example #45
def home(request, page=1, channel_slug=settings.DEFAULT_CHANNEL_SLUG):
    """Paginated recent videos and live videos."""
    channels = Channel.objects.filter(slug=channel_slug)
    if not channels.count():
        if channel_slug == settings.DEFAULT_CHANNEL_SLUG:
            # then, the Main channel hasn't been created yet
            Channel.objects.create(name=settings.DEFAULT_CHANNEL_NAME,
                                   slug=settings.DEFAULT_CHANNEL_SLUG)
            channels = Channel.objects.filter(slug=channel_slug)
        else:
            raise http.Http404('Channel not found')

    request.channels = channels

    privacy_filter = {}
    privacy_exclude = {}
    archived_events = Event.objects.archived()
    if request.user.is_active:
        if is_contributor(request.user):
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
        archived_events = archived_events.approved()

    if privacy_filter:
        archived_events = archived_events.filter(**privacy_filter)
    elif privacy_exclude:
        archived_events = archived_events.exclude(**privacy_exclude)
    archived_events = archived_events.order_by('-start_time')

    archived_events = archived_events.select_related('picture')

    found_tags = []
    if request.GET.getlist('tag'):
        requested_tags = request.GET.getlist('tag')
        for each in requested_tags:
            found_tags.extend(Tag.objects.filter(name__iexact=each))
        if len(found_tags) < len(requested_tags):
            # invalid tags were used in the query string
            url = reverse('main:home')
            if found_tags:
                # some were good
                url += '?%s' % urllib.urlencode(
                    {'tag': [x.name for x in found_tags]}, True)
            return redirect(url, permanent=True)
        archived_events = archived_events.filter(tags__in=found_tags)
    if found_tags:
        # no live events when filtering by tag
        live_events = Event.objects.none()
    else:
        live_events = (Event.objects.live().order_by('start_time'))
        if not request.user.is_active:
            live_events = live_events.approved()

        if privacy_filter:
            live_events = live_events.filter(**privacy_filter)
        elif privacy_exclude:
            live_events = live_events.exclude(**privacy_exclude)

        # apply the mandatory channels filter
        # but only do this if it's not filtered by tags
        live_events = live_events.filter(channels=channels)
        archived_events = archived_events.filter(channels=channels)

        live_events = live_events.select_related('picture')

    if channels and channels[0].reverse_order:
        archived_events = archived_events.reverse()

    archived_paged = paginate(archived_events, page, 10)

    # to simplify the complexity of the template when it tries to make the
    # pagination URLs, we just figure it all out here
    next_page_url = prev_page_url = None
    channel = channels[0]
    if archived_paged.has_next():
        if channel.slug == settings.DEFAULT_CHANNEL_SLUG:
            next_page_url = reverse('main:home',
                                    args=(archived_paged.next_page_number(), ))
        else:
            next_page_url = reverse('main:home_channels',
                                    args=(channel.slug,
                                          archived_paged.next_page_number()))
    if archived_paged.has_previous():
        if channel.slug == settings.DEFAULT_CHANNEL_SLUG:
            prev_page_url = reverse(
                'main:home', args=(archived_paged.previous_page_number(), ))
        else:
            prev_page_url = reverse(
                'main:home_channels',
                args=(channel.slug, archived_paged.previous_page_number()))

    events_qs = Event.objects.archived().all()
    if request.user.is_active:
        if is_contributor(request.user):
            feed_privacy = 'contributors'
            events_qs = events_qs.exclude(privacy=Event.PRIVACY_COMPANY)
        else:
            feed_privacy = 'company'
    else:
        events_qs = events_qs.filter(privacy=Event.PRIVACY_PUBLIC)
        feed_privacy = 'public'

    channel_children = []
    for child in channel.get_children().order_by('name'):
        channel_children.append(
            (child, events_qs.filter(channels=child).count()))

    curated_groups_map = collections.defaultdict(list)
    curated_groups = (CuratedGroup.objects.all().values_list(
        'event_id', 'name').order_by('name'))
    for event_id, name in curated_groups:
        curated_groups_map[event_id].append(name)

    def get_curated_groups(event):
        return curated_groups_map.get(event.id)

    context = {
        'events': archived_paged,
        'live_events': live_events,
        'tags': found_tags,
        'Event': Event,
        'channel': channel,
        'channel_children': channel_children,
        'feed_privacy': feed_privacy,
        'next_page_url': next_page_url,
        'prev_page_url': prev_page_url,
        'get_curated_groups': get_curated_groups,
    }

    return render(request, 'main/home.html', context)
Example #46
def events_data(request):
    events = []
    qs = (
        Event.objects.all()
        .order_by('-modified')
    )
    _can_change_event_others = (
        request.user.has_perm('main.change_event_others')
    )
    base_filter = {}
    base_exclude = {}
    if not request.user.has_perm('main.change_event_others'):
        base_filter['creator'] = request.user
    if is_contributor(request.user):
        base_exclude['privacy'] = Event.PRIVACY_COMPANY
    qs = qs.filter(**base_filter)
    qs = qs.exclude(**base_exclude)

    event_channel_names = collections.defaultdict(list)
    _channel_names = dict(
        (x['id'], x['name'])
        for x in Channel.objects.all().values('id', 'name')
    )
    for each in Event.channels.through.objects.all().values():
        event_channel_names[each['event_id']].append(
            _channel_names[each['channel_id']]
        )

    now = timezone.now()
    live_time = now + datetime.timedelta(minutes=settings.LIVE_MARGIN)

    all_needs_approval = (
        Approval.objects
        .filter(processed=False)
        .values_list('event_id', flat=True)
    )

    pictures_counts = {}
    grouped_pictures = (
        Picture.objects
        .filter(event__in=qs)
        .values('event')
        .annotate(Count('event'))
    )
    for each in grouped_pictures:
        pictures_counts[each['event']] = each['event__count']

    if request.GET.get('limit'):
        try:
            limit = int(request.GET['limit'])
            assert limit > 0
            qs = qs[:limit]
        except (ValueError, AssertionError):
            pass

    locations = dict(
        (x.pk, x) for x in Location.objects.all()
    )
    template_names = dict(
        (x['id'], x['name'])
        for x in Template.objects.all().values('id', 'name')
    )
    for event in qs:
        event.location = locations.get(event.location_id)
        if event.location:
            start_time = event.location_time.strftime('%d %b %Y %I:%M%p')
            start_time_iso = event.location_time.isoformat()
        else:
            start_time = event.start_time.strftime('%d %b %Y %I:%M%p %Z')
            start_time_iso = event.start_time.isoformat()

        needs_approval = event.pk in all_needs_approval
        is_live = False
        is_upcoming = False
        if event.status == Event.STATUS_SCHEDULED and not needs_approval:
            if not event.archive_time and event.start_time < live_time:
                is_live = True
            elif not event.archive_time and event.start_time > live_time:
                is_upcoming = True

        row = {
            'modified': event.modified.isoformat(),
            'status': event.status,
            'status_display': event.get_status_display(),
            'privacy': event.privacy,
            'privacy_display': event.get_privacy_display(),
            'title': event.title,
            'slug': event.slug,
            'location': event.location and event.location.name or '',
            'id': event.pk,
            'start_time': start_time,
            'start_time_iso': start_time_iso,
            'channels': event_channel_names.get(event.pk, []),
            'archive_time': (
                event.archive_time.isoformat()
                if event.archive_time
                else None
            ),
            'can': [],  # actions you can take on the event
        }

        # to make the size of the JSON file as small as possible,
        # only include certain fields if they're true
        if event.status == Event.STATUS_PENDING:
            row['is_pending'] = True
        elif event.status == Event.STATUS_SCHEDULED:
            row['is_scheduled'] = True
        if is_live:
            row['is_live'] = True
        if is_upcoming:
            row['is_upcoming'] = is_upcoming
        if needs_approval:
            row['needs_approval'] = True
        if event.mozillian:
            row['mozillian'] = event.mozillian
        if event.id in pictures_counts:
            row['pictures'] = pictures_counts[event.id]
        if event.picture_id:
            row['picture'] = event.picture_id

        if row.get('is_pending'):
            # this one is only relevant if it's pending
            template_name = template_names.get(event.template_id)
            if template_name:
                row['has_vidly_template'] = 'Vid.ly' in template_name
        if event.popcorn_url and not is_upcoming:
            row['popcorn_url'] = event.popcorn_url

        if not row.get('picture') and not event.placeholder_img:
            row['nopicture'] = True

        if _can_change_event_others:
            row['can'].append('duplicate')
            row['can'].append('archive')
            # row['archive_url'] = reverse(
            #     'manage:event_archive',
            #     args=(event.pk,)
            # )

        events.append(row)

    urls = {
        'manage:event_edit': reverse('manage:event_edit', args=('0',)),
        'manage:event_duplicate': reverse(
            'manage:event_duplicate', args=('0',)
        ),
        'manage:redirect_event_thumbnail': reverse(
            'manage:redirect_event_thumbnail', args=('0',)
        ),
        'manage:event_archive': reverse(
            'manage:event_archive', args=('0',)
        ),
        'manage:picturegallery': reverse('manage:picturegallery'),
    }

    return {'events': events, 'urls': urls}
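
The payload built above is deliberately JSON-friendly: ISO timestamps, plain lists, and sparse boolean flags. A hedged sketch of how a wrapping view might expose it; `events_data` is an assumed name for the truncated function above, not the original one.

# Hypothetical wrapper (function name `events_data` is assumed):
# serializes the {'events': [...], 'urls': {...}} payload as JSON.
import json

from django import http


def events_data_json(request):
    return http.HttpResponse(
        json.dumps(events_data(request)),
        content_type='application/json'
    )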
Example No. 47
def home(request, page=1, channel_slug=settings.DEFAULT_CHANNEL_SLUG):
    """Paginated recent videos and live videos."""
    channels = Channel.objects.filter(slug=channel_slug)
    if not channels.count():
        if channel_slug == settings.DEFAULT_CHANNEL_SLUG:
            # then, the Main channel hasn't been created yet
            Channel.objects.create(
                name=settings.DEFAULT_CHANNEL_NAME,
                slug=settings.DEFAULT_CHANNEL_SLUG
            )
            channels = Channel.objects.filter(slug=channel_slug)
        else:
            raise http.Http404('Channel not found')

    request.channels = channels

    privacy_filter = {}
    privacy_exclude = {}
    archived_events = Event.objects.archived()
    if request.user.is_active:
        if is_contributor(request.user):
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
    else:
        # privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
        privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        archived_events = archived_events.approved()

    if privacy_filter:
        archived_events = archived_events.filter(**privacy_filter)
    elif privacy_exclude:
        archived_events = archived_events.exclude(**privacy_exclude)
    archived_events = archived_events.order_by('-start_time')

    archived_events = archived_events.select_related('picture')

    found_tags = []
    if request.GET.getlist('tag'):
        requested_tags = request.GET.getlist('tag')
        for each in requested_tags:
            found_tags.extend(Tag.objects.filter(name__iexact=each))
        if len(found_tags) < len(requested_tags):
            # invalid tags were used in the query string
            url = reverse('main:home')
            if found_tags:
                # some were good
                url += '?%s' % urllib.urlencode({
                    'tag': [x.name for x in found_tags]
                }, True)
            return redirect(url, permanent=True)
        archived_events = archived_events.filter(tags__in=found_tags)
    if found_tags:
        # no live events when filtering by tag
        live_events = Event.objects.none()
    else:
        live_events = (Event.objects.live()
                       .order_by('start_time'))
        if not request.user.is_active:
            live_events = live_events.approved()

        if privacy_filter:
            live_events = live_events.filter(**privacy_filter)
        elif privacy_exclude:
            live_events = live_events.exclude(**privacy_exclude)

        # apply the mandatory channels filter
        # but only do this if it's not filtered by tags
        live_events = live_events.filter(channels=channels)
        archived_events = archived_events.filter(channels=channels)

        live_events = live_events.select_related('picture')

    if channels and channels[0].reverse_order:
        archived_events = archived_events.reverse()

    archived_paged = paginate(archived_events, page, 10)

    # to simplify the complexity of the template when it tries to make the
    # pagination URLs, we just figure it all out here
    next_page_url = prev_page_url = None
    channel = channels[0]
    if archived_paged.has_next():
        if channel.slug == settings.DEFAULT_CHANNEL_SLUG:
            next_page_url = reverse(
                'main:home',
                args=(archived_paged.next_page_number(),)
            )
        else:
            next_page_url = reverse(
                'main:home_channels',
                args=(channel.slug,
                      archived_paged.next_page_number())
            )
    if archived_paged.has_previous():
        if channel.slug == settings.DEFAULT_CHANNEL_SLUG:
            prev_page_url = reverse(
                'main:home',
                args=(archived_paged.previous_page_number(),)
            )
        else:
            prev_page_url = reverse(
                'main:home_channels',
                args=(channel.slug,
                      archived_paged.previous_page_number())
            )

    events_qs = Event.objects.archived().all()
    if request.user.is_active:
        if is_contributor(request.user):
            feed_privacy = 'contributors'
            events_qs = events_qs.exclude(privacy=Event.PRIVACY_COMPANY)
        else:
            feed_privacy = 'company'
    else:
        events_qs = events_qs.filter(privacy=Event.PRIVACY_PUBLIC)
        feed_privacy = 'public'

    channel_children = []
    for child in channel.get_children().order_by('name'):
        channel_children.append((
            child,
            events_qs.filter(channels=child).count()
        ))

    curated_groups_map = collections.defaultdict(list)
    curated_groups = (
        CuratedGroup.objects.all()
        .values_list('event_id', 'name')
        .order_by('name')
    )
    for event_id, name in curated_groups:
        curated_groups_map[event_id].append(name)

    def get_curated_groups(event):
        return curated_groups_map.get(event.id)

    context = {
        'events': archived_paged,
        'live_events': live_events,
        'tags': found_tags,
        'Event': Event,
        'channel': channel,
        'channel_children': channel_children,
        'feed_privacy': feed_privacy,
        'next_page_url': next_page_url,
        'prev_page_url': prev_page_url,
        'get_curated_groups': get_curated_groups,
    }

    return render(request, 'main/home.html', context)
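
For context, the reverse('main:home', ...) and reverse('main:home_channels', ...) calls above imply URL patterns that accept an optional page number and a channel slug. A hypothetical urlconf sketch consistent with those names; the actual patterns are not shown in this excerpt.

# Hypothetical urlconf (patterns assumed to match the reverse() calls above).
from django.conf.urls import url

urlpatterns = [
    url(r'^$', home, name='home'),
    url(r'^page/(?P<page>\d+)/$', home, name='home'),
    url(r'^channels/(?P<channel_slug>[-\w]+)/$',
        home, name='home_channels'),
    url(r'^channels/(?P<channel_slug>[-\w]+)/(?P<page>\d+)/$',
        home, name='home_channels'),
]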
Example No. 48
def find_related_events(
    event, user, boost_title=None, boost_tags=None, size=None,
    use_title=True, use_tags=True, explain=False
):
    assert use_title or use_tags
    if boost_title is None:
        boost_title = settings.RELATED_CONTENT_BOOST_TITLE
    if boost_tags is None:
        boost_tags = settings.RELATED_CONTENT_BOOST_TAGS
    if size is None:
        size = settings.RELATED_CONTENT_SIZE
    index = related.get_index()
    doc_type = 'event'

    es = related.get_connection()

    fields = ['title']
    if list(event.channels.all()) != [
            Channel.objects.get(slug=settings.DEFAULT_CHANNEL_SLUG)]:
        fields.append('channel')

    mlt_queries = []
    if use_title:
        mlt_queries.append({
            'more_like_this': {
                'fields': ['title'],
                # 'analyzer': 'snowball',
                'docs': [
                    {
                        '_index': index,
                        '_type': doc_type,
                        '_id': event.id
                    }],
                'min_term_freq': 1,
                'max_query_terms': 20,
                'min_doc_freq': 1,
                # 'max_doc_freq': 2,
                # 'stop_words': ['your', 'about'],
                'boost': boost_title,
            }
        })
    if use_tags and event.tags.all().exists():
        fields.append('tags')
        mlt_queries.append({
            'more_like_this': {
                'fields': ['tags'],
                'docs': [
                    {
                        '_index': index,
                        '_type': doc_type,
                        '_id': event.id
                    }],
                'min_term_freq': 1,
                'max_query_terms': 20,
                'min_doc_freq': 1,
                'boost': boost_tags,
            }
        })

    query_ = {
        'bool': {
            'should': mlt_queries,
        }
    }

    if user.is_active:
        if is_contributor(user):
            query = {
                'fields': fields,
                'query': query_,
                'filter': {
                    'bool': {
                        'must_not': {
                            'term': {
                                'privacy': Event.PRIVACY_COMPANY
                            }
                        }
                    }
                }
            }
        else:
            query = {
                'fields': fields,
                'query': query_
            }
    else:
        query = {
            'fields': fields,
            'query': query_,
            "filter": {
                "bool": {
                    "must": {
                        "term": {"privacy": Event.PRIVACY_PUBLIC}
                    }
                }
            }
        }

    ids = []
    query['from'] = 0
    query['size'] = size
    query['explain'] = explain
    hits = es.search(query, index=index)['hits']

    scores = {}
    explanations = []
    for doc in hits['hits']:
        _id = int(doc['_id'])
        scores[_id] = doc['_score']
        ids.append(_id)
        if explain:
            explanations.append(doc['_explanation'])

    events = Event.objects.scheduled_or_processing().filter(id__in=ids)

    if user.is_active:
        if is_contributor(user):
            events = events.exclude(privacy=Event.PRIVACY_COMPANY)
    else:
        events = events.filter(privacy=Event.PRIVACY_PUBLIC)

    events = sorted(events, key=lambda e: ids.index(e.id))

    return (events, scores, explanations)
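
A minimal usage sketch for find_related_events() as defined above; the surrounding view and the `event`/`request` names are assumed for illustration.

# Hypothetical caller (not from the original source): fetch a handful of
# related events, e.g. for an event page sidebar.
related_events, scores, _ = find_related_events(
    event,
    request.user,
    size=5,  # assumed override of settings.RELATED_CONTENT_SIZE
)
titles_and_scores = [(e.title, scores[e.id]) for e in related_events]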
Example No. 49
def too_few_tags(request):
    """returns a report of all events that very few tags"""
    if request.method == 'POST':
        form = forms.EventEditTagsForm(request.POST)
        if form.is_valid():
            event = get_object_or_404(Event, id=form.cleaned_data['event_id'])
            assert request.user.is_active
            if is_contributor(request.user):
                assert event.privacy != Event.PRIVACY_COMPANY

            if not EventRevision.objects.filter(event=event).count():
                EventRevision.objects.create_from_event(event)

            value = set([
                x.strip() for x in form.cleaned_data['tags'].split(',')
                if x.strip()
            ])
            prev = set([x.name for x in event.tags.all()])
            for tag in prev - value:
                tag_obj = Tag.objects.get(name=tag)
                event.tags.remove(tag_obj)
            added = []
            for tag in value - prev:
                try:
                    tag_obj = Tag.objects.get(name__iexact=tag)
                except Tag.DoesNotExist:
                    tag_obj = Tag.objects.create(name=tag)
                except Tag.MultipleObjectsReturned:
                    tag_obj, = Tag.objects.filter(name__iexact=tag)[:1]
                event.tags.add(tag_obj)
                added.append(tag_obj)
            EventRevision.objects.create_from_event(event, user=request.user)
            messages.success(
                request,
                'Thank you for adding: %s' % ', '.join(x.name for x in added))
            return redirect('main:too_few_tags')

    zero_tags = (Event.objects.scheduled_or_processing().exclude(
        id__in=Event.tags.through.objects.values('event_id')))
    few_tags = (Event.tags.through.objects.filter(
        event__status=Event.STATUS_SCHEDULED).values('event_id').annotate(
            count=Count('event')).filter(count__lt=2))

    assert request.user.is_active
    if is_contributor(request.user):
        few_tags = few_tags.exclude(event__privacy=Event.PRIVACY_COMPANY)
        zero_tags = zero_tags.exclude(privacy=Event.PRIVACY_COMPANY)

    count = zero_tags.count()
    count += few_tags.count()
    try:
        event, = zero_tags.order_by('?')[:1]
    except ValueError:
        event = None
        if few_tags.count():
            try:
                first, = few_tags.order_by('?')[:1]
                event = Event.objects.get(id=first['event_id'])
            except ValueError:
                # there's nothing!
                event = None
                assert count == 0

    context = {
        'count': count,
        'event': event,
    }
    if event:
        initial = {
            'tags': ', '.join(x.name for x in event.tags.all()),
            'event_id': event.id,
        }
        context['form'] = forms.EventEditTagsForm(initial=initial,
                                                  instance=event)

    return render(request, 'main/too_few_tags.html', context)
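
The tag update above is plain set arithmetic: names in the previous set but not in the submitted set are removed, and the reverse are added. A tiny standalone illustration with made-up tag names.

# Illustration only; the tag names are made up.
prev = {'python', 'django'}
value = {'django', 'airmozilla'}

prev - value   # {'python'}     -> removed from the event
value - prev   # {'airmozilla'} -> added to the event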
Example No. 50
def home(request):
    context = {
        'q': None,
        'events_found': None,
        'search_error': None,
        'tags': None,
        'possible_tags': None,
        'channels': None,
        'possible_channels': None
    }

    if request.GET.get('q'):
        form = forms.SearchForm(request.GET)
    else:
        form = forms.SearchForm()

    if request.GET.get('q') and form.is_valid():
        context['q'] = form.cleaned_data['q']
        privacy_filter = {}
        privacy_exclude = {}
        qs = Event.objects.scheduled_or_processing()
        if request.user.is_active:
            if is_contributor(request.user):
                privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        else:
            privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
            qs = qs.approved()

        extra = {}
        rest, params = split_search(context['q'], ('tag', 'channel'))
        if params.get('tag'):
            tags = Tag.objects.filter(name__iexact=params['tag'])
            if tags:
                context['q'] = rest
                context['tags'] = extra['tags'] = tags
        else:
            # is the search term possibly a tag?
            all_tag_names = Tag.objects.all().values_list('name', flat=True)
            tags_regex = re.compile(
                r'\b(%s)\b' % ('|'.join(re.escape(x)
                                        for x in all_tag_names), ), re.I)
            # next we need to turn all of these into a Tag QuerySet
            # because we can't do `filter(name__in=tags_regex.findall(...))`
            # because that is case-sensitive.
            tag_ids = []
            for match in tags_regex.findall(rest):
                tag_ids.extend(
                    Tag.objects.filter(name__iexact=match).values_list(
                        'id', flat=True))
            possible_tags = Tag.objects.filter(id__in=tag_ids)
            for tag in possible_tags:
                regex = re.compile(re.escape(tag.name), re.I)
                tag._query_string = regex.sub(
                    '',
                    context['q'],
                )
                tag._query_string += ' tag: %s' % tag.name
                # reduce all excess whitespace into 1
                tag._query_string = re.sub(r'\s\s+', ' ', tag._query_string)
                tag._query_string = tag._query_string.strip()
            context['possible_tags'] = possible_tags

        if params.get('channel'):
            channels = Channel.objects.filter(name__iexact=params['channel'])
            if channels:
                context['q'] = rest
                context['channels'] = extra['channels'] = channels
        else:
            # is the search term possibly a channel?
            all_channel_names = (Channel.objects.all().values_list('name',
                                                                   flat=True))
            channels_regex = re.compile(
                r'\b(%s)\b' %
                ('|'.join(re.escape(x) for x in all_channel_names), ), re.I)
            channel_ids = []
            for match in channels_regex.findall(rest):
                channel_ids.extend(
                    Channel.objects.filter(name__iexact=match).values_list(
                        'id', flat=True))
            possible_channels = Channel.objects.filter(id__in=channel_ids)
            for channel in possible_channels:
                regex = re.compile(re.escape(channel.name), re.I)
                channel._query_string = regex.sub(
                    '',
                    context['q'],
                )
                channel._query_string += ' channel: %s' % channel.name
                # reduce all excess whitespace into 1
                channel._query_string = re.sub(r'\s\s+', ' ',
                                               channel._query_string)
                channel._query_string = channel._query_string.strip()
            context['possible_channels'] = possible_channels

        events = _search(qs,
                         context['q'],
                         privacy_filter=privacy_filter,
                         privacy_exclude=privacy_exclude,
                         sort=request.GET.get('sort'),
                         **extra)
        if not events.count() and utils.possible_to_or_query(context['q']):
            events = _search(qs,
                             context['q'],
                             privacy_filter=privacy_filter,
                             privacy_exclude=privacy_exclude,
                             sort=request.GET.get('sort'),
                             fuzzy=True)

        try:
            page = int(request.GET.get('page', 1))
            if page < 1:
                raise ValueError
        except ValueError:
            return http.HttpResponseBadRequest('Invalid page')

        # we use the paginator() function to get the Paginator
        # instance so we can avoid calling `events.count()` for the
        # header of the page where it says "XX events found"
        try:
            with transaction.atomic():
                pager, events_paged = paginator(events, page, 10)
            _database_error_happened = False
        except DatabaseError:
            _database_error_happened = True
            # don't feed the trolls, just return nothing found
            pager, events_paged = paginator(Event.objects.none(), 1, 10)

        next_page_url = prev_page_url = None

        def url_maker(page):
            querystring = {'q': context['q'].encode('utf-8'), 'page': page}
            querystring = urllib.urlencode(querystring)
            return '%s?%s' % (reverse('search:home'), querystring)

        if events_paged.has_next():
            next_page_url = url_maker(events_paged.next_page_number())
        if events_paged.has_previous():
            prev_page_url = url_maker(events_paged.previous_page_number())

        context['events_paged'] = events_paged
        context['next_page_url'] = next_page_url
        context['prev_page_url'] = prev_page_url
        context['events_found'] = pager.count
        context['channels'] = get_event_channels(events_paged)

        log_searches = settings.LOG_SEARCHES and '_nolog' not in request.GET
        if (log_searches and not _database_error_happened
                and request.GET['q'].strip()):
            logged_search = LoggedSearch.objects.create(
                term=request.GET['q'][:200],
                results=events.count(),
                page=page,
                user=request.user.is_authenticated() and request.user or None)
            request.session['logged_search'] = (logged_search.pk, time.time())
    elif request.GET.get('q'):
        context['search_error'] = form.errors['q']
    else:
        context['events'] = []

    context['form'] = form
    return render(request, 'search/home.html', context)
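
The "possible tag" detection above builds one case-insensitive alternation regex out of every known tag name and scans the free-text query with it. A standalone sketch of just that matching step, with made-up tag names.

import re

# Made-up tag names; in the view they come from
# Tag.objects.all().values_list('name', flat=True).
all_tag_names = ['Firefox', 'Rust', 'All Hands']
tags_regex = re.compile(
    r'\b(%s)\b' % ('|'.join(re.escape(x) for x in all_tag_names),),
    re.I
)
tags_regex.findall('intro to rust at all hands')  # ['rust', 'all hands']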
Example No. 51
def home(request):
    context = {
        'q': None,
        'events_found': None,
        'search_error': None,
        'tags': None,
        'possible_tags': None,
        'channels': None,
        'possible_channels': None
    }

    if request.GET.get('q'):
        form = forms.SearchForm(request.GET)
    else:
        form = forms.SearchForm()

    if request.GET.get('q') and form.is_valid():
        context['q'] = form.cleaned_data['q']
        privacy_filter = {}
        privacy_exclude = {}
        qs = Event.objects.scheduled_or_processing()
        if request.user.is_active:
            if is_contributor(request.user):
                privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        else:
            privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
            qs = qs.approved()

        extra = {}
        rest, params = split_search(context['q'], ('tag', 'channel'))
        if params.get('tag'):
            tags = Tag.objects.filter(name__iexact=params['tag'])
            if tags:
                context['q'] = rest
                context['tags'] = extra['tags'] = tags
        else:
            # is the search term possibly a tag?
            all_tag_names = Tag.objects.all().values_list('name', flat=True)
            tags_regex = re.compile(
                r'\b(%s)\b' %
                ('|'.join(re.escape(x) for x in all_tag_names),),
                re.I
            )
            # next we need to turn all of these into a Tag QuerySet
            # because we can't do `filter(name__in=tags_regex.findall(...))`
            # because that is case-sensitive.
            tag_ids = []
            for match in tags_regex.findall(rest):
                tag_ids.extend(
                    Tag.objects.filter(name__iexact=match)
                    .values_list('id', flat=True)
                )
            possible_tags = Tag.objects.filter(
                id__in=tag_ids
            )
            for tag in possible_tags:
                regex = re.compile(re.escape(tag.name), re.I)
                tag._query_string = regex.sub(
                    '',
                    context['q'],
                )
                tag._query_string += ' tag: %s' % tag.name
                # reduce all excess whitespace into 1
                tag._query_string = re.sub(
                    r'\s\s+',
                    ' ',
                    tag._query_string
                )
                tag._query_string = tag._query_string.strip()
            context['possible_tags'] = possible_tags

        if params.get('channel'):
            channels = Channel.objects.filter(name__iexact=params['channel'])
            if channels:
                context['q'] = rest
                context['channels'] = extra['channels'] = channels
        else:
            # is the search term possibly a channel?
            all_channel_names = (
                Channel.objects.all().values_list('name', flat=True)
            )
            channels_regex = re.compile(
                r'\b(%s)\b' %
                ('|'.join(re.escape(x) for x in all_channel_names),),
                re.I
            )
            channel_ids = []
            for match in channels_regex.findall(rest):
                channel_ids.extend(
                    Channel.objects
                    .filter(name__iexact=match).values_list('id', flat=True)
                )
            possible_channels = Channel.objects.filter(
                id__in=channel_ids
            )
            for channel in possible_channels:
                regex = re.compile(re.escape(channel.name), re.I)
                channel._query_string = regex.sub(
                    '',
                    context['q'],
                )
                channel._query_string += ' channel: %s' % channel.name
                # reduce all excess whitespace into 1
                channel._query_string = re.sub(
                    r'\s\s+',
                    ' ',
                    channel._query_string
                )
                channel._query_string = channel._query_string.strip()
            context['possible_channels'] = possible_channels

        events = _search(
            qs,
            context['q'],
            privacy_filter=privacy_filter,
            privacy_exclude=privacy_exclude,
            sort=request.GET.get('sort'),
            **extra
        )
        if not events.count() and utils.possible_to_or_query(context['q']):
            events = _search(
                qs,
                context['q'],
                privacy_filter=privacy_filter,
                privacy_exclude=privacy_exclude,
                sort=request.GET.get('sort'),
                fuzzy=True
            )

        try:
            page = int(request.GET.get('page', 1))
            if page < 1:
                raise ValueError
        except ValueError:
            return http.HttpResponseBadRequest('Invalid page')

        # we use the paginator() function to get the Paginator
        # instance so we can avoid calling `events.count()` for the
        # header of the page where it says "XX events found"
        try:
            with transaction.atomic():
                pager, events_paged = paginator(events, page, 10)
            _database_error_happened = False
        except DatabaseError:
            _database_error_happened = True
            # don't feed the trolls, just return nothing found
            pager, events_paged = paginator(Event.objects.none(), 1, 10)

        next_page_url = prev_page_url = None

        def url_maker(page):
            querystring = {'q': context['q'].encode('utf-8'), 'page': page}
            querystring = urllib.urlencode(querystring)
            return '%s?%s' % (reverse('search:home'), querystring)

        if events_paged.has_next():
            next_page_url = url_maker(events_paged.next_page_number())
        if events_paged.has_previous():
            prev_page_url = url_maker(events_paged.previous_page_number())

        context['events_paged'] = events_paged
        context['next_page_url'] = next_page_url
        context['prev_page_url'] = prev_page_url
        context['events_found'] = pager.count
        context['channels'] = get_event_channels(events_paged)

        log_searches = settings.LOG_SEARCHES and '_nolog' not in request.GET
        if (
            log_searches and
            not _database_error_happened and
            request.GET['q'].strip()
        ):
            logged_search = LoggedSearch.objects.create(
                term=request.GET['q'][:200],
                results=events.count(),
                page=page,
                user=request.user.is_authenticated() and request.user or None
            )
            request.session['logged_search'] = (
                logged_search.pk,
                time.time()
            )
    elif request.GET.get('q'):
        context['search_error'] = form.errors['q']
    else:
        context['events'] = []

    context['form'] = form
    return render(request, 'search/home.html', context)
Example No. 52
def find_related_events(event,
                        user,
                        boost_title=None,
                        boost_tags=None,
                        size=None,
                        use_title=True,
                        use_tags=True,
                        explain=False):
    assert use_title or use_tags
    if boost_title is None:
        boost_title = settings.RELATED_CONTENT_BOOST_TITLE
    if boost_tags is None:
        boost_tags = settings.RELATED_CONTENT_BOOST_TAGS
    if size is None:
        size = settings.RELATED_CONTENT_SIZE
    index = related.get_index()
    doc_type = 'event'

    es = related.get_connection()

    fields = ['title']
    if list(event.channels.all()) != [
            Channel.objects.get(slug=settings.DEFAULT_CHANNEL_SLUG)
    ]:
        fields.append('channel')

    mlt_queries = []
    if use_title:
        mlt_queries.append({
            'more_like_this': {
                'fields': ['title'],
                # 'analyzer': 'snowball',
                'docs': [{
                    '_index': index,
                    '_type': doc_type,
                    '_id': event.id
                }],
                'min_term_freq': 1,
                'max_query_terms': 20,
                'min_doc_freq': 1,
                # 'max_doc_freq': 2,
                # 'stop_words': ['your', 'about'],
                'boost': boost_title,
            }
        })
    if use_tags and event.tags.all().exists():
        fields.append('tags')
        mlt_queries.append({
            'more_like_this': {
                'fields': ['tags'],
                'docs': [{
                    '_index': index,
                    '_type': doc_type,
                    '_id': event.id
                }],
                'min_term_freq': 1,
                'max_query_terms': 20,
                'min_doc_freq': 1,
                'boost': boost_tags,
            }
        })

    query_ = {
        'bool': {
            'should': mlt_queries,
        }
    }

    if user.is_active:
        if is_contributor(user):
            query = {
                'fields': fields,
                'query': query_,
                'filter': {
                    'bool': {
                        'must_not': {
                            'term': {
                                'privacy': Event.PRIVACY_COMPANY
                            }
                        }
                    }
                }
            }
        else:
            query = {'fields': fields, 'query': query_}
    else:
        query = {
            'fields': fields,
            'query': query_,
            "filter": {
                "bool": {
                    "must": {
                        "term": {
                            "privacy": Event.PRIVACY_PUBLIC
                        }
                    }
                }
            }
        }

    ids = []
    query['from'] = 0
    query['size'] = size
    query['explain'] = explain
    hits = es.search(query, index=index)['hits']

    scores = {}
    explanations = []
    for doc in hits['hits']:
        _id = int(doc['_id'])
        scores[_id] = doc['_score']
        ids.append(_id)
        if explain:
            explanations.append(doc['_explanation'])

    events = Event.objects.scheduled_or_processing().filter(id__in=ids)

    if user.is_active:
        if is_contributor(user):
            events = events.exclude(privacy=Event.PRIVACY_COMPANY)
    else:
        events = events.filter(privacy=Event.PRIVACY_PUBLIC)

    events = sorted(events, key=lambda e: ids.index(e.id))

    return (events, scores, explanations)
Example No. 53
def sidebar(request):
    # none of this is relevant if you're in certain URLs
    if '/manage/' in request.path_info:
        return {}
    data = {
        # used for things like {% if event.attr == Event.ATTR1 %}
        'Event': Event,
    }
    now = datetime.datetime.utcnow().replace(tzinfo=utc)
    yesterday = now - datetime.timedelta(days=1)
    # subtract one second to not accidentally tip it
    yesterday -= datetime.timedelta(seconds=1)
    featured = (
        EventHitStats.objects
        .exclude(event__archive_time__isnull=True)
        .filter(event__archive_time__lt=yesterday)
        .extra(
            select={
                # being 'featured' pretends the event has twice as
                # many hits as it actually does
                'score': '(featured::int + 1) * total_hits'
                         '/ extract(days from (now() - archive_time)) ^ 1.8',
            }
        )
        .select_related('event')
        .order_by('-score')
    )

    upcoming = Event.objects.upcoming().order_by('start_time')
    # if viewing a specific page is limited by channel, apply that filtering
    # here too
    if getattr(request, 'channels', None):
        channels = request.channels
    else:
        channels = Channel.objects.filter(slug=settings.DEFAULT_CHANNEL_SLUG)

    feed_privacy = _get_feed_privacy(request.user)

    if settings.DEFAULT_CHANNEL_SLUG in [x.slug for x in channels]:
        feed_title = 'AirMozilla RSS'
        feed_url = reverse('main:feed', args=(feed_privacy,))
        sidebar_channel = settings.DEFAULT_CHANNEL_SLUG
    else:
        _channel = channels[0]
        feed_title = 'AirMozilla - %s - RSS' % _channel.name
        feed_url = reverse('main:channel_feed',
                           args=(_channel.slug, feed_privacy))
        sidebar_channel = _channel.slug
    data['feed_title'] = feed_title
    data['feed_url'] = feed_url

    # `featured` isn't actually a QuerySet on Event
    featured = featured.filter(event__channels__in=channels)
    upcoming = upcoming.filter(channels__in=channels).distinct()

    if request.user.is_active:
        if is_contributor(request.user):
            # not private
            featured = featured.exclude(event__privacy=Event.PRIVACY_COMPANY)
            upcoming = upcoming.exclude(privacy=Event.PRIVACY_COMPANY)
    else:
        # only public
        featured = featured.filter(event__privacy=Event.PRIVACY_PUBLIC)
        upcoming = upcoming.filter(privacy=Event.PRIVACY_PUBLIC)

    upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
    data['upcoming'] = upcoming
    data['featured'] = [x.event for x in featured[:5]]

    data['sidebar_top'] = None
    data['sidebar_bottom'] = None
    sidebar_urls_q = (
        Q(url='sidebar_top_%s' % sidebar_channel) |
        Q(url='sidebar_bottom_%s' % sidebar_channel)
    )
    # to avoid two separate queries, fetch both flatpages with one
    # combined query and sort them out as we loop over it
    for page in FlatPage.objects.filter(sidebar_urls_q):
        if page.url.startswith('sidebar_top_'):
            data['sidebar_top'] = page
        elif page.url.startswith('sidebar_bottom_'):
            data['sidebar_bottom'] = page

    data['search_form'] = SearchForm(request.GET)

    return data
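
sidebar() is shaped like a Django template context processor: it takes the request and returns a dict that gets merged into the template context. A hedged sketch of how such a processor is registered in an older-style settings module; the dotted path is assumed, not taken from the project.

# settings.py sketch (dotted path assumed for illustration)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.request',
    'airmozilla.main.context_processors.sidebar',  # the function above
)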
Example No. 54
def home(request):
    context = {
        'q': None,
        'events_found': None,
        'search_error': None,
    }

    if request.GET.get('q'):
        form = forms.SearchForm(request.GET)
    else:
        form = forms.SearchForm()

    if request.GET.get('q') and form.is_valid():
        context['q'] = request.GET.get('q')
        privacy_filter = {}
        privacy_exclude = {}
        if request.user.is_active:
            if is_contributor(request.user):
                privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        else:
            privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}

        events = _search(
            request.GET.get('q'),
            privacy_filter=privacy_filter,
            privacy_exclude=privacy_exclude,
            sort=request.GET.get('sort'),
        )
        try:
            page = int(request.GET.get('page', 1))
            if page < 1:
                raise ValueError
        except ValueError:
            return http.HttpResponseBadRequest('Invalid page')

        # we use the paginator() function to get the Paginator
        # instance so we can avoid calling `events.count()` for the
        # header of the page where it says "XX events found"
        pager, events_paged = paginator(events, page, 10)
        next_page_url = prev_page_url = None

        def url_maker(page):
            querystring = {'q': context['q'], 'page': page}
            querystring = urllib.urlencode(querystring)
            return '%s?%s' % (reverse('search:home'), querystring)

        if events_paged.has_next():
            next_page_url = url_maker(events_paged.next_page_number())
        if events_paged.has_previous():
            prev_page_url = url_maker(events_paged.previous_page_number())

        context['events_paged'] = events_paged
        context['next_page_url'] = next_page_url
        context['prev_page_url'] = prev_page_url
        context['events_found'] = pager.count
    elif request.GET.get('q'):
        context['search_error'] = form.errors['q']
    else:
        context['events'] = []

    context['form'] = form
    return render(request, 'search/home.html', context)