def test_most_recent_event(self):
    # this test does not benefit from the standard fixtures
    Event.objects.all().delete()

    when = datetime.datetime(2099, 1, 1, 18, 0, 0).replace(tzinfo=timezone.utc)
    location = Location.objects.create(
        name='Mountain View',
        timezone='US/Pacific',
    )

    # with no events at all there is nothing to return
    eq_(most_recent_event(), None)

    first = Event.objects.create(
        title='Event 1',
        status=Event.STATUS_INITIATED,
        start_time=when,
        location=location,
    )
    eq_(most_recent_event(), first)

    second = Event.objects.create(
        title='Event 2',
        status=Event.STATUS_INITIATED,
        start_time=when + datetime.timedelta(days=1),
        location=location,
    )
    eq_(most_recent_event(), second)

    # re-saving the first event should make it the most recent again
    first.start_time -= datetime.timedelta(days=1)
    first.save()
    eq_(most_recent_event(), first)
def test_most_recent_event(self):
    """most_recent_event() should follow whichever event was saved last."""
    when = datetime.datetime(2099, 1, 1, 18, 0, 0).replace(tzinfo=utc)
    location = Location.objects.create(
        name='Mountain View',
        timezone='US/Pacific',
    )

    # nothing created yet
    eq_(most_recent_event(), None)

    first = Event.objects.create(
        title='Event 1',
        status=Event.STATUS_INITIATED,
        start_time=when,
        location=location,
    )
    eq_(most_recent_event(), first)

    second = Event.objects.create(
        title='Event 2',
        status=Event.STATUS_INITIATED,
        start_time=when + datetime.timedelta(days=1),
        location=location,
    )
    eq_(most_recent_event(), second)

    # saving again bumps the first event back to most recent
    first.start_time -= datetime.timedelta(days=1)
    first.save()
    eq_(most_recent_event(), first)
def test_most_recent_event(self):
    # this test does not benefit from the standard fixtures
    Event.objects.all().delete()

    start = datetime.datetime(2099, 1, 1, 18, 0, 0).replace(
        tzinfo=timezone.utc
    )
    venue = Location.objects.create(
        name='Mountain View',
        timezone='US/Pacific',
    )

    # an empty table yields no "most recent" event
    eq_(most_recent_event(), None)

    earlier = Event.objects.create(
        title='Event 1',
        status=Event.STATUS_INITIATED,
        start_time=start,
        location=venue,
    )
    eq_(most_recent_event(), earlier)

    later = Event.objects.create(
        title='Event 2',
        status=Event.STATUS_INITIATED,
        start_time=start + datetime.timedelta(days=1),
        location=venue,
    )
    eq_(most_recent_event(), later)

    # a fresh save on the older event makes it most recent again
    earlier.start_time -= datetime.timedelta(days=1)
    earlier.save()
    eq_(most_recent_event(), earlier)
def get_featured_events(
    channels, user, length=settings.FEATURED_SIDEBAR_COUNT
):
    """Return a list of events that are sorted by their score.

    :param channels: iterable of channel objects (only their ``id`` is
        used, to scope the cache key) -- may be empty/None.
    :param user: the requesting user; ``is_active`` and contributorship
        decide which pre-computed ranking variant applies.
    :param length: maximum number of events to return.
    """
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True
    cache_key = 'featured_events_%s_%s' % (int(anonymous), int(contributor))
    if channels:
        cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        # Use the full modification timestamp as a cache buster.
        # Keying on `modified.microsecond` alone (as before) collides
        # whenever two distinct timestamps share a microsecond value,
        # which would serve stale featured lists.
        cache_key += event.modified.isoformat()
    featured = cache.get(cache_key)
    if featured is None:
        featured = _get_featured_events(channels, anonymous, contributor)
        featured = featured[:length]
        cache.set(cache_key, featured, 60 * 60)
    return [x.event for x in featured]
def get_featured_events(
    channels, user, length=settings.FEATURED_SIDEBAR_COUNT
):
    """Return a list of events that are sorted by their score.

    :param channels: iterable of channel objects (only their ``id`` is
        used, to scope the cache key) -- may be empty/None.
    :param user: the requesting user; ``is_active`` and contributorship
        decide which pre-computed ranking variant applies.
    :param length: maximum number of hit-stat rows considered (the
        de-duplicated result may be shorter).
    """
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True
    cache_key = 'featured_events_%s_%s' % (int(anonymous), int(contributor))
    if channels:
        cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        # Use the full modification timestamp as a cache buster.
        # Keying on `modified.microsecond` alone (as before) collides
        # whenever two distinct timestamps share a microsecond value,
        # which would serve stale featured lists.
        cache_key += event.modified.isoformat()
    featured = cache.get(cache_key)
    if featured is None:
        featured = _get_featured_events(channels, anonymous, contributor)
        featured = featured[:length]
        cache.set(cache_key, featured, 60 * 60)
    # Sadly, in Django when you do a left outer join on a many-to-many
    # table you get repeats and you can't fix that by adding a simple
    # `distinct` on the first field.
    # In django, if you do `myqueryset.distinct('id')` it requires
    # that that's also something you order by.
    # In pure Postgresql you can do this:
    # SELECT
    #    DISTINCT main_eventhitstats.id as id,
    #    (some formula) AS score,
    #    ...
    # FROM ...
    # INNER JOIN ...
    # INNER JOIN ...
    # ORDER BY score DESC
    # LIMIT 5;
    #
    # But you can't do that with Django.
    # So we have to manually de-dupe. Hopefully we can alleviate this
    # problem altogether when we start doing aggregates where you have
    # many repeated EventHitStats *per* event and you need to look at
    # their total score across multiple vidly shortcodes.
    events = []
    for each in featured:
        if each.event not in events:
            events.append(each.event)
    return events
def get_upcoming_events(channels, user,
                        length=settings.UPCOMING_SIDEBAR_COUNT):
    """Return a queryset of upcoming events.

    :param channels: iterable of channel objects (only their ``id`` is
        used, to scope the cache key).
    :param user: the requesting user; ``is_active`` and contributorship
        decide which pre-computed variant applies.
    :param length: maximum number of events to return.
    """
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True
    cache_key = 'upcoming_events_%s_%s' % (int(anonymous), int(contributor))
    cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        # Use the full modification timestamp as a cache buster.
        # Keying on `modified.microsecond` alone (as before) collides
        # whenever two distinct timestamps share a microsecond value,
        # which would serve a stale upcoming list.
        cache_key += event.modified.isoformat()
    upcoming = cache.get(cache_key)
    if upcoming is None:
        upcoming = _get_upcoming_events(channels, anonymous, contributor)
        upcoming = upcoming[:length]
        cache.set(cache_key, upcoming, 60 * 60)
    return upcoming
def get_featured_events(channels, user, length=settings.FEATURED_SIDEBAR_COUNT):
    """Return a list of events that are sorted by their score.

    :param channels: iterable of channel objects (only their ``id`` is
        used, to scope the cache key).
    :param user: the requesting user; ``is_active`` and contributorship
        decide which pre-computed ranking variant applies.
    :param length: maximum number of events to return.
    """
    anonymous = True
    contributor = False
    if user.is_active:
        anonymous = False
        if is_contributor(user):
            contributor = True
    cache_key = 'featured_events_%s_%s' % (int(anonymous), int(contributor))
    cache_key += ','.join(str(x.id) for x in channels)
    event = most_recent_event()
    if event:
        # Use the full modification timestamp as a cache buster.
        # Keying on `modified.microsecond` alone (as before) collides
        # whenever two distinct timestamps share a microsecond value,
        # which would serve stale featured lists.
        cache_key += event.modified.isoformat()
    featured = cache.get(cache_key)
    if featured is None:
        featured = _get_featured_events(channels, anonymous, contributor)
        featured = featured[:length]
        cache.set(cache_key, featured, 60 * 60)
    return [x.event for x in featured]