def update_activity():
    """Refresh activity counts for all live events.

    For every non-banned event in the "live" state, asks the activity
    service for current activity, records it in the per-event history,
    updates the event row, and broadcasts the new count (plus total view
    count where available) over websockets.  Finally replaces the cached
    "active events" query with the 1000 busiest events.
    """
    updated_events = {}
    counts_by_event = collections.Counter()

    live_events = (ev for ev in LiveUpdateEvent._all()
                   if ev.state == "live" and not ev.banned)

    for batch in utils.in_chunks(live_events, size=100):
        # Map the activity service's context ids back to event ids, and
        # collect the events new enough to have view counts, in one pass.
        fullname_to_id = {}
        view_countable = []
        for ev in batch:
            fullname_to_id[ev._fullname] = ev._id
            if ev._date >= g.liveupdate_min_date_viewcounts:
                view_countable.append(ev._fullname)

        # Kick off the view-count fetch so it runs while we wait on the
        # activity service.
        pending_view_counts = ViewCountsQuery.execute_async(view_countable)

        try:
            with c.activity_service.retrying(attempts=4) as svc:
                activity_by_context = svc.count_activity_multi(
                    fullname_to_id.keys())
        except TTransportException:
            # Service unreachable even after retries; skip this batch.
            continue

        view_counts = pending_view_counts.result()

        for context_id, activity in activity_by_context.iteritems():
            event_id = fullname_to_id[context_id]

            try:
                LiveUpdateActivityHistoryByEvent.record_activity(
                    event_id, activity.count)
            except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
                g.log.warning("Failed to update activity history for %r: %s",
                              event_id, e)

            try:
                event = LiveUpdateEvent.update_activity(
                    event_id, activity.count, activity.is_fuzzed)
            except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
                g.log.warning("Failed to update event activity for %r: %s",
                              event_id, e)
            else:
                updated_events[event_id] = event
                counts_by_event[event_id] = activity.count

            websockets.send_broadcast(
                "/live/" + event_id,
                type="activity",
                payload={
                    "count": activity.count,
                    "fuzzed": activity.is_fuzzed,
                    "total_views": view_counts.get(context_id),
                },
            )

    top_events = [updated_events[event_id]
                  for event_id, count in counts_by_event.most_common(1000)]

    with CachedQueryMutator() as m:
        m.replace(get_active_events(), top_events,
                  ttl=datetime.timedelta(days=3))

    # ensure that all the amqp messages we've put on the worker's queue are
    # sent before we allow this script to exit.
    amqp.worker.join()
def update_activity():
    """Refresh visitor-activity counts for live events from Cassandra.

    Walks every row of ActiveVisitorsByLiveUpdateEvent, records the current
    visitor count in the per-event activity history, updates the event itself
    (fuzzing small counts so exact visitor numbers aren't exposed), and
    broadcasts the new count over websockets.  Finally replaces the cached
    "active events" query with the 1000 busiest events and flushes the amqp
    worker queue.
    """
    events = {}
    event_counts = collections.Counter()

    event_ids = ActiveVisitorsByLiveUpdateEvent._cf.get_range(
        column_count=1, filter_empty=False)
    for event_id, is_active in event_ids:
        count = 0
        if is_active:
            try:
                count = ActiveVisitorsByLiveUpdateEvent.get_count(event_id)
            except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
                g.log.warning("Failed to fetch activity count for %r: %s",
                              event_id, e)
                # BUG FIX: this was `return`, which aborted the entire job on
                # one transient failure -- skipping every remaining event, the
                # cached-query update, and the amqp flush below.  The other
                # transient handlers in this loop log and carry on; do the
                # same and just skip this event.
                continue

        try:
            LiveUpdateActivityHistoryByEvent.record_activity(event_id, count)
        except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
            g.log.warning("Failed to update activity history for %r: %s",
                          event_id, e)

        # Fuzz small counts so precise low visitor numbers aren't revealed.
        is_fuzzed = False
        if count < ACTIVITY_FUZZING_THRESHOLD:
            count = utils.fuzz_activity(count)
            is_fuzzed = True

        try:
            event = LiveUpdateEvent.update_activity(event_id, count, is_fuzzed)
        except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
            g.log.warning("Failed to update event activity for %r: %s",
                          event_id, e)
        else:
            events[event_id] = event
            event_counts[event_id] = count

        websockets.send_broadcast(
            "/live/" + event_id,
            type="activity",
            payload={
                "count": count,
                "fuzzed": is_fuzzed,
            },
        )

    top_event_ids = [
        event_id for event_id, count in event_counts.most_common(1000)]
    top_events = [events[event_id] for event_id in top_event_ids]

    query_ttl = datetime.timedelta(days=3)
    with CachedQueryMutator() as m:
        m.replace(get_active_events(), top_events, ttl=query_ttl)

    # ensure that all the amqp messages we've put on the worker's queue are
    # sent before we allow this script to exit.
    amqp.worker.join()
def update_activity():
    """Refresh visitor-activity counts for live events from Cassandra.

    Walks every row of ActiveVisitorsByLiveUpdateEvent, records the current
    visitor count in the per-event activity history, updates the event itself
    (fuzzing small counts so exact visitor numbers aren't exposed), and
    broadcasts the new count over websockets.  Finally replaces the cached
    "active events" query with the 1000 busiest events and flushes the amqp
    worker queue.
    """
    events = {}
    event_counts = collections.Counter()

    event_ids = ActiveVisitorsByLiveUpdateEvent._cf.get_range(
        column_count=1, filter_empty=False)
    for event_id, is_active in event_ids:
        count = 0
        if is_active:
            try:
                count = ActiveVisitorsByLiveUpdateEvent.get_count(event_id)
            except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
                g.log.warning("Failed to fetch activity count for %r: %s",
                              event_id, e)
                # BUG FIX: this was `return`, which aborted the entire job on
                # one transient failure -- skipping every remaining event, the
                # cached-query update, and the amqp flush below.  The other
                # transient handlers in this loop log and carry on; do the
                # same and just skip this event.
                continue

        try:
            LiveUpdateActivityHistoryByEvent.record_activity(event_id, count)
        except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
            g.log.warning("Failed to update activity history for %r: %s",
                          event_id, e)

        # Fuzz small counts so precise low visitor numbers aren't revealed.
        is_fuzzed = False
        if count < ACTIVITY_FUZZING_THRESHOLD:
            count = utils.fuzz_activity(count)
            is_fuzzed = True

        try:
            event = LiveUpdateEvent.update_activity(event_id, count, is_fuzzed)
        except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
            g.log.warning("Failed to update event activity for %r: %s",
                          event_id, e)
        else:
            events[event_id] = event
            event_counts[event_id] = count

        websockets.send_broadcast(
            "/live/" + event_id,
            type="activity",
            payload={
                "count": count,
                "fuzzed": is_fuzzed,
            },
        )

    top_event_ids = [event_id
                     for event_id, count in event_counts.most_common(1000)]
    top_events = [events[event_id] for event_id in top_event_ids]

    query_ttl = datetime.timedelta(days=3)
    with CachedQueryMutator() as m:
        m.replace(get_active_events(), top_events, ttl=query_ttl)

    # ensure that all the amqp messages we've put on the worker's queue are
    # sent before we allow this script to exit.
    amqp.worker.join()
def update_activity():
    """Refresh activity counts for all live events via the activity service.

    For each batch of non-banned, live-state events, asks the activity
    service for current activity, records it in the per-event history,
    updates the event row, and broadcasts the new count over websockets.
    Finally replaces the cached "active events" query with the 1000 busiest
    events and flushes the amqp worker queue.
    """
    updated_events = {}
    counts_by_event = collections.Counter()

    live_events = (ev for ev in LiveUpdateEvent._all()
                   if ev.state == "live" and not ev.banned)

    for batch in utils.in_chunks(live_events, size=100):
        # Map the activity service's context ids back to event ids.
        context_to_id = {"LiveUpdateEvent_" + ev._id: ev._id for ev in batch}

        try:
            with c.activity_service.retrying(attempts=4) as svc:
                activity_by_context = svc.count_activity_multi(
                    context_to_id.keys())
        except TTransportException:
            # Service unreachable even after retries; skip this batch.
            continue

        for context_id, activity in activity_by_context.iteritems():
            event_id = context_to_id[context_id]

            try:
                LiveUpdateActivityHistoryByEvent.record_activity(
                    event_id, activity.count)
            except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
                g.log.warning("Failed to update activity history for %r: %s",
                              event_id, e)

            try:
                event = LiveUpdateEvent.update_activity(
                    event_id, activity.count, activity.is_fuzzed)
            except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
                g.log.warning("Failed to update event activity for %r: %s",
                              event_id, e)
            else:
                updated_events[event_id] = event
                counts_by_event[event_id] = activity.count

            websockets.send_broadcast(
                "/live/" + event_id,
                type="activity",
                payload={
                    "count": activity.count,
                    "fuzzed": activity.is_fuzzed,
                },
            )

    top_events = [updated_events[event_id]
                  for event_id, count in counts_by_event.most_common(1000)]

    with CachedQueryMutator() as m:
        m.replace(get_active_events(), top_events,
                  ttl=datetime.timedelta(days=3))

    # ensure that all the amqp messages we've put on the worker's queue are
    # sent before we allow this script to exit.
    amqp.worker.join()
def GET_listing(self, filter, num, after, before, count):
    """Render a page listing live threads selected by *filter*.

    Supported filters: "open", "closed", "active", and (admins only)
    "reported".  Any other value 404s.  `before`/`after` drive pagination;
    paging backwards anchors on `before` and walks in reverse.
    """
    descending = False
    if before:
        after, descending = before, True

    event_builder_cls = LiveUpdateEventBuilder
    row_wrapper = Wrapped
    listing_factory = Listing

    if filter == "open":
        page_title = _("live threads")
        source = queries.get_live_events("new", "all")
    elif filter == "closed":
        page_title = _("closed threads")
        source = queries.get_complete_events("new", "all")
    elif filter == "active":
        page_title = _("most active threads")
        source = queries.get_active_events()
    elif filter == "reported":
        # the reported-threads view is admin-only
        if not c.user_is_admin:
            self.abort403()
        page_title = _("reported threads")
        source = queries.get_reported_events()
        event_builder_cls = LiveUpdateReportedEventBuilder
        row_wrapper = pages.LiveUpdateReportedEventRow
        listing_factory = pages.LiveUpdateReportedEventListing
    else:
        self.abort404()

    event_builder = event_builder_cls(
        source,
        num=num,
        after=after,
        reverse=descending,
        count=count,
        wrap=row_wrapper,
        skip=True,
    )

    return pages.LiveUpdateMetaPage(
        title=page_title,
        content=listing_factory(event_builder).listing(),
    ).render()
def GET_listing(self, filter, num, after, before, count):
    """Render a page listing live threads selected by *filter*.

    Supported filters: "open", "closed", "active", "reported" (admins only),
    "happening_now" (featured threads), and "mine" (requires login).  Any
    other value 404s.  Most tabs are restricted to employees; only
    "happening_now" and "mine" are open to everyone.  `before`/`after` drive
    pagination; paging backwards anchors on `before` and walks in reverse.
    """
    descending = False
    if before:
        after, descending = before, True

    event_builder_cls = LiveUpdateEventBuilder
    row_wrapper = Wrapped
    listing_factory = Listing
    # for grepping: this is used like VEmployee
    employees_only = True

    if filter == "open":
        page_title = _("live threads")
        source = queries.get_live_events("new", "all")
    elif filter == "closed":
        page_title = _("closed threads")
        source = queries.get_complete_events("new", "all")
    elif filter == "active":
        page_title = _("most active threads")
        source = queries.get_active_events()
    elif filter == "reported":
        # the reported-threads view is admin-only
        if not c.user_is_admin:
            self.abort403()
        page_title = _("reported threads")
        source = queries.get_reported_events()
        event_builder_cls = LiveUpdateReportedEventBuilder
        row_wrapper = pages.LiveUpdateReportedEventRow
        listing_factory = pages.LiveUpdateReportedEventListing
    elif filter == "happening_now":
        featured_events = get_all_featured_events()
        page_title = _("featured threads")
        source = sorted(set(featured_events.values()))
        event_builder_cls = featured_event_builder_factory(featured_events)
        row_wrapper = pages.LiveUpdateFeaturedEvent
        employees_only = False
    elif filter == "mine":
        if not c.user_is_loggedin:
            self.abort404()
        page_title = _("my live threads")
        source = queries.get_contributor_events(c.user)
        employees_only = False
    else:
        self.abort404()

    if employees_only and not c.user.employee:
        self.abort403()

    event_builder = event_builder_cls(
        source,
        num=num,
        after=after,
        reverse=descending,
        count=count,
        wrap=row_wrapper,
        skip=True,
    )

    return pages.LiveUpdateMetaPage(
        title=page_title,
        content=listing_factory(event_builder).listing(),
    ).render()