def deferred_ratings():
    """ This is the deferred ratings table calculation process """
    # Disable the in-context cache to save memory
    # (it doesn't give any speed advantage for this processing)
    Context.disable_cache()
    t0 = time.time()
    try:
        _create_ratings()
    except DeadlineExceededError:
        # Hit the deadline: log the error and prevent a retry
        logging.error(u"Deadline exceeded in ratings, failing permanently")
        # Raising PermanentTaskFailure prevents this task from being run again
        raise deferred.PermanentTaskFailure()
    except Exception as ex:
        logging.error(u"Exception in ratings, failing permanently: {0}".format(ex))
        # Avoid having the task retried
        raise deferred.PermanentTaskFailure()
    t1 = time.time()
    logging.info(u"Ratings calculation finished in {0:.2f} seconds".format(t1 - t0))
    StatsModel.log_cache_stats()
    # Do not maintain the cache in memory between runs
    StatsModel.clear_cache()
def deferred_update():
    """ Update all users in the datastore with lowercase nick and full name """
    logging.info("Deferred user update starting")
    CHUNK_SIZE = 200
    count = 0
    offset = 0
    # Disable the in-context cache to save memory
    Context.disable_cache()
    try:
        q = UserModel.query()
        while True:
            ulist = []
            chunk = 0
            for um in q.fetch(CHUNK_SIZE, offset=offset):
                chunk += 1
                if um.nick_lc is None:
                    # Not yet updated: set the lowercase nick and full name
                    try:
                        um.nick_lc = um.nickname.lower()
                        um.name_lc = um.prefs.get("full_name", "").lower() if um.prefs else ""
                        ulist.append(um)
                    except Exception as e:
                        logging.info("Exception in deferred_update() when setting nick_lc: {0}".format(e))
            if ulist:
                # Write the modified entities back in a single batch
                try:
                    ndb.put_multi(ulist)
                    count += len(ulist)
                except Exception as e:
                    logging.info("Exception in deferred_update() when updating ndb: {0}".format(e))
            if chunk < CHUNK_SIZE:
                # Fetched fewer entities than requested: we're done
                break
            offset += CHUNK_SIZE
    except Exception as e:
        logging.info("Exception in deferred_update(): {0}, already updated {1} records".format(e, count))
        # Do not retry the task
        raise deferred.PermanentTaskFailure()
    logging.info("Completed updating {0} user records".format(count))
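
# A minimal sketch (an assumption, not from the original module) of how
# deferred_update() would typically be enqueued as a background task with the
# App Engine deferred library; the function name start_user_update and the
# queue name "admin" are hypothetical. The import may already exist above.

from google.appengine.ext import deferred

def start_user_update():
    """ Enqueue the user update to run on a task queue """
    deferred.defer(deferred_update, _queue="admin")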
def deferred_ratings(timestamp):
    """ This is the deferred ratings table calculation process """
    # Disable the in-context cache to save memory
    # (it doesn't give any speed advantage for this processing)
    Context.disable_cache()
    t0 = time.time()
    _create_ratings(timestamp)
    t1 = time.time()
    logging.info(u"Ratings calculation finished in {0:.2f} seconds".format(t1 - t0))
def deferred_stats(from_time, to_time):
    """ This is the deferred stats collection process """
    # Disable the in-context cache to save memory
    # (it doesn't give any speed advantage for this processing)
    Context.disable_cache()
    t0 = time.time()
    _run_stats(from_time, to_time)
    t1 = time.time()
    logging.info(u"Stats calculation finished in {0:.2f} seconds".format(t1 - t0))
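
# A minimal sketch (an assumption, not part of the original module) of how a
# scheduled cron handler might compute a time window and kick off the stats
# collection; the function name start_daily_stats_run and the queue name
# "stats" are hypothetical. The imports may already exist above.

from datetime import datetime, timedelta
from google.appengine.ext import deferred

def start_daily_stats_run():
    """ Enqueue a stats calculation covering the preceding 24 hours """
    to_time = datetime.utcnow()
    from_time = to_time - timedelta(days=1)
    deferred.defer(deferred_stats, from_time, to_time, _queue="stats")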