def dailyStats(self, tDate):
    """Run the daily per-domain stats calculation for one day.

    Args:
        tDate: date string in "%Y-%m-%d" format, or None / the literal
            string "None" (as delivered by a query parameter) to run with
            no specific target date.
    """
    try:
        # "None" arrives as a literal string when the cron URL passes an
        # unset parameter through.
        if tDate is None or tDate == "None":
            targetDate = None
        else:
            targetDate = datetime.datetime.strptime(tDate, "%Y-%m-%d")
        allStats = SessionModel.getDailyStats(targetDate)
        # Lazy %-args: the formatting only happens if the record is emitted.
        logging.info('daily stats for %s ', targetDate)
        if allStats:
            # Fixed typo in log message ("retieved" -> "retrieved").
            logging.info('retrieved %s ', len(allStats))
            self.calculateStatsPerDomain(allStats, 'daily', targetDate)
    except Exception as e:
        # Narrowed from a bare except (which also swallowed SystemExit and
        # KeyboardInterrupt); logging.exception records the traceback too.
        logging.exception('Error while running daily cron task. %s', e)
def countDailySessions(self, tDate):
    """Count one day's sessions and distinct users, and persist a StatsModel row.

    Args:
        tDate: date string in "%Y-%m-%d" format, or None / the literal
            string "None" to default to yesterday.
    """
    try:
        if tDate is None or tDate == 'None':
            today = datetime.date.today()
            logging.info('Started session count for %s', today)
            # Reuse the value computed above instead of calling today() twice.
            targetDate = today - datetime.timedelta(days=1)
        else:
            targetDate = datetime.datetime.strptime(tDate, "%Y-%m-%d").date()
        logging.info('targetDate: %s', targetDate)
        dailyData = SessionModel.getDailyStats(targetDate)
        stats = StatsModel()
        if dailyData:
            stats.totalDailyNumber = len(dailyData)
            # Distinct users for the day; None accounts are anonymous sessions.
            users = [d.instaright_account for d in dailyData
                     if d.instaright_account is not None]
            stats.totalUserNumber = len(set(users))
            stats.date = targetDate
            stats.put()
            logging.info('Link volume for %s : link= %s users=%s',
                         tDate, stats.totalDailyNumber, stats.totalUserNumber)
    except Exception as e:
        # Narrowed from a bare except; logging.exception adds the traceback.
        logging.exception('Error while running stats cron task. %s', e)
def post(self):
    """Enqueue one data-consolidation task per session recorded on a day.

    The day comes from the "date" request parameter ("%Y-%m-%d"), defaulting
    to yesterday. A memcache marker per (day, session) pair keeps a session
    from being queued twice.
    """
    raw_date = self.request.get("date", None)
    if raw_date is None:
        logging.info("no date specified calc date: yesterday")
        target_day = datetime.datetime.now().date() - datetime.timedelta(days=1)
    else:
        target_day = datetime.datetime.strptime(raw_date, "%Y-%m-%d").date()
    logging.info("calculating stats for %s" % target_day)

    day_sessions = SessionModel.getDailyStats(target_day)
    if not day_sessions:
        logging.info("no sessions for day %s" % target_day)
        return

    for session in day_sessions:
        marker = "domain_update_key%s_%s" % (target_day, session.key())
        if memcache.get(marker):
            logging.info("already processed key")
            continue
        taskqueue.add(
            queue_name="data-consolidation",
            url="/aggregate_data",
            params={"sessionKey": session.key(), "upper_limit_date": target_day},
        )
        memcache.set(marker, session.key())