def stats_cron():
    """Reconcile the persisted daily user/site counters with live totals.

    Counts the UserSettings entities and the non-example Site entities
    currently in the datastore, compares each against the counter value
    already saved for today's DAY period, and records the delta via
    count_view so the stored counters catch up. Returns a rendered
    report template showing both the live and saved numbers.
    """
    user_total = GqlQuery('SELECT __key__ FROM UserSettings').count(None)
    site_total = GqlQuery(
        'SELECT __key__ FROM Site WHERE example=false').count(None)
    today = datetime.now()
    stored_users = counter.load_and_get_count(
        'user:all', period_type=PeriodType.DAY, period=today) or 0
    stored_sites = counter.load_and_get_count(
        'site:all', period_type=PeriodType.DAY, period=today) or 0
    count_view('user:all', delta=(user_total - stored_users),
               batch_size=None, period=today)
    count_view('site:all', delta=(site_total - stored_sites),
               batch_size=None, period=today)
    return render_template(
        '_stats_cron.html',
        current_user_count=user_total,
        current_site_count=site_total,
        saved_user_count=stored_users,
        saved_site_count=stored_sites)
def get(self):
    """Increment the 'visitor' counter and write the running total.

    The increment is best-effort: if it fails, the error is logged and
    the handler still responds with the current count.
    """
    name = "visitor"
    try:
        counter.load_and_increment_counter(name)
    except Exception as error:
        # Fix: the original used a bare `except:` and then referenced an
        # undefined name `error`, so the handler itself raised NameError
        # instead of logging the failure. Bind the exception explicitly.
        logging.info(repr(error))
    self.response.out.write('Visitor: ' + str(counter.load_and_get_count(name)))
def generate_count_report():
    """Aggregate per-email livecount counters into user and process records.

    For every MoveProcess, reads each email's total/ok/error counters
    (namespaced by the process key id), folds them into that email's
    ProcessedUser entity, and stores the process-wide sums on the process
    itself. All datastore writes are issued asynchronously and awaited at
    the end.
    """
    # Process counters with the latest syntax
    futures = []
    processes = MoveProcess.query().fetch()
    logging.info('Generating count report in [%s] processes', len(processes))
    for process in processes:
        # Hoisted: the namespace is invariant per process; the original
        # recomputed str(process.key.id()) for every counter read.
        namespace = str(process.key.id())
        process_ok_count = 0
        process_error_count = 0
        process_total_count = 0
        for email in process.emails:
            user = ProcessedUser.get_by_id(email)
            if not user:
                # NOTE(review): total_count is a list of distinct observed
                # totals, not a running sum — presumably intentional; confirm.
                user = ProcessedUser(id=email, ok_count=0, error_count=0,
                                     total_count=list())
            total_count = counter.load_and_get_count(
                '%s_total_count' % email, namespace=namespace)
            if total_count:
                process_total_count += total_count
            else:
                total_count = 0
            if total_count not in user.total_count:
                user.total_count.append(total_count)
            ok_count = counter.load_and_get_count(
                '%s_ok_count' % email, namespace=namespace)
            if ok_count:
                process_ok_count += ok_count
                user.ok_count += ok_count
            error_count = counter.load_and_get_count(
                '%s_error_count' % email, namespace=namespace)
            if error_count:
                process_error_count += error_count
                user.error_count += error_count
            futures.append(user.put_async())
        logging.info('Updating process counters: total [%s] ok [%s] error ['
                     '%s]', process_total_count, process_ok_count,
                     process_error_count)
        process.ok_count = process_ok_count
        process.error_count = process_error_count
        process.total_count = process_total_count
        futures.append(process.put_async())
    # Fix: wait for the async puts with a plain loop; the original built a
    # throwaway list of results via a side-effect list comprehension.
    for future in futures:
        future.get_result()
def get(self):
    """Serve counter values: direct lookup (plain text) or a ranked query (JSON).

    Request params:
      counter_name: if non-empty, read that single counter directly.
      namespace:    counter namespace; defaults to "share_domain_count".
      fetch_limit:  max entities for the datastore query path; defaults to 20.
    """
    counter_name = self.request.get('counter_name')
    namespace = self.request.get('namespace')
    if not namespace:
        namespace = "share_domain_count"
    fetch_limit = self.request.get('fetch_limit')
    if not fetch_limit:
        fetch_limit = "20"
    if counter_name:
        logging.info("querying counter directly for counter_name = " +
                     str(counter_name) + ", namespace = " + str(namespace))
        count = counter.load_and_get_count(counter_name, namespace=namespace)
        self.response.set_status(200)
        self.response.out.write(count)
    else:
        logging.info("querying datastore for LivecountCounters for "
                     "counter_name = " + str(counter_name) + ", namespace = " +
                     str(namespace))
        counter_entities_query = LivecountCounter.all().order('-count')
        # Fix: removed the dead `if counter_name: ...filter(...)` branch —
        # counter_name is always falsy here (we are in the else of
        # `if counter_name`), so only the namespace filter can ever apply.
        if namespace:
            counter_entities_query.filter("namespace = ", namespace)
        counter_entities = counter_entities_query.fetch(int(fetch_limit))
        logging.info("counter_entities: " + str(counter_entities))
        counters = []
        for entity in counter_entities:
            counter_data = {'key': str(entity.key().name()),
                            'count': str(entity.count)}
            counters.append(counter_data)
        json_counters_data = simplejson.dumps(counters)
        if json_counters_data:
            self.response.set_status(200)
            self.response.out.write(json_counters_data)
    return
def get_period_and_count(name, period_type, period):
    """Resolve *period* to its canonical scope and read the matching counter.

    Returns a (scoped_period, count) tuple, where scoped_period is the
    value normalized by PeriodType.find_scope for the given period_type.
    """
    scoped_period = PeriodType.find_scope(period_type, period)
    count_value = counter.load_and_get_count(
        name, period_type=period_type, period=scoped_period)
    return scoped_period, count_value