def get_caches(self):
    """Return a list of ``(host, raw_client)`` pairs, one per memcached
    host configured in ``conf.BACKEND``.

    Hosts whose cache client cannot be resolved are logged and skipped
    (best effort), matching the original silent-per-host behavior.
    """
    # NOTE(review): the original bound its result list to the name
    # ``caches``, shadowing the cache registry indexed below with
    # '%s://%s?%s'.  Indexing the (empty) local list with a string key
    # raised TypeError on every host, the except swallowed it, and the
    # function also never returned the list it built — so callers always
    # got None.  Renaming the local and adding the return fixes both.
    results = []
    schema, hosts, params = parse_backend_uri(conf.BACKEND)
    for host in hosts.split(';'):
        try:
            # ``caches`` here is the module-level cache registry
            # (presumably django.core.cache.caches — confirm import).
            results.append(
                (host, caches['%s://%s?%s' % (schema, host, params)]._cache))
        except Exception as e:
            self.logger.exception(e)
    return results
def info(request):
    """Render the cache-status page (``cache/info.html``).

    Collects memcached usage statistics, the enabled state of the three
    proxy heuristics (down/up/nearby), and the configured tile origins,
    sources and allowed hosts.
    """
    # Result unused, but the call is kept in case it has side effects
    # (e.g. priming a stats cache) — TODO confirm and drop if not.
    stats_tr = stats_tilerequest()
    stats_c = stats_cache()

    caches = []
    c = settings.TILE_ACCELERATOR['cache']['memory']
    size = int(stats_c['bytes'])
    maxsize = int(stats_c['limit_maxbytes'])
    # Guard the division: a zero limit (misconfigured/unreachable cache)
    # previously raised ZeroDivisionError while rendering the page.
    if maxsize:
        size_percentage = str((100.0 * size) / maxsize) + "%"
    else:
        size_percentage = "0%"
    caches.append({
        'name': 'memory',
        'enabled': c['enabled'],
        'description': c['description'],
        'type': c['type'],
        'size': formatMemorySize(size, original='B'),
        'maxsize': formatMemorySize(maxsize, original='B'),
        'size_percentage': size_percentage,
        'minzoom': c['minZoom'],
        'maxzoom': c['maxZoom'],
        'expiration': c['expiration'],
        'link_memcached': '/cache/stats/export/cache.json'
    })

    heuristics = []
    h = settings.TILE_ACCELERATOR['heuristic']['down']
    heuristics.append({
        'name': 'down',
        'enabled': h['enabled'],
        'description': h['description']
    })
    h = settings.TILE_ACCELERATOR['heuristic']['up']
    heuristics.append({
        'name': 'up',
        'enabled': h['enabled'],
        'description': h['description']
    })
    h = settings.TILE_ACCELERATOR['heuristic']['nearby']
    heuristics.append({
        'name': 'nearby',
        'enabled': h['enabled'],
        'description': h['description']
    })

    context_dict = {
        'origins': TileOrigin.objects.all().order_by('name', 'type'),
        'sources': TileSource.objects.all().order_by('name', 'type'),
        'caches': caches,
        'heuristics': heuristics,
        'hosts': settings.PROXY_ALLOWED_HOSTS
    }
    return render_to_response(
        "cache/info.html",
        RequestContext(request, context_dict))
def info(request):
    """Build and render the cache-status page.

    Gathers memcached usage figures, the three proxy heuristics
    (down/up/nearby), and the configured tile origins, sources and
    allowed hosts, then renders ``cache/info.html``.
    """
    # Kept for its (possible) side effects; the result is not used here.
    stats_tr = stats_tilerequest()
    stats_c = stats_cache()

    mem_conf = settings.TILE_ACCELERATOR['cache']['memory']
    used = int(stats_c['bytes'])
    limit = int(stats_c['limit_maxbytes'])
    pct = str((100.0 * used) / limit) + "%"

    caches = [{
        'name': 'memory',
        'enabled': mem_conf['enabled'],
        'description': mem_conf['description'],
        'type': mem_conf['type'],
        'size': formatMemorySize(used, original='B'),
        'maxsize': formatMemorySize(limit, original='B'),
        'size_percentage': pct,
        'minzoom': mem_conf['minZoom'],
        'maxzoom': mem_conf['maxZoom'],
        'expiration': mem_conf['expiration'],
        'link_memcached': '/cache/stats/export/cache.json',
    }]

    # All three heuristics expose the same name/enabled/description shape.
    heuristics = []
    for hname in ('down', 'up', 'nearby'):
        hconf = settings.TILE_ACCELERATOR['heuristic'][hname]
        heuristics.append({
            'name': hname,
            'enabled': hconf['enabled'],
            'description': hconf['description'],
        })

    context_dict = {
        'origins': TileOrigin.objects.all().order_by('name', 'type'),
        'sources': TileSource.objects.all().order_by('name', 'type'),
        'caches': caches,
        'heuristics': heuristics,
        'hosts': settings.PROXY_ALLOWED_HOSTS,
    }
    return render_to_response("cache/info.html",
                              RequestContext(request, context_dict))
def info(request):
    """Render the cache-status page for the tilejet accelerator.

    Gathers, best effort: memcached statistics, heuristic settings,
    the active celery queues, the scheduled (beat) task names and the
    geowatch topics.  Sections whose backing service is offline degrade
    to empty lists instead of failing the page.
    """
    caches = []
    c = settings.TILEJET['cache']['memory']
    stats_c = stats_cache()
    if stats_c:
        size = int(stats_c['bytes'])
        maxsize = int(stats_c['limit_maxbytes'])
        size_percentage = format(((100.0 * size) / maxsize), '.4f') + "%"
        items = int(stats_c['curr_items'])
        caches.append({
            'name': 'memory',
            'enabled': c['enabled'],
            'description': c['description'],
            'type': c['type'],
            'size': formatMemorySize(size, original='B'),
            'maxsize': formatMemorySize(maxsize, original='B'),
            'size_percentage': size_percentage,
            'items': items,
            'minzoom': c['minZoom'],
            'maxzoom': c['maxZoom'],
            'expiration': c['expiration'],
            'link_memcached': '/cache/stats/export/cache.json'
        })

    # The three heuristics share the same name/enabled/description shape.
    heuristics = []
    for hname in ('down', 'up', 'nearby'):
        h = settings.TILEJET['heuristic'][hname]
        heuristics.append({
            'name': hname,
            'enabled': h['enabled'],
            'description': h['description']
        })

    # Build the queues list from the active celery workers (best effort).
    queues = []
    try:
        import celery
        active = celery.current_app.control.inspect().active_queues()
        for key, raw_queues in active.items():
            for q in raw_queues:
                queues.append({
                    'name': getValue(q, u'name', fallback=''),
                    'routing_key': getValue(q, u'routing_key', fallback=''),
                    'durable': getValue(q, u'durable', fallback=False),
                    'ttl': getValue(q[u'queue_arguments'], u'x-message-ttl', fallback=-1)
                })
        # Message counts are not collected yet; report zero per queue.
        for q in queues:
            q['messages'] = 0
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # still propagate.
        print("Could not generate queues. Is celery or RabbitMQ offline?")

    # Build the list of scheduled celery-beat task names (best effort).
    scheduled = []
    try:
        import celery
        # Fix: ``beat`` was referenced without ever being imported, so
        # this branch always failed into the except below.
        from celery import beat
        s = beat.Scheduler(app=celery.current_app)
        scheduled = s.schedule.keys()
    except Exception:
        print("Could not build scheduled tasks. Is celery beat running?")

    client = provision_geowatch_client()
    topics = client.list_topics()

    context_dict = {
        'origins': getTileOrigins(),
        'sources': getTileSources(),
        'caches': caches,
        'heuristics': heuristics,
        'queues': queues,
        'topics': topics,
        'scheduled': scheduled,
        'stats': settings.TILEJET_LIST_STATS,
        'hosts': settings.PROXY_ALLOWED_HOSTS
    }
    return render_to_response(
        "cache/info.html",
        RequestContext(request, context_dict))