def stats_map(request, origin=None, source=None, date=None):
    """Render the tile-request statistics map page.

    Loads tile-request stats from the memory cache and/or a JSON file
    (depending on settings), then renders cache/stats_map_3.html with the
    available dates plus the resolved origin/source objects (or None when
    the name does not match a row).
    """
    stats = None
    if settings.STATS_SAVE_MEMORY:
        cache, stats = get_from_cache('default', 'stats_tilerequests')
    # File is only consulted as a fallback when the cache had nothing.
    if settings.STATS_SAVE_FILE and not stats:
        stats = get_from_file(settings.STATS_REQUEST_FILE, filetype='json')
    dates = []
    if stats:
        if 'by_date' in stats:
            dates = stats['by_date'].keys()
    #print stats['by_date_location'].keys()
    context_dict = {
        'date': date,
        'origins': getTileOrigins(),
        'sources': getTileSources(),
        'dates': dates
    }
    # Narrowed from bare "except:"; a missing row (DoesNotExist) simply
    # yields None in the context, but SystemExit/KeyboardInterrupt now
    # propagate instead of being swallowed.
    try:
        context_dict['origin'] = TileOrigin.objects.get(name=origin)
    except Exception:
        context_dict['origin'] = None
    try:
        context_dict['source'] = TileSource.objects.get(name=source)
    except Exception:
        context_dict['source'] = None
    return render_to_response(
        "cache/stats_map_3.html",
        RequestContext(request, context_dict))
def origins_list(request):
    """Render the list of all tile origins."""
    # Stats lookup was already disabled here:
    #cache, stats = get_from_cache('default','stats_tilerequests')
    origins = getTileOrigins()
    return render_to_response(
        "cache/origins_list.html",
        RequestContext(request, {'origins': origins}))
def proxy_tms(request, origin=None, slug=None, z=None, x=None, y=None, u=None, ext=None): #starttime = time.clock() # Check Existing Tile Sourcesi match_tilesource = None tilesources = getTileSources(proxy=True) for tilesource in tilesources: if tilesource['name'] == slug: match_tilesource = tilesource break if match_tilesource: if match_tilesource['origin'] != origin: print "Origin is not correct. Tilesource is unique, but origin need to match too." print tilesource['origin'] return None else: tile = _requestTile( request, tileservice=None, tilesource=match_tilesource, tileorigin=match_tilesource['origin'], z=z,x=x,y=y,u=u,ext=ext) #print "Time Elapsed: "+str(time.clock()-starttime) return tile # Check Existing Tile Origins to see if we need to create a new tile source match_tileorigin = None if origin: tileorigins = getTileOrigins(proxy=True) for tileorigin in tileorigins: if tileorigin.name == origin: match_tileorigin = tileorigin break if match_tileorigin: to = match_tileorigin if to.multiple: ts_url = to.url.replace('{slug}', slug) if TileSource.objects.filter(url=ts_url).count() > 0: print "Error: This souldn't happen. You should have matched the tilesource earlier so you don't duplicate" return None exts = string_to_list(to.extensions) ts_pattern = url_to_pattern(ts_url, extensions=exts) ts = TileSource(auto=True,url=ts_url,pattern=ts_pattern,name=slug,type=to.type,extensions=exts,origin=to) ts.save() reloadTileSources(proxy=False) reloadTileSources(proxy=True) return _requestTile(request,tileservice=None,tilesource=tilesource,z=z,x=x,y=y,u=u,ext=ext) else: ts = TileSource(auto=True,url=to.url,pattern=to.pattern,name=to.name,type=to.type,extensions=to.extensions) ts.save() reloadTileSources(proxy=False) reloadTileSources(proxy=True) return _requestTile(request,tileservice=None,tilesource=tilesource,z=z,x=x,y=y,u=u,ext=ext) else: return None
def info(request): #stats_tr = stats_tilerequest() #cache, stats_tr = get_from_cache( # settings.CACHES['default']['LOCATION'], # settings.CACHES['default'], # 'default', # 'stats_tilerequests', # GEVENT_MONKEY_PATCH=settings.TILEJET_GEVENT_MONKEY_PATCH) caches = [] c = settings.TILEJET['cache']['memory'] stats_c = stats_cache() if stats_c: size = int(stats_c['bytes']) maxsize = int(stats_c['limit_maxbytes']) size_percentage = format(((100.0 * size) / maxsize),'.4f')+"%" items = int(stats_c['curr_items']) caches.append({ 'name': 'memory', 'enabled': c['enabled'], 'description': c['description'], 'type': c['type'], 'size': formatMemorySize(size, original='B'), 'maxsize': formatMemorySize(maxsize, original='B'), 'size_percentage': size_percentage, 'items': items, 'minzoom': c['minZoom'], 'maxzoom': c['maxZoom'], 'expiration': c['expiration'], 'link_memcached': '/cache/stats/export/cache.json' }) heuristics = [] h = settings.TILEJET['heuristic']['down'] heuristics.append({ 'name': 'down', 'enabled': h['enabled'], 'description': h['description'] }) h = settings.TILEJET['heuristic']['up'] heuristics.append({ 'name': 'up', 'enabled': h['enabled'], 'description': h['description'] }) h = settings.TILEJET['heuristic']['nearby'] heuristics.append({ 'name': 'nearby', 'enabled': h['enabled'], 'description': h['description'] }) # Build Queues List queues = [] try: import celery for key, raw_queues in celery.current_app.control.inspect().active_queues().items(): for q in raw_queues: queues.append({ 'name': getValue(q, u'name', fallback=''), 'routing_key': getValue(q, u'routing_key', fallback=''), 'durable': getValue(q, u'durable', fallback=False), 'ttl': getValue(q[u'queue_arguments'], u'x-message-ttl', fallback=-1) }) #import pyrabbit.api #pyrabbit_client = pyrabbit.api.Client(settings.BROKER_DOMAIN+':'+settings.BROKER_PORT, settings.BROKER_USER, settings.BROKER_PASSWORD) for q in queues: q['messages'] = 0 except: print "Could not generate queues. 
Is celery or RabbitMQ offline?" # Build Schedules Tasks scheduled = [] try: import celery s = beat.Scheduler(app=celery.current_app) scheduled = s.schedule.keys() except: print "Could not build scheduled tasks. Is celery beat running?" #topics = [] #try: # from kafka import KafkaClient # kafka = KafkaClient(settings.TILEJET_GEOWATCH_HOST) # for topic in kafka.topics: # topic2 = { # 'name': topic, # 'partitions': len(kafka.topic_partitions.get(topic, [])) # } # topics.append(topic2) #except: # print "Could not generate topics. Is Kafka offline?" client = provision_geowatch_client() topics = client.list_topics() context_dict = { 'origins': getTileOrigins(), 'sources': getTileSources(), 'caches': caches, 'heuristics': heuristics, 'queues': queues, 'topics': topics, 'scheduled': scheduled, 'stats': settings.TILEJET_LIST_STATS, 'hosts': settings.PROXY_ALLOWED_HOSTS } return render_to_response( "cache/info.html", RequestContext(request, context_dict))
def proxy(request):
    """Generic tile proxy endpoint driven by a ?url= parameter.

    Matches the raw URL against known tile-source patterns first, then
    against auto-provisioning tile origins (creating a new TileSource on
    the fly when an origin matches). Returns 400 when ?url= is missing,
    403 when the host is not allowed with DEBUG off, and 404 when nothing
    matches.
    """
    PROXY_ALLOWED_HOSTS = getattr(settings, 'PROXY_ALLOWED_HOSTS', ())
    host = None
    if 'url' not in request.GET:
        return HttpResponse("The proxy service requires a URL-encoded URL as a parameter.",
            status=400,
            content_type="text/plain"
            )
    raw_url = request.GET['url']
    url = urlsplit(raw_url)
    # Rebuild the path+query+fragment locator from the split URL.
    # NOTE(review): locator is computed but never used below — dead code?
    locator = url.path
    if url.query != "":
        locator += '?' + url.query
    if url.fragment != "":
        locator += '#' + url.fragment
    # Host allow-list is only enforced outside DEBUG mode.
    if not settings.DEBUG:
        if not validate_host(url.hostname, PROXY_ALLOWED_HOSTS):
            return HttpResponse("DEBUG is set to False but the host of the path provided to the proxy service"
                " is not in the PROXY_ALLOWED_HOSTS setting.",
                status=403,
                content_type="text/plain"
                )
    headers = {}
    # NOTE(review): host is always None here, so is_safe_url only accepts
    # relative URLs — the session cookie is likely never forwarded for
    # absolute target URLs. Confirm whether that is intended.
    if settings.SESSION_COOKIE_NAME in request.COOKIES and is_safe_url(url=raw_url, host=host):
        headers["Cookie"] = request.META["HTTP_COOKIE"]
    if request.method in ("POST", "PUT") and "CONTENT_TYPE" in request.META:
        headers["Content-Type"] = request.META["CONTENT_TYPE"]
    print "Raw URL: "+ raw_url
    match_regex = None
    match_tilesource = None
    # Try to match against existing tile sources
    #tilesources = TileSource.objects.exclude(pattern__isnull=True).exclude(pattern__exact='')
    tilesources = getTileSources(proxy=True)
    for tilesource in tilesources:
        # NOTE(review): attribute access here (tilesource.pattern) vs dict
        # access (tilesource['name']) on getTileSources() results in
        # proxy_tms — confirm which element type the helper returns.
        match = match_pattern_url(tilesource.pattern, raw_url)
        if match:
            match_regex = match
            match_tilesource = tilesource
            break
    if match_tilesource and match_regex:
        return proxy_tilesource(request, match_tilesource, match_regex)
    #else:
    #    return HttpResponse('No matching tilesource found.',RequestContext(request, {}), status=404)
    # Try to match against existing origins that can automatically create tile sources (auto=true)
    match_tileorigin = None
    #tileorigins = TileOrigin.objects.exclude(pattern__isnull=True).exclude(pattern__exact='').filter(auto=True)
    tileorigins = getTileOrigins(proxy=True)
    for tileorigin in tileorigins:
        match = match_pattern_url(tileorigin.pattern, raw_url)
        if match:
            match_regex = match
            match_tileorigin = tileorigin
            break
    if match_tileorigin and match_regex:
        to = match_tileorigin
        if to.multiple:
            # Multi-layer origin: derive a concrete source URL from {slug}.
            slug = getRegexValue(match_regex, 'slug')
            ts_url = to.url.replace('{slug}', slug)
            #print "ts_url: "+ts_url
            if TileSource.objects.filter(url=ts_url).count() > 0:
                print "Error: This souldn't happen. You should have matched the tilesource earlier so you don't duplicate"
                return None
            exts = string_to_list(to.extensions)
            ts_pattern = url_to_pattern(ts_url, extensions=exts)
            ts = TileSource(auto=True,url=ts_url,pattern=ts_pattern,name=slug,type=to.type,extensions=exts,origin=to)
            ts.save()
            # Refresh both proxy and non-proxy source caches after creation.
            reloadTileSources(proxy=False)
            reloadTileSources(proxy=True)
            return proxy_tilesource(request, ts, match_regex)
        else:
            # Single-layer origin: clone its URL/pattern into a new source.
            ts = TileSource(auto=True,url=to.url,pattern=to.pattern,name=to.name,type=to.type,extensions=to.extensions)
            ts.save()
            reloadTileSources(proxy=False)
            reloadTileSources(proxy=True)
            return proxy_tilesource(request, ts, match_regex)
    else:
        return HttpResponse('No matching tile origin or tile source found.',RequestContext(request, {}), status=404)