def stats_map(request, origin=None, source=None, date=None):
    """Render the tile-request statistics map page.

    Loads aggregate stats from the in-memory cache and/or the stats file
    (depending on STATS_SAVE_MEMORY / STATS_SAVE_FILE settings), then
    resolves the optional origin/source names to model instances for the
    template context. Unresolvable names fall back to None.
    """
    stats = None
    if settings.STATS_SAVE_MEMORY:
        cache, stats = get_from_cache('default', 'stats_tilerequests')
    if settings.STATS_SAVE_FILE and not stats:
        stats = get_from_file(settings.STATS_REQUEST_FILE, filetype='json')
    dates = []
    if stats and 'by_date' in stats:
        dates = stats['by_date'].keys()
    context_dict = {
        'date': date,
        'origins': getTileOrigins(),
        'sources': getTileSources(),
        'dates': dates
    }
    # Narrowed from bare "except:" so system-exiting exceptions propagate;
    # lookup failures (missing/duplicate names) still fall back to None.
    try:
        context_dict['origin'] = TileOrigin.objects.get(name=origin)
    except Exception:
        context_dict['origin'] = None
    try:
        context_dict['source'] = TileSource.objects.get(name=source)
    except Exception:
        context_dict['source'] = None
    return render_to_response(
        "cache/stats_map_3.html",
        RequestContext(request, context_dict))
def sources_list(request):
    """Render the page listing all known tile sources."""
    context = {'sources': getTileSources()}
    return render_to_response(
        "cache/sources_list.html",
        RequestContext(request, context))
def sources_json(request):
    """Return a JSON summary of every tile source.

    Each entry carries the source's identity, convenience proxy/export
    links, and request counts (all-time / year / month / today) when a
    stats backend is available; counts are -1 when stats are unavailable
    so clients can distinguish "unknown" from a genuine zero.
    """
    now = datetime.datetime.now()
    dt = now
    stats = None
    if settings.STATS_SAVE_MEMORY:
        cache, stats = get_from_cache(
            settings.CACHES['default']['LOCATION'],
            settings.CACHES['default'],
            'default',
            'stats_tilerequests',
            GEVENT_MONKEY_PATCH=settings.TILEJET_GEVENT_MONKEY_PATCH)
    if settings.STATS_SAVE_FILE and not stats:
        stats = get_from_file(settings.STATS_REQUEST_FILE, filetype='json')
    sources = []
    for source in getTileSources():
        link_geojson = settings.SITEURL+'cache/stats/export/geojson/15/source/'+source['name']+'.geojson'
        link_proxy_internal = settings.SITEURL+'proxy/?url='+(source['url']).replace("{ext}", "png")
        link_proxy_external = ""
        if source['type'] in [TYPE_TMS, TYPE_TMS_FLIPPED]:
            link_proxy_external = settings.SITEURL+'cache/proxy/tms/origin/'+source['origin']+'/source/'+source['name']+'/{z}/{x}/{y}.png'
        elif source['type'] == TYPE_BING:
            link_proxy_external = settings.SITEURL+'cache/proxy/bing/origin/'+source['origin']+'/source/'+source['name']+'{u}.png'
        # Common fields, previously duplicated across both branches;
        # request counts are filled in below.
        entry = {
            'name': source['name'],
            'type': source['type_title'],
            'origin': source['origin'],
            'url': source['url'],
            'link_proxy': link_proxy_internal,
            'link_id': 'http://www.openstreetmap.org/edit#?background=custom:'+link_proxy_external,
            'link_geojson': link_geojson,
            'link_geojsonio': 'http://geojson.io/#data=data:text/x-url,'+link_geojson
        }
        if stats:
            entry['requests_all'] = getValue(stats['by_source'], source['name'], 0)
            entry['requests_year'] = getValue(getValue(stats['by_year_source'], dt.strftime('%Y')), source['name'], 0)
            entry['requests_month'] = getValue(getValue(stats['by_month_source'], dt.strftime('%Y-%m')), source['name'], 0)
            entry['requests_today'] = getValue(getValue(stats['by_date_source'], dt.strftime('%Y-%m-%d')), source['name'], 0)
        else:
            # No stats backend available: report -1 rather than 0.
            entry['requests_all'] = -1
            entry['requests_year'] = -1
            entry['requests_month'] = -1
            entry['requests_today'] = -1
        sources.append(entry)
    return HttpResponse(
        json.dumps(sources),
        content_type="application/json")
def proxy_tms(request, origin=None, slug=None, z=None, x=None, y=None, u=None, ext=None): #starttime = time.clock() # Check Existing Tile Sourcesi match_tilesource = None tilesources = getTileSources(proxy=True) for tilesource in tilesources: if tilesource['name'] == slug: match_tilesource = tilesource break if match_tilesource: if match_tilesource['origin'] != origin: print "Origin is not correct. Tilesource is unique, but origin need to match too." print tilesource['origin'] return None else: tile = _requestTile( request, tileservice=None, tilesource=match_tilesource, tileorigin=match_tilesource['origin'], z=z,x=x,y=y,u=u,ext=ext) #print "Time Elapsed: "+str(time.clock()-starttime) return tile # Check Existing Tile Origins to see if we need to create a new tile source match_tileorigin = None if origin: tileorigins = getTileOrigins(proxy=True) for tileorigin in tileorigins: if tileorigin.name == origin: match_tileorigin = tileorigin break if match_tileorigin: to = match_tileorigin if to.multiple: ts_url = to.url.replace('{slug}', slug) if TileSource.objects.filter(url=ts_url).count() > 0: print "Error: This souldn't happen. You should have matched the tilesource earlier so you don't duplicate" return None exts = string_to_list(to.extensions) ts_pattern = url_to_pattern(ts_url, extensions=exts) ts = TileSource(auto=True,url=ts_url,pattern=ts_pattern,name=slug,type=to.type,extensions=exts,origin=to) ts.save() reloadTileSources(proxy=False) reloadTileSources(proxy=True) return _requestTile(request,tileservice=None,tilesource=tilesource,z=z,x=x,y=y,u=u,ext=ext) else: ts = TileSource(auto=True,url=to.url,pattern=to.pattern,name=to.name,type=to.type,extensions=to.extensions) ts.save() reloadTileSources(proxy=False) reloadTileSources(proxy=True) return _requestTile(request,tileservice=None,tilesource=tilesource,z=z,x=x,y=y,u=u,ext=ext) else: return None
def tile_tms(request, slug=None, z=None, x=None, y=None, u=None, ext=None):
    """Serve a TMS tile for the tile service named ``slug``.

    Resolves the service by name, then the service's backing tile source,
    and delegates to _requestTile; responds 404 if either lookup fails.
    """
    # Find the requested tile service by name.
    match_tileservice = None
    for candidate in getTileServices():
        if candidate['name'] == slug:
            match_tileservice = candidate
            break
    if not match_tileservice:
        return HttpResponse(RequestContext(request, {}), status=404)
    # Resolve the backing tile source for that service.
    match_tilesource = None
    for candidate in getTileSources():
        if candidate['name'] == match_tileservice['source']:
            match_tilesource = candidate
            break
    if not match_tilesource:
        return HttpResponse(RequestContext(request, {}), status=404)
    return _requestTile(
        request,
        tileservice=match_tileservice,
        tilesource=match_tilesource,
        z=z, x=x, y=y, u=u, ext=ext)
def taskRequestTile(ts, iz, ix, iy, ext, verbose=True): now = datetime.datetime.now() # Load Logging Info #log_root = settings.LOG_REQUEST_ROOT log_format = settings.LOG_REQUEST_FORMAT #if log_root and log_format: # if not os.path.exists(log_root): # os.makedirs(log_root) #if settings.LOG_ERRORS_ROOT # if not os.path.exists(log_root): # os.makedirs(log_root) indirect_file = settings.LOG_INDIRECT_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_indirect.tsv" # Find TileSource tilesource = None tilesources = getTileSources(proxy=True) for candidate in tilesources: if candidate['id'] == ts: tilesource = candidate break if not tilesource: error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_errors.txt" with open(error_file,'a') as f: line = "Error: Could not find tilesource for primary key "+str(ts)+"." f.write(line+"\n") return #Y is always in regualar TMS before being added to task queue iyf = flip_y(ix,iy,iz) #iy, iyf = getYValues(None,tilesource,ix,iy,iz) tile_bbox = tms_to_bbox(ix,iy,iz) #Check if requested tile is within source's extents returnBlankTile = False returnErrorTile = False intersects = True if tilesource['extents']: intersects = bbox_intersects_source(tilesource,ix,iyf,iz) if not intersects: returnBlankTile = True validZoom = 0 #Check if inside source zoom levels if tilesource['minZoom'] or tilesource['maxZoom']: if (tilesource['minZoom'] and iz < tilesource['minZoom']): validZoom = -1 elif (tilesource['maxZoom'] and iz > tilesource['maxZoom']): validZoom = 1 if validZoom != 0: #returnBlank = True returnErrorTile = True if returnBlankTile or returnErrorTile: return tile = None if iz >= settings.TILEJET['cache']['memory']['minZoom'] and iz <= settings.TILEJET['cache']['memory']['maxZoom']: #key = "{layer},{z},{x},{y},{ext}".format(layer=tilesource.name,x=ix,y=iy,z=iz,ext=ext) key = ",".join([tilesource['name'],str(iz),str(ix),str(iy),ext]) tilecache, tile = getTileFromCache( 
settings.CACHES['tiles']['LOCATION'], settings.CACHES['tiles'], 'tiles', key, True, GEVENT_MONKEY_PATCH=True) if not tilecache: error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_errors.txt" with open(error_file,'a') as f: line = "Error: Could not connect to cache (tiles)." f.write(line+"\n") return if tile: if verbose: print "task / cache hit for "+key else: if verbose: print "task / cache miss for "+key with open(indirect_file,'a') as f: line = log_format.format( status='indirect', tileorigin=tilesource['origin'], tilesource=tilesource['name'], z=iz,x=ix,y=iy, ip='-', datetime=now.isoformat()) f.write(line+"\n") from urllib2 import HTTPError try: if tilesource['type'] == TYPE_TMS: tile = requestTileFromSource(tilesource,ix,iy,iz,ext,True) elif tilesource['type'] == TYPE_TMS_FLIPPED: tile = requestTileFromSource(tilesource,ix,iyf,iz,ext,True) except HTTPError, err: error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_errors.txt" with open(error_file,'a') as f: line = "Error: HTTPError. Could not get tile ("+key+") from source." f.write(line+"\n") return except:
def info(request): #stats_tr = stats_tilerequest() #cache, stats_tr = get_from_cache( # settings.CACHES['default']['LOCATION'], # settings.CACHES['default'], # 'default', # 'stats_tilerequests', # GEVENT_MONKEY_PATCH=settings.TILEJET_GEVENT_MONKEY_PATCH) caches = [] c = settings.TILEJET['cache']['memory'] stats_c = stats_cache() if stats_c: size = int(stats_c['bytes']) maxsize = int(stats_c['limit_maxbytes']) size_percentage = format(((100.0 * size) / maxsize),'.4f')+"%" items = int(stats_c['curr_items']) caches.append({ 'name': 'memory', 'enabled': c['enabled'], 'description': c['description'], 'type': c['type'], 'size': formatMemorySize(size, original='B'), 'maxsize': formatMemorySize(maxsize, original='B'), 'size_percentage': size_percentage, 'items': items, 'minzoom': c['minZoom'], 'maxzoom': c['maxZoom'], 'expiration': c['expiration'], 'link_memcached': '/cache/stats/export/cache.json' }) heuristics = [] h = settings.TILEJET['heuristic']['down'] heuristics.append({ 'name': 'down', 'enabled': h['enabled'], 'description': h['description'] }) h = settings.TILEJET['heuristic']['up'] heuristics.append({ 'name': 'up', 'enabled': h['enabled'], 'description': h['description'] }) h = settings.TILEJET['heuristic']['nearby'] heuristics.append({ 'name': 'nearby', 'enabled': h['enabled'], 'description': h['description'] }) # Build Queues List queues = [] try: import celery for key, raw_queues in celery.current_app.control.inspect().active_queues().items(): for q in raw_queues: queues.append({ 'name': getValue(q, u'name', fallback=''), 'routing_key': getValue(q, u'routing_key', fallback=''), 'durable': getValue(q, u'durable', fallback=False), 'ttl': getValue(q[u'queue_arguments'], u'x-message-ttl', fallback=-1) }) #import pyrabbit.api #pyrabbit_client = pyrabbit.api.Client(settings.BROKER_DOMAIN+':'+settings.BROKER_PORT, settings.BROKER_USER, settings.BROKER_PASSWORD) for q in queues: q['messages'] = 0 except: print "Could not generate queues. 
Is celery or RabbitMQ offline?" # Build Schedules Tasks scheduled = [] try: import celery s = beat.Scheduler(app=celery.current_app) scheduled = s.schedule.keys() except: print "Could not build scheduled tasks. Is celery beat running?" #topics = [] #try: # from kafka import KafkaClient # kafka = KafkaClient(settings.TILEJET_GEOWATCH_HOST) # for topic in kafka.topics: # topic2 = { # 'name': topic, # 'partitions': len(kafka.topic_partitions.get(topic, [])) # } # topics.append(topic2) #except: # print "Could not generate topics. Is Kafka offline?" client = provision_geowatch_client() topics = client.list_topics() context_dict = { 'origins': getTileOrigins(), 'sources': getTileSources(), 'caches': caches, 'heuristics': heuristics, 'queues': queues, 'topics': topics, 'scheduled': scheduled, 'stats': settings.TILEJET_LIST_STATS, 'hosts': settings.PROXY_ALLOWED_HOSTS } return render_to_response( "cache/info.html", RequestContext(request, context_dict))
def proxy(request): PROXY_ALLOWED_HOSTS = getattr(settings, 'PROXY_ALLOWED_HOSTS', ()) host = None if 'url' not in request.GET: return HttpResponse("The proxy service requires a URL-encoded URL as a parameter.", status=400, content_type="text/plain" ) raw_url = request.GET['url'] url = urlsplit(raw_url) locator = url.path if url.query != "": locator += '?' + url.query if url.fragment != "": locator += '#' + url.fragment if not settings.DEBUG: if not validate_host(url.hostname, PROXY_ALLOWED_HOSTS): return HttpResponse("DEBUG is set to False but the host of the path provided to the proxy service" " is not in the PROXY_ALLOWED_HOSTS setting.", status=403, content_type="text/plain" ) headers = {} if settings.SESSION_COOKIE_NAME in request.COOKIES and is_safe_url(url=raw_url, host=host): headers["Cookie"] = request.META["HTTP_COOKIE"] if request.method in ("POST", "PUT") and "CONTENT_TYPE" in request.META: headers["Content-Type"] = request.META["CONTENT_TYPE"] print "Raw URL: "+ raw_url match_regex = None match_tilesource = None # Try to match against existing tile sources #tilesources = TileSource.objects.exclude(pattern__isnull=True).exclude(pattern__exact='') tilesources = getTileSources(proxy=True) for tilesource in tilesources: match = match_pattern_url(tilesource.pattern, raw_url) if match: match_regex = match match_tilesource = tilesource break if match_tilesource and match_regex: return proxy_tilesource(request, match_tilesource, match_regex) #else: # return HttpResponse('No matching tilesource found.',RequestContext(request, {}), status=404) # Try to match against existing origins that can automatically create tile sources (auto=true) match_tileorigin = None #tileorigins = TileOrigin.objects.exclude(pattern__isnull=True).exclude(pattern__exact='').filter(auto=True) tileorigins = getTileOrigins(proxy=True) for tileorigin in tileorigins: match = match_pattern_url(tileorigin.pattern, raw_url) if match: match_regex = match match_tileorigin = tileorigin break if 
match_tileorigin and match_regex: to = match_tileorigin if to.multiple: slug = getRegexValue(match_regex, 'slug') ts_url = to.url.replace('{slug}', slug) #print "ts_url: "+ts_url if TileSource.objects.filter(url=ts_url).count() > 0: print "Error: This souldn't happen. You should have matched the tilesource earlier so you don't duplicate" return None exts = string_to_list(to.extensions) ts_pattern = url_to_pattern(ts_url, extensions=exts) ts = TileSource(auto=True,url=ts_url,pattern=ts_pattern,name=slug,type=to.type,extensions=exts,origin=to) ts.save() reloadTileSources(proxy=False) reloadTileSources(proxy=True) return proxy_tilesource(request, ts, match_regex) else: ts = TileSource(auto=True,url=to.url,pattern=to.pattern,name=to.name,type=to.type,extensions=to.extensions) ts.save() reloadTileSources(proxy=False) reloadTileSources(proxy=True) return proxy_tilesource(request, ts, match_regex) else: return HttpResponse('No matching tile origin or tile source found.',RequestContext(request, {}), status=404)