def sources_json(request):
    """Return a JSON array describing every tile source, with request stats.

    Per-source request counters come from the tile-request stats blob, loaded
    from the memory cache first (if STATS_SAVE_MEMORY) and falling back to the
    stats file (if STATS_SAVE_FILE). When no stats can be loaded, all four
    counters are reported as -1 to mark them unavailable.
    """
    now = datetime.datetime.now()
    dt = now
    #######
    stats = None
    if settings.STATS_SAVE_MEMORY:
        cache, stats = get_from_cache(
            settings.CACHES['default']['LOCATION'],
            settings.CACHES['default'],
            'default',
            'stats_tilerequests',
            GEVENT_MONKEY_PATCH=settings.TILEJET_GEVENT_MONKEY_PATCH)
    if settings.STATS_SAVE_FILE and not stats:
        stats = get_from_file(settings.STATS_REQUEST_FILE, filetype='json')
    sources = []
    for source in getTileSources():
        link_geojson = settings.SITEURL+'cache/stats/export/geojson/15/source/'+source['name']+'.geojson'
        link_proxy_internal = settings.SITEURL+'proxy/?url='+(source['url']).replace("{ext}","png")
        # External proxy link depends on the source addressing scheme:
        # TMS-style sources use z/x/y, Bing-style sources use a quadkey {u}.
        link_proxy_external = ""
        if source['type'] in [TYPE_TMS, TYPE_TMS_FLIPPED]:
            link_proxy_external = settings.SITEURL+'cache/proxy/tms/origin/'+source['origin']+'/source/'+source['name']+'/{z}/{x}/{y}.png'
        elif source['type'] == TYPE_BING:
            link_proxy_external = settings.SITEURL+'cache/proxy/bing/origin/'+source['origin']+'/source/'+source['name']+'{u}.png'
        # Request counters: real values when stats are available, -1 otherwise.
        if stats:
            requests_all = getValue(stats['by_source'], source['name'], 0)
            requests_year = getValue(getValue(stats['by_year_source'], dt.strftime('%Y')), source['name'], 0)
            requests_month = getValue(getValue(stats['by_month_source'], dt.strftime('%Y-%m')), source['name'], 0)
            requests_today = getValue(getValue(stats['by_date_source'], dt.strftime('%Y-%m-%d')), source['name'], 0)
        else:
            requests_all = requests_year = requests_month = requests_today = -1
        sources.append({
            'name': source['name'],
            'type': source['type_title'],
            'origin': source['origin'],
            'url': source['url'],
            'requests_all': requests_all,
            'requests_year': requests_year,
            'requests_month': requests_month,
            'requests_today': requests_today,
            'link_proxy': link_proxy_internal,
            'link_id': 'http://www.openstreetmap.org/edit#?background=custom:'+link_proxy_external,
            'link_geojson': link_geojson,
            'link_geojsonio': 'http://geojson.io/#data=data:text/x-url,'+link_geojson
        })
    return HttpResponse(json.dumps(sources),
                        content_type="application/json")
def stats_geojson(request, z=None, origin=None, source=None, date=None):
    """Export tile-request counts at zoom level ``z`` as a GeoJSON FeatureCollection.

    The stats tree consulted depends on which filters are given: origin+date,
    source+date, origin, source, date, or the global by-location totals.
    Location keys are "z/x/y" strings; only keys whose zoom matches ``z`` are
    emitted. Feature ids reflect the key's 1-based position in the iteration,
    so ids may be non-contiguous after zoom filtering (same as before).
    """
    iz = int(z)
    features = []
    stats = stats_tilerequest()
    # Select the per-location counter dict matching the requested filters.
    root = None
    if origin and date:
        root = getValue(getValue(stats['by_origin_date_location'],origin),date)
    elif source and date:
        root = getValue(getValue(stats['by_source_date_location'],source),date)
    elif origin:
        root = stats['by_origin_location'][origin]
    elif source:
        root = stats['by_source_location'][source]
    elif date:
        root = stats['by_date_location'][date]
    else:
        root = stats['by_location']
    for i, key in enumerate(root, start=1):
        tz, tx, ty = [int(part) for part in key.split("/")]
        if iz == tz:
            count = root[key]
            geom = tms_to_geojson(tx,ty,tz)
            props = {"x":tx, "y":ty, "z":tz, "location": key, "count": count}
            features.append( Feature(geometry=geom, id=i, properties=props) )
    geojson = FeatureCollection( features )
    return HttpResponse(json.dumps(geojson),
                        content_type="application/json")
def origins_json(request):
    """Return a JSON array describing every TileOrigin, with request stats.

    Stats are read from the memory cache only. When the stats blob is missing,
    the four request counters are reported as 0 (note: sources_json uses -1
    for the same condition; preserved as-is for backward compatibility).
    """
    now = datetime.datetime.now()
    dt = now
    #######
    cache, stats = get_from_cache(
        settings.CACHES['default']['LOCATION'],
        settings.CACHES['default'],
        'default',
        'stats_tilerequests',
        GEVENT_MONKEY_PATCH=settings.TILEJET_GEVENT_MONKEY_PATCH)
    origins = []
    for origin in TileOrigin.objects.all().order_by('name','type'):
        link_geojson = settings.SITEURL+'cache/stats/export/geojson/15/origin/'+origin.name+'.geojson'
        # Request counters: real values when stats are available, 0 otherwise.
        if stats:
            requests_all = getValue(stats['by_origin'], origin.name, 0)
            requests_year = getValue(getValue(stats['by_year_origin'], dt.strftime('%Y')), origin.name, 0)
            requests_month = getValue(getValue(stats['by_month_origin'], dt.strftime('%Y-%m')), origin.name, 0)
            requests_today = getValue(getValue(stats['by_date_origin'], dt.strftime('%Y-%m-%d')), origin.name, 0)
        else:
            requests_all = requests_year = requests_month = requests_today = 0
        origins.append({
            'name': origin.name,
            'description': origin.description,
            'type': origin.type_title(),
            'multiple': origin.multiple,
            'auto': origin.auto,
            'url': origin.url,
            'requests_all': requests_all,
            'requests_year': requests_year,
            'requests_month': requests_month,
            'requests_today': requests_today,
            'link_geojson': link_geojson,
            'link_geojsonio': 'http://geojson.io/#data=data:text/x-url,'+link_geojson
        })
    return HttpResponse(json.dumps(origins),
                        content_type="application/json")
def requestTileFromSource(tilesource=None, x=None, y=None, z=None, u=None, ext=None, verbose=False): print "requestTileFromSource" if tilesource["type"] == TYPE_BING: if tilesource["auth"]: url = tilesource["url"].format(u=u, ext=ext, auth=ts["auth"]) else: url = tilesource["url"].format(u=u, ext=ext) else: if tilesource["auth"]: url = tilesource["url"].format(x=x, y=y, z=z, ext=ext, auth=ts["auth"]) else: url = tilesource["url"].format(x=x, y=y, z=z, ext=ext) contentType = "image/png" # contentType = "text/html" if verbose: print "Requesting tile from " + url print "URL: " + url params = None # params = {'access_token': 'pk.eyJ1IjoiaGl1IiwiYSI6IlhLWFA4Z28ifQ.4gQiuOS-lzhigU5PgMHUzw'} request = make_request(url=url, params=params, auth=None, data=None, contentType=contentType) if request.getcode() != 200: raise Exception( "Could not fetch tile from source with url {url}: Status Code {status}".format( url=url, status=request.getcode() ) ) # image = binascii.hexlify(request.read()) # image = io.BytesIO(request.read())) image = request.read() info = request.info() headers = {"Expires": getValue(info, "Expires", fallback="")} tile = {"headers": headers, "data": image} return tile
def taskUpdateStats():
    """Aggregate tile-request statistics from MongoDB and publish the result.

    Scheduled task: reads the total counter plus every stat described by
    settings.TILEJET_LIST_STATS from MongoDB, folds them into a nested
    ``stats`` dict, then writes the dict to a JSON file (STATS_SAVE_FILE)
    and/or the 'default' Django cache (STATS_SAVE_MEMORY) under the key
    'stats_tilerequests'.
    """
    GEVENT_MONKEY_PATCH = settings.TILEJET_GEVENT_MONKEY_PATCH
    #=======#
    now = datetime.datetime.now()
    stats = {}
    if GEVENT_MONKEY_PATCH:
        # Import Gevent and monkey patch so blocking socket calls cooperate
        # with gevent greenlets.
        from gevent import monkey
        monkey.patch_all()
    # Update MongoDB
    from pymongo import MongoClient
    client = None
    db = None
    try:
        #client = MongoClient('localhost', 27017)
        #client = MongoClient('/tmp/mongodb-27017.sock')
        client = MongoClient(settings.TILEJET_DBHOST, settings.TILEJET_DBPORT)
        db = client[settings.TILEJET_DBNAME]
    except:
        # Deliberate best-effort: a failed connection is logged to the daily
        # error file and the task simply skips aggregation below.
        client = None
        db = None
        errorline = "Error: Could not connet to stats database from scheduled taskUpdateStats. Most likely issue with connection pool"
        error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_errors.txt"
        with open(error_file,'a') as f:
            f.write(errorline+"\n")
    if client and db:
        stats_total = db.stats_total
        stats = {
            'total': {
                'count': getStat(stats_total, 'total.count', 0)
            }
        }
        #print stats
        for desc in settings.TILEJET_LIST_STATS:
            name = desc['name']
            attrs = desc['attributes']
            window = getValue(desc,'window')
            #minvalue = None
            # When the stat has a time window, restrict the query to documents
            # at or after (now - window delta), formatted per the window
            # attribute's log-field format.
            query = None
            if window:
                td = window['delta']
                mintime = now - datetime.timedelta(**td)
                minvalue = mintime.strftime(LOG_FIELD_FORMATS[window['attribute']])
                query = {window['attribute']: {"$gte": minvalue}}
            print "Query:"
            print query
            print desc
            if len(attrs) == 0:
                # Scalar stat: a single aggregated value.
                for doc in getStats(db[desc['collection']],[],query=query):
                    stats[name] = doc['value']
            elif len(attrs) > 0:
                # Composite stat: build a nested dict keyed by each attribute
                # in turn, creating intermediate levels on demand.
                stats[name] = {}
                docs = getStats(db[desc['collection']],[],query=query)
                for doc in docs:
                    v = doc['value']
                    obj = stats[name]
                    for i in range(len(attrs)-1):
                        a = attrs[i]
                        try:
                            obj = obj[doc[a]]
                        except KeyError, e:
                            obj[doc[a]] = {}
                            obj = obj[doc[a]]
                    # Leaf level: last attribute maps to the aggregated value.
                    obj[doc[attrs[len(attrs)-1]]] = v
    print stats
    if settings.STATS_SAVE_FILE:
        print "Saving to file"
        import json
        commit_to_file(settings.STATS_REQUEST_FILE, json.dumps(stats), binary=False)
    if settings.STATS_SAVE_MEMORY:
        print "Saving to memory"
        commit_to_cache('default', 'stats_tilerequests', stats, GEVENT_MONKEY_PATCH=GEVENT_MONKEY_PATCH)
def _requestTile(request, tileservice=None, tilesource=None, tileorigin=None, z=None, x=None, y=None, u=None, ext=None):
    """Serve one tile for a source, going through the memory cache when possible.

    The tile is addressed either by quadkey ``u`` or by ``x``/``y``/``z``.
    Pipeline: resolve coordinates -> enqueue indirect (nearby/parent/children)
    tile prefetch requests via GeoWatch -> short-circuit with a blank tile
    (outside source extents) or a red tile (outside source zoom range) ->
    try the tile cache, falling back to the upstream source and writing back
    (async via celery or synchronously) -> respond with a PNG HttpResponse.
    """
    print "_requestTile"
    now = datetime.datetime.now()
    ip = getIPAddress(request)
    #==#
    if not tileorigin:
        tileorigin = tilesource['origin']
    #==#
    verbose = True
    ix = None
    iy = None
    iyf = None
    iz = None
    indirectTiles = None
    nearbyTiles = None
    parentTiles = None
    childrenTiles = None
    gw_client, gw_logs, gw_requests = None, None, None
    if settings.GEOWATCH_ENABLED:
        gw_client, gw_logs, gw_requests = provision_client_logs_requests()
    # Resolve tile coordinates from either a quadkey or x/y/z.
    if u:
        iz, ix, iy = quadkey_to_tms(u)
    elif x and y and z:
        ix = int(x)
        iy = int(y)
        iz = int(z)
        if tilesource['type'] == TYPE_BING:
            u = tms_to_quadkey(ix, iy, iz)
    # iy is the y used for this service; iyf is the flipped-scheme y.
    iy, iyf = getYValues(tileservice,tilesource,ix,iy,iz)
    tile_bbox = tms_to_bbox(ix,iy,iz)
    if tilesource['cacheable']:
        # Collect indirect tiles (prefetch candidates) per the configured
        # heuristics, then enqueue them as GeoWatch tile requests.
        indirectTiles = []
        if settings.TILEJET['heuristic']['nearby']['enabled']:
            ir = settings.TILEJET['heuristic']['nearby']['radius']
            nearbyTiles = getNearbyTiles(ix, iy, iz, ir)
            indirectTiles.extend(nearbyTiles)
            #print "Nearby Tiles", nearbyTiles
            #print "Indirect Tiles", indirectTiles
        if settings.TILEJET['heuristic']['up']['enabled']:
            iDepth = getValue(settings.TILEJET['heuristic']['up'],'depth')
            if iDepth:
                parentTiles = getParentTiles(ix, iy, iz, depth=iDepth)
            else:
                parentTiles = getParentTiles(ix, iy, iz)
            indirectTiles.extend(parentTiles)
            #print "Parent Tiles"
            #print parentTiles
        heuristic_down = settings.TILEJET['heuristic']['down']
        if heuristic_down['enabled']:
            depth = heuristic_down['depth']
            minZoom = heuristic_down['minZoom']
            maxZoom = heuristic_down['maxZoom']
            childrenTiles = getChildrenTiles(ix, iy, iz, depth, minZoom, maxZoom)
            indirectTiles.extend(childrenTiles)
            #print "Children Tiles: "+str(len(childrenTiles))
            #print childrenTiles
        #print "indirectTiles: ", indirectTiles
        if gw_requests and indirectTiles:
            start = time.time()
            gw_requests.send_tile_requests(
                str(tilesource['id']),
                indirectTiles,
                extension=ext,
                now=now)
            print "Duration Q: ", (time.time() - start)
    #Check if requested tile is within source's extents
    returnBlankTile = False
    returnErrorTile = False
    intersects = True
    if tilesource['extents']:
        intersects = bbox_intersects_source(tilesource,ix,iyf,iz)
    if not intersects:
        returnBlankTile = True
    validZoom = 0
    #Check if inside source zoom levels
    if tilesource['minZoom'] or tilesource['maxZoom']:
        if (tilesource['minZoom'] and iz < tilesource['minZoom']):
            validZoom = -1
        elif (tilesource['maxZoom'] and iz > tilesource['maxZoom']):
            validZoom = 1
        if validZoom != 0:
            #returnBlank = True
            returnErrorTile = True
    if returnBlankTile:
        # Outside the source's extents: serve a blank 256x256 PNG.
        print "responding with blank image"
        image = blankTile(width=256, height=256)
        response = HttpResponse(content_type="image/png")
        image.save(response, "PNG")
        return response
    if returnErrorTile:
        # Outside the source's zoom range: serve a red 256x256 PNG.
        print "responding with a red image"
        image = redTile(width=256, height=256)
        response = HttpResponse(content_type="image/png")
        image.save(response, "PNG")
        return response
    tile = None
    # Only consult the memory cache for cacheable sources within the cache's
    # configured zoom window; otherwise bypass straight to the source.
    if tilesource['cacheable'] and iz >= settings.TILEJET['cache']['memory']['minZoom'] and iz <= settings.TILEJET['cache']['memory']['maxZoom']:
        #key = "{layer},{z},{x},{y},{ext}".format(layer=tilesource.name,x=ix,y=iy,z=iz,ext=ext)
        key = ",".join([tilesource['name'],str(iz),str(ix),str(iy),ext])
        tilecache, tile = getTileFromCache(
            settings.CACHES['tiles']['LOCATION'],
            settings.CACHES['tiles'],
            'tiles',
            key,
            True,
            GEVENT_MONKEY_PATCH=True)
        if not tilecache:
            print "Error: Could not connect to cache (tiles)."
            line = "Error: Could not connect to cache (tiles)."
            logTileRequestError(line, now)
        if tile:
            if verbose:
                print "cache hit for "+key
            logTileRequest(tileorigin, tilesource['name'], x, y, z, ext, 'hit', now, ip, gw_logs=gw_logs)
        else:
            if tilecache and verbose:
                print "cache miss for "+key
            logTileRequest(tileorigin, tilesource['name'], x, y, z, ext, 'miss', now, ip, gw_logs=gw_logs)
            # Cache miss: fetch from the upstream source using the y value
            # appropriate for the source's addressing scheme.
            if tilesource['type'] == TYPE_TMS:
                tile = requestTileFromSource(tilesource=tilesource,x=ix,y=iy,z=iz,ext=ext,verbose=True)
            elif tilesource['type'] == TYPE_TMS_FLIPPED:
                tile = requestTileFromSource(tilesource=tilesource,x=ix,y=iyf,z=iz,ext=ext,verbose=True)
            elif tilesource['type'] == TYPE_BING:
                tile = requestTileFromSource(tilesource=tilesource,u=u,ext=ext,verbose=True)
            if settings.ASYNC_WRITEBACK:
                # Write the tile back to the cache via the celery 'writeback'
                # queue; tile data is base64-encoded for task transport.
                from base64 import b64encode
                try:
                    taskWriteBackTile.apply_async(
                        args=[key, json.dumps(tile['headers']), b64encode(tile['data'])],
                        kwargs=None,
                        queue="writeback")
                except:
                    # Best-effort: writeback failure is logged, not raised.
                    print "Error: Could not connect to writeback queue."
                    line = "Error: Could not connect to writeback queue."
                    logTileRequestError(line, now)
            else:
                try:
                    tilecache.set(key, tile)
                except:
                    # Best-effort: writeback failure is logged, not raised.
                    print "Error: Could not write back tile synchronously."
                    line = "Error: Could not write back tile synchronously."
                    logTileRequestError(line, now)
    else:
        if verbose:
            print "cache bypass for "+tilesource['name']+"/"+str(iz)+"/"+str(ix)+"/"+str(iy)
        logTileRequest(tileorigin, tilesource['name'], x, y, z, ext, 'bypass', now, ip, gw_logs=gw_logs)
        if tilesource['type'] == TYPE_TMS:
            tile = requestTileFromSource(tilesource=tilesource,x=ix,y=iy,z=iz,ext=ext,verbose=True)
        elif tilesource['type'] == TYPE_TMS_FLIPPED:
            tile = requestTileFromSource(tilesource=tilesource,x=ix,y=iyf,z=iz,ext=ext,verbose=True)
        elif tilesource['type'] == TYPE_BING:
            tile = requestTileFromSource(tilesource=tilesource,u=u,ext=ext,verbose=True)
    if not tile:
        print "responding with a red image"
        image = redTile(width=256, height=256)
        response = HttpResponse(content_type="image/png")
        image.save(response, "PNG")
        return response
    #print "Headers:"
    #print tile['headers']
    image = Image.open(StringIO.StringIO(tile['data']))
    #Is Tile blank. then band.getextrema should return 0,0 for band 4
    #Tile Cache watermarking is messing up bands
    #bands = image.split()
    #for band in bands:
    #    print band.getextrema()
    response = HttpResponse(content_type="image/png")
    image.save(response, "PNG")
    return response
def info(request):
    """Render the cache/info.html dashboard with runtime state of the service.

    Gathers: memory-cache statistics (size, item count, zoom window),
    the configured heuristics, active celery queues, scheduled beat tasks,
    and GeoWatch topics. Queue/schedule introspection is best-effort: any
    failure is swallowed with a console message so the page still renders.
    """
    #stats_tr = stats_tilerequest()
    #cache, stats_tr = get_from_cache(
    #    settings.CACHES['default']['LOCATION'],
    #    settings.CACHES['default'],
    #    'default',
    #    'stats_tilerequests',
    #    GEVENT_MONKEY_PATCH=settings.TILEJET_GEVENT_MONKEY_PATCH)
    caches = []
    c = settings.TILEJET['cache']['memory']
    stats_c = stats_cache()
    if stats_c:
        # Memcached reports sizes in bytes; derive a percentage-used string.
        size = int(stats_c['bytes'])
        maxsize = int(stats_c['limit_maxbytes'])
        size_percentage = format(((100.0 * size) / maxsize),'.4f')+"%"
        items = int(stats_c['curr_items'])
        caches.append({
            'name': 'memory',
            'enabled': c['enabled'],
            'description': c['description'],
            'type': c['type'],
            'size': formatMemorySize(size, original='B'),
            'maxsize': formatMemorySize(maxsize, original='B'),
            'size_percentage': size_percentage,
            'items': items,
            'minzoom': c['minZoom'],
            'maxzoom': c['maxZoom'],
            'expiration': c['expiration'],
            'link_memcached': '/cache/stats/export/cache.json'
        })
    # Summaries of the three prefetch heuristics (down/up/nearby).
    heuristics = []
    h = settings.TILEJET['heuristic']['down']
    heuristics.append({
        'name': 'down',
        'enabled': h['enabled'],
        'description': h['description']
    })
    h = settings.TILEJET['heuristic']['up']
    heuristics.append({
        'name': 'up',
        'enabled': h['enabled'],
        'description': h['description']
    })
    h = settings.TILEJET['heuristic']['nearby']
    heuristics.append({
        'name': 'nearby',
        'enabled': h['enabled'],
        'description': h['description']
    })
    # Build Queues List
    queues = []
    try:
        import celery
        # Ask every celery worker for its active queues.
        for key, raw_queues in celery.current_app.control.inspect().active_queues().items():
            for q in raw_queues:
                queues.append({
                    'name': getValue(q, u'name', fallback=''),
                    'routing_key': getValue(q, u'routing_key', fallback=''),
                    'durable': getValue(q, u'durable', fallback=False),
                    'ttl': getValue(q[u'queue_arguments'], u'x-message-ttl', fallback=-1)
                })
        #import pyrabbit.api
        #pyrabbit_client = pyrabbit.api.Client(settings.BROKER_DOMAIN+':'+settings.BROKER_PORT, settings.BROKER_USER, settings.BROKER_PASSWORD)
        # Message counts are not currently collected; placeholder of 0.
        for q in queues:
            q['messages'] = 0
    except:
        print "Could not generate queues. Is celery or RabbitMQ offline?"
    # Build Schedules Tasks
    scheduled = []
    try:
        import celery
        s = beat.Scheduler(app=celery.current_app)
        scheduled = s.schedule.keys()
    except:
        print "Could not build scheduled tasks. Is celery beat running?"
    #topics = []
    #try:
    #    from kafka import KafkaClient
    #    kafka = KafkaClient(settings.TILEJET_GEOWATCH_HOST)
    #    for topic in kafka.topics:
    #        topic2 = {
    #            'name': topic,
    #            'partitions': len(kafka.topic_partitions.get(topic, []))
    #        }
    #        topics.append(topic2)
    #except:
    #    print "Could not generate topics. Is Kafka offline?"
    client = provision_geowatch_client()
    topics = client.list_topics()
    context_dict = {
        'origins': getTileOrigins(),
        'sources': getTileSources(),
        'caches': caches,
        'heuristics': heuristics,
        'queues': queues,
        'topics': topics,
        'scheduled': scheduled,
        'stats': settings.TILEJET_LIST_STATS,
        'hosts': settings.PROXY_ALLOWED_HOSTS
    }
    return render_to_response(
        "cache/info.html", RequestContext(request, context_dict))