def markers(db):
    params = Params()
    if (not params.users) and (not params.source) and (params.zoom < 7):
        return
    params.limit = 200
    params.full = False
    return _errors_geo(db, params)
def _user(db, lang, username):
    params = Params()
    if username:
        params.users = username.split(",")
    params.limit = 500
    params.full = True
    username = "******".join(params.users)
    errors = query._gets(db, params)
    return [params, username, errors]
def _user_count(db, username=None):
    params = Params()
    if username:
        params.users = username.split(",")
    if not params.users:
        return
    res = query._count(db, params, ['class.level'], ['class.level'])
    ret = {1: 0, 2: 0, 3: 0}
    for (l, c) in res:
        ret[l] = c
    return ret
def errors(db, langs):
    params = Params(max_limit=10000)
    results = query._gets(db, params)
    out = []
    for res in results:
        i = {
            'lat': float(res["lat"]),
            'lon': float(res["lon"]),
            'id': res["uuid"],
            'item': str(res["item"]),
        }
        if params.full:
            i.update({
                'lat': float(res["lat"]),
                'lon': float(res["lon"]),
                'id': res["uuid"],
                'item': str(res["item"]),
                'source': res["source"],
                'class': res["class"],
                'subtitle': utils.i10n_select(res["subtitle"], langs),
                'title': utils.i10n_select(res["title"], langs),
                'level': res["level"],
                'update': str(res["timestamp"]),
                'usernames': list(map(lambda elem: "username" in elem and elem["username"] or "", res['elems'] or [])),
                'osm_ids': dict(map(lambda k_g: [
                        {'N': 'nodes', 'W': 'ways', 'R': 'relations'}[k_g[0]],
                        list(map(lambda g: g['id'], k_g[1]))
                    ],
                    groupby(sorted(res['elems'] or [], key=lambda e: e['type']), lambda e: e['type']))),
            })
        out.append(i)
    return {'issues': out}
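# Illustrative sketch (not part of the original module): the 'osm_ids' expression
# in errors() above sorts the issue's elements by OSM type before groupby(), then
# maps each group to a list of ids. With a hypothetical 'elems' value such as
#
#   elems = [{'type': 'N', 'id': 1}, {'type': 'W', 'id': 7}, {'type': 'N', 'id': 3}]
#
# the pipeline yields:
#
#   {'nodes': [1, 3], 'ways': [7]}
#
# The sorted() call matters because itertools.groupby only groups consecutive items.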
def get_data(db, options):
    sqlbase = """
SELECT
    date,
    SUM(count)
FROM (
    SELECT
        date_trunc('day', timestamp) AS date,
        AVG(count) AS count
    FROM (
        SELECT
            marker.source,
            marker.class,
            marker.count,
            generate_series(
                lower(timestamp_range),
                coalesce(upper(timestamp_range) - '23 hour'::interval, now()),
                '1 day'::interval
            )::timestamp without time zone AS timestamp
        FROM
            %s
        WHERE
            %s
    ) AS t
    WHERE
        %s
    GROUP BY
        source,
        class,
        date_trunc('day', timestamp)
) AS t
GROUP BY
    date
ORDER BY
    date
"""
    params = Params()
    join, where = query._build_param(
        db, None, params.source, params.item, params.level, None, params.classs,
        params.country, params.useDevItem, None, params.tags, None,
        stats=True, start_date=params.start_date, end_date=params.end_date)
    where2 = ["1 = 1"]
    if params.start_date:
        where2.append("timestamp >= '%s'" % params.start_date.isoformat())
    if params.end_date:
        where2.append("timestamp < '%s'" % params.end_date.isoformat())
    where2 = " AND ".join(where2)
    sql = sqlbase % (join, where, where2)
    if len(sys.argv) > 1:
        print(sql)
    result = []
    db.execute(sql)
    for r in db.fetchall():
        result.append((r[0], r[1]))
    return result
def graph(db, format='png'):
    try:
        data = errors_graph.make_plt(db, Params(), format)
        response.content_type = {
            'png': 'image/png',
            'svg': 'image/svg+xml',
            'pdf': 'application/pdf',
            'csv': 'text/csv',
            'json': 'application/json',
        }[format]
        return data
    except Exception as e:
        response.content_type = "text/plain"
        import traceback
        out = io.StringIO()
        traceback.print_exc(file=out)
        return out.getvalue() + "\n"
def issues_mvt(db, z, x, y, format):
    lon1, lat2 = tiles.tile2lonlat(x, y, z)
    lon2, lat1 = tiles.tile2lonlat(x + 1, y + 1, z)
    dlon = (lon2 - lon1) / 256
    dlat = (lat2 - lat1) / 256

    params = Params(max_limit=50 if z > 18 else 10000)
    params.tilex = x
    params.tiley = y
    params.zoom = z
    params.lat = None
    params.lon = None
    params.full = False

    if params.zoom > 18:
        return

    if (not params.users) and (not params.source) and (params.zoom < 7):
        return

    results = query._gets(db, params) if z >= 7 else None

    if format == 'mvt':
        tile = _errors_mvt(db, results, z, lon1, lat1, lon2, lat2, params.limit)
        if tile:
            response.content_type = 'application/vnd.mapbox-vector-tile'
            return tile
        else:
            return HTTPError(404)
    elif format in ('geojson', 'json'):  # Fall back to GeoJSON
        tile = _errors_geojson(db, results, z, lon1, lat1, lon2, lat2, params.limit)
        if tile:
            response.content_type = 'application/vnd.geo+json'
            return tile
        else:
            return []
    else:
        return HTTPError(404)
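# A minimal sketch, assuming tiles.tile2lonlat() follows the standard Web Mercator
# slippy-map scheme used by issues_mvt() above (and heat() below). The name
# _tile2lonlat_sketch and the inline import are illustrative only; the real helper
# lives in the tiles module and may differ in signature or rounding.
def _tile2lonlat_sketch(x, y, z):
    import math
    n = 2.0 ** z
    lon = x / n * 360.0 - 180.0  # west edge of tile column x
    lat = math.degrees(math.atan(math.sinh(math.pi * (1.0 - 2.0 * y / n))))  # north edge of tile row y
    return lon, lat
# Calling it for (x, y, z) and (x + 1, y + 1, z), as above, yields the tile's
# north-west and south-east corners respectively.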
def matrix(db, lang):
    params = Params(default_limit=None)
    errors_groups = query._count(db, params, [
        "markers.item",
        "markers.class",
        "sources.country",
        "items.menu->'en'",
    ])

    analysers = defaultdict(lambda: defaultdict(int))
    analysers_sum = defaultdict(int)
    countries_sum = defaultdict(int)
    total = 0
    for row in errors_groups:
        item, class_, country, menu, count = row
        analyser = '{}/{} {}'.format(item, class_, menu)
        analysers[analyser][country] += count
        analysers_sum[analyser] += count
        countries_sum[country] += count
        total += count

    return dict(total=total, countries_sum=countries_sum, analysers_sum=analysers_sum, analysers=analysers)
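# Illustrative only (item/class/country values are hypothetical): given two grouped
# rows such as
#
#   (1010, 1, 'france', 'Deleted objects', 3)
#   (1010, 1, 'italy',  'Deleted objects', 2)
#
# matrix() above accumulates
#
#   analysers     = {'1010/1 Deleted objects': {'france': 3, 'italy': 2}}
#   analysers_sum = {'1010/1 Deleted objects': 5}
#   countries_sum = {'france': 3, 'italy': 2}
#   total         = 5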
def errors(db, lang):
    params = Params()
    results = query._gets(db, params)
    out = OrderedDict()

    if not params.full:
        out["description"] = ["lat", "lon", "error_id", "item"]
    else:
        out["description"] = ["lat", "lon", "error_id", "item", "source", "class",
                              "elems", "subclass", "subtitle", "title", "level",
                              "update", "username"]

    out["errors"] = []
    for res in results:
        lat = res["lat"]
        lon = res["lon"]
        error_id = res["id"]
        item = res["item"] or 0

        if not params.full:
            out["errors"].append([str(lat), str(lon), str(error_id), str(item)])
        else:
            source = res["source"]
            classs = res["class"]
            elems = '_'.join(map(
                lambda elem: {'N': 'node', 'W': 'way', 'R': 'relation'}[elem['type']] + str(elem['id']),
                res['elems'] or []))
            subclass = 0
            subtitle = utils.i10n_select(res['subtitle'], lang)
            subtitle = subtitle and subtitle['auto'] or ''
            title = utils.i10n_select(res['title'], lang)
            title = title and title['auto'] or ''
            level = res["level"]
            update = res["timestamp"]
            username = '******'.join(map(
                lambda elem: "username" in elem and elem["username"] or "",
                res['elems'] or []))
            out["errors"].append([str(lat), str(lon), str(error_id), str(item),
                                  str(source), str(classs), str(elems), str(subclass),
                                  subtitle, title, str(level), str(update), username])

    return out
def index(db, lang, format=None):
    if "false-positive" in request.path:
        title = _("False positives")
        gen = "false-positive"
    elif "done" in request.path:
        title = _("Fixed issues")
        gen = "done"
    else:
        title = _("Information")
        gen = "error"

    if format not in ('rss', 'gpx', 'kml', 'josm', 'csv'):
        format = None

    countries = query_meta._countries(db, lang) if format == None else None
    items = query_meta._items(db, lang)

    params = Params()
    params.status = {
        "error": "open",
        "false-positive": "false",
        "done": "done",
    }[gen]
    params.limit = None
    params.fixable = None

    if format == None and params.item:
        errors_groups = query._count(
            db, params,
            [
                "dynpoi_class.item",
                "marker.source",
                "marker.class",
                "source.country",
                "source.analyser",
                "dynpoi_update_last.timestamp",
            ],
            ["dynpoi_item", "class"],
            [
                "min(dynpoi_item.menu::text)::jsonb AS menu",
                "min(class.title::text)::jsonb AS title",
            ],
        )

        total = 0
        for res in errors_groups:
            if res["count"] != -1:
                total += res["count"]
    else:
        errors_groups = []
        total = 0

        params.limit = request.params.get('limit', type=int, default=100)
        if params.limit > 10000:
            params.limit = 10000

    if (total > 0 and total < 1000) or params.limit:
        params.full = True
        errors = query._gets(db, params)

        if gen in ("false-positive", "done"):
            opt_date = "date"
        else:
            opt_date = "-1"
    else:
        opt_date = None
        errors = None

    if format == 'rss':
        response.content_type = 'application/rss+xml'
        tpl = 'errors/list.rss'
    elif format == 'gpx':
        response.content_type = 'application/gpx+xml'
        tpl = 'errors/list.gpx'
    elif format == 'kml':
        response.content_type = 'application/vnd.google-earth.kml+xml'
        tpl = 'errors/list.kml'
    elif format == 'josm':
        objects = set(sum(map(
            lambda error: list(map(
                lambda elem: elem['type'].lower() + str(elem['id']),
                error['elems'] or [])),
            errors), []))
        response.status = 302
        response.set_header('Location', 'http://localhost:8111/load_object?objects=%s' % ','.join(objects))
        return
    elif format == 'csv':
        output = io.StringIO()
        writer = csv.writer(output)
        h = ['uuid', 'source', 'item', 'class', 'level', 'title', 'subtitle',
             'country', 'analyser', 'timestamp', 'username', 'lat', 'lon', 'elems']
        writer.writerow(h)
        for res in errors:
            usernames = list(map(lambda elem: elem.get("username", ""), res['elems'] or []))
            elems = '_'.join(map(
                lambda elem: {'N': 'node', 'W': 'way', 'R': 'relation'}[elem['type']] + str(elem['id']),
                res['elems'] or []))
            writer.writerow(list(map(
                lambda a: usernames if a == 'username' else elems if a == 'elems' else res[a],
                h)))
        response.content_type = 'text/csv'
        return output.getvalue()
    else:
        tpl = 'errors/index'

    return template(tpl,
                    countries=countries, items=items,
                    errors_groups=errors_groups, total=total, errors=errors,
                    query=request.query_string,
                    country=params.country, item=params.item, level=params.level,
                    lang=lang[0], translate=translator(lang),
                    gen=gen, opt_date=opt_date, title=title,
                    website=utils.website, main_website=utils.main_website,
                    remote_url_read=utils.remote_url_read)
def heat(db, z, x, y):
    COUNT = 32  # heat-map grid resolution: COUNT x COUNT cells per tile
    lon1, lat2 = tiles.tile2lonlat(x, y, z)
    lon2, lat1 = tiles.tile2lonlat(x + 1, y + 1, z)

    params = Params()
    items = query._build_where_item(params.item, "items")
    params.tilex = x
    params.tiley = y
    params.zoom = z
    params.lat = None
    params.lon = None

    if params.zoom > 18:
        return

    # Total number of markers for the selected items, used to normalise the heat scale.
    db.execute("""
SELECT
    SUM((SELECT SUM(t) FROM UNNEST(number) t))
FROM
    items
WHERE
    """ + items)
    limit = db.fetchone()
    if limit and limit[0]:
        limit = float(limit[0])
    else:
        return HTTPError(404)

    join, where = query._build_param(
        db, None, params.source, params.item, params.level, params.users,
        params.classs, params.country, params.useDevItem, params.status,
        params.tags, params.fixable,
        tilex=params.tilex, tiley=params.tiley, zoom=params.zoom)
    join = join.replace("%", "%%")
    where = where.replace("%", "%%")
    sql = """
SELECT
    COUNT(*),
    ((lon-%(lon1)s) * %(count)s / (%(lon2)s-%(lon1)s) + 0.5)::int AS latn,
    ((lat-%(lat1)s) * %(count)s / (%(lat2)s-%(lat1)s) + 0.5)::int AS lonn,
    mode() WITHIN GROUP (ORDER BY items.marker_color) AS color
FROM
    """ + join + """
WHERE
    """ + where + """
GROUP BY
    latn,
    lonn
"""
    db.execute(sql, {"lon1": lon1, "lat1": lat1, "lon2": lon2, "lat2": lat2, "count": COUNT})

    features = []
    for row in db.fetchall():
        count, x, y, color = row
        # Logarithmic scaling of the raw cell count into a 0-255 intensity.
        count = max(
            int(math.log(count) / math.log(limit / ((z - 4 + 1 + math.sqrt(COUNT)) ** 2)) * 255),
            1 if count > 0 else 0)
        if count > 0:
            count = 255 if count > 255 else count
        features.append({
            "geometry": Polygon([(x, y), (x - 1, y), (x - 1, y - 1), (x, y - 1)]),
            "properties": {"color": int(color[1:], 16), "count": count},
        })

    response.content_type = 'application/vnd.mapbox-vector-tile'
    return mapbox_vector_tile.encode([{
        "name": "issues",
        "features": features,
    }], extents=COUNT)
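# A minimal sketch of the intensity mapping used in heat() above, factored out for
# readability; the helper name is hypothetical and not part of the original module.
# Raw cell counts are scaled logarithmically against the per-item total 'limit', so
# any non-empty cell renders with at least 1 and dense cells saturate at 255.
def _heat_intensity_sketch(count, limit, z, grid=32):
    import math
    if count <= 0:
        return 0
    scale = math.log(limit / ((z - 4 + 1 + math.sqrt(grid)) ** 2))
    return min(max(int(math.log(count) / scale * 255), 1), 255)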
def index(db, lang, format):
    if "false-positive" in request.path:
        title = _("False positives")
        gen = "false-positive"
    elif "done" in request.path:
        title = _("Fixed issues")
        gen = "done"
    else:
        title = _("Information")
        gen = "error"

    params = Params()
    params.status = {
        "error": "open",
        "false-positive": "false",
        "done": "done",
    }[gen]
    params.fixable = None

    items = query_meta._items_menu(db, lang)
    for res in items:
        if params.item == str(res["item"]):
            title += ' - ' + res['menu']['auto']

    params.limit = request.params.get('limit', type=int, default=100)
    if params.limit > 10000:
        params.limit = 10000
    params.full = True
    errors = query._gets(db, params)
    for error in errors:
        error["subtitle"] = i10n_select_auto(error["subtitle"], lang)
        error["title"] = i10n_select_auto(error["title"], lang)
        error["menu"] = i10n_select_auto(error["menu"], lang)

    if format == 'rss':
        response.content_type = 'application/rss+xml'
        tpl = 'errors/list.rss'
    elif format == 'gpx':
        response.content_type = 'application/gpx+xml'
        tpl = 'errors/list.gpx'
    elif format == 'kml':
        response.content_type = 'application/vnd.google-earth.kml+xml'
        tpl = 'errors/list.kml'
    elif format == 'josm':
        objects = set(sum(map(
            lambda error: list(map(
                lambda elem: elem['type'].lower() + str(elem['id']),
                error['elems'] or [])),
            errors), []))
        response.status = 302
        response.set_header('Location', 'http://localhost:8111/load_object?objects=%s' % ','.join(objects))
        return
    elif format == 'csv':
        output = io.StringIO()
        writer = csv.writer(output)
        h = ['uuid', 'source', 'item', 'class', 'level', 'title', 'subtitle',
             'country', 'analyser', 'timestamp', 'username', 'lat', 'lon', 'elems']
        hh = {'source': 'source_id'}
        writer.writerow(h)
        for res in errors:
            usernames = list(map(lambda elem: elem.get("username", ""), res['elems'] or []))
            elems = '_'.join(map(
                lambda elem: {'N': 'node', 'W': 'way', 'R': 'relation'}[elem['type']] + str(elem['id']),
                res['elems'] or []))
            writer.writerow(list(map(
                lambda a: usernames if a == 'username' else elems if a == 'elems' else res[a],
                map(lambda y: hh.get(y, y), h))))
        response.content_type = 'text/csv'
        return output.getvalue()
    else:
        countries = query_meta._countries(db)
        items = list(map(dict, items))

        if params.item:
            params.limit = None
            errors_groups = query._count(
                db, params,
                [
                    "markers_counts.item",
                    "markers.source_id",
                    "markers.class",
                    "sources.country",
                    "sources.analyser",
                    "updates_last.timestamp",
                ],
                ["items", "class"],
                [
                    "min(items.menu::text)::jsonb AS menu",
                    "min(class.title::text)::jsonb AS title",
                ],
            )

            total = 0
            for res in errors_groups:
                res["title"] = i10n_select_auto(res["title"], lang)
                res["menu"] = i10n_select_auto(res["menu"], lang)
                if res["count"] != -1:
                    total += res["count"]
        else:
            errors_groups = []
            total = 0

        if gen in ("false-positive", "done"):
            opt_date = "date"
        else:
            opt_date = None

        errors_groups = list(map(dict, errors_groups))
        for res in errors_groups:
            res['timestamp'] = str(res['timestamp'])
        errors = list(map(dict, errors))
        for res in errors:
            res['timestamp'] = str(res['timestamp'])
            if 'date' in res:
                res['date'] = str(res['date'])

        return dict(countries=countries, items=items,
                    errors_groups=errors_groups, total=total, errors=errors,
                    gen=gen, opt_date=opt_date,
                    website=utils.website, main_website=utils.main_website,
                    remote_url_read=utils.remote_url_read)

    return template(tpl,
                    items=items, errors=errors,
                    query=request.query_string,
                    lang=lang[0],
                    gen=gen, title=title,
                    website=utils.website, main_website=utils.main_website,
                    remote_url_read=utils.remote_url_read)