def get(self):
    """Return datasets updated after a given timestamp as a GeoJSON FeatureCollection.

    Query params:
        after: optional ISO-8601 timestamp; defaults to 120 minutes ago.

    Aborts with 400 on an unparsable timestamp or one outside the allowed window.
    """
    ts = request.args.get('after')
    if ts is None:
        # Default lookback window: last 2 hours.
        ts = datetime.now() - timedelta(minutes=120)
    else:
        try:
            ts = datetime.fromisoformat(ts)
        except ValueError:
            # was a bare `except:`; fromisoformat raises ValueError on bad input
            abort(400)
    # NOTE(review): as written this only rejects timestamps more than ~24h in
    # the FUTURE. If the intent was to cap how far back clients may query,
    # the operands should be swapped (datetime.now() - ts) — confirm.
    if ts - datetime.now() > timedelta(hours=24, minutes=5):
        abort(400)
    with pgdb().cursor() as cur:
        execute_sql(cur, QUERIES['latest_updates'], {'ts': ts})
        data = cur.fetchall()
    return {
        'type': 'FeatureCollection',
        'features': [
            {
                'type': 'Feature',
                'geometry': bbox,
                'properties': {'dataset': dataset, 'created_at': created_at},
            }
            for dataset, created_at, bbox in data
        ],
    }
def post(self):
    """Insert exclusion records for PRG addresses and/or BDOT buildings.

    Expects a JSON body with an optional 'prg_ids' list and/or an optional
    'bdot_ids' list. Returns the inserted counts with HTTP 201.

    Aborts with 400 when the body is missing or contains neither id list
    (previously a missing body raised AttributeError -> HTTP 500).
    """
    # NOTE(review): removed a large commented-out reCAPTCHA verification block;
    # the live captcha-verified variant of this endpoint exists elsewhere in
    # this file.
    r = request.get_json()
    if r is None or (r.get('prg_ids') is None and r.get('bdot_ids') is None):
        abort(400)
    conn = pgdb()
    with conn.cursor() as cur:
        prg_counter, lod1_counter = 0, 0
        if r.get('prg_ids'):
            prg_ids = [(x,) for x in r['prg_ids']]
            execute_values(cur, QUERIES['insert_to_exclude_prg'], prg_ids)
            prg_counter = len(prg_ids)
        if r.get('bdot_ids'):
            lod1_ids = [(x,) for x in r['bdot_ids']]
            execute_values(cur, QUERIES['insert_to_exclude_bdot_buildings'],
                           lod1_ids)
            lod1_counter = len(lod1_ids)
        conn.commit()
    return {
        'prg_ids_inserted': prg_counter,
        'bdot_ids_inserted': lod1_counter
    }, 201
def get(self, uuid: str):
    """Look up a single address point by its `lokalnyid` UUID.

    Returns the point's attributes as a dict, 400 when the string is not a
    valid UUID, or 404 when no matching point exists.
    """
    with pgdb().cursor() as cur:
        try:
            cur = execute_sql(cur, QUERIES['delta_point_info'], (uuid, ))
        except pg.errors.InvalidTextRepresentation:
            return {
                'Error': f'Error parsing string: `{uuid}` to UUID.'
            }, 400
        row = cur.fetchone()
    if not row:
        return {
            'Error': f'Address point with lokalnyid(uuid): {uuid} not found.'
        }, 404
    field_names = ('lokalnyid', 'teryt_msc', 'teryt_simc', 'teryt_ulica',
                   'teryt_ulic', 'nr', 'pna')
    return {name: row[i] for i, name in enumerate(field_names)}
def get(self):
    """Export building vertices within a bbox as an OSM XML attachment.

    Requires filter_by=bbox with numeric xmin/ymin/xmax/ymax params and
    format equal to 'osm' or 'xml'. Aborts with 400 on any invalid input.
    """
    if request.args.get('filter_by') == 'bbox':
        if not ('xmin' in request.args and 'xmax' in request.args
                and 'ymin' in request.args and 'ymax' in request.args):
            abort(400)
    else:
        abort(400)
    if request.args.get('format') not in {'osm', 'xml'}:
        abort(400)
    try:
        # Non-numeric coordinates previously raised an uncaught ValueError
        # (HTTP 500); treat them as a bad request instead.
        bbox = (float(request.args['xmin']), float(request.args['ymin']),
                float(request.args['xmax']), float(request.args['ymax']))
    except ValueError:
        abort(400)
    with pgdb().cursor() as cur:
        execute_sql(cur, QUERIES['buildings_vertices'], bbox)
        root = buildings_xml(cur.fetchall())
    return Response(
        etree.tostring(root, encoding='UTF-8'),
        mimetype='text/xml',
        headers={'Content-disposition': 'attachment; filename=buildings.osm'})
def get(self):
    """Return a random location as {'lon': ..., 'lat': ...}.

    90% of the time picks from the "most count" query, otherwise from the
    fully random one, then chooses one row at random.
    """
    if random() > 0.1:
        query = QUERIES['locations_most_count']
    else:
        query = QUERIES['locations_random']
    with pgdb().cursor() as cur:
        execute_sql(cur, query)
        rows = cur.fetchall()
    lon, lat = choice(rows)
    return {'lon': lon, 'lat': lat}
def get(self):
    """Export addresses and buildings as an OSM XML package.

    filter_by=bbox exports everything inside the bbox and records the export;
    filter_by=id exports explicit comma-separated id lists.
    """
    addresses_query, buildings_query = self.queries()
    addresses_params, buildings_params = None, None
    package_export_params = None
    root = etree.Element('osm', version='0.6')
    if request.args.get('filter_by') == 'bbox':
        addresses_params = (float(request.args.get('xmin')),
                            float(request.args.get('ymin')),
                            float(request.args.get('xmax')),
                            float(request.args.get('ymax')))
        buildings_params = addresses_params
        package_export_params = {
            'xmin': addresses_params[0],
            'ymin': addresses_params[1],
            'xmax': addresses_params[2],
            'ymax': addresses_params[3]
        }
    elif request.args.get('filter_by') == 'id':
        temp1 = request.args.get('addresses_ids')
        # tuple of tuples is required by the query's IN clause
        addresses_params = (tuple(temp1.split(',')), ) if temp1 else None
        temp2 = request.args.get('buildings_ids')
        buildings_params = (tuple(temp2.split(',')), ) if temp2 else None
    a, b = self.data(addresses_query, addresses_params,
                     buildings_query, buildings_params)
    if package_export_params:
        package_export_params['lb_adresow'] = len(a)
        package_export_params['lb_budynkow'] = len(b)
        # BUG FIX: the cursor used to be opened on a *second* pgdb()
        # connection while commit() was called on `conn`, so the export row
        # was never committed. Open the cursor on the same connection.
        conn = pgdb()
        with conn.cursor() as cur:
            cur.execute(QUERIES['insert_to_package_exports'],
                        package_export_params)
        conn.commit()
    root = self.prepare_xml_tree(root, a, b)
    return Response(
        etree.tostring(root, encoding='UTF-8'),
        mimetype='text/xml',
        headers={
            'Content-disposition': 'attachment; filename=paczka_danych.osm'
        })
def get(self):
    """List background processes and their most recent status.

    Returns {'processes': [...]} with one dict per process row.
    """
    # Use a context manager so the cursor is released; previously the cursor
    # was opened bare and never closed.
    with pgdb().cursor() as cur:
        execute_sql(cur, QUERIES['processes'])
        rows = cur.fetchall()
    field_names = ('name', 'in_progress', 'start_time', 'end_time',
                   'no_of_tiles_to_process', 'abbr_name', 'last_status')
    return {
        'processes': [
            {field: row[i] for i, field in enumerate(field_names)}
            for row in rows
        ]
    }
def data(self, addresses_query, addresses_params, buildings_query,
         buildings_params):
    """Fetch address and building rows for the given queries and params.

    A query is skipped (yielding an empty list) when its query string or its
    parameters are missing/empty. Returns (addresses, buildings).
    """
    addresses, buildings = [], []
    with pgdb().cursor() as cur:
        # Non-empty params tuple already implies len(...) > 0.
        if addresses_query and addresses_params:
            cur = execute_sql(cur, addresses_query, addresses_params)
            addresses = cur.fetchall()
        if buildings_query and buildings_params:
            cur = execute_sql(cur, buildings_query, buildings_params)
            buildings = cur.fetchall()
    return addresses, buildings
def get(self, z: int, x: int, y: int):
    """Serve a Mapbox Vector Tile, rendering and caching it on a cache miss.

    Aborts with 404 for zoom levels outside the supported ranges and 500
    when the tile is still missing after rendering.
    """
    # Tile bbox in web-mercator coordinates.
    tile = Tile(x, y, z)
    bbox = to_merc(bounds(tile))

    conn = pgdb()
    cur = execute_sql(conn.cursor(), QUERIES['cached_mvt'], (z, x, y))
    tup = cur.fetchone()
    if tup is None:
        # Cache miss: render the tile for this zoom range, persist it,
        # then read it back from the cache table.
        params = {
            'xmin': bbox['west'],
            'ymin': bbox['south'],
            'xmax': bbox['east'],
            'ymax': bbox['north'],
            'z': z,
            'x': x,
            'y': y
        }
        zoom = int(z)
        if 6 <= zoom <= 7:
            query_name = 'mvt_ll_aggr_terc'
        elif 8 <= zoom <= 9:
            query_name = 'mvt_ll_aggr_simc'
        elif 10 <= zoom <= 11:
            query_name = 'mvt_ll_aggr_simc_ulic'
        elif zoom == 12:
            query_name = 'mvt_ll'
        elif 13 <= zoom < 23:
            query_name = 'mvt_hl'
        else:
            abort(404)
        cur = execute_sql(cur, QUERIES[query_name], params)
        conn.commit()
        cur = execute_sql(cur, QUERIES['cached_mvt'], (z, x, y))
        tup = cur.fetchone()

    mvt = io.BytesIO(tup[0]).getvalue() if tup else abort(500)

    response = Response(mvt)
    response.headers['Content-Type'] = 'application/x-protobuf'
    response.headers['Access-Control-Allow-Origin'] = "*"
    cur.close()
    # Low zooms change rarely -> long edge cache; high zooms refresh often.
    zoom = int(z)
    if 6 <= zoom < 13:
        response.headers['X-Accel-Expires'] = '10800'
    elif 13 <= zoom < 23:
        response.headers['X-Accel-Expires'] = '60'
    return response
def post(self):
    """Insert exclusions for PRG addresses / LOD1 buildings after reCAPTCHA check.

    Expects a 'reCaptchaUserToken' header and a JSON body with optional
    'prg_ids' and/or 'lod1_ids' lists. Also invalidates cached tiles that
    contain the newly excluded objects. Returns inserted counts with 201.
    """
    r = request.get_json()
    captcha_user_token = request.headers.get('reCaptchaUserToken')
    # verify if request is correct
    if captcha_user_token is None:
        abort(400)
    # BUG FIX: a missing JSON body previously hit
    # `raise ValueError(request.form)` — a debugging leftover that produced
    # HTTP 500 and made the following `r is None` check unreachable.
    if r is None or (r.get('prg_ids') is None and r.get('lod1_ids') is None):
        abort(400)
    # verify captcha token with Google
    response = requests_lib.post(
        url='https://www.google.com/recaptcha/api/siteverify',
        data={
            'secret': environ.get('reCaptchaSecretToken'),
            'response': captcha_user_token
        })
    if not (response.ok and response.json().get('success')):
        abort(400)
    conn = pgdb()
    with conn.cursor() as cur:
        prg_counter, lod1_counter = 0, 0
        if r.get('prg_ids'):
            prg_ids = [(x, ) for x in r['prg_ids']]
            execute_values(cur, QUERIES['insert_to_exclude_prg'], prg_ids)
            # drop cached tiles containing the now-excluded addresses
            execute_sql(cur, QUERIES['delete_tiles_excluded_prg'],
                        (tuple(r['prg_ids']), ))
            prg_counter = len(prg_ids)
        if r.get('lod1_ids'):
            lod1_ids = [(x, ) for x in r['lod1_ids']]
            execute_values(cur, QUERIES['insert_to_exclude_lod1'], lod1_ids)
            # drop cached tiles containing the now-excluded buildings
            execute_sql(cur, QUERIES['delete_tiles_excluded_lod1'],
                        (tuple(r['lod1_ids']), ))
            lod1_counter = len(lod1_ids)
        conn.commit()
    return {
        'prg_ids_inserted': prg_counter,
        'lod1_ids_inserted': lod1_counter
    }, 201
def data_for_layers(self, layers: List[Tuple[LayerDefinition, QueryParametersType]], filter_by: str) -> Dict[str, XMLElementType]:
    """Run each layer's query and collect its rows as XML elements.

    Returns a dict keyed by layer id, each value holding the row 'count'
    and the rows converted to an XML element under 'data'. Raises
    NotImplementedError for an unknown filter_by value.
    """
    results = {}
    with pgdb().cursor() as cur:
        for layer, params in layers:
            if filter_by not in ('bbox', 'id'):
                raise NotImplementedError()
            query = (layer.query_by_bbox if filter_by == 'bbox'
                     else layer.query_by_id)
            cur = execute_sql(cur, query, params)
            rows = cur.fetchall()
            results[layer.id] = {
                'count': len(rows),
                'data': layer.convert_to_xml_element(rows)
            }
    return results
def register_bbox_export(self, package_export_params: dict) -> None:
    """Record a bbox package export in the package_exports table and commit."""
    conn = pgdb()
    with conn.cursor() as cur:
        cur.execute(QUERIES['insert_to_package_exports'],
                    package_export_params)
    conn.commit()