async def handler_exists_layer2(request):
    """Return a stored layer to the client in the requested file format.

    Reads the layer (stored as TopoJSON) from redis under the key
    ``<user_id>_<layer_name_redis>`` and converts it on the fly:
    TopoJSON is returned as-is, GeoJSON via ``topojson_to_geojson``,
    KML via ``reproj_convert_layer_kml``, and Shapefile/KML/GML with a
    target projection via ``reproj_convert_layer`` (zipped, base64-encoded).
    Errors are returned as JSON payloads with an "Error" key, never raised.
    """
    session_redis = await get_session(request)
    posted_data = await request.post()
    user_id = get_user_id(session_redis, request.app['app_users'])
    # 'layer' is the display name used for output filenames;
    # 'layer_name' is the redis-side identifier of the stored layer.
    layer_name = posted_data.get('layer')
    layer_name_redis = posted_data.get('layer_name')
    file_format = posted_data.get('format')
    # Projection is posted as JSON: either {"name": ...} or {"proj4string": ...}
    projection = json.loads(posted_data.get('projection'))
    res = await request.app['redis_conn'].get('_'.join(
        [user_id, layer_name_redis]))
    if not res:
        request.app['logger'].info(
            '{} - Unable to fetch the requested layer ({}/{})'.format(
                user_id, layer_name, layer_name_redis))
        return web.Response(
            text='{"Error": "Unable to fetch the layer on the server"}')
    elif file_format == "TopoJSON":
        # Layers are stored as TopoJSON, so no conversion is needed.
        return web.Response(text=res.decode())
    else:
        try:
            res_geojson = topojson_to_geojson(json.loads(res.decode()))
            if "GeoJSON" in file_format:
                return web.Response(text=res_geojson)
            elif "KML" in file_format:
                # KML without reprojection: write a temp geojson, convert,
                # then remove the file and its temp folder.
                tmp_path = prepare_folder()
                output_path = ''.join([tmp_path, "/", layer_name, ".geojson"])
                savefile(output_path, res_geojson.encode())
                result = reproj_convert_layer_kml(output_path)
                os.remove(output_path)
                os.removedirs(tmp_path)
                return web.Response(text=result.decode())
            else:
                # Remaining formats require a valid target projection.
                out_proj = check_projection(
                    projection["name"] if "name" in projection
                    else projection["proj4string"])
                if not out_proj:
                    return web.Response(text=json.dumps(
                        {'Error': 'app_page.common.error_proj4_string'}))
                available_formats = {"ESRI Shapefile": ".shp",
                                     "KML": ".kml",
                                     "GML": ".gml"}
                # KeyError on an unknown format is caught by the broad
                # except below and reported as "Unexpected error".
                ext = available_formats[file_format]
                tmp_path = prepare_folder()
                output_path = ''.join([tmp_path, "/", layer_name, ".geojson"])
                savefile(output_path, res_geojson.encode())
                reproj_convert_layer(
                    output_path, output_path.replace(".geojson", ext),
                    file_format, out_proj)
                os.remove(output_path)
                # fetch_zip_clean presumably zips the converted file(s) and
                # cleans tmp_path — TODO confirm it removes the folder.
                raw_data, filename = fetch_zip_clean(tmp_path, layer_name)
                if ".zip" in filename:
                    # Shapefile output is multi-file, hence zipped and
                    # base64-encoded for transport.
                    b64_zip = b64encode(raw_data)
                    return web.Response(
                        body=b64_zip,
                        headers=MultiDict({
                            "Content-Type": "application/octet-stream",
                            "Content-Disposition": ''.join(
                                ["attachment; filename=",
                                 layer_name, ".zip"]),
                            "Content-length": str(len(b64_zip))}))
                else:
                    return web.Response(text=raw_data.decode())
        except Exception as err:
            # Boundary handler: log and reply with a generic error payload.
            # NOTE(review): temp files may be left behind if the failure
            # happens after prepare_folder() — confirm cleanup elsewhere.
            request.app['logger'].info(
                '{} - Error {} while converting layer {} to {} format)'.format(
                    user_id, err, layer_name, file_format))
            return web.Response(text='{"Error": "Unexpected error"}')
    # NOTE(review): unreachable — every branch above returns; kept as a
    # defensive fallback.
    return web.Response(text='{"Error": "Invalid file format"}')
async def convert(request):
    """Convert an uploaded geographic layer to TopoJSON and cache it.

    Accepts three upload shapes: a shapefile posted as separate sibling
    files (.shp/.dbf/.shx/.prj), a zipped shapefile, or a single
    geojson/kml/gml file. The converted TopoJSON is stored in redis under
    ``<user_id>_<murmur3-hash-of-input>`` (24 h expiry) so identical
    uploads are served from cache. Replies with a JSON payload containing
    the hash key, the TopoJSON, and the layer's proj4 string.

    Fixes vs previous revision:
    - ``pexpire`` on the cache-hit path was a coroutine that was never
      awaited (so the TTL refresh silently never ran); it is now scheduled
      with ``asyncio.ensure_future`` like every other fire-and-forget
      redis call in this handler.
    - ``clean_files`` lambda assignment replaced by a ``def`` (PEP 8).
    - A request with no recognised upload shape now gets a clean
      'Incorrect datatype' error instead of a NameError on ``datatype``.
    """
    posted_data, session_redis = \
        await asyncio.gather(*[request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])
    proj_info_str = None
    # If a shapefile is provided as multiple files
    # (.shp, .dbf, .shx, and .prj are expected), not ziped :
    if "action" in posted_data and "file[]" not in posted_data:
        list_files = []
        # One field per file; the extra field is 'action', hence len - 1.
        for i in range(len(posted_data) - 1):
            field = posted_data.getall('file[{}]'.format(i))[0]
            file_name = ''.join(['/tmp/', user_id, '_', field[1]])
            list_files.append(file_name)
            savefile(file_name, field[2].read())
        shp_path = [i for i in list_files if 'shp' in i][0]
        layer_name = shp_path.replace(
            ''.join(['/tmp/', user_id, '_']), '').replace('.shp', '')
        # Hash the .shp content so identical uploads hit the redis cache.
        hashed_input = mmh3_file(shp_path)
        name = shp_path.split(os.path.sep)[2]
        datatype = "shp"
    # If there is a single file (geojson, kml, gml or zip) to handle :
    elif "action" in posted_data and "file[]" in posted_data:
        try:
            field = posted_data.get('file[]')
            name = field[1]
            layer_name = name.split('.')[0]
            data = field[2].read()
            datatype = field[3]
            hashed_input = mmh3_hash(data)
            filepath = ''.join(['/tmp/', user_id, "_", name])
        except Exception as err:
            request.app['logger'].info("posted data :\n{}\nerr:\n{}".format(
                posted_data, err))
            return convert_error('Incorrect datatype')
    else:
        # Neither upload shape matched; previously this fell through and
        # raised NameError on 'datatype' below.
        return convert_error('Incorrect datatype')

    f_name = '_'.join([user_id, str(hashed_input)])
    # Fire-and-forget usage counter.
    asyncio.ensure_future(request.app['redis_conn'].incr('layers'))

    result = await request.app['redis_conn'].get(f_name)
    if result:
        # Cache hit: refresh the TTL and return the stored TopoJSON.
        request.app['logger'].debug(
            '{} - Used result from redis'.format(user_id))
        # Was previously an unawaited coroutine — the TTL was never
        # actually refreshed.
        asyncio.ensure_future(
            request.app['redis_conn'].pexpire(f_name, 86400000))
        return web.Response(text=''.join(
            ['{"key":', str(hashed_input),
             ',"file":', result.decode(), '}']))

    if "shp" in datatype:
        def clean_files():
            # Remove every uploaded shapefile component from /tmp.
            [os.remove(_file) for _file in list_files]

        # ogr_to_geojson is CPU-bound: run it in the process pool.
        res = await request.app.loop.run_in_executor(
            request.app["ProcessPool"], ogr_to_geojson, shp_path)
        if not res:
            clean_files()
            return convert_error()
        result = await geojson_to_topojson2(res, layer_name)
        if not result:
            clean_files()
            return convert_error()
        asyncio.ensure_future(
            request.app['redis_conn'].set(f_name, result, pexpire=86400000))
        # Keep the projection definition to send back to the client.
        with open('/tmp/' + name.replace('.shp', '.prj'), 'r') as f:
            proj_info_str = f.read()
        clean_files()

    elif datatype in ('application/x-zip-compressed', 'application/zip'):
        dataZip = BytesIO(data)
        dir_path = '/tmp/{}{}/'.format(user_id, hashed_input)
        with ZipFile(dataZip) as myzip:
            list_files = [dir_path + i for i in myzip.namelist()]
            slots = {"shp": None, "prj": None, "dbf": None, "shx": None}
            names = []
            try:
                # Validate the archive before extracting anything:
                # all four mandatory components present, a single base
                # name, and a sane file count. A file name with extra
                # dots makes the unpacking fail, which is also rejected.
                for f in list_files:
                    name, ext = f.split('.')
                    names.append(name)
                    if 'shp' in ext:
                        slots['shp'] = f
                    elif 'prj' in ext:
                        slots['prj'] = f
                    elif 'shx' in ext:
                        slots['shx'] = f
                    elif 'dbf' in ext:
                        slots['dbf'] = f
                    elif 'cpg' in ext:
                        slots['cpg'] = f
                assert (all(v is not None for v in slots.values()))
                assert (all(name == names[0] for name in names))
                assert (4 <= len(list_files) < 8)
            except Exception as err:
                request.app['logger'].info(
                    'Error with content of zip file : {}'.format(err))
                return convert_error('Error with zip file content')
            os.mkdir(dir_path)
            myzip.extractall(path=dir_path)
            try:
                res = await request.app.loop.run_in_executor(
                    request.app["ProcessPool"], ogr_to_geojson, slots['shp'])
                if not res:
                    return convert_error()
                result = await geojson_to_topojson2(res, layer_name)
                if not result:
                    return convert_error()
                with open(slots['prj'], 'r') as f:
                    proj_info_str = f.read()
                asyncio.ensure_future(request.app['redis_conn'].set(
                    f_name, result, pexpire=86400000))
            except (asyncio.CancelledError, CancelledError):
                return
            except Exception as err:
                request.app['logger'].info(
                    'Error with content of zip file : {}'.format(err))
                return convert_error('Error with zip file content')
            finally:
                # Always remove the extracted files and their folder,
                # whatever the outcome of the conversion.
                [os.remove(dir_path + _file)
                 for _file in os.listdir(dir_path)]
                os.removedirs(dir_path)

    elif ('octet-stream' in datatype or 'text/json' in datatype or
            'application/geo+json' in datatype or
            'application/vnd.google-earth.kml+xml' in datatype or
            'application/gml+xml' in datatype) \
            and ("kml" in name.lower() or "gml" in name.lower()
                 or "geojson" in name.lower()):
        # Single geojson/kml/gml file: persist it, then convert.
        with open(filepath, 'wb') as f:
            f.write(data)
        res = await request.app.loop.run_in_executor(
            request.app["ThreadPool"], ogr_to_geojson, filepath)
        if not res:
            return convert_error('Error reading the input file')
        result = await geojson_to_topojson2(res, layer_name)
        if not result:
            return convert_error('Error reading the input file')
        asyncio.ensure_future(
            request.app['redis_conn'].set(f_name, result, pexpire=86400000))

    else:
        request.app['logger'].info("Incorrect datatype :\n{}name:\n{}".format(
            datatype, name))
        return convert_error('Incorrect datatype')

    request.app['logger'].debug(
        '{} - Converted, stored in redis and sent back to client'.format(
            user_id))
    return web.Response(text=''.join([
        '{"key":', str(hashed_input),
        ',"file":', result,
        ',"proj":', json.dumps(get_proj4_string(proj_info_str)),
        '}']))
async def call_stewart(posted_data, user_id, app):
    """Compute a Stewart potential smoothing on a stored point layer.

    Fetches the point layer (and an optional mask layer) from redis,
    writes them to temporary geojson files, runs the (expensive) Stewart
    computation in an executor — reusing a cached intermediate object from
    redis when the same parameters were already computed — then stores the
    resulting TopoJSON back in redis and returns it to the caller together
    with the class breaks, joined by the '|||' separator. Returns ``None``
    if the task is cancelled.
    """
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    f_name = '_'.join([user_id, str(posted_data['topojson'])])
    point_layer = await app['redis_conn'].get(f_name)
    point_layer = json.loads(point_layer.decode())
    # variable1 / variable2 are {field_name: values} mappings posted by
    # the client; values are joined onto the layer when non-empty.
    new_field1 = posted_data['variable1']
    new_field2 = posted_data['variable2']
    n_field_name1 = list(new_field1.keys())[0]
    if len(new_field1[n_field_name1]) > 0:
        join_field_topojson(point_layer, new_field1[n_field_name1],
                            n_field_name1)
    if new_field2:
        # Two-variable mode forces a percentile discretization.
        discretization = "percentiles"
        n_field_name2 = list(new_field2.keys())[0]
        if len(new_field2[n_field_name2]) > 0:
            join_field_topojson(point_layer, new_field2[n_field_name2],
                                n_field_name2)
    else:
        discretization = "jenks"
        n_field_name2 = None
    if posted_data['mask_layer']:
        # Reuses f_name for the mask key; the point-layer key is no
        # longer needed at this point.
        f_name = '_'.join([user_id, str(posted_data['mask_layer'])])
        mask_layer = await app['redis_conn'].get(f_name)
    tmp_part = get_name()
    # NOTE(review): the guard here is `!= ""` while the fetch above used
    # truthiness — if mask_layer is e.g. None, a path is built but
    # `mask_layer` was never fetched, which would raise NameError at the
    # savefile below. Confirm the client always posts "" for "no mask".
    filenames = {
        'point_layer': ''.join(['/tmp/', tmp_part, '.geojson']),
        'mask_layer': ''.join(['/tmp/', get_name(), '.geojson'])
        if posted_data['mask_layer'] != "" else None
    }
    savefile(filenames['point_layer'],
             topojson_to_geojson(point_layer).encode())
    if filenames['mask_layer']:
        savefile(filenames['mask_layer'],
                 topojson_to_geojson(json.loads(mask_layer.decode())).encode())
    # Cache key encoding every parameter that determines the (expensive)
    # potential computation, so it can be resumed with different class
    # counts / discretizations.
    reusable_val = '_'.join([
        user_id,
        str(posted_data['topojson']),
        n_field_name1,
        n_field_name2 if n_field_name2 else "",
        str(posted_data["span"]),
        str(posted_data['beta']),
        str(posted_data['resolution']),
        posted_data['typefct'].lower()])
    existing_obj = await app['redis_conn'].get(reusable_val)
    try:
        if existing_obj:
            # Cheap path: only the classification is redone (thread pool).
            res, breaks = await app.loop.run_in_executor(
                app["ThreadPool"],
                resume_stewart,
                existing_obj,
                int(posted_data['nb_class']),
                discretization,
                posted_data['user_breaks'],
                filenames["mask_layer"])
        else:
            # Full computation (CPU-bound → process pool); dump_obj is the
            # reusable intermediate state cached for 12 h.
            res, breaks, dump_obj = await app.loop.run_in_executor(
                app["ProcessPool"],
                quick_stewart_mod,
                filenames['point_layer'],
                n_field_name1,
                int(posted_data['span']),
                float(posted_data['beta']),
                posted_data['typefct'].lower(),
                int(posted_data['nb_class']),
                discretization,
                posted_data['resolution'],
                filenames["mask_layer"],
                n_field_name2,
                posted_data['user_breaks'])
            asyncio.ensure_future(
                app['redis_conn'].set(reusable_val, dump_obj,
                                      pexpire=43200000))
    except asyncio.CancelledError:
        app['logger'].info(
            'Cancelled after {:.4f}s : stewart'.format(time.time() - st))
        return
    # Temporary geojson inputs are no longer needed.
    os.remove(filenames['point_layer'])
    if filenames['mask_layer']:
        os.remove(filenames['mask_layer'])
    new_name = '_'.join(['StewartPot', n_field_name1])
    res = await geojson_to_topojson2(res, new_name)
    hash_val = str(mmh3_hash(res))
    # Store the result layer like any other converted layer (24 h TTL)
    # and record the timing for monitoring.
    asyncio.ensure_future(
        app['redis_conn'].set('_'.join([user_id, hash_val]),
                              res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('stewart_time', time.time() - st))
    app['logger'].info('{} - timing : stewart_on_py : {:.4f}s'.format(
        user_id, time.time() - st))
    return "|||".join([
        ''.join(['{"key":', hash_val, ',"file":', res, '}']),
        json.dumps(breaks)])
async def convert(request):
    """Convert an uploaded layer to TopoJSON (quantized + non-quantized).

    Variant of the conversion handler that stores two redis entries per
    layer: ``<key>_Q`` (quantized, returned to the client) and
    ``<key>_NQ`` (non-quantized, stored via ``store_non_quantized``).
    Accepts a multi-file shapefile upload, a zipped shapefile, or a
    single geojson/kml/gml file.

    Fixes vs previous revision:
    - Debug ``print(...)`` calls replaced by the application logger,
      consistent with the rest of this handler.
    - The two ``pexpire`` calls on the cache-hit path were coroutines
      that were never awaited (TTLs were never refreshed); they are now
      scheduled with ``asyncio.ensure_future``.
    """
    posted_data, session_redis = \
        await asyncio.gather(*[request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])
    # If a shapefile is provided as multiple files
    # (.shp, .dbf, .shx, and .prj are expected), not ziped :
    if "action" in posted_data and "file[]" not in posted_data:
        list_files = []
        # One field per file; the extra field is 'action', hence len - 1.
        for i in range(len(posted_data) - 1):
            field = posted_data.getall('file[{}]'.format(i))[0]
            file_name = ''.join(['/tmp/', user_id, '_', field[1]])
            list_files.append(file_name)
            savefile(file_name, field[2].read())
        shp_path = [i for i in list_files if 'shp' in i][0]
        hashed_input = mmh3_file(shp_path)
        name = shp_path.split(os.path.sep)[2]
        datatype = "shp"
    # If there is a single file (geojson, kml, gml or zip) to handle :
    elif "action" in posted_data and "file[]" in posted_data:
        try:
            field = posted_data.get('file[]')
            name = field[1]
            data = field[2].read()
            datatype = field[3]
            request.app['logger'].debug('datatype : {}'.format(datatype))
            hashed_input = mmh3_hash(data)
            filepath = ''.join(['/tmp/', user_id, "_", name])
        except Exception as err:
            request.app['logger'].info(
                "posted data :\n{}\nerr :\n{}".format(posted_data, err))
            return web.Response(text='{"Error": "Incorrect datatype"}')

    f_name = '_'.join([user_id, str(hashed_input)])
    f_nameQ = '_'.join([f_name, "Q"])      # quantized result
    f_nameNQ = '_'.join([f_name, "NQ"])    # non-quantized result
    asyncio.ensure_future(request.app['redis_conn'].incr('layers'))

    results = await request.app['redis_conn'].keys(f_name)
    if results:
        # Cache hit: refresh both TTLs and return the quantized layer.
        result = await request.app['redis_conn'].get(f_nameQ)
        request.app['logger'].info(
            '{} - Used result from redis'.format(user_id))
        # These were previously unawaited coroutines — the TTLs were
        # never actually refreshed.
        asyncio.ensure_future(
            request.app['redis_conn'].pexpire(f_nameQ, 86400000))
        asyncio.ensure_future(
            request.app['redis_conn'].pexpire(f_nameNQ, 86400000))
        return web.Response(text=''.join(
            ['{"key":', str(hashed_input),
             ',"file":', result.decode(), '}']))

    if "shp" in datatype:
        res = await ogr_to_geojson(shp_path, to_latlong=True)
        filepath2 = '/tmp/' + name.replace('.shp', '.geojson')
        with open(filepath2, 'wb') as f:
            f.write(res)
        result = await geojson_to_topojson(filepath2, "-q 1e5")
        # Strip the user-id prefix embedded in the layer name.
        result = result.replace(''.join([user_id, '_']), '')
        asyncio.ensure_future(
            store_non_quantized(filepath2, f_nameNQ,
                                request.app['redis_conn']))
        asyncio.ensure_future(
            request.app['redis_conn'].set(f_nameQ, result,
                                          pexpire=86400000))
        [os.remove(file) for file in list_files]

    elif datatype in ('application/x-zip-compressed', 'application/zip'):
        dataZip = BytesIO(data)
        dir_path = '/tmp/{}{}/'.format(user_id, hashed_input)
        os.mkdir(dir_path)
        with ZipFile(dataZip) as myzip:
            list_files = [dir_path + i for i in myzip.namelist()]
            shp_path = [i for i in list_files if 'shp' in i][0]
            myzip.extractall(path=dir_path)
            res = await ogr_to_geojson(shp_path, to_latlong=True)
            filepath2 = shp_path.replace(
                "{}{}/".format(user_id, hashed_input),
                "").replace('.shp', '.geojson')
            with open(filepath2, 'wb') as f:
                f.write(res)
            result = await geojson_to_topojson(filepath2, "-q 1e5")
            result = result.replace(''.join([user_id, '_']), '')
            asyncio.ensure_future(request.app['redis_conn'].set(
                f_nameQ, result, pexpire=86400000))
            asyncio.ensure_future(
                store_non_quantized(filepath2, f_nameNQ,
                                    request.app['redis_conn']))
            # Remove the extracted files and their folder.
            [os.remove(dir_path + file) for file in os.listdir(dir_path)]
            os.removedirs(dir_path)

    elif ('octet-stream' in datatype or 'text/json' in datatype
            or 'application/geo+json' in datatype
            or 'application/vnd.google-earth.kml+xml' in datatype
            or 'application/gml+xml' in datatype) \
            and ("kml" in name.lower()
                 or "gml" in name.lower() or "geojson" in name.lower()):
        with open(filepath, 'wb') as f:
            f.write(data)
        res = await ogr_to_geojson(filepath, to_latlong=True)
        if len(res) == 0:
            return web.Response(
                text=json.dumps({'Error': 'Error reading the input file'}))
        if 'gml' in name.lower():
            # ogr leaves a .gfs schema file next to gml inputs.
            os.remove(filepath.replace('gml', 'gfs'))
        # Overwrite the uploaded file with its geojson conversion before
        # producing the TopoJSON.
        with open(filepath, 'wb') as f:
            f.write(res)
        result = await geojson_to_topojson(filepath, "-q 1e5")
        if len(result) == 0:
            return web.Response(
                text='{"Error": "Error converting input file"}')
        else:
            result = result.replace(''.join([user_id, '_']), '')
            asyncio.ensure_future(
                store_non_quantized(filepath, f_nameNQ,
                                    request.app['redis_conn']))
            asyncio.ensure_future(request.app['redis_conn'].set(
                f_nameQ, result, pexpire=86400000))

    else:
        request.app['logger'].info(
            "Incorrect datatype : {} / name : {}".format(datatype, name))
        return web.Response(text='{"Error": "Incorrect datatype"}')

    request.app['logger'].info(
        '{} - Converted, stored in redis and sent back to client'.format(
            user_id))
    return web.Response(text=''.join(
        ['{"key":', str(hashed_input),
         ',"file":', result, '}']))