async def links_map(posted_data, user_id, app):
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))

    f_name = '_'.join([user_id, str(posted_data['topojson'])])
    ref_layer = await app['redis_conn'].get(f_name)
    ref_layer = json.loads(ref_layer.decode())
    new_field = posted_data['join_field']

    n_field_name = list(new_field.keys())[0]
    if len(new_field[n_field_name]) > 0:
        join_field_topojson(ref_layer, new_field[n_field_name], n_field_name)
    ref_layer = convert_from_topo(ref_layer)

    result_geojson = await app.loop.run_in_executor(
        app["ThreadPool"],
        make_geojson_links,
        ref_layer,
        posted_data["csv_table"],
        posted_data["field_i"],
        posted_data["field_j"],
        posted_data["field_fij"],
        n_field_name)

    new_name = ''.join(["Links_", n_field_name])
    res = await geojson_to_topojson2(result_geojson, new_name)
    hash_val = mmh3_hash(res)
    asyncio.ensure_future(app['redis_conn'].set(
        '_'.join([user_id, str(hash_val)]), res, pexpire=86400000))

    app['logger'].info('{} - timing : links_on_py : {:.4f}s'.format(
        user_id, time.time() - st))
    asyncio.ensure_future(
        app['redis_conn'].lpush('links_time', time.time() - st))
    return ''.join(['{"key":', str(hash_val), ',"file":', res, '}'])
async def compute_olson(posted_data, user_id, app):
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    f_name = '_'.join([user_id, str(posted_data['topojson'])])

    ref_layer = await app['redis_conn'].get(f_name)
    ref_layer = json.loads(ref_layer.decode())

    scale_values = posted_data['scale_values']
    ref_layer_geojson = convert_from_topo(ref_layer)

    try:
        await app.loop.run_in_executor(
            app["ThreadPool"],
            olson_transform,
            ref_layer_geojson,
            scale_values)
    except asyncio.CancelledError:
        app['logger'].info(
            'Cancelled after {:.4f}s : olson_transform'
            .format(time.time() - st))
        return

    new_name = "_".join(["Olson_carto", str(posted_data["field_name"])])
    res = await geojson_to_topojson2(
        json.dumps(ref_layer_geojson).encode(), new_name)
    hash_val = str(mmh3_hash(res))

    asyncio.ensure_future(app['redis_conn'].set(
        '_'.join([user_id, hash_val]), res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('olson_time', time.time() - st))
    app['logger'].info('{} - timing : olson-like cartogram : {:.4f}s'.format(
        user_id, time.time() - st))
    return ''.join(['{"key":', hash_val, ',"file":', res, '}'])
async def get_sample_layer(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])

    name = posted_data.get('layer_name')
    path = request.app['db_layers'][name]
    hash_val = str(mmh3_hash(path))
    f_name = '_'.join([user_id, hash_val])

    asyncio.ensure_future(request.app['redis_conn'].incr('sample_layers'))

    result = await request.app['redis_conn'].get(f_name)
    if result:
        result = result.decode()
        request.app['logger'].debug(
            '{} - Used result from redis'.format(user_id))
        request.app['redis_conn'].pexpire(f_name, 86400000)
        return web.Response(text=''.join([
            '{"key":', hash_val,
            ',"file":', result.replace(''.join([user_id, '_']), ''), '}'
        ]))
    else:
        res = await request.app.loop.run_in_executor(
            request.app["ThreadPool"], ogr_to_geojson, path)
        request.app['logger'].debug(
            '{} - Transform coordinates from GeoJSON'.format(user_id))
        result = await geojson_to_topojson2(res, name)
        asyncio.ensure_future(request.app['redis_conn'].set(
            f_name, result, pexpire=86400000))
        return web.Response(
            text=''.join(['{"key":', hash_val, ',"file":', result, '}']))
def censys_ipv4_http_extraction(s: dict) -> dict:
    """
    Extracts HTTP-relevant data out of the service part of a Censys IPv4 dict

    :param s: Service part of a Censys dict
    :return: Dictionary with HTTP data
    """
    headers = s.get("headers", {})
    # Flatten the "unknown" header list into regular key/value pairs,
    # normalizing names to snake_case (e.g. "X-Frame-Options" -> "x_frame_options").
    for h in headers.get("unknown", []):
        headers.update({h["key"].lower().replace("-", "_"): h["value"]})
    if "unknown" in headers.keys():
        del headers["unknown"]

    return {
        "headers": headers,
        "content": {
            "html": s.get("body", None),
            "hash": {
                "shodan": mmh3_hash(s.get("body", None) or ""),
                "sha256": s.get("body_sha256", None) or sha256_from_body_string("")
            },
            "favicon": {
                "shodan": None,
                "sha256": None
            },
        },
    }
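# Minimal usage sketch for censys_ipv4_http_extraction (hedged: the sample
# service dict below is illustrative only, not real Censys output).
sample_service = {
    "headers": {
        "server": "nginx",
        # Headers Censys could not classify arrive under "unknown"
        # as a list of {"key": ..., "value": ...} entries.
        "unknown": [{"key": "X-Frame-Options", "value": "DENY"}],
    },
    "body": "<html></html>",
}
info = censys_ipv4_http_extraction(sample_service)
# "unknown" entries are flattened and snake_cased:
assert info["headers"] == {"server": "nginx", "x_frame_options": "DENY"}
# the body hash follows the Shodan murmur3 convention:
assert info["content"]["hash"]["shodan"] == mmh3_hash("<html></html>")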
async def receiv_layer(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])

    layer_name = posted_data['layer_name']
    data = posted_data['geojson']
    h_val = mmh3_hash(data)
    f_name = '_'.join([user_id, str(h_val)])

    res = await geojson_to_topojson2(data.encode(), layer_name)
    asyncio.ensure_future(request.app['redis_conn'].set(
        f_name, res, pexpire=86400000))
    return web.Response(text=''.join(['{"key":', str(h_val), '}']))
async def carto_gridded(posted_data, user_id, app):
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    f_name = '_'.join([user_id, str(posted_data['topojson']), "NQ"])

    ref_layer = await app['redis_conn'].get(f_name)
    ref_layer = json.loads(ref_layer.decode())
    new_field = posted_data['var_name']

    n_field_name = list(new_field.keys())[0]
    if len(new_field[n_field_name]) > 0:
        join_field_topojson(ref_layer, new_field[n_field_name], n_field_name)

    tmp_part = get_name()
    filenames = {
        "src_layer": ''.join(['/tmp/', tmp_part, '.geojson']),
        "result": None
    }
    savefile(filenames['src_layer'], topojson_to_geojson(ref_layer).encode())

    try:
        result_geojson = await app.loop.run_in_executor(
            app["ProcessPool"],
            get_grid_layer,
            filenames['src_layer'],
            posted_data["cellsize"],
            n_field_name,
            posted_data["grid_shape"].lower())
    except asyncio.CancelledError:
        app['logger'].info('{} - Cancelled after {:.4f}s : get_grid_layer'.format(
            user_id, time.time() - st))
        return

    savefile(filenames['src_layer'], result_geojson.encode())
    res = await geojson_to_topojson(filenames['src_layer'], remove=True)
    app['logger'].info('{} - Gridded_on_py - {:.4f}'.format(
        user_id, time.time() - st))

    new_name = '_'.join(
        ['Gridded', str(posted_data["cellsize"]), n_field_name])
    res = res.replace(tmp_part, new_name)
    hash_val = str(mmh3_hash(res))
    asyncio.ensure_future(app['redis_conn'].set(
        '_'.join([user_id, hash_val, "NQ"]), res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('gridded_time', time.time() - st))
    return ''.join(['{"key":', hash_val, ',"file":', res, '}'])
async def receiv_layer(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])

    layer_name = posted_data['layer_name']
    data = posted_data['geojson']
    h_val = mmh3_hash(data)
    f_name = '_'.join([user_id, str(h_val)])
    f_nameNQ = '_'.join([f_name, "NQ"])

    tmp_part = get_name()
    filepath = "".join(['/tmp/', tmp_part, '.geojson'])
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(data)

    res = await geojson_to_topojson(filepath)
    res = res.replace(tmp_part, layer_name)
    asyncio.ensure_future(request.app['redis_conn'].set(
        f_nameNQ, res, pexpire=86400000))
    return web.Response(text=''.join(['{"key":', str(h_val), '}']))
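# Hedged sketch, not part of the original code: the handlers in this module
# build their redis keys by hand as '<user_id>_<mmh3 hash>', with an optional
# quantization suffix ("Q" for quantized TopoJSON, "NQ" for non-quantized).
# A hypothetical helper making that convention explicit:
def make_cache_key(user_id, hash_val, suffix=None):
    parts = [user_id, str(hash_val)]
    if suffix is not None:
        parts.append(suffix)  # e.g. "Q" or "NQ"
    return '_'.join(parts)

# make_cache_key('user1', mmh3_hash(data), 'NQ') -> 'user1_<hash>_NQ'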
async def convert_extrabasemap(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])

    url = posted_data['url']
    layer_name = posted_data['layer_name']

    async with ClientSession(loop=request.app.loop) as client:
        async with client.get(url) as resp:
            assert resp.status == 200
            data = await resp.text()

    data = data.encode()
    hashed_input = mmh3_hash(data)
    f_name = '_'.join([user_id, str(hashed_input)])
    asyncio.ensure_future(request.app['redis_conn'].incr('layers'))

    result = await request.app['redis_conn'].get(f_name)
    if result:
        request.app['logger'].debug(
            '{} - Used result from redis'.format(user_id))
        request.app['redis_conn'].pexpire(f_name, 86400000)
        return web.Response(text=''.join([
            '{"key":', str(hashed_input),
            ',"file":', result.decode(), '}'
        ]))

    result = await geojson_to_topojson2(data, layer_name)
    if not result:
        return web.Response(
            text='{"Error": "Error converting input file"}')
    else:
        asyncio.ensure_future(request.app['redis_conn'].set(
            f_name, result, pexpire=86400000))
        request.app['logger'].debug(
            '{} - Converted, stored in redis and sent back to client'
            .format(user_id))
        return web.Response(text=''.join(
            ['{"key":', str(hashed_input), ',"file":', result, '}']))
async def carto_doug(posted_data, user_id, app):
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    f_name = '_'.join([user_id, str(posted_data['topojson']), "Q"])

    ref_layer = await app['redis_conn'].get(f_name)
    ref_layer = json.loads(ref_layer.decode())
    new_field = posted_data['var_name']
    iterations = int(posted_data['iterations'])

    n_field_name = list(new_field.keys())[0]
    if len(new_field[n_field_name]) > 0:
        join_field_topojson(ref_layer, new_field[n_field_name], n_field_name)

    tmp_part = get_name()
    tmp_path = ''.join(['/tmp/', tmp_part, '.geojson'])
    savefile(tmp_path, topojson_to_geojson(ref_layer).encode())

    try:
        result = await app.loop.run_in_executor(
            app["ThreadPool"],
            make_carto_doug,
            tmp_path,
            n_field_name,
            iterations)
    except asyncio.CancelledError:
        app['logger'].info('{} - Cancelled after {:.4f}s : carto_doug'.format(
            user_id, time.time() - st))
        return

    os.remove(tmp_path)
    savefile(tmp_path, result)
    res = await geojson_to_topojson(tmp_path, remove=True)

    new_name = '_'.join(["Carto_doug", str(iterations), n_field_name])
    res = res.replace(tmp_part, new_name)
    hash_val = mmh3_hash(res)
    asyncio.ensure_future(app['redis_conn'].set(
        '_'.join([user_id, str(hash_val), "NQ"]), res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('dougenik_time', time.time() - st))
    app['logger'].info('{} - timing : carto_doug : {:.4f}s'.format(
        user_id, time.time() - st))
    return ''.join(['{"key":', str(hash_val), ',"file":', res, '}'])
async def convert_csv_geo(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])
    st = time.time()

    file_name = posted_data.get("filename")
    data = posted_data.get("csv_file")
    hash_val = str(mmh3_hash(data))
    f_name = '_'.join([user_id, hash_val, "NQ"])

    result = await request.app['redis_conn'].get(f_name)
    if result:
        request.app['logger'].info(
            '{} - Used result from redis'.format(user_id))
        return web.Response(text=''.join(
            ['{"key":', hash_val, ',"file":', result.decode(), '}']))

    res = await rawcsv_to_geo(data)
    filepath = "/tmp/" + file_name + ".geojson"
    with open(filepath, 'wb') as f:
        f.write(res.encode())

    result = await geojson_to_topojson(filepath)
    if len(result) == 0:
        result = json.dumps({'Error': 'Wrong CSV input'})
    else:
        asyncio.ensure_future(request.app['redis_conn'].set(
            '_'.join([user_id, hash_val, "NQ"]), result, pexpire=86400000))

    request.app['logger'].info(
        '{} - timing : csv -> geojson -> topojson : {:.4f}s'.format(
            user_id, time.time() - st))
    return web.Response(
        text=''.join(['{"key":', hash_val, ',"file":', result, '}']))
async def get_stats_json(request):
    posted_data = await request.post()
    if not ('data' in posted_data
            and mmh3_hash(posted_data['data']) == 1163649321):
        return web.Response()

    redis_conn = request.app['redis_conn']
    stewart, doug, gridded, olson, links = await asyncio.gather(*[
        redis_conn.lrange('stewart_time', 0, -1),
        redis_conn.lrange('dougenik_time', 0, -1),
        redis_conn.lrange('gridded_time', 0, -1),
        redis_conn.lrange('olson_time', 0, -1),
        redis_conn.lrange('links_time', 0, -1),
    ])
    layers, sample_layers = await asyncio.gather(*[
        redis_conn.get('layers'),
        redis_conn.get('sample_layers')])
    view_onepage, single_view_onepage = await asyncio.gather(*[
        redis_conn.get('view_onepage'),
        redis_conn.get('single_view_onepage')])
    contact = await redis_conn.lrange('contact', 0, -1)
    count = await redis_conn.get('single_view_modulepage')

    return web.Response(text=json.dumps({
        "count": count,
        "layer": layers,
        "view_onepage": view_onepage,
        "single_view_onepage": single_view_onepage,
        "sample": sample_layers,
        "contact": contact,
        "t": {
            "stewart": stewart,
            "dougenik": doug,
            "gridded": gridded,
            "olson": olson,
            "links": links
        }
    }))
async def convert_topo(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])

    try:
        file_field = posted_data['file[]']
        name = file_field.filename
        data = file_field.file.read()
    except Exception as err:
        request.app['logger'].info("posted data :\n{}\nerr:\n{}".format(
            posted_data, err))
        return web.Response(text='{"Error": "Incorrect datatype"}')

    user_id = get_user_id(session_redis, request.app['app_users'])
    hash_val = str(mmh3_hash(data))
    f_name = '_'.join([user_id, hash_val])
    asyncio.ensure_future(request.app['redis_conn'].incr('layers'))

    result = await request.app['redis_conn'].get(f_name)
    if result:
        result = result.decode()
        request.app['logger'].debug(
            '{} - Used result from redis'.format(user_id))
        request.app['redis_conn'].pexpire(f_name, 86400000)
        return web.Response(text=''.join([
            '{"key":', hash_val,
            ',"file":', result.replace(hash_val, name), '}'
        ]))

    asyncio.ensure_future(request.app['redis_conn'].set(
        f_name, data, pexpire=86400000))
    request.app['logger'].debug('Caching the TopoJSON')
    return web.Response(text=''.join(['{"key":', hash_val, ',"file":null}']))
async def call_stewart(posted_data, user_id, app):
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    f_name = '_'.join([user_id, str(posted_data['topojson'])])

    point_layer = await app['redis_conn'].get(f_name)
    point_layer = json.loads(point_layer.decode())

    new_field1 = posted_data['variable1']
    new_field2 = posted_data['variable2']

    n_field_name1 = list(new_field1.keys())[0]
    if len(new_field1[n_field_name1]) > 0:
        join_field_topojson(point_layer, new_field1[n_field_name1],
                            n_field_name1)

    if new_field2:
        discretization = "percentiles"
        n_field_name2 = list(new_field2.keys())[0]
        if len(new_field2[n_field_name2]) > 0:
            join_field_topojson(point_layer, new_field2[n_field_name2],
                                n_field_name2)
    else:
        discretization = "jenks"
        n_field_name2 = None

    if posted_data['mask_layer']:
        f_name = '_'.join([user_id, str(posted_data['mask_layer'])])
        mask_layer = await app['redis_conn'].get(f_name)

    tmp_part = get_name()
    filenames = {
        'point_layer': ''.join(['/tmp/', tmp_part, '.geojson']),
        'mask_layer': ''.join(['/tmp/', get_name(), '.geojson'])
        if posted_data['mask_layer'] != "" else None
    }
    savefile(filenames['point_layer'],
             topojson_to_geojson(point_layer).encode())

    if filenames['mask_layer']:
        savefile(filenames['mask_layer'],
                 topojson_to_geojson(json.loads(mask_layer.decode())).encode())

    reusable_val = '_'.join([
        user_id,
        str(posted_data['topojson']),
        n_field_name1,
        n_field_name2 if n_field_name2 else "",
        str(posted_data["span"]),
        str(posted_data['beta']),
        str(posted_data['resolution']),
        posted_data['typefct'].lower()
    ])
    existing_obj = await app['redis_conn'].get(reusable_val)

    try:
        if existing_obj:
            res, breaks = await app.loop.run_in_executor(
                app["ThreadPool"],
                resume_stewart,
                existing_obj,
                int(posted_data['nb_class']),
                discretization,
                posted_data['user_breaks'],
                filenames["mask_layer"])
        else:
            res, breaks, dump_obj = await app.loop.run_in_executor(
                app["ProcessPool"],
                quick_stewart_mod,
                filenames['point_layer'],
                n_field_name1,
                int(posted_data['span']),
                float(posted_data['beta']),
                posted_data['typefct'].lower(),
                int(posted_data['nb_class']),
                discretization,
                posted_data['resolution'],
                filenames["mask_layer"],
                n_field_name2,
                posted_data['user_breaks'])
            asyncio.ensure_future(app['redis_conn'].set(
                reusable_val, dump_obj, pexpire=43200000))
    except asyncio.CancelledError:
        app['logger'].info(
            'Cancelled after {:.4f}s : stewart'.format(time.time() - st))
        return

    os.remove(filenames['point_layer'])
    if filenames['mask_layer']:
        os.remove(filenames['mask_layer'])

    new_name = '_'.join(['StewartPot', n_field_name1])
    res = await geojson_to_topojson2(res, new_name)
    hash_val = str(mmh3_hash(res))

    asyncio.ensure_future(app['redis_conn'].set(
        '_'.join([user_id, hash_val]), res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('stewart_time', time.time() - st))
    app['logger'].info('{} - timing : stewart_on_py : {:.4f}s'.format(
        user_id, time.time() - st))

    return "|||".join([
        ''.join(['{"key":', hash_val, ',"file":', res, '}']),
        json.dumps(breaks)
    ])
async def convert(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])
    proj_info_str = None

    # If a shapefile is provided as multiple files
    # (.shp, .dbf, .shx, and .prj are expected), not zipped :
    if "action" in posted_data and "file[]" not in posted_data:
        list_files = []
        for i in range(len(posted_data) - 1):
            field = posted_data.getall('file[{}]'.format(i))[0]
            file_name = ''.join(['/tmp/', user_id, '_', field[1]])
            list_files.append(file_name)
            savefile(file_name, field[2].read())
        shp_path = [i for i in list_files if 'shp' in i][0]
        layer_name = shp_path.replace(
            ''.join(['/tmp/', user_id, '_']), '').replace('.shp', '')
        hashed_input = mmh3_file(shp_path)
        name = shp_path.split(os.path.sep)[2]
        datatype = "shp"

    # If there is a single file (geojson, kml, gml or zip) to handle :
    elif "action" in posted_data and "file[]" in posted_data:
        try:
            field = posted_data.get('file[]')
            name = field[1]
            layer_name = name.split('.')[0]
            data = field[2].read()
            datatype = field[3]
            hashed_input = mmh3_hash(data)
            filepath = ''.join(['/tmp/', user_id, "_", name])
        except Exception as err:
            request.app['logger'].info("posted data :\n{}\nerr:\n{}".format(
                posted_data, err))
            return convert_error('Incorrect datatype')

    f_name = '_'.join([user_id, str(hashed_input)])
    asyncio.ensure_future(request.app['redis_conn'].incr('layers'))

    result = await request.app['redis_conn'].get(f_name)
    if result:
        request.app['logger'].debug(
            '{} - Used result from redis'.format(user_id))
        request.app['redis_conn'].pexpire(f_name, 86400000)
        return web.Response(text=''.join(
            ['{"key":', str(hashed_input),
             ',"file":', result.decode(), '}']))

    if "shp" in datatype:
        clean_files = lambda: [os.remove(_file) for _file in list_files]
        res = await request.app.loop.run_in_executor(
            request.app["ProcessPool"], ogr_to_geojson, shp_path)
        if not res:
            clean_files()
            return convert_error()
        result = await geojson_to_topojson2(res, layer_name)
        if not result:
            clean_files()
            return convert_error()
        asyncio.ensure_future(request.app['redis_conn'].set(
            f_name, result, pexpire=86400000))
        with open('/tmp/' + name.replace('.shp', '.prj'), 'r') as f:
            proj_info_str = f.read()
        clean_files()

    elif datatype in ('application/x-zip-compressed', 'application/zip'):
        dataZip = BytesIO(data)
        dir_path = '/tmp/{}{}/'.format(user_id, hashed_input)
        with ZipFile(dataZip) as myzip:
            list_files = myzip.namelist()
            list_files = [dir_path + i for i in list_files]
            slots = {"shp": None, "prj": None, "dbf": None, "shx": None}
            names = []
            try:
                for f in list_files:
                    name, ext = f.split('.')
                    names.append(name)
                    if 'shp' in ext:
                        slots['shp'] = f
                    elif 'prj' in ext:
                        slots['prj'] = f
                    elif 'shx' in ext:
                        slots['shx'] = f
                    elif 'dbf' in ext:
                        slots['dbf'] = f
                    elif 'cpg' in ext:
                        slots['cpg'] = f
                assert(all(v is not None for v in slots.values()))
                assert(all(name == names[0] for name in names))
                assert(4 <= len(list_files) < 8)
            except Exception as err:
                request.app['logger'].info(
                    'Error with content of zip file : {}'.format(err))
                return convert_error('Error with zip file content')
            os.mkdir(dir_path)
            myzip.extractall(path=dir_path)
            try:
                res = await request.app.loop.run_in_executor(
                    request.app["ProcessPool"], ogr_to_geojson, slots['shp'])
                if not res:
                    return convert_error()
                result = await geojson_to_topojson2(res, layer_name)
                if not result:
                    return convert_error()
                with open(slots['prj'], 'r') as f:
                    proj_info_str = f.read()
                asyncio.ensure_future(request.app['redis_conn'].set(
                    f_name, result, pexpire=86400000))
            except (asyncio.CancelledError, CancelledError):
                return
            except Exception as err:
                request.app['logger'].info(
                    'Error with content of zip file : {}'.format(err))
                return convert_error('Error with zip file content')
            finally:
                [os.remove(dir_path + _file)
                 for _file in os.listdir(dir_path)]
                os.removedirs(dir_path)

    elif ('octet-stream' in datatype or 'text/json' in datatype
            or 'application/geo+json' in datatype
            or 'application/vnd.google-earth.kml+xml' in datatype
            or 'application/gml+xml' in datatype) \
            and ("kml" in name.lower()
                 or "gml" in name.lower() or "geojson" in name.lower()):
        with open(filepath, 'wb') as f:
            f.write(data)
        res = await request.app.loop.run_in_executor(
            request.app["ThreadPool"], ogr_to_geojson, filepath)
        if not res:
            return convert_error('Error reading the input file')
        result = await geojson_to_topojson2(res, layer_name)
        if not result:
            return convert_error('Error reading the input file')
        asyncio.ensure_future(request.app['redis_conn'].set(
            f_name, result, pexpire=86400000))

    else:
        request.app['logger'].info("Incorrect datatype :\n{}\nname:\n{}".format(
            datatype, name))
        return convert_error('Incorrect datatype')

    request.app['logger'].debug(
        '{} - Converted, stored in redis and sent back to client'.format(
            user_id))
    return web.Response(text=''.join([
        '{"key":', str(hashed_input),
        ',"file":', result,
        ',"proj":', json.dumps(get_proj4_string(proj_info_str)),
        '}'
    ]))
async def convert(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])

    # If a shapefile is provided as multiple files
    # (.shp, .dbf, .shx, and .prj are expected), not zipped :
    if "action" in posted_data and "file[]" not in posted_data:
        list_files = []
        for i in range(len(posted_data) - 1):
            field = posted_data.getall('file[{}]'.format(i))[0]
            file_name = ''.join(['/tmp/', user_id, '_', field[1]])
            list_files.append(file_name)
            savefile(file_name, field[2].read())
        shp_path = [i for i in list_files if 'shp' in i][0]
        hashed_input = mmh3_file(shp_path)
        name = shp_path.split(os.path.sep)[2]
        datatype = "shp"

    # If there is a single file (geojson, kml, gml or zip) to handle :
    elif "action" in posted_data and "file[]" in posted_data:
        try:
            field = posted_data.get('file[]')
            name = field[1]
            data = field[2].read()
            datatype = field[3]
            print(datatype)
            hashed_input = mmh3_hash(data)
            filepath = ''.join(['/tmp/', user_id, "_", name])
        except Exception as err:
            print("posted data :\n", posted_data)
            print("err\n", err)
            return web.Response(text='{"Error": "Incorrect datatype"}')

    f_name = '_'.join([user_id, str(hashed_input)])
    f_nameQ = '_'.join([f_name, "Q"])
    f_nameNQ = '_'.join([f_name, "NQ"])
    asyncio.ensure_future(request.app['redis_conn'].incr('layers'))

    results = await request.app['redis_conn'].keys(f_name)
    if results:
        result = await request.app['redis_conn'].get(f_nameQ)
        request.app['logger'].info(
            '{} - Used result from redis'.format(user_id))
        request.app['redis_conn'].pexpire(f_nameQ, 86400000)
        request.app['redis_conn'].pexpire(f_nameNQ, 86400000)
        return web.Response(text=''.join(
            ['{"key":', str(hashed_input),
             ',"file":', result.decode(), '}']))

    if "shp" in datatype:
        res = await ogr_to_geojson(shp_path, to_latlong=True)
        filepath2 = '/tmp/' + name.replace('.shp', '.geojson')
        with open(filepath2, 'wb') as f:
            f.write(res)
        result = await geojson_to_topojson(filepath2, "-q 1e5")
        result = result.replace(''.join([user_id, '_']), '')
        asyncio.ensure_future(
            store_non_quantized(filepath2, f_nameNQ,
                                request.app['redis_conn']))
        asyncio.ensure_future(request.app['redis_conn'].set(
            f_nameQ, result, pexpire=86400000))
        [os.remove(file) for file in list_files]

    elif datatype in ('application/x-zip-compressed', 'application/zip'):
        dataZip = BytesIO(data)
        dir_path = '/tmp/{}{}/'.format(user_id, hashed_input)
        os.mkdir(dir_path)
        with ZipFile(dataZip) as myzip:
            list_files = myzip.namelist()
            list_files = [dir_path + i for i in list_files]
            shp_path = [i for i in list_files if 'shp' in i][0]
            myzip.extractall(path=dir_path)
            res = await ogr_to_geojson(shp_path, to_latlong=True)
            filepath2 = shp_path.replace(
                "{}{}/".format(user_id, hashed_input), "").replace(
                    '.shp', '.geojson')
            with open(filepath2, 'wb') as f:
                f.write(res)
            result = await geojson_to_topojson(filepath2, "-q 1e5")
            result = result.replace(''.join([user_id, '_']), '')
            asyncio.ensure_future(request.app['redis_conn'].set(
                f_nameQ, result, pexpire=86400000))
            asyncio.ensure_future(
                store_non_quantized(filepath2, f_nameNQ,
                                    request.app['redis_conn']))
        [os.remove(dir_path + file) for file in os.listdir(dir_path)]
        os.removedirs(dir_path)

    elif ('octet-stream' in datatype or 'text/json' in datatype
            or 'application/geo+json' in datatype
            or 'application/vnd.google-earth.kml+xml' in datatype
            or 'application/gml+xml' in datatype) \
            and ("kml" in name.lower()
                 or "gml" in name.lower() or "geojson" in name.lower()):
        with open(filepath, 'wb') as f:
            f.write(data)
        res = await ogr_to_geojson(filepath, to_latlong=True)
        if len(res) == 0:
            return web.Response(
                text=json.dumps({'Error': 'Error reading the input file'}))
        if 'gml' in name.lower():
            os.remove(filepath.replace('gml', 'gfs'))
        with open(filepath, 'wb') as f:
            f.write(res)
        result = await geojson_to_topojson(filepath, "-q 1e5")
        if len(result) == 0:
            return web.Response(
                text='{"Error": "Error converting input file"}')
        else:
            result = result.replace(''.join([user_id, '_']), '')
            asyncio.ensure_future(
                store_non_quantized(filepath, f_nameNQ,
                                    request.app['redis_conn']))
            asyncio.ensure_future(request.app['redis_conn'].set(
                f_nameQ, result, pexpire=86400000))

    else:
        print(datatype, name)
        return web.Response(text='{"Error": "Incorrect datatype"}')

    request.app['logger'].info(
        '{} - Converted, stored in redis and sent back to client'.format(
            user_id))
    return web.Response(text=''.join(
        ['{"key":', str(hashed_input), ',"file":', result, '}']))
def mmh3_file(path):
    """Return the murmur3 hash of a file's raw content."""
    with open(path, 'rb') as f:
        buf = f.read()
    return mmh3_hash(buf)
async def cache_input_topojson(request):
    posted_data, session_redis = await asyncio.gather(*[
        request.post(), get_session(request)])
    params = request.match_info['params']

    if "sample_data" in params:
        user_id = get_user_id(session_redis, request.app['app_users'])
        name = posted_data.get('layer_name')
        path = request.app['db_layers'][name]
        hash_val = str(mmh3_hash(path))
        fp_name = '_'.join([user_id, name])
        f_name = '_'.join([user_id, hash_val])
        f_nameQ = '_'.join([f_name, "Q"])
        f_nameNQ = '_'.join([f_name, "NQ"])
        asyncio.ensure_future(
            request.app['redis_conn'].incr('sample_layers'))

        result = await request.app['redis_conn'].get(f_nameQ)
        if result:
            result = result.decode()
            request.app['logger'].info(
                '{} - Used result from redis'.format(user_id))
            request.app['redis_conn'].pexpire(f_nameQ, 86400000)
            request.app['redis_conn'].pexpire(f_nameNQ, 86400000)
            return web.Response(text=''.join([
                '{"key":', hash_val,
                ',"file":', result.replace(''.join([user_id, '_']), ''), '}'
            ]))
        else:
            res = await ogr_to_geojson(path, to_latlong=True)
            request.app['logger'].info(
                '{} - Transform coordinates from GeoJSON'.format(user_id))
            f_path = '/tmp/' + fp_name + ".geojson"
            with open(f_path, 'wb') as f:
                f.write(res)
            result = await geojson_to_topojson(f_path, "-q 1e5")
            result = result.replace(''.join([user_id, '_']), '')
            asyncio.ensure_future(
                store_non_quantized(f_path, f_nameNQ,
                                    request.app['redis_conn']))
            asyncio.ensure_future(request.app['redis_conn'].set(
                f_nameQ, result, pexpire=86400000))
            print('Caching the TopoJSON')
            return web.Response(text=''.join(
                ['{"key":', hash_val, ',"file":', result, '}']))

    elif "user" in params:
        try:
            file_field = posted_data['file[]']
            name = file_field.filename
            data = file_field.file.read()
        except Exception as err:
            print("posted data :\n", posted_data)
            print("err\n", err)
            return web.Response(text='{"Error": "Incorrect datatype"}')

        user_id = get_user_id(session_redis, request.app['app_users'])
        hash_val = str(mmh3_hash(data))
        f_name = '_'.join([user_id, hash_val])
        f_nameQ = '_'.join([f_name, "Q"])
        f_nameNQ = '_'.join([f_name, "NQ"])
        asyncio.ensure_future(request.app['redis_conn'].incr('layers'))

        result = await request.app['redis_conn'].get(f_nameNQ)
        if result:
            result = result.decode()
            request.app['logger'].info(
                '{} - Used result from redis'.format(user_id))
            request.app['redis_conn'].pexpire(f_nameQ, 86400000)
            request.app['redis_conn'].pexpire(f_nameNQ, 86400000)
            return web.Response(text=''.join([
                '{"key":', hash_val,
                ',"file":', result.replace(hash_val, name), '}'
            ]))

        asyncio.ensure_future(request.app['redis_conn'].set(
            f_nameNQ, data, pexpire=86400000))
        asyncio.ensure_future(request.app['redis_conn'].set(
            f_nameQ, data, pexpire=86400000))
        print('Caching the TopoJSON')
        return web.Response(
            text=''.join(['{"key":', hash_val, ',"file":null}']))
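# Hypothetical wiring sketch (assumption: the handlers above live in this
# module; the URL paths are illustrative only, not the application's actual
# routing table; setup of 'redis_conn', 'app_users', 'logger', 'db_layers'
# and the executors is omitted).
def create_app():
    app = web.Application()
    app.router.add_post('/convert_to_topojson', convert)
    app.router.add_post('/convert_csv_geo', convert_csv_geo)
    app.router.add_post('/convert_extrabasemap', convert_extrabasemap)
    # '{params}' feeds request.match_info['params'] in cache_input_topojson:
    app.router.add_post('/cache_topojson/{params}', cache_input_topojson)
    app.router.add_post('/get_layer_sample', get_sample_layer)
    return app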