async def carto_gridded(posted_data, user_id, app):
    """
    Build a gridded version of a cached user layer and store the result.

    Loads the reference TopoJSON layer from redis, optionally joins one
    extra data field onto it, computes the grid layer in the process
    pool, converts the result back to TopoJSON and caches it in redis
    under a hash-derived key.

    Args:
        posted_data: request payload; its "json" entry is a JSON string
            with keys 'topojson', 'var_name', 'cellsize' and 'grid_shape'.
        user_id: identifier (string) of the requesting user.
        app: aiohttp application, providing 'redis_conn', 'logger',
            'ProcessPool' and the running event loop.

    Returns:
        A string '{"key":<hash>,"file":<topojson>}', or None when the
        task was cancelled.
    """
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    # Cached layers are keyed "<user>_<layer-hash>_NQ" (non-quantized).
    f_name = '_'.join([user_id, str(posted_data['topojson']), "NQ"])
    ref_layer = await app['redis_conn'].get(f_name)
    ref_layer = json.loads(ref_layer.decode())
    new_field = posted_data['var_name']
    n_field_name = list(new_field.keys())[0]
    if len(new_field[n_field_name]) > 0:
        join_field_topojson(ref_layer, new_field[n_field_name],
                            n_field_name)
    tmp_part = get_name()
    filenames = {
        "src_layer": ''.join(['/tmp/', tmp_part, '.geojson']),
        "result": None,
    }
    savefile(filenames['src_layer'],
             topojson_to_geojson(ref_layer).encode())
    try:
        # CPU-heavy, so run in the process pool to keep the loop free.
        result_geojson = await app.loop.run_in_executor(
            app["ProcessPool"],
            get_grid_layer,
            filenames['src_layer'],
            posted_data["cellsize"],
            n_field_name,
            posted_data["grid_shape"].lower())
    except asyncio.CancelledError:
        # BUG FIX: the original format string had a single '{:.4f}'
        # placeholder but was passed (user_id, elapsed); a string
        # user_id then raised inside the except handler. Add the '{}'
        # placeholder so both arguments are consumed correctly.
        app['logger'].info(
            '{} - Cancelled after {:.4f}s : get_grid_layer'.format(
                user_id, time.time() - st))
        return
    savefile(filenames['src_layer'], result_geojson.encode())
    res = await geojson_to_topojson(filenames['src_layer'], remove=True)
    # BUG FIX: elapsed time is now - start (was 'st - time.time()',
    # which logged a negative duration).
    app['logger'].info('{} - Gridded_on_py - {:.4f}'.format(
        user_id, time.time() - st))
    new_name = '_'.join(
        ['Gridded', str(posted_data["cellsize"]), n_field_name])
    res = res.replace(tmp_part, new_name)
    hash_val = str(mmh3_hash(res))
    # Fire-and-forget: cache the result (24h TTL) and push the timing.
    asyncio.ensure_future(app['redis_conn'].set(
        '_'.join([user_id, hash_val, "NQ"]), res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('gridded_time', time.time() - st))
    return ''.join(['{"key":', hash_val, ',"file":', res, '}'])
async def receiv_layer(request):
    """
    Receive a GeoJSON layer from the client, convert it to TopoJSON and
    cache it in redis; reply with the layer's hash key.
    """
    # Fetch the form data and the session concurrently.
    form_data, session_redis = await asyncio.gather(
        *[request.post(), get_session(request)])
    user_id = get_user_id(session_redis, request.app['app_users'])
    layer_name = form_data['layer_name']
    geojson_str = form_data['geojson']
    digest = mmh3_hash(geojson_str)
    # Redis key: "<user>_<hash>_NQ" (non-quantized layer).
    base_key = '_'.join([user_id, str(digest)])
    redis_key = '_'.join([base_key, "NQ"])
    # Write the payload to a uniquely-named temp file for conversion.
    tmp_token = get_name()
    tmp_path = "".join(['/tmp/', tmp_token, '.geojson'])
    with open(tmp_path, 'w', encoding='utf-8') as fh:
        fh.write(geojson_str)
    topo = await geojson_to_topojson(tmp_path)
    # Rename the embedded layer from the temp token to the user's name.
    topo = topo.replace(tmp_token, layer_name)
    # Cache asynchronously with a 24h TTL; don't block the response.
    asyncio.ensure_future(
        request.app['redis_conn'].set(redis_key, topo, pexpire=86400000))
    return web.Response(text=''.join(['{"key":', str(digest), '}']))
async def carto_doug(posted_data, user_id, app):
    """
    Compute a Dougenik cartogram of a cached user layer.

    Loads the quantized ("Q") reference TopoJSON from redis, optionally
    joins one extra data field, runs the Dougenik algorithm in the
    thread pool, then converts and caches the deformed layer.

    Args:
        posted_data: request payload; its "json" entry is a JSON string
            with keys 'topojson', 'var_name' and 'iterations'.
        user_id: identifier (string) of the requesting user.
        app: aiohttp application, providing 'redis_conn', 'logger',
            'ThreadPool' and the running event loop.

    Returns:
        A string '{"key":<hash>,"file":<topojson>}', or None when the
        task was cancelled.
    """
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    # NOTE: this handler reads the "Q" (quantized) cache entry, unlike
    # the other handlers which use "NQ" — presumably intentional.
    f_name = '_'.join([user_id, str(posted_data['topojson']), "Q"])
    ref_layer = await app['redis_conn'].get(f_name)
    ref_layer = json.loads(ref_layer.decode())
    new_field = posted_data['var_name']
    iterations = int(posted_data['iterations'])
    n_field_name = list(new_field.keys())[0]
    if len(new_field[n_field_name]) > 0:
        join_field_topojson(ref_layer, new_field[n_field_name],
                            n_field_name)
    tmp_part = get_name()
    tmp_path = ''.join(['/tmp/', tmp_part, '.geojson'])
    savefile(tmp_path, topojson_to_geojson(ref_layer).encode())
    try:
        result = await app.loop.run_in_executor(
            app["ThreadPool"], make_carto_doug,
            tmp_path, n_field_name, iterations)
    except asyncio.CancelledError:
        # BUG FIX: the original format string had a single '{:.4f}'
        # placeholder but was passed (user_id, elapsed); a string
        # user_id then raised inside the except handler. Add the '{}'
        # placeholder so both arguments are consumed correctly.
        app['logger'].info(
            '{} - Cancelled after {:.4f}s : carto_doug'.format(
                user_id, time.time() - st))
        return
    # Replace the source file with the cartogram result.
    os.remove(tmp_path)
    savefile(tmp_path, result)
    res = await geojson_to_topojson(tmp_path, remove=True)
    new_name = '_'.join(["Carto_doug", str(iterations), n_field_name])
    res = res.replace(tmp_part, new_name)
    hash_val = mmh3_hash(res)
    # Fire-and-forget: cache the result (24h TTL) and push the timing.
    asyncio.ensure_future(app['redis_conn'].set('_'.join(
        [user_id, str(hash_val), "NQ"]), res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('dougenik_time', time.time() - st))
    app['logger'].info('{} - timing : carto_doug : {:.4f}s'.format(
        user_id, time.time() - st))
    return ''.join(['{"key":', str(hash_val), ',"file":', res, '}'])
async def compute_olson(posted_data, user_id, app):
    """
    Compute an Olson-like cartogram of a cached user layer.

    Loads the reference TopoJSON from redis, converts it to GeoJSON,
    applies the per-feature scale transform in the thread pool (the
    GeoJSON dict is mutated in place), then converts and caches the
    transformed layer.

    Args:
        posted_data: request payload; its "json" entry is a JSON string
            with keys 'topojson', 'scale_values' and 'field_name'.
        user_id: identifier (string) of the requesting user.
        app: aiohttp application, providing 'redis_conn', 'logger',
            'ThreadPool' and the running event loop.

    Returns:
        A string '{"key":<hash>,"file":<topojson>}', or None when the
        task was cancelled.
    """
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    f_name = '_'.join([user_id, str(posted_data['topojson']), "NQ"])
    ref_layer = await app['redis_conn'].get(f_name)
    ref_layer = json.loads(ref_layer.decode())
    scale_values = posted_data['scale_values']
    ref_layer_geojson = convert_from_topo(ref_layer)
    try:
        # olson_transform mutates ref_layer_geojson in place; no result
        # value is expected from the executor call.
        await app.loop.run_in_executor(
            app["ThreadPool"], olson_transform,
            ref_layer_geojson, scale_values)
    except asyncio.CancelledError:
        # BUG FIX: the original format string had a single '{:.4f}'
        # placeholder but was passed (user_id, elapsed); a string
        # user_id then raised inside the except handler. Add the '{}'
        # placeholder so both arguments are consumed correctly.
        app['logger'].info(
            '{} - Cancelled after {:.4f}s : olson_transform'.format(
                user_id, time.time() - st))
        return
    tmp_part = get_name()
    f_name = "".join(["/tmp/", tmp_part, ".geojson"])
    savefile(f_name, json.dumps(ref_layer_geojson).encode())
    res = await geojson_to_topojson(f_name, remove=True)
    new_name = "_".join(["Olson_carto", str(posted_data["field_name"])])
    res = res.replace(tmp_part, new_name)
    hash_val = str(mmh3_hash(res))
    # Fire-and-forget: cache the result (24h TTL) and push the timing.
    asyncio.ensure_future(app['redis_conn'].set('_'.join(
        [user_id, hash_val, "NQ"]), res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('olson_time', time.time() - st))
    app['logger'].info(
        '{} - timing : olson-like cartogram : {:.4f}s'.format(
            user_id, time.time() - st))
    return ''.join(['{"key":', hash_val, ',"file":', res, '}'])
async def links_map(posted_data, user_id, app):
    """
    Build a links (flow) layer between features of a cached user layer.

    Loads the reference TopoJSON from redis, optionally joins a field
    used for the join with the flow table, builds the link geometries in
    the thread pool, then converts and caches the resulting layer.

    Returns a string '{"key":<hash>,"file":<topojson>}'.
    """
    start = time.time()
    params = json.loads(posted_data.get("json"))
    cache_key = '_'.join([user_id, str(params['topojson']), "NQ"])
    raw = await app['redis_conn'].get(cache_key)
    topo_layer = json.loads(raw.decode())
    join_spec = params['join_field']
    field_name = list(join_spec.keys())[0]
    if len(join_spec[field_name]) > 0:
        join_field_topojson(topo_layer, join_spec[field_name], field_name)
    geo_layer = convert_from_topo(topo_layer)
    # Geometry construction happens off the event loop.
    links_geojson = await app.loop.run_in_executor(
        app["ThreadPool"],
        make_geojson_links,
        geo_layer,
        params["csv_table"],
        params["field_i"],
        params["field_j"],
        params["field_fij"],
        field_name)
    token = get_name()
    tmp_file = ''.join(['/tmp/', token, '.geojson'])
    savefile(tmp_file, links_geojson)
    topo_out = await geojson_to_topojson(tmp_file, remove=True)
    layer_title = ''.join(["Links_", field_name])
    topo_out = topo_out.replace(token, layer_title)
    digest = mmh3_hash(topo_out)
    # Cache the result asynchronously with a 24h TTL.
    asyncio.ensure_future(app['redis_conn'].set(
        '_'.join([user_id, str(digest), "NQ"]),
        topo_out, pexpire=86400000))
    app['logger'].info('{} - timing : links_on_py : {:.4f}s'.format(
        user_id, time.time() - start))
    asyncio.ensure_future(app['redis_conn'].lpush(
        'links_time', time.time() - start))
    return ''.join(['{"key":', str(digest), ',"file":', topo_out, '}'])
async def call_stewart(posted_data, user_id, app):
    """
    Compute a Stewart potential (smoothed) layer from a cached point layer.

    Loads the point layer from redis, optionally joins one or two data
    fields, then either resumes a previously-computed potential (cached
    under a parameter-derived key) or computes it from scratch in the
    process pool. The result is converted to TopoJSON, cached, and
    returned together with the class breaks.

    Returns a string '{"key":<hash>,"file":<topojson>}|||<breaks JSON>',
    or None when the task was cancelled.
    """
    st = time.time()
    posted_data = json.loads(posted_data.get("json"))
    # Note: no "NQ"/"Q" suffix here, unlike the other handlers.
    f_name = '_'.join([user_id, str(posted_data['topojson'])])
    point_layer = await app['redis_conn'].get(f_name)
    point_layer = json.loads(point_layer.decode())
    new_field1 = posted_data['variable1']
    new_field2 = posted_data['variable2']
    n_field_name1 = list(new_field1.keys())[0]
    if len(new_field1[n_field_name1]) > 0:
        join_field_topojson(point_layer, new_field1[n_field_name1],
                            n_field_name1)
    if new_field2:
        # Two variables: the result is a ratio, classed by percentiles.
        discretization = "percentiles"
        n_field_name2 = list(new_field2.keys())[0]
        if len(new_field2[n_field_name2]) > 0:
            join_field_topojson(point_layer, new_field2[n_field_name2],
                                n_field_name2)
    else:
        discretization = "jenks"
        n_field_name2 = None
    if posted_data['mask_layer']:
        # Reuse f_name for the mask layer's redis key.
        f_name = '_'.join([user_id, str(posted_data['mask_layer'])])
        mask_layer = await app['redis_conn'].get(f_name)
    tmp_part = get_name()
    filenames = {
        'point_layer': ''.join(['/tmp/', tmp_part, '.geojson']),
        # NOTE(review): this test uses `!= ""` while the fetch above
        # used truthiness; if 'mask_layer' is falsy but not the empty
        # string (e.g. None), `mask_layer` is unbound and the savefile
        # below would raise NameError — confirm callers always send "".
        'mask_layer': ''.join(['/tmp/', get_name(), '.geojson'])
        if posted_data['mask_layer'] != "" else None
    }
    savefile(filenames['point_layer'],
             topojson_to_geojson(point_layer).encode())
    if filenames['mask_layer']:
        savefile(filenames['mask_layer'],
                 topojson_to_geojson(
                     json.loads(mask_layer.decode())).encode())
    # Cache key encoding every parameter that affects the raw potential,
    # so an identical request can skip the expensive computation.
    reusable_val = '_'.join([user_id,
                             str(posted_data['topojson']),
                             n_field_name1,
                             n_field_name2 if n_field_name2 else "",
                             str(posted_data["span"]),
                             str(posted_data['beta']),
                             str(posted_data['resolution']),
                             posted_data['typefct'].lower()])
    existing_obj = await app['redis_conn'].get(reusable_val)
    try:
        if existing_obj:
            # Cache hit: only re-class the existing potential surface
            # (cheap, thread pool is enough).
            res, breaks = await app.loop.run_in_executor(
                app["ThreadPool"],
                resume_stewart,
                existing_obj,
                int(posted_data['nb_class']),
                discretization,
                posted_data['user_breaks'],
                filenames["mask_layer"])
        else:
            # Cache miss: full computation in the process pool; also
            # returns a serialized object cached for 12h for reuse.
            res, breaks, dump_obj = await app.loop.run_in_executor(
                app["ProcessPool"],
                quick_stewart_mod,
                filenames['point_layer'],
                n_field_name1,
                int(posted_data['span']),
                float(posted_data['beta']),
                posted_data['typefct'].lower(),
                int(posted_data['nb_class']),
                discretization,
                posted_data['resolution'],
                filenames["mask_layer"],
                n_field_name2,
                posted_data['user_breaks'])
            asyncio.ensure_future(app['redis_conn'].set(
                reusable_val, dump_obj, pexpire=43200000))
    except asyncio.CancelledError:
        app['logger'].info(
            'Cancelled after {:.4f}s : stewart'.format(time.time() - st))
        return
    # Clean up the temp files on the success path.
    os.remove(filenames['point_layer'])
    if filenames['mask_layer']:
        os.remove(filenames['mask_layer'])
    new_name = '_'.join(['StewartPot', n_field_name1])
    res = await geojson_to_topojson2(res, new_name)
    hash_val = str(mmh3_hash(res))
    # Cache the result (24h TTL) and push the timing, fire-and-forget.
    asyncio.ensure_future(app['redis_conn'].set(
        '_'.join([user_id, hash_val]), res, pexpire=86400000))
    asyncio.ensure_future(
        app['redis_conn'].lpush('stewart_time', time.time() - st))
    app['logger'].info('{} - timing : stewart_on_py : {:.4f}s'.format(
        user_id, time.time() - st))
    # Payload and class breaks are separated by the '|||' marker.
    return "|||".join([
        ''.join(['{"key":', hash_val, ',"file":', res, '}']),
        json.dumps(breaks)])