def create_app(env='Defaults'):
    """
    Creates application.

    Loads configuration from the file named by the LOPOCS_SETTINGS
    environment variable, falling back on conf/lopocs.yml in the source
    tree, then wires up the API blueprint, database session and stats.

    :returns: flask application instance
    """
    app = Flask(__name__)
    cfgfile = os.environ.get('LOPOCS_SETTINGS')
    if cfgfile:
        app.config.update(load_yaml_config(cfgfile))
    else:
        cfgfile = Path(__file__).parent / '..' / 'conf' / 'lopocs.yml'
        # BUG FIX: Path.resolve() does not raise FileNotFoundError for a
        # missing file on python >= 3.6, so the previous try/except never
        # triggered; test existence explicitly before resolving.
        if not cfgfile.exists():
            app.logger.critical('no config file found !!')
            sys.exit(1)
        cfgfile = cfgfile.resolve()
        app.config.update(load_yaml_config(str(cfgfile)))
    app.logger.debug('loading config from {}'.format(cfgfile))
    # load extensions
    if 'URL_PREFIX' in app.config:
        blueprint = Blueprint('api', __name__, url_prefix=app.config['URL_PREFIX'])
    else:
        blueprint = Blueprint('api', __name__)
    api.init_app(blueprint)
    app.register_blueprint(blueprint)
    Session.init_app(app)
    Config.init(app.config)
    if Config.STATS:
        Stats.init()
    return app
def get_infos():
    """Gather basic statistics about the stored patches.

    Returns a dict holding the 2D bounding box (xmin/xmax/ymin/ymax),
    the total patch count under 'npatchs', and the box extents under
    'dx' / 'dy'.
    """
    infos = dict(Session.boundingbox2d())
    # NOTE(review): the count target is Session.column — presumably it
    # names a table-like relation in this script's setup; confirm against
    # how Session is initialized by the caller.
    count_sql = "select count(*) from {0}".format(Session.column)
    infos['npatchs'] = Session.query_aslist(count_sql)[0]
    infos['dx'] = infos['xmax'] - infos['xmin']
    infos['dy'] = infos['ymax'] - infos['ymin']
    #infos['dz'] = infos['zmax'] - infos['zmin']
    return infos
def tileset(table, column, server_url, work_dir):
    """
    (Re)build a tileset.json for a given table
    """
    # intialize flask application
    create_app()
    out_dir = Path(work_dir)
    # table names are always schema-qualified
    if '.' not in table:
        table = 'public.{}'.format(table)
    lpsession = Session(table, column)
    # initialize range for level of details
    bb = lpsession.boundingbox
    bbox = [bb[key] for key in ('xmin', 'ymin', 'zmin', 'xmax', 'ymax', 'zmax')]
    pending('Building tileset from database')
    hierarchy = threedtiles.build_hierarchy_from_pg(lpsession, server_url, bbox)
    ok()
    tileset = os.path.join(
        str(out_dir.resolve()),
        'tileset-{}.{}.json'.format(table, column))
    pending('Writing tileset to disk')
    with io.open(tileset, 'wb') as out:
        out.write(hierarchy.encode())
    ok()
def build_index_by_morton(infos, side_x, side_y):
    """Assign a Morton code to every patch for spatial ordering.

    For each patch, computes which grid cell (side_x by side_y) its
    center falls in, encodes (col, row) as a Morton code and stores it
    in a freshly (re)created ``morton`` column.

    :param infos: dict with 'npatchs', 'xmin', 'ymin' (see get_infos())
    :param side_x: grid cell width
    :param side_y: grid cell height
    """
    # add an index column
    # NOTE(review): ALTER TABLE targets Session.column — presumably it
    # holds the table name in this script's setup; confirm.
    sql = ("ALTER TABLE {0} drop column if exists morton;"
           "ALTER TABLE {0} add column morton integer"
           .format(Session.column)
           )
    Session.db.cursor().execute(sql)
    for n in range(0, infos['npatchs']):
        # one-line progress display, overwritten in place via \r
        print("{0}/{1}\r".format(n, infos['npatchs']), end='')
        # fetch the 2D extent of patch n+1 (ids assumed 1-based, dense)
        sql = ("select pc_patchmin({0}, 'x') as xmin, "
               "pc_patchmax({0}, 'x') as xmax, "
               "pc_patchmin({0}, 'y') as ymin, "
               "pc_patchmax({0}, 'y') as ymax "
               "from {1} where id = {2}"
               .format(Session.column, Session.table, n+1))
        res = Session.query_asdict(sql)[0]
        # cell center derived from the patch minimum corner
        center_x = float(res['xmin']) + side_x/2
        center_y = float(res['ymin']) + side_y/2
        #col = math.floor((float(res['xmin']) - infos['xmin']) / side)
        #row = math.floor((float(res['ymin']) - infos['ymin']) / side)
        col = math.floor((center_x - infos['xmin']) / side_x)
        row = math.floor((center_y - infos['ymin']) / side_y)
        morton = morton_revert_code(col, row)
        # NOTE(review): table name 'pa' is hardcoded here while the
        # select above uses Session.table — verify they always match.
        sql = ("update pa set morton = {0}"
               " where id = {1}".format(morton, n+1))
        Session.db.cursor().execute(sql)
def cmd_pg(message, request):
    """Run *request* through the db session and pretty-print the outcome.

    Echoes *message*, then prints the first cell of the first result row
    in green on success, or the error text via ko() on failure / empty
    result.
    """
    click.echo('{} ... '.format(message), nl=False)
    try:
        rows = Session.query(request)
        if not rows:
            raise Exception('Not found')
        green(rows[0][0])
    except Exception as error:
        ko(str(error))
def cmd_pg(message, request):
    """Execute a session query and report success or failure on the console.

    Prints *message* first (no newline); an empty result is treated as a
    failure and reported through ko(), like any raised exception.
    """
    click.echo('{} ... '.format(message), nl=False)
    try:
        result = Session.query(request)
        if result:
            green(result[0][0])
        else:
            raise Exception('Not found')
    except Exception as exc:
        ko(str(exc))
def create_app(env='Defaults'):
    """
    Creates application.

    Loads configuration from the file named by the LOPOCS_SETTINGS
    environment variable, falling back on conf/lopocs.yml in the source
    tree, then wires up the API blueprint, database session and stats.

    :returns: flask application instance
    """
    app = Flask(__name__)
    cfgfile = os.environ.get('LOPOCS_SETTINGS')
    if cfgfile:
        app.config.update(load_yaml_config(cfgfile))
    else:
        cfgfile = Path(__file__).parent / '..' / 'conf' / 'lopocs.yml'
        # BUG FIX: Path.resolve() does not raise FileNotFoundError for a
        # missing file on python >= 3.6, so the previous try/except never
        # triggered; test existence explicitly before resolving.
        if not cfgfile.exists():
            # critical, not warning: we are about to abort
            logger.critical('no config file found !!')
            sys.exit(1)
        cfgfile = cfgfile.resolve()
        app.config.update(load_yaml_config(str(cfgfile)))
    # (removed leftover debug print of cfgfile; logger.debug below reports it)
    set_level(app.config['LOG_LEVEL'])
    logger.debug('loading config from {}'.format(cfgfile))
    # load extensions
    if 'URL_PREFIX' in app.config:
        blueprint = Blueprint('api', __name__, url_prefix=app.config['URL_PREFIX'])
    else:
        blueprint = Blueprint('api', __name__)
    api.init_app(blueprint)
    app.register_blueprint(blueprint)
    Session.init_app(app)
    Config.init(app.config)
    if Config.STATS:
        Stats.init()
    return app
def _load(filename, table, column, work_dir, server_url, capacity, usewith, srid=0):
    '''load pointclouds data using pdal and add metadata needed by lopocs'''
    # intialize flask application
    app = create_app()
    filename = Path(filename)
    work_dir = Path(work_dir)
    extension = filename.suffix[1:].lower()
    # laz uses las reader in PDAL
    extension = extension if extension != 'laz' else 'las'
    basename = filename.stem
    basedir = filename.parent
    pending('Creating metadata table')
    Session.create_pointcloud_lopocs_table()
    ok()
    pending('Reading summary with PDAL')
    # the pipeline json written later for this file/table pair
    json_path = os.path.join(
        str(work_dir.resolve()),
        '{basename}_{table}_pipeline.json'.format(**locals()))
    # tablename should be always prefixed
    if '.' not in table:
        table = 'public.{}'.format(table)
    # ask pdal for the file's bounds and srs
    cmd = "pdal info --summary {}".format(filename)
    try:
        output = check_output(shlex.split(cmd))
    except CalledProcessError as e:
        fatal(e)
    summary = json.loads(output.decode())['summary']
    ok()
    if 'srs' not in summary and not srid:
        fatal(
            'Unable to find the spatial reference system, please provide a SRID with option --srid'
        )
    if not srid:
        # find authority code in wkt string
        # NOTE(review): non-raw string with \d — works but emits a
        # DeprecationWarning on recent pythons; consider r'EPSG","(\d+)"'
        srid = re.findall('EPSG","(\d+)"', summary['srs']['wkt'])[-1]
    p = Proj(init='epsg:{}'.format(srid))
    if p.is_latlong():
        # geographic: degrees need much finer x/y quantization
        scale_x, scale_y, scale_z = (1e-6, 1e-6, 1e-2)
    else:
        # projection or geocentric
        scale_x, scale_y, scale_z = (0.01, 0.01, 0.01)
    # offsets at the center of each axis range
    offset_x = summary['bounds']['X']['min'] + (
        summary['bounds']['X']['max'] - summary['bounds']['X']['min']) / 2
    offset_y = summary['bounds']['Y']['min'] + (
        summary['bounds']['Y']['max'] - summary['bounds']['Y']['min']) / 2
    offset_z = summary['bounds']['Z']['min'] + (
        summary['bounds']['Z']['max'] - summary['bounds']['Z']['min']) / 2
    reproject = ""
    if usewith == 'cesium':
        from_srid = srid
        # cesium only use epsg:4978, so we must reproject before loading into pg
        srid = 4978
        # pdal reprojection filter stage, spliced into PDAL_PIPELINE below
        reproject = """
    {{
        "type":"filters.reprojection",
        "in_srs":"EPSG:{from_srid}",
        "out_srs":"EPSG:{srid}"
    }},""".format(**locals())
        # transform bounds in new coordinate system
        pini = Proj(init='epsg:{}'.format(from_srid))
        pout = Proj(init='epsg:{}'.format(srid))
        # recompute offset in new space and start at 0
        pending('Reprojected bounds', nl=True)
        # xmin, ymin, zmin = transform(pini, pout, offset_x, offset_y, offset_z)
        xmin, ymin, zmin = transform(pini, pout,
                                     summary['bounds']['X']['min'],
                                     summary['bounds']['Y']['min'],
                                     summary['bounds']['Z']['min'])
        xmax, ymax, zmax = transform(pini, pout,
                                     summary['bounds']['X']['max'],
                                     summary['bounds']['Y']['max'],
                                     summary['bounds']['Z']['max'])
        offset_x, offset_y, offset_z = xmin, ymin, zmin
        click.echo('{} < x < {}'.format(xmin, xmax))
        click.echo('{} < y < {}'.format(ymin, ymax))
        click.echo('{} < z < {} '.format(zmin, zmax), nl=False)
        ok()
        pending('Computing best scales for cesium')
        # override scales for cesium if possible we try to use quantized positions
        scale_x = min(compute_scale_for_cesium(xmin, xmax), 1)
        scale_y = min(compute_scale_for_cesium(ymin, ymax), 1)
        scale_z = min(compute_scale_for_cesium(zmin, zmax), 1)
        ok('[{}, {}, {}]'.format(scale_x, scale_y, scale_z))
    # connection parameters substituted into PDAL_PIPELINE via locals()
    pg_host = app.config['PG_HOST']
    pg_name = app.config['PG_NAME']
    pg_port = app.config['PG_PORT']
    pg_user = app.config['PG_USER']
    pg_password = app.config['PG_PASSWORD']
    realfilename = str(filename.resolve())
    schema, tab = table.split('.')
    pending('Loading point clouds into database')
    # write the pipeline file, then run it through pdal (output silenced)
    with io.open(json_path, 'w') as json_file:
        json_file.write(PDAL_PIPELINE.format(**locals()))
    cmd = "pdal pipeline {}".format(json_path)
    try:
        check_call(shlex.split(cmd), stderr=DEVNULL, stdout=DEVNULL)
    except CalledProcessError as e:
        fatal(e)
    ok()
    pending("Creating indexes")
    # spatial index + morton column/index for patch ordering
    Session.execute("""
        create index on {table} using gist(pc_envelopegeometry(points));
        alter table {table} add column morton bigint;
        select Morton_Update('{table}', 'points', 'morton', 128, TRUE);
        create index on {table}(morton);
    """.format(**locals()))
    ok()
    pending("Adding metadata for lopocs")
    Session.update_metadata(table, column, srid,
                            scale_x, scale_y, scale_z,
                            offset_x, offset_y, offset_z)
    lpsession = Session(table, column)
    ok()
    # retrieve boundingbox
    fullbbox = lpsession.boundingbox
    bbox = [
        fullbbox['xmin'], fullbbox['ymin'], fullbbox['zmin'],
        fullbbox['xmax'], fullbbox['ymax'], fullbbox['zmax']
    ]
    if usewith == 'potree':
        lod_min = 0
        lod_max = 5
        # add schema currently used by potree (version 1.5RC)
        Session.add_output_schema(table, column, 0.01, 0.01, 0.01,
                                  offset_x, offset_y, offset_z, srid, potree_schema)
        # cache key encodes table/column/lod range/bbox
        cache_file = ("{0}_{1}_{2}_{3}_{4}.hcy".format(
            lpsession.table, lpsession.column, lod_min, lod_max,
            '_'.join(str(e) for e in bbox)))
        pending("Building greyhound hierarchy")
        new_hcy = greyhound.build_hierarchy_from_pg(lpsession, lod_min, lod_max, bbox)
        greyhound.write_in_cache(new_hcy, cache_file)
        ok()
        create_potree_page(str(work_dir.resolve()), server_url, table, column)
    if usewith == 'cesium':
        pending("Building 3Dtiles tileset")
        hcy = threedtiles.build_hierarchy_from_pg(lpsession, server_url, bbox)
        tileset = os.path.join(str(work_dir.resolve()),
                               'tileset-{}.{}.json'.format(table, column))
        with io.open(tileset, 'wb') as out:
            out.write(hcy.encode())
        ok()
        create_cesium_page(str(work_dir.resolve()), table, column)
def _load(filename, table, column, work_dir, server_url, capacity, usewith, srid=0):
    '''load pointclouds data using pdal and add metadata needed by lopocs'''
    # intialize flask application
    app = create_app()
    filename = Path(filename)
    work_dir = Path(work_dir)
    extension = filename.suffix[1:].lower()
    # laz uses las reader in PDAL
    extension = extension if extension != 'laz' else 'las'
    basename = filename.stem
    basedir = filename.parent
    pending('Creating metadata table')
    Session.create_pointcloud_lopocs_table()
    ok()
    pending('Reading summary with PDAL')
    # pipeline json destination for this file/table pair
    json_path = os.path.join(
        str(work_dir.resolve()),
        '{basename}_{table}_pipeline.json'.format(**locals()))
    # tablename should be always prefixed
    if '.' not in table:
        table = 'public.{}'.format(table)
    # probe bounds and srs with pdal
    cmd = "pdal info --summary {}".format(filename)
    try:
        output = check_output(shlex.split(cmd))
    except CalledProcessError as e:
        fatal(e)
    summary = json.loads(output.decode())['summary']
    ok()
    if 'srs' not in summary and not srid:
        fatal('Unable to find the spatial reference system, please provide a SRID with option --srid')
    if not srid:
        # find authority code in wkt string
        # NOTE(review): non-raw string containing \d — deprecated escape;
        # consider r'EPSG","(\d+)"'
        srid = re.findall('EPSG","(\d+)"', summary['srs']['wkt'])[-1]
    p = Proj(init='epsg:{}'.format(srid))
    if p.is_latlong():
        # geographic: degree units need finer x/y quantization
        scale_x, scale_y, scale_z = (1e-6, 1e-6, 1e-2)
    else:
        # projection or geocentric
        scale_x, scale_y, scale_z = (0.01, 0.01, 0.01)
    # midpoint offsets for each axis
    offset_x = summary['bounds']['X']['min'] + (summary['bounds']['X']['max'] - summary['bounds']['X']['min']) / 2
    offset_y = summary['bounds']['Y']['min'] + (summary['bounds']['Y']['max'] - summary['bounds']['Y']['min']) / 2
    offset_z = summary['bounds']['Z']['min'] + (summary['bounds']['Z']['max'] - summary['bounds']['Z']['min']) / 2
    reproject = ""
    if usewith == 'cesium':
        from_srid = srid
        # cesium only use epsg:4978, so we must reproject before loading into pg
        srid = 4978
        # extra pdal filter stage spliced into PDAL_PIPELINE below
        reproject = """
    {{
        "type":"filters.reprojection",
        "in_srs":"EPSG:{from_srid}",
        "out_srs":"EPSG:{srid}"
    }},""".format(**locals())
        # transform bounds in new coordinate system
        pini = Proj(init='epsg:{}'.format(from_srid))
        pout = Proj(init='epsg:{}'.format(srid))
        # recompute offset in new space and start at 0
        pending('Reprojected bounds', nl=True)
        # xmin, ymin, zmin = transform(pini, pout, offset_x, offset_y, offset_z)
        xmin, ymin, zmin = transform(pini, pout,
                                     summary['bounds']['X']['min'],
                                     summary['bounds']['Y']['min'],
                                     summary['bounds']['Z']['min'])
        xmax, ymax, zmax = transform(pini, pout,
                                     summary['bounds']['X']['max'],
                                     summary['bounds']['Y']['max'],
                                     summary['bounds']['Z']['max'])
        offset_x, offset_y, offset_z = xmin, ymin, zmin
        click.echo('{} < x < {}'.format(xmin, xmax))
        click.echo('{} < y < {}'.format(ymin, ymax))
        click.echo('{} < z < {} '.format(zmin, zmax), nl=False)
        ok()
        pending('Computing best scales for cesium')
        # override scales for cesium if possible we try to use quantized positions
        scale_x = min(compute_scale_for_cesium(xmin, xmax), 1)
        scale_y = min(compute_scale_for_cesium(ymin, ymax), 1)
        scale_z = min(compute_scale_for_cesium(zmin, zmax), 1)
        ok('[{}, {}, {}]'.format(scale_x, scale_y, scale_z))
    # db connection parameters, substituted into PDAL_PIPELINE via locals()
    pg_host = app.config['PG_HOST']
    pg_name = app.config['PG_NAME']
    pg_port = app.config['PG_PORT']
    pg_user = app.config['PG_USER']
    pg_password = app.config['PG_PASSWORD']
    realfilename = str(filename.resolve())
    schema, tab = table.split('.')
    pending('Loading point clouds into database')
    # write the pipeline and run it through pdal, output silenced
    with io.open(json_path, 'w') as json_file:
        json_file.write(PDAL_PIPELINE.format(**locals()))
    cmd = "pdal pipeline {}".format(json_path)
    try:
        check_call(shlex.split(cmd), stderr=DEVNULL, stdout=DEVNULL)
    except CalledProcessError as e:
        fatal(e)
    ok()
    pending("Creating indexes")
    # spatial index + morton ordering column/index
    Session.execute("""
        create index on {table} using gist(pc_envelopegeometry(points));
        alter table {table} add column morton bigint;
        select Morton_Update('{table}', 'points', 'morton', 128, TRUE);
        create index on {table}(morton);
    """.format(**locals()))
    ok()
    pending("Adding metadata for lopocs")
    Session.update_metadata(
        table, column, srid, scale_x, scale_y, scale_z,
        offset_x, offset_y, offset_z
    )
    lpsession = Session(table, column)
    ok()
    # retrieve boundingbox
    fullbbox = lpsession.boundingbox
    bbox = [
        fullbbox['xmin'], fullbbox['ymin'], fullbbox['zmin'],
        fullbbox['xmax'], fullbbox['ymax'], fullbbox['zmax']
    ]
    if usewith == 'potree':
        lod_min = 0
        lod_max = 5
        # add schema currently used by potree (version 1.5RC)
        Session.add_output_schema(
            table, column, 0.01, 0.01, 0.01,
            offset_x, offset_y, offset_z, srid, potree_schema
        )
        # cache key encodes table/column/lod range/bbox
        cache_file = (
            "{0}_{1}_{2}_{3}_{4}.hcy".format(
                lpsession.table,
                lpsession.column,
                lod_min,
                lod_max,
                '_'.join(str(e) for e in bbox)
            )
        )
        pending("Building greyhound hierarchy")
        new_hcy = greyhound.build_hierarchy_from_pg(
            lpsession, lod_min, lod_max, bbox
        )
        greyhound.write_in_cache(new_hcy, cache_file)
        ok()
        create_potree_page(str(work_dir.resolve()), server_url, table, column)
    if usewith == 'cesium':
        pending("Building 3Dtiles tileset")
        hcy = threedtiles.build_hierarchy_from_pg(
            lpsession, server_url, bbox
        )
        tileset = os.path.join(str(work_dir.resolve()),
                               'tileset-{}.{}.json'.format(table, column))
        with io.open(tileset, 'wb') as out:
            out.write(hcy.encode())
        ok()
        create_cesium_page(str(work_dir.resolve()), table, column)
# open config file ymlconf_db = None with open(args.cfg, 'r') as f: try: ymlconf_db = yaml.load(f)['flask'] except: print("ERROR: ", sys.exc_info()[0]) f.close() sys.exit() app = type('', (), {})() app.config = ymlconf_db # open database Session.init_app(app) # build the hierarchy fullbbox = Session.boundingbox() bbox = [ fullbbox['xmin'], fullbbox['ymin'], fullbbox['zmin'], fullbbox['xmax'], fullbbox['ymax'], fullbbox['zmax'] ] print(fullbbox) lod_min = 0 lod_max = ymlconf_db['DEPTH'] - 1 bbox_str = '_'.join(str(e) for e in bbox) if args.t == "greyhound":
# open config file
ymlconf_db = None
with open(args.cfg, 'r') as f:
    try:
        # safe_load: config data needs no arbitrary-object yaml tags, and
        # yaml.load without a Loader is deprecated/unsafe
        ymlconf_db = yaml.safe_load(f)['flask']
    except (yaml.YAMLError, KeyError, TypeError):
        # narrowed from a bare except: only config-parsing failures abort;
        # genuine bugs now propagate instead of being swallowed.
        # (the 'with' block closes the file; no manual f.close() needed)
        print("ERROR: ", sys.exc_info()[0])
        sys.exit(1)  # was sys.exit(): an error path must not exit with 0

# minimal stand-in object exposing a .config attribute, enough for
# Session.init_app() which only reads app.config
app = type('', (), {})()
app.config = ymlconf_db

# open database
Session.init_app(app)

# extract infos from files
infos = get_infos()
#print(infos)

# compute cell parameters
cell_params = compute_cell_parameters(infos)
#print(cell_params)

# build the regular grid as a generator
save_grid = False
if save_grid:
    grid_gen = regular_grid(infos, cell_params)
    store_grid(grid_gen)