def tileset(table, column, server_url, work_dir):
    """(Re)build a tileset.json for a given table"""
    # initialize flask application
    create_app()
    work_dir = Path(work_dir)
    if '.' not in table:
        table = 'public.{}'.format(table)
    lpsession = Session(table, column)
    # initialize range for levels of detail
    fullbbox = lpsession.boundingbox
    bbox = [
        fullbbox['xmin'], fullbbox['ymin'], fullbbox['zmin'],
        fullbbox['xmax'], fullbbox['ymax'], fullbbox['zmax']
    ]
    pending('Building tileset from database')
    hcy = threedtiles.build_hierarchy_from_pg(lpsession, server_url, bbox)
    ok()
    tileset = os.path.join(
        str(work_dir.resolve()),
        'tileset-{}.{}.json'.format(table, column)
    )
    pending('Writing tileset to disk')
    with io.open(tileset, 'wb') as out:
        out.write(hcy.encode())
    ok()
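# Hedged usage sketch (not part of lopocs): the actual CLI registration lives
# elsewhere in the package. This only shows how a click command wrapping
# tileset() could look; the command name and option names below are assumptions.
import click

@click.command('tileset')
@click.option('--table', required=True, help='table containing the point cloud patches')
@click.option('--column', default='points', help='patch column name')
@click.option('--server-url', default='http://localhost:5000', help='public lopocs base url')
@click.option('--work-dir', default='.', help='directory where tileset.json is written')
def tileset_cmd(table, column, server_url, work_dir):
    """Rebuild the 3D Tiles tileset for a table already loaded in the database."""
    tileset(table, column, server_url, work_dir)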
def check():
    '''check lopocs configuration and dependencies'''
    try:
        app = create_app()
    except Exception as exc:
        fatal(str(exc))
    if not app:
        fatal("it appears that you don't have any configuration file")
    # pdal
    cmd_output('Pdal', 'pdal-config --version')
    cmd_rt(
        'Pdal plugin pgpointcloud',
        "test -e `pdal-config --plugin-dir`/libpdal_plugin_writer_pgpointcloud.so"
    )
    # postgresql and extensions
    cmd_pg('PostgreSQL', 'show server_version')
    cmd_pg(
        'PostGIS extension',
        "select default_version from pg_available_extensions where name = 'postgis'"
    )
    cmd_pg(
        'PgPointcloud extension',
        "select default_version from pg_available_extensions where name = 'pointcloud'"
    )
    cmd_pg(
        'PgPointcloud-PostGIS extension',
        "select default_version from pg_available_extensions where name = 'pointcloud_postgis'"
    )
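# Hedged sketch (assumption, not the lopocs implementation): check() relies on
# small reporting helpers such as cmd_output(), cmd_rt() and cmd_pg() that are
# defined elsewhere in the module. Reusing the same helpers and imports as the
# rest of this module, a minimal cmd_output() could run a shell command and
# report its first line of output, roughly like this:
def cmd_output_sketch(label, command):
    """Run a diagnostic shell command and print its first line of output."""
    pending(label)
    try:
        out = check_output(shlex.split(command))
        ok(out.decode().splitlines()[0] if out else '')
    except (CalledProcessError, OSError) as exc:
        click.echo('  -> failed: {}'.format(exc))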
def _load(filename, table, column, work_dir, server_url, capacity, usewith, srid=0):
    '''load point cloud data using pdal and add metadata needed by lopocs'''
    # initialize flask application
    app = create_app()
    filename = Path(filename)
    work_dir = Path(work_dir)
    extension = filename.suffix[1:].lower()
    # laz uses las reader in PDAL
    extension = extension if extension != 'laz' else 'las'
    basename = filename.stem
    basedir = filename.parent

    pending('Creating metadata table')
    Session.create_pointcloud_lopocs_table()
    ok()

    pending('Reading summary with PDAL')
    json_path = os.path.join(
        str(work_dir.resolve()),
        '{basename}_{table}_pipeline.json'.format(**locals()))

    # table name should always be prefixed
    if '.' not in table:
        table = 'public.{}'.format(table)

    cmd = "pdal info --summary {}".format(filename)
    try:
        output = check_output(shlex.split(cmd))
    except CalledProcessError as e:
        fatal(e)
    summary = json.loads(output.decode())['summary']
    ok()

    if 'srs' not in summary and not srid:
        fatal('Unable to find the spatial reference system, '
              'please provide a SRID with option --srid')

    if not srid:
        # find authority code in wkt string
        srid = re.findall(r'EPSG","(\d+)"', summary['srs']['wkt'])[-1]

    p = Proj(init='epsg:{}'.format(srid))

    if p.is_latlong():
        # geographic
        scale_x, scale_y, scale_z = (1e-6, 1e-6, 1e-2)
    else:
        # projection or geocentric
        scale_x, scale_y, scale_z = (0.01, 0.01, 0.01)

    offset_x = summary['bounds']['X']['min'] + (summary['bounds']['X']['max'] - summary['bounds']['X']['min']) / 2
    offset_y = summary['bounds']['Y']['min'] + (summary['bounds']['Y']['max'] - summary['bounds']['Y']['min']) / 2
    offset_z = summary['bounds']['Z']['min'] + (summary['bounds']['Z']['max'] - summary['bounds']['Z']['min']) / 2

    reproject = ""

    if usewith == 'cesium':
        from_srid = srid
        # cesium only uses epsg:4978, so we must reproject before loading into pg
        srid = 4978
        reproject = """
    {{
        "type":"filters.reprojection",
        "in_srs":"EPSG:{from_srid}",
        "out_srs":"EPSG:{srid}"
    }},""".format(**locals())
        # transform bounds in new coordinate system
        pini = Proj(init='epsg:{}'.format(from_srid))
        pout = Proj(init='epsg:{}'.format(srid))
        # recompute offset in new space and start at 0
        pending('Reprojected bounds', nl=True)
        # xmin, ymin, zmin = transform(pini, pout, offset_x, offset_y, offset_z)
        xmin, ymin, zmin = transform(pini, pout,
                                     summary['bounds']['X']['min'],
                                     summary['bounds']['Y']['min'],
                                     summary['bounds']['Z']['min'])
        xmax, ymax, zmax = transform(pini, pout,
                                     summary['bounds']['X']['max'],
                                     summary['bounds']['Y']['max'],
                                     summary['bounds']['Z']['max'])
        offset_x, offset_y, offset_z = xmin, ymin, zmin
        click.echo('{} < x < {}'.format(xmin, xmax))
        click.echo('{} < y < {}'.format(ymin, ymax))
        click.echo('{} < z < {} '.format(zmin, zmax), nl=False)
        ok()
        pending('Computing best scales for cesium')
        # override scales for cesium: if possible we try to use quantized positions
        scale_x = min(compute_scale_for_cesium(xmin, xmax), 1)
        scale_y = min(compute_scale_for_cesium(ymin, ymax), 1)
        scale_z = min(compute_scale_for_cesium(zmin, zmax), 1)
        ok('[{}, {}, {}]'.format(scale_x, scale_y, scale_z))

    pg_host = app.config['PG_HOST']
    pg_name = app.config['PG_NAME']
    pg_port = app.config['PG_PORT']
    pg_user = app.config['PG_USER']
    pg_password = app.config['PG_PASSWORD']
    realfilename = str(filename.resolve())
    schema, tab = table.split('.')

    pending('Loading point clouds into database')
    with io.open(json_path, 'w') as json_file:
        json_file.write(PDAL_PIPELINE.format(**locals()))
    cmd = "pdal pipeline {}".format(json_path)
    try:
        check_call(shlex.split(cmd), stderr=DEVNULL, stdout=DEVNULL)
    except CalledProcessError as e:
        fatal(e)
    ok()

    pending("Creating indexes")
    Session.execute("""
        create index on {table} using gist(pc_envelopegeometry(points));
        alter table {table} add column morton bigint;
        select Morton_Update('{table}', 'points', 'morton', 128, TRUE);
        create index on {table}(morton);
    """.format(**locals()))
    ok()

    pending("Adding metadata for lopocs")
    Session.update_metadata(
        table, column, srid, scale_x, scale_y, scale_z,
        offset_x, offset_y, offset_z
    )
    lpsession = Session(table, column)
    ok()

    # retrieve boundingbox
    fullbbox = lpsession.boundingbox
    bbox = [
        fullbbox['xmin'], fullbbox['ymin'], fullbbox['zmin'],
        fullbbox['xmax'], fullbbox['ymax'], fullbbox['zmax']
    ]

    if usewith == 'potree':
        lod_min = 0
        lod_max = 5
        # add schema currently used by potree (version 1.5RC)
        Session.add_output_schema(
            table, column, 0.01, 0.01, 0.01,
            offset_x, offset_y, offset_z, srid, potree_schema
        )
        cache_file = (
            "{0}_{1}_{2}_{3}_{4}.hcy".format(
                lpsession.table,
                lpsession.column,
                lod_min,
                lod_max,
                '_'.join(str(e) for e in bbox)
            )
        )
        pending("Building greyhound hierarchy")
        new_hcy = greyhound.build_hierarchy_from_pg(
            lpsession, lod_min, lod_max, bbox
        )
        greyhound.write_in_cache(new_hcy, cache_file)
        ok()
        create_potree_page(str(work_dir.resolve()), server_url, table, column)

    if usewith == 'cesium':
        pending("Building 3Dtiles tileset")
        hcy = threedtiles.build_hierarchy_from_pg(
            lpsession, server_url, bbox
        )
        tileset = os.path.join(
            str(work_dir.resolve()),
            'tileset-{}.{}.json'.format(table, column)
        )
        with io.open(tileset, 'wb') as out:
            out.write(hcy.encode())
        ok()
        create_cesium_page(str(work_dir.resolve()), table, column)
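# Hedged sketch (assumption): compute_scale_for_cesium() used in _load() is
# imported from the lopocs utilities and its real implementation is not shown
# here and may differ. One plausible version, assuming the goal is to pick a
# power-of-ten scale so that the extent fits into a 16-bit quantized coordinate
# range (0..65535), could look like this:
import math

def compute_scale_for_cesium_sketch(coordmin, coordmax):
    """Return a power-of-ten scale so that (coordmax - coordmin) / scale < 2**16."""
    extent = coordmax - coordmin
    if extent <= 0:
        # degenerate extent: fall back to a centimetric scale
        return 0.01
    exponent = math.floor(math.log10(extent / 65536.0)) + 1
    return 10 ** exponent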
def serve(host, port):
    '''run lopocs server (development usage)'''
    app = create_app()
    CORS(app)
    app.run(host=host, port=port)
# -*- coding: utf-8 -*-
from lopocs import create_app

app = create_app()

if __name__ == '__main__':
    app.run()
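# Hedged usage sketch (assumption): a quick way to smoke-test the application
# object above without starting a server is Flask's test client. The
# '/infos/online' route is an assumption; substitute any route exposed by lopocs.
def smoke_test():
    """Issue one request against the WSGI app and print the response status."""
    with app.test_client() as client:
        response = client.get('/infos/online')
        print(response.status_code, response.data)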