def terrain_geotiff_elevation_apply(obj, ddd_proj):
    """
    Offsets each vertex Z coordinate by the terrain elevation under its (x, y) position.
    """
    elevation = ElevationModel.instance()
    #print(transform_ddd_to_geo(ddd_proj, [obj.mesh.vertices[0][0], obj.mesh.vertices[0][1]]))
    func = lambda x, y, z, i: [x, y, z + elevation.value(transform_ddd_to_geo(ddd_proj, [x, y]))]
    obj = obj.vertex_func(func)
    #mesh.mesh.invert()
    return obj
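# Hedged usage sketch (not part of the original module): terrain_geotiff_elevation_apply
# offsets each vertex Z by the terrain elevation under it, so it is typically called on
# already-generated 3D geometry before export. `my_obj` is an assumption here (any DDD
# object with a mesh); `ddd_proj` is the local metric projection built elsewhere (see the
# OSM build command below).
#
#   my_obj = terrain_geotiff_elevation_apply(my_obj, ddd_proj)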
def terrain_geotiff(bounds, ddd_proj, detail=1.0):
    """
    Generates a square grid and applies terrain elevation to it.
    """
    # TODO: we should load the chunk as a heightmap, and load via terrain_heightmap for reuse
    #elevation = ElevationChunk.load('/home/jjmontes/git/ddd/data/elevation/eudem/eudem_dem_5deg_n40w010.tif')
    #elevation = ElevationChunk.load(dem_file)
    elevation = ElevationModel.instance()
    mesh = terrain_grid(bounds, detail=detail)
    func = lambda x, y, z, i: [x, y, elevation.value(transform_ddd_to_geo(ddd_proj, [x, y]))]
    mesh = mesh.vertex_func(func)
    #mesh.mesh.invert()
    return mesh
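# Hedged usage sketch (assumptions: bounds is a DDD-space [xmin, ymin, xmax, ymax] rectangle
# in meters, `ddd_proj` is the local projection, and `detail` is forwarded to terrain_grid
# to control grid density; the exact meaning of `detail` depends on terrain_grid):
#
#   patch = terrain_geotiff([-500, -500, 500, 500], ddd_proj, detail=15.0)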
def terrain_geotiff_min_elevation_apply(obj, ddd_proj):
    """
    Translates the object vertically by the minimum terrain elevation found under its vertices.
    """
    elevation = ElevationModel.instance()
    min_h = None
    for v in obj.vertex_iterator():
        v_h = elevation.value(transform_ddd_to_geo(ddd_proj, [v[0], v[1]]))
        if min_h is None or v_h < min_h:
            min_h = v_h
    if min_h is None:
        raise DDDException("Cannot calculate min value for elevation: %s" % obj)
    #func = lambda x, y, z, i: [x, y, z + min_h]
    obj = obj.translate([0, 0, min_h])
    obj.extra['_terrain_geotiff_min_elevation_apply:elevation'] = min_h
    #mesh.mesh.invert()
    return obj
def osm_terrain_export_heightmap(root, osm, pipeline, logger):

    # Get chunk heightmap from elevation engine
    # TODO: Correct heightmap with calculated alterations (ponds, riverbanks, elevation augmentation...)
    #terrain.terrain_geotiff_elevation_apply(ceilings_3d, osm.ddd_proj)

    elevation = ElevationModel.instance()

    ddd_bounds = osm.area_crop.bounds
    wgs84_min = terrain.transform_ddd_to_geo(osm.ddd_proj, ddd_bounds[:2])
    wgs84_max = terrain.transform_ddd_to_geo(osm.ddd_proj, ddd_bounds[2:])
    wgs84_bounds = wgs84_min + wgs84_max

    heightmap_size = pipeline.data.get('ddd:terrain:heightmap:size', 128)

    logger.info("Generating heightmap for area: ddd_bounds=%s, wgs84_bounds=%s, size=%s", ddd_bounds, wgs84_bounds, heightmap_size)

    #height_matrix = elevation.dem.area(wgs84_bounds)
    #print(height_matrix)

    water = root.select(path="/Areas", selector='["ddd:area:type" = "sea"]["ddd:crop:original"]')

    # Interpolate over DDD coordinates and resolve height
    height_matrix = np.zeros([heightmap_size, heightmap_size])
    for xi, x in enumerate(np.linspace(ddd_bounds[0], ddd_bounds[2], heightmap_size, endpoint=True)):
        for yi, y in enumerate(reversed(np.linspace(ddd_bounds[1], ddd_bounds[3], heightmap_size, endpoint=True))):
            wgs84_point = terrain.transform_ddd_to_geo(osm.ddd_proj, [x, y])  # Arbitrary offset tests
            height = elevation.value(wgs84_point)
            if height < 0:
                height = 0

            # Temporarily enhance with areas (ultimately, this is needed for much more, maybe coming from the elevation-plus engine)
            # At least use spatial partitioning to find where the point lies
            '''
            dddp = ddd.point([x, y])
            for c in water.children:
                if c.get('ddd:crop:original').intersects(dddp):
                    height = 0
            '''

            height_matrix[yi, xi] = height

    height_max = np.max(height_matrix)
    height_min = np.min(height_matrix)
    logger.info("Heightmap max=%s min=%s (range=%s)", height_max, height_min, height_max - height_min)

    '''
    # Hillshade
    #print(height_matrix)
    height_normalized = (height_matrix - height_min) / (height_max - height_min)
    hillshade_matrix = hillshade(height_matrix, 270 + wgs84_min[1], 45.0)

    # Blend height and hillshade
    height_hillshade_matrix = height_normalized * 0.5 + hillshade_matrix * 0.5

    # Save Hillshade
    encoded_hillshade = (height_hillshade_matrix * 255)
    im = Image.fromarray(np.uint8(encoded_hillshade), "L")
    #im.save("/tmp/hillshade.png", "PNG")
    im.save(pipeline.data['filenamebase'] + ".hillshade.png", "PNG")
    '''

    '''
    # Calculate gradient
    gradient_diff = 0.1
    #gradient_matrix = np.gradient(height_matrix)
    gradient_matrix = np.zeros([heightmap_size, heightmap_size, 2])
    for xi, x in enumerate(np.linspace(ddd_bounds[0], ddd_bounds[2], heightmap_size, endpoint=True)):
        for yi, y in enumerate(reversed(np.linspace(ddd_bounds[1], ddd_bounds[3], heightmap_size, endpoint=True))):
            if (height_matrix[yi, xi] == 0):  # water
                gradient_matrix[yi, xi, 0] = 0
                gradient_matrix[yi, xi, 1] = 0
                continue
            wgs84_point_x0 = terrain.transform_ddd_to_geo(osm.ddd_proj, [x - gradient_diff, y])
            wgs84_point_x1 = terrain.transform_ddd_to_geo(osm.ddd_proj, [x + gradient_diff, y])
            wgs84_point_y0 = terrain.transform_ddd_to_geo(osm.ddd_proj, [x, y - gradient_diff])
            wgs84_point_y1 = terrain.transform_ddd_to_geo(osm.ddd_proj, [x, y + gradient_diff])
            height_x0 = elevation.value(wgs84_point_x0)
            height_x1 = elevation.value(wgs84_point_x1)
            height_y0 = elevation.value(wgs84_point_y0)
            height_y1 = elevation.value(wgs84_point_y1)
            grad_x = (height_x1 - height_x0) / (gradient_diff * 2.0)
            grad_y = -(height_y1 - height_y0) / (gradient_diff * 2.0)  # DDD CRS is positive Y north/up
            gradient_matrix[yi, xi, 0] = grad_x
            gradient_matrix[yi, xi, 1] = grad_y

    #print(gradient_matrix)
    '''

    # Encode heightmap
    # R,G = height
    # B = normals
    # A = holes

    heightmap_offset = height_min
    heightmap_range = height_max - height_min  # NOTE: a completely flat patch makes this 0, and the normalization below would divide by zero
    heightmap_quantization = 65535

    '''
    encoded_heightmap = np.zeros((heightmap_size, heightmap_size, 4))
    for xi in range(heightmap_size):
        for yi in range(heightmap_size):

            # Encode height
            height = height_matrix[yi, xi]
            normalizedHeight = (height - heightmap_offset) / heightmap_range
            quantizedHeight = int(normalizedHeight * heightmap_quantization)
            encoded_heightmap[yi, xi, 0] = quantizedHeight & 0x00ff
            encoded_heightmap[yi, xi, 1] = (quantizedHeight & 0xff00) >> 8

            # Encode normal
            gradient_x = gradient_matrix[yi, xi, 0]
            normal_x = math.cos(math.pi / 2 + math.atan(gradient_x))
            gradient_y = gradient_matrix[yi, xi, 1]
            normal_y = math.cos(math.pi / 2 + math.atan(gradient_y))
            quantized_normal_x = int(((normal_x + 1.0) / 2.0) * 255.0)  # between -128 and 127
            quantized_normal_y = int(((normal_y + 1.0) / 2.0) * 255.0)  # between -128 and 127
            encoded_heightmap[yi, xi, 2] = quantized_normal_x
            encoded_heightmap[yi, xi, 3] = quantized_normal_y
            #encoded_heightmap[yi, xi, 3] = 255
    '''

    # Save heightmap as PNG
    filename = pipeline.data['filenamebase'] + ".heightmap-" + str(heightmap_size) + ".png"
    #im = Image.fromarray(np.uint8(encoded_heightmap), "RGBA")
    #im = Image.fromarray(np.uint16(encoded_heightmap), "I")
    #im.save("/tmp/osm-heightmap.png", "PNG")
    #im.save(filename, "PNG")

    heightmap_uint16 = np.uint16(((height_matrix - heightmap_offset) / heightmap_range) * 65535)
    with open(filename, 'wb') as f:
        writer = png.Writer(width=heightmap_size, height=heightmap_size, bitdepth=16, greyscale=True)
        pngdata = (heightmap_uint16).tolist()
        writer.write(f, pngdata)

    # Metadata (to be saved later to descriptor)
    pipeline.data['height:min'] = height_min
    pipeline.data['height:max'] = height_max
    pipeline.data['heightmap:offset'] = heightmap_offset
    pipeline.data['heightmap:range'] = heightmap_range
    pipeline.data['heightmap:quantization'] = heightmap_quantization
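# Hedged sketch (not part of the original pipeline): inverse of the 16-bit greyscale encoding
# above, shown to make the quantization explicit. It maps a PNG sample back to meters using
# the 'heightmap:offset', 'heightmap:range' and 'heightmap:quantization' values stored in
# pipeline.data. The function name and signature are illustrative only.
def terrain_heightmap_decode_value_example(sample, heightmap_offset, heightmap_range, heightmap_quantization=65535):
    # sample is an integer in [0, heightmap_quantization] as written by png.Writer above
    return heightmap_offset + (float(sample) / heightmap_quantization) * heightmap_range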
def osm_gdterrain_export_heightmap(root, osm, pipeline, logger):

    # Get chunk heightmap from elevation engine
    # TODO: Correct heightmap with calculated alterations (ponds, riverbanks, elevation augmentation...)
    #terrain.terrain_geotiff_elevation_apply(ceilings_3d, osm.ddd_proj)

    elevation = ElevationModel.instance()

    ddd_bounds = osm.area_crop.bounds
    wgs84_min = terrain.transform_ddd_to_geo(osm.ddd_proj, ddd_bounds[:2])
    wgs84_max = terrain.transform_ddd_to_geo(osm.ddd_proj, ddd_bounds[2:])
    wgs84_bounds = wgs84_min + wgs84_max

    heightmap_size = 128

    logger.info("Generating heightmap for area: ddd_bounds=%s, wgs84_bounds=%s, size=%s", ddd_bounds, wgs84_bounds, heightmap_size)

    #height_matrix = elevation.dem.area(wgs84_bounds)
    #print(height_matrix)

    # Interpolate over DDD coordinates and resolve height
    height_matrix = np.zeros([heightmap_size, heightmap_size])
    for xi, x in enumerate(np.linspace(ddd_bounds[0], ddd_bounds[2], heightmap_size, endpoint=True)):
        for yi, y in enumerate(np.linspace(ddd_bounds[1], ddd_bounds[3], heightmap_size, endpoint=True)):
            wgs84_point = terrain.transform_ddd_to_geo(osm.ddd_proj, [x, y])
            height = elevation.value(wgs84_point)
            height_matrix[yi, xi] = height

    #print(height_matrix)
    #gradient = np.gradient(height_matrix)

    # Encode heightmap
    # R,G = height
    # B = normals
    # A = holes
    encoded_heightmap = np.zeros((heightmap_size, heightmap_size, 4))
    for xi in range(heightmap_size):
        for yi in range(heightmap_size):
            height = height_matrix[yi, xi]
            patch_height_offset = 0.0
            patch_height_range = 512  # 65535.0
            normalizedHeight = (height - patch_height_offset) / patch_height_range
            quantizedHeight = int(normalizedHeight * 65535)
            encoded_heightmap[yi, xi, 0] = quantizedHeight & 0x00ff
            encoded_heightmap[yi, xi, 1] = (quantizedHeight & 0xff00) >> 8
            #encoded_heightmap[yi, xi, 2] = 0
            encoded_heightmap[yi, xi, 3] = 255

    height_max = np.max(height_matrix)
    height_min = np.min(height_matrix)
    #print(encoded_heightmap)
    logger.info("Heightmap max=%s min=%s (range=%s)", height_max, height_min, height_max - height_min)

    # Save heightmap as PNG
    im = Image.fromarray(np.uint8(encoded_heightmap), "RGBA")
    im.save("/tmp/heightmap.png", "PNG")
    im.save(pipeline.data['filenamebase'] + ".heightmap.png", "PNG")
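# Hedged sketch (not part of the original pipeline): decoding the R/G byte pair written by
# osm_gdterrain_export_heightmap back to meters. Assumes the same fixed patch_height_offset=0.0
# and patch_height_range=512 used above; the helper name is illustrative only.
def gdterrain_heightmap_decode_example(r, g, patch_height_offset=0.0, patch_height_range=512):
    quantized = int(r) | (int(g) << 8)  # R holds the low byte, G the high byte
    return patch_height_offset + (quantized / 65535.0) * patch_height_range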
def run(self):

    # TODO: Move to pipelined builder
    logger.warning("Move to builder")

    logger.info("Running DDD123 OSM build command.")

    D1D2D3Bootstrap._instance._unparsed_args = None

    tasks_count = 0

    if self.xyztile:
        self.process_xyztile()

    #name = "vigo"
    #center_wgs84 = vigo_wgs84
    #area = area_vigo_huge_rande
    center_wgs84 = self.center

    # Name
    if self.name is None:
        self.name = "ddd-osm-%.3f,%.3f" % center_wgs84
    name = self.name

    path = "data/osm/"

    # Prepare data
    # Check if geojson file is available
    #sides = 15 * 0.01  # Approximate degrees to km
    sides = 5 * 0.001
    roundto = sides / 3
    datacenter = int(self.center[0] / roundto) * roundto, int(self.center[1] / roundto) * roundto
    dataname = name + "_%.4f_%.4f" % datacenter
    datafile = os.path.join(path, "%s.osm.geojson" % dataname)

    # Get data if needed or forced
    force_get_data = parse_bool(D1D2D3Bootstrap.data.get('ddd:osm:datasource:force_refresh', False))
    file_exists = os.path.isfile(datafile)

    if force_get_data or not file_exists:
        logger.info("Data file '%s' not found or datasource:force_refresh is True. Trying to produce data." % datafile)
        #self.get_data(path, dataname, datacenter)
        self.get_data_osm(path, dataname, datacenter)

    # Read data
    files = [os.path.join(path, f) for f in [dataname + '.osm.geojson']
             if os.path.isfile(os.path.join(path, f)) and f.endswith(".geojson")]
    logger.info("Reading %d files from %s: %s" % (len(files), path, files))

    # FIXME: API recommends using only 'epsg:4326' but it seems to give weird coordinates (always_xy=True?)
    osm_proj = pyproj.Proj(init='epsg:4326')
    ddd_proj = pyproj.Proj(proj="tmerc",
                           lon_0=center_wgs84[0],
                           lat_0=center_wgs84[1],
                           k=1,
                           x_0=0., y_0=0.,
                           units="m",
                           datum="WGS84",
                           ellps="WGS84",
                           towgs84="0,0,0,0,0,0,0",
                           no_defs=True)

    # TODO: Move area resolution outside this method and resolve after processing args
    area_ddd = None
    if self.area is not None:
        trans_func = partial(pyproj.transform, osm_proj, ddd_proj)
        area_ddd = ops.transform(trans_func, self.area)
    elif not self.chunk_size:
        resolution = 8
        if resolution > 1:
            area_ddd = ddd.point().buffer(self._radius, cap_style=ddd.CAP_ROUND, resolution=resolution).geom
        else:
            area_ddd = ddd.rect([-self._radius, -self._radius, self._radius, self._radius]).geom

    logger.info("Area meters/coords=%s", area_ddd)
    if area_ddd:
        logger.info("Complete polygon area: %.1f km2 (%d at 500, %d at 250, %d at 200)",
                    area_ddd.area / (1000 * 1000),
                    math.ceil(area_ddd.area / (500 * 500)),
                    math.ceil(area_ddd.area / (250 * 250)),
                    math.ceil(area_ddd.area / (200 * 200)))

    # TODO: organise tasks and locks in pipeline, not here
    skipped = 0
    existed = 0

    tiles = [(0, 0)] if not self.chunk_size else range_around([-64, -64, 64, 64])

    for (idx, (x, y)) in enumerate(tiles):
        #for x, y in range_around([-8, -8, 8, 8]):  # -8, 3

        if self.limit and tasks_count >= self.limit:
            logger.info("Limit of %d tiles hit.", self.limit)
            break

        if self.chunk_size:

            logger.info("Chunk size: %s", self.chunk_size)

            bbox_crop = [x * self.chunk_size, y * self.chunk_size,
                         (x + 1) * self.chunk_size, (y + 1) * self.chunk_size]
            bbox_filter = [bbox_crop[0] - self.chunk_size_extra_filter, bbox_crop[1] - self.chunk_size_extra_filter,
                           bbox_crop[2] + self.chunk_size_extra_filter, bbox_crop[3] + self.chunk_size_extra_filter]
            area_crop = ddd.rect(bbox_crop).geom
            area_filter = ddd.rect(bbox_filter).geom

            #area_ddd = ddd.rect(bbox_crop)
            trans_func = partial(pyproj.transform, ddd_proj, osm_proj)
            self.area = ops.transform(trans_func, area_crop)

            shortname = '%s_%d_%d,%d' % (name, abs(x) + abs(y), bbox_crop[0], bbox_crop[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"

        elif self.xyztile:

            area_crop = area_ddd
            area_filter = area_ddd.buffer(self.chunk_size_extra_filter, join_style=ddd.JOIN_MITRE)

            shortname = '%s_%d_%d_%d' % (name, self.xyztile[2], self.xyztile[0], self.xyztile[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"

        else:

            #logger.info("No chunk size defined (area was given)")
            area_crop = area_ddd
            #print(area_crop)
            area_filter = area_ddd.buffer(self.chunk_size_extra_filter, join_style=ddd.JOIN_MITRE)

            shortname = '%s_%dr_%.3f,%.3f' % (name, self._radius if self._radius else 0, self.center[0], self.center[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"

        if area_ddd and not area_ddd.intersects(area_crop):
            skipped += 1
            #logger.debug("Skipping: %s (cropped area not contained in greater filtering area)", filename)
            #if os.path.exists(filename):
            #    logger.info("Deleting: %s", filename)
            #    os.unlink(filename)
            continue

        if not D1D2D3Bootstrap._instance.overwrite and os.path.exists(filename):
            #logger.debug("Skipping: %s (already exists)", filename)
            existed += 1
            continue

        # Try to lock
        lockfilename = filename + ".lock"
        try:
            with open(lockfilename, "x") as _:

                old_formatters = {hdlr: hdlr.formatter for hdlr in logging.getLogger().handlers}
                if D1D2D3Bootstrap._instance.debug:
                    new_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(module)s [' + shortname + '] %(message)s')
                else:
                    new_formatter = logging.Formatter('%(asctime)s [' + shortname + '] %(message)s')

                # Apply formatter to existing loggers
                for hdlr in logging.getLogger().handlers:
                    hdlr.setFormatter(new_formatter)

                # Create a file handler for this process log
                # TODO: Support this at pipeline level / ddd command (?)
                build_log_file = False
                if build_log_file:
                    fh = logging.FileHandler('/tmp/%s.log' % (shortname, ))
                    fh.setLevel(level=logging.DEBUG)
                    fh.setFormatter(new_formatter)
                    logging.getLogger().addHandler(fh)

                # Check elevation is available
                elevation = ElevationModel.instance()
                center_elevation = elevation.value(center_wgs84)
                logger.info("Center point elevation: %s", center_elevation)

                logger.info("Generating: %s", filename)
                pipeline = DDDPipeline(['pipelines.osm_base.s10_init.py',
                                        'pipelines.osm_common.s10_locale_config.py',

                                        'pipelines.osm_base.s20_osm_features.py',
                                        'pipelines.osm_base.s20_osm_features_export_2d.py',

                                        'pipelines.osm_base.s30_groups.py',
                                        'pipelines.osm_base.s30_groups_ways.py',
                                        'pipelines.osm_base.s30_groups_buildings.py',
                                        'pipelines.osm_base.s30_groups_areas.py',
                                        'pipelines.osm_base.s30_groups_items_nodes.py',
                                        'pipelines.osm_base.s30_groups_items_ways.py',
                                        'pipelines.osm_base.s30_groups_items_areas.py',
                                        'pipelines.osm_base.s30_groups_export_2d.py',

                                        'pipelines.osm_base.s40_structured.py',
                                        'pipelines.osm_base.s40_structured_export_2d.py',

                                        'pipelines.osm_augment.s45_pitch.py',

                                        'pipelines.osm_base.s50_stairs.py',
                                        'pipelines.osm_base.s50_positioning.py',
                                        'pipelines.osm_base.s50_crop.py',
                                        'pipelines.osm_base.s50_90_export_2d.py',

                                        'pipelines.osm_augment.s50_ways.py',
                                        'pipelines.osm_augment.s55_plants.py',
                                        'pipelines.osm_augment.s55_rocks.py',
                                        'pipelines.osm_augment.s55_building_floors.py',

                                        'pipelines.osm_base.s60_model.py',
                                        'pipelines.osm_base.s65_model_metadata_clean.py',
                                        'pipelines.osm_base.s65_model_post_opt.py',
                                        'pipelines.osm_base.s69_model_export_3d.py',

                                        'pipelines.osm_base.s70_metadata.py',

                                        'pipelines.osm_terrain.s60_heightmap_export.py',
                                        'pipelines.osm_terrain.s60_splatmap_export.py',

                                        'pipelines.osm_extras.s30_icons.py',
                                        'pipelines.osm_extras.s80_model_compress.py',

                                        #'pipelines.osm_extras.mapillary.py',
                                        #'pipelines.osm_extras.ortho.py',
                                        ], name="OSM Build Pipeline")

                pipeline.data['osmfiles'] = files
                pipeline.data['filenamebase'] = filenamebase
                pipeline.data['ddd:pipeline:start_date'] = datetime.datetime.now()

                pipeline.data['tile:bounds_wgs84'] = self.area.bounds
                pipeline.data['tile:bounds_m'] = area_crop.bounds

                # Fusion DDD data with pipeline data, so changes to the latter affect the former
                # TODO: better way to do this without globals and merging data?
                D1D2D3.data.update(pipeline.data)
                D1D2D3.data = pipeline.data

                try:
                    osmbuilder = osm.OSMBuilder(area_crop=area_crop, area_filter=area_filter, osm_proj=osm_proj, ddd_proj=ddd_proj)
                    pipeline.data['osm'] = osmbuilder

                    pipeline.run()
                    #scene = osmbuilder.generate()

                    tasks_count += 1

                finally:
                    # Ensure lock file is removed
                    try:
                        os.unlink(lockfilename)
                    except Exception as e:
                        pass

                    for hdlr in logging.getLogger().handlers:
                        hdlr.setFormatter(old_formatters[hdlr])

        except FileExistsError as e:
            logger.info("Skipping: %s (lock file exists)", filename)

    if existed > 0:
        logger.info("Skipped %d files that already existed.", existed)
    if skipped > 0:
        logger.info("Skipped %d files not contained in greater filtering area.", skipped)
def osm_bootstrap_check_elevation(root, pipeline, logger):
    # Check elevation is available
    center_wgs84 = pipeline.data.get('ddd:osm:area:center')
    elevation = ElevationModel.instance()
    center_elevation = elevation.value(center_wgs84)
    logger.info("Center point elevation: %s", center_elevation)
def run(self):

    # TODO: Move to pipelined builder
    logger.warning("Move to builder")

    logger.info("Running DDD123 OSM build command.")

    D1D2D3Bootstrap._instance._unparsed_args = None

    tasks_count = 0

    # TODO: allow alias in ~/.ddd.conf
    #vigo_wgs84 = [-8.723, 42.238]
    #cuvi_wgs84 = [-8.683, 42.168]
    #area_vigo = { "type": "Polygon", "coordinates": [ [ [ -8.738025517345417, 42.223436382101397 ], [ -8.740762525671032, 42.229564900743533 ], [ -8.73778751662145, 42.23289691087907 ], [ -8.738620519155333, 42.235871919928648 ], [ -8.733920004856994, 42.241702937665828 ], [ -8.729516991463614, 42.242773940923676 ], [ -8.724102474993376, 42.244975447620369 ], [ -8.712142938614059, 42.246254701511681 ], [ -8.711190935718193, 42.245748949973255 ], [ -8.703842663365727, 42.244112694995998 ], [ -8.700570153411187, 42.241197186127408 ], [ -8.702057657935978, 42.238995679430715 ], [ -8.70289066046986, 42.235485168752206 ], [ -8.705865669519442, 42.231736657349735 ], [ -8.70907867929299, 42.23036815318693 ], [ -8.716278201192978, 42.229059149205113 ], [ -8.719610211328508, 42.225370137983631 ], [ -8.726750233047504, 42.219539120246452 ], [ -8.730379744087994, 42.217516114092739 ], [ -8.736210761825173, 42.2191821191605 ], [ -8.736210761825173, 42.2191821191605 ], [ -8.738174267797897, 42.221562126400165 ], [ -8.738174267797897, 42.221562126400165 ], [ -8.738025517345417, 42.223436382101397 ] ] ] }
    #area_vigo_huge_rande = { "type": "MultiPolygon", "coordinates": [ [ [ [ -8.678739229779634, 42.285406246127017 ], [ -8.679768244461799, 42.286124008658462 ], [ -8.679944646978743, 42.287581258944734 ], [ -8.679709443622819, 42.290212924049762 ], [ -8.680473854529568, 42.292192037766931 ], [ -8.68123826543632, 42.293540409297322 ], [ -8.680326852432117, 42.296345799483696 ], [ -8.67829822348728, 42.296019597743189 ], [ -8.676534198317855, 42.296367546206326 ], [ -8.673329552593403, 42.296258812518111 ], [ -8.67153612700449, 42.297955036674374 ], [ -8.668243280021565, 42.299129318941247 ], [ -8.665009233877623, 42.299738197421469 ], [ -8.661275380602341, 42.30252156692557 ], [ -8.652602256852674, 42.303152156982897 ], [ -8.648603799801982, 42.298759639848647 ], [ -8.641165493670913, 42.289147221675357 ], [ -8.65072063000529, 42.282382853576621 ], [ -8.65730632397114, 42.275465481810826 ], [ -8.65965835753037, 42.268242761434706 ], [ -8.661657586055718, 42.260758105800491 ], [ -8.664597628004756, 42.257189526957589 ], [ -8.676240194122952, 42.251009315195994 ], [ -8.676475397478876, 42.245350843851035 ], [ -8.651308638395097, 42.239953059756857 ], [ -8.63943086892098, 42.244741439740103 ], [ -8.620496998769166, 42.249181249186741 ], [ -8.612147279633895, 42.243870852227474 ], [ -8.618144965209934, 42.226543662551634 ], [ -8.628493912870553, 42.213566923726354 ], [ -8.647192579666443, 42.210082781391023 ], [ -8.654366282022099, 42.200674637101095 ], [ -8.654601485378024, 42.190132366865519 ], [ -8.663421611225139, 42.175492249420188 ], [ -8.672476940428181, 42.164509936746896 ], [ -8.666949661563988, 42.158059118335679 ], [ -8.666949661563988, 42.154048818664116 ], [ -8.682355481376954, 42.151782015135495 ], [ -8.698584512935652, 42.151956387519974 ], [ -8.707522240460731, 42.154397550462775 ], [ -8.715166349528236, 42.158756535815868 ], [ -8.726103305578659, 42.167473605784153 ], [ -8.732806601222471, 42.173139057222009 ], [ -8.735041033103739, 42.180982690717805 ], [ -8.742685142171242, 42.189173891460896 ], [ -8.762559825746747, 42.185688403840906 ], [ -8.7798472724071, 42.182987018755384 ], [ -8.786903373084794, 42.188041129063663 ], [ -8.795605897253949, 42.187256897051611 ], [ -8.80536683652476, 42.191744315452596 ], [ -8.808248077634818, 42.2020249661402 ], [ -8.796664312355604, 42.206990444093883 ], [ -8.792077846915102, 42.202939688770883 ], [ -8.780905687508753, 42.212608803743251 ], [ -8.782493310161234, 42.223582762359229 ], [ -8.764500253433113, 42.235425529931319 ], [ -8.743625955594931, 42.241781393183061 ], [ -8.719870416646694, 42.248049562726422 ], [ -8.709756672341998, 42.260366442379272 ], [ -8.691763615613878, 42.264500544688026 ], [ -8.688235565275031, 42.262498802682778 ], [ -8.678357024326258, 42.271506141210914 ], [ -8.66950749805965, 42.283774937233652 ], [ -8.669514848164527, 42.286255869446485 ], [ -8.669597536844341, 42.286667762707708 ], [ -8.669812527411864, 42.286814575496322 ], [ -8.670181870181713, 42.28682680987994 ], [ -8.678739229779634, 42.285406246127017 ] ] ] ] }
    #name = "vilanovailagertru"

    if self.xyztile:
        self.process_xyztile()

    #name = "vigo"
    #center_wgs84 = vigo_wgs84
    #area = area_vigo_huge_rande
    center_wgs84 = self.center

    # Name
    if self.name is None:
        self.name = "ddd-osm-%.3f,%.3f" % center_wgs84
    name = self.name

    path = "data/osm/"

    # Prepare data
    # Check if geojson file is available
    #sides = 15 * 0.01  # Approximate degrees to km
    sides = 5 * 0.001
    roundto = sides / 3
    datacenter = int(self.center[0] / roundto) * roundto, int(self.center[1] / roundto) * roundto
    dataname = name + "_%.4f_%.4f" % datacenter
    datafile = os.path.join(path, "%s.osm.geojson" % dataname)

    if not os.path.isfile(datafile):
        logger.info("Data file '%s' not found. Trying to produce data." % datafile)
        self.get_data_osm(path, dataname, datacenter, self.area)

    files = [os.path.join(path, f) for f in [dataname + '.osm.geojson']
             if os.path.isfile(os.path.join(path, f)) and f.endswith(".geojson")]
    logger.info("Reading %d files from %s: %s" % (len(files), path, files))

    # FIXME: API recommends using only 'epsg:4326' but it seems to give weird coordinates?
    osm_proj = pyproj.Proj(init='epsg:4326')
    ddd_proj = pyproj.Proj(proj="tmerc",
                           lon_0=center_wgs84[0],
                           lat_0=center_wgs84[1],
                           k=1,
                           x_0=0., y_0=0.,
                           units="m",
                           datum="WGS84",
                           ellps="WGS84",
                           towgs84="0,0,0,0,0,0,0",
                           no_defs=True)

    # TODO: Move area resolution outside this method and resolve after processing args
    area_ddd = None
    if self.area is not None:
        trans_func = partial(pyproj.transform, osm_proj, ddd_proj)
        area_ddd = ops.transform(trans_func, self.area)
    else:
        resolution = 8
        if resolution > 1:
            area_ddd = ddd.point().buffer(self._radius, cap_style=ddd.CAP_ROUND, resolution=resolution).geom
        else:
            area_ddd = ddd.rect([-self._radius, -self._radius, self._radius, self._radius]).geom

    logger.info("Area meters/coords=%s", area_ddd)
    logger.info("Complete polygon area: %.1f km2 (%d at 500, %d at 250, %d at 200)",
                area_ddd.area / (1000 * 1000),
                math.ceil(area_ddd.area / (500 * 500)),
                math.ceil(area_ddd.area / (250 * 250)),
                math.ceil(area_ddd.area / (200 * 200)))

    # TODO: organise tasks and locks in pipeline, not here
    skipped = 0
    existed = 0

    tiles = [(0, 0)] if not self.chunk_size else range_around([-64, -64, 64, 64])

    for (idx, (x, y)) in enumerate(tiles):
        #for x, y in range_around([-8, -8, 8, 8]):  # -8, 3

        if self.limit and tasks_count >= self.limit:
            logger.info("Limit of %d tiles hit.", self.limit)
            break

        if self.chunk_size:

            logger.info("Chunk size: %s", self.chunk_size)

            bbox_crop = [x * self.chunk_size, y * self.chunk_size,
                         (x + 1) * self.chunk_size, (y + 1) * self.chunk_size]
            bbox_filter = [bbox_crop[0] - self.chunk_size_extra_filter, bbox_crop[1] - self.chunk_size_extra_filter,
                           bbox_crop[2] + self.chunk_size_extra_filter, bbox_crop[3] + self.chunk_size_extra_filter]
            area_crop = ddd.rect(bbox_crop).geom
            area_filter = ddd.rect(bbox_filter).geom

            shortname = '%s_%d_%d,%d' % (name, abs(x) + abs(y), bbox_crop[0], bbox_crop[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"

        elif self.xyztile:

            area_crop = area_ddd
            area_filter = area_ddd.buffer(self.chunk_size_extra_filter, join_style=ddd.JOIN_MITRE)

            shortname = '%s_%d_%d_%d' % (name, self.xyztile[2], self.xyztile[0], self.xyztile[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"

        else:

            #logger.info("No chunk size defined (area was given)")
            area_crop = area_ddd
            #print(area_crop)
            area_filter = area_ddd.buffer(self.chunk_size_extra_filter, join_style=ddd.JOIN_MITRE)

            shortname = '%s_%dr_%.3f,%.3f' % (name, self._radius if self._radius else 0, self.center[0], self.center[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"

        if area_ddd and not area_ddd.intersects(area_crop):
            skipped += 1
            #logger.debug("Skipping: %s (cropped area not contained in greater filtering area)", filename)
            #if os.path.exists(filename):
            #    logger.info("Deleting: %s", filename)
            #    os.unlink(filename)
            continue

        if not D1D2D3Bootstrap._instance.overwrite and os.path.exists(filename):
            #logger.debug("Skipping: %s (already exists)", filename)
            existed += 1
            continue

        # Try to lock
        lockfilename = filename + ".lock"
        try:
            with open(lockfilename, "x") as _:

                old_formatters = {hdlr: hdlr.formatter for hdlr in logging.getLogger().handlers}
                if D1D2D3Bootstrap._instance.debug:
                    new_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(module)s [' + shortname + '] %(message)s')
                else:
                    new_formatter = logging.Formatter('%(asctime)s [' + shortname + '] %(message)s')

                # Apply formatter to existing loggers
                for hdlr in logging.getLogger().handlers:
                    hdlr.setFormatter(new_formatter)

                # Create a file handler for this process log
                # TODO: Support this at pipeline level / ddd command (?)
                build_log_file = False
                if build_log_file:
                    fh = logging.FileHandler('/tmp/%s.log' % (shortname, ))
                    fh.setLevel(level=logging.DEBUG)
                    fh.setFormatter(new_formatter)
                    logging.getLogger().addHandler(fh)

                # Check elevation is available
                elevation = ElevationModel.instance()
                center_elevation = elevation.value(center_wgs84)
                logger.info("Center point elevation: %s", center_elevation)

                logger.info("Generating: %s", filename)
                pipeline = DDDPipeline(['pipelines.osm_base.s10_init.py',
                                        'pipelines.osm_common.s10_locale_config.py',

                                        'pipelines.osm_base.s20_osm_features.py',
                                        'pipelines.osm_base.s20_osm_features_export_2d.py',

                                        'pipelines.osm_base.s30_groups.py',
                                        'pipelines.osm_base.s30_groups_ways.py',
                                        'pipelines.osm_base.s30_groups_buildings.py',
                                        'pipelines.osm_base.s30_groups_areas.py',
                                        'pipelines.osm_base.s30_groups_items_nodes.py',
                                        'pipelines.osm_base.s30_groups_items_ways.py',
                                        'pipelines.osm_base.s30_groups_items_areas.py',
                                        'pipelines.osm_base.s30_groups_export_2d.py',

                                        'pipelines.osm_base.s40_structured.py',
                                        'pipelines.osm_base.s40_structured_export_2d.py',

                                        'pipelines.osm_common.s45_pitch.py',

                                        'pipelines.osm_base.s50_stairs.py',
                                        'pipelines.osm_base.s50_positioning.py',
                                        'pipelines.osm_base.s50_crop.py',
                                        'pipelines.osm_base.s50_90_export_2d.py',

                                        'pipelines.osm_base.s60_model.py',
                                        'pipelines.osm_base.s60_model_export_3d.py',

                                        'pipelines.osm_gdterrain.s60_terrain_export.py',

                                        'pipelines.osm_augment.s50_ways.py',
                                        'pipelines.osm_augment.s55_plants.py',

                                        'pipelines.osm_default_2d.s30_icons.py',

                                        #'pipelines.osm_extras.mapillary.py',
                                        #'pipelines.osm_extras.ortho.py',
                                        ], name="OSM Build Pipeline")

                pipeline.data['osmfiles'] = files
                pipeline.data['filenamebase'] = filenamebase

                # Fusion DDD data with pipeline data, so changes to the latter affect the former
                # TODO: better way to do this without globals and merging data?
                D1D2D3.data.update(pipeline.data)
                D1D2D3.data = pipeline.data

                try:
                    osmbuilder = osm.OSMBuilder(area_crop=area_crop, area_filter=area_filter, osm_proj=osm_proj, ddd_proj=ddd_proj)
                    pipeline.data['osm'] = osmbuilder

                    pipeline.run()
                    #scene = osmbuilder.generate()

                    tasks_count += 1

                finally:
                    # Ensure lock file is removed
                    try:
                        os.unlink(lockfilename)
                    except Exception as e:
                        pass

                    for hdlr in logging.getLogger().handlers:
                        hdlr.setFormatter(old_formatters[hdlr])

        except FileExistsError as e:
            logger.info("Skipping: %s (lock file exists)", filename)

    if existed > 0:
        logger.info("Skipped %d files that already existed.", existed)
    if skipped > 0:
        logger.info("Skipped %d files not contained in greater filtering area.", skipped)
def terrain_geotiff_elevation_value(v, ddd_proj):
    """
    Returns the terrain elevation (in meters) under a point given in DDD (local metric) coordinates.
    """
    elevation = ElevationModel.instance()
    v_h = elevation.value(transform_ddd_to_geo(ddd_proj, [v[0], v[1]]))
    return v_h
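# Hedged usage sketch: sampling the elevation under a single DDD-space point, assuming
# `ddd_proj` is the same local projection used by the other terrain functions in this module:
#
#   h = terrain_geotiff_elevation_value([0.0, 0.0], ddd_proj)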