def build_curv_tol_weight_map(tile, weight_array):
    if tile.apt_curv_tol != tile.curvature_tol and tile.apt_curv_tol > 0:
        UI.vprint(1, "-> Modifying curv_tol weight map according to runway locations.")
        try:
            f = open(FNAMES.apt_file(tile), 'rb')
            dico_airports = pickle.load(f)
            f.close()
        except:
            UI.vprint(1, " WARNING: File", FNAMES.apt_file(tile),
                      "is missing (erased after Step 1?), cannot check airport info for upgraded zoomlevel.")
            dico_airports = {}
        for airport in dico_airports:
            (xmin, ymin, xmax, ymax) = dico_airports[airport]['boundary'].bounds
            x_shift = 1000 * tile.apt_curv_ext * GEO.m_to_lon(tile.lat)
            y_shift = 1000 * tile.apt_curv_ext * GEO.m_to_lat
            colmin = max(round((xmin - x_shift) * 1000), 0)
            colmax = min(round((xmax + x_shift) * 1000), 1000)
            rowmax = min(round(((1 - ymin) + y_shift) * 1000), 1000)
            rowmin = max(round(((1 - ymax) - y_shift) * 1000), 0)
            weight_array[rowmin:rowmax + 1, colmin:colmax + 1] = tile.curvature_tol / tile.apt_curv_tol
    if tile.coast_curv_tol != tile.curvature_tol:
        UI.vprint(1, "-> Modifying curv_tol weight map according to coastline location.")
        sea_layer = OSM.OSM_layer()
        queries = ['way["natural"="coastline"]']
        tags_of_interest = []
        if not OSM.OSM_queries_to_OSM_layer(queries, sea_layer, tile.lat, tile.lon,
                                            tags_of_interest, cached_suffix='coastline'):
            return 0
        for nodeid in sea_layer.dicosmn:
            (lonp, latp) = [float(x) for x in sea_layer.dicosmn[nodeid]]
            if lonp < tile.lon or lonp > tile.lon + 1 or latp < tile.lat or latp > tile.lat + 1:
                continue
            x_shift = 1000 * tile.coast_curv_ext * GEO.m_to_lon(tile.lat)
            y_shift = tile.coast_curv_ext / 111.12
            colmin = max(round((lonp - tile.lon - x_shift) * 1000), 0)
            colmax = min(round((lonp - tile.lon + x_shift) * 1000), 1000)
            rowmax = min(round((tile.lat + 1 - latp + y_shift) * 1000), 1000)
            rowmin = max(round((tile.lat + 1 - latp - y_shift) * 1000), 0)
            weight_array[rowmin:rowmax + 1, colmin:colmax + 1] = numpy.maximum(
                weight_array[rowmin:rowmax + 1, colmin:colmax + 1],
                tile.curvature_tol / tile.coast_curv_tol)
        del sea_layer
    # It could be of interest to write the weight file as a png for user editing
    # from PIL import Image
    # Image.fromarray((weight_array != 1).astype(numpy.uint8) * 255).save('weight.png')
    return
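# Illustrative sketch (hypothetical helper, not used by the pipeline): the (lon, lat) -> (row, col)
# mapping used above. The 1000x1000 weight_array spans the 1 deg x 1 deg tile, with column 0 at
# tile.lon (west edge) and row 0 at tile.lat + 1 (north edge), hence the (1 - y) flip and the
# clamping to [0, 1000]. Coordinates are degrees relative to the tile's SW corner.
def _bbox_to_weight_indices(xmin, ymin, xmax, ymax):
    colmin = max(round(xmin * 1000), 0)
    colmax = min(round(xmax * 1000), 1000)
    rowmin = max(round((1 - ymax) * 1000), 0)     # northern edge -> smallest row index
    rowmax = min(round((1 - ymin) * 1000), 1000)  # southern edge -> largest row index
    return (rowmin, rowmax, colmin, colmax)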
def create_terrain_file(tile, texture_file_name, til_x_left, til_y_top, zoomlevel,
                        provider_code, tri_type, is_overlay):
    if not os.path.exists(os.path.join(tile.build_dir, 'terrain')):
        os.makedirs(os.path.join(tile.build_dir, 'terrain'))
    suffix = '_water' if tri_type == 1 else '_sea' if tri_type == 2 else ''
    if is_overlay:
        suffix += '_overlay'
    ter_file_name = texture_file_name[:-4] + suffix + '.ter'
    if use_test_texture:
        texture_file_name = 'test_texture.dds'
    with open(os.path.join(tile.build_dir, 'terrain', ter_file_name), 'w') as f:
        f.write('A\n800\nTERRAIN\n\n')
        [lat_med, lon_med] = GEO.gtile_to_wgs84(til_x_left + 8, til_y_top + 8, zoomlevel)
        texture_approx_size = int(GEO.webmercator_pixel_size(lat_med, zoomlevel) * 4096)
        f.write('LOAD_CENTER ' + '{:.5f}'.format(lat_med) + ' '
                + '{:.5f}'.format(lon_med) + ' '
                + str(texture_approx_size) + ' 4096\n')
        f.write('BASE_TEX_NOWRAP ../textures/' + texture_file_name + '\n')
        if tri_type in (1, 2) and not is_overlay:
            # experimental water
            f.write('TEXTURE_NORMAL ' + str(2**(17 - zoomlevel)) + ' ../textures/water_normal_map.dds\n')
            f.write('GLOBAL_specular 1.0\n')
            f.write('NORMAL_METALNESS\n')
            if not os.path.exists(os.path.join(tile.build_dir, 'textures', 'water_normal_map.dds')):
                shutil.copy(os.path.join(FNAMES.Utils_dir, 'water_normal_map.dds'),
                            os.path.join(tile.build_dir, 'textures'))
        elif tri_type == 1 or (tri_type == 2 and is_overlay == 'ratio_water'):
            # constant transparency level
            f.write('BORDER_TEX ../textures/water_transition.png\n')
            if not os.path.exists(os.path.join(tile.build_dir, 'textures', 'water_transition.png')):
                shutil.copy(os.path.join(FNAMES.Utils_dir, 'water_transition.png'),
                            os.path.join(tile.build_dir, 'textures'))
        elif tri_type == 2:
            # border_tex mask
            f.write('LOAD_CENTER_BORDER ' + '{:.5f}'.format(lat_med) + ' '
                    + '{:.5f}'.format(lon_med) + ' ' + str(texture_approx_size) + ' '
                    + str(4096 // 2**(zoomlevel - tile.mask_zl)) + '\n')
            f.write('BORDER_TEX ../textures/'
                    + FNAMES.mask_file(til_x_left, til_y_top, zoomlevel, provider_code) + '\n')
        elif tile.use_decal_on_terrain:
            f.write('DECAL_LIB lib/g10/decals/maquify_1_green_key.dcl\n')
        if tri_type in (1, 2):
            f.write('WET\n')
        if tri_type in (1, 2) or not tile.terrain_casts_shadows:
            f.write('NO_SHADOW\n')
    return ter_file_name
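# Illustration: the LOAD_CENTER size written above is the approximate ground footprint, in
# metres, of the 4096 px texture. Below is a self-contained re-derivation of that estimate from
# the standard web-mercator pixel-size formula; it is an independent sketch for reference, not
# the GEO module's own implementation, and constants may differ slightly.
from math import cos, pi

def _webmercator_pixel_size_m(lat, zoomlevel, earth_radius=6378137.0):
    """Metres per pixel of a 256 px web-mercator tile at the given latitude and zoom level."""
    return 2 * pi * earth_radius * cos(lat * pi / 180) / (256 * 2 ** zoomlevel)

# e.g. at lat 45 and ZL 16 one pixel covers roughly 1.7 m, so a 4096 px texture spans ~7 km:
# int(_webmercator_pixel_size_m(45.0, 16) * 4096)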
def discard_unwanted_airports(tile, dico_airports):
    # A bit of cleaning (aeromodelling fields, helipads, ... should be removed here)
    for airport in list(dico_airports.keys()):
        apt = dico_airports[airport]
        # if apt['key_type'] in ('icao', 'iata', 'local_ref'): continue
        if apt['boundary']:
            if apt['boundary'].area < 5000 * GEO.m_to_lat * GEO.m_to_lon(tile.lat):
                # too small, skip it
                dico_airports.pop(airport, None)
                continue
        if apt['runway'][0].area < 2500 * GEO.m_to_lat * GEO.m_to_lon(tile.lat):
            # too small, skip it
            dico_airports.pop(airport, None)
            continue
def filter_large_lakes(pol, osmid, dicosmtags):
    if pol.area < large_lake_threshold:
        return False
    area = int(pol.area * GEO.lat_to_m * GEO.lon_to_m(tile.lat + 0.5) / 1e6)
    if (osmid in dicosmtags) and ('name' in dicosmtags[osmid]):
        if dicosmtags[osmid]['name'] in good_imagery_list:
            UI.vprint(1, " * ", dicosmtags[osmid]['name'],
                      "will be kept with complete imagery although it is", area, "km^2.")
            return False
        else:
            UI.vprint(1, " * ", dicosmtags[osmid]['name'],
                      "will be masked like the sea due to its large area of", area, "km^2.")
            return True
    else:
        pt = pol.exterior.coords[0] if 'Multi' not in pol.geom_type else pol[0].exterior.coords[0]
        UI.vprint(1, " * ", "Some large OSM water patch close to lat=",
                  '{:.2f}'.format(pt[1] + tile.lat), "lon=", '{:.2f}'.format(pt[0] + tile.lon),
                  "will be masked due to its large area of", area, "km^2.")
        return True
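# Illustration of the unit conversions used in the two functions above: polygon areas are in
# squared degrees (tile-relative coordinates), thresholds are given in m^2 and reported sizes
# in km^2. Hypothetical stand-alone helpers, assuming ~111.12 km per degree of latitude as
# elsewhere in this module:
from math import cos, pi

def _m2_to_deg2(area_m2, lat):
    m_to_lat = 1 / 111120.0                        # degrees of latitude per metre
    m_to_lon = m_to_lat / cos(lat * pi / 180)      # degrees of longitude per metre
    return area_m2 * m_to_lat * m_to_lon

def _deg2_to_km2(area_deg2, lat):
    lat_to_m = 111120.0
    lon_to_m = lat_to_m * cos(lat * pi / 180)
    return area_deg2 * lat_to_m * lon_to_m / 1e6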
def flatten_helipads(airport_layer, vector_map, tile, patches_area): multipol = [] seeds = [] total = 0 # helipads whose boundary is encoded in OSM for wayid in (x for x in airport_layer.dicosmw if x in airport_layer.dicosmtags['w'] and 'aeroway' in airport_layer.dicosmtags['w'][x] and airport_layer.dicosmtags['w'][x]['aeroway'] == 'helipad'): if airport_layer.dicosmw[wayid][0] != airport_layer.dicosmw[wayid][-1]: continue way = numpy.round( numpy.array([ airport_layer.dicosmn[nodeid] for nodeid in airport_layer.dicosmw[wayid] ]) - numpy.array([[tile.lon, tile.lat]]), 7) pol = geometry.Polygon(way) if (pol.is_empty) or (not pol.is_valid) or (not pol.area) or ( pol.intersects(patches_area)): continue multipol.append(pol) alti_way = numpy.ones( (len(way), 1)) * numpy.mean(tile.dem.alt_vec(way)) vector_map.insert_way(numpy.hstack([way, alti_way]), 'INTERP_ALT', check=True) seeds.append(numpy.array(pol.representative_point())) total += 1 helipad_area = ops.cascaded_union(multipol) # helipads that are only encoded as nodes, they will be grown into hexagons for nodeid in (x for x in airport_layer.dicosmn if x in airport_layer.dicosmtags['n'] and 'aeroway' in airport_layer.dicosmtags['n'][x] and airport_layer.dicosmtags['n'][x]['aeroway'] == 'helipad'): center = numpy.round( numpy.array(airport_layer.dicosmn[nodeid]) - numpy.array([tile.lon, tile.lat]), 7) if geometry.Point(center).intersects(helipad_area) or geometry.Point( center).intersects(patches_area): continue way = numpy.round( center + numpy.array([[ cos(k * pi / 3) * 7 * GEO.m_to_lon(tile.lat), sin(k * pi / 3) * 7 * GEO.m_to_lat ] for k in range(7)]), 7) alti_way = numpy.ones( (len(way), 1)) * numpy.mean(tile.dem.alt_vec(way)) vector_map.insert_way(numpy.hstack([way, alti_way]), 'INTERP_ALT', check=True) seeds.append(center) total += 1 if seeds: if 'INTERP_ALT' in vector_map.seeds: vector_map.seeds['INTERP_ALT'] += seeds else: vector_map.seeds['INTERP_ALT'] = seeds if total: UI.vprint(1, " Flattened", total, "helipads.")
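# Illustration: node-only helipads above are grown into small hexagons of ~7 m radius, six
# vertices at angles k*pi/3 with a 7th point closing the ring, and metres converted to degrees.
# A self-contained sketch of that construction (hypothetical helper, pure numpy):
import numpy
from math import cos, sin, pi

def _hexagon_around(center, radius_m, lat):
    """center is (lon, lat) relative to the tile corner; returns a closed 7-point ring."""
    m_to_lat = 1 / 111120.0
    m_to_lon = m_to_lat / cos(lat * pi / 180)
    return numpy.round(numpy.array(center) + numpy.array(
        [[cos(k * pi / 3) * radius_m * m_to_lon,
          sin(k * pi / 3) * radius_m * m_to_lat] for k in range(7)]), 7)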
def build_airport_array(tile, dico_airports):
    airport_array = numpy.zeros((1001, 1001), dtype=bool)  # numpy.bool is removed in recent NumPy
    for airport in dico_airports:
        (xmin, ymin, xmax, ymax) = dico_airports[airport]['boundary'].bounds
        x_shift = 1500 * GEO.m_to_lon(tile.lat)
        y_shift = 1500 * GEO.m_to_lat
        colmin = max(round((xmin - x_shift) * 1000), 0)
        colmax = min(round((xmax + x_shift) * 1000), 1000)
        rowmax = min(round(((1 - ymin) + y_shift) * 1000), 1000)
        rowmin = max(round(((1 - ymax) - y_shift) * 1000), 0)
        airport_array[rowmin:rowmax + 1, colmin:colmax + 1] = True
    return airport_array
def build_poly_file(tile): UI.red_flag = 0 UI.logprint("Step 1 for tile lat=", tile.lat, ", lon=", tile.lon, ": starting.") UI.vprint( 0, "\nStep 1 : Building vector data for tile " + FNAMES.short_latlon(tile.lat, tile.lon) + " : \n--------\n") timer = time.time() if not os.path.exists(tile.build_dir): os.makedirs(tile.build_dir) if not os.path.exists(FNAMES.osm_dir(tile.lat, tile.lon)): os.makedirs(FNAMES.osm_dir(tile.lat, tile.lon)) node_file = FNAMES.input_node_file(tile) poly_file = FNAMES.input_poly_file(tile) vector_map = VECT.Vector_Map() tile.ensure_elevation_data() if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Patches patches_area = include_patches(vector_map, tile) UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Airports include_airports(vector_map, tile, patches_area) UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Roads include_roads(vector_map, tile) UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Sea include_sea(vector_map, tile) UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Water include_water(vector_map, tile) UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Buildings # include_buildings(vector_map) # if UI.red_flag: UI.exit_message_and_bottom_line(); return 0 # Orthogrid UI.vprint(0, "-> Inserting edges related to the orthophotos grid") xgrid = set() # x coordinates of vertical grid lines ygrid = set() # y coordinates of horizontal grid lines (til_xul, til_yul) = GEO.wgs84_to_orthogrid(tile.lat + 1, tile.lon, tile.mesh_zl) (til_xlr, til_ylr) = GEO.wgs84_to_orthogrid(tile.lat, tile.lon + 1, tile.mesh_zl) for til_x in range(til_xul + 16, til_xlr + 1, 16): pos_x = (til_x / (2**(tile.mesh_zl - 1)) - 1) xgrid.add(pos_x * 180 - tile.lon) for til_y in range(til_yul + 16, til_ylr + 1, 16): pos_y = (1 - (til_y) / (2**(tile.mesh_zl - 1))) ygrid.add(360 / pi * atan(exp(pi * pos_y)) - 90 - tile.lat) xgrid.add(0) xgrid.add(1) ygrid.add(0) ygrid.add(1) xgrid = list(sorted(xgrid)) ygrid = list(sorted(ygrid)) ortho_network = geometry.MultiLineString( [geometry.LineString([(x, 0), (x, 1)]) for x in xgrid] + [geometry.LineString([(0, y), (1, y)]) for y in ygrid]) vector_map.encode_MultiLineString(ortho_network, tile.dem.alt_vec, 'DUMMY', check=True, skip_cut=True) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Gluing edges UI.vprint(0, "-> Inserting additional boundary edges for gluing") segs = 2500 gluing_network=geometry.MultiLineString([\ geometry.LineString([(x,0) for x in numpy.arange(0,segs+1)/segs]),\ geometry.LineString([(x,1) for x in numpy.arange(0,segs+1)/segs]),\ geometry.LineString([(0,y) for y in numpy.arange(0,segs+1)/segs]),\ geometry.LineString([(1,y) for y in numpy.arange(0,segs+1)/segs])]) vector_map.encode_MultiLineString(gluing_network, tile.dem.alt_vec, 'DUMMY', check=True, skip_cut=True) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 UI.vprint(0, "-> Transcription to the files ", poly_file, "and .node") if not vector_map.seeds: if tile.dem.alt_dem.max() >= 1: vector_map.seeds['SEA'] = [numpy.array([1000, 1000])] else: vector_map.seeds['SEA'] = [numpy.array([0.5, 0.5])] 
vector_map.write_node_file(node_file) vector_map.write_poly_file(poly_file) UI.vprint(1, "\nFinal number of constrained edges :", len(vector_map.dico_edges)) UI.timings_and_bottom_line(timer) UI.logprint("Step 1 for tile lat=", tile.lat, ", lon=", tile.lon, ": normal exit.") return 1
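# Illustration: the horizontal orthogrid lines inserted above come from inverting the
# web-mercator projection. For a tile row index til_y at zoom mesh_zl,
# pos_y = 1 - til_y / 2**(mesh_zl - 1) and the latitude of that grid line is
# 360/pi * atan(exp(pi * pos_y)) - 90. Self-contained check (hypothetical helper mirroring
# the expression used above):
from math import atan, exp, pi

def _gtile_row_to_lat(til_y, zoomlevel):
    pos_y = 1 - til_y / 2 ** (zoomlevel - 1)
    return 360 / pi * atan(exp(pi * pos_y)) - 90

# e.g. _gtile_row_to_lat(0, 16) -> ~85.05 (the mercator latitude limit) and
#      _gtile_row_to_lat(2**15, 16) -> 0.0 (the equator).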
def zone_list_to_ortho_dico(tile): # tile.zone_list is a list of 3-uples of the form ([(lat0,lat0),...(latN,lonN),zoomlevel,provider_code) # where higher lines have priority over lower ones. masks_im = Image.new("L", (4096, 4096), 'black') masks_draw = ImageDraw.Draw(masks_im) airport_array = numpy.zeros((4096, 4096), dtype=numpy.bool) if tile.cover_airports_with_highres: UI.vprint(1, "-> Checking airport locations for upgraded zoomlevel.") try: f = open(FNAMES.apt_file(tile), 'rb') dico_airports = pickle.load(f) f.close() except: UI.vprint( 1, " WARNING: File", FNAMES.apt_file(tile), "is missing (erased after Step 1?), cannot check airport info for upgraded zoomlevel." ) dico_airports = {} for airport in dico_airports: (xmin, ymin, xmax, ymax) = dico_airports[airport]['boundary'].bounds # extension xmin -= 1000 * tile.cover_extent * GEO.m_to_lon(tile.lat) xmax += 1000 * tile.cover_extent * GEO.m_to_lon(tile.lat) ymax += 1000 * tile.cover_extent * GEO.m_to_lat ymin -= 1000 * tile.cover_extent * GEO.m_to_lat # round off to texture boundaries at tile.cover_zl zoomlevel (til_x_left, til_y_top) = GEO.wgs84_to_orthogrid(ymax + tile.lat, xmin + tile.lon, tile.cover_zl) (ymax, xmin) = GEO.gtile_to_wgs84(til_x_left, til_y_top, tile.cover_zl) ymax -= tile.lat xmin -= tile.lon (til_x_left2, til_y_top2) = GEO.wgs84_to_orthogrid(ymin + tile.lat, xmax + tile.lon, tile.cover_zl) (ymin, xmax) = GEO.gtile_to_wgs84(til_x_left2 + 16, til_y_top2 + 16, tile.cover_zl) ymin -= tile.lat xmax -= tile.lon xmin = max(0, xmin) xmax = min(1, xmax) ymin = max(0, ymin) ymax = min(1, ymax) # mark to airport_array colmin = round(xmin * 4095) colmax = round(xmax * 4095) rowmax = round((1 - ymin) * 4095) rowmin = round((1 - ymax) * 4095) airport_array[rowmin:rowmax + 1, colmin:colmax + 1] = 1 dico_tmp = {} dico_customzl = {} i = 1 base_zone = ((tile.lat, tile.lon, tile.lat, tile.lon + 1, tile.lat + 1, tile.lon + 1, tile.lat + 1, tile.lon, tile.lat, tile.lon), tile.default_zl, tile.default_website) for region in [base_zone] + tile.zone_list[::-1]: dico_tmp[i] = (region[1], region[2]) pol = [(round((x - tile.lon) * 4095), round((tile.lat + 1 - y) * 4095)) for (x, y) in zip(region[0][1::2], region[0][::2])] masks_draw.polygon(pol, fill=i) i += 1 til_x_min, til_y_min = GEO.wgs84_to_orthogrid(tile.lat + 1, tile.lon, tile.mesh_zl) til_x_max, til_y_max = GEO.wgs84_to_orthogrid(tile.lat, tile.lon + 1, tile.mesh_zl) for til_x in range(til_x_min, til_x_max + 1, 16): for til_y in range(til_y_min, til_y_max + 1, 16): (latp, lonp) = GEO.gtile_to_wgs84(til_x + 8, til_y + 8, tile.mesh_zl) lonp = max(min(lonp, tile.lon + 1), tile.lon) latp = max(min(latp, tile.lat + 1), tile.lat) x = round((lonp - tile.lon) * 4095) y = round((tile.lat + 1 - latp) * 4095) (zoomlevel, provider_code) = dico_tmp[masks_im.getpixel((x, y))] if airport_array[y, x]: zoomlevel = max(zoomlevel, tile.cover_zl) til_x_text = 16 * (int(til_x / 2**(tile.mesh_zl - zoomlevel)) // 16) til_y_text = 16 * (int(til_y / 2**(tile.mesh_zl - zoomlevel)) // 16) dico_customzl[(til_x, til_y)] = (til_x_text, til_y_text, zoomlevel, provider_code) return dico_customzl
def zone_list_to_ortho_dico(tile): # tile.zone_list is a list of 3-uples of the form ([(lat0,lat0),...(latN,lonN),zoomlevel,provider_code) # where higher lines have priority over lower ones. masks_im=Image.new("L",(4096,4096),'black') masks_draw=ImageDraw.Draw(masks_im) airport_array=numpy.zeros((4096,4096),dtype=numpy.bool) if tile.cover_airports_with_highres: UI.vprint(1,"-> Checking airport locations for upgraded zoomlevel.") airport_layer=OSM.OSM_layer() queries=[('rel["aeroway"="runway"]','rel["aeroway"="taxiway"]','rel["aeroway"="apron"]', 'way["aeroway"="runway"]','way["aeroway"="taxiway"]','way["aeroway"="apron"]')] tags_of_interest=["all"] if not OSM.OSM_queries_to_OSM_layer(queries,airport_layer,tile.lat,tile.lon,tags_of_interest,cached_suffix='airports'): return 0 runway_network=OSM.OSM_to_MultiLineString(airport_layer,tile.lat,tile.lon) runway_area=VECT.improved_buffer(runway_network,0.0003,0.0001,0.00001) runway_area=VECT.ensure_MultiPolygon(runway_area) for polygon in runway_area.geoms: (xmin,ymin,xmax,ymax)=polygon.bounds # extension xmin-=1000*tile.cover_extent*GEO.m_to_lon(tile.lat) xmax+=1000*tile.cover_extent*GEO.m_to_lon(tile.lat) ymax+=1000*tile.cover_extent*GEO.m_to_lat ymin-=1000*tile.cover_extent*GEO.m_to_lat # round off to texture boundaries at tile.cover_zl zoomlevel (til_x_left,til_y_top)=GEO.wgs84_to_orthogrid(ymax+tile.lat,xmin+tile.lon,tile.cover_zl) (ymax,xmin)=GEO.gtile_to_wgs84(til_x_left,til_y_top,tile.cover_zl) ymax-=tile.lat; xmin-=tile.lon (til_x_left,til_y_top)=GEO.wgs84_to_orthogrid(ymin+tile.lat,xmax+tile.lon,tile.cover_zl) (ymin,xmax)=GEO.gtile_to_wgs84(til_x_left+16,til_y_top+16,tile.cover_zl) ymin-=tile.lat; xmax-=tile.lon # mark to airport_array colmin=round(xmin*4095) colmax=round(xmax*4095) rowmax=round((1-ymin)*4095) rowmin=round((1-ymax)*4095) airport_array[rowmin:rowmax+1,colmin:colmax+1]=1 del(airport_layer) del(runway_network) del(runway_area) dico_tmp={} dico_customzl={} i=1 base_zone=((tile.lat,tile.lon,tile.lat,tile.lon+1,tile.lat+1,tile.lon+1,tile.lat+1,tile.lon,tile.lat,tile.lon),tile.default_zl,tile.default_website) for region in [base_zone]+tile.zone_list[::-1]: dico_tmp[i]=(region[1],region[2]) pol=[(round((x-tile.lon)*4095),round((tile.lat+1-y)*4095)) for (x,y) in zip(region[0][1::2],region[0][::2])] masks_draw.polygon(pol,fill=i) i+=1 til_x_min,til_y_min=GEO.wgs84_to_orthogrid(tile.lat+1,tile.lon,tile.mesh_zl) til_x_max,til_y_max=GEO.wgs84_to_orthogrid(tile.lat,tile.lon+1,tile.mesh_zl) for til_x in range(til_x_min,til_x_max+1,16): for til_y in range(til_y_min,til_y_max+1,16): (latp,lonp)=GEO.gtile_to_wgs84(til_x+8,til_y+8,tile.mesh_zl) lonp=max(min(lonp,tile.lon+1),tile.lon) latp=max(min(latp,tile.lat+1),tile.lat) x=round((lonp-tile.lon)*4095) y=round((tile.lat+1-latp)*4095) (zoomlevel,provider_code)=dico_tmp[masks_im.getpixel((x,y))] if airport_array[y,x]: zoomlevel=max(zoomlevel,tile.cover_zl) til_x_text=16*(int(til_x/2**(tile.mesh_zl-zoomlevel))//16) til_y_text=16*(int(til_y/2**(tile.mesh_zl-zoomlevel))//16) dico_customzl[(til_x,til_y)]=(til_x_text,til_y_text,zoomlevel,provider_code) return dico_customzl
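# Illustration: each ortho texture covers a 16x16 block of web tiles at its own zoomlevel,
# i.e. 16 * 2**(mesh_zl - zoomlevel) mesh-zoom tiles per side. The rounding used in
# zone_list_to_ortho_dico above recovers the upper-left tile index of the texture a given
# mesh tile falls into. Hypothetical stand-alone form of that expression:
def _texture_upper_left(til_x, til_y, mesh_zl, zoomlevel):
    til_x_text = 16 * (int(til_x / 2 ** (mesh_zl - zoomlevel)) // 16)
    til_y_text = 16 * (int(til_y / 2 ** (mesh_zl - zoomlevel)) // 16)
    return (til_x_text, til_y_text)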
def build_mesh(tile): UI.red_flag = False UI.logprint("Step 2 for tile lat=", tile.lat, ", lon=", tile.lon, ": starting.") UI.vprint( 0, "\nStep 2 : Building mesh tile " + FNAMES.short_latlon(tile.lat, tile.lon) + " : \n--------\n") UI.progress_bar(1, 0) timer = time.time() tri_verbosity = 'Q' if UI.verbosity <= 1 else 'V' tile_log = open( os.path.join(FNAMES.Tile_dir, 'zOrtho4XP_' + FNAMES.short_latlon(tile.lat, tile.lon), FNAMES.short_latlon(tile.lat, tile.lon) + ".log"), 'w+') if tile.iterate == 0: Tri_option = '-pAuYB' + tri_verbosity else: Tri_option = '-pruYB' + tri_verbosity poly_file = FNAMES.input_poly_file(tile) alt_file = FNAMES.alt_file(tile) weight_file = FNAMES.weight_file(tile) if not os.path.isfile(poly_file): UI.exit_message_and_bottom_line("\nERROR: Could not find ", poly_file) return 0 tile.ensure_elevation_data() if UI.red_flag: UI.exit_message_and_bottom_line() return 0 tile.dem.write_to_file(alt_file) weight_array = numpy.ones((1000, 1000), dtype=numpy.float32) build_curv_tol_weight_map(tile, weight_array) weight_array.tofile(weight_file) del (weight_array) curv_tol_scaling = tile.dem.nxdem / (1000 * (tile.dem.x1 - tile.dem.x0)) hmin_effective = max(tile.hmin, (tile.dem.y1 - tile.dem.y0) * GEO.lat_to_m / tile.dem.nydem / 2) mesh_cmd = [ Triangle4XP_cmd.strip(), Tri_option.strip(), '{:.9g}'.format(GEO.lon_to_m(tile.lat)), '{:.9g}'.format(GEO.lat_to_m), '{:n}'.format(tile.dem.nxdem), '{:n}'.format(tile.dem.nydem), '{:.9g}'.format(tile.dem.x0), '{:.9g}'.format(tile.dem.y0), '{:.9g}'.format(tile.dem.x1), '{:.9g}'.format(tile.dem.y1), '{:.9g}'.format(tile.dem.nodata), '{:.9g}'.format(tile.curvature_tol * curv_tol_scaling), '{:.9g}'.format(tile.min_angle), str(hmin_effective), alt_file, weight_file, poly_file ] UI.vprint(1, "-> Start of the mesh algorithm Triangle4XP.") UI.vprint(2, ' Mesh command:', ' '.join(mesh_cmd)) fingers_crossed = subprocess.Popen(mesh_cmd, stdout=subprocess.PIPE, bufsize=0) while True: line = fingers_crossed.stdout.readline() if not line: break else: print(line.decode("utf-8")[:-1]) tile_log.write(line.decode("utf-8")) fingers_crossed.poll() if fingers_crossed.returncode: UI.exit_message_and_bottom_line("\nERROR: Triangle4XP crashed !\n\n"+\ "If the reason is not due to the limited amount of RAM please\n"+\ "file a bug including the .node and .poly files for that you\n"+\ "will find in "+str(tile.build_dir)+".\n") tile_log.write(line.decode("utf-8")) tile_log.close() return 0 tile_log.close() if UI.red_flag: UI.exit_message_and_bottom_line() return 0 vertices = post_process_nodes_altitudes(tile) tile.dem = None # post_processing has introduced smoothing, we trash the dem data if UI.red_flag: UI.exit_message_and_bottom_line() return 0 write_mesh_file(tile, vertices) # UI.timings_and_bottom_line(timer) UI.logprint("Step 2 for tile lat=", tile.lat, ", lon=", tile.lon, ": normal exit.") return 1
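# Illustration of the two mesh-parameter rescalings above (hypothetical stand-alone forms):
# curv_tol_scaling compares the DEM's horizontal sampling density (points per degree of
# longitude over the elevation window x0..x1) with a reference density of 1000 points per
# degree, and hmin_effective keeps the minimal triangle edge length at or above half a DEM
# cell (in metres) so Triangle4XP does not refine below the data resolution.
def _curv_tol_scaling(nxdem, x0, x1):
    return nxdem / (1000 * (x1 - x0))

def _hmin_effective(hmin, y0, y1, nydem, lat_to_m=111120.0):
    return max(hmin, (y1 - y0) * lat_to_m / nydem / 2)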
def build_mask(til_x, til_y): if til_x < til_x_min or til_x > til_x_max or til_y < til_y_min or til_y > til_y_max: return 1 (latm0, lonm0) = GEO.gtile_to_wgs84(til_x, til_y, tile.mask_zl) (px0, py0) = GEO.wgs84_to_pix(latm0, lonm0, tile.mask_zl) px0 -= 1024 py0 -= 1024 # 1) We start with a black mask mask_im = Image.new("L", (4096 + 2 * 1024, 4096 + 2 * 1024), 'black') mask_draw = ImageDraw.Draw(mask_im) # 2) We fill it with white over the extent of each tile around for which we had a mesh available for mesh_file_name in mesh_file_name_list: latlonstr = mesh_file_name.split('.mes')[-2][-7:] lathere = int(latlonstr[0:3]) lonhere = int(latlonstr[3:7]) (px1, py1) = GEO.wgs84_to_pix(lathere, lonhere, tile.mask_zl) (px2, py2) = GEO.wgs84_to_pix(lathere, lonhere + 1, tile.mask_zl) (px3, py3) = GEO.wgs84_to_pix(lathere + 1, lonhere + 1, tile.mask_zl) (px4, py4) = GEO.wgs84_to_pix(lathere + 1, lonhere, tile.mask_zl) px1 -= px0 px2 -= px0 px3 -= px0 px4 -= px0 py1 -= py0 py2 -= py0 py3 -= py0 py4 -= py0 mask_draw.polygon([(px1, py1), (px2, py2), (px3, py3), (px4, py4)], fill='white') # 3a) We overwrite the white part of the mask with grey (ratio_water dependent) where inland water was detected in the first part above if (til_x, til_y) in dico_masks_inland: for (lat1, lon1, lat2, lon2, lat3, lon3) in dico_masks_inland[(til_x, til_y)]: (px1, py1) = GEO.wgs84_to_pix(lat1, lon1, tile.mask_zl) (px2, py2) = GEO.wgs84_to_pix(lat2, lon2, tile.mask_zl) (px3, py3) = GEO.wgs84_to_pix(lat3, lon3, tile.mask_zl) px1 -= px0 px2 -= px0 px3 -= px0 py1 -= py0 py2 -= py0 py3 -= py0 mask_draw.polygon( [(px1, py1), (px2, py2), (px3, py3)], fill=sea_level) # int(255*(1-tile.ratio_water))) # 3b) We overwrite the white + grey part of the mask with black where sea water was detected in the first part above for (lat1, lon1, lat2, lon2, lat3, lon3) in dico_masks[(til_x, til_y)]: (px1, py1) = GEO.wgs84_to_pix(lat1, lon1, tile.mask_zl) (px2, py2) = GEO.wgs84_to_pix(lat2, lon2, tile.mask_zl) (px3, py3) = GEO.wgs84_to_pix(lat3, lon3, tile.mask_zl) px1 -= px0 px2 -= px0 px3 -= px0 py1 -= py0 py2 -= py0 py3 -= py0 mask_draw.polygon([(px1, py1), (px2, py2), (px3, py3)], fill='black') del (mask_draw) # mask_im=mask_im.convert("L") img_array = numpy.array(mask_im, dtype=numpy.uint8) if tile.masks_use_DEM_too: # computing the part of the mask coming from the DEM: (latmax, lonmin) = GEO.pix_to_wgs84(px0, py0, tile.mask_zl) (latmin, lonmax) = GEO.pix_to_wgs84(px0 + 6144, py0 + 6144, tile.mask_zl) (x03857, y03857) = GEO.transform('4326', '3857', lonmin, latmax) (x13857, y13857) = GEO.transform('4326', '3857', lonmax, latmin) ((lonmin, lonmax, latmin, latmax), demarr4326) = tile.dem.super_level_set( mask_altitude_above, (lonmin, lonmax, latmin, latmax)) if demarr4326.any(): demim4326 = Image.fromarray( demarr4326.astype(numpy.uint8) * 255) del (demarr4326) s_bbox = (lonmin, latmax, lonmax, latmin) t_bbox = (x03857, y03857, x13857, y13857) demim3857 = IMG.gdalwarp_alternative(s_bbox, '4326', demim4326, t_bbox, '3857', (6144, 6144)) demim3857 = demim3857.filter( ImageFilter.GaussianBlur( 0.3 * 2**(tile.mask_zl - 14))) # slight increase of area dem_array = (numpy.array(demim3857, dtype=numpy.uint8) > 0).astype(numpy.uint8) * 255 del (demim3857) del (demim4326) img_array = numpy.maximum(img_array, dem_array) custom_mask_array = numpy.zeros((4096, 4096), dtype=numpy.uint8) if tile.masks_custom_extent: (latm1, lonm1) = GEO.gtile_to_wgs84(til_x + 16, til_y + 16, tile.mask_zl) bbox_4326 = (lonm0, latm0, lonm1, latm1) masks_im = 
IMG.has_data(bbox_4326, tile.masks_custom_extent, True, mask_size=(4096, 4096), is_sharp_resize=False, is_mask_layer=False) if masks_im: custom_mask_array = (numpy.array(masks_im, dtype=numpy.uint8) * (sea_level / 255)).astype(numpy.uint8) if (img_array.max() == 0) and ( custom_mask_array.max() == 0 ): # no need to test if the mask is all white since it would otherwise not be present in dico_mask UI.vprint(1, " Skipping", FNAMES.legacy_mask(til_x, til_y)) return 1 else: UI.vprint(1, " Creating", FNAMES.legacy_mask(til_x, til_y)) # Blur of the mask pxscal = GEO.webmercator_pixel_size(tile.lat + 0.5, tile.mask_zl) if tile.masking_mode == "sand": blur_width = int(tile.masks_width / pxscal) elif tile.masking_mode == "rocks": blur_width = tile.masks_width / (2 * pxscal) elif tile.masking_mode == "3steps": blur_width = [L / pxscal for L in tile.masks_width] if tile.masking_mode == "sand" and blur_width: # convolution with a hat function b_img_array = numpy.array(img_array) kernel = numpy.array(range(1, 2 * blur_width)) kernel[blur_width:] = range(blur_width - 1, 0, -1) kernel = kernel / blur_width**2 for i in range(0, len(b_img_array)): b_img_array[i] = numpy.convolve(b_img_array[i], kernel, 'same') b_img_array = b_img_array.transpose() for i in range(0, len(b_img_array)): b_img_array[i] = numpy.convolve(b_img_array[i], kernel, 'same') b_img_array = b_img_array.transpose() b_img_array = 2 * numpy.minimum(b_img_array, 127) b_img_array = numpy.array(b_img_array, dtype=numpy.uint8) elif tile.masking_mode == "rocks" and blur_width: # slight increase of the mask, then gaussian blur, nonlinear map and a tiny bit of smoothing again on a short scale along the shore b_img_array = (numpy.array(Image.fromarray(img_array).convert("L").\ filter(ImageFilter.GaussianBlur(blur_width / 1.7)), dtype=numpy.uint8) > 0).astype(numpy.uint8) * 255 # blur it b_img_array = numpy.array(Image.fromarray(b_img_array).convert("L").\ filter(ImageFilter.GaussianBlur(blur_width)), dtype=numpy.uint8) # nonlinear transform to make the transition quicker at the shore (gaussian is too flat) gamma = 2.5 b_img_array = (((numpy.tan((b_img_array.astype(numpy.float32) - 127.5) / 128 * atan(3)) - numpy.tan(-127.5 / 128 * atan(3)))\ *254 / (2 * numpy.tan(127.5 / 128 * atan(3)))) ** gamma / (255 ** (gamma - 1))).astype(numpy.uint8) # b_img_array=(1.4*(255-((256-b_img_array.astype(numpy.float32))/256.0)**0.2*255)).astype(numpy.uint8) # b_img_array=numpy.minimum(b_img_array,200) # still some slight smoothing at the shore b_img_array = numpy.maximum(b_img_array, numpy.array(Image.fromarray(img_array).convert("L").\ filter(ImageFilter.GaussianBlur(2 ** (tile.mask_zl - 14))), dtype=numpy.uint8)) elif tile.masking_mode == "3steps": # why trying something so complicated... 
transin = blur_width[0] midzone = blur_width[1] transout = blur_width[2] # print(transin,midzone,transout) shore_level = 255 b_img_array = b_mask_array = numpy.array(img_array) # First the transition at the shore # We go from shore_level to sea_level in transin meters stepsin = int(transin / 3) for i in range(stepsin): value = shore_level + transition_profile( (i + 1) / stepsin, 'parabolic') * (sea_level - shore_level) b_mask_array = (numpy.array(Image.fromarray(b_mask_array).convert("L").\ filter(ImageFilter.GaussianBlur(1)), dtype=numpy.uint8) > 0).astype(numpy.uint8) * 255 b_img_array[(b_img_array == 0) * (b_mask_array != 0)] = value UI.vprint(2, value) # Next the intermediate zone at constant transparency sea_b_radius = midzone / 3 sea_b_radius_buffered = (midzone + transout) / 3 b_mask_array = (numpy.array(Image.fromarray(b_mask_array).convert("L").\ filter(ImageFilter.GaussianBlur(sea_b_radius_buffered)), dtype=numpy.uint8) > 0).astype(numpy.uint8) * 255 b_mask_array = (numpy.array(Image.fromarray(b_mask_array).convert("L").\ filter(ImageFilter.GaussianBlur(sea_b_radius_buffered - sea_b_radius)), dtype=numpy.uint8) == 255).astype(numpy.uint8) * 255 b_img_array[(b_img_array == 0) * (b_mask_array != 0)] = sea_level # Finally the transition to the X-Plane sea # We go from sea_level to 0 in transout meters stepsout = int(transout / 3) for i in range(stepsout): value = sea_level * (1 - transition_profile( (i + 1) / stepsout, 'linear')) b_mask_array = (numpy.array(Image.fromarray(b_mask_array).convert("L").\ filter(ImageFilter.GaussianBlur(1)), dtype=numpy.uint8) > 0).astype(numpy.uint8) * 255 b_img_array[(b_img_array == 0) * (b_mask_array != 0)] = value UI.vprint(2, value) # To smoothen the thresolding introduced above we do a global short extent gaussian blur b_img_array = numpy.array(Image.fromarray(b_img_array).convert("L").\ filter(ImageFilter.GaussianBlur(2)), dtype=numpy.uint8) else: # Just a (futile) copy b_img_array = numpy.array(img_array) # Ensure land is kept to 255 on the mask to avoid unecessary ones, crop to final size, and take the # max with the possible custom extent mask img_array = numpy.maximum((img_array > 0).astype(numpy.uint8) * 255, b_img_array)[1024:4096 + 1024, 1024:4096 + 1024] img_array = numpy.maximum(img_array, custom_mask_array) if not (img_array.max() == 0 or img_array.min() == 255): masks_im = Image.fromarray( img_array) # .filter(ImageFilter.GaussianBlur(3)) masks_im.save( os.path.join(dest_dir, FNAMES.legacy_mask(til_x, til_y))) UI.vprint(2, " Done.") else: UI.vprint(1, " Ends-up being discarded.") return 1
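# Illustration: the "sand" masking mode above blurs the raw mask with a separable triangular
# ("hat") kernel of half-width blur_width, applied once along rows and once along columns.
# A compact self-contained sketch of that kernel (hypothetical helper; the weights sum to 1):
import numpy

def _hat_kernel(blur_width):
    kernel = numpy.array(range(1, 2 * blur_width), dtype=numpy.float64)
    kernel[blur_width:] = range(blur_width - 1, 0, -1)
    return kernel / blur_width ** 2

# e.g. _hat_kernel(3) -> [1, 2, 3, 2, 1] / 9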
def extract_mesh_to_obj(mesh_file,til_x_left,til_y_top,zoomlevel,provider_code): UI.red_flag=False timer=time.time() (latmax,lonmin)=GEO.gtile_to_wgs84(til_x_left,til_y_top,zoomlevel) (latmin,lonmax)=GEO.gtile_to_wgs84(til_x_left+16,til_y_top+16,zoomlevel) obj_file_name=FNAMES.obj_file(til_x_left,til_y_top,zoomlevel,provider_code) mtl_file_name=FNAMES.mtl_file(til_x_left,til_y_top,zoomlevel,provider_code) f_mesh=open(mesh_file,"r") for i in range(4): f_mesh.readline() nbr_pt_in=int(f_mesh.readline()) UI.vprint(1," Reading nodes...") pt_in=numpy.zeros(5*nbr_pt_in,'float') for i in range(nbr_pt_in): pt_in[5*i:5*i+3]=[float(x) for x in f_mesh.readline().split()[:3]] for i in range(3): f_mesh.readline() for i in range(nbr_pt_in): pt_in[5*i+3:5*i+5]=[float(x) for x in f_mesh.readline().split()[:2]] for i in range(0,2): # skip 2 lines f_mesh.readline() if UI.red_flag: UI.exit_message_and_bottom_line(); return 0 UI.vprint(1," Reading triangles...") nbr_tri_in=int(f_mesh.readline()) # read nbr of tris textured_nodes={} textured_nodes_inv={} nodes_st_coord={} len_textured_nodes=0 dico_new_tri={} len_dico_new_tri=0 for i in range(0,nbr_tri_in): (n1,n2,n3)=[int(x)-1 for x in f_mesh.readline().split()[:3]] (lon1,lat1,z1,u1,v1)=pt_in[5*n1:5*n1+5] (lon2,lat2,z2,u2,v2)=pt_in[5*n2:5*n2+5] (lon3,lat3,z3,u3,v3)=pt_in[5*n3:5*n3+5] if is_in_region((lat1+lat2+lat3)/3.0,(lon1+lon2+lon3)/3.0,latmin,latmax,lonmin,lonmax): if n1 not in textured_nodes_inv: len_textured_nodes+=1 textured_nodes_inv[n1]=len_textured_nodes textured_nodes[len_textured_nodes]=n1 nodes_st_coord[len_textured_nodes]=GEO.st_coord(lat1,lon1,til_x_left,til_y_top,zoomlevel,provider_code) n1new=textured_nodes_inv[n1] if n2 not in textured_nodes_inv: len_textured_nodes+=1 textured_nodes_inv[n2]=len_textured_nodes textured_nodes[len_textured_nodes]=n2 nodes_st_coord[len_textured_nodes]=GEO.st_coord(lat2,lon2,til_x_left,til_y_top,zoomlevel,provider_code) n2new=textured_nodes_inv[n2] if n3 not in textured_nodes_inv: len_textured_nodes+=1 textured_nodes_inv[n3]=len_textured_nodes textured_nodes[len_textured_nodes]=n3 nodes_st_coord[len_textured_nodes]=GEO.st_coord(lat3,lon3,til_x_left,til_y_top,zoomlevel,provider_code) n3new=textured_nodes_inv[n3] dico_new_tri[len_dico_new_tri]=(n1new,n2new,n3new) len_dico_new_tri+=1 nbr_vert=len_textured_nodes nbr_tri=len_dico_new_tri if UI.red_flag: UI.exit_message_and_bottom_line(); return 0 UI.vprint(1," Writing the obj file.") # first the obj file f=open(obj_file_name,"w") for i in range(1,nbr_vert+1): j=textured_nodes[i] f.write("v "+'{:.9f}'.format(pt_in[5*j]-lonmin)+" "+\ '{:.9f}'.format(pt_in[5*j+1]-latmin)+" "+\ '{:.9f}'.format(pt_in[5*j+2])+"\n") f.write("\n") for i in range(1,nbr_vert+1): j=textured_nodes[i] f.write("vn "+'{:.9f}'.format(pt_in[5*j+3])+" "+'{:.9f}'.format(pt_in[5*j+4])+" "+'{:.9f}'.format(sqrt(max(1-pt_in[5*j+3]**2-pt_in[5*j+4]**2,0)))+"\n") f.write("\n") for i in range(1,nbr_vert+1): j=textured_nodes[i] f.write("vt "+'{:.9f}'.format(nodes_st_coord[i][0])+" "+\ '{:.9f}'.format(nodes_st_coord[i][1])+"\n") f.write("\n") f.write("usemtl orthophoto\n\n") for i in range(0,nbr_tri): (one,two,three)=dico_new_tri[i] f.write("f "+str(one)+"/"+str(one)+"/"+str(one)+" "+str(two)+"/"+str(two)+"/"+str(two)+" "+str(three)+"/"+str(three)+"/"+str(three)+"\n") f_mesh.close() f.close() # then the mtl file f=open(mtl_file_name,'w') f.write("newmtl orthophoto\nmap_Kd "+FNAMES.geotiff_file_name_from_attributes(til_x_left,til_y_top,zoomlevel,provider_code)+"\n") f.close() UI.timings_and_bottom_line(timer) return
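# Illustration: the .mesh file stores only the horizontal components (nx, ny) of the unit
# normals; the OBJ export above reconstructs the vertical component as
# sqrt(max(1 - nx**2 - ny**2, 0)). Stand-alone form, for reference:
from math import sqrt

def _normal_z(nx, ny):
    return sqrt(max(1 - nx ** 2 - ny ** 2, 0))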
def sort_and_reconstruct_runways(tile, airport_layer, dico_airports): ### Runways in OSM are either encoded as linear features or as area features, and sometimes both for the same runway. Here we identify them and ### remove duplicates. Runways of linear type are also often split in OSM between multiple parts (displaced threshold etc), we also group them ### together in this funcion. for airport in dico_airports: ## Distinction between linear and area runways runways_as_area = [ ] # runways that are encoded in OSM as a polygon around their boundary runways_as_line = [ ] # runways that are encoded in OSM as a linestrings linear = [ ] # temporary list containing parts (displaced threshold, etc) of OSM runways as linestrings linear_width = [ ] # whenever the width tag appears for runways that are linear features, if not we'll try to guess the width from the length for wayid in dico_airports[airport]['runway']: if airport_layer.dicosmw[wayid][0] == airport_layer.dicosmw[wayid][ -1]: runway_pol = geometry.Polygon( numpy.round( numpy.array([ airport_layer.dicosmn[nodeid] for nodeid in airport_layer.dicosmw[wayid] ]) - numpy.array([tile.lon, tile.lat]), 7)) if not runway_pol.is_empty and runway_pol.is_valid and runway_pol.area > 1e-7: runway_pol_rect = VECT.min_bounding_rectangle(runway_pol) if wayid not in airport_layer.dicosmtags[ 'w'] or 'custom' not in airport_layer.dicosmtags[ 'w'][wayid]: discrep = runway_pol_rect.hausdorff_distance( runway_pol) if discrep > 0.0008: UI.logprint( "Bad runway (geometry too far from a rectangle) close to", airport, "at", dico_airports[airport]['repr_node']) UI.vprint( 1, " !Bad runway (geometry too far from a rectangle) close to", airport, "at", dico_airports[airport]['repr_node']) UI.vprint( 1, " !You may correct it editing the file ", FNAMES.osm_cached(tile.lat, tile.lon, 'airports'), "in JOSM.") continue rectangle = numpy.array( VECT.min_bounding_rectangle( runway_pol).exterior.coords) if VECT.length_in_meters( rectangle[0:2]) < VECT.length_in_meters( rectangle[1:3]): runway_start = (rectangle[0] + rectangle[1]) / 2 runway_end = (rectangle[2] + rectangle[3]) / 2 runway_width = VECT.length_in_meters(rectangle[0:2]) else: runway_start = (rectangle[1] + rectangle[2]) / 2 runway_end = (rectangle[0] + rectangle[3]) / 2 runway_width = VECT.length_in_meters(rectangle[1:3]) runways_as_area.append( (runway_pol, runway_start, runway_end, runway_width)) else: UI.logprint( 1, "Bad runway (geometry invalid or going back over itself) close to", airport, "at", dico_airports[airport]['repr_node']) UI.vprint( 1, " !Bad runway (geometry invalid or going back over itself) close to", airport, "at", dico_airports[airport]['repr_node']) UI.vprint( 1, " !You may correct it editing the file ", FNAMES.osm_cached(tile.lat, tile.lon, 'airports'), "in JOSM.") continue else: linear.append(airport_layer.dicosmw[wayid]) try: linear_width.append( float(airport_layer.dicosmtags['w'][wayid]['width'])) except: linear_width.append( 0 ) # 0 is just a mark for non existing data, a fictive length will be given later based on the runway length ## Line merge runway parts defined as linear features runway_parts_are_grouped = False while not runway_parts_are_grouped: runway_parts_are_grouped = True for i in range(len(linear) - 1): dir_i = numpy.arctan2( *(numpy.array(airport_layer.dicosmn[linear[i][-1]]) - numpy.array(airport_layer.dicosmn[linear[i][0]]))) for j in range(i + 1, len(linear)): dir_j = numpy.arctan2( *(numpy.array(airport_layer.dicosmn[linear[j][-1]]) - 
numpy.array(airport_layer.dicosmn[linear[j][0]]))) # Some different runways may share a common end-point in OSM, in this case we don't want to group them into a single one if not numpy.min( numpy.abs( numpy.array([-2 * pi, -pi, 0, pi, 2 * pi]) - (dir_i - dir_j))) < 0.2: continue if linear[i][-1] == linear[j][0]: linear = [ linear[k] for k in range(len(linear)) if k not in (i, j) ] + [linear[i] + linear[j][1:]] linear_width = [ linear_width[k] for k in range(len(linear_width)) if k not in (i, j) ] + [max(linear_width[i], linear_width[j])] runway_parts_are_grouped = False break elif linear[i][-1] == linear[j][-1]: linear = [ linear[k] for k in range(len(linear)) if k not in (i, j) ] + [linear[i] + linear[j][-2::-1]] linear_width = [ linear_width[k] for k in range(len(linear_width)) if k not in (i, j) ] + [max(linear_width[i], linear_width[j])] runway_parts_are_grouped = False break elif linear[i][0] == linear[j][0]: linear = [ linear[k] for k in range(len(linear)) if k not in (i, j) ] + [linear[i][-1::-1] + linear[j][1:]] linear_width = [ linear_width[k] for k in range(len(linear_width)) if k not in (i, j) ] + [max(linear_width[i], linear_width[j])] runway_parts_are_grouped = False break elif linear[i][0] == linear[j][-1]: linear = [ linear[k] for k in range(len(linear)) if k not in (i, j) ] + [linear[j] + linear[i][1:]] linear_width = [ linear_width[k] for k in range(len(linear_width)) if k not in (i, j) ] + [max(linear_width[i], linear_width[j])] runway_parts_are_grouped = False break if not runway_parts_are_grouped: break ## Grow linear runways into rectangle ones and check wether they are duplicates of existing area ones, in which case they are skipped for (nodeid_list, width) in zip(linear, linear_width): runway_start = airport_layer.dicosmn[nodeid_list[0]] runway_end = airport_layer.dicosmn[nodeid_list[-1]] runway_length = GEO.dist(runway_start, runway_end) runway_start = numpy.round( numpy.array(runway_start) - numpy.array([tile.lon, tile.lat]), 7) runway_end = numpy.round( numpy.array(runway_end) - numpy.array([tile.lon, tile.lat]), 7) if width: width += 10 else: width = 30 + runway_length // 1000 pol = geometry.Polygon( VECT.buffer_simple_way( numpy.vstack((runway_start, runway_end)), width)) keep_this = True i = 0 for pol2 in runways_as_area: if (pol2[0].intersection(pol)).area > 0.6 * min( pol.area, pol2[0].area): # update area one with start end and width from linear one runways_as_area[i] = (pol2[0], runway_start, runway_end, width) # and then skip the linear one keep_this = False break i += 1 if keep_this: runways_as_line.append((pol, runway_start, runway_end, width)) ## Save this into the dico_airport dictionnary runway = VECT.ensure_MultiPolygon( ops.cascaded_union( [item[0] for item in runways_as_area + runways_as_line])) dico_airports[airport]['runway'] = (runway, runways_as_area, runways_as_line) return
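# Illustration: when a linear runway carries no OSM width tag, the code above falls back to a
# length-driven guess (roughly 30 m plus 1 m per extra kilometre of runway), while tagged
# widths get 10 m added for shoulders. Hypothetical stand-alone form of that rule:
def _runway_width_m(tagged_width_m, runway_length_m):
    if tagged_width_m:
        return tagged_width_m + 10
    return 30 + runway_length_m // 1000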
def build_poly_file(tile): if UI.is_working: return 0 UI.is_working = 1 UI.red_flag = 0 # in case that was forgotten by the user tile.iterate = 0 # update the lat/lon scaling factor in VECT VECT.scalx = cos((tile.lat + 0.5) * pi / 180) # Let's go ! UI.logprint("Step 1 for tile lat=", tile.lat, ", lon=", tile.lon, ": starting.") UI.vprint( 0, "\nStep 1 : Building vector data for tile " + FNAMES.short_latlon(tile.lat, tile.lon) + " : \n--------\n") timer = time.time() if not os.path.exists(tile.build_dir): os.makedirs(tile.build_dir) if not os.path.exists(FNAMES.osm_dir(tile.lat, tile.lon)): os.makedirs(FNAMES.osm_dir(tile.lat, tile.lon)) node_file = FNAMES.input_node_file(tile) poly_file = FNAMES.input_poly_file(tile) vector_map = VECT.Vector_Map() if UI.red_flag: UI.exit_message_and_bottom_line() return 0 if O4_ESP_Globals.build_for_ESP and os.path.isfile( O4_Config_Utils.ESP_scenproc_loc): include_scenproc(tile) # Airports (apt_array, apt_area) = include_airports(vector_map, tile) UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Roads include_roads(vector_map, tile, apt_array, apt_area) if tile.road_level: UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Sea include_sea(vector_map, tile) UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Water include_water(vector_map, tile) UI.vprint(1, " Number of edges at this point:", len(vector_map.dico_edges)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Buildings # include_buildings(vector_map) # if UI.red_flag: UI.exit_message_and_bottom_line(); return 0 # Orthogrid UI.vprint(0, "-> Inserting edges related to the orthophotos grid") xgrid = set() # x coordinates of vertical grid lines ygrid = set() # y coordinates of horizontal grid lines (til_xul, til_yul) = GEO.wgs84_to_orthogrid(tile.lat + 1, tile.lon, tile.mesh_zl) (til_xlr, til_ylr) = GEO.wgs84_to_orthogrid(tile.lat, tile.lon + 1, tile.mesh_zl) for til_x in range(til_xul + 16, til_xlr + 1, 16): pos_x = (til_x / (2**(tile.mesh_zl - 1)) - 1) xgrid.add(pos_x * 180 - tile.lon) for til_y in range(til_yul + 16, til_ylr + 1, 16): pos_y = (1 - (til_y) / (2**(tile.mesh_zl - 1))) ygrid.add(360 / pi * atan(exp(pi * pos_y)) - 90 - tile.lat) xgrid.add(0) xgrid.add(1) ygrid.add(0) ygrid.add(1) xgrid = list(sorted(xgrid)) ygrid = list(sorted(ygrid)) eps = 2**-5 ortho_network = geometry.MultiLineString( [geometry.LineString([(x, 0.0 - eps), (x, 1.0 + eps)]) for x in xgrid] + [geometry.LineString([(0.0 - eps, y), (1.0 + eps, y)]) for y in ygrid]) vector_map.encode_MultiLineString(ortho_network, tile.dem.alt_vec, 'DUMMY', check=True, skip_cut=True) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 # Gluing edges UI.vprint(0, "-> Inserting additional boundary edges for gluing") segs = 2048 gluing_network=geometry.MultiLineString([\ geometry.LineString([(x,0) for x in numpy.arange(0,segs+1)/segs]),\ geometry.LineString([(x,1) for x in numpy.arange(0,segs+1)/segs]),\ geometry.LineString([(0,y) for y in numpy.arange(0,segs+1)/segs]),\ geometry.LineString([(1,y) for y in numpy.arange(0,segs+1)/segs])]) vector_map.encode_MultiLineString(gluing_network, tile.dem.alt_vec, 'DUMMY', check=True, skip_cut=True) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 UI.vprint(0, "-> Transcription to the files ", poly_file, "and .node") if 
not vector_map.seeds: if tile.dem.alt_dem.max() >= 1: vector_map.seeds['SEA'] = [numpy.array([1000, 1000])] else: vector_map.seeds['SEA'] = [numpy.array([0.5, 0.5])] vector_map.snap_to_grid(15) vector_map.write_node_file(node_file) vector_map.write_poly_file(poly_file) UI.vprint(1, "\nFinal number of constrained edges :", len(vector_map.dico_edges)) UI.timings_and_bottom_line(timer) UI.logprint("Step 1 for tile lat=", tile.lat, ", lon=", tile.lon, ": normal exit.") return 1
def zone_list_to_ortho_dico(tile): # tile.zone_list is a list of 3-uples of the form ([(lat0,lat0),...(latN,lonN),zoomlevel,provider_code) # where higher lines have priority over lower ones. masks_im = Image.new("L", (4096, 4096), 'black') masks_draw = ImageDraw.Draw(masks_im) airport_array = numpy.zeros((4096, 4096), dtype=numpy.bool) if tile.cover_airports_with_highres in ['True', 'ICAO']: UI.vprint(1, "-> Checking airport locations for upgraded zoomlevel.") try: f = open(FNAMES.apt_file(tile), 'rb') dico_airports = pickle.load(f) f.close() except: UI.vprint( 1, " WARNING: File", FNAMES.apt_file(tile), "is missing (erased after Step 1?), cannot check airport info for upgraded zoomlevel." ) dico_airports = {} if tile.cover_airports_with_highres == 'ICAO': airports_list = [ airport for airport in dico_airports if dico_airports[airport]['key_type'] == 'icao' ] else: airports_list = dico_airports.keys() for airport in airports_list: (xmin, ymin, xmax, ymax) = dico_airports[airport]['boundary'].bounds # extension xmin -= 1000 * tile.cover_extent * GEO.m_to_lon(tile.lat) xmax += 1000 * tile.cover_extent * GEO.m_to_lon(tile.lat) ymax += 1000 * tile.cover_extent * GEO.m_to_lat ymin -= 1000 * tile.cover_extent * GEO.m_to_lat # round off to texture boundaries at tile.cover_zl zoomlevel (til_x_left, til_y_top) = GEO.wgs84_to_orthogrid(ymax + tile.lat, xmin + tile.lon, tile.cover_zl) (ymax, xmin) = GEO.gtile_to_wgs84(til_x_left, til_y_top, tile.cover_zl) ymax -= tile.lat xmin -= tile.lon (til_x_left2, til_y_top2) = GEO.wgs84_to_orthogrid(ymin + tile.lat, xmax + tile.lon, tile.cover_zl) (ymin, xmax) = GEO.gtile_to_wgs84(til_x_left2 + 16, til_y_top2 + 16, tile.cover_zl) ymin -= tile.lat xmax -= tile.lon xmin = max(0, xmin) xmax = min(1, xmax) ymin = max(0, ymin) ymax = min(1, ymax) # mark to airport_array colmin = round(xmin * 4095) colmax = round(xmax * 4095) rowmax = round((1 - ymin) * 4095) rowmin = round((1 - ymax) * 4095) airport_array[rowmin:rowmax + 1, colmin:colmax + 1] = 1 dico_customzl = {} dico_tmp = {} til_x_min, til_y_min = GEO.wgs84_to_orthogrid(tile.lat + 1, tile.lon, tile.mesh_zl) til_x_max, til_y_max = GEO.wgs84_to_orthogrid(tile.lat, tile.lon + 1, tile.mesh_zl) i = 1 base_zone = ((tile.lat, tile.lon, tile.lat, tile.lon + 1, tile.lat + 1, tile.lon + 1, tile.lat + 1, tile.lon, tile.lat, tile.lon), tile.default_zl, tile.default_website) for region in [base_zone] + tile.zone_list[::-1]: dico_tmp[i] = (region[1], region[2]) pol = [(round((x - tile.lon) * 4095), round((tile.lat + 1 - y) * 4095)) for (x, y) in zip(region[0][1::2], region[0][::2])] masks_draw.polygon(pol, fill=i) i += 1 for til_x in range(til_x_min, til_x_max + 1, 16): for til_y in range(til_y_min, til_y_max + 1, 16): (latp, lonp) = GEO.gtile_to_wgs84(til_x + 8, til_y + 8, tile.mesh_zl) lonp = max(min(lonp, tile.lon + 1), tile.lon) latp = max(min(latp, tile.lat + 1), tile.lat) x = round((lonp - tile.lon) * 4095) y = round((tile.lat + 1 - latp) * 4095) (zoomlevel, provider_code) = dico_tmp[masks_im.getpixel((x, y))] if airport_array[y, x]: zoomlevel = max(zoomlevel, tile.cover_zl) til_x_text = 16 * (int(til_x / 2**(tile.mesh_zl - zoomlevel)) // 16) til_y_text = 16 * (int(til_y / 2**(tile.mesh_zl - zoomlevel)) // 16) dico_customzl[(til_x, til_y)] = (til_x_text, til_y_text, zoomlevel, provider_code) if tile.cover_airports_with_highres == 'Existing': # what we find in the texture folder of the existing tile for f in os.listdir(os.path.join(tile.build_dir, 'textures')): if f[-4:] != '.dds': continue items = f.split('_') 
(til_y_text, til_x_text) = [int(x) for x in items[:2]] zoomlevel = int(items[-1][-6:-4]) provider_code = '_'.join(items[2:])[:-6] for til_x in range(til_x_text * 2**(tile.mesh_zl - zoomlevel), (til_x_text + 16) * 2**(tile.mesh_zl - zoomlevel)): for til_y in range(til_y_text * 2**(tile.mesh_zl - zoomlevel), (til_y_text + 16) * 2**(tile.mesh_zl - zoomlevel)): if ((til_x, til_y) not in dico_customzl) or dico_customzl[ (til_x, til_y)][2] <= zoomlevel: dico_customzl[(til_x, til_y)] = (til_x_text, til_y_text, zoomlevel, provider_code) return dico_customzl
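# Illustration: the 'Existing' mode above recovers the texture attributes from file names of
# the form "<til_y>_<til_x>_<provider><ZL>.dds". A hypothetical parser matching the slicing
# used above:
def _parse_dds_name(file_name):
    items = file_name.split('_')
    (til_y_text, til_x_text) = (int(items[0]), int(items[1]))
    zoomlevel = int(items[-1][-6:-4])
    provider_code = '_'.join(items[2:])[:-6]
    return (til_y_text, til_x_text, zoomlevel, provider_code)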
def build_dsf(tile, download_queue): dico_customzl = zone_list_to_ortho_dico(tile) dsf_file_name = os.path.join( tile.build_dir, 'Earth nav data', FNAMES.long_latlon(tile.lat, tile.lon) + '.dsf') UI.vprint(1, "-> Computing the pool quadtree") if tile.add_low_res_sea_ovl or tile.use_masks_for_inland: quad_capacity = quad_capacity_low else: quad_capacity = quad_capacity_high pool_quadtree = QuadTree(quad_init_level, quad_capacity) f_mesh = open(FNAMES.mesh_file(tile.build_dir, tile.lat, tile.lon), "r") mesh_version = float(f_mesh.readline().strip().split()[-1]) for i in range(3): f_mesh.readline() nbr_nodes = int(f_mesh.readline()) node_coords = numpy.zeros(5 * nbr_nodes, 'float') for i in range(nbr_nodes): node_coords[5 * i:5 * i + 3] = [float(x) for x in f_mesh.readline().split()[:3]] pool_quadtree.insert(float2qquad(node_coords[5 * i] - tile.lon), float2qquad(node_coords[5 * i + 1] - tile.lat), quad_init_level) pool_quadtree.clean() pool_quadtree.statistics() # pool_nbr = len(pool_quadtree) idx_node_to_idx_pool = {} idx_pool = 0 key_to_idx_pool = {} for key in pool_quadtree: key_to_idx_pool[key] = idx_pool for idx_node in pool_quadtree[key]['idx_nodes']: idx_node_to_idx_pool[idx_node] = idx_pool idx_pool += 1 # for i in range(3): f_mesh.readline() for i in range(nbr_nodes): node_coords[5 * i + 3:5 * i + 5] = [float(x) for x in f_mesh.readline().split()[:2]] # altitutes are encoded in .mesh files with a 100000 scaling factor node_coords[2::5] *= 100000 # pools params and nodes uint16 coordinates in pools pool_param = {} node_icoords = numpy.zeros(5 * nbr_nodes, 'uint16') for key in pool_quadtree: level = len(key[0]) plist = sorted(list(pool_quadtree[key]['idx_nodes'])) node_icoords[[5 * idx_node for idx_node in plist]] = [ int(pool_quadtree.nodes[idx_node][0][level:level + 16], 2) for idx_node in plist ] node_icoords[[5 * idx_node + 1 for idx_node in plist]] = [ int(pool_quadtree.nodes[idx_node][1][level:level + 16], 2) for idx_node in plist ] altitudes = numpy.array( [node_coords[5 * idx_node + 2] for idx_node in plist]) altmin = floor(altitudes.min()) altmax = ceil(altitudes.max()) if altmax - altmin < 770: scale_z = 771 # 65535=771*85 inv_stp = 85 elif altmax - altmin < 1284: scale_z = 1285 # 65535=1285*51 inv_stp = 51 elif altmax - altmin < 4368: scale_z = 4369 # 65535=4369*15 inv_stp = 15 else: scale_z = 13107 # 65535=13107*5 inv_stp = 5 scal_x = scal_y = 2**(-level) node_icoords[[5 * idx_node + 2 for idx_node in plist]] = numpy.round( (altitudes - altmin) * inv_stp) pool_param[key_to_idx_pool[key]] = (scal_x, tile.lon + int(key[0], 2) * scal_x, scal_y, tile.lat + int(key[1], 2) * scal_y, scale_z, altmin, 2, -1, 2, -1, 1, 0, 1, 0, 1, 0, 1, 0) node_icoords[3::5] = numpy.round( (1 + tile.normal_map_strength * node_coords[3::5]) / 2 * 65535) node_icoords[4::5] = numpy.round( (1 - tile.normal_map_strength * node_coords[4::5]) / 2 * 65535) node_icoords = array.array('H', node_icoords) ########################## dico_terrains = {} overlay_terrains = set() treated_textures = set() skipped_terrains_for_masking = set() dsf_pools = {} # we need more pools for textured nodes than for nodes, # one for each number of coordinates [7 (land or experimental water), 9 (water masks) and 5 (X-Plane water)] dsf_pool_nbr = 3 * pool_nbr for idx_dsfpool in range(dsf_pool_nbr): dsf_pools[idx_dsfpool] = array.array('H') dsf_pool_length = numpy.zeros(dsf_pool_nbr, 'int') dsf_pool_plane = 7 * numpy.ones(dsf_pool_nbr, 'int') dsf_pool_plane[pool_nbr:2 * pool_nbr] = 9 dsf_pool_plane[2 * pool_nbr:3 * pool_nbr] = 5 
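    # Note on the pool layout built just above (inferred from how the pools are filled further
    # below, so treat as a reading aid rather than a specification):
    #   7 planes : lon, lat, alt, nx, ny, s, t                      -> regular textured land / experimental water
    #   9 planes : lon, lat, alt, nx, ny, s, t, + border s, t (or 0, alpha) -> masked or ratio_water overlays
    #   5 planes : lon, lat, alt, nx, ny                            -> plain X-Plane water
    # The altitude quantisation above relies on 65535 factoring as 771*85, 1285*51, 4369*15 and
    # 13107*5: scale_z and inv_stp are chosen per pool from the altitude range so the uint16
    # altitude codes remain exact multiples of the chosen step.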
textured_nodes = {} len_textured_nodes = 0 textured_tris = {} total_cross_pool = 0 ########################## bPROP = bTERT = bOBJT = bPOLY = bNETW = bDEMN = bGEOD = bDEMS = bCMDS = b'' nbr_dsfpools_yet_in = 0 dico_terrains = {'terrain_Water': 0} bTERT = bytes("terrain_Water\0", 'ascii') textured_tris[0] = defaultdict(lambda: array.array('H')) # Next, we go through the Triangle section of the mesh file and build DSF # mesh points (these take into accound texture as well), point pools, etc. has_water = 7 if mesh_version >= 1.3 else 3 for i in range(0, 2): # skip 2 lines f_mesh.readline() nbr_tris = int(f_mesh.readline()) # read nbr of tris step = nbr_tris // 100 + 1 tri_list = [] for i in range(nbr_tris): # look for the texture that will possibly cover the tri (n1, n2, n3, tri_type) = [int(x) - 1 for x in f_mesh.readline().split()[:4]] tri_type += 1 # Triangles of mixed types are set for water in priority (to avoid water cut by solid roads), and others are set for type=0 tri_type = (tri_type & has_water) and (2 * ( (tri_type & has_water) > 1 or tile.use_masks_for_inland) or 1) tri_list.append((n1, n2, n3, tri_type)) f_mesh.close() i = 0 # First sea water (or equivalent) tris for tri in [tri for tri in tri_list if tri[3] == 2]: (n1, n2, n3, tri_type) = tri if i % step == 0: UI.progress_bar(1, int(i / step * 0.9)) if UI.red_flag: UI.vprint(1, "DSF construction interrupted.") return 0 i += 1 bary_lon = (node_coords[5 * n1] + node_coords[5 * n2] + node_coords[5 * n3]) / 3 bary_lat = (node_coords[5 * n1 + 1] + node_coords[5 * n2 + 1] + node_coords[5 * n3 + 1]) / 3 texture_attributes = dico_customzl[GEO.wgs84_to_orthogrid( bary_lat, bary_lon, tile.mesh_zl)] # The entries for the terrain and texture main dictionnaries terrain_attributes = (texture_attributes, tri_type) # Do we need to build new terrain file(s) ? if terrain_attributes in dico_terrains: terrain_idx = dico_terrains[terrain_attributes] else: needs_new_terrain = False # if not we need to check with masks values if terrain_attributes not in skipped_terrains_for_masking: mask_im = MASK.needs_mask(tile, *texture_attributes) if mask_im: UI.vprint(2, " Use of an alpha mask.") needs_new_terrain = True mask_im.save( os.path.join(tile.build_dir, "textures", FNAMES.mask_file(*texture_attributes))) else: skipped_terrains_for_masking.add(terrain_attributes) # clean up potential old masks in the tile dir try: os.remove( os.path.join( tile.build_dir, "textures", FNAMES.mask_file(*texture_attributes))) except: pass if needs_new_terrain: terrain_idx = len(dico_terrains) textured_tris[terrain_idx] = defaultdict( lambda: array.array('I')) dico_terrains[terrain_attributes] = terrain_idx is_overlay = tri_type == 2 or ( tri_type == 1 and not (tile.experimental_water & 1)) if is_overlay: overlay_terrains.add(terrain_idx) texture_file_name = FNAMES.dds_file_name_from_attributes( *texture_attributes) # do we need to download a new texture ? if texture_attributes not in treated_textures: if (not os.path.isfile( os.path.join(tile.build_dir, 'textures', texture_file_name))) or ( tile.imprint_masks_to_dds): if 'g2xpl' not in texture_attributes[3]: download_queue.put(texture_attributes) elif os.path.isfile( os.path.join( tile.build_dir, 'textures', texture_file_name.replace( 'dds', 'partial.dds'))): texture_file_name = texture_file_name.replace( 'dds', 'partial.dds') UI.vprint( 1, " Texture file " + texture_file_name + " already present.") else: UI.vprint( 1, " Missing a required texture, conversion from g2xpl requires texture download." 
) download_queue.put(texture_attributes) else: UI.vprint( 1, " Texture file " + texture_file_name + " already present.") treated_textures.add(texture_attributes) terrain_file_name = create_terrain_file( tile, texture_file_name, *texture_attributes, tri_type, is_overlay) bTERT += bytes('terrain/' + terrain_file_name + '\0', 'ascii') else: terrain_idx = 0 # We put the tri in the right terrain # First the ones associated to the dico_customzl if terrain_idx: tri_p = array.array('I') for n in (n1, n3, n2): # beware of ordering for orientation ! idx_pool = idx_node_to_idx_pool[n] node_hash = (idx_pool, *node_icoords[5 * n:5 * n + 2], terrain_idx) if node_hash in textured_nodes: (idx_dsfpool, pos_in_pool) = textured_nodes[node_hash] else: (s, t) = GEO.st_coord(node_coords[5 * n + 1], node_coords[5 * n], *texture_attributes) # BEWARE : normal coordinates are pointing (EAST,SOUTH) in X-Plane, not (EAST,NORTH) ! (cfr DSF specs), so v -> -v if not tile.imprint_masks_to_dds: # border_tex idx_dsfpool = idx_pool + pool_nbr # border_tex masks with original normal dsf_pools[idx_dsfpool].extend( node_icoords[5 * n:5 * n + 5]) dsf_pools[idx_dsfpool].extend( (int(round(s * 65535)), int(round(t * 65535)), int(round(s * 65535)), int(round(t * 65535)))) else: # dtx5 dds with mask included idx_dsfpool = idx_pool dsf_pools[idx_dsfpool].extend( node_icoords[5 * n:5 * n + 5]) dsf_pools[idx_dsfpool].extend( (int(round(s * 65535)), int(round(t * 65535)))) len_textured_nodes += 1 pos_in_pool = dsf_pool_length[idx_dsfpool] textured_nodes[node_hash] = (idx_dsfpool, pos_in_pool) dsf_pool_length[idx_dsfpool] += 1 tri_p.extend((idx_dsfpool, pos_in_pool)) # some triangles could be reduced to nothing by the pool snapping, # we skip thme (possible killer to X-Plane's drapping of roads ?) if tri_p[:2] == tri_p[2:4] or tri_p[2:4] == tri_p[4:] or tri_p[ 4:] == tri_p[:2]: continue if tri_p[0] == tri_p[2] == tri_p[4]: textured_tris[terrain_idx][tri_p[0]].extend( (tri_p[1], tri_p[3], tri_p[5])) else: total_cross_pool += 1 textured_tris[terrain_idx]['cross-pool'].extend(tri_p) # I. X-Plane water if not (tile.experimental_water & tri_type): tri_p = array.array('H') for n in (n1, n3, n2): # beware of ordering for orientation ! node_hash = (n, 0) if node_hash in textured_nodes: (idx_dsfpool, pos_in_pool) = textured_nodes[node_hash] else: idx_dsfpool = idx_node_to_idx_pool[n] + 2 * pool_nbr len_textured_nodes += 1 pos_in_pool = dsf_pool_length[idx_dsfpool] textured_nodes[node_hash] = [idx_dsfpool, pos_in_pool] #in some cases we might prefer to use normal shading for some sea triangles too (albedo continuity with elevation derived masks) #dsf_pools[idx_dsfpool].extend(node_icoords[5*n:5*n+5]) dsf_pools[idx_dsfpool].extend(node_icoords[5 * n:5 * n + 3]) dsf_pools[idx_dsfpool].extend((32768, 32768)) dsf_pool_length[idx_dsfpool] += 1 tri_p.extend((idx_dsfpool, pos_in_pool)) if tri_p[0] == tri_p[2] == tri_p[4]: textured_tris[0][tri_p[0]].extend( (tri_p[1], tri_p[3], tri_p[5])) else: total_cross_pool += 1 textured_tris[0]['cross-pool'].extend(tri_p) # II. 
Low resolution texture with global coverage if ((tile.experimental_water & 2) or tile.add_low_res_sea_ovl): # experimental water over sea #sea_zl=int(IMG.providers_dict['SEA']['max_zl']) sea_zl = experimental_water_zl (til_x_left, til_y_top) = GEO.wgs84_to_orthogrid(bary_lat, bary_lon, sea_zl) texture_attributes = (til_x_left, til_y_top, sea_zl, 'SEA') terrain_attributes = (texture_attributes, tri_type) if terrain_attributes in dico_terrains: terrain_idx = dico_terrains[terrain_attributes] else: terrain_idx = len(dico_terrains) is_overlay = not (tile.experimental_water & 2) and 'ratio_water' if is_overlay: overlay_terrains.add(terrain_idx) textured_tris[terrain_idx] = defaultdict( lambda: array.array('H')) dico_terrains[terrain_attributes] = terrain_idx texture_file_name = FNAMES.dds_file_name_from_attributes( *texture_attributes) # do we need to download a new texture ? if texture_attributes not in treated_textures: if not os.path.isfile( os.path.join(tile.build_dir, 'textures', texture_file_name)): download_queue.put(texture_attributes) else: UI.vprint( 1, " Texture file " + texture_file_name + " already present.") treated_textures.add(texture_attributes) terrain_file_name = create_terrain_file( tile, texture_file_name, *texture_attributes, tri_type, is_overlay) bTERT += bytes('terrain/' + terrain_file_name + '\0', 'ascii') # We put the tri in the right terrain tri_p = array.array('H') for n in (n1, n3, n2): # beware of ordering for orientation ! idx_pool = idx_node_to_idx_pool[n] node_hash = (idx_pool, *node_icoords[5 * n:5 * n + 2], terrain_idx) if node_hash in textured_nodes: (idx_dsfpool, pos_in_pool) = textured_nodes[node_hash] else: (s, t) = GEO.st_coord(node_coords[5 * n + 1], node_coords[5 * n], *texture_attributes) # BEWARE : normal coordinates are pointing (EAST,SOUTH) in X-Plane, not (EAST,NORTH) ! (cfr DSF specs), so v -> -v if (tile.experimental_water & 2): idx_dsfpool = idx_pool # normal map texture over flat shading - no overlay dsf_pools[idx_dsfpool].extend( node_icoords[5 * n:5 * n + 3]) dsf_pools[idx_dsfpool].extend( (32768, 32768, int(round(s * 65535)), int(round(t * 65535)))) else: idx_dsfpool = idx_pool + pool_nbr # constant alpha overlay with flat shading dsf_pools[idx_dsfpool].extend( node_icoords[5 * n:5 * n + 3]) dsf_pools[idx_dsfpool].extend( (32768, 32768, int(round(s * 65535)), int(round(t * 65535)), 0, int(round(tile.ratio_water * 65535)))) len_textured_nodes += 1 pos_in_pool = dsf_pool_length[idx_dsfpool] textured_nodes[node_hash] = (idx_dsfpool, pos_in_pool) dsf_pool_length[idx_dsfpool] += 1 tri_p.extend((idx_dsfpool, pos_in_pool)) if tri_p[0] == tri_p[2] == tri_p[4]: textured_tris[terrain_idx][tri_p[0]].extend( (tri_p[1], tri_p[3], tri_p[5])) else: total_cross_pool += 1 textured_tris[terrain_idx]['cross-pool'].extend(tri_p) # Second land and inland water tris for tri in [tri for tri in tri_list if tri[3] < 2]: (n1, n2, n3, tri_type) = tri if i % step == 0: UI.progress_bar(1, int(i / step * 0.9)) if UI.red_flag: UI.vprint(1, "DSF construction interrupted.") return 0 i += 1 bary_lon = (node_coords[5 * n1] + node_coords[5 * n2] + node_coords[5 * n3]) / 3 bary_lat = (node_coords[5 * n1 + 1] + node_coords[5 * n2 + 1] + node_coords[5 * n3 + 1]) / 3 texture_attributes = dico_customzl[GEO.wgs84_to_orthogrid( bary_lat, bary_lon, tile.mesh_zl)] # The entries for the terrain and texture main dictionnaries terrain_attributes = (texture_attributes, tri_type) # Do we need to build new terrain file(s) ? 
if terrain_attributes in dico_terrains: terrain_idx = dico_terrains[terrain_attributes] else: terrain_idx = len(dico_terrains) textured_tris[terrain_idx] = defaultdict(lambda: array.array('I')) dico_terrains[terrain_attributes] = terrain_idx is_overlay = (tri_type == 1 and not (tile.experimental_water & 1)) if is_overlay: overlay_terrains.add(terrain_idx) texture_file_name = FNAMES.dds_file_name_from_attributes( *texture_attributes) # do we need to download a new texture ? if texture_attributes not in treated_textures: if (not os.path.isfile( os.path.join(tile.build_dir, 'textures', texture_file_name))): if 'g2xpl' not in texture_attributes[3]: download_queue.put(texture_attributes) elif os.path.isfile( os.path.join( tile.build_dir, 'textures', texture_file_name.replace( 'dds', 'partial.dds'))): texture_file_name = texture_file_name.replace( 'dds', 'partial.dds') UI.vprint( 1, " Texture file " + texture_file_name + " already present.") else: UI.vprint( 1, " Missing a required texture, conversion from g2xpl requires texture download." ) download_queue.put(texture_attributes) else: UI.vprint( 1, " Texture file " + texture_file_name + " already present.") treated_textures.add(texture_attributes) terrain_file_name = create_terrain_file(tile, texture_file_name, *texture_attributes, tri_type, is_overlay) bTERT += bytes('terrain/' + terrain_file_name + '\0', 'ascii') # We put the tri in the right terrain # First the ones associated to the dico_customzl tri_p = array.array('I') for n in (n1, n3, n2): # beware of ordering for orientation ! idx_pool = idx_node_to_idx_pool[n] node_hash = (idx_pool, *node_icoords[5 * n:5 * n + 2], terrain_idx) if node_hash in textured_nodes: (idx_dsfpool, pos_in_pool) = textured_nodes[node_hash] else: (s, t) = GEO.st_coord(node_coords[5 * n + 1], node_coords[5 * n], *texture_attributes) # BEWARE : normal coordinates are pointing (EAST,SOUTH) in X-Plane, not (EAST,NORTH) ! (cfr DSF specs), so v -> -v if not tri_type: # land idx_dsfpool = idx_pool dsf_pools[idx_dsfpool].extend(node_icoords[5 * n:5 * n + 5]) dsf_pools[idx_dsfpool].extend( (int(round(s * 65535)), int(round(t * 65535)))) else: # inland water if not (tile.experimental_water & 1): idx_dsfpool = idx_pool + pool_nbr # constant alpha overlay with flat shading dsf_pools[idx_dsfpool].extend( node_icoords[5 * n:5 * n + 3]) dsf_pools[idx_dsfpool].extend( (32768, 32768, int(round(s * 65535)), int(round(t * 65535)), 0, int(round(tile.ratio_water * 65535)))) else: idx_dsfpool = idx_pool # normal map texture over flat shading - no overlay dsf_pools[idx_dsfpool].extend( node_icoords[5 * n:5 * n + 3]) dsf_pools[idx_dsfpool].extend( (32768, 32768, int(round(s * 65535)), int(round(t * 65535)))) len_textured_nodes += 1 pos_in_pool = dsf_pool_length[idx_dsfpool] textured_nodes[node_hash] = (idx_dsfpool, pos_in_pool) dsf_pool_length[idx_dsfpool] += 1 tri_p.extend((idx_dsfpool, pos_in_pool)) # some triangles could be reduced to nothing by the pool snapping, # we skip thme (possible killer to X-Plane's drapping of roads ?) if tri_p[:2] == tri_p[2:4] or tri_p[2:4] == tri_p[4:] or tri_p[ 4:] == tri_p[:2]: continue if tri_p[0] == tri_p[2] == tri_p[4]: textured_tris[terrain_idx][tri_p[0]].extend( (tri_p[1], tri_p[3], tri_p[5])) else: total_cross_pool += 1 textured_tris[terrain_idx]['cross-pool'].extend(tri_p) if tri_type: # All water effects not related to the full resolution texture # I. X-Plane water if not (tile.experimental_water & tri_type): tri_p = array.array('H') for n in (n1, n3, n2): # beware of ordering for orientation ! 
node_hash = (n, 0) if node_hash in textured_nodes: (idx_dsfpool, pos_in_pool) = textured_nodes[node_hash] else: idx_dsfpool = idx_node_to_idx_pool[n] + 2 * pool_nbr len_textured_nodes += 1 pos_in_pool = dsf_pool_length[idx_dsfpool] textured_nodes[node_hash] = [idx_dsfpool, pos_in_pool] #in some cases we might prefer to use normal shading for some sea triangles too (albedo continuity with elevation derived masks) #dsf_pools[idx_dsfpool].extend(node_icoords[5*n:5*n+5]) dsf_pools[idx_dsfpool].extend( node_icoords[5 * n:5 * n + 3]) dsf_pools[idx_dsfpool].extend((32768, 32768)) dsf_pool_length[idx_dsfpool] += 1 tri_p.extend((idx_dsfpool, pos_in_pool)) if tri_p[0] == tri_p[2] == tri_p[4]: textured_tris[0][tri_p[0]].extend( (tri_p[1], tri_p[3], tri_p[5])) else: total_cross_pool += 1 textured_tris[0]['cross-pool'].extend(tri_p) download_queue.put('quit') UI.vprint(1, "-> Encoding of the DSF file") UI.vprint(1, " Final nbr of nodes: " + str(len_textured_nodes)) UI.vprint(2, " Final nbr of cross pool tris: " + str(total_cross_pool)) # Now is time to write our DSF to disk, the exact binary format is described on the wiki if os.path.exists(dsf_file_name + '.bak'): os.remove(dsf_file_name + '.bak') if os.path.exists(dsf_file_name): os.rename(dsf_file_name, dsf_file_name + '.bak') if bPROP == b'': bPROP=bytes("sim/west\0"+str(tile.lon)+"\0"+"sim/east\0"+str(tile.lon+1)+"\0"+\ "sim/south\0"+str(tile.lat)+"\0"+"sim/north\0"+str(tile.lat+1)+"\0"+\ "sim/creation_agent\0"+"Ortho4XP\0",'ascii') else: bPROP += b'sim/creation_agent\0Patched by Ortho4XP\0' # Computation of intermediate and of total length size_of_head_atom = 16 + len(bPROP) size_of_prop_atom = 8 + len(bPROP) size_of_defn_atom = 48 + len(bTERT) + len(bOBJT) + len(bPOLY) + len( bNETW) + len(bDEMN) size_of_geod_atom = 8 + len(bGEOD) for k in range(dsf_pool_nbr): if dsf_pool_length[k] > 0: size_of_geod_atom += 21 + dsf_pool_plane[k] * ( 9 + 2 * dsf_pool_length[k]) UI.vprint(2, " Size of DEFN atom : " + str(size_of_defn_atom) + " bytes.") UI.vprint(2, " Size of GEOD atom : " + str(size_of_geod_atom) + " bytes.") f = open(dsf_file_name + '.tmp', 'wb') f.write(b'XPLNEDSF') f.write(struct.pack('<I', 1)) # Head super-atom f.write(b"DAEH") f.write(struct.pack('<I', size_of_head_atom)) f.write(b"PORP") f.write(struct.pack('<I', size_of_prop_atom)) f.write(bPROP) # Definitions super-atom f.write(b"NFED") f.write(struct.pack('<I', size_of_defn_atom)) f.write(b"TRET") f.write(struct.pack('<I', 8 + len(bTERT))) f.write(bTERT) f.write(b"TJBO") f.write(struct.pack('<I', 8 + len(bOBJT))) f.write(bOBJT) f.write(b"YLOP") f.write(struct.pack('<I', 8 + len(bPOLY))) f.write(bPOLY) f.write(b"WTEN") f.write(struct.pack('<I', 8 + len(bNETW))) f.write(bNETW) f.write(b"NMED") f.write(struct.pack('<I', 8 + len(bDEMN))) f.write(bDEMN) # Geodata super-atom f.write(b"DOEG") f.write(struct.pack('<I', size_of_geod_atom)) f.write(bGEOD) for k in range(dsf_pool_nbr): if dsf_pool_length[k] == 0: continue f.write(b'LOOP') f.write( struct.pack( '<I', 13 + dsf_pool_plane[k] + 2 * dsf_pool_plane[k] * dsf_pool_length[k])) f.write(struct.pack('<I', dsf_pool_length[k])) f.write(struct.pack('<B', dsf_pool_plane[k])) for l in range(dsf_pool_plane[k]): f.write(struct.pack('<B', 0)) for m in range(dsf_pool_length[k]): f.write( struct.pack('<H', dsf_pools[k][dsf_pool_plane[k] * m + l])) for k in range(dsf_pool_nbr): if dsf_pool_length[k] == 0: continue f.write(b'LACS') f.write(struct.pack('<I', 8 + 8 * dsf_pool_plane[k])) for l in range(2 * dsf_pool_plane[k]): f.write(struct.pack('<f', 
pool_param[k % pool_nbr][l])) UI.progress_bar(1, 95) if UI.red_flag: UI.vprint(1, "DSF construction interrupted.") return 0 # Since we possibly skipped some pools, and since we possibly # get pools from elsewhere, we rebuild a dico # which tells the pool position in the dsf of a pool prior # to the stripping : dico_new_dsf_pool = {} new_idx_dsfpool = nbr_dsfpools_yet_in for k in range(dsf_pool_nbr): if dsf_pool_length[k] != 0: dico_new_dsf_pool[k] = new_idx_dsfpool new_idx_dsfpool += 1 # DEMS atom if bDEMS != b'': f.write(b"SMED") f.write(struct.pack('<I', 8 + len(bDEMS))) f.write(bDEMS) # Commands atom # we first compute its size : size_of_cmds_atom = 8 + len(bCMDS) for terrain_idx in textured_tris: if len(textured_tris[terrain_idx]) == 0: continue size_of_cmds_atom += 3 for idx_dsfpool in textured_tris[terrain_idx]: if idx_dsfpool != 'cross-pool': size_of_cmds_atom+= 13+2*(len(textured_tris[terrain_idx][idx_dsfpool])+\ ceil(len(textured_tris[terrain_idx][idx_dsfpool])/255)) else: size_of_cmds_atom+= 13+2*(len(textured_tris[terrain_idx][idx_dsfpool])+\ ceil(len(textured_tris[terrain_idx][idx_dsfpool])/510)) UI.vprint(2, " Size of CMDS atom : " + str(size_of_cmds_atom) + " bytes.") f.write(b'SDMC') # CMDS header f.write(struct.pack('<I', size_of_cmds_atom)) # CMDS length f.write(bCMDS) for terrain_idx in textured_tris: if len(textured_tris[terrain_idx]) == 0: continue #print("terrain_idx = "+str(terrain_idx)) f.write(struct.pack('<B', 4)) # SET DEFINITION 16 f.write(struct.pack('<H', terrain_idx)) # TERRAIN INDEX flag = 1 if terrain_idx not in overlay_terrains else 2 # physical or overlay lod = -1 if flag == 1 else tile.overlay_lod for idx_dsfpool in textured_tris[terrain_idx]: if idx_dsfpool != 'cross-pool': f.write(struct.pack('<B', 1)) # POOL SELECT f.write(struct.pack( '<H', dico_new_dsf_pool[idx_dsfpool])) # POOL INDEX f.write(struct.pack('<B', 18)) # TERRAIN PATCH FLAGS AND LOD f.write(struct.pack('<B', flag)) # FLAG f.write(struct.pack('<f', 0)) # NEAR LOD f.write(struct.pack('<f', lod)) # FAR LOD blocks = floor( len(textured_tris[terrain_idx][idx_dsfpool]) / 255) for j in range(blocks): f.write(struct.pack('<B', 23)) # PATCH TRIANGLE f.write(struct.pack('<B', 255)) # COORDINATE COUNT for k in range(255): f.write( struct.pack( '<H', textured_tris[terrain_idx][idx_dsfpool][ 255 * j + k])) # COORDINATE IDX remaining_tri_p = len( textured_tris[terrain_idx][idx_dsfpool]) % 255 if remaining_tri_p != 0: f.write(struct.pack('<B', 23)) # PATCH TRIANGLE f.write(struct.pack('<B', remaining_tri_p)) # COORDINATE COUNT for k in range(remaining_tri_p): f.write( struct.pack( '<H', textured_tris[terrain_idx][idx_dsfpool][ 255 * blocks + k])) # COORDINATE IDX else: # idx_dsfpool == 'cross-pool' pool_idx_init = textured_tris[terrain_idx][idx_dsfpool][0] f.write(struct.pack('<B', 1)) # POOL SELECT f.write(struct.pack( '<H', dico_new_dsf_pool[pool_idx_init])) # POOL INDEX f.write(struct.pack('<B', 18)) # TERRAIN PATCH FLAGS AND LOD f.write(struct.pack('<B', flag)) # FLAG f.write(struct.pack('<f', 0)) # NEAR LOD f.write(struct.pack('<f', lod)) # FAR LOD blocks = floor( len(textured_tris[terrain_idx][idx_dsfpool]) / 510) for j in range(blocks): f.write(struct.pack('<B', 24)) # PATCH TRIANGLE CROSS-POOL f.write(struct.pack('<B', 255)) # COORDINATE COUNT for k in range(255): f.write( struct.pack( '<H', dico_new_dsf_pool[ textured_tris[terrain_idx][idx_dsfpool][ 510 * j + 2 * k]])) # POOL IDX f.write( struct.pack( '<H', textured_tris[terrain_idx][idx_dsfpool][ 510 * j + 2 * k + 1])) # POS_IN_POOL IDX 
                remaining_tri_p = int((len(textured_tris[terrain_idx][idx_dsfpool]) % 510) / 2)
                if remaining_tri_p != 0:
                    f.write(struct.pack('<B', 24))               # PATCH TRIANGLE CROSS-POOL
                    f.write(struct.pack('<B', remaining_tri_p))  # COORDINATE COUNT
                    for k in range(remaining_tri_p):
                        f.write(struct.pack('<H', dico_new_dsf_pool[
                            textured_tris[terrain_idx][idx_dsfpool][510 * blocks + 2 * k]]))      # POOL IDX
                        f.write(struct.pack('<H',
                            textured_tris[terrain_idx][idx_dsfpool][510 * blocks + 2 * k + 1]))   # POS_IN_POOL IDX
    UI.progress_bar(1, 98)
    if UI.red_flag:
        UI.vprint(1, "DSF construction interrupted.")
        return 0
    f.close()
    f = open(dsf_file_name + '.tmp', 'rb')
    data = f.read()
    m = hashlib.md5()
    m.update(data)
    md5sum = m.digest()
    f.close()
    f = open(dsf_file_name + '.tmp', 'ab')
    f.write(md5sum)
    f.close()
    UI.progress_bar(1, 100)
    size_of_dsf = 28 + size_of_head_atom + size_of_defn_atom + size_of_geod_atom + size_of_cmds_atom
    UI.vprint(1, " DSF file encoded, total size is:", size_of_dsf, "bytes",
              "(" + UI.human_print(size_of_dsf) + ")")
    return 1
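
# The DSF container assembled above is finalised by appending the MD5 digest of
# everything that precedes it (cf. the hashlib block at the end of the function).
# A minimal verification sketch of that footer; the helper name is ours and
# nothing in this module calls it:
import hashlib

def _dsf_md5_is_valid(dsf_path):
    """Return True if the trailing 16-byte MD5 footer matches the DSF body."""
    with open(dsf_path, 'rb') as g:
        payload = g.read()
    return hashlib.md5(payload[:-16]).digest() == payload[-16:]
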
def attach_surfaces_to_airports(airport_layer, dico_airports):
    ### We link surfaces to airports (this information is unfortunately not in OSM)
    for surface_type in ('runway', 'taxiway', 'apron', 'hangar'):
        for wayid in (x for x in airport_layer.dicosmw
                      if x in airport_layer.dicosmtags['w']
                      and 'aeroway' in airport_layer.dicosmtags['w'][x]
                      and airport_layer.dicosmtags['w'][x]['aeroway'] == surface_type):
            linestring = geometry.LineString(
                numpy.array([airport_layer.dicosmn[nodeid]
                             for nodeid in airport_layer.dicosmw[wayid]]))
            found_apt = False
            for airport in (x for x in dico_airports if dico_airports[x]['boundary']):
                if linestring.intersects(dico_airports[airport]['boundary']):
                    dico_airports[airport][surface_type].append(wayid)
                    found_apt = True
                    break
            if found_apt:
                continue
            closest_dist = 99999
            closest_apt = None
            pt_check = tuple(
                numpy.mean(numpy.array([airport_layer.dicosmn[nodeid]
                                        for nodeid in airport_layer.dicosmw[wayid]]),
                           axis=0))
            for airport in dico_airports:
                dist = GEO.dist(pt_check, dico_airports[airport]['repr_node'])
                if dist < closest_dist:
                    closest_dist = dist
                    closest_apt = airport
            if closest_apt and closest_dist < 3500:
                dico_airports[closest_apt][surface_type].append(wayid)
            else:
                try:
                    name = airport_layer.dicosmtags['w'][wayid]['name']
                    dico_airports[name] = {
                        'key_type': 'name', 'repr_node': pt_check, 'name': name,
                        'runway': [], 'runway_width': [], 'taxiway': [],
                        'apron': [], 'hangar': [], 'boundary': None
                    }
                    dico_airports[name][surface_type].append(wayid)
                except:
                    dico_airports[pt_check] = {
                        'key_type': 'repr_node', 'repr_node': pt_check, 'name': '****',
                        'runway': [], 'runway_width': [], 'taxiway': [],
                        'apron': [], 'hangar': [], 'boundary': None
                    }
                    dico_airports[pt_check][surface_type].append(wayid)
    return
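
# When a surface way intersects no airport boundary, attach_surfaces_to_airports()
# above falls back to the closest airport's representative node, with a 3500 m
# cutoff. A stand-alone sketch of that selection (the helper name is ours, it reuses
# the module's GEO.dist and is not called elsewhere):
def _closest_airport(pt, dico_airports, cutoff_m=3500):
    """Return the key of the airport whose repr_node lies nearest to pt,
    or None if none is closer than cutoff_m metres."""
    best_apt, best_dist = None, cutoff_m
    for apt, info in dico_airports.items():
        dist = GEO.dist(pt, info['repr_node'])
        if dist < best_dist:
            (best_apt, best_dist) = (apt, dist)
    return best_apt
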
def build_mesh(tile): if UI.is_working: return 0 UI.is_working=1 UI.red_flag=False VECT.scalx=cos((tile.lat+0.5)*pi/180) UI.logprint("Step 2 for tile lat=",tile.lat,", lon=",tile.lon,": starting.") UI.vprint(0,"\nStep 2 : Building mesh tile "+FNAMES.short_latlon(tile.lat,tile.lon)+" : \n--------\n") UI.progress_bar(1,0) poly_file = FNAMES.input_poly_file(tile) node_file = FNAMES.input_node_file(tile) alt_file = FNAMES.alt_file(tile) weight_file = FNAMES.weight_file(tile) if not os.path.isfile(node_file): UI.exit_message_and_bottom_line("\nERROR: Could not find ",node_file) return 0 if not tile.iterate and not os.path.isfile(poly_file): UI.exit_message_and_bottom_line("\nERROR: Could not find ",poly_file) return 0 if not tile.iterate: if not os.path.isfile(alt_file): UI.exit_message_and_bottom_line("\nERROR: Could not find",alt_file,". You must run Step 1 first.") return 0 try: fill_nodata = tile.fill_nodata or "to zero" source= ((";" in tile.custom_dem) and tile.custom_dem.split(";")[0]) or tile.custom_dem tile.dem=DEM.DEM(tile.lat,tile.lon,source,fill_nodata,info_only=True) if not os.path.getsize(alt_file)==4*tile.dem.nxdem*tile.dem.nydem: UI.exit_message_and_bottom_line("\nERROR: Cached raster elevation does not match the current custom DEM specs.\n You must run Step 1 and Step 2 with the same elevation base.") return 0 except Exception as e: print(e) UI.exit_message_and_bottom_line("\nERROR: Could not determine the appropriate source. Please check your custom_dem entry.") return 0 else: try: source= ((";" in tile.custom_dem) and tile.custom_dem.split(";")[tile.iterate]) or tile.custom_dem tile.dem=DEM.DEM(tile.lat,tile.lon,source,fill_nodata=False,info_only=True) if not os.path.isfile(alt_file) or not os.path.getsize(alt_file)==4*tile.dem.nxdem*tile.dem.nydem: tile.dem=DEM.DEM(tile.lat,tile.lon,source,fill_nodata=False,info_only=False) tile.dem.write_to_file(FNAMES.alt_file(tile)) except Exception as e: print(e) UI.exit_message_and_bottom_line("\nERROR: Could not determine the appropriate source. 
Please check your custom_dem entry.") return 0 try: f=open(node_file,'r') input_nodes=int(f.readline().split()[0]) f.close() except: UI.exit_message_and_bottom_line("\nERROR: In reading ",node_file) return 0 timer=time.time() tri_verbosity = 'Q' if UI.verbosity<=1 else 'V' output_poly = 'P' if UI.cleaning_level else '' do_refine = 'r' if tile.iterate else 'A' limit_tris = 'S'+str(max(int(tile.limit_tris/1.9-input_nodes),0)) if tile.limit_tris else '' Tri_option = '-p'+do_refine+'uYB'+tri_verbosity+output_poly+limit_tris weight_array=numpy.ones((1001,1001),dtype=numpy.float32) build_curv_tol_weight_map(tile,weight_array) weight_array.tofile(weight_file) del(weight_array) curv_tol_scaling=sqrt(tile.dem.nxdem/(1000*(tile.dem.x1-tile.dem.x0))) hmin_effective=max(tile.hmin,(tile.dem.y1-tile.dem.y0)*GEO.lat_to_m/tile.dem.nydem/2) mesh_cmd=[Triangle4XP_cmd.strip(), Tri_option.strip(), '{:.9g}'.format(GEO.lon_to_m(tile.lat)), '{:.9g}'.format(GEO.lat_to_m), '{:n}'.format(tile.dem.nxdem), '{:n}'.format(tile.dem.nydem), '{:.9g}'.format(tile.dem.x0), '{:.9g}'.format(tile.dem.y0), '{:.9g}'.format(tile.dem.x1), '{:.9g}'.format(tile.dem.y1), '{:.9g}'.format(tile.dem.nodata), '{:.9g}'.format(tile.curvature_tol*curv_tol_scaling), '{:.9g}'.format(tile.min_angle),str(hmin_effective),alt_file,weight_file,poly_file] del(tile.dem) # for machines with not much RAM, we do not need it anymore tile.dem=None UI.vprint(1,"-> Start of the mesh algorithm Triangle4XP.") UI.vprint(2,' Mesh command:',' '.join(mesh_cmd)) fingers_crossed=subprocess.Popen(mesh_cmd,stdout=subprocess.PIPE,bufsize=0) while True: line = fingers_crossed.stdout.readline() if not line: break else: try: print(line.decode("utf-8")[:-1]) except: pass time.sleep(0.3) fingers_crossed.poll() if fingers_crossed.returncode: UI.vprint(0,"\nWARNING: Triangle4XP could not achieve the requested quality (min_angle), most probably due to an uncatched OSM error.\n"+\ "It will be tempted now with no angle constraint (i.e. min_angle=0).") mesh_cmd[-5]='{:.9g}'.format(0) fingers_crossed=subprocess.Popen(mesh_cmd,stdout=subprocess.PIPE,bufsize=0) while True: line = fingers_crossed.stdout.readline() if not line: break else: try: print(line.decode("utf-8")[:-1]) except: pass time.sleep(0.3) fingers_crossed.poll() if fingers_crossed.returncode: UI.exit_message_and_bottom_line("\nERROR: Triangle4XP really couldn't make it !\n\n"+\ "If the reason is not due to the limited amount of RAM please\n"+\ "file a bug including the .node and .poly files that you\n"+\ "will find in "+str(tile.build_dir)+".\n") return 0 if UI.red_flag: UI.exit_message_and_bottom_line(); return 0 vertices=post_process_nodes_altitudes(tile) if UI.red_flag: UI.exit_message_and_bottom_line(); return 0 write_mesh_file(tile,vertices) # if UI.cleaning_level: try: os.remove(FNAMES.weight_file(tile)) except: pass try: os.remove(FNAMES.output_node_file(tile)) except: pass try: os.remove(FNAMES.output_ele_file(tile)) except: pass if UI.cleaning_level>2: try: os.remove(FNAMES.alt_file(tile)) except: pass try: os.remove(FNAMES.input_node_file(tile)) except: pass try: os.remove(FNAMES.input_poly_file(tile)) except: pass UI.timings_and_bottom_line(timer) UI.logprint("Step 2 for tile lat=",tile.lat,", lon=",tile.lon,": normal exit.") return 1
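
# build_mesh() above runs Triangle4XP at most twice (the second attempt drops the
# min_angle constraint) and echoes its stdout with the same read/decode/print loop
# each time. A hedged refactoring sketch of that pattern; the helper name is ours
# and the function above does not use it:
import subprocess
import time

def _run_and_stream(cmd):
    """Run cmd, print its stdout line by line, and return its exit code."""
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=0)
    while True:
        line = proc.stdout.readline()
        if not line:
            break
        try:
            print(line.decode("utf-8")[:-1])
        except UnicodeDecodeError:
            pass
    time.sleep(0.3)
    proc.poll()
    return proc.returncode
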
def include_water(vector_map, tile):
    large_lake_threshold = tile.max_area * 1e6 / (GEO.lat_to_m * GEO.lon_to_m(tile.lat + 0.5))

    def filter_large_lakes(pol, osmid, dicosmtags):
        if pol.area < large_lake_threshold:
            return False
        area = int(pol.area * GEO.lat_to_m * GEO.lon_to_m(tile.lat + 0.5) / 1e6)
        if (osmid in dicosmtags) and ('name' in dicosmtags[osmid]):
            if dicosmtags[osmid]['name'] in good_imagery_list:
                UI.vprint(1, " * ", dicosmtags[osmid]['name'],
                          "kept with full imagery although it is", area, "km^2.")
                return False
            else:
                UI.vprint(1, " * ", dicosmtags[osmid]['name'],
                          "will be masked like the sea due to its large area of", area, "km^2.")
                return True
        else:
            pt = pol.exterior.coords[0] if 'Multi' not in pol.geom_type else pol[0].exterior.coords[0]
            UI.vprint(1, " * ", "Some large OSM water patch close to lat=",
                      '{:.2f}'.format(pt[1] + tile.lat), "lon=", '{:.2f}'.format(pt[0] + tile.lon),
                      "will be masked due to its large area of", area, "km^2.")
            return True

    UI.vprint(0, "-> Dealing with inland water")
    water_layer = OSM.OSM_layer()
    custom_water = FNAMES.custom_water(tile.lat, tile.lon)
    custom_water_dir = FNAMES.custom_water_dir(tile.lat, tile.lon)
    if os.path.isfile(custom_water):
        UI.vprint(1, " * User defined custom water data detected.")
        water_layer.update_dicosm(custom_water, input_tags=None, target_tags=None)
    elif os.path.isdir(custom_water_dir):
        UI.vprint(1, " * User defined custom water data detected (multiple files).")
        for osm_file in os.listdir(custom_water_dir):
            UI.vprint(2, " ", osm_file)
            water_layer.update_dicosm(os.path.join(custom_water_dir, osm_file),
                                      input_tags=None, target_tags=None)
        water_layer.write_to_file(custom_water)
    else:
        queries = [
            'rel["natural"="water"]', 'rel["waterway"="riverbank"]',
            'way["natural"="water"]', 'way["waterway"="riverbank"]',
            'way["waterway"="dock"]'
        ]
        tags_of_interest = ["name"]
        if not OSM.OSM_queries_to_OSM_layer(queries, water_layer, tile.lat, tile.lon,
                                            tags_of_interest, cached_suffix='water'):
            return 0
    UI.vprint(1, " * Building water multipolygon.")
    (water_area, sea_equiv_area) = OSM.OSM_to_MultiPolygon(water_layer, tile.lat, tile.lon,
                                                           filter_large_lakes)
    if not water_area.is_empty:
        UI.vprint(1, " Cleaning it.")
        try:
            (idx_water, dico_water) = VECT.MultiPolygon_to_Indexed_Polygons(
                water_area, merge_overlappings=tile.clean_bad_geometries)
        except:
            return 0
        UI.vprint(2, " Number of water Multipolygons : " + str(len(dico_water)))
        UI.vprint(1, " Encoding it.")
        vector_map.encode_MultiPolygon(dico_water, tile.dem.alt_vec, 'WATER',
                                       area_limit=tile.min_area / 10000,
                                       simplify=tile.water_simplification * GEO.m_to_lat,
                                       check=True)
    if not sea_equiv_area.is_empty:
        UI.vprint(1, " Separate treatment for larger pieces requiring masks.")
        try:
            (idx_water, dico_water) = VECT.MultiPolygon_to_Indexed_Polygons(
                sea_equiv_area, merge_overlappings=tile.clean_bad_geometries)
        except:
            return 0
        UI.vprint(2, " Number of water Multipolygons : " + str(len(dico_water)))
        UI.vprint(1, " Encoding them.")
        vector_map.encode_MultiPolygon(dico_water, tile.dem.alt_vec, 'SEA_EQUIV',
                                       area_limit=tile.min_area / 10000,
                                       simplify=tile.water_simplification * GEO.m_to_lat,
                                       check=True)
    return 1
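
# include_water() measures polygon areas in squared degrees and compares them with
# tile.max_area, which is given in km^2; large_lake_threshold is the inverse of the
# conversion sketched below. Hypothetical helper relying on the module's GEO import,
# with GEO.lat_to_m / GEO.lon_to_m(lat) assumed to be the metre lengths of one degree
# of latitude / longitude (the same factors used above):
def _deg2_to_km2(area_deg2, lat):
    """Convert an area from squared degrees (at latitude lat) to km^2."""
    return area_deg2 * GEO.lat_to_m * GEO.lon_to_m(lat) / 1e6
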
def build_masks(tile): ########################################## def transition_profile(ratio, ttype): if ttype == 'spline': return 3 * ratio**2 - 2 * ratio**3 elif ttype == 'linear': return ratio elif ttype == 'parabolic': return 2 * ratio - ratio**2 ########################################## UI.red_flag = False UI.logprint("Step 2.5 for tile lat=", tile.lat, ", lon=", tile.lon, ": starting.") UI.vprint( 0, "\nStep 2.5 : Building masks for tile " + FNAMES.short_latlon(tile.lat, tile.lon) + " : \n--------\n") timer = time.time() if not os.path.exists(FNAMES.mesh_file(tile.build_dir, tile.lat, tile.lon)): UI.lvprint(0, "ERROR: Mesh file ", FNAMES.mesh_file(tile.build_dir, tile.lat, tile.lon), "absent.") UI.exit_message_and_bottom_line('') return 0 if not os.path.exists(FNAMES.mask_dir(tile.lat, tile.lon)): os.makedirs(FNAMES.mask_dir(tile.lat, tile.lon)) mesh_file_name_list = [] for close_lat in range(tile.lat - 1, tile.lat + 2): for close_lon in range(tile.lon - 1, tile.lon + 2): close_build_dir = tile.build_dir if tile.grouped else tile.build_dir.replace( FNAMES.tile_dir(tile.lat, tile.lon), FNAMES.tile_dir(close_lat, close_lon)) close_mesh_file_name = FNAMES.mesh_file(close_build_dir, close_lat, close_lon) if os.path.isfile(close_mesh_file_name): mesh_file_name_list.append(close_mesh_file_name) #################### dico_masks = {} dico_masks_inland = {} #################### [til_x_min, til_y_min] = GEO.wgs84_to_orthogrid(tile.lat + 1, tile.lon, tile.mask_zl) [til_x_max, til_y_max] = GEO.wgs84_to_orthogrid(tile.lat, tile.lon + 1, tile.mask_zl) UI.vprint(1, "-> Deleting existing masks") for til_x in range(til_x_min, til_x_max + 1, 16): for til_y in range(til_y_min, til_y_max + 1, 16): try: os.remove( os.path.join(FNAMES.mask_dir(tile.lat, tile.lon), FNAMES.legacy_mask(til_x, til_y))) except: pass UI.vprint(1, "-> Reading mesh data") for mesh_file_name in mesh_file_name_list: try: f_mesh = open(mesh_file_name, "r") UI.vprint(1, " * ", mesh_file_name) except: UI.lvprint(1, "Mesh file ", mesh_file_name, " could not be read. 
Skipped.") continue for i in range(0, 4): f_mesh.readline() nbr_pt_in = int(f_mesh.readline()) pt_in = numpy.zeros(5 * nbr_pt_in, 'float') for i in range(0, nbr_pt_in): pt_in[5 * i:5 * i + 3] = [float(x) for x in f_mesh.readline().split()[:3]] for i in range(0, 3): f_mesh.readline() for i in range(0, nbr_pt_in): pt_in[5 * i + 3:5 * i + 5] = [float(x) for x in f_mesh.readline().split()[:2]] for i in range(0, 2): # skip 2 lines f_mesh.readline() nbr_tri_in = int(f_mesh.readline()) # read nbr of tris step_stones = nbr_tri_in // 100 percent = -1 UI.vprint( 2, " Attribution process of masks buffers to water triangles for " + str(mesh_file_name) + ".") for i in range(0, nbr_tri_in): if i % step_stones == 0: percent += 1 UI.progress_bar(1, int(percent * 5 / 10)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 (n1, n2, n3, tri_type) = [int(x) - 1 for x in f_mesh.readline().split()[:4]] tri_type += 1 tri_type = (tri_type & 2) or (tri_type & 1) if (not tri_type) or (tri_type == 1 and not tile.use_masks_for_inland): continue (lon1, lat1) = pt_in[5 * n1:5 * n1 + 2] (lon2, lat2) = pt_in[5 * n2:5 * n2 + 2] (lon3, lat3) = pt_in[5 * n3:5 * n3 + 2] bary_lat = (lat1 + lat2 + lat3) / 3 bary_lon = (lon1 + lon2 + lon3) / 3 (til_x, til_y) = GEO.wgs84_to_orthogrid(bary_lat, bary_lon, tile.mask_zl) if til_x < til_x_min - 16 or til_x > til_x_max + 16 or til_y < til_y_min - 16 or til_y > til_y_max + 16: continue (til_x2, til_y2) = GEO.wgs84_to_orthogrid(bary_lat, bary_lon, tile.mask_zl + 2) a = (til_x2 // 16) % 4 b = (til_y2 // 16) % 4 if (til_x, til_y) in dico_masks: dico_masks[(til_x, til_y)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x, til_y)] = [(lat1, lon1, lat2, lon2, lat3, lon3)] if a == 0: if (til_x - 16, til_y) in dico_masks: dico_masks[(til_x - 16, til_y)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x - 16, til_y)] = [(lat1, lon1, lat2, lon2, lat3, lon3)] if b == 0: if (til_x - 16, til_y - 16) in dico_masks: dico_masks[(til_x - 16, til_y - 16)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x - 16, til_y - 16)] = [ (lat1, lon1, lat2, lon2, lat3, lon3) ] elif b == 3: if (til_x - 16, til_y + 16) in dico_masks: dico_masks[(til_x - 16, til_y + 16)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x - 16, til_y + 16)] = [ (lat1, lon1, lat2, lon2, lat3, lon3) ] elif a == 3: if (til_x + 16, til_y) in dico_masks: dico_masks[(til_x + 16, til_y)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x + 16, til_y)] = [(lat1, lon1, lat2, lon2, lat3, lon3)] if b == 0: if (til_x + 16, til_y - 16) in dico_masks: dico_masks[(til_x + 16, til_y - 16)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x + 16, til_y - 16)] = [ (lat1, lon1, lat2, lon2, lat3, lon3) ] elif b == 3: if (til_x + 16, til_y + 16) in dico_masks: dico_masks[(til_x + 16, til_y + 16)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x + 16, til_y + 16)] = [ (lat1, lon1, lat2, lon2, lat3, lon3) ] if b == 0: if (til_x, til_y - 16) in dico_masks: dico_masks[(til_x, til_y - 16)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x, til_y - 16)] = [(lat1, lon1, lat2, lon2, lat3, lon3)] elif b == 3: if (til_x, til_y + 16) in dico_masks: dico_masks[(til_x, til_y + 16)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks[(til_x, til_y + 16)] = [(lat1, lon1, lat2, lon2, lat3, lon3)] f_mesh.close() if not tile.use_masks_for_inland: UI.vprint(2, " Taking care of inland water near 
shoreline") f_mesh = open(mesh_file_name, "r") for i in range(0, 4): f_mesh.readline() nbr_pt_in = int(f_mesh.readline()) for i in range(0, 2 * nbr_pt_in + 5): f_mesh.readline() nbr_tri_in = int(f_mesh.readline()) # read nbr of tris step_stones = nbr_tri_in // 100 percent = -1 for i in range(0, nbr_tri_in): if i % step_stones == 0: percent += 1 UI.progress_bar(1, int(percent * 5 / 10)) if UI.red_flag: UI.exit_message_and_bottom_line() return 0 (n1, n2, n3, tri_type) = [ int(x) - 1 for x in f_mesh.readline().split()[:4] ] tri_type += 1 tri_type = (tri_type & 2) or (tri_type & 1) if not (tri_type == 1): continue (lon1, lat1) = pt_in[5 * n1:5 * n1 + 2] (lon2, lat2) = pt_in[5 * n2:5 * n2 + 2] (lon3, lat3) = pt_in[5 * n3:5 * n3 + 2] bary_lat = (lat1 + lat2 + lat3) / 3 bary_lon = (lon1 + lon2 + lon3) / 3 (til_x, til_y) = GEO.wgs84_to_orthogrid(bary_lat, bary_lon, tile.mask_zl) if til_x < til_x_min - 16 or til_x > til_x_max + 16 or til_y < til_y_min - 16 or til_y > til_y_max + 16: continue (til_x2, til_y2) = GEO.wgs84_to_orthogrid(bary_lat, bary_lon, tile.mask_zl + 2) a = (til_x2 // 16) % 4 b = (til_y2 // 16) % 4 # Here an inland water tri is added ONLY if sea water tri were already added for this mask extent if (til_x, til_y) in dico_masks: if (til_x, til_y) in dico_masks_inland: dico_masks_inland[(til_x, til_y)].append( (lat1, lon1, lat2, lon2, lat3, lon3)) else: dico_masks_inland[(til_x, til_y)] = [ (lat1, lon1, lat2, lon2, lat3, lon3) ] f_mesh.close() UI.vprint(1, "-> Construction of the masks") if tile.masks_use_DEM_too: tile.ensure_elevation_data() task_len = len(dico_masks) task_done = 0 for (til_x, til_y) in dico_masks: if UI.red_flag: UI.exit_message_and_bottom_line() return 0 task_done += 1 UI.progress_bar(1, 50 + int(49 * task_done / task_len)) if til_x < til_x_min or til_x > til_x_max or til_y < til_y_min or til_y > til_y_max: continue (latm0, lonm0) = GEO.gtile_to_wgs84(til_x, til_y, tile.mask_zl) (px0, py0) = GEO.wgs84_to_pix(latm0, lonm0, tile.mask_zl) px0 -= 1024 py0 -= 1024 # 1) We start with a black mask mask_im = Image.new("L", (4096 + 2 * 1024, 4096 + 2 * 1024), 'black') mask_draw = ImageDraw.Draw(mask_im) # 2) We fill it with white over the extent of each tile around for which we had a mesh available for mesh_file_name in mesh_file_name_list: latlonstr = mesh_file_name.split('.mes')[-2][-7:] lathere = int(latlonstr[0:3]) lonhere = int(latlonstr[3:7]) (px1, py1) = GEO.wgs84_to_pix(lathere, lonhere, tile.mask_zl) (px2, py2) = GEO.wgs84_to_pix(lathere, lonhere + 1, tile.mask_zl) (px3, py3) = GEO.wgs84_to_pix(lathere + 1, lonhere + 1, tile.mask_zl) (px4, py4) = GEO.wgs84_to_pix(lathere + 1, lonhere, tile.mask_zl) px1 -= px0 px2 -= px0 px3 -= px0 px4 -= px0 py1 -= py0 py2 -= py0 py3 -= py0 py4 -= py0 mask_draw.polygon([(px1, py1), (px2, py2), (px3, py3), (px4, py4)], fill='white') # 3a) We overwrite the withe part of the mask with grey (ratio_water dependent) where inland water was detected in the first part above if (til_x, til_y) in dico_masks_inland: for (lat1, lon1, lat2, lon2, lat3, lon3) in dico_masks_inland[(til_x, til_y)]: (px1, py1) = GEO.wgs84_to_pix(lat1, lon1, tile.mask_zl) (px2, py2) = GEO.wgs84_to_pix(lat2, lon2, tile.mask_zl) (px3, py3) = GEO.wgs84_to_pix(lat3, lon3, tile.mask_zl) px1 -= px0 px2 -= px0 px3 -= px0 py1 -= py0 py2 -= py0 py3 -= py0 mask_draw.polygon([(px1, py1), (px2, py2), (px3, py3)], fill=int(255 * (1 - tile.ratio_water))) # 3b) We overwrite the withe + grey part of the mask with black where sea water was detected in the first part above for (lat1, 
lon1, lat2, lon2, lat3, lon3) in dico_masks[(til_x, til_y)]: (px1, py1) = GEO.wgs84_to_pix(lat1, lon1, tile.mask_zl) (px2, py2) = GEO.wgs84_to_pix(lat2, lon2, tile.mask_zl) (px3, py3) = GEO.wgs84_to_pix(lat3, lon3, tile.mask_zl) px1 -= px0 px2 -= px0 px3 -= px0 py1 -= py0 py2 -= py0 py3 -= py0 mask_draw.polygon([(px1, py1), (px2, py2), (px3, py3)], fill='black') del (mask_draw) #mask_im=mask_im.convert("L") img_array = numpy.array(mask_im, dtype=numpy.uint8) if tile.masks_use_DEM_too: #computing the part of the mask coming from the DEM: (latmax, lonmin) = GEO.pix_to_wgs84(px0, py0, tile.mask_zl) (latmin, lonmax) = GEO.pix_to_wgs84(px0 + 6144, py0 + 6144, tile.mask_zl) (x03857, y03857) = GEO.transform('4326', '3857', lonmin, latmax) (x13857, y13857) = GEO.transform('4326', '3857', lonmax, latmin) ((lonmin, lonmax, latmin, latmax), demarr4326) = tile.dem.super_level_set( 1, (lonmin, lonmax, latmin, latmax)) if demarr4326.any(): demim4326 = Image.fromarray( demarr4326.astype(numpy.uint8) * 255) del (demarr4326) s_bbox = (lonmin, latmax, lonmax, latmin) t_bbox = (x03857, y03857, x13857, y13857) demim3857 = IMG.gdalwarp_alternative(s_bbox, '4326', demim4326, t_bbox, '3857', (6144, 6144)) demim3857 = demim3857.filter( ImageFilter.GaussianBlur( 0.3 * 2**(tile.mask_zl - 14))) # slight increase of area dem_array = (numpy.array(demim3857, dtype=numpy.uint8) > 0).astype(numpy.uint8) * 255 del (demim3857) del (demim4326) img_array = numpy.maximum(img_array, dem_array) custom_mask_array = numpy.zeros((4096, 4096), dtype=numpy.uint8) if tile.masks_custom_extent: (latm1, lonm1) = GEO.gtile_to_wgs84(til_x + 16, til_y + 16, tile.mask_zl) bbox_4326 = (lonm0, latm0, lonm1, latm1) masks_im = IMG.has_data(bbox_4326, tile.masks_extent_code, True, mask_size=(4096, 4096), is_sharp_resize=False, is_mask_layer=False) if masks_im: custom_mask_array = (numpy.array(masks_im, dtype=numpy.uint8) * tile.ratio_water).astype(numpy.uint8) if (img_array.max() == 0) and ( custom_mask_array.max() == 0 ): # no need to test if the mask is all white since it would otherwise not be present in dico_mask UI.vprint(1, " Skipping", FNAMES.legacy_mask(til_x, til_y)) continue else: UI.vprint(1, " Creating", FNAMES.legacy_mask(til_x, til_y)) # Blur of the mask pxscal = GEO.webmercator_pixel_size(tile.lat + 0.5, tile.mask_zl) if tile.masking_mode == "sand": blur_width = int(tile.masks_width / pxscal) elif tile.masking_mode == "rocks": blur_width = tile.masks_width / pxscal elif tile.masking_mode == "3steps": blur_width = [L / pxscal for L in tile.masks_width] if tile.masking_mode == "sand" and blur_width: # convolution with a hat function b_img_array = numpy.array(img_array) kernel = numpy.array(range(1, 2 * blur_width)) kernel[blur_width:] = range(blur_width - 1, 0, -1) kernel = kernel / blur_width**2 for i in range(0, len(b_img_array)): b_img_array[i] = numpy.convolve(b_img_array[i], kernel, 'same') b_img_array = b_img_array.transpose() for i in range(0, len(b_img_array)): b_img_array[i] = numpy.convolve(b_img_array[i], kernel, 'same') b_img_array = b_img_array.transpose() b_img_array = 2 * numpy.minimum(b_img_array, 127) b_img_array = numpy.array(b_img_array, dtype=numpy.uint8) elif tile.masking_mode == "rocks" and blur_width: # slight increase of the mask, then gaussian blur, nonlinear map and a tiny bit of smoothing again on a short scale along the shore b_img_array=(numpy.array(Image.fromarray(img_array).convert("L").\ filter(ImageFilter.GaussianBlur(blur_width/1.7)),dtype=numpy.uint8)>0).astype(numpy.uint8)*255 #blur it 
b_img_array=numpy.array(Image.fromarray(b_img_array).convert("L").\ filter(ImageFilter.GaussianBlur(blur_width)),dtype=numpy.uint8) #nonlinear transform to make the transition quicker at the shore (gaussian is too flat) gamma = 2.5 b_img_array=(((numpy.tan((b_img_array.astype(numpy.float32)-127.5)/128*atan(3))-numpy.tan(-127.5/128*atan(3)))\ *254/(2*numpy.tan(127.5/128*atan(3))))**gamma/(255**(gamma-1))).astype(numpy.uint8) #b_img_array=numpy.minimum(b_img_array,200) #still some slight smoothing at the shore b_img_array=numpy.maximum(b_img_array,numpy.array(Image.fromarray(img_array).convert("L").\ filter(ImageFilter.GaussianBlur(2**(tile.mask_zl-14))),dtype=numpy.uint8)) elif tile.masking_mode == "3steps": # why trying something so complicated... transin = blur_width[0] midzone = blur_width[1] transout = blur_width[2] shore_level = 255 sea_level = int(tile.ratio_water * 255) b_img_array = b_mask_array = numpy.array(img_array) # First the transition at the shore # We go from shore_level to sea_level in transin meters stepsin = int(transin / 3) for i in range(stepsin): value = shore_level + transition_profile( (i + 1) / stepsin, 'parabolic') * (sea_level - shore_level) b_mask_array=(numpy.array(Image.fromarray(b_mask_array).convert("L").\ filter(ImageFilter.GaussianBlur(1)),dtype=numpy.uint8)>0).astype(numpy.uint8)*255 b_img_array[(b_img_array == 0) * (b_mask_array != 0)] = value UI.vprint(2, value) # Next the intermediate zone at constant transparency sea_b_radius = midzone / 3 sea_b_radius_buffered = (midzone + transout) / 2 b_mask_array=(numpy.array(Image.fromarray(b_mask_array).convert("L").\ filter(ImageFilter.GaussianBlur(sea_b_radius_buffered)),dtype=numpy.uint8)>0).astype(numpy.uint8)*255 b_mask_array=(numpy.array(Image.fromarray(b_mask_array).convert("L").\ filter(ImageFilter.GaussianBlur(sea_b_radius_buffered-sea_b_radius)),dtype=numpy.uint8)==255).astype(numpy.uint8)*255 b_img_array[(b_img_array == 0) * (b_mask_array != 0)] = sea_level # Finally the transition to the X-Plane sea # We go from sea_level to 0 in transout meters stepsout = int(transout / 3) for i in range(stepsout): value = sea_level * (1 - transition_profile( (i + 1) / stepsout, 'linear')) b_mask_array=(numpy.array(Image.fromarray(b_mask_array).convert("L").\ filter(ImageFilter.GaussianBlur(1)),dtype=numpy.uint8)>0).astype(numpy.uint8)*255 b_img_array[(b_img_array == 0) * (b_mask_array != 0)] = value UI.vprint(2, value) # To smoothen the thresolding introduced above we do a global short extent gaussian blur b_img_array=numpy.array(Image.fromarray(b_img_array).convert("L").\ filter(ImageFilter.GaussianBlur(2)),dtype=numpy.uint8) else: # Just a (futile) copy b_img_array = numpy.array(img_array) # Ensure land is kept to 255 on the mask to avoid unecessary ones, crop to final size, and take the # max with the possible custom extent mask img_array = numpy.maximum(img_array, b_img_array)[1024:4096 + 1024, 1024:4096 + 1024] img_array = numpy.maximum(img_array, custom_mask_array) if not (img_array.max() == 0 or img_array.min() == 255): masks_im = Image.fromarray( img_array) #.filter(ImageFilter.GaussianBlur(3)) masks_im.save( os.path.join(FNAMES.mask_dir(tile.lat, tile.lon), FNAMES.legacy_mask(til_x, til_y))) UI.vprint(2, " Done.") else: UI.vprint(1, " Ends-up being discarded.") UI.progress_bar(1, 100) UI.timings_and_bottom_line(timer) UI.logprint("Step 2.5 for tile lat=", tile.lat, ", lon=", tile.lon, ": normal exit.") return
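
# The "sand" masking mode in build_masks() blurs the mask by convolving rows and then
# columns with a triangle ("hat") kernel of half-width blur_width. A stand-alone sketch
# of that kernel, normalised so that it sums to one (the helper name is ours):
import numpy

def _hat_kernel(blur_width):
    """Symmetric triangle kernel 1, 2, ..., blur_width, ..., 2, 1 scaled by 1/blur_width**2."""
    kernel = numpy.array(range(1, 2 * blur_width), dtype=numpy.float64)
    kernel[blur_width:] = range(blur_width - 1, 0, -1)
    return kernel / blur_width**2
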
def build_curv_tol_weight_map(tile, weight_array):
    if tile.apt_curv_tol != tile.curvature_tol:
        UI.vprint(1, "-> Modifying curv_tol weight map according to runway locations.")
        airport_layer = OSM.OSM_layer()
        queries = [('rel["aeroway"="runway"]', 'rel["aeroway"="taxiway"]',
                    'rel["aeroway"="apron"]', 'way["aeroway"="runway"]',
                    'way["aeroway"="taxiway"]', 'way["aeroway"="apron"]')]
        tags_of_interest = ["all"]
        if not OSM.OSM_queries_to_OSM_layer(queries, airport_layer, tile.lat, tile.lon,
                                            tags_of_interest, cached_suffix='airports'):
            return 0
        runway_network = OSM.OSM_to_MultiLineString(airport_layer, tile.lat, tile.lon)
        runway_area = VECT.improved_buffer(runway_network, 0.0003, 0.0001, 0.00001)
        if not runway_area:
            return 0
        runway_area = VECT.ensure_MultiPolygon(runway_area)
        for polygon in (runway_area.geoms
                        if ('Multi' in runway_area.geom_type
                            or 'Collection' in runway_area.geom_type)
                        else [runway_area]):
            (xmin, ymin, xmax, ymax) = polygon.bounds
            x_shift = 1000 * tile.apt_curv_ext * GEO.m_to_lon(tile.lat)
            y_shift = 1000 * tile.apt_curv_ext * GEO.m_to_lat
            colmin = round((xmin - x_shift) * 1000)
            colmax = round((xmax + x_shift) * 1000)
            rowmax = round(((1 - ymin) + y_shift) * 1000)
            rowmin = round(((1 - ymax) - y_shift) * 1000)
            weight_array[rowmin:rowmax + 1, colmin:colmax + 1] = (
                tile.curvature_tol / tile.apt_curv_tol if tile.apt_curv_tol > 0 else 1)
        del airport_layer
        del runway_network
        del runway_area
    if tile.coast_curv_tol != tile.curvature_tol:
        UI.vprint(1, "-> Modifying curv_tol weight map according to coastline location.")
        sea_layer = OSM.OSM_layer()
        queries = ['way["natural"="coastline"]']
        tags_of_interest = []
        if not OSM.OSM_queries_to_OSM_layer(queries, sea_layer, tile.lat, tile.lon,
                                            tags_of_interest, cached_suffix='coastline'):
            return 0
        for nodeid in sea_layer.dicosmn:
            (lonp, latp) = [float(x) for x in sea_layer.dicosmn[nodeid]]
            x_shift = 1000 * tile.coast_curv_ext * GEO.m_to_lon(tile.lat)
            y_shift = tile.coast_curv_ext / 111.12
            colmin = round((lonp - tile.lon - x_shift) * 1000)
            colmax = round((lonp - tile.lon + x_shift) * 1000)
            rowmax = round((tile.lat + 1 - latp + y_shift) * 1000)
            rowmin = round((tile.lat + 1 - latp - y_shift) * 1000)
            weight_array[rowmin:rowmax + 1, colmin:colmax + 1] = (
                tile.curvature_tol / tile.coast_curv_tol if tile.coast_curv_tol > 0 else 1)
        del sea_layer
    # It could be of interest to write the weight file as a png for user editing
    # Image.fromarray((weight_array != 1).astype(numpy.uint8) * 255).save('weight.png')
    return
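
# build_curv_tol_weight_map() addresses the 1001x1001 weight grid with row 0 at the
# tile's northern edge (lat = tile.lat + 1) and column 0 at its western edge
# (lon = tile.lon). A minimal sketch of that mapping, clamped to the array bounds
# (the helper name is ours, nothing above calls it):
def _latlon_to_weight_cell(lat, lon, tile_lat, tile_lon):
    """Return the (row, col) cell of the curv_tol weight grid containing (lat, lon)."""
    row = min(max(round((tile_lat + 1 - lat) * 1000), 0), 1000)
    col = min(max(round((lon - tile_lon) * 1000), 0), 1000)
    return (row, col)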