def run(self): logger.info("Running DDD123 script.") pipeline = DDDPipeline(self.script, name="DDD Server Build Pipeline") # Run pipeline pipeline.run()
def pipeline_reload(self):

    #self.pipeline = None
    if self.rollbackImporter:
        self.rollbackImporter.uninstall()
    else:
        self.rollbackImporter = RollbackImporter()

    try:
        del sys.modules[self.script.replace(".py", "")]
    except Exception:
        pass

    self.pipeline = DDDPipeline(self.script, name="DDD Server Build Pipeline")
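# pipeline_reload() relies on RollbackImporter to purge modules imported since
# the last run, so edited pipeline scripts are re-imported fresh. A minimal
# hypothetical sketch of such a class follows (the repo's real implementation
# may differ; the newModules attribute is assumed from its use in
# start_file_monitoring further below):
import builtins
import sys


class RollbackImporterSketch:

    def __init__(self):
        # Remember what was already imported, then hook __import__
        self.previousModules = set(sys.modules.keys())
        self.newModules = {}
        self._realImport = builtins.__import__
        builtins.__import__ = self._import

    def _import(self, name, *args, **kwargs):
        result = self._realImport(name, *args, **kwargs)
        if name not in self.previousModules:
            self.newModules[name] = True
        return result

    def uninstall(self):
        # Drop every module imported since construction, forcing fresh imports
        for modname in self.newModules:
            sys.modules.pop(modname, None)
        builtins.__import__ = self._realImport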
""" Run as: (env) jjmontes@j2ws:~/git/ddd/private (master)$ ddd pipelines.godot.godot_pipeline cp ~/git/NinjaCow/scenes/test/TestProcedural.tscn . ddd pipelines.godot.godot_pipeline --renderer=pyrender cp /tmp/ddd-godot.tscn ~/git/NinjaCow/scenes/test/ """ pipeline = DDDPipeline(['pipelines.test.test_base.s10_init.py', 'pipelines.test.test_base.s20_features_generate.py', 'pipelines.test.test_base.s20_features_export_2d.py', #'pipelines.godot.godot_base.s40_rooms.py', #'pipelines.godot.godot_base.s60_godot_export_scene.py', ], name="Test Build Pipeline") pipeline.data['ddd:test'] = True pipeline.data['ddd:test:output:json'] = True # associate to debug config if not set pipeline.data['ddd:test:output:intermediate'] = True # associate to debug config if not set pipeline.run() '''
class ServerServeCommand(DDDCommand):

    def parse_args(self, args):

        #program_name = os.path.basename(sys.argv[0])
        parser = argparse.ArgumentParser()  # description='', usage=''

        #parser.add_argument("-w", "--worker", default=None, help="worker (i/n)")
        parser.add_argument("script", help="script or pipeline entry point")

        args = parser.parse_args(args)

        self.script = args.script
        self.files_changed = False
        self.rollbackImporter = None
        self._results = {}

    def show(self, obj, label=None):
        logger.info("Server processing generated result (show): label=%s %s", label, obj)
        loop = self.loop
        result_index = len(self._results) + 1
        self._results[result_index] = {'data': obj.copy(), 'label': label}
        loop.call_soon_threadsafe(asyncio.ensure_future, self.result_send(None, result_index))

    def run(self):

        logger.info("Starting DDD server tool API (ws://).")

        D1D2D3Bootstrap._instance._unparsed_args = None

        # Disable builtin rendering
        logger.info("Disabling builtin rendering.")
        D1D2D3Bootstrap.renderer = self.show

        self.loop = asyncio.get_event_loop()

        # Create pipeline
        self.pipeline = None
        self.running = False

        # Start python-socketio
        self.sio = socketio.AsyncServer(cors_allowed_origins='*')
        app = web.Application()
        self.sio.attach(app)

        async def index(request):
            #with open('index.html') as f:
            #    return web.Response(text=f.read(), content_type='text/html')
            return web.Response(text="DDD SocketIO API Server", content_type='text/html')

        @self.sio.event
        def connect(sid, environ):
            logger.info("Websocket connect: %s %s", sid, environ)

        @self.sio.event
        async def chat_message(sid, data):
            logger.info("Websocket chat_message: %s %s", sid, data)

        @self.sio.event
        async def status_get(sid, data):
            logger.info("Websocket status_get: %s %s", sid, data)
            status = self.status_get()
            #logger.debug("Sending status: %s", status)
            await self.sio.emit('status', status, room=sid)

        @self.sio.event
        async def result_get(sid, data):
            logger.info("Websocket result_get: %s %s", sid, data)
            if self.running:
                return
            await self.result_send(sid)

        @self.sio.event
        def disconnect(sid):
            logger.info('Websocket disconnect: %s', sid)

        #app.router.add_static('/static', 'static')
        app.router.add_get('/', index)

        # Run pipeline initially
        asyncio.ensure_future(self.pipeline_init())

        try:
            web.run_app(app, host="localhost", port=8085)
        except KeyboardInterrupt:
            logger.info("Interrupted by user.")

    def status2_get(self):
        status = {
            'script': self.script,
            'status': {
                'running': self.running,
            }
        }
        return status

    def status_get(self):

        tasks_sorted = self.pipeline.tasks_sorted()
        tasks = [{'name': t.name,
                  'order': t.order,
                  'order_num': t._order_num,
                  'path': t.path,
                  'condition': t.condition is not None,
                  'selector': t.selector.selector if t.selector else None,
                  'filter': t.filter is not None,
                  'recurse': t.recurse,
                  'replace': t.replace,
                  'cache': t.cache,
                  'cache_override': t.cache_override,
                  #'funcargs': t._funcargs,
                  'description': t._funcargs[0].__doc__,
                  'params': t.params,
                  'run_seconds': t._run_seconds,
                  'run_selected': t._run_selected,
                  } for t in tasks_sorted]

        # Serialize and deserialize to ensure data is JSON serializable (converts objects to strings)
        data = json.loads(json.dumps(self.pipeline.data, default=str))

        status = {'script': self.script,
                  'data': data,
                  'tasks': tasks}

        return status

    def result_get(self, result_index=0):

        if result_index:
            result = self._results[result_index]
        else:
            result = {'data': self.pipeline.root,
                      'label': 'DDDServer Root Node'}

        # Process result
        #if isinstance(root, DDDObject2):
        #    root = root.copy3(copy_children=True)
        #root = root.find("/Elements3")

        # Export
        try:
            result_node = Generic3DPresentation.present(result['data'])
            result_data = result_node.save(".glb")
        except Exception as e:
            logger.error("Could not produce result model (.glb): %s", e)
            print(traceback.format_exc())
            result_data = None

        return {'data': result_data,
                'label': result['label']}

    async def result_send(self, sid=None, result_index=0):
        result = self.result_get(result_index)
        #return status
        if result and result['data']:
            logger.info("Sending result: %s bytes", len(result['data']) if result['data'] else None)
            await self.sio.emit('result', {"key": result_index,
                                           'data': result['data'],
                                           "label": result['label']}, room=sid)
        else:
            logger.info("No result to send.")

    async def pipeline_init(self):

        #self.pipeline = DDDPipeline([self.script], name="DDD Server Build Pipeline")
        self.pipeline_reload()

        # Start file monitoring
        self.start_file_monitoring()

        # Run pipeline initially
        await self.pipeline_run()

    def pipeline_reload(self):

        #self.pipeline = None
        if self.rollbackImporter:
            self.rollbackImporter.uninstall()
        else:
            self.rollbackImporter = RollbackImporter()

        try:
            del sys.modules[self.script.replace(".py", "")]
        except Exception:
            pass

        self.pipeline = DDDPipeline(self.script, name="DDD Server Build Pipeline")

    async def pipeline_run(self):

        if self.running:
            logger.warning("Pipeline already running.")
            return

        self.running = True
        with futures.ThreadPoolExecutor() as pool:
            logger.info("Running in thread pool.")
            run_result = await self.loop.run_in_executor(pool, self.pipeline_run_blocking)
            logger.info("Thread pool result: %s", run_result)
        self.running = False

        asyncio.ensure_future(self.result_send())

    def pipeline_run_blocking(self):
        try:
            self.pipeline.run()
        except Exception as e:
            logger.warning("Error running pipeline: %s", e)
            print(traceback.format_exc())
            return False
        return True

    def start_file_monitoring(self):

        event_handler = FileChangedEventHandler(self)
        path = self.script

        logger.info("Starting file monitoring.")
        observer = Observer()

        # Main file
        #observer.schedule(event_handler, path, recursive=False)

        # Main file dir recursively
        observer.schedule(event_handler, os.path.dirname(os.path.abspath(path)), recursive=True)

        # Imported files
        '''
        for modname in self.rollbackImporter.newModules.keys():
            logger.info("Monitoring: %s", modname)
            try:
                observer.schedule(event_handler, sys.modules[modname].__file__, recursive=False)
            except Exception as e:
                logger.info(e)
        '''

        observer.start()
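# start_file_monitoring() above references FileChangedEventHandler, which is
# defined elsewhere in this repo. A minimal hypothetical sketch of such a
# handler (behavior assumed, not the repo's actual implementation): watchdog
# calls on_modified() for changes under the scheduled directory, and the
# handler flags the owning command so it can reload and re-run the pipeline.
import logging

from watchdog.events import FileSystemEventHandler

logger = logging.getLogger(__name__)


class FileChangedEventHandlerSketch(FileSystemEventHandler):

    def __init__(self, command):
        super().__init__()
        self.command = command  # the ServerServeCommand instance

    def on_modified(self, event):
        if event.is_directory or not event.src_path.endswith(".py"):
            return
        logger.info("File changed: %s", event.src_path)
        self.command.files_changed = True  # flag initialized in parse_args() above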
import math
from csv import DictReader

from ddd.pipeline.pipeline import DDDPipeline
from ddd.pipeline.decorators import dddtask
import logging

"""
An example of a configurable processing pipeline in DDD.

This gets a list of atomic elements and displays them after several
processing and styling steps.
"""

# Get instance of logger for this module
logger = logging.getLogger(__name__)

# From https://en.wikipedia.org/wiki/List_of_chemical_elements

# Process features
pipeline = DDDPipeline(['periodictable_pipeline_base.py',
                        'periodictable_pipeline_simple.py'])
pipeline.run()

# Show an alternative styling
#pipeline = DDDPipeline(['periodictable_pipeline_base.py', 'periodictable_pipeline_variant.py'])
#pipeline.run()

# Style via generation of OSM elements and processing through the OSM pipeline
#pipeline = DDDPipeline.load(['periodictable_pipeline_base.py', 'periodictable_pipeline_osm.py', '../osm/osm_sketchy/*.py'])
#pipeline.run()

#pipeline.root.show()
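# The task files listed above ('periodictable_pipeline_base.py', ...) are not
# shown in this section. As a hypothetical illustration of their shape (the
# real files ship with the repo): a task file is a plain module of @dddtask
# functions whose arguments, such as 'root', are injected by name, as seen in
# the osm_materials.py fragment at the end of this section. The DictReader
# import above suggests the base task loads the element list from CSV; the
# filename and column names below are assumed.
from csv import DictReader

from ddd.ddd import ddd
from ddd.pipeline.decorators import dddtask


@dddtask()
def elements_load(root):
    """Loads chemical elements into the pipeline root node."""
    elements = ddd.group2(name="Elements")
    with open("elements.csv") as f:  # hypothetical data file
        for row in DictReader(f):
            item = ddd.point(name=row["Symbol"])
            item.extra.update(row)  # keep CSV columns as node metadata
            elements.append(item)
    root.append(elements)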
(env) jjmontes@j2ws:~/git/ddd/private (master)$ ddd pipelines.godot.godot_pipeline
cp ~/git/NinjaCow/scenes/test/TestProcedural.tscn .

ddd pipelines.godot.godot_pipeline --renderer=pyrender
cp /tmp/ddd-godot.tscn ~/git/NinjaCow/scenes/test/
"""

pipeline = DDDPipeline([
    'pipelines.godot.godot_base.s10_init.py',
    #'pipelines.godot_common.s10_locale_config.py',

    'pipelines.godot.godot_base.s20_godot_features.py',
    'pipelines.godot.godot_base.s20_godot_features_export_2d.py',

    'pipelines.godot.godot_base.s40_rooms.py',
    'pipelines.godot.godot_base.s40_items.py',

    'pipelines.godot.godot_base.s50_decoration.py',

    'pipelines.godot.godot_base.s60_godot_show.py',
    'pipelines.godot.godot_base.s60_godot_export_scene.py',
], name="Godot Polygon2D Build Pipeline")

pipeline.data['ddd:godot'] = True
pipeline.data['ddd:godot:output:json'] = True  # associate to debug config if not set
pipeline.data['ddd:godot:output:intermediate'] = True  # associate to debug config if not set

pipeline.run()
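# Sketch (hypothetical): a downstream task file could honor the debug flags
# set above by checking pipeline.data. The @dddtask argument injection and the
# node save-by-extension API are assumed from their use elsewhere in this
# section; the task name and output path are illustrative.
from ddd.pipeline.decorators import dddtask


@dddtask()
def godot_export_json(pipeline, root):
    """Exports the scene tree as JSON when the debug output flag is set."""
    if not pipeline.data.get('ddd:godot:output:json', False):
        return
    root.save("/tmp/ddd-godot.json")  # save-by-extension, as used for .glb above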
def run(self):

    # TODO: Move to pipelined builder
    logger.warning("Move to builder")

    logger.info("Running DDD123 OSM build command.")

    D1D2D3Bootstrap._instance._unparsed_args = None

    tasks_count = 0

    if self.xyztile:
        self.process_xyztile()

    #name = "vigo"
    #center_wgs84 = vigo_wgs84
    #area = area_vigo_huge_rande

    center_wgs84 = self.center

    # Name
    if self.name is None:
        self.name = "ddd-osm-%.3f,%.3f" % center_wgs84
    name = self.name

    path = "data/osm/"

    # Prepare data
    # Check if geojson file is available
    #sides = 15 * 0.01  # Approximate degrees to km
    sides = 5 * 0.001
    roundto = sides / 3
    datacenter = int(self.center[0] / roundto) * roundto, int(self.center[1] / roundto) * roundto
    dataname = name + "_%.4f_%.4f" % datacenter
    datafile = os.path.join(path, "%s.osm.geojson" % dataname)

    # Get data if needed or forced
    force_get_data = parse_bool(D1D2D3Bootstrap.data.get('ddd:osm:datasource:force_refresh', False))
    file_exists = os.path.isfile(datafile)

    if force_get_data or not file_exists:
        logger.info("Data file '%s' not found or datasource:force_refresh is True. Trying to produce data." % datafile)
        #self.get_data(path, dataname, datacenter)
        self.get_data_osm(path, dataname, datacenter)

    # Read data
    files = [os.path.join(path, f) for f in [dataname + '.osm.geojson']
             if os.path.isfile(os.path.join(path, f)) and f.endswith(".geojson")]
    logger.info("Reading %d files from %s: %s" % (len(files), path, files))

    # FIXME: API recommends using only 'epsg:4326' but seems to give weird coordinates? (always_xy=True?)
    osm_proj = pyproj.Proj(init='epsg:4326')
    ddd_proj = pyproj.Proj(proj="tmerc",
                           lon_0=center_wgs84[0],
                           lat_0=center_wgs84[1],
                           k=1,
                           x_0=0.,
                           y_0=0.,
                           units="m",
                           datum="WGS84",
                           ellps="WGS84",
                           towgs84="0,0,0,0,0,0,0",
                           no_defs=True)

    # TODO: Move area resolution outside this method and resolve after processing args
    area_ddd = None
    if self.area is not None:
        trans_func = partial(pyproj.transform, osm_proj, ddd_proj)
        area_ddd = ops.transform(trans_func, self.area)
    elif not self.chunk_size:
        resolution = 8
        if resolution > 1:
            area_ddd = ddd.point().buffer(self._radius, cap_style=ddd.CAP_ROUND, resolution=resolution).geom
        else:
            area_ddd = ddd.rect([-self._radius, -self._radius, self._radius, self._radius]).geom

    logger.info("Area meters/coords=%s", area_ddd)
    if area_ddd:
        logger.info("Complete polygon area: %.1f km2 (%d at 500, %d at 250, %d at 200)",
                    area_ddd.area / (1000 * 1000),
                    math.ceil(area_ddd.area / (500 * 500)),
                    math.ceil(area_ddd.area / (250 * 250)),
                    math.ceil(area_ddd.area / (200 * 200)))

    # TODO: organise tasks and locks in pipeline, not here
    skipped = 0
    existed = 0

    tiles = [(0, 0)] if not self.chunk_size else range_around([-64, -64, 64, 64])

    for (idx, (x, y)) in enumerate(tiles):
        #for x, y in range_around([-8, -8, 8, 8]):  # -8, 3

        if self.limit and tasks_count >= self.limit:
            logger.info("Limit of %d tiles hit.", self.limit)
            break

        if self.chunk_size:

            logger.info("Chunk size: %s", self.chunk_size)

            bbox_crop = [x * self.chunk_size, y * self.chunk_size,
                         (x + 1) * self.chunk_size, (y + 1) * self.chunk_size]
            bbox_filter = [bbox_crop[0] - self.chunk_size_extra_filter,
                           bbox_crop[1] - self.chunk_size_extra_filter,
                           bbox_crop[2] + self.chunk_size_extra_filter,
                           bbox_crop[3] + self.chunk_size_extra_filter]
            area_crop = ddd.rect(bbox_crop).geom
            area_filter = ddd.rect(bbox_filter).geom

            #area_ddd = ddd.rect(bbox_crop)
            trans_func = partial(pyproj.transform, ddd_proj, osm_proj)
            self.area = ops.transform(trans_func, area_crop)

            shortname = '%s_%d_%d,%d' % (name, abs(x) + abs(y), bbox_crop[0], bbox_crop[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"
        elif self.xyztile:
            area_crop = area_ddd
            area_filter = area_ddd.buffer(self.chunk_size_extra_filter, join_style=ddd.JOIN_MITRE)

            shortname = '%s_%d_%d_%d' % (name, self.xyztile[2], self.xyztile[0], self.xyztile[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"

        else:
            #logger.info("No chunk size defined (area was given)")
            area_crop = area_ddd
            #print(area_crop)
            area_filter = area_ddd.buffer(self.chunk_size_extra_filter, join_style=ddd.JOIN_MITRE)

            shortname = '%s_%dr_%.3f,%.3f' % (name, self._radius if self._radius else 0,
                                              self.center[0], self.center[1])
            filenamebase = 'output/%s/%s' % (name, shortname)
            filename = filenamebase + ".glb"

        if area_ddd and not area_ddd.intersects(area_crop):
            skipped += 1
            #logger.debug("Skipping: %s (cropped area not contained in greater filtering area)", filename)
            #if os.path.exists(filename):
            #    logger.info("Deleting: %s", filename)
            #    os.unlink(filename)
            continue

        if not D1D2D3Bootstrap._instance.overwrite and os.path.exists(filename):
            #logger.debug("Skipping: %s (already exists)", filename)
            existed += 1
            continue

        # Try to lock
        lockfilename = filename + ".lock"
        try:
            with open(lockfilename, "x") as _:

                old_formatters = {hdlr: hdlr.formatter for hdlr in logging.getLogger().handlers}
                if D1D2D3Bootstrap._instance.debug:
                    new_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(module)s [' + shortname + '] %(message)s')
                else:
                    new_formatter = logging.Formatter('%(asctime)s [' + shortname + '] %(message)s')

                # Apply formatter to existing loggers
                for hdlr in logging.getLogger().handlers:
                    hdlr.setFormatter(new_formatter)

                # Create a file handler for this process log
                # TODO: Support this at pipeline level / ddd command (?)
                build_log_file = False
                if build_log_file:
                    fh = logging.FileHandler('/tmp/%s.log' % (shortname, ))
                    fh.setLevel(level=logging.DEBUG)
                    fh.setFormatter(new_formatter)
                    logging.getLogger().addHandler(fh)

                # Check elevation is available
                elevation = ElevationModel.instance()
                center_elevation = elevation.value(center_wgs84)
                logger.info("Center point elevation: %s", center_elevation)

                logger.info("Generating: %s", filename)
                pipeline = DDDPipeline([
                    'pipelines.osm_base.s10_init.py',
                    'pipelines.osm_common.s10_locale_config.py',

                    'pipelines.osm_base.s20_osm_features.py',
                    'pipelines.osm_base.s20_osm_features_export_2d.py',

                    'pipelines.osm_base.s30_groups.py',
                    'pipelines.osm_base.s30_groups_ways.py',
                    'pipelines.osm_base.s30_groups_buildings.py',
                    'pipelines.osm_base.s30_groups_areas.py',
                    'pipelines.osm_base.s30_groups_items_nodes.py',
                    'pipelines.osm_base.s30_groups_items_ways.py',
                    'pipelines.osm_base.s30_groups_items_areas.py',
                    'pipelines.osm_base.s30_groups_export_2d.py',

                    'pipelines.osm_base.s40_structured.py',
                    'pipelines.osm_base.s40_structured_export_2d.py',

                    'pipelines.osm_augment.s45_pitch.py',

                    'pipelines.osm_base.s50_stairs.py',
                    'pipelines.osm_base.s50_positioning.py',
                    'pipelines.osm_base.s50_crop.py',
                    'pipelines.osm_base.s50_90_export_2d.py',

                    'pipelines.osm_augment.s50_ways.py',
                    'pipelines.osm_augment.s55_plants.py',
                    'pipelines.osm_augment.s55_rocks.py',
                    'pipelines.osm_augment.s55_building_floors.py',

                    'pipelines.osm_base.s60_model.py',
                    'pipelines.osm_base.s65_model_metadata_clean.py',
                    'pipelines.osm_base.s65_model_post_opt.py',
                    'pipelines.osm_base.s69_model_export_3d.py',

                    'pipelines.osm_base.s70_metadata.py',

                    'pipelines.osm_terrain.s60_heightmap_export.py',
                    'pipelines.osm_terrain.s60_splatmap_export.py',

                    'pipelines.osm_extras.s30_icons.py',
                    'pipelines.osm_extras.s80_model_compress.py',

                    #'pipelines.osm_extras.mapillary.py',
                    #'pipelines.osm_extras.ortho.py',
                ], name="OSM Build Pipeline")

                pipeline.data['osmfiles'] = files
                pipeline.data['filenamebase'] = filenamebase
                pipeline.data['ddd:pipeline:start_date'] = datetime.datetime.now()

                pipeline.data['tile:bounds_wgs84'] = self.area.bounds
                pipeline.data['tile:bounds_m'] = area_crop.bounds

                # Fusion DDD data with pipeline data, so changes to the latter affect the former
                # TODO: better way to do this without globals and merging data?
                D1D2D3.data.update(pipeline.data)
                D1D2D3.data = pipeline.data

                try:
                    osmbuilder = osm.OSMBuilder(area_crop=area_crop,
                                                area_filter=area_filter,
                                                osm_proj=osm_proj,
                                                ddd_proj=ddd_proj)
                    pipeline.data['osm'] = osmbuilder

                    pipeline.run()
                    #scene = osmbuilder.generate()

                    tasks_count += 1

                finally:
                    # Ensure lock file is removed
                    try:
                        os.unlink(lockfilename)
                    except Exception:
                        pass
                    # Restore original log formatters
                    for hdlr in logging.getLogger().handlers:
                        hdlr.setFormatter(old_formatters[hdlr])

        except FileExistsError:
            logger.info("Skipping: %s (lock file exists)", filename)

    if existed > 0:
        logger.info("Skipped %d files that already existed.", existed)
    if skipped > 0:
        logger.info("Skipped %d files not contained in greater filtering area.", skipped)
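# The lock handling above distills to a small pattern worth noting: open(path, "x")
# (exclusive create) raises FileExistsError if the file already exists, which makes
# it usable as a cheap cross-process lock so parallel workers never build the same
# tile twice. A standalone sketch of the pattern (names hypothetical):
import os


def try_build_locked(filename, build_func):
    lockfilename = filename + ".lock"
    try:
        with open(lockfilename, "x"):
            try:
                build_func(filename)  # do the actual work while holding the lock
            finally:
                # Always release the lock, even if the build raised
                try:
                    os.unlink(lockfilename)
                except OSError:
                    pass
        return True
    except FileExistsError:
        # Another worker owns this tile; skip it
        return False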
def run(self):

    # TODO: Move to pipelined builder
    logger.warning("Move to builder")

    logger.info("Running DDD123 OSM build command.")

    D1D2D3Bootstrap._instance._unparsed_args = None

    tasks_count = 0

    # TODO: allow alias in ~/.ddd.conf
    #vigo_wgs84 = [-8.723, 42.238]
    #cuvi_wgs84 = [-8.683, 42.168]
    #area_vigo = { "type": "Polygon", "coordinates": [ [ [ -8.738025517345417, 42.223436382101397 ], [ -8.740762525671032, 42.229564900743533 ], [ -8.73778751662145, 42.23289691087907 ], [ -8.738620519155333, 42.235871919928648 ], [ -8.733920004856994, 42.241702937665828 ], [ -8.729516991463614, 42.242773940923676 ], [ -8.724102474993376, 42.244975447620369 ], [ -8.712142938614059, 42.246254701511681 ], [ -8.711190935718193, 42.245748949973255 ], [ -8.703842663365727, 42.244112694995998 ], [ -8.700570153411187, 42.241197186127408 ], [ -8.702057657935978, 42.238995679430715 ], [ -8.70289066046986, 42.235485168752206 ], [ -8.705865669519442, 42.231736657349735 ], [ -8.70907867929299, 42.23036815318693 ], [ -8.716278201192978, 42.229059149205113 ], [ -8.719610211328508, 42.225370137983631 ], [ -8.726750233047504, 42.219539120246452 ], [ -8.730379744087994, 42.217516114092739 ], [ -8.736210761825173, 42.2191821191605 ], [ -8.736210761825173, 42.2191821191605 ], [ -8.738174267797897, 42.221562126400165 ], [ -8.738174267797897, 42.221562126400165 ], [ -8.738025517345417, 42.223436382101397 ] ] ] }
    #area_vigo_huge_rande = { "type": "MultiPolygon", "coordinates": [ [ [ [ -8.678739229779634, 42.285406246127017 ], [ -8.679768244461799, 42.286124008658462 ], [ -8.679944646978743, 42.287581258944734 ], [ -8.679709443622819, 42.290212924049762 ], [ -8.680473854529568, 42.292192037766931 ], [ -8.68123826543632, 42.293540409297322 ], [ -8.680326852432117, 42.296345799483696 ], [ -8.67829822348728, 42.296019597743189 ], [ -8.676534198317855, 42.296367546206326 ], [ -8.673329552593403, 42.296258812518111 ], [ -8.67153612700449, 42.297955036674374 ], [ -8.668243280021565, 42.299129318941247 ], [ -8.665009233877623, 42.299738197421469 ], [ -8.661275380602341, 42.30252156692557 ], [ -8.652602256852674, 42.303152156982897 ], [ -8.648603799801982, 42.298759639848647 ], [ -8.641165493670913, 42.289147221675357 ], [ -8.65072063000529, 42.282382853576621 ], [ -8.65730632397114, 42.275465481810826 ], [ -8.65965835753037, 42.268242761434706 ], [ -8.661657586055718, 42.260758105800491 ], [ -8.664597628004756, 42.257189526957589 ], [ -8.676240194122952, 42.251009315195994 ], [ -8.676475397478876, 42.245350843851035 ], [ -8.651308638395097, 42.239953059756857 ], [ -8.63943086892098, 42.244741439740103 ], [ -8.620496998769166, 42.249181249186741 ], [ -8.612147279633895, 42.243870852227474 ], [ -8.618144965209934, 42.226543662551634 ], [ -8.628493912870553, 42.213566923726354 ], [ -8.647192579666443, 42.210082781391023 ], [ -8.654366282022099, 42.200674637101095 ], [ -8.654601485378024, 42.190132366865519 ], [ -8.663421611225139, 42.175492249420188 ], [ -8.672476940428181, 42.164509936746896 ], [ -8.666949661563988, 42.158059118335679 ], [ -8.666949661563988, 42.154048818664116 ], [ -8.682355481376954, 42.151782015135495 ], [ -8.698584512935652, 42.151956387519974 ], [ -8.707522240460731, 42.154397550462775 ], [ -8.715166349528236, 42.158756535815868 ], [ -8.726103305578659, 42.167473605784153 ], [ -8.732806601222471, 42.173139057222009 ], [ -8.735041033103739, 42.180982690717805 ], [ -8.742685142171242, 42.189173891460896 ], [ -8.762559825746747, 42.185688403840906 ], [ -8.7798472724071, 42.182987018755384 ], [ -8.786903373084794, 42.188041129063663 ], [ -8.795605897253949, 42.187256897051611 ], [ -8.80536683652476, 42.191744315452596 ], [ -8.808248077634818, 42.2020249661402 ], [ -8.796664312355604, 42.206990444093883 ], [ -8.792077846915102, 42.202939688770883 ], [ -8.780905687508753, 42.212608803743251 ], [ -8.782493310161234, 42.223582762359229 ], [ -8.764500253433113, 42.235425529931319 ], [ -8.743625955594931, 42.241781393183061 ], [ -8.719870416646694, 42.248049562726422 ], [ -8.709756672341998, 42.260366442379272 ], [ -8.691763615613878, 42.264500544688026 ], [ -8.688235565275031, 42.262498802682778 ], [ -8.678357024326258, 42.271506141210914 ], [ -8.66950749805965, 42.283774937233652 ], [ -8.669514848164527, 42.286255869446485 ], [ -8.669597536844341, 42.286667762707708 ], [ -8.669812527411864, 42.286814575496322 ], [ -8.670181870181713, 42.28682680987994 ], [ -8.678739229779634, 42.285406246127017 ] ] ] ] }
    #name = "vilanovailagertru"

    if self.xyztile:
        self.process_xyztile()

    #name = "vigo"
    #center_wgs84 = vigo_wgs84
    #area = area_vigo_huge_rande

    center_wgs84 = self.center

    # Name
    if self.name is None:
        self.name = "ddd-osm-%.3f,%.3f" % center_wgs84
    name = self.name

    path = "data/osm/"

    # Prepare data
    # Check if geojson file is available
    #sides = 15 * 0.01  # Approximate degrees to km
    sides = 5 * 0.001
    roundto = sides / 3
    datacenter = int(self.center[0] / roundto) * roundto, int(self.center[1] / roundto) * roundto
    dataname = name + "_%.4f_%.4f" % datacenter
    datafile = os.path.join(path, "%s.osm.geojson" % dataname)

    if not os.path.isfile(datafile):
        logger.info("Data file '%s' not found. Trying to produce data." % datafile)
        self.get_data_osm(path, dataname, datacenter, self.area)

    files = [os.path.join(path, f) for f in [dataname + '.osm.geojson']
             if os.path.isfile(os.path.join(path, f)) and f.endswith(".geojson")]
    logger.info("Reading %d files from %s: %s" % (len(files), path, files))

    # FIXME: API recommends using only 'epsg:4326' but seems to give weird coordinates?
    osm_proj = pyproj.Proj(init='epsg:4326')
ddd_proj = pyproj.Proj(proj="tmerc", lon_0=center_wgs84[0], lat_0=center_wgs84[1], k=1, x_0=0., y_0=0., units="m", datum="WGS84", ellps="WGS84", towgs84="0,0,0,0,0,0,0", no_defs=True) # TODO: Move area resolution outside this method and resolve after processing args area_ddd = None if self.area is not None: trans_func = partial(pyproj.transform, osm_proj, ddd_proj) area_ddd = ops.transform(trans_func, self.area) else: resolution = 8 if resolution > 1: area_ddd = ddd.point().buffer(self._radius, cap_style=ddd.CAP_ROUND, resolution=resolution).geom else: area_ddd = ddd.rect( [-self._radius, -self._radius, self._radius, self._radius]).geom logger.info("Area meters/coords=%s", area_ddd) logger.info( "Complete polygon area: %.1f km2 (%d at 500, %d at 250, %d at 200)", area_ddd.area / (1000 * 1000), math.ceil(area_ddd.area / (500 * 500)), math.ceil(area_ddd.area / (250 * 250)), math.ceil(area_ddd.area / (200 * 200))) # TODO: organise tasks and locks in pipeline, not here skipped = 0 existed = 0 tiles = [(0, 0)] if not self.chunk_size else range_around( [-64, -64, 64, 64]) for (idx, (x, y)) in enumerate(tiles): #for x, y in range_around([-8, -8, 8, 8]): # -8, 3 if self.limit and tasks_count >= self.limit: logger.info("Limit of %d tiles hit.", self.limit) break if self.chunk_size: logger.info("Chunk size: %s", self.chunk_size) bbox_crop = [ x * self.chunk_size, y * self.chunk_size, (x + 1) * self.chunk_size, (y + 1) * self.chunk_size ] bbox_filter = [ bbox_crop[0] - self.chunk_size_extra_filter, bbox_crop[1] - self.chunk_size_extra_filter, bbox_crop[2] + self.chunk_size_extra_filter, bbox_crop[3] + self.chunk_size_extra_filter ] area_crop = ddd.rect(bbox_crop).geom area_filter = ddd.rect(bbox_filter).geom shortname = '%s_%d_%d,%d' % (name, abs(x) + abs(y), bbox_crop[0], bbox_crop[1]) filenamebase = 'output/%s/%s' % (name, shortname) filename = filenamebase + ".glb" elif self.xyztile: area_crop = area_ddd area_filter = area_ddd.buffer(self.chunk_size_extra_filter, join_style=ddd.JOIN_MITRE) shortname = '%s_%d_%d_%d' % (name, self.xyztile[2], self.xyztile[0], self.xyztile[1]) filenamebase = 'output/%s/%s' % (name, shortname) filename = filenamebase + ".glb" else: #logger.info("No chunk size defined (area was given)") area_crop = area_ddd #print(area_crop) area_filter = area_ddd.buffer(self.chunk_size_extra_filter, join_style=ddd.JOIN_MITRE) shortname = '%s_%dr_%.3f,%.3f' % ( name, self._radius if self._radius else 0, self.center[0], self.center[1]) filenamebase = 'output/%s/%s' % (name, shortname) filename = filenamebase + ".glb" if area_ddd and not area_ddd.intersects(area_crop): skipped += 1 #logger.debug("Skipping: %s (cropped area not contained in greater filtering area)", filename) #if os.path.exists(filename): # logger.info("Deleting: %s", filename) # os.unlink(filename) continue if not D1D2D3Bootstrap._instance.overwrite and os.path.exists( filename): #logger.debug("Skipping: %s (already exists)", filename) existed += 1 continue # Try to lock lockfilename = filename + ".lock" try: with open(lockfilename, "x") as _: old_formatters = { hdlr: hdlr.formatter for hdlr in logging.getLogger().handlers } if D1D2D3Bootstrap._instance.debug: new_formatter = logging.Formatter( '%(asctime)s - %(levelname)s - %(module)s [' + shortname + '] %(message)s') else: new_formatter = logging.Formatter('%(asctime)s [' + shortname + '] %(message)s') # Apply formatter to existing loggers for hdlr in logging.getLogger().handlers: hdlr.setFormatter(new_formatter) # Create a file handler for this process log # TODO: 
                build_log_file = False
                if build_log_file:
                    fh = logging.FileHandler('/tmp/%s.log' % (shortname, ))
                    fh.setLevel(level=logging.DEBUG)
                    fh.setFormatter(new_formatter)
                    logging.getLogger().addHandler(fh)

                # Check elevation is available
                elevation = ElevationModel.instance()
                center_elevation = elevation.value(center_wgs84)
                logger.info("Center point elevation: %s", center_elevation)

                logger.info("Generating: %s", filename)
                pipeline = DDDPipeline([
                    'pipelines.osm_base.s10_init.py',
                    'pipelines.osm_common.s10_locale_config.py',

                    'pipelines.osm_base.s20_osm_features.py',
                    'pipelines.osm_base.s20_osm_features_export_2d.py',

                    'pipelines.osm_base.s30_groups.py',
                    'pipelines.osm_base.s30_groups_ways.py',
                    'pipelines.osm_base.s30_groups_buildings.py',
                    'pipelines.osm_base.s30_groups_areas.py',
                    'pipelines.osm_base.s30_groups_items_nodes.py',
                    'pipelines.osm_base.s30_groups_items_ways.py',
                    'pipelines.osm_base.s30_groups_items_areas.py',
                    'pipelines.osm_base.s30_groups_export_2d.py',

                    'pipelines.osm_base.s40_structured.py',
                    'pipelines.osm_base.s40_structured_export_2d.py',

                    'pipelines.osm_common.s45_pitch.py',

                    'pipelines.osm_base.s50_stairs.py',
                    'pipelines.osm_base.s50_positioning.py',
                    'pipelines.osm_base.s50_crop.py',
                    'pipelines.osm_base.s50_90_export_2d.py',

                    'pipelines.osm_base.s60_model.py',
                    'pipelines.osm_base.s60_model_export_3d.py',

                    'pipelines.osm_gdterrain.s60_terrain_export.py',

                    'pipelines.osm_augment.s50_ways.py',
                    'pipelines.osm_augment.s55_plants.py',

                    'pipelines.osm_default_2d.s30_icons.py',

                    #'pipelines.osm_extras.mapillary.py',
                    #'pipelines.osm_extras.ortho.py',
                ], name="OSM Build Pipeline")

                pipeline.data['osmfiles'] = files
                pipeline.data['filenamebase'] = filenamebase

                # Fusion DDD data with pipeline data, so changes to the latter affect the former
                # TODO: better way to do this without globals and merging data?
                D1D2D3.data.update(pipeline.data)
                D1D2D3.data = pipeline.data

                try:
                    osmbuilder = osm.OSMBuilder(area_crop=area_crop,
                                                area_filter=area_filter,
                                                osm_proj=osm_proj,
                                                ddd_proj=ddd_proj)
                    pipeline.data['osm'] = osmbuilder

                    pipeline.run()
                    #scene = osmbuilder.generate()

                    tasks_count += 1

                finally:
                    # Ensure lock file is removed
                    try:
                        os.unlink(lockfilename)
                    except Exception:
                        pass
                    # Restore original log formatters
                    for hdlr in logging.getLogger().handlers:
                        hdlr.setFormatter(old_formatters[hdlr])

        except FileExistsError:
            logger.info("Skipping: %s (lock file exists)", filename)

    if existed > 0:
        logger.info("Skipped %d files that already existed.", existed)
    if skipped > 0:
        logger.info("Skipped %d files not contained in greater filtering area.", skipped)
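# Both run() variants above use the pyproj 1.x style API (Proj(init=...) and
# pyproj.transform), which is deprecated in pyproj >= 2. A sketch of the modern
# equivalent, which also speaks to the FIXME about axis order: passing
# always_xy=True forces lon/lat (x, y) ordering for 'EPSG:4326'. The center
# coordinates below are illustrative; the real code derives them from center_wgs84.
import pyproj

osm_crs = pyproj.CRS("EPSG:4326")
ddd_crs = pyproj.CRS.from_proj4(
    "+proj=tmerc +lon_0=-8.723 +lat_0=42.238 +k=1 +x_0=0 +y_0=0 "
    "+units=m +datum=WGS84 +no_defs")
transformer = pyproj.Transformer.from_crs(osm_crs, ddd_crs, always_xy=True)

x, y = transformer.transform(-8.723, 42.238)  # lon, lat -> local meters (about 0, 0 at center)

# shapely.ops.transform expects func(x, y) -> (x', y'), so Transformer.transform
# fits directly in place of the partial(pyproj.transform, ...) calls above:
#area_ddd = ops.transform(transformer.transform, area_wgs84_geometry)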
from ddd.ddd import ddd, DDDMaterial
from ddd.pipeline.decorators import dddtask
from ddd.pipeline.pipeline import DDDPipeline

"""
This is run as:

    ddd osm_materials.py --export-textures

The result is then copied to the client app, eg:

    cp catalog_materials.glb ~/git/ddd-viewer2/public/assets/
"""

pipeline = DDDPipeline([
    'pipelines.osm_base.s10_init.py',
], name="OSM Build Pipeline")

# TODO: Move to init?
#osmbuilder = osm.OSMBuilder(area_crop=area_crop, area_filter=area_filter, osm_proj=osm_proj, ddd_proj=ddd_proj)
pipeline.data['osm'] = None


@dddtask()
def materials_list(root, osm):

    mats = ddd.group3(name="Materials")
    root.append(mats)

    for key in dir(ddd.mats):
        mat = getattr(ddd.mats, key)
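        # NOTE: the original file is truncated at this point. A hypothetical
        # continuation of the loop (names and geometry assumed, not the repo's
        # actual code): keep only DDDMaterial attributes and add a sample node
        # per material to the catalog group.
        if not isinstance(mat, DDDMaterial):
            continue  # skip non-material attributes (helpers, dunders...)
        sample = ddd.point(name="Material: %s" % key)  # placeholder geometry (assumed)
        sample = sample.material(mat)
        mats.append(sample)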