def create_process_db_sqlite(testname, matched_files):
    '''
    Setup process db for organizing parallel processing.

    Creates a timestamped SpatiaLite database with one polygon feature per
    matched (las, reference) file pair.

    Args:
        testname: Name of the test; used for the layer name and db filename.
        matched_files: Iterable of (las_path, ref_path) tuples.

    Returns:
        The filename of the created SQLite database.
    '''
    db_name = "{0}_{1:d}.sqlite".format(testname, int(time.time()))
    driver = ogr.GetDriverByName('SQLite')
    datasource = driver.CreateDataSource(db_name, ['SPATIALITE=YES'])
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(constants.EPSG_CODE)
    layer = datasource.CreateLayer(testname, srs, ogr.wkbPolygon, ['GEOMETRY_NAME=geom'])
    # Schema of the process table - one row per tile to be processed.
    fields = (
        ('id', ogr.OFTInteger),
        ('tile_name', ogr.OFTString),
        ('las_path', ogr.OFTString),
        ('ref_path', ogr.OFTString),
        ('prc_id', ogr.OFTInteger),
        ('exe_start', ogr.OFTString),
        ('exe_end', ogr.OFTString),
        ('status', ogr.OFTInteger),
        ('rcode', ogr.OFTInteger),
        ('msg', ogr.OFTString),
    )
    for field_name, field_type in fields:
        layer.CreateField(ogr.FieldDefn(field_name, field_type))
    for pid, (lasname, vname) in enumerate(matched_files):
        tile = constants.get_tilename(lasname)
        wkt = constants.tilename_to_extent(tile, return_wkt=True)
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField('id', pid)
        feature.SetField('tile_name', tile)
        feature.SetField('las_path', lasname)
        feature.SetField('ref_path', vname)
        # status 0 means "not yet processed".
        feature.SetField('status', 0)
        feature.SetGeometry(ogr.CreateGeometryFromWkt(wkt))
        layer.CreateFeature(feature)
        feature = None
    # Dereference the OGR handles so pending writes are flushed to disk
    # before callers open the database (the original never released them).
    layer = None
    datasource = None
    return db_name
def push_job(cstr, matched_files, job_def):
    '''
    Push a processing job definition and its per-tile rows to the process db.

    Very similar to the corresponding logic in qc_wrap.

    Args:
        cstr: Connection string for the process database.
        matched_files: Iterable of (tile_path, ref_path) tuples.
        job_def: Dict with keys TESTNAME, TARGS, RUN_ID, SCHEMA, PRIORITY.

    Returns:
        (job_id, n_added): id of the created proc_defs row and the number
        of tile rows successfully inserted.
    '''
    con = db.connect(cstr)
    cur = con.cursor()
    testname = job_def["TESTNAME"]
    targs = json.dumps(job_def["TARGS"])
    runid = job_def["RUN_ID"]
    schema = job_def["SCHEMA"]
    priority = job_def["PRIORITY"]
    client = platform.node()
    n_tiles = len(matched_files)
    cur.execute(
        "insert into proc_defs(testname,report_schema,run_id,targs,n_tiles,created_time,created_by) "
        "values(%s,%s,%s,%s,%s,now(),%s) returning id",
        (testname, schema, runid, targs, n_tiles, client))
    job_id = cur.fetchone()[0]
    n_added = 0
    # Now add a row in the job table for each matched tile.
    for tile_path, ref_path in matched_files:
        try:
            # or use ogr-geometry
            tile = constants.get_tilename(tile_path)
            wkt = constants.tilename_to_extent(tile, return_wkt=True)
        except Exception as e:  # was py2-only "except Exception, e" syntax
            print("Bad tilename in " + tile_path)
            continue
        cur.execute(
            "insert into proc_jobs(wkb_geometry,tile_name,path,ref_cstr,job_id,status,priority,version) "
            "values(st_geomfromtext(%s,25832),%s,%s,%s,%s,%s,%s,%s)",
            (wkt, tile, tile_path, ref_path, job_id, 0, priority, 0))
        n_added += 1
    # The original never committed, so the inserts could be rolled back when
    # the local connection was garbage collected.
    con.commit()
    cur.close()
    con.close()
    return job_id, n_added
def main(args):
    '''
    Main script functionality. Can be invoked from either the command line
    or via qc_wrap.py

    Counts points per classification class in the input las file and reports
    the counts (plus the tile extent) via the report module.

    Args:
        args: argv-style list; args[1:] is parsed by the module-level parser.

    Returns:
        1 on argument-parsing failure, otherwise None.
    '''
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as error_msg:
        print(str(error_msg))
        return 1
    kmname = get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" % (PROGNAME, kmname, time.asctime()))
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportClassCount(pargs.use_local)
    pc = pointcloud.fromAny(pargs.las_file)
    n_points_total = pc.get_size()
    if n_points_total == 0:
        print(
            "Something is terribly terribly wrong here! Simon - vi skal melde en fjel"
        )
    # Count points per class with one loop instead of seventeen copy-pasted
    # cut_to_class/get_size pairs. Order here matches the original counting
    # order (note: low_veg, HIGH_veg, MED_veg - not the report-call order).
    class_order = (
        constants.created_unused,
        constants.surface,
        constants.terrain,
        constants.low_veg,
        constants.high_veg,
        constants.med_veg,
        constants.building,
        constants.outliers,
        constants.mod_key,
        constants.water,
        constants.ignored,
        constants.bridge,
        # new classes
        constants.high_noise,
        constants.power_line,
        constants.terrain_in_buildings,
        constants.low_veg_in_buildings,
        constants.man_excl,
    )
    (n_created_unused, n_surface, n_terrain, n_low_veg, n_high_veg,
     n_med_veg, n_building, n_outliers, n_mod_key, n_water, n_ignored,
     n_bridge, n_high_noise, n_power_line, n_terrain_in_buildings,
     n_low_veg_in_buildings, n_man_excl) = [
         pc.cut_to_class(cls).get_size() for cls in class_order]
    polywkt = tilename_to_extent(kmname, return_wkt=True)
    print(polywkt)
    # Argument order below is part of the ReportClassCount contract - it
    # intentionally differs from the counting order above.
    reporter.report(kmname, n_created_unused, n_surface, n_terrain, n_low_veg,
                    n_med_veg, n_high_veg, n_building, n_outliers, n_mod_key,
                    n_water, n_ignored, n_power_line, n_bridge, n_high_noise,
                    n_terrain_in_buildings, n_low_veg_in_buildings, n_man_excl,
                    n_points_total, wkt_geom=polywkt)
def match_tiles_to_ref_data(input_files, args, test_connections=True):
    '''
    Match input files to reference data.

    Either pairs every input file with a single (non-tiled) reference
    datasource connection string, or looks up a matching reference tile per
    input file in a tile database.

    Args:
        input_files: Iterable of input file paths.
        args: Dict with REF_DATA_CONNECTION or the REF_TILE_* keys.
        test_connections: If True, raise when the non-tiled reference
            datasource cannot be opened; otherwise just warn.

    Returns:
        List of (input_path, reference) tuples.
    '''
    matched_files = []
    n_not_existing = 0  # defined up front: the summary print below needs it on both paths
    if args["REF_DATA_CONNECTION"] is not None:
        print("A non-tiled reference datasource is specified.")
        print("Testing reference data connection....")
        ds = ogr.Open(args["REF_DATA_CONNECTION"])
        if ds is None:
            if test_connections:
                raise Exception("Failed to open reference datasource.")
            print("Failed to open reference datasource.")
        else:
            # Only report success when the datasource actually opened
            # (the original printed "ok..." unconditionally).
            print("ok...")
        ds = None
        matched_files = [(name, args["REF_DATA_CONNECTION"]) for name in input_files]
    else:
        print("Tiled reference data specified... getting corresponding tiles.")
        print("Assuming that " + args["REF_TILE_DB"] + " has table named " +
              args["REF_TILE_TABLE"] + " with fields " +
              args["REF_TILE_NAME_FIELD"] + "," + args["REF_TILE_PATH_FIELD"])
        ds = ogr.Open(args["REF_TILE_DB"])
        assert ds is not None
        for name in input_files:
            tile_name = constants.get_tilename(name)
            # Wow - hard to bypass SQL-injection here... ;-()
            # NOTE(review): table/field names come from job configuration and
            # tile_name from constants.get_tilename, which limits the exposure,
            # but this SQL is still string-built.
            sql = """ SELECT {path} FROM {table} WHERE {name_field} = '{name:s}'""".format(
                path=args["REF_TILE_PATH_FIELD"],
                table=args['REF_TILE_TABLE'],
                name_field=args['REF_TILE_NAME_FIELD'],
                name=tile_name,
            )
            layer = ds.ExecuteSQL(sql)
            try:
                n_hits = layer.GetFeatureCount()
                if n_hits > 1:
                    print("Hmmm - more than one reference tile...")
                if n_hits == 0:
                    print("Reference tile corresponding to " + name + " not found in db.")
                    n_not_existing += 1
                    continue
                ref_tile = layer[0].GetField(0)
            finally:
                # Result layers from ExecuteSQL must be released explicitly;
                # the original leaked one per input file.
                ds.ReleaseResultSet(layer)
            if not os.path.exists(ref_tile):
                print("Reference tile " + ref_tile + " does not exist in the file system!")
                n_not_existing += 1
                continue
            matched_files.append((name, ref_tile))
    print("%d input tiles matched with reference tiles."
          % len(matched_files))
    print("%d non existing reference tiles." % (n_not_existing))
    return matched_files
def append_tiles(datasource, layer, walk_path, ext_match,
                 wdepth=None, rexclude=None, rinclude=None, rfpat=None, upsert=False):
    """
    Append tiles to a tile-coverage database.

    Walks ALL files below walk_path, filters them by depth / regex / extension
    and inserts one row per valid tilename into the 'coverage' table.
    If you only need to index a subfolder, point directly at it to increase
    speed and avoid filename collisions. The FIRST tilename encountered wins;
    subsequent duplicates are skipped unless upsert (--overwrite) is set.

    Args:
        datasource: OGR datasource holding the 'coverage' table.
        layer: not referenced in this function (kept for call-site compatibility).
        walk_path: Toplevel folder to walk.
        ext_match: Container of accepted file extensions (with leading dot).
        wdepth: Optional max directory depth below walk_path.
        rexclude: Optional regex; directories matching it are skipped.
        rinclude: Optional regex; only directories matching it are included.
        rfpat: Optional regex; only filenames matching it are included.
        upsert: If True use INSERT OR REPLACE (overwrite duplicates).
    """
    n_insertions = 0
    n_excluded = 0
    n_badnames = 0
    n_dublets = 0
    print(walk_path)
    walker = WalkFiles(walk_path)
    for path, mtime in walker:
        root = os.path.dirname(path)
        name = os.path.basename(path)
        if root == walk_path:
            depth = 0
        else:
            depth = len(os.path.relpath(root, walk_path).split(os.path.sep))
        # Guard clauses: depth limit, directory include/exclude, filename pattern.
        if wdepth is not None and wdepth < depth:
            continue
        if (rexclude is not None) and re.search(rexclude, root):
            n_excluded += 1
            continue
        if (rinclude is not None) and not re.search(rinclude, root):
            n_excluded += 1
            continue
        if rfpat is not None and not re.search(rfpat, name):
            n_excluded += 1
            continue
        ext = os.path.splitext(name)[1]
        if ext not in ext_match:
            continue
        tile = constants.get_tilename(name)
        try:
            wkt = constants.tilename_to_extent(tile, return_wkt=True)
        except ValueError:
            n_badnames += 1
            continue
        row, col = constants.tilename_to_index(tile)
        geom = "GeomFromText('{0}', {1})".format(wkt, constants.EPSG_CODE)
        insert = 'INSERT OR REPLACE' if upsert else 'INSERT'
        # Double embedded single quotes so paths like ".../O'Brien/..." do not
        # break the statement (the original interpolated values unescaped).
        sql = ("{0} INTO coverage (tile_name, path, mtime, row, col, geom) "
               "VALUES ('{1}','{2}','{3}',{4},{5},{6})").format(
                   insert,
                   tile.replace("'", "''"),
                   path.replace("'", "''"),
                   str(mtime).replace("'", "''"),
                   row, col, geom)
        try:
            datasource.ExecuteSQL(sql)
        except Exception:
            # Narrowed from a bare except (which also swallowed
            # KeyboardInterrupt). A failed insert means a duplicate tilename
            # when the table has a unique constraint on tile_name.
            n_dublets += 1
        else:
            n_insertions += 1
            if n_insertions % 200 == 0:
                log("Done: {0:d}".format(n_insertions))
    log("Inserted/updated {0:d} rows".format(n_insertions))
    if not upsert:
        log("Encountered {0:d} 'dublet' tilenames".format(n_dublets))
    if n_excluded > 0:
        log("Excluded {0:d} paths".format(n_excluded))
    log("Encountered {0:d} bad tile-names.".format(n_badnames))