def open_vector(fname, key="", where=''):
    """Open a vector layer or feature set.

    `fname` is either a plain filename, or "<database>:<layer>" where
    <database> is a key in settings().DATABASES (opened through OGR's
    PostgreSQL driver).

    Returns the GeoVector, or — when a `where` clause is given — the array
    of features matching that attribute filter.

    Raises Exception if the database name is not configured.
    """
    parts = fname.split(':')
    if len(parts) == 1:
        vector = GeoVector(fname)
        vector.SetPrimaryKey(key)
    else:
        # "<db>:<layer>" - open a layer from a configured PostGIS database
        if parts[0] not in settings().DATABASES.keys():
            raise Exception("%s is not a valid database" % parts[0])
        db = settings().DATABASES[parts[0]]
        filename = ("PG:dbname=%s host=%s port=%s user=%s password=%s" %
                    (db['NAME'], db['HOST'], db['PORT'], db['USER'], db['PASSWORD']))
        # Previously a try/except here logged the traceback and fell through,
        # leaving `vector` unbound and causing a confusing NameError below.
        # Let any open error propagate to the caller instead.
        vector = GeoVector(filename, parts[1])
        vector.SetPrimaryKey(key)
    if where != '':
        # return array of features matching the attribute filter
        return vector.where(where)
    else:
        return vector
def geometry_to_GeoVector(geometry):
    """Wrap a GeoJSON geometry dict in a Feature, write it to a temporary
    .geojson file, and open that file as a GeoVector.

    NOTE: the temporary file is intentionally kept on disk (delete=False)
    because the returned GeoVector reads from it.
    """
    feature_doc = json.dumps({"type": "Feature", "geometry": geometry})
    tmp = tempfile.NamedTemporaryFile(suffix='.geojson', mode='w', delete=False)
    with tmp as handle:
        handle.write(feature_doc)
    return GeoVector(tmp.name)
def get_features(layer, bbox=None, union=False, filename=''):
    """Get features in this layer and return them as a GeoVector.

    Features are fetched as GeoJSON (optionally clipped to `bbox` and/or
    unioned), written to `filename` — or to a fresh temporary .geojson file
    when no filename is given — and the written file is opened as a
    GeoVector.
    """
    features = get_features_as_geojson(layer, bbox=bbox, union=union)
    if filename == '':
        # no target given: create a fresh temporary file, then close the raw
        # fd and reopen by name below so both branches share one write path
        fd, filename = tempfile.mkstemp(suffix='.geojson')
        os.close(fd)
    else:
        logger.info('Writing GeoJSON to file %s' % filename)
    logger.info('Saving JSON as vector file', action='Save file', actee=filename, actor=__name__)
    # os.write() requires bytes on Python 3; a text-mode file handle is
    # portable and matches the explicit-filename branch of the original
    with open(filename, 'w') as f:
        f.write(json.dumps(features))
    # create GeoVector from the file just written
    return GeoVector(filename)
def open_vector(fname, key="", where=''):
    """Open vector or feature, returned as a gippy GeoVector or GeoFeature."""
    pieces = fname.split(':')
    if len(pieces) == 1:
        # plain filename
        vec = GeoVector(fname)
        vec.SetPrimaryKey(key)
    else:
        # "<database>:<layer>" - look up connection info in settings
        dbname = pieces[0]
        if dbname not in settings().DATABASES.keys():
            raise Exception("%s is not a valid database" % dbname)
        db = settings().DATABASES[dbname]
        conn = "PG:dbname=%s host=%s port=%s user=%s password=%s" % (
            db['NAME'], db['HOST'], db['PORT'], db['USER'], db['PASSWORD'])
        vec = GeoVector(conn, pieces[1])
        vec.SetPrimaryKey(key)
    if where == '':
        return vec
    # attribute filter given: return array of features
    return vec.where(where)
def main(): parser = l2dParser(description='Create DEM(s) from LiDAR files', commands=True) parser.add_input_parser() parser.add_output_parser() parser.add_filter_parser() # parser.add_argument('--vendor_classified', # help='Files are not classified by l2d, the l2d naming scheme was not used for classified files', # default=False) args = parser.parse_args() start0 = datetime.now() lasdir = args.lasdir # open site vector if args.site is not None: try: site = GeoVector(args.site) except: print 'Error opening %s' % args.site exit(2) else: site = [None] # make sure outdir exists args.outdir = os.path.abspath(args.outdir) if not os.path.exists(args.outdir): os.makedirs(args.outdir) args.lasdir = os.path.abspath(args.lasdir) # the final filenames products = dem_products(args.demtype) bnames = {p: '%s%s.%s' % (args.demtype, args.suffix, p) for p in products} prefix = '' # if args.site is None else site.Basename() + '_' fouts = { p: os.path.join(args.outdir, '%s%s%s.%s.vrt' % (prefix, args.demtype, args.suffix, p)) for p in products } # pull out the arguments to pass to create_dems keys = [ 'radius', 'decimation', 'maxsd', 'maxz', 'maxangle', 'returnnum', 'outdir', 'suffix', 'verbose', 'overwrite', 'resolution' ] vargs = vars(args) kwargs = {k: vargs[k] for k in vargs if k in keys} # run if any products are missing exists = all([os.path.exists(f) for f in fouts.values()]) if exists and not args.overwrite: print 'Already created %s in %s' % (args.demtype, os.path.relpath(args.outdir)) exit(0) # loop through features pieces = [] for feature in site: try: # find las files if args.demtype == 'density': lasfiles = find_lasfiles(args.lasdir, site=feature, checkoverlap=True) else: if args.vendor_classified == False: parameters = class_params(feature, args.slope, args.cellsize) lasfiles = find_classified_lasfile(args.lasdir, site=feature, params=parameters) else: lasfiles = find_lasfiles(args.lasdir, site=feature, checkoverlap=True) # create dems pouts = create_dems(lasfiles, 
args.demtype, site=feature, gapfill=args.gapfill, **kwargs) # NOTE - if gapfill then fouts is dict, otherwise is list of dicts (1 for each radius) pieces.append(pouts) except Exception, e: print "Error creating %s %s: %s" % ( args.demtype, '' if feature is None else feature.Basename(), e) if args.verbose: import traceback print traceback.format_exc()
def main(): dhf = argparse.ArgumentDefaultsHelpFormatter desc = 'Process voxels into relative density metrics; note this script will require modifications for specific calculations, users are responsible for this' parser = argparse.ArgumentParser(description=desc, formatter_class=dhf) parser.add_argument( 'voxdir', help='Directory holding voxel lidar data') parser.add_argument( '--voxtype', help='Type of return data to use for calculations', nargs='*', default=['count']) parser.add_argument( '--metric', help='Metric name user defined, used for naming output image', default=None) parser.add_argument( '--start', help='Low height of relative density region of interest', default=['1']) parser.add_argument( '--stop', help='Top height of relative density region of interest', default=['5']) parser.add_argument( '--pixelsize', help='Output image pixel size, used to aggregate voxels in x-y dimension', default=['1']) parser.add_argument( '-s', '--site', default=None, help='Site shapefile name used for ') parser.add_argument( '--outdir', help='Directory to output metric rasters, directory name should specify type of metric') parser.add_argument( '-o', '--overwrite', default=False, action='store_true', help='Overwrite any existing output files') parser.add_argument( '-v', '--verbose', default=False, action='store_true', help='Print additional info') args = parser.parse_args() start0 = datetime.now() #variables describing region of interest and scale startoff = int(args.start) cutoff = int(args.stop) pixelsize = int(args.pixelsize) if args.metric is None: args.metric = 'rdm-%s_to_%s' %(startoff,cutoff) voxdir = args.voxdir # make sure outdir exists args.outdir = os.path.abspath(args.outdir) if not os.path.exists(args.outdir): os.makedirs(args.outdir) # the final filenames product = args.metric #fouts = os.path.join(args.outdir, '%s.voxel_metric.vrt' % (product)) fouts = {p: os.path.join(args.outdir, '%s.voxel_metric.vrt' % (p)) for p in [product]} # run if any products are 
missing exists = all([os.path.exists(f) for f in fouts.values()]) if exists and not args.overwrite: print 'Already created metric rasters in %s' % (os.path.relpath(args.outdir)) exit(0) # loop through voxel rasters # site = glob.glob('*.%s.*.tif' %(args.voxtype)) # open site vector if args.site is not None: try: site = GeoVector(args.site) except: print 'Error opening %s' % args.site exit(2) else: site = [None] pieces = [] for feature in site: try: # extract naming convention bname = '' if feature is None else feature.Basename() + '_' ftr = bname.split('_')[0] bname = os.path.join(os.path.abspath(voxdir), '%s' % (bname)) vox_name = bname + 'voxels.%s.tif' %(args.voxtype[0]) out = os.path.join(args.outdir, '%s_%s.voxel_metric.tif' % (ftr,product)) print out #open image vox_img = gippy.GeoImage(vox_name) proj = vox_img.Projection() affine = vox_img.Affine() affine[1],affine[5] = pixelsize,-pixelsize vox_arr = vox_img.Read().squeeze() # vox_img = gdal.Open(vox_name) # vox_arr = vox_img.ReadAsArray() nbands,nrows,ncols = vox_arr.shape print 'voxel dimensions: %s, %s, %s' %(nbands,nrows,ncols) sys.stdout.flush() # calculate relative density ratio of returns -- this section of code is the section that should be modified depending on needs # modifications if desired; also note, startoff and cutoff values may not be relevant if code is changed data = aggregate(vox_arr,pixelsize) nbands,nrows,ncols = data.shape print 'aggregated dimensions: %s, %s, %s, Calculating...' 
%data.shape sys.stdout.flush() i1 = numpy.sum(data[startoff+1:cutoff+1],axis=0,dtype=float) i2 = numpy.sum(data,axis=0,dtype=float) ratio = numpy.zeros(i1.shape, dtype=float) ratio[numpy.where(i2>0)] = i1[numpy.where(i2>0)]/i2[numpy.where(i2>0)] transformed = numpy.sqrt(ratio)+0.001 print 'writing image' sys.stdout.flush() #output ratio image imgout = gippy.GeoImage(out,ncols,nrows,1,gippy.GDT_Float32) imgout.SetProjection(proj) imgout.SetAffine(affine) imgout[0].Write(transformed) imgout = None # # modification area ends print 'clipping image to feature' clip_by_site(out,feature) pieces.append(out) except Exception, e: print "Error creating metric: %s" % e if args.verbose: import traceback print traceback.format_exc()
def main():
    """CLI entry point: voxelize classified LAS files into output rasters
    (one product per entry in --voxtypes), running once per site feature
    or once for the whole scene when no site is given."""
    dhf = argparse.ArgumentDefaultsHelpFormatter
    desc = 'Voxelize lidar data to output rasters'
    parser = argparse.ArgumentParser(description=desc, formatter_class=dhf)
    parser.add_argument('lasdir', help='Directory holding classified LAS files')
    parser.add_argument(
        'demdir',
        help='Directory holding DEMs (including DSM and DTM for each feature)')
    parser.add_argument(
        '--voxtypes',
        help=
        'Type of return data in output voxels (e.g. counts, intensity); option to output new CHM with "chm"',
        nargs='*',
        default=['count', 'intensity'])
    parser.add_argument(
        '-s',
        '--site',
        default=None,
        help=
        'Site shapefile name (use if used for DTM/DSM creation); if area of interest is smaller than whole scene, l2d_dems should be run again using voxel region of interest shapefile'
    )
    # NOTE(review): default is the boolean False, but a value passed on the
    # command line arrives as a string; the `== False` test below therefore
    # treats ANY provided value as "vendor classified"
    parser.add_argument(
        '--vendor_classified',
        help=
        'Files are not classified by l2d, the l2d naming scheme was not used for classified files',
        default=False)
    parser.add_argument('--slope', help='Slope (override)', default=None)
    parser.add_argument('--cellsize', help='Cell Size (override)', default=None)
    parser.add_argument('--outdir', help='Directory to output voxel rasters')
    parser.add_argument('-o', '--overwrite', default=False, action='store_true',
                        help='Overwrite any existing output files')
    parser.add_argument('-v', '--verbose', default=False, action='store_true',
                        help='Print additional info')
    args = parser.parse_args()
    start0 = datetime.now()
    # open site vector; without a site, iterate once with feature=None
    if args.site is not None:
        try:
            site = GeoVector(args.site)
        except:
            print 'Error opening %s' % args.site
            exit(2)
    else:
        site = [None]
    # make sure outdir exists
    args.outdir = os.path.abspath(args.outdir)
    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)
    args.lasdir = os.path.abspath(args.lasdir)
    # the final filenames - one VRT per voxel product
    products = args.voxtypes
    fouts = {
        p: os.path.join(args.outdir, '%s.voxels.vrt' % (p))
        for p in products
    }
    # run if any products are missing
    exists = all([os.path.exists(f) for f in fouts.values()])
    if exists and not args.overwrite:
        print 'Already created %s in %s' % (args.voxtypes, os.path.relpath(args.outdir))
        exit(0)
    # loop through features
    pieces = []
    for feature in site:
        try:
            # find las files
            if args.vendor_classified == False:
                parameters = class_params(feature, args.slope, args.cellsize)
                lasfiles = find_classified_lasfile(args.lasdir, site=feature, params=parameters)
            else:
                lasfiles = find_lasfiles(args.lasdir, site=feature, checkoverlap=True)
            # create voxels - perhaps not loop over features, but instead voxelize each tile...for loop over lasfiles here. would need to determine output image dimensions though since they could no longer be pulled from existing feature geotiff.
            pouts = create_voxels(lasfiles, voxtypes=args.voxtypes, demdir=args.demdir, site=feature, outdir=args.outdir, overwrite=args.overwrite)
            pieces.append(pouts)
        except Exception, e:
            # best-effort: report the failed feature and continue with the rest
            print "Error creating voxels: %s" % e
            if args.verbose:
                import traceback
                print traceback.format_exc()