def main():
    dhf = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(description='Classify LAS file(s)', formatter_class=dhf)
    parser.add_argument('lasdir', help='Directory of LAS file(s) to classify')
    parser.add_argument('-s', '--site', help='Polygon(s) to process', default=None)
    h = 'Amount to buffer out site polygons when merging LAS files'
    parser.add_argument('-b', '--buff', help=h, default=20)
    parser.add_argument('--slope', help='Slope (override)', default=None)
    parser.add_argument('--cellsize', help='Cell Size (override)', default=None)
    parser.add_argument('--maxWindowSize', help='Max Window Size (override)', default=None)
    parser.add_argument('--maxDistance', help='Max Distance (override)', default=None)
    parser.add_argument('--outdir', help='Output directory location', default='./')
    h = 'Decimate the points (steps between points, 1 is no pruning)'
    parser.add_argument('--decimation', help=h, default=None)
    parser.add_argument('-o', '--overwrite', default=False, action='store_true',
                        help='Overwrite any existing output files')
    parser.add_argument('-v', '--verbose', help='Print additional info',
                        default=False, action='store_true')
    args = parser.parse_args()

    start = datetime.now()

    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)

    if args.site is not None:
        site = gippy.GeoVector(args.site)
    else:
        site = [None]

    fouts = []
    for feature in site:
        # get output filename
        fout = get_classification_filename(feature, args.outdir, args.slope, args.cellsize)
        # retrieve parameters from input site
        slope, cellsize = class_params(feature, args.slope, args.cellsize)
        if not os.path.exists(fout) or args.overwrite:
            try:
                filenames = find_lasfiles(args.lasdir, site=feature, checkoverlap=True)
                fout = classify(filenames, fout, slope=slope, cellsize=cellsize,
                                site=feature, buff=args.buff,
                                decimation=args.decimation, verbose=args.verbose)
            except Exception as e:
                print "Error creating %s: %s" % (os.path.relpath(fout), e)
                if args.verbose:
                    import traceback
                    print traceback.format_exc()
        fouts.append(fout)

    print 'l2d_classify completed in %s' % (datetime.now() - start)
def vectorize(img, vector, oformat=None):
    """ Create vector from img using gdal_polygonize.

    oformat -- defaults to (due to ogr2ogr) "ESRI Shapefile"
    """
    conn_opt = '-8'  # avoid islands as much as possible
    fmt = ''
    if oformat:
        fmt = '-f "{}"'.format(oformat)

    def gso_run(cmd, emsg):
        '''simple shell command wrapper'''
        with error_handler(emsg):
            verbose_out('Running: {}'.format(cmd), 4)
            status, output = commands.getstatusoutput(cmd)
            if status != 0:
                verbose_out(
                    '++\n Ran command:\n {}\n\n++++\n Console output:\n {}\n++\n'
                    .format(cmd, output), 1)
                raise RuntimeError(emsg)

    # Grab projection because gml doesn't carry it around by default
    wkt = gippy.GeoImage(img).Projection()

    # polygonize the raster into a temporary vector
    with make_temp_dir(prefix='vectorize') as td:
        tvec = os.path.join(td, os.path.basename(vector)[:-4] + '.gml')
        polygonize = (
            'gdal_polygonize.py {CONNECTEDNESS} {IMAGE} {VECTOR}'.format(
                CONNECTEDNESS=conn_opt, IMAGE=img, VECTOR=tvec))
        emsg = 'Error vectorizing raster {} to {}'.format(img, tvec)
        gso_run(polygonize, emsg)

        # dissolve to a single feature if polygonize produced more than one
        if gippy.GeoVector(tvec).NumFeatures() != 1:
            ivec = tvec
            tvec = tvec[:-4] + '_dissolve.gml'
            dissolve = ('ogr2ogr -f GML {OVEC} {IVEC} -dialect sqlite '
                        '-sql "SELECT DN as DN, ST_Union(geometryProperty) as '
                        'geometry FROM out GROUP BY DN"'.format(OVEC=tvec, IVEC=ivec))
            emsg = 'Error dissolving {} to {}'.format(ivec, tvec)
            gso_run(dissolve, emsg)

        make_final_prod = (
            "ogr2ogr {FMT} -a_srs '{WKT}' '{OVEC}' '{IVEC}'".format(
                FMT=fmt, WKT=wkt, OVEC=vector, IVEC=tvec))
        emsg = 'Error writing final output from {} to {}'.format(tvec, vector)
        gso_run(make_final_prod, emsg)

    return vector
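# Hedged usage sketch (not from the source): one way vectorize() above could be
# called to turn a classified raster into a shapefile. The file names
# 'classified.tif' and 'classified.shp' are hypothetical placeholders.
if __name__ == '__main__':
    # polygonize the raster and write the single dissolved feature as a Shapefile
    out = vectorize('classified.tif', 'classified.shp', oformat='ESRI Shapefile')
    print 'wrote {}'.format(out)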
def test_cookiecutter_real(self):
    """ Cookie cutter on single real image """
    geoimg = gpt.get_test_image().select(['red'])  #, 'green', 'blue'])
    vpath = os.path.join(os.path.dirname(__file__), 'vectors')
    # test with feature of different projection
    feature = gp.GeoVector(os.path.join(vpath, 'aoi1_epsg4326.shp'))
    extin = feature.extent()
    imgout = alg.cookie_cutter([geoimg], feature=feature[0], xres=0.0003, yres=0.0003)
    extout = imgout.extent()
    self.assertAlmostEqual(extout.x0(), extin.x0())
    self.assertAlmostEqual(extout.y0(), extin.y0())
    self.assertAlmostEqual(extout.x1(), extin.x1())
    self.assertAlmostEqual(extout.y1(), extin.y1())
def test_cookiecutter_real_crop(self):
    """ Test cookie cutter with cropping """
    geoimg = gpt.get_test_image().select(['red', 'green', 'blue'])
    vpath = os.path.join(os.path.dirname(__file__), 'vectors')
    feature = gp.GeoVector(os.path.join(vpath, 'aoi1_epsg32416.shp'))
    imgout = alg.cookie_cutter([geoimg], feature=feature[0], xres=30.0, yres=30.0, crop=True)
    extin = feature.extent()
    extout = imgout.extent()
    self.assertTrue(extout.x0() >= extin.x0())
    self.assertTrue(extout.y0() >= extin.y0())
    self.assertTrue(extout.x1() <= extin.x1())
    self.assertTrue(extout.y1() <= extin.y1())
def test_cookiecutter_real_reproj(self):
    """ Test with different projection """
    geoimg = gpt.get_test_image().select(['red', 'green', 'blue'])
    vpath = os.path.join(os.path.dirname(__file__), 'vectors')
    feature = gp.GeoVector(os.path.join(vpath, 'aoi1_epsg32416.shp'))
    extin = feature.extent()
    # test extent matches feature
    imgout = alg.cookie_cutter([geoimg], feature=feature[0], xres=30.0, yres=30.0)
    extout = imgout.extent()
    self.assertAlmostEqual(extout.x0(), extin.x0())
    self.assertAlmostEqual(extout.y0(), extin.y0())
    self.assertAlmostEqual(extout.x1(), extin.x1())
    self.assertAlmostEqual(extout.y1(), extin.y1())
def test_cookiecutter_real_reproj(self):
    """ Test with different projection """
    geoimg = gpt.get_test_image().select(['red', 'green', 'blue'])
    vpath = os.path.join(os.path.dirname(__file__), 'vectors')
    feature = gp.GeoVector(os.path.join(vpath, 'aoi1_epsg32416.shp'))
    extin = feature.extent()
    # test extent matches feature
    imgout = alg.cookie_cutter([geoimg], feature=feature[0], xres=30.0, yres=30.0)
    extout = imgout.extent()
    self.assertAlmostEqual(extout.x0() + 15, extin.x0())
    self.assertAlmostEqual(extout.y0() + 15, extin.y0())
    # cookie cutter will never add more than a pixel and a half in width
    self.assertTrue(extout.x1() - extin.x1() < 45.0)
    self.assertTrue(extout.y1() - extin.y1() < 45.0)
    self.assertEqual(imgout.resolution().x(), 30.0)
    self.assertEqual(imgout.resolution().y(), -30.0)
def generate_stats():
    geotifs = [
        op.join(os.getcwd(), f) for f in os.listdir('.') if f.endswith('.tif')
    ]
    geojsons = [
        op.join(os.getcwd(), geoj) for geoj in os.listdir('.')
        if geoj.endswith('.geojson')
    ]
    output_path = op.join(os.getcwd(), 'outputs')
    output_files = []

    if not op.isdir(output_path):
        makedirs(output_path)

    for geotif in geotifs:
        for geojson in geojsons:
            fname = op.splitext(geotif)[0]
            basename = fname.split('/')[-1]
            filename = op.splitext(geojson)[0]
            geo_fname = filename.split('/')[-1]
            date = basename[10:16]
            tile = basename[28:35]

            geoimg = gippy.GeoImage.open([geotif])
            geoimg.set_nodata(0)
            geovec = gippy.GeoVector(geojson)
            res = geoimg.resolution()

            output_filename = '{}_{}_{}.csv'.format(geo_fname, date, tile)
            f = open(output_filename, 'w')
            f.write('min,max,mean,stddev,skew,count\n')

            fout = geoimg.basename() + '_{}.tif'.format(str(geo_fname))
            imgout = alg.cookie_cutter([geoimg], fout, geovec[0],
                                       xres=res.x(), yres=res.y(),
                                       proj=geoimg.srs())
            stats = imgout[0].stats()
            f.write(','.join([str(s) for s in stats]) + '\n')
            f.close()

            if output_filename not in output_files:
                output_files.append(output_filename)

            shutil.move(os.path.join('.', fout), os.path.join(output_path, fout))
            print('{} site stats extracted!'.format(geojson))

    return output_files
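# Hedged usage sketch (not from the source): generate_stats() above works on the
# current working directory, so a caller only needs to chdir into the folder
# holding the paired .tif and .geojson files; 'data' is a hypothetical placeholder.
if __name__ == '__main__':
    os.chdir('data')
    csvs = generate_stats()
    print('wrote stats to: {}'.format(', '.join(csvs)))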
def test_cookiecutter_real(self):
    """ Cookie cutter on single real image """
    geoimg = gpt.get_test_image().select(['red'])  #, 'green', 'blue'])
    iext = geoimg.extent()
    vpath = os.path.join(os.path.dirname(__file__), 'vectors')
    # test with feature of different projection
    feature = gp.GeoVector(os.path.join(vpath, 'aoi1_epsg4326.shp'))
    extin = feature.extent()
    imgout = alg.cookie_cutter([geoimg], feature=feature[0], xres=0.0003, yres=0.0003)
    extout = imgout.extent()
    self.assertAlmostEqual(extout.x0() + 0.00015, extin.x0())
    self.assertAlmostEqual(extout.y0() + 0.00015, extin.y0())
    # cookie cutter will never add more than a pixel and a half in width
    self.assertTrue(extout.x1() - extin.x1() < 0.0045)
    self.assertTrue(extout.y1() - extin.y1() < 0.0045)
    self.assertAlmostEqual(imgout.resolution().x(), 0.0003)
    self.assertAlmostEqual(imgout.resolution().y(), -0.0003)
def crop2vector(img, vector):
    """ Crop a GeoImage down to a vector """
    # transform vector to srs of image
    vecname = translate(vector.Filename(), img.Projection())
    warped_vec = gippy.GeoVector(vecname)
    # rasterize the vector into a byte mask aligned with the image
    td = tempfile.mkdtemp()
    mask = gippy.GeoImage(os.path.join(td, vector.LayerName()), img, gippy.GDT_Byte, 1)
    maskname = mask.Filename()
    mask = None
    cmd = 'gdal_rasterize -at -burn 1 -l %s %s %s' % (warped_vec.LayerName(), vecname, maskname)
    result = commands.getstatusoutput(cmd)
    mask = gippy.GeoImage(maskname)
    img.AddMask(mask[0]).Process().ClearMasks()
    mask = None
    shutil.rmtree(os.path.dirname(maskname))
    shutil.rmtree(os.path.dirname(vecname))
    return img
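# Hedged usage sketch (not from the source): cropping an image to a site vector
# with crop2vector() above, assuming the older gippy API this function relies on
# (GeoImage/GeoVector with capitalized methods). 'scene.tif' and 'site.shp' are
# hypothetical placeholders.
if __name__ == '__main__':
    img = gippy.GeoImage('scene.tif')
    site = gippy.GeoVector('site.shp')
    img = crop2vector(img, site)  # masks out pixels falling outside the polygon
    print img.Filename()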
def main(scenes, aoi, datadir='./', bands=None):
    scenes = Scenes.load(scenes)
    print(scenes.text_calendar())
    bname = os.path.splitext(os.path.basename(aoi))[0]
    features = gippy.GeoVector(aoi)

    if not os.path.exists(datadir):
        os.makedirs(datadir)

    gippy.Options.set_verbose(5)
    #opts = {'COMPRESS': 'DEFLATE', 'PREDICTOR': '2', 'TILED': 'YES', 'BLOCKXSIZE': '512', 'BLOCKYSIZE': '512'}
    #opts = {'COMPRESS': 'LZW', 'TILED': 'YES', 'BLOCKXSIZE': '512', 'BLOCKYSIZE': '512'}
    opts = {'TILED': 'YES', 'BLOCKXSIZE': '512', 'BLOCKYSIZE': '512'}

    for date in scenes.dates():
        _scenes = [s for s in scenes if s.date == date]
        outname = '%s_%s.tif' % (_scenes[0].date, _scenes[0].platform)
        fout = os.path.join(datadir, outname)
        if not os.path.exists(fout):
            try:
                geoimgs = []
                for s in _scenes:
                    links = s.links()
                    if bands is None:
                        bands = links.keys()
                    filenames = [links[k].replace('https:/', '/vsicurl')
                                 for k in sorted(links) if k in bands]
                    geoimg = gippy.GeoImage.open(filenames)
                    geoimg.set_nodata(0)
                    geoimgs.append(geoimg)
                # default to first image res and srs
                res = geoimgs[0].resolution()
                imgout = algs.cookie_cutter(geoimgs, fout, features[0],
                                            xres=res.x(), yres=res.y(),
                                            proj=geoimgs[0].srs(), options=opts)
            except Exception as err:
                print('Error: ', str(err))
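# Hedged usage sketch (not from the source): how main() above might be invoked
# against a saved sat-search Scenes file. 'scenes.json', 'aoi.geojson', 'mosaics'
# and the band keys are hypothetical placeholders; the keys must match whatever
# each scene's links() dictionary actually contains.
if __name__ == '__main__':
    main('scenes.json', 'aoi.geojson', datadir='mosaics', bands=['B04', 'B03', 'B02'])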
def main():
    dhf = argparse.ArgumentDefaultsHelpFormatter
    desc = 'Calculate and create CHM from a DSM and DTM'
    parser = argparse.ArgumentParser(description=desc, formatter_class=dhf)
    parser.add_argument(
        'demdir', help='Directory holding DEMs (and used to store CHM output)')
    parser.add_argument(
        '-s', '--site', default=None,
        help='Site shapefile name (use if used for DTM/DSM creation)')
    parser.add_argument(
        '--dsm', default='dsm.max.tif',
        help='Filename of DSM input (will be preceded by feature name if using shapefile)')
    parser.add_argument(
        '--dtm', default='dtm.idw.tif',
        help='Filename of DTM input (will be preceded by feature name if using shapefile)')
    parser.add_argument('--fout', default='chm.tif',
                        help='Output filename (created in demdir)')
    parser.add_argument('--hillshade', default=False, action='store_true',
                        help='Generate hillshade')
    parser.add_argument('-v', '--verbose', default=False, action='store_true',
                        help='Print additional info')
    args = parser.parse_args()

    start = dt.datetime.now()
    print 'Creating CHM from DEMs in %s' % (os.path.relpath(args.demdir))

    if args.site is not None:
        site = gippy.GeoVector(args.site)
    else:
        site = [None]

    fout_final = os.path.join(args.demdir, os.path.splitext(args.fout)[0] + '.vrt')

    fouts = []
    hillfouts = []
    for feature in site:
        prefix = os.path.join(
            args.demdir, '' if feature is None else feature.Basename() + '_')
        fdtm = prefix + args.dtm
        fdsm = prefix + args.dsm
        if not os.path.exists(fdtm) or not os.path.exists(fdsm):
            print "No valid input files found (%s)" % prefix
            continue
        try:
            fout = create_chm(fdtm, fdsm, prefix + args.fout)
            fouts.append(fout)
        except Exception as e:
            print "Error creating %s: %s" % (fout, e)
            if args.verbose:
                import traceback
                print traceback.format_exc()
        if args.hillshade:
            hillfouts.append(create_hillshade(fout))

    # if multiple file output then combine them together
    if len(fouts) > 0 and site[0] is not None:
        create_vrt(fouts, fout_final, site=site)
        if args.hillshade:
            fout = os.path.splitext(fout_final)[0] + '_hillshade.tif'
            create_vrt(hillfouts, fout, site=site)

    print 'Completed %s in %s' % (fout_final, dt.datetime.now() - start)
def setUp(self):
    """ Set up test environment """
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.features = gippy.GeoVector(self.vfilename)
def fetch(self, key, aoi, pansharpen=False, acomp=False, dra=False, **kwargs):
    if self.order():
        # create tempfile for AOI
        with tempfile.NamedTemporaryFile(suffix='.geojson', mode='w', delete=False) as f:
            aoiname = f.name
            aoistr = json.dumps(aoi)
            f.write(aoistr)
        geovec = gippy.GeoVector(aoiname)
        ext = geovec.extent()
        bbox = [ext.x0(), ext.y0(), ext.x1(), ext.y1()]

        # defaults
        spec = ''
        pansharpen = False
        acomp = False
        dra = False
        nodata = 0 if self['eo:platform'] in ['GEOEYE01', 'QUICKBIRD02'] else -1e10
        opts = COG

        # set options
        if key == 'rgb':
            spec = 'rgb'
            nodata = 0
            #opts = JPEG_COG
        elif key == 'rgb-pan':
            pansharpen = True
            spec = 'rgb'
            nodata = 0
        elif key == 'visual':
            pansharpen = True
            dra = True
            nodata = 0
            #opts = JPEG_COG
        elif key == 'analytic':
            acomp = True

        fout = os.path.join(self.get_path(), self.get_filename(suffix='_%s' % key)) + '.tif'
        with TemporaryDirectory() as temp_dir:
            try:
                if not os.path.exists(fout):
                    logger.info('Fetching %s: %s' % (key, fout))
                    # TODO - allow for other projections
                    img = CatalogImage(self['id'], pansharpen=pansharpen, acomp=acomp,
                                       dra=dra, bbox=bbox)  #, proj=utm_epsg(scenes.center()))
                    tmp_fout1 = os.path.join(temp_dir, '%s_%s_1.tif' % (self['id'], key))
                    tmp_fout2 = os.path.join(temp_dir, '%s_%s_2.tif' % (self['id'], key))
                    tif = img.geotiff(path=tmp_fout1, proj='EPSG:4326', spec=spec)
                    # clip and save
                    geoimg = gippy.GeoImage(tif, True)
                    # workaround for gbdxtools scaling
                    if key in ['rgb', 'visual']:
                        geoimg = geoimg.autoscale(1, 255).save(tmp_fout2)
                    geoimg.set_nodata(0)
                    # this clips the image to the AOI
                    res = geoimg.resolution()
                    imgout = alg.cookie_cutter([geoimg], fout, geovec[0],
                                               xres=res.x(), yres=res.y(),
                                               proj=geoimg.srs(), options=opts)
                    imgout.add_overviews([2, 4, 8, 16], resampler='average')
                    imgout = None
            except Exception as e:
                logger.warning('Error fetching: %s' % str(e))
                #logger.warning('Traceback: %s', traceback.format_exc())
        os.remove(aoiname)
        return fout
def setUp(self):
    """ Open vector file """
    self.features = gippy.GeoVector(self.vfilename)