def _read_point(cls, filename, roi, nodata):
    """ Read single point from mean/var file and return if valid, or mean/var of 3x3 neighborhood """
    if not os.path.exists(filename):
        return (numpy.nan, numpy.nan)
    try:
        img = gippy.GeoImage(filename)
        vals = img[0].Read(roi).squeeze()
        variances = img[1].Read(roi)
        vals[numpy.where(vals == nodata)] = numpy.nan
        variances[numpy.where(variances == nodata)] = numpy.nan
        val = numpy.nan
        var = numpy.nan
        if ~numpy.isnan(vals[1, 1]):
            val = vals[1, 1]
        elif numpy.any(~numpy.isnan(vals)):
            val = numpy.mean(vals[~numpy.isnan(vals)])
        if ~numpy.isnan(variances[1, 1]):
            var = variances[1, 1]
        elif numpy.any(~numpy.isnan(variances)):
            var = numpy.mean(variances[~numpy.isnan(variances)])
        img = None
        return (val, var)
    except:
        VerboseOut(traceback.format_exc(), 4)
        return (numpy.nan, numpy.nan)
def load_image(self):
    """Load this asset into a GeoImage and return it."""
    subdatasets = self.datafiles()
    image = gippy.GeoImage(subdatasets)
    colors = self.sensor_spec('colors')
    for i, name in enumerate(colors, 1):
        image.SetBandName(name, i)
    return image
def download(self, key, **kwargs):
    """ Download this key from scene assets """
    if key != 'thumbnail':
        logger.warn('Downloading non-thumbnail images not supported')
    fname = super().download(key, **kwargs)
    if fname is not None:
        geoimg = gippy.GeoImage(fname)
        bname, ext = os.path.splitext(fname)
        wldfile = bname + '.wld'
        coords = self.geometry['coordinates']
        while len(coords) == 1:
            coords = coords[0]
        lats = [c[1] for c in coords]
        lons = [c[0] for c in coords]
        with open(wldfile, 'w') as f:
            f.write('%s\n' % ((max(lons) - min(lons)) / geoimg.xsize()))
            f.write('0.0\n0.0\n')
            f.write('%s\n' % (-(max(lats) - min(lats)) / geoimg.ysize()))
            f.write('%s\n%s\n' % (min(lons), max(lats)))
        srs = '["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,' + \
              'AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],' + \
              'UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]]'
        #with open(fname+'.aux.xml', 'w') as f:
        #    f.write('<PAMDataset><SRS>PROJCS%s</SRS></PAMDataset>' % srs)
        #geoimg = None
        # convert to GeoTiff
        #geoimg = gippy.GeoImage(fname)
        #geoimg.set_srs('epsg:4326')
        #geoimg.save(bname, format='GTIFF', options={'COMPRESS': 'JPEG'})
        #os.remove(fname)
        #os.remove(wldfile)
        #fname = geoimg.filename()
    return fname
def mosaic(images, outfile, vector):
    """ Mosaic multiple files together, but do not warp """
    nd = images[0][0].NoDataValue()
    srs = images[0].Projection()
    # check they all have same projection
    filenames = [images[0].Filename()]
    for f in range(1, images.NumImages()):
        if images[f].Projection() != srs:
            raise Exception("Input files have non-matching projections and must be warped")
        filenames.append(images[f].Filename())
    # transform vector to image projection
    geom = wktloads(transform_shape(vector.WKT(), vector.Projection(), srs))
    extent = geom.bounds
    ullr = "%f %f %f %f" % (extent[0], extent[3], extent[2], extent[1])
    # run merge command
    nodatastr = '-n %s -a_nodata %s -init %s' % (nd, nd, nd)
    cmd = 'gdal_merge.py -o %s -ul_lr %s %s %s' % (outfile, ullr, nodatastr, " ".join(filenames))
    result = commands.getstatusoutput(cmd)
    VerboseOut('%s: %s' % (cmd, result), 4)
    imgout = gippy.GeoImage(outfile, True)
    imgout.SetMeta('GIPS_MOSAIC_SOURCES', ';'.join([os.path.basename(f) for f in filenames]))
    for b in range(0, images[0].NumBands()):
        imgout[b].CopyMeta(images[0][b])
    imgout.CopyColorTable(images[0])
    return crop2vector(imgout, vector)
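# Hypothetical usage sketch for mosaic() above (not part of the original code).
# Assumes the GIPS-era gippy API in which GeoImages takes a list of filenames and
# GeoVector opens a site boundary; all file names here are placeholders and the
# inputs are assumed to share a projection.
def example_mosaic_usage():
    images = gippy.GeoImages(['tile1.tif', 'tile2.tif'])  # hypothetical input tiles
    site = gippy.GeoVector('site.shp')                    # hypothetical site boundary
    return mosaic(images, 'mosaic.tif', site)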
def write_reduced(self, prod, fun, fout, meta, units):
    """ apply a function to reduce to a daily value """
    assetname = self._products[prod]['assets'][0]
    layername = self._products[prod]['layers'][0]
    bandnames = self._products[prod]['bands']
    assetfile = self.assets[assetname].filename
    ncroot = Dataset(assetfile)
    var = ncroot.variables[layername]
    missing = float(var.missing_value)
    scale = var.scale_factor
    assert scale == 1.0, "Handle non-unity scale functions"
    hourly = np.ma.MaskedArray(var[:])
    hourly.mask = (hourly == missing)
    nb, ny, nx = hourly.shape
    # apply reduce rule
    daily = fun(hourly)
    daily[daily.mask] = missing
    utils.verbose_out('writing %s' % fout, 4)
    imgout = gippy.GeoImage(fout, nx, ny, 1, gippy.GDT_Float32)
    imgout[0].Write(np.array(np.flipud(daily)).astype('float32'))
    imgout.SetBandName(prod, 1)
    imgout.SetUnits(units)
    imgout.SetNoData(missing)
    imgout.SetProjection(self._projection)
    imgout.SetAffine(np.array(self._geotransform))
    imgout.SetMeta(self.prep_meta(assetfile, meta))
def _read_point(cls, filename, roi, nodata):
    """ Read single point from mean/var file and return if valid, or mean/var of 3x3 neighborhood """
    if not os.path.exists(filename):
        return (numpy.nan, numpy.nan)
    with utils.error_handler('Unable to read point from {}'.format(filename),
                             continuable=True):
        img = gippy.GeoImage(filename)
        vals = img[0].Read(roi).squeeze()
        variances = img[1].Read(roi)
        vals[numpy.where(vals == nodata)] = numpy.nan
        variances[numpy.where(variances == nodata)] = numpy.nan
        val = numpy.nan
        var = numpy.nan
        if ~numpy.isnan(vals[1, 1]):
            val = vals[1, 1]
        elif numpy.any(~numpy.isnan(vals)):
            val = numpy.mean(vals[~numpy.isnan(vals)])
        if ~numpy.isnan(variances[1, 1]):
            var = variances[1, 1]
        elif numpy.any(~numpy.isnan(variances)):
            var = numpy.mean(variances[~numpy.isnan(variances)])
        img = None
        return (val, var)
    return (numpy.nan, numpy.nan)
def invoke(self):
    # Get inputs
    img = self.get_input_data_port('image')
    threshold = self.get_input_string_port('threshold')
    result_dir = self.get_output_data_port('result')
    os.makedirs(result_dir)
    # vectorize thresholded (i.e., now binary) image
    all_lower = glob2.glob('%s/**/*.tif' % img)
    for img_file in all_lower:
        geoimg = gippy.GeoImage(img_file, True)
        coastline = bfvec.potrace(geoimg[0] > float(threshold),
                                  minsize=defaults['minsize'],
                                  close=defaults['close'],
                                  alphamax=defaults['smooth'])
        # convert coordinates to GeoJSON
        geojson = bfvec.to_geojson(coastline, source=geoimg.basename())
        # write geojson output file
        with open(os.path.join(result_dir, 'result.geojson'), 'w') as f:
            f.write(json.dumps(geojson))
    self.reason = 'Successfully traced raster'
def process(self, *args, **kwargs):
    """ Process all requested products for this tile """
    products = super(sarannualData, self).process(*args, **kwargs)
    if len(products) == 0:
        return
    self.basename = self.basename + '_' + self.sensor_set[0]
    for key, val in products.requested.items():
        fname = os.path.join(self.path, self.basename + '_' + key)
        # Verify that asset exists
        asset = self._products[val[0]]['assets'][0]
        try:
            datafiles = self.assets[asset].extract()
        except:
            VerboseOut("Asset %s doesn't exist for tile %s" % (asset, self.id), 3)
            continue
        if val[0] == 'sign':
            bands = [datafiles[b] for b in ["sl_HH", "sl_HV"] if b in datafiles]
            if len(bands) > 0:
                img = gippy.GeoImage(bands)
                img.SetNoData(0)
                mask = gippy.GeoImage(datafiles['mask'], False)
                img.AddMask(mask[0] == 255)
                imgout = gippy.GeoImage(fname, img, gippy.GDT_Float32)
                imgout.SetNoData(-32768)
                for b in range(0, imgout.NumBands()):
                    imgout.SetBandName(img[b].Description(), b + 1)
                    (img[b].pow(2).log10() * 10 - 83.0).Process(imgout[b])
                fname = imgout.Filename()
                img = None
                imgout = None
            [RemoveFiles([f], ['.hdr', '.aux.xml']) for k, f in datafiles.items() if k != 'hdr']
        if val[0] == 'fnf':
            if 'C' in datafiles:
                # rename both files to product name
                os.rename(datafiles['C'], fname)
                os.rename(datafiles['C'] + '.hdr', fname + '.hdr')
                img = gippy.GeoImage(fname)
                img.SetNoData(0)
                img = None
        self.AddFile(self.sensor_set[0], key, fname)
def process(self, *args, **kwargs):
    """ Make sure all products have been pre-processed """
    products = super(sarData, self).process(*args, **kwargs)
    if len(products) == 0:
        return
    sensor = self.sensor_set[0]
    self.basename = self.basename + '_' + sensor
    # extract all data from archive
    datafiles = self.assets[''].extract()
    meta = self.meta()
    for key, val in products.requested.items():
        fname = os.path.join(self.path, self.basename + '_' + key)
        if val[0] == 'sign':
            bands = [datafiles[b] for b in ["sl_HH", "sl_HV"] if b in datafiles]
            img = gippy.GeoImage(bands)
            img.SetNoData(0)
            mask = gippy.GeoImage(datafiles['mask'], False)
            img.AddMask(mask[0] == 255)
            # apply date mask
            dateimg = gippy.GeoImage(datafiles['date'], False)
            dateday = (self.date - sarAsset._launchdate[sensor[0]]).days
            img.AddMask(dateimg[0] == dateday)
            #imgout = gippy.SigmaNought(img, fname, meta['CF'])
            imgout = gippy.GeoImage(fname, img, gippy.GDT_Float32)
            imgout.SetNoData(-32768)
            for b in range(0, imgout.NumBands()):
                imgout.SetBandName(img[b].Description(), b + 1)
                (img[b].pow(2).log10() * 10 + meta['CF']).Process(imgout[b])
            fname = imgout.Filename()
            img = None
            imgout = None
        if val[0] == 'linci':
            # Note the linci product DOES NOT mask by date
            os.rename(datafiles['linci'], fname)
            os.rename(datafiles['linci'] + '.hdr', fname + '.hdr')
        if val[0] == 'date':
            # Note the date product DOES NOT mask by date
            os.rename(datafiles['date'], fname)
            os.rename(datafiles['date'] + '.hdr', fname + '.hdr')
        self.AddFile(sensor, key, fname)
    # Remove unused files
    # TODO - checking key rather than val[0] (the full product suffix)
    if 'hdr' in datafiles:
        del datafiles['hdr']
    RemoveFiles(datafiles.values(), ['.hdr', '.aux.xml'])
def get_timeseries(self, product='', dates=None):
    """ Read all files as time series """
    if dates is None:
        dates = self.dates
    # TODO - multiple sensors
    filenames = [self.data[date][product] for date in dates]
    img = gippy.GeoImage(filenames)
    return img
def open(self, product, sensor=None, update=False):
    """ Open and return a GeoImage """
    if sensor is None:
        sensor = self.sensors[product]
    try:
        fname = self.filenames[(sensor, product)]
        return gippy.GeoImage(fname)
    except:
        raise Exception('error reading product (%s, %s)' % (sensor, product))
def test_bandmeta(self):
    """ Set metadata on band and retrieve """
    fout = 'test-meta.tif'
    geoimg = gp.GeoImage.create(fout, xsz=100, ysz=100)
    geoimg[0].add_bandmeta('TESTKEY', 'TESTVALUE')
    geoimg = None
    geoimg = gp.GeoImage(fout)
    self.assertEqual(geoimg[0].bandmeta('TESTKEY'), 'TESTVALUE')
    os.remove(fout)
def test_save(self):
    """ Save image as new image with different datatype """
    fout = 'test-byte.tif'
    geoimg = gpt.get_test_image().autoscale(1.0, 255.0).save(fout, 'uint8')
    geoimg = None
    geoimg = gp.GeoImage(fout)
    self.assertEqual(geoimg.type().string(), 'uint8')
    self.assertEqual(geoimg[0].min(), 1.0)
    self.assertEqual(geoimg[0].max(), 255.0)
    os.remove(fout)
def process_qa(self):
    """Produce the qa product."""
    for a_obj in self.assets.values():
        try:
            src_img = gippy.GeoImage(a_obj.filename)
        except:
            os.remove(a_obj.filename)
            return
        # for both asset types the QA band is the last one
        qa_nparray = src_img[len(src_img) - 1].Read()
        temp_fp = self.temp_product_filename(a_obj.sensor, 'qa')
        imgout = gippy.GeoImage(temp_fp, src_img, gippy.GDT_Byte, 1)
        imgout[0].Write(qa_nparray.astype(numpy.uint8))
        imgout.SetMeta(self.prep_meta(a_obj.filename, {'Mask_params': 'QA band'}))
        archived_fp = self.archive_temp_path(temp_fp)
        self.AddFile(a_obj.sensor, 'qa', archived_fp)
def stack(self, suffix='stack', **kwargs):
    """ Stack products (from single date) into single image file """
    for date in self.inv.dates:
        filenames = [self.inv[date].filenames[p] for p in self.inv.products(date)]
        img = gippy.GeoImage(filenames)
        bname = basename(filenames[0])
        bname = bname[0:bname.rfind('_', 0)]
        fout = os.path.join(self.inv.projdir, bname + '_' + suffix)
        imgout = img.Process(fout)
        imgout.CopyMeta(img)
def crop2vector(img, vector):
    """ Crop a GeoImage down to a vector """
    # transform vector to srs of image
    vecname = translate(vector.Filename(), img.Projection())
    warped_vec = gippy.GeoVector(vecname)
    # rasterize the vector
    td = tempfile.mkdtemp()
    mask = gippy.GeoImage(os.path.join(td, vector.LayerName()), img, gippy.GDT_Byte, 1)
    maskname = mask.Filename()
    mask = None
    cmd = 'gdal_rasterize -at -burn 1 -l %s %s %s' % (warped_vec.LayerName(), vecname, maskname)
    result = commands.getstatusoutput(cmd)
    mask = gippy.GeoImage(maskname)
    img.AddMask(mask[0]).Process().ClearMasks()
    mask = None
    shutil.rmtree(os.path.dirname(maskname))
    shutil.rmtree(os.path.dirname(vecname))
    return img
def process_cloudmask(self):
    """Produce the cloudmask product."""
    for a_obj in self.assets.values():
        src_img = gippy.GeoImage(a_obj.filename)
        # for both asset types the QA band is the last one
        qa_nparray = src_img[len(src_img) - 1].Read()
        # cirrus, cloud, adjacent cloud, cloud shadow are bits 0 to 3,
        # where bit 0 is LSB; value of 1 means that thing is present there.
        mask = (qa_nparray & 0b00001111) > 0  # on edit update Mask_params
        # build the product file
        temp_fp = self.temp_product_filename(a_obj.sensor, 'cloudmask')
        imgout = gippy.GeoImage(temp_fp, src_img, gippy.GDT_Byte, 1)
        imgout[0].Write(mask.astype(numpy.uint8))
        imgout.SetNoData(0)  # needed due to particulars of gdal_merge
        imgout.SetMeta(self.prep_meta(
            a_obj.filename, {'Mask_params': 'union of bits 0 to 3'}))
        # imgout.Process()  # TODO needed?
        archived_fp = self.archive_temp_path(temp_fp)
        self.AddFile(a_obj.sensor, 'cloudmask', archived_fp)
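# Illustrative sketch of the bit test used in process_cloudmask() above (not part of
# the original code): a QA value with any of bits 0-3 set (cirrus, cloud, adjacent
# cloud, cloud shadow) is flagged as cloudy; higher bits alone are ignored.
qa_examples = numpy.array([0b00000000, 0b00000100, 0b00010000], dtype=numpy.uint8)
print((qa_examples & 0b00001111) > 0)  # -> [False  True False]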
def acolite_nc_to_prods(products, nc_file, meta, model_image):
    dsroot = netCDF4.Dataset(nc_file)
    prodout = dict()
    for p_type, p_fp in products.items():
        p_spec = _aco_prod_templs[p_type]
        verbose_out('acolite processing: extracting {} to {}'.format(p_type, p_fp), 2)
        aco_key = p_spec['acolite-key']
        bands = ([aco_key] if not p_spec.get('acolite-key-regex', False)
                 else [k for k in dsroot.variables if re.search(aco_key, k)])
        if len(bands) == 0:
            raise IOError("Couldn't find `{}` in {}".format(aco_key, nc_file))
        verbose_out('Found bands for {}: {}'.format(p_type, bands), 5)
        npdtype = p_spec['dtype']
        dtype, missing = _aco_img_params[npdtype]
        gain = p_spec.get('gain', 1.0)
        offset = p_spec.get('offset', 0.0)
        imgout = gippy.GeoImage(p_fp, model_image, dtype, len(bands))
        pmeta = {'description': p_spec['description']}
        pmeta.update(meta)
        imgout.SetMeta(pmeta)
        for i, b in enumerate(bands):
            imgout.SetBandName(str(b), i + 1)
            arr = numpy.array(dsroot.variables[b][:])
            fill = getattr(dsroot.variables[b], '_FillValue', _acolite_ndv)
            mask = arr != fill
            arr[numpy.invert(mask)] = missing
            arr[mask] = ((arr[mask] - offset) / gain)
            verbose_out('acolite processing: writing band {} of {}'.format(i, p_fp), 2)
            imgout[i].Write(arr.astype(npdtype))
        prodout[p_type] = imgout.Filename()
        imgout = None
        imgout = gippy.GeoImage(p_fp, True)
        imgout.SetGain(gain)
        imgout.SetOffset(offset)
        imgout.SetNoData(missing)
    return prodout
def crop2vector(img, vector):
    """ Crop a GeoImage down to a vector - only used by mosaic """
    # transform vector to srs of image
    vecname = transform(vector.Filename(), img.Projection())
    warped_vec = open_vector(vecname)
    # rasterize the vector
    td = tempfile.mkdtemp()
    mask = gippy.GeoImage(os.path.join(td, vector.LayerName()), img, gippy.GDT_Byte, 1)
    maskname = mask.Filename()
    mask = None
    cmd = 'gdal_rasterize -at -burn 1 -l %s %s %s' % (warped_vec.LayerName(), vecname, maskname)
    result = commands.getstatusoutput(cmd)
    VerboseOut('%s: %s' % (cmd, result), 4)
    mask = gippy.GeoImage(maskname)
    img.AddMask(mask[0]).Process().ClearMasks()
    mask = None
    shutil.rmtree(os.path.dirname(maskname))
    shutil.rmtree(os.path.dirname(vecname))
    # VerboseOut('Cropped to vector in %s' % (datetime.now() - start), 3)
    return img
def create_chm(dtm, dsm, chm):
    """ Create CHM from a DTM and DSM - assumes common grid """
    dtm_img = gippy.GeoImage(dtm)
    dsm_img = gippy.GeoImage(dsm)
    imgout = gippy.GeoImage(chm, dtm_img)
    # set nodata
    dtm_nodata = dtm_img[0].NoDataValue()
    dsm_nodata = dsm_img[0].NoDataValue()
    nodata = dtm_nodata
    imgout.SetNoData(nodata)
    dsm_arr = dsm_img[0].Read()
    dtm_arr = dtm_img[0].Read()
    # ensure same size arrays, clip to smallest
    s1 = dsm_arr.shape
    s2 = dtm_arr.shape
    if s1 != s2:
        if s1[0] > s2[0]:
            dsm_arr = dsm_arr[0:s2[0], :]
        elif s2[0] > s1[0]:
            dtm_arr = dtm_arr[0:s1[0], :]
        if s1[1] > s2[1]:
            dsm_arr = dsm_arr[:, 0:s2[1]]
        elif s2[1] > s1[1]:
            dtm_arr = dtm_arr[:, 0:s1[1]]
    arr = dsm_arr - dtm_arr
    # set to nodata if no ground pixel
    arr[dtm_arr == dtm_nodata] = nodata
    # set to nodata if no surface pixel
    locs = numpy.where(dsm_arr == dsm_nodata)
    arr[locs] = nodata
    imgout[0].Write(arr)
    return imgout.Filename()
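# Hypothetical usage sketch for create_chm() above (not part of the original code).
# File names are placeholders; assumes the DTM and DSM rasters sit on a common grid,
# as the function's docstring requires. Returns the path of the written CHM raster.
chm_file = create_chm('dtm.tif', 'dsm.tif', 'chm.tif')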
def gridded_mosaic(images, outfile, rastermask, interpolation=0):
    """ Mosaic multiple files to grid and mask specified in rastermask """
    nd = images[0][0].NoDataValue()
    mask_img = gippy.GeoImage(rastermask)
    srs = mask_img.Projection()
    filenames = [images[0].Filename()]
    for f in range(1, images.NumImages()):
        filenames.append(images[f].Filename())
    imgout = gippy.GeoImage(outfile, mask_img, images[0].DataType(), images[0].NumBands())
    imgout.SetNoData(nd)
    #imgout.ColorTable(images[0])
    nddata = np.empty((images[0].NumBands(), mask_img.YSize(), mask_img.XSize()))
    nddata[:] = nd
    imgout.Write(nddata)
    imgout = None
    # run warp command
    resampler = ['near', 'bilinear', 'cubic']
    cmd = "gdalwarp -t_srs '{}' -r {} {} {}".format(
        srs, resampler[interpolation], " ".join(filenames), outfile)
    status, output = commands.getstatusoutput(cmd)
    verbose_out(' COMMAND: {}\n exit_status: {}\n output: {}'.format(cmd, status, output), 4)
    imgout = gippy.GeoImage(outfile, True)
    imgout.SetMeta('GIPS_GRIDDED_MOSAIC_SOURCES',
                   ';'.join([os.path.basename(f) for f in filenames]))
    for b in range(0, images[0].NumBands()):
        imgout[b].CopyMeta(images[0][b])
    imgout.AddMask(mask_img[0])
    imgout.Process()
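# Hypothetical usage sketch for gridded_mosaic() above (not part of the original code).
# File names are placeholders; interpolation=1 selects bilinear resampling in the
# gdalwarp call, per the resampler list inside the function.
gridded_mosaic(gippy.GeoImages(['t1.tif', 't2.tif']), 'out.tif', 'grid_mask.tif',
               interpolation=1)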
def test_nodata(self):
    """ Set nodata and retrieve """
    fout = 'test-nodata.tif'
    geoimg = gp.GeoImage.create(fout, xsz=100, ysz=100)
    geoimg.set_nodata(1)
    self.assertEqual(geoimg[0].nodata(), 1)
    geoimg = None
    geoimg = gp.GeoImage(fout)
    self.assertEqual(geoimg[0].nodata(), 1)
    # check that entire array is nan
    arr = np.where(geoimg.read() == np.nan)
    self.assertEqual(len(arr[0]), 0)
    self.assertEqual(len(arr[1]), 0)
    os.remove(fout)
def _readqa(self):
    # make sure metadata is loaded
    if not hasattr(self, 'metadata'):
        self.meta()
    if settings().REPOS[self.Repository.name.lower()]['extract']:
        # Extract files
        qadatafile = self.assets['DN'].extract([self.metadata['qafilename']])
    else:
        # Use tar.gz directly using GDAL's virtual filesystem
        qadatafile = os.path.join('/vsitar/' + self.assets['DN'].filename,
                                  self.metadata['qafilename'])
    qaimg = gippy.GeoImage(qadatafile)
    return qaimg
def vectorize(img, vector, oformat=None):
    """ Create vector from img using gdal_polygonize.

    oformat -- defaults to (due to ogr2ogr) "ESRI Shapefile"
    """
    conn_opt = '-8'  # avoid islands as much as possible
    fmt = ''
    if oformat:
        fmt = '-f "{}"'.format(oformat)

    def gso_run(cmd, emsg):
        '''simple shell command wrapper'''
        with error_handler(emsg):
            verbose_out('Running: {}'.format(cmd), 4)
            status, output = commands.getstatusoutput(cmd)
            if status != 0:
                verbose_out('++\n Ran command:\n {}\n\n++++\n Console output:\n {}\n++\n'
                            .format(cmd, output), 1)
                raise RuntimeError(emsg)

    # Grab projection because gml doesn't carry it around by default
    wkt = gippy.GeoImage(img).Projection()
    # vectorize the raster in a temp working dir
    with make_temp_dir(prefix='vectorize') as td:
        tvec = os.path.join(td, os.path.basename(vector)[:-4] + '.gml')
        polygonize = ('gdal_polygonize.py {CONNECTEDNESS} {IMAGE} {VECTOR}'.format(
            CONNECTEDNESS=conn_opt, IMAGE=img, VECTOR=tvec))
        emsg = 'Error vectorizing raster {} to {}'.format(img, tvec)
        gso_run(polygonize, emsg)
        if gippy.GeoVector(tvec).NumFeatures() != 1:
            ivec = tvec
            tvec = tvec[:-4] + '_dissolve.gml'
            dissolve = ('ogr2ogr -f GML {OVEC} {IVEC} -dialect sqlite '
                        '-sql "SELECT DN as DN, ST_Union(geometryProperty) as '
                        'geometry FROM out GROUP BY DN"'.format(OVEC=tvec, IVEC=ivec))
            emsg = 'Error dissolving {} to {}'.format(ivec, tvec)
            gso_run(dissolve, emsg)
        make_final_prod = ("ogr2ogr {FMT} -a_srs '{WKT}' '{OVEC}' '{IVEC}'".format(
            FMT=fmt, WKT=wkt, OVEC=vector, IVEC=tvec))
        emsg = 'Error writing final output from {} to {}'.format(tvec, vector)
        gso_run(make_final_prod, emsg)
    return vector
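# Hypothetical usage sketch for vectorize() above (not part of the original code).
# Paths are placeholders; with oformat left as None the final ogr2ogr call falls back
# to its ESRI Shapefile default, which also matches the 4-character extension the
# function strips when it builds the intermediate GML name.
out_vec = vectorize('mask.tif', 'mask_poly.shp')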
def gap_fill(filenames, fout, site=None, interpolation='nearest'):
    """ Gap fill from higher radius DTMs, then fill remainder with interpolation """
    start = datetime.now()
    from scipy.interpolate import griddata
    if len(filenames) == 0:
        raise Exception('No filenames provided!')
    filenames = sorted(filenames)
    imgs = gippy.GeoImages(filenames)
    nodata = imgs[0][0].NoDataValue()
    arr = imgs[0][0].Read()
    for i in range(1, imgs.size()):
        locs = numpy.where(arr == nodata)
        arr[locs] = imgs[i][0].Read()[locs]
    # interpolation at bad points
    goodlocs = numpy.where(arr != nodata)
    badlocs = numpy.where(arr == nodata)
    arr[badlocs] = griddata(goodlocs, arr[goodlocs], badlocs, method=interpolation)
    # write output
    imgout = gippy.GeoImage(fout, imgs[0])
    imgout.SetNoData(nodata)
    imgout[0].Write(arr)
    fout = imgout.Filename()
    imgout = None
    # align and clip
    if site is not None:
        from osgeo import gdal
        # get resolution
        ds = gdal.Open(fout, gdal.GA_ReadOnly)
        gt = ds.GetGeoTransform()
        ds = None
        parts = splitexts(fout)
        _fout = parts[0] + '_clip' + parts[1]
        CookieCutter(gippy.GeoImages([fout]), site, _fout, gt[1], abs(gt[5]), True)
        if os.path.exists(fout):
            os.remove(fout)
        os.rename(_fout, fout)
    print 'Completed gap-filling to create %s in %s' % (os.path.relpath(fout), datetime.now() - start)
    return fout
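# Hypothetical usage sketch for gap_fill() above (not part of the original code).
# File names are placeholders for DTMs produced with increasing search radii; the
# function sorts them itself before filling gaps in the first with the others.
filled = gap_fill(['dtm_r1.tif', 'dtm_r2.tif', 'dtm_r3.tif'], 'dtm_filled.tif')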
def test_persistent_metadata(self):
    """ Write metadata and check for persistence after reopening """
    fout = 'test-meta.tif'
    geoimg = gp.GeoImage.create(fout, xsz=1000, ysz=1000, nb=3)
    geoimg.set_bandnames(['red', 'green', 'blue'])
    geoimg.set_nodata(7)
    self.assertEqual(geoimg.bandnames()[0], 'red')
    geoimg = None
    # reopen
    geoimg = gp.GeoImage(fout)
    self.assertEqual(geoimg[0].nodata(), 7)
    self.assertEqual(list(geoimg.bandnames()), ['red', 'green', 'blue'])
    geoimg = None
    os.remove(fout)
def open_tile(filename):
    """ Open a tile image and assign projection and geotransform """
    geoimg = gippy.GeoImage(filename, True)
    z, x, y = map(int, geoimg.basename().split('-')[0:4])
    tile = Tile.from_google(google_x=x, google_y=y, zoom=z)
    geoimg.set_srs('EPSG:3857')
    minpt = tile.bounds[0].meters
    maxpt = tile.bounds[1].meters
    affine = np.array([
        minpt[0], (maxpt[0] - minpt[0]) / geoimg.xsize(), 0.0,
        maxpt[1], 0.0, -(maxpt[1] - minpt[1]) / geoimg.ysize()
    ])
    geoimg.set_affine(affine)
    geoimg.set_nodata(-1)
    return geoimg
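# Hypothetical usage sketch for open_tile() above (not part of the original code).
# The file name is a placeholder whose basename encodes zoom-x-y in the Google/XYZ
# tile scheme, which is exactly what the z, x, y parser in the function expects.
tile_img = open_tile('19-154780-197922.tif')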
def invoke(self):
    # Get inputs
    img = self.get_input_data_port('image')
    # calculate optimal threshold
    all_lower = glob2.glob('%s/**/*.tif' % img)
    for img_file in all_lower:
        geoimg = gippy.GeoImage(img_file, True)
        threshold = bfproc.otsu_threshold(geoimg[0])
        self.set_output_string_port('threshold', threshold)
        print "Otsu's threshold = ", threshold
    self.reason = 'Successfully computed Otsu threshold'
def test_overviews(self):
    """ Add overviews to an image """
    fout = 'test-overviews.tif'
    geoimg = gp.GeoImage.create(filename=fout, xsz=1000, ysz=1000, nb=2)
    fout = geoimg.filename()
    # add overviews
    geoimg.add_overviews()
    # clear overviews
    geoimg.add_overviews(levels=[])
    self.assertFalse(os.path.exists(fout + '.ovr'))
    geoimg = None
    geoimg = gp.GeoImage(fout, False)
    geoimg.add_overviews()
    self.assertTrue(os.path.exists(fout + '.ovr'))
    os.remove(fout)
    os.remove(fout + '.ovr')
def test_gain_and_offset(self):
    """ Set and retrieve gain and offset """
    fout = 'test-gainoffset.tif'
    gains = [2.0, 3.0]
    offsets = [4.0, 5.0]
    geoimg = gp.GeoImage.create(fout, nb=2)
    geoimg[0].set_gain(gains[0])
    geoimg[1].set_gain(gains[1])
    geoimg[0].set_offset(offsets[0])
    geoimg[1].set_offset(offsets[1])
    # check persistence
    geoimg = None
    geoimg = gp.GeoImage(fout)
    for i in range(0, 2):
        self.assertEqual(geoimg[i].gain(), gains[i])
        self.assertEqual(geoimg[i].offset(), offsets[i])
    os.remove(fout)