def test_cookiecutter_gain(self):
    """ Cookie cutter on int image with floating point gain """
    bbox = np.array([0.0, 0.0, 1.0, 1.0])
    geoimg = gp.GeoImage.create(xsz=1000, ysz=1000, bbox=bbox, dtype='int16')
    geoimg.set_gain(0.0001)
    arr = np.zeros((1000, 1000)) + 0.0001
    arr[0:500, :] = 0.0002
    geoimg.write(deepcopy(arr))
    res = geoimg.resolution()
    imgout = alg.cookie_cutter([geoimg], xres=res.x(), yres=res.y())
    # Values round-trip through int16 storage scaled by the gain, so they are
    # only approximately reproduced; exact equality is too strict here (the
    # companion version of this test also uses almost_equal).
    np.testing.assert_array_almost_equal(arr, imgout.read())
def fetch(items, geometry, keys, path=config.DATADIR, filename=config.FILENAME, proj=None, res=None):
    """ Fetch the given asset keys for just the AOI geometry and clip to a GeoTIFF.

    Parameters:
        items: STAC items to mosaic/clip (first item provides band metadata,
            default resolution, and default projection).
        geometry: AOI geometry used both for the derived item and for clipping.
        keys: asset keys to open from each item.
        path, filename: output location template (resolved via the first item).
        proj: output SRS; defaults to the first image's SRS.
        res: output resolution as [xres, yres] (a single value is used for both);
            defaults to the first image's resolution.

    Returns:
        The derived item with an 'image' asset pointing at the clipped file,
        or None if clipping failed.
    """
    derived_item = create_derived_item(items, geometry)
    # Collect band metadata from the first item's requested assets.
    bands = []
    for k in keys:
        bands += items[0].asset(k).get('eo:bands', [])
    filename = items[0].get_filename(path=path, filename=filename).replace(
        '.json', '.tif')
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    derived_item._data['assets'] = {
        'image': {
            'type': 'image/vnd.stac.geotiff; cloud-optimized=true',
            'title': 'Clipped image',
            'href': filename,
            'eo:bands': bands
        }
    }
    # Output already exists from a previous run - reuse it.
    if os.path.exists(filename):
        return derived_item
    try:
        geoimgs = [open_image(item, keys) for item in items]
        # default to first image res and srs
        if res is None:
            res = geoimgs[0].resolution()
            res = [res.x(), res.y()]
        elif len(res) == 1:
            res = [res[0], res[0]]
        if proj is None:
            proj = geoimgs[0].srs()
        geovec = geometry_to_GeoVector(geometry)
        imgout = algs.cookie_cutter(geoimgs, filename, geovec[0],
                                    xres=res[0], yres=res[1], proj=proj, options=OPTS)
        logger.info("Created %s" % imgout.filename())
        return derived_item
    except Exception as err:
        # Report through the module logger (the success path already uses it)
        # instead of print, and make the best-effort failure return explicit.
        logger.error('Error fetching %s: %s' % (filename, str(err)))
        return None
def test_cookiecutter_real_crop(self):
    """ With crop=True the output extent must lie within the feature extent """
    img = gpt.get_test_image().select(['red', 'green', 'blue'])
    shp = os.path.join(os.path.dirname(__file__), 'vectors', 'aoi1_epsg32416.shp')
    aoi = gp.GeoVector(shp)
    clipped = alg.cookie_cutter([img], feature=aoi[0], xres=30.0, yres=30.0,
                                crop=True)
    want = aoi.extent()
    got = clipped.extent()
    # no side of the cropped output may extend beyond the feature
    self.assertGreaterEqual(got.x0(), want.x0())
    self.assertGreaterEqual(got.y0(), want.y0())
    self.assertLessEqual(got.x1(), want.x1())
    self.assertLessEqual(got.y1(), want.y1())
def test_cookiecutter_real_reproj(self):
    """ Cookie cutter with a feature in a different projection """
    img = gpt.get_test_image().select(['red', 'green', 'blue'])
    shp = os.path.join(os.path.dirname(__file__), 'vectors', 'aoi1_epsg32416.shp')
    aoi = gp.GeoVector(shp)
    expected = aoi.extent()
    # clip to the feature at 30m resolution; extent should match the feature
    clipped = alg.cookie_cutter([img], feature=aoi[0], xres=30.0, yres=30.0)
    actual = clipped.extent()
    for got, want in [(actual.x0(), expected.x0()),
                      (actual.y0(), expected.y0()),
                      (actual.x1(), expected.x1()),
                      (actual.y1(), expected.y1())]:
        self.assertAlmostEqual(got, want)
def test_cookiecutter_real(self):
    """ Cookie cutter on a single real image, feature in another projection """
    img = gpt.get_test_image().select(['red'])
    shp = os.path.join(os.path.dirname(__file__), 'vectors', 'aoi1_epsg4326.shp')
    aoi = gp.GeoVector(shp)
    expected = aoi.extent()
    clipped = alg.cookie_cutter([img], feature=aoi[0], xres=0.0003, yres=0.0003)
    actual = clipped.extent()
    # the clipped extent should match the feature's extent
    for got, want in [(actual.x0(), expected.x0()),
                      (actual.y0(), expected.y0()),
                      (actual.x1(), expected.x1()),
                      (actual.y1(), expected.y1())]:
        self.assertAlmostEqual(got, want)
def test_cookiecutter(self):
    """ Mosaic two adjacent unit-square images into one (cookie cutter) """
    left = gp.GeoImage.create(xsz=1000, ysz=1000,
                              bbox=np.array([0.0, 0.0, 1.0, 1.0]))
    right = gp.GeoImage.create(xsz=1000, ysz=1000,
                               bbox=np.array([1.0, 0.0, 1.0, 1.0]))
    res = left.resolution()
    mosaic = alg.cookie_cutter([left, right], xres=res.x(), yres=res.y())
    ext = mosaic.extent()
    # combined extent covers both inputs side by side
    self.assertEqual(ext.x0(), 0.0)
    self.assertEqual(ext.y0(), 0.0)
    self.assertEqual(ext.width(), 2.0)
    self.assertEqual(ext.height(), 1.0)
def test_cookiecutter_gain(self):
    """ Floating point gain is honored when clipping an int16 image """
    img = gp.GeoImage.create(xsz=1000, ysz=1000,
                             bbox=np.array([0.0, 0.0, 1.0, 1.0]),
                             dtype='int16')
    img.set_gain(0.0001)
    expected = np.zeros((1000, 1000))
    expected[:500, :] = 0.0002
    img.write(deepcopy(expected))
    res = img.resolution()
    out = alg.cookie_cutter([img], xres=res.x(), yres=res.y())
    # gain-scaled values survive the round trip to within float tolerance
    np.testing.assert_array_almost_equal(expected, out.read())
def fetch(scenes, assets, geovector, basename='image'):
    """ Fetch the given assets for just the AOI and clip them to a GeoTIFF.

    Parameters:
        scenes: scenes to mosaic/clip (the first provides path, filename,
            band metadata, default resolution, and default projection).
        assets: asset keys to open from each scene.
        geovector: AOI vector used both as the derived-scene geometry and
            as the clipping feature.
        basename: name of the asset added to the derived scene.

    Returns:
        A derived Scene whose asset points at the clipped file, or None
        if clipping failed.
    """
    # GeoTIFF creation options: tiled + DEFLATE-compressed output.
    opts = {
        'COMPRESS': 'DEFLATE',
        'PREDICTOR': '2',
        'INTERLEAVE': 'BAND',
        'TILED': 'YES',
        'BLOCKXSIZE': '512',
        'BLOCKYSIZE': '512'
    }
    path = scenes[0].get_path()
    fname = scenes[0].get_filename()
    # TODO - different paths for sat-fetch ?
    fout = os.path.join(path, fname + '.tif')
    # create derived scene metadata
    derived_scene = Scene.create_derived(scenes)
    derived_scene.feature['geometry'] = json.loads(geovector.json_geometry())
    # add asset(s) to derived scene, carrying over band info from the first scene
    bands = []
    for a in assets:
        bands += scenes[0].asset(a)['eo:bands']
    derived_scene.feature['assets'][basename] = {
        'href': fout,
        'eo:bands': bands
    }
    # Output already exists from a previous run - reuse it.
    if os.path.exists(fout):
        return derived_scene
    try:
        geoimgs = [open_image(s, assets) for s in scenes]
        # default to first image res and srs
        res = geoimgs[0].resolution()
        imgout = algs.cookie_cutter(geoimgs, fout, geovector,
                                    xres=res.x(), yres=res.y(),
                                    proj=geoimgs[0].srs(), options=opts)
        logger.info("Created %s" % imgout.filename())
        return derived_scene
    except Exception as err:
        # Report through the module logger (used on the success path) rather
        # than print, and make the best-effort failure return explicit.
        logger.error('Error fetching %s: %s' % (fout, str(err)))
        return None
def test_cookiecutter(self):
    """ Mosaic two adjacent images; check extent and resolution """
    img_a = gp.GeoImage.create(xsz=1000, ysz=1000,
                               bbox=np.array([0.0, 0.0, 1.0, 1.0]))
    img_b = gp.GeoImage.create(xsz=1000, ysz=1000,
                               bbox=np.array([1.0, 0.0, 1.0, 1.0]))
    res = img_a.resolution()
    mosaic = alg.cookie_cutter([img_a, img_b], xres=res.x(), yres=res.y())
    ext = mosaic.extent()
    # Extents appear accurate only to ~7 decimal places - possibly a
    # double-to-float conversion somewhere downstream - so use almost-equal.
    self.assertAlmostEqual(ext.x0(), 0.0)
    self.assertAlmostEqual(ext.y0(), 0.0)
    self.assertAlmostEqual(ext.width(), 2.0, places=6)
    self.assertAlmostEqual(ext.height(), 1.0)
    # output resolution should match the requested (input) resolution
    self.assertAlmostEqual(mosaic.resolution().x(), res.x())
    self.assertAlmostEqual(mosaic.resolution().y(), res.y())
def test_cookiecutter_real_reproj(self):
    """ Cookie cutter with a feature in a different projection """
    img = gpt.get_test_image().select(['red', 'green', 'blue'])
    shp = os.path.join(os.path.dirname(__file__), 'vectors', 'aoi1_epsg32416.shp')
    aoi = gp.GeoVector(shp)
    want = aoi.extent()
    clipped = alg.cookie_cutter([img], feature=aoi[0], xres=30.0, yres=30.0)
    got = clipped.extent()
    # origin is pixel-aligned, shifted by half a pixel (15m at 30m res)
    self.assertAlmostEqual(got.x0() + 15, want.x0())
    self.assertAlmostEqual(got.y0() + 15, want.y0())
    # cookie cutter will never add more than a pixel and a half in width
    self.assertTrue(got.x1() - want.x1() < 45.0)
    self.assertTrue(got.y1() - want.y1() < 45.0)
    self.assertEqual(clipped.resolution().x(), 30.0)
    self.assertEqual(clipped.resolution().y(), -30.0)
def generate_stats():
    """ Clip every .tif in the cwd by every .geojson in the cwd and write
    per-pair band statistics to CSV files.

    For each (geotif, geojson) pair a '<geojson>_<date>_<tile>.csv' file is
    written in the cwd with one stats row, and the clipped GeoTIFF is moved
    into ./outputs. Date and tile are sliced from fixed positions in the
    GeoTIFF basename (assumes the product naming convention - TODO confirm).

    Returns:
        List of unique CSV filenames written.
    """
    geotifs = [
        op.join(os.getcwd(), f) for f in os.listdir('.') if f.endswith('.tif')
    ]
    geojsons = [
        op.join(os.getcwd(), geoj) for geoj in os.listdir('.')
        if geoj.endswith('.geojson')
    ]
    output_path = op.join(os.getcwd(), 'outputs')
    output_files = []
    if not op.isdir(output_path):
        makedirs(output_path)
    for geotif in geotifs:
        for geojson in geojsons:
            # platform-safe basename extraction instead of split('/')
            basename = op.basename(op.splitext(geotif)[0])
            geo_fname = op.basename(op.splitext(geojson)[0])
            # date/tile sliced from fixed positions in the product name
            date = basename[10:16]
            tile = basename[28:35]
            geoimg = gippy.GeoImage.open([geotif])
            geoimg.set_nodata(0)
            geovec = gippy.GeoVector(geojson)
            res = geoimg.resolution()
            output_filename = '{}_{}_{}.csv'.format(geo_fname, date, tile)
            fout = geoimg.basename() + '_{}.tif'.format(str(geo_fname))
            # 'with' guarantees the CSV is closed even if clipping raises
            with open(output_filename, 'w') as f:
                f.write('min,max,mean,stddev,skew,count\n')
                # clip the image to the geojson feature at native resolution
                imgout = alg.cookie_cutter([geoimg], fout, geovec[0],
                                           xres=res.x(), yres=res.y(),
                                           proj=geoimg.srs())
                stats = imgout[0].stats()
                f.write(','.join([str(s) for s in stats]) + '\n')
            if output_filename not in output_files:
                output_files.append(output_filename)
            shutil.move(os.path.join('.', fout),
                        os.path.join(output_path, fout))
            print('{} site stats extracted!'.format(geojson))
    return output_files
def test_cookiecutter_real(self):
    """ Cookie cutter on a single real image, feature in another projection """
    img = gpt.get_test_image().select(['red'])
    iext = img.extent()
    shp = os.path.join(os.path.dirname(__file__), 'vectors', 'aoi1_epsg4326.shp')
    aoi = gp.GeoVector(shp)
    want = aoi.extent()
    clipped = alg.cookie_cutter([img], feature=aoi[0], xres=0.0003, yres=0.0003)
    got = clipped.extent()
    # origin is pixel-aligned, shifted by half a pixel (0.00015 deg)
    self.assertAlmostEqual(got.x0() + 0.00015, want.x0())
    self.assertAlmostEqual(got.y0() + 0.00015, want.y0())
    # cookie cutter will never add more than a pixel and a half in width
    self.assertTrue(got.x1() - want.x1() < 0.0045)
    self.assertTrue(got.y1() - want.y1() < 0.0045)
    self.assertAlmostEqual(clipped.resolution().x(), 0.0003)
    self.assertAlmostEqual(clipped.resolution().y(), -0.0003)
def main(scenes, aoi, datadir='./', bands=None):
    """ Build one clipped mosaic GeoTIFF per acquisition date.

    Loads a saved Scenes collection, groups scenes by date, opens the
    requested band assets for each scene via /vsicurl, and cookie-cuts
    them to the first feature of the AOI vector.

    Parameters:
        scenes: path/handle loadable by Scenes.load (saved scenes file).
        aoi: filename of a vector whose first feature is the clip geometry.
        datadir: output directory; created if missing.
        bands: asset keys to include. If None, locked to the keys of the
            first scene encountered (the assignment below is sticky across
            dates because it rebinds this local).

    Returns:
        None; writes '<date>_<platform>.tif' files into datadir as a
        side effect. Errors per date are printed, not raised.
    """
    scenes = Scenes.load(scenes)
    print(scenes.text_calendar())
    bname = os.path.splitext(os.path.basename(aoi))[0]
    features = gippy.GeoVector(aoi)
    if not os.path.exists(datadir):
        os.makedirs(datadir)
    # verbose gippy output for progress visibility
    gippy.Options.set_verbose(5)
    # earlier creation-option experiments kept for reference:
    #opts = {'COMPRESS': 'DEFLATE', 'PREDICTOR': '2', 'TILED': 'YES', 'BLOCKXSIZE': '512', 'BLOCKYSIZE': '512'}
    #opts = {'COMPRESS': 'LZW', 'TILED': 'YES', 'BLOCKXSIZE': '512', 'BLOCKYSIZE': '512'}
    opts = {'TILED': 'YES', 'BLOCKXSIZE': '512', 'BLOCKYSIZE': '512'}
    for date in scenes.dates():
        # all scenes acquired on this date are mosaicked together
        _scenes = [s for s in scenes if s.date == date]
        outname = '%s_%s.tif' % (_scenes[0].date, _scenes[0].platform)
        fout = os.path.join(datadir, outname)
        if not os.path.exists(fout):  # skip dates already processed
            try:
                geoimgs = []
                for s in _scenes:
                    links = s.links()
                    if bands is None:
                        # NOTE(review): rebinding the parameter makes the
                        # first scene's keys the band set for ALL later
                        # scenes/dates - confirm this is intended
                        bands = links.keys()
                    # turn https asset links into GDAL-readable /vsicurl paths
                    filenames = [links[k].replace('https:/', '/vsicurl')
                                 for k in sorted(links) if k in bands]
                    geoimg = gippy.GeoImage.open(filenames)
                    geoimg.set_nodata(0)
                    geoimgs.append(geoimg)
                # default to first image res and srs
                res = geoimgs[0].resolution()
                imgout = algs.cookie_cutter(geoimgs, fout, features[0],
                                            xres=res.x(), yres=res.y(),
                                            proj=geoimgs[0].srs(), options=opts)
            except Exception as err:
                # best-effort: report and continue with the next date
                print('Error: ', str(err))
def mask_with_vector(geoimg, vector, filename=''):
    """ Mask geoimage with a vector """
    bounds = geoimg.geo_extent()
    ds, layer = open_vector(vector[0], vector[1])
    # union all features intersecting the image's extent into one geometry
    bbox = [bounds.x0(), bounds.y0(), bounds.x1(), bounds.y1()]
    geovec = get_features(layer, bbox=bbox, union=True)
    res = geoimg.resolution()
    logger.info('Saving to file %s' % filename, action='Save file',
                actee=filename, actor=__name__)
    # clip the image to the unioned feature at its native resolution/SRS
    return alg.cookie_cutter([geoimg], filename=filename, feature=geovec[0],
                             proj=geoimg.srs(), xres=res.x(), yres=res.y())
def main(args=None):
    """ Query Scihub for Sentinel scenes over a bounding box, download and
    unzip them, warp each measurement GeoTIFF, and mosaic everything into a
    single EPSG:3857 image at 10m resolution.

    Parameters:
        args: argv-style list (or space-separated string):
            [script, outDir, bbox, username, password, flags...]
            Flags: -m mission, -e end date, -b begin date,
                   -p max rows, -t product type.

    Exits with code 2 on an unrecognized flag and code 3 on an invalid
    API response.
    """
    print('started')
    if args is None:
        args = sys.argv
    if isinstance(args, str):
        args = args.split(' ')
    maxRows = '100'
    outDir = args[1]               # where to put the mosaic
    zipDir = outDir + '/zip/'      # where to put downloads
    unzipDir = outDir + '/unzip/'  # where to put working files
    # create directory to store downloads, output, and working files
    if not os.path.isdir(outDir):
        os.mkdir(outDir)
    if not os.path.isdir(zipDir):
        os.mkdir(zipDir)
    if not os.path.isdir(unzipDir):
        os.mkdir(unzipDir)
    bbox = args[2]  # corners of the bounding box
    username = args[3]
    password = args[4]
    product = 'GRD'
    mission = "1"
    end = "NOW"             # max sense time
    beginning = ABS_START   # min sense time
    i = 5                   # argument index
    while i < len(args):
        arg = args[i]
        if arg == '-m':
            i += 1
            mission = args[i]
        elif arg == '-e':
            i += 1
            end = args[i] + MIDNIGHT
        elif arg == '-b':
            i += 1
            beginning = args[i] + MIDNIGHT
        elif arg == '-p':
            i += 1
            maxRows = args[i]
        elif arg == '-t':
            i += 1
            # renamed from 'type' (shadowed the builtin); parsed but unused
            product_type = args[i].upper()
        else:
            print('Unrecognized flag ' + arg)
            print(CODE_2_MEANING)
            sys.exit(2)
        i += 1
    # create strings that will be used as options for querying scihub
    ingestOption = INGESTION_KEYWORD + ':[' + beginning + ' TO ' + end + ']'
    p = parse_bbox(bbox)
    # Point 0 appears twice because the polygon must be a closed loop
    geographicType = POLYGON_KEYWORD + p[0] + POINT_DELIM + p[1] + POINT_DELIM\
        + p[2] + POINT_DELIM + p[3] + POINT_DELIM + p[0] + '))'
    footprintOption = FOOTPRINT_KEYWORD + geographicType + ')"'
    platformOption = 'platformname:Sentinel-' + mission
    if mission == '1':
        productOption = PRODUCT_KEYWORD + product + QUERY_DELIM
    else:
        productOption = ''
    # no QUERY_DELIM between product and platform options because the
    # product option is sometimes blank
    queryURL = URL_START + ingestOption + QUERY_DELIM + footprintOption\
        + QUERY_DELIM + productOption + platformOption\
        + '&rows=' + maxRows + '&start=0&format=json'
    queryURL = queryURL.replace(' ', '%20')
    session.auth = (username, password)
    # FIX: Python 2 print statements converted to print() calls throughout
    print(queryURL)
    response = session.post(queryURL, auth=session.auth)
    try:
        json_feed = response.json()['feed']
        total_results = int(json_feed['opensearch:totalResults'])
    except (ValueError, KeyError):
        print(response)
        print('API response not valid. JSON decoding failed. Exiting with code 3.')
        sys.exit(3)
    entries = json_feed.get('entry', [])
    download_all(entries, zipDir, unzipDir)
    # add the images to the list
    files = []  # FIX: was used without any visible initialization
    for entry in os.listdir(unzipDir):
        if entry.endswith('.SAFE'):
            measurement_dir = unzipDir + entry + '/measurement/'
            for fname in os.listdir(measurement_dir):
                if fname.endswith('tiff'):
                    warp(fname, measurement_dir)
                    files.append(measurement_dir + fname)
    geoimgs = []
    for fname in files:
        img = gippy.GeoImage(fname)
        img.set_nodata(0)
        geoimgs.append(img)
    print(files)
    # mosaic everything into a single web-mercator image
    algorithms.cookie_cutter(geoimgs, outDir + '/mosaic',
                             xres=10.0, yres=10.0, proj='EPSG:3857')
def fetch(self, key, aoi, pansharpen=False, acomp=False, dra=False, **kwargs):
    """ Order this scene and fetch the requested product clipped to the AOI.

    Parameters:
        key: product flavor - 'rgb', 'rgb-pan', 'visual', or 'analytic';
            selects the spec/pansharpen/acomp/dra/nodata combination below.
        aoi: GeoJSON-serializable geometry used as the clip feature.
        pansharpen, acomp, dra: NOTE(review) - these parameters are
            immediately overwritten by the local defaults below, so the
            caller's values are ignored; confirm whether that is intended.
        kwargs: unused here.

    Returns:
        Path to the clipped GeoTIFF (fout). Note fout is returned even if
        fetching failed (the exception is only logged).
    """
    if self.order():
        # create tempfile for AOI (delete=False so gippy can reopen it by name)
        with tempfile.NamedTemporaryFile(suffix='.geojson', mode='w', delete=False) as f:
            aoiname = f.name
            aoistr = json.dumps(aoi)
            f.write(aoistr)
        geovec = gippy.GeoVector(aoiname)
        ext = geovec.extent()
        bbox = [ext.x0(), ext.y0(), ext.x1(), ext.y1()]
        # defaults (clobber the keyword arguments - see docstring note)
        spec = ''
        pansharpen = False
        acomp = False
        dra = False
        # platform-specific nodata sentinel
        nodata = 0 if self['eo:platform'] in ['GEOEYE01', 'QUICKBIRD02' ] else -1e10
        opts = COG
        # set options per requested product key
        if key == 'rgb':
            spec = 'rgb'
            nodata = 0
            #opts = JPEG_COG
        elif key == 'rgb-pan':
            pansharpen = True
            spec = 'rgb'
            nodata = 0
        elif key == 'visual':
            pansharpen = True
            dra = True
            nodata = 0
            #opts = JPEG_COG
        elif key == 'analytic':
            acomp = True
        fout = os.path.join(self.get_path(), self.get_filename(suffix='_%s' % key)) + '.tif'
        with TemporaryDirectory() as temp_dir:
            try:
                if not os.path.exists(fout):  # skip if already fetched
                    logger.info('Fetching %s: %s' % (key, fout))
                    # TODO - allow for other projections
                    img = CatalogImage(
                        self['id'], pansharpen=pansharpen, acomp=acomp,
                        dra=dra, bbox=bbox)  #, proj=utm_epsg(scenes.center()))
                    tmp_fout1 = os.path.join(
                        temp_dir, '%s_%s_1.tif' % (self['id'], key))
                    tmp_fout2 = os.path.join(
                        temp_dir, '%s_%s_2.tif' % (self['id'], key))
                    tif = img.geotiff(path=tmp_fout1, proj='EPSG:4326', spec=spec)
                    # clip and save
                    geoimg = gippy.GeoImage(tif, True)
                    # workaround for gbdxtools scaling
                    if key in ['rgb', 'visual']:
                        geoimg = geoimg.autoscale(1, 255).save(tmp_fout2)
                    geoimg.set_nodata(0)
                    # this clips the image to the AOI
                    res = geoimg.resolution()
                    imgout = alg.cookie_cutter(
                        [geoimg], fout, geovec[0], xres=res.x(), yres=res.y(),
                        proj=geoimg.srs(), options=opts)
                    imgout.add_overviews([2, 4, 8, 16], resampler='average')
                    # drop the reference so the file handle is released/flushed
                    imgout = None
            except Exception as e:
                # best-effort: log and fall through, fout is still returned
                logger.warning('Error fetching: %s' % str(e))
                #logger.warning('Traceback: %s', traceback.format_exc())
        # clean up the AOI tempfile created above
        os.remove(aoiname)
        return fout