def _sub(self, data):
    """Interpolate bundle and analysis-area values into an extract configuration dict."""
    import datetime

    if data.get("aa", False):
        from ambry.geo.analysisarea import get_analysis_area

        aa = get_analysis_area(self.bundle.library, **data["aa"])

        aa_d = dict(aa.__dict__)
        aa_d["aa_name"] = aa_d["name"]
        del aa_d["name"]

        # Merge the analysis-area attributes into the config; aa_d values win
        # on key conflicts (Python 2 dict.items() list concatenation).
        data = dict(data.items() + aa_d.items())

    data["bundle_name"] = self.bundle.identity.name
    data["date"] = datetime.datetime.now().date().isoformat()

    data["query"] = data.get("query", "").format(**data)
    data["extract_where"] = data.get("extract_where", "").format(**data)
    data["title"] = data.get("title", "").format(**data)
    data["description"] = data.get("description", "").format(**data)
    data["name"] = data.get("name", "").format(**data)
    data["layer_name"] = data.get("layer_name", "").format(**data)
    data["path"] = self.bundle.filesystem.path("extracts", format(data["name"]))
    data["done_if"] = data.get("done_if", "os.path.exists(path)").format(**data)

    return data
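# A minimal, standalone sketch (not taken from the bundle) of the substitution
# scheme _sub uses: each configured string is passed through str.format() with
# the whole dict as keyword arguments, so fields can reference injected values
# such as {date}, or each other. The field names below are illustrative.
def _sub_sketch(data):
    import datetime
    data["date"] = datetime.datetime.now().date().isoformat()
    for key in ("query", "title", "name"):
        data[key] = data.get(key, "").format(**data)
    return data

# e.g. _sub_sketch({"name": "streetlights-{date}", "title": "Lights as of {date}"})
# returns the dict with the ISO date substituted into "name" and "title".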
def make_hdf(self):
    import ambry.geo as dg
    from ambry.geo.analysisarea import get_analysis_area
    import numpy as np

    scale = 10

    k = dg.DistanceKernel(21)
    k.matrix *= scale * 100  # Measured in cm: meters * 100 cm / m.
    k.matrix = np.array(k.matrix, dtype=int)  # Intify

    p = self.partitions.find(table='streetlights', format='geo')

    if not p:
        raise Exception("Didn't find partition for streetlights")

    raster = self.partitions.find_or_new_hdf(table='distance')

    for cityrow in p.query("SELECT count(*) AS count, city FROM streetlights GROUP BY city"):

        city = cityrow['city']

        if not city or city == '-':
            continue

        if cityrow['count'] < 100:
            continue  # Most of the cities have a, probably spurious, small number of lamps

        self.log("Making HDF for city {}".format(city))
        lr = self.init_log_rate()

        aa = get_analysis_area(self.library, place=city, scale=scale)
        trans = aa.get_translator()

        a = aa.new_array(dtype=np.int16)
        a = a + 12000  # Set the baseline to a max at 120 meters.

        for row in p.query("SELECT * FROM streetlights WHERE city = ?", city):
            k.apply_min(a, trans(row['lon'], row['lat']))
            lr("Distance raster point for {}".format(city))

        raster.database.put_geo(city, a, aa)
        raster.database.flush()

        aa.write_geotiff(self.filesystem.path('extracts', city + "-dist"), a[:])
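# A standalone numpy sketch (an assumption, not ambry's implementation) of what
# DistanceKernel.apply_min does conceptually: stamp a window of distances onto
# the raster at a point, keeping the elementwise minimum, so each cell ends up
# holding the distance (in cm here) to its nearest streetlight.
import numpy as np

def apply_min_sketch(raster, kernel, x, y):
    """Composite `kernel` into `raster` centered at (x, y), keeping per-cell minima."""
    h, w = kernel.shape
    r0, c0 = y - h // 2, x - w // 2  # top-left corner; edge clipping omitted
    window = raster[r0:r0 + h, c0:c0 + w]
    raster[r0:r0 + h, c0:c0 + w] = np.minimum(window, kernel)

# e.g., starting from the same 12000 (120 m) baseline make_hdf uses, with a
# hypothetical 21x21 integer distance kernel `dist_kernel`:
#   raster = np.full((100, 100), 12000, dtype=np.int16)
#   apply_min_sketch(raster, dist_kernel, 50, 50)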
def test_sfschema(self):
    import os
    from ambry.geo.sfschema import TableShapefile
    from ambry.geo.analysisarea import get_analysis_area

    _, communities = self.bundle.library.dep('communities')

    csrs = communities.get_srs()

    gp = self.bundle.partitions.new_geo_partition(table='geot2')

    with gp.database.inserter(source_srs=csrs) as ins:
        for row in communities.query("""
                SELECT *,
                       X(Transform(Centroid(geometry), 4326)) AS lon,
                       Y(Transform(Centroid(geometry), 4326)) AS lat,
                       AsText(geometry) AS wkt,
                       AsBinary(geometry) AS wkb
                FROM communities"""):
            r = {'name': row['cpname'], 'lat': row['lat'], 'lon': row['lon'], 'wkt': row['wkt']}
            ins.insert(r)

    return

    # Unreachable due to the early return above.
    aa = get_analysis_area(self.bundle.library, geoid='CG0666000')

    path1 = '/tmp/geot1.kml'
    if os.path.exists(path1):
        os.remove(path1)

    sfs1 = TableShapefile(self.bundle, path1, 'geot1')

    path2 = '/tmp/geot2.kml'
    if os.path.exists(path2):
        os.remove(path2)

    sfs2 = TableShapefile(self.bundle, path2, 'geot2', source_srs=communities.get_srs())

    print sfs1.type, sfs2.type

    for row in communities.query("""
            SELECT *,
                   X(Transform(Centroid(geometry), 4326)) AS lon,
                   Y(Transform(Centroid(geometry), 4326)) AS lat,
                   AsText(geometry) AS wkt,
                   AsBinary(geometry) AS wkb
            FROM communities"""):
        sfs1.add_feature({'name': row['cpname'], 'lat': row['lat'], 'lon': row['lon'], 'wkt': row['wkt']})
        sfs2.add_feature({'name': row['cpname'], 'lat': row['lat'], 'lon': row['lon'], 'wkt': row['wkt']})

    sfs1.close()
    sfs2.close()
def evari_extract_image(self, data):
    import ambry.geo as dg
    from ambry.geo.analysisarea import get_analysis_area
    from osgeo.gdalconst import GDT_Float32

    aa = get_analysis_area(self.library, geoid='CG0666000')

    trans = aa.get_translator()

    a_old = aa.new_array()
    a_new = aa.new_masked_array()
    a_nip = aa.new_array()
    a_total = aa.new_masked_array()

    k = dg.GaussianKernel(21, 7)

    p = self.partitions.find(table='lights')

    for row in p.query("SELECT * FROM lights"):
        # Use a separate name for the raster point so we don't shadow the partition `p`.
        pt = trans(row['lon'], row['lat'])
        status = row['status']

        if status == 'not in project':
            k.apply_add(a_nip, pt)
        elif status == 'converted':
            k.apply_add(a_new, pt)
            k.apply_add(a_total, pt)
        elif status == 'not converted':
            k.apply_add(a_old, pt)
            k.apply_add(a_total, pt)

    def fnp(prefix):
        return self.filesystem.path('extracts', '{}-{}'.format(prefix, data['name']))

    aa.write_geotiff(fnp('old'), a_old, data_type=GDT_Float32)
    aa.write_geotiff(fnp('new'), a_new, data_type=GDT_Float32)
    aa.write_geotiff(fnp('nip'), a_nip, data_type=GDT_Float32)
    aa.write_geotiff(fnp('total'), a_total, data_type=GDT_Float32)

    pct = 1 - (a_new / a_total)
    aa.write_geotiff(fnp('pct'), pct, data_type=GDT_Float32)

    return fnp('total')
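# A standalone numpy.ma sketch (an inference about intent, not ambry code) of
# why a_new and a_total are masked arrays: dividing by a zero or masked total
# leaves cells with no lights masked rather than raising or producing NaN, so
# the 'pct' GeoTIFF only carries values where lights actually exist.
import numpy as np

new_lights = np.ma.masked_array([0.0, 2.0, 1.0], mask=[True, False, False])
total_lights = np.ma.masked_array([0.0, 4.0, 0.0], mask=[True, False, False])

pct = 1 - (new_lights / total_lights)  # masked and zero-division cells stay masked
# pct -> [--, 0.5, --]: only the cell with a valid, non-zero total gets a value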
def x_test_basic(self):
    from ambry.geo.analysisarea import get_analysis_area, draw_edges
    from ambry.geo import Point
    from ambry.geo.kernel import GaussianKernel
    from osgeo.gdalconst import GDT_Float32

    aa = get_analysis_area(self.bundle.library, geoid='CG0666000')

    a = aa.new_array()
    #draw_edges(a)
    print a.shape, a.size

    gaussian = GaussianKernel(11, 6)

    for i in range(0, 400, 20):
        p = Point(100 + i, 100 + i)
        gaussian.apply_add(a, p)

    aa.write_geotiff('/tmp/box.tiff', a, data_type=GDT_Float32)