def test_caching(self):
    """Test that the caching mechanism works and we can turn it on/off."""
    # Start from an empty cache table.
    # (Use QuerySet.count() instead of len(qs.all()): len() fetched and
    # materialized every row just to count them.)
    clear_cache()
    self.assertEqual(ZonalStatsCache.objects.count(), 0)
    # First computation is a cache miss and writes one row.
    zonal = zonal_stats(self.polygons[0], self.rast)
    self.assertEqual(zonal.from_cache, False)
    self.assertEqual(ZonalStatsCache.objects.count(), 1)
    # Same inputs again -> served from cache, no new row.
    zonal = zonal_stats(self.polygons[0], self.rast)
    self.assertEqual(zonal.from_cache, True)
    self.assertEqual(ZonalStatsCache.objects.count(), 1)
    # read_cache=False forces recomputation but does not add a row.
    zonal = zonal_stats(self.polygons[0], self.rast, read_cache=False)
    self.assertEqual(zonal.from_cache, False)
    self.assertEqual(ZonalStatsCache.objects.count(), 1)
    # A different polygon is a miss and adds a second row.
    zonal = zonal_stats(self.polygons[3], self.rast)
    self.assertEqual(zonal.from_cache, False)
    self.assertEqual(ZonalStatsCache.objects.count(), 2)
    zonal = zonal_stats(self.polygons[3], self.rast)
    self.assertEqual(zonal.from_cache, True)
    self.assertEqual(ZonalStatsCache.objects.count(), 2)
    # Clearing the cache empties the table again.
    clear_cache()
    self.assertEqual(ZonalStatsCache.objects.count(), 0)
def get_water_impacts(bioregion):
    """Return (label, fraction) pairs describing projected precipitation
    change over the bioregion, sorted by descending pixel fraction.

    Falls back to [default_value] when the raster has no pixels here.
    """
    wi_geom = RasterDataset.objects.get(name='water_impacts')
    wi_stats = zonal_stats(bioregion.output_geom, wi_geom)
    if not wi_stats.pixels:
        return [default_value]
    total = float(wi_stats.pixels)
    # One count per raster category (dict keeps one entry per category).
    counts = {}
    for zc in wi_stats.categories.all():
        counts[zc.category] = zc.count
    fractions = sorted(((n / total, c) for c, n in counts.items()),
                       reverse=True)
    labels = {
        0: 'Less Precipitation',
        1: 'No Significant Change in Precipitation',
        2: 'More Precipitation',
    }
    # Unknown categories are silently skipped, as before.
    return [(labels[c], frac) for frac, c in fractions if c in labels]
def hazard_impact(hazard, bioregion):
    """Classify *hazard*'s impact on the bioregion as 'High' or 'Low'."""
    raster = RasterDataset.objects.get(name=hazard + '_impact')
    stats = zonal_stats(bioregion.output_geom, raster)
    # Any positive impact coverage at all counts as 'High'.
    return 'High' if stats.sum > 0 else 'Low'
def stats_for_geom(request, raster_name):
    """Serve zonal statistics for raster *raster_name* over a client geometry.

    Expects a 'geom_txt' request parameter holding WKT or GeoJSON.
    Returns an HTTP 404 when the parameter is missing or unparsable, or
    when no raster with that name exists; otherwise a JSON-serialized
    ZonalStatsCache row.
    """
    # Confirm that we have a valid polygon geometry
    if 'geom_txt' in request.REQUEST:
        geom_txt = str(request.REQUEST['geom_txt'])
    else:
        return HttpResponse("Must supply a geom_txt parameter", status=404)
    try:
        geom = fromstr(geom_txt)
    except Exception:  # was a bare except; don't swallow KeyboardInterrupt
        return HttpResponse(
            "Must supply a parsable geom_txt parameter (wkt or json)",
            status=404)
    # Confirm a raster with that name exists
    try:
        raster = RasterDataset.objects.get(name=raster_name)
    except Exception:
        # BUG FIX: the message previously interpolated an undefined name
        # `pk`, so this path raised NameError instead of returning 404.
        return HttpResponse("No raster named %s" % raster_name, status=404)
    #TODO check if continuous
    zonal = zonal_stats(geom, raster)
    zonal.save()
    zqs = ZonalStatsCache.objects.filter(pk=zonal.pk)
    data = serializers.serialize(
        "json", zqs,
        fields=('avg', 'min', 'max', 'median', 'mode', 'stdev', 'nulls',
                'pixels', 'date_modified', 'raster'))
    return HttpResponse(data, mimetype='application/json')
def get_human_influence(bioregion):
    """Mean Human Influence Index over the bioregion, rescaled to 0-1."""
    raster = RasterDataset.objects.get(name='human_influence_index')
    stats = zonal_stats(bioregion.output_geom, raster)
    if not stats.avg:
        return default_value
    # The HII raster is on a 0-64 scale; normalize to a fraction.
    return stats.avg / 64
def hazard_impact(hazard, bioregion):
    """Rate *hazard*'s impact on the bioregion: "High" when any impact pixels exist."""
    impact_raster = RasterDataset.objects.get(name=hazard + "_impact")
    impact_stats = zonal_stats(bioregion.output_geom, impact_raster)
    if impact_stats.sum > 0:
        return "High"
    return "Low"
def hazard_is_likely(hazard, bioregion):
    """Return True when the hazard raster has any positive coverage here."""
    hzd_geom = RasterDataset.objects.get(name=hazard)
    hzd_stats = zonal_stats(bioregion.output_geom, hzd_geom)
    # The comparison already yields a bool; the if/else returning
    # True/False literals was redundant.
    return hzd_stats.sum > 0
def get_human_consumption(bioregion):
    """Mean human-consumption value over the bioregion, truncated to an int."""
    raster = RasterDataset.objects.get(name='human_consumption')
    stats = zonal_stats(bioregion.output_geom, raster)
    return int(stats.avg) if stats.avg else default_value
def get_projected_population(bioregion):
    """Projected 2015 population of the bioregion (0 when there is no data)."""
    raster = RasterDataset.objects.get(name='population_2015')
    stats = zonal_stats(bioregion.output_geom, raster)
    if not stats.sum:
        return 0
    return int(stats.sum)
def get_soil_suitability(bioregion):
    """Mean soil-suitability score over the bioregion."""
    raster = RasterDataset.objects.get(name='soil_suitability')
    stats = zonal_stats(bioregion.output_geom, raster)
    if not stats.avg:
        return default_value
    return stats.avg
def get_displaced_populations(bioregion):
    """Populations displaced by 1m/3m/6m sea-level rise.

    Returns three (people, fraction_of_2005_population) pairs, one per
    rise scenario, using the per-bioregion report cache when available.
    The previously triplicated cache/compute block is factored into a
    local helper.
    """
    pop_geom = RasterDataset.objects.get(name="population_2005")
    pop_stats = zonal_stats(bioregion.output_geom, pop_geom)
    pop_05_sum = pop_stats.sum
    if pop_05_sum in [None, 0]:
        # No baseline population -> nothing can be displaced.
        return (0, 0), (0, 0), (0, 0)

    def _displaced_for(model, cache_key):
        # One scenario: serve from the report cache, or compute from the
        # sea-rise polygons bbox-overlapping the bioregion, then cache.
        if report_cache_exists(bioregion, cache_key):
            return get_report_cache(bioregion, cache_key)
        sr_objects = model.objects.filter(
            geometry__bboverlaps=bioregion.output_geom)
        displaced = get_displaced(pop_geom, pop_stats, pop_05_sum,
                                  sr_objects, bioregion)
        create_report_cache(bioregion, {cache_key: displaced})
        return displaced

    displaced_1m = _displaced_for(SeaRise1m, "displaced_people_1m")
    displaced_3m = _displaced_for(SeaRise3m, "displaced_people_3m")
    displaced_6m = _displaced_for(SeaRise6m, "displaced_people_6m")
    return displaced_1m, displaced_3m, displaced_6m
def get_soil_suitability(bioregion):
    """Average soil suitability across the bioregion, or default_value when absent."""
    suitability_raster = RasterDataset.objects.get(name="soil_suitability")
    suitability = zonal_stats(bioregion.output_geom, suitability_raster)
    return suitability.avg if suitability.avg else default_value
def get_human_consumption(bioregion):
    """Truncated mean of the human-consumption raster over the bioregion."""
    consumption_raster = RasterDataset.objects.get(name="human_consumption")
    consumption = zonal_stats(bioregion.output_geom, consumption_raster)
    if not consumption.avg:
        return default_value
    return int(consumption.avg)
def get_urban_pop(pop_2005, bioregion):
    """Urban population of the bioregion and its share of *pop_2005*.

    Sums population pixels inside every UrbanExtent polygon that
    intersects the bioregion; the (count, fraction) pair is memoized in
    the per-bioregion report cache.
    """
    if report_cache_exists(bioregion, 'urban_population'):
        urban_pop, urban_perc = get_report_cache(bioregion, 'urban_population')
        return (urban_pop, urban_perc)
    else:
        pop_geom = RasterDataset.objects.get(name='population_2005')
        urban_objects = UrbanExtent.objects.filter(
            geometry__bboverlaps=bioregion.output_geom)
        urban_pop = 0
        for urban_object in urban_objects:
            urban_shape = urban_object.geometry
            # buffer(0) repairs invalid (e.g. self-intersecting) geometries.
            if not urban_shape.valid:
                urban_shape = urban_shape.buffer(0)
            if urban_shape.intersects(bioregion.output_geom):
                urban_overlap = urban_shape.intersection(bioregion.output_geom)
                pop_stats = zonal_stats(urban_overlap, pop_geom)
                if pop_stats.sum:
                    urban_pop += pop_stats.sum
        if pop_2005 == 0:
            urban_perc = 0
        else:
            # NOTE(review): presumably the raster sums are floats; if both
            # operands were ints this would truncate on Python 2 — confirm.
            urban_perc = urban_pop / pop_2005
        create_report_cache(bioregion,
                            dict(urban_population=(urban_pop, urban_perc)))
        return (urban_pop, urban_perc)
def get_human_influence(bioregion):
    """Mean Human Influence Index over the bioregion as a 0-1 fraction."""
    hii_raster = RasterDataset.objects.get(name="human_influence_index")
    hii = zonal_stats(bioregion.output_geom, hii_raster)
    if hii.avg:
        # Index ranges 0-64.
        return hii.avg / 64
    return default_value
def get_sea_rise_impacts(bioregion):
    """Land lost to 1m/3m/6m sea-level rise as three (km^2, mi^2) pairs.

    The previously triplicated stats/convert sequence is factored into a
    local helper; per-scenario behavior is unchanged.
    """

    def _land_loss(raster_name):
        # Pixel sum * grid cell size -> lost area, then unit conversions.
        raster = RasterDataset.objects.get(name=raster_name)
        stats = zonal_stats(bioregion.output_geom, raster)
        loss = stats.sum * sea_level_grid_size
        loss_km = int(convert_float_to_area_display_units(loss))
        loss_mi = int(convert_sq_km_to_sq_mi(loss_km))
        return (loss_km, loss_mi)

    return (_land_loss("sea_rise_1m"),
            _land_loss("sea_rise_3m"),
            _land_loss("sea_rise_6m"))
def get_underweight_children(bioregion):
    """Count of underweight children in the bioregion (0 without poverty data)."""
    # Skip regions with essentially no poverty-data coverage.
    if no_poverty_data(bioregion, 0.1):
        return 0
    raster = RasterDataset.objects.get(name="underweight_children")
    stats = zonal_stats(bioregion.output_geom, raster)
    return int(stats.sum) if stats.sum else 0
def get_annual_precip(bioregion):
    """Mean annual precipitation over the bioregion as (cm, inches)."""
    raster = RasterDataset.objects.get(name='annual_precipitation')
    stats = zonal_stats(bioregion.output_geom, raster)
    if not stats.avg:
        return default_value, default_value
    # Raster values are in mm: /10 -> cm, then convert to inches.
    precip_cm = stats.avg / 10
    return precip_cm, convert_cm_to_in(precip_cm)
def get_annual_temp(bioregion):
    """Mean annual temperature over the bioregion as (celsius, fahrenheit)."""
    raster = RasterDataset.objects.get(name='annual_temperature')
    stats = zonal_stats(bioregion.output_geom, raster)
    if not stats.avg:
        return default_value, default_value
    # Raster stores tenths of a degree C.
    celsius = stats.avg / 10
    fahrenheit = celsius * 9 / 5. + 32
    return celsius, fahrenheit
def get_min_temp(bioregion):
    """Mean minimum temperature over the bioregion as (celsius, fahrenheit)."""
    raster = RasterDataset.objects.get(name='min_temp')
    stats = zonal_stats(bioregion.output_geom, raster)
    if stats.avg:
        # Raster stores tenths of a degree C.
        celsius = stats.avg / 10
        return celsius, celsius * 9 / 5. + 32
    return default_value, default_value
def get_temp_change(bioregion):
    """Projected temperature change over the bioregion as (delta_C, delta_F)."""
    raster = RasterDataset.objects.get(name='temp_change')
    stats = zonal_stats(bioregion.output_geom, raster)
    if not stats.avg:
        # NOTE(review): returns a scalar here but a tuple on success —
        # callers must handle both shapes.
        return default_value
    delta_c = stats.avg
    # A temperature *delta* converts with the 1.8 factor only (no +32).
    return (delta_c, delta_c * 1.8)
def get_underweight_children(bioregion):
    """Number of underweight children, or 0 when poverty data is absent."""
    if no_poverty_data(bioregion, .1):
        return 0
    uwc_raster = RasterDataset.objects.get(name='underweight_children')
    uwc = zonal_stats(bioregion.output_geom, uwc_raster)
    if uwc.sum:
        return int(uwc.sum)
    return 0
def get_temp_change(bioregion):
    """(delta C, delta F) of projected warming; default_value when no data."""
    change_raster = RasterDataset.objects.get(name="temp_change")
    change = zonal_stats(bioregion.output_geom, change_raster)
    if change.avg:
        celsius = change.avg
        # 1.8 F per degree C — pure delta, no +32 offset.
        return (celsius, celsius * 1.8)
    # NOTE(review): scalar fallback vs tuple success value.
    return default_value
def test_webservice(self):
    """The JSON web service returns the same stats as the zonal_stats util."""
    data = {'geom_txt': self.polygons[0].wkt}
    #self.settings_manager.set(ROOT_URLCONF = 'madrona.raster_stats.urls')
    response = self.client.get('/test_impact/', data)
    # failUnlessEqual is a deprecated unittest alias; use assertEqual for
    # consistency with the other tests in this suite.
    self.assertEqual(response.status_code, 200)
    for obj in serializers.deserialize("json", response.content):
        web_zonal = obj.object
        util_zonal = zonal_stats(self.polygons[0], self.rast, read_cache=False)
        self.assertEqual(web_zonal.avg, util_zonal.avg)
def get_terr_npp_avg(bioregion):
    """Mean terrestrial NPP per unit area over the bioregion's land portion."""
    terra_geom = get_terra_geom(bioregion)
    if terra_geom.area == 0.0:
        return 0
    npp_raster = RasterDataset.objects.get(name='npp_terr')
    stats = zonal_stats(terra_geom, npp_raster)
    if not stats.avg:
        return 0
    # Normalize by the squared cell side length (26064.03459).
    return stats.avg / (26064.03459**2)
def get_sea_rise_impacts(bioregion):
    """Area lost to 1m/3m/6m sea-level rise, each as a (km^2, mi^2) pair.

    Factors the three identical stats/convert sequences into one helper;
    results are unchanged.
    """

    def _scenario_loss(raster_name):
        # Pixel sum * grid cell size -> lost area, then unit conversions.
        raster = RasterDataset.objects.get(name=raster_name)
        stats = zonal_stats(bioregion.output_geom, raster)
        loss = stats.sum * sea_level_grid_size
        loss_km = int(convert_float_to_area_display_units(loss))
        loss_mi = int(convert_sq_km_to_sq_mi(loss_km))
        return (loss_km, loss_mi)

    return (_scenario_loss('sea_rise_1m'),
            _scenario_loss('sea_rise_3m'),
            _scenario_loss('sea_rise_6m'))
def get_terr_npp_avg(bioregion):
    """Average net primary productivity of the bioregion's terrestrial area."""
    land_geom = get_terra_geom(bioregion)
    if land_geom.area == 0.0:
        return 0
    npp_raster = RasterDataset.objects.get(name='npp_terr')
    npp = zonal_stats(land_geom, npp_raster)
    # Divide by the cell area (side 26064.03459, squared) when data exists.
    return npp.avg / (26064.03459 ** 2) if npp.avg else 0
def test_zonal_util(self): """ Tests that starspan works and stuff """ # shouldnt have any nulls zonal = zonal_stats(self.polygons[0], self.rast) self.assertEqual(zonal.nulls, 0) # doesnt even touch the raster, all should be null zonal = zonal_stats(self.polygons[1], self.rast) self.assertEqual(zonal.pixels, None) # Partly on and partly off the raster # no nulls but pixel count should be low zonal = zonal_stats(self.polygons[2], self.rast) self.assertEqual(zonal.nulls, 0) self.assertEqual(zonal.pixels, 225) # All on the raster but should have nulls zonal = zonal_stats(self.polygons[3], self.rast) self.assertEqual(zonal.nulls, 279)
def test_zonal_util(self):
    """zonal_stats pixel/null counts for fully-on, off, and edge polygons."""
    # Fully on the raster: no null pixels expected.
    stats = zonal_stats(self.polygons[0], self.rast)
    self.assertEqual(stats.nulls, 0)
    # Entirely off the raster: no pixels at all.
    stats = zonal_stats(self.polygons[1], self.rast)
    self.assertEqual(stats.pixels, None)
    # Straddling the edge: no nulls, but a reduced pixel count.
    stats = zonal_stats(self.polygons[2], self.rast)
    self.assertEqual(stats.nulls, 0)
    self.assertEqual(stats.pixels, 225)
    # Fully on the raster over a nodata area: nulls expected.
    stats = zonal_stats(self.polygons[3], self.rast)
    self.assertEqual(stats.nulls, 279)
def get_climate_change_vulnerability(bioregion):
    """(qualitative index, mean score) of climate-change vulnerability."""
    raster = RasterDataset.objects.get(name='climate_impact')
    stats = zonal_stats(bioregion.output_geom, raster)
    if not stats.avg:
        return default_value
    # Ascending upper bounds for each qualitative band; anything at or
    # above the last bound is 'High'.
    bands = ((-12, 'Low'), (-4, 'Moderately Low'), (4, 'Moderate'),
             (12, 'Moderately High'))
    index = 'High'
    for bound, label in bands:
        if stats.avg < bound:
            index = label
            break
    return (index, stats.avg)
def get_ocn_npp_avg(bioregion):
    """Mean oceanic net primary productivity over the bioregion's ocean area.

    Returns 0 when there is no oceanic area or no raster overlap.
    """
    oceanic_geom = get_oceanic_geom(bioregion)
    if oceanic_geom.area == 0:
        npp_avg = 0
    else:
        npp_geom = RasterDataset.objects.get(name='npp_ocn')
        npp_stats = zonal_stats(oceanic_geom, npp_geom)
        #settling on the following to account for lack of overlap
        #(see if statement above which helps with this issue as well)
        try:
            npp_avg = npp_stats.avg * 365 / 1000  #mg per day converted to g per year
        except:
            # Deliberate best-effort: avg may be None when the geometry and
            # raster do not overlap; fall back to 0 rather than crash.
            npp_avg = 0
    return npp_avg
def get_soil_suitability(bioregion):
    """(proportion, suitable km^2, suitable mi^2) of the bioregion's land."""
    terra_geom = get_terra_geom(bioregion)
    if terra_geom.area == 0.0:
        return (0, 0, 0)
    raster = RasterDataset.objects.get(name='soil_suitability')
    stats = zonal_stats(terra_geom, raster)
    if not stats.avg:
        return (0, 0, 0)
    proportion = stats.avg
    area_km, area_mi = get_size(terra_geom)
    # Scale total land area by the mean suitability fraction.
    return (proportion, area_km * proportion, area_mi * proportion)
def get_displaced(pop_geom, pop_stats, pop_05_sum, sr_objects, bioregion):
    """People displaced inside the sea-rise polygons, as (count, fraction).

    `count` is rounded down to the nearest thousand; `fraction` is relative
    to the 2005 total `pop_05_sum`.

    NOTE: `pop_stats` is kept for signature compatibility but never read —
    the original implementation rebound it immediately in the loop; the
    loop variable is renamed here so the shadowing is gone.
    """
    # Clip each bbox-overlapping sea-rise polygon to the bioregion.
    sr_geoms = [
        sr.geometry.intersection(bioregion.output_geom)
        for sr in sr_objects
        if sr.geometry.intersects(bioregion.output_geom)
    ]
    sum_pop = 0
    for sr_geom in sr_geoms:
        if not sr_geom.valid:
            # buffer(0) repairs invalid (e.g. self-intersecting) geometries.
            sr_geom = sr_geom.buffer(0)
        sr_stats = zonal_stats(sr_geom, pop_geom)
        if sr_stats.sum:
            sum_pop += sr_stats.sum
    perc_pop = sum_pop / pop_05_sum
    # Round down to the nearest thousand for display.
    sum_pop = int(sum_pop / 1000) * 1000
    return (sum_pop, perc_pop)
def get_water_stress(bioregion):
    """(relative water stress index, qualitative rating) for the bioregion."""
    raster = RasterDataset.objects.get(name='water_stress')
    stats = zonal_stats(bioregion.output_geom, raster)
    if not stats.avg:
        return default_value, default_value
    rwsi = stats.avg
    # Descending thresholds; first match wins.
    for threshold, rating in ((3.5, 'High'), (2.5, 'Moderate'), (1.5, 'Low')):
        if rwsi >= threshold:
            return rwsi, rating
    return rwsi, 'Unstressed'
def get_proportion_equipped_for_irrigation(bioregion):
    """(proportion, km^2, mi^2) of the bioregion's land equipped for irrigation."""
    terra_geom = get_terra_geom(bioregion)
    if terra_geom.area == 0.0:
        return (0, 0, 0)
    raster = RasterDataset.objects.get(name='irrig_equipped')
    stats = zonal_stats(terra_geom, raster)
    if not stats.sum:
        return (0, 0, 0)
    area_km, area_mi = get_size(terra_geom)
    # Raster sums hectares; /100 converts to km^2 before taking the ratio.
    proportion = stats.sum / 100 / area_km
    return (proportion, area_km * proportion, area_mi * proportion)
def get_water_stress(bioregion):
    """Relative water stress index plus its qualitative rating."""
    stress_raster = RasterDataset.objects.get(name="water_stress")
    stress = zonal_stats(bioregion.output_geom, stress_raster)
    if not stress.avg:
        return default_value, default_value
    rwsi = stress.avg
    if rwsi >= 3.5:
        return rwsi, "High"
    if rwsi >= 2.5:
        return rwsi, "Moderate"
    if rwsi >= 1.5:
        return rwsi, "Low"
    return rwsi, "Unstressed"
def get_climate_change_vulnerability(bioregion):
    """Qualitative climate-impact index plus the raw mean score."""
    impact_raster = RasterDataset.objects.get(name="climate_impact")
    impact = zonal_stats(bioregion.output_geom, impact_raster)
    if not impact.avg:
        return default_value
    score = impact.avg
    # Guard-clause ladder over ascending upper bounds.
    if score < -12:
        return ("Low", score)
    if score < -4:
        return ("Moderately Low", score)
    if score < 4:
        return ("Moderate", score)
    if score < 12:
        return ("Moderately High", score)
    return ("High", score)
def get_agricultural_impacts(bioregion):
    """(description, fraction) pairs for agricultural-impact categories.

    Sorted by descending pixel fraction. Collapses to [default_value]
    when there is no land, no pixels, or more than 90% 'No Data'.
    """
    terra_geom = get_terra_geom(bioregion)
    if terra_geom.area == 0.0:
        return [default_value]
    ag_geom = RasterDataset.objects.get(name='ag_impacts')
    ag_stats = zonal_stats(terra_geom, ag_geom)
    if not ag_stats.pixels:
        return [default_value]
    total_pixels = float(ag_stats.pixels)
    impacts_dict = {}
    for cat in ag_stats.categories.all():
        impacts_dict[cat.category] = cat.count
    perc_impacts = [(count / total_pixels, category)
                    for category, count in impacts_dict.items()]
    perc_impacts.sort(reverse=True)
    descriptions = {
        1: 'a Major Increase in Production',
        # BUG FIX (user-facing grammar): was 'an General Increase ...'.
        2: 'a General Increase in Production',
        3: 'a Slight Increase in Production',
        4: 'a Slight Decrease in Production',
        5: 'a General Decrease in Production',
        6: 'a Major Decrease in Production',
    }
    final_impacts = []
    for fraction, category in perc_impacts:
        if category == 0:
            final_impacts.append(('No Data', fraction))
            if fraction > .9:
                # Mostly missing data: nothing meaningful to report.
                final_impacts = [default_value]
                break
        elif category in descriptions:
            final_impacts.append((descriptions[category], fraction))
    return final_impacts
def get_marine_ecosystem_impacts(bioregion):
    """Qualitative human-impact rating over the bioregion's marine area."""
    oceanic_geom = get_oceanic_geom(bioregion)
    if oceanic_geom.area == 0:
        return 0
    raster = RasterDataset.objects.get(name='ocean_impact')
    stats = zonal_stats(oceanic_geom, raster)
    if not stats.avg:
        return default_value
    avg = round(stats.avg, 1)
    # Ascending upper bounds for each impact band.
    for bound, label in ((1.4, 'Very Low'), (4.95, 'Low'), (8.47, 'Medium'),
                         (12, 'Medium High'), (15.52, 'High')):
        if avg < bound:
            return label
    return 'Very High'
def get_marine_ecosystem_impacts(bioregion):
    """Human-impact class ("Very Low".."Very High") for the oceanic portion."""
    ocean_geom = get_oceanic_geom(bioregion)
    if ocean_geom.area == 0:
        return 0
    impact_raster = RasterDataset.objects.get(name="ocean_impact")
    impact_stats = zonal_stats(ocean_geom, impact_raster)
    if not impact_stats.avg:
        return default_value
    score = round(impact_stats.avg, 1)
    if score < 1.4:
        return "Very Low"
    if score < 4.95:
        return "Low"
    if score < 8.47:
        return "Medium"
    if score < 12:
        return "Medium High"
    if score < 15.52:
        return "High"
    return "Very High"
def get_agricultural_impacts(bioregion):
    """List of (description, fraction) agricultural-impact pairs.

    Fractions are of total pixels, sorted descending; returns
    [default_value] for landless bioregions, empty rasters, or >90%
    'No Data' coverage.
    """
    terra_geom = get_terra_geom(bioregion)
    if terra_geom.area == 0.0:
        return [default_value]
    ag_geom = RasterDataset.objects.get(name="ag_impacts")
    ag_stats = zonal_stats(terra_geom, ag_geom)
    if not ag_stats.pixels:
        return [default_value]
    total_pixels = float(ag_stats.pixels)
    counts = {}
    for cat in ag_stats.categories.all():
        counts[cat.category] = cat.count
    perc_impacts = [(n / total_pixels, c) for c, n in counts.items()]
    perc_impacts.sort(reverse=True)
    labels = {
        1: "a Major Increase in Production",
        # BUG FIX (user-facing grammar): was "an General Increase ...".
        2: "a General Increase in Production",
        3: "a Slight Increase in Production",
        4: "a Slight Decrease in Production",
        5: "a General Decrease in Production",
        6: "a Major Decrease in Production",
    }
    final_impacts = []
    for fraction, category in perc_impacts:
        if category == 0:
            final_impacts.append(("No Data", fraction))
            if fraction > 0.9:
                # Mostly missing data: report nothing meaningful.
                final_impacts = [default_value]
                break
        elif category in labels:
            final_impacts.append((labels[category], fraction))
    return final_impacts
def get_displaced_populations(bioregion):
    """Displaced-population pairs for the 1m, 3m, and 6m sea-rise scenarios.

    Each pair is (people, fraction_of_2005_population). Results come from
    the report cache when present; the previously triplicated
    cache/compute block is factored into one helper.
    """
    pop_geom = RasterDataset.objects.get(name='population_2005')
    pop_stats = zonal_stats(bioregion.output_geom, pop_geom)
    pop_05_sum = pop_stats.sum
    if pop_05_sum in [None, 0]:
        # No baseline population -> nothing can be displaced.
        return (0, 0), (0, 0), (0, 0)

    def _scenario(model, cache_key):
        # Serve from the report cache, or compute from the sea-rise
        # polygons that bbox-overlap the bioregion and cache the result.
        if report_cache_exists(bioregion, cache_key):
            return get_report_cache(bioregion, cache_key)
        sr_objects = model.objects.filter(
            geometry__bboverlaps=bioregion.output_geom)
        displaced = get_displaced(pop_geom, pop_stats, pop_05_sum,
                                  sr_objects, bioregion)
        create_report_cache(bioregion, {cache_key: displaced})
        return displaced

    displaced_1m = _scenario(SeaRise1m, 'displaced_people_1m')
    displaced_3m = _scenario(SeaRise3m, 'displaced_people_3m')
    displaced_6m = _scenario(SeaRise6m, 'displaced_people_6m')
    return displaced_1m, displaced_3m, displaced_6m
def get_urban_pop(pop_2005, bioregion):
    """Urban population of the bioregion and its fraction of *pop_2005*.

    Sums population-raster pixels within each intersecting UrbanExtent
    polygon; the pair is memoized in the report cache.
    """
    if report_cache_exists(bioregion, 'urban_population'):
        urban_pop, urban_perc = get_report_cache(bioregion, 'urban_population')
        return (urban_pop, urban_perc)
    else:
        pop_geom = RasterDataset.objects.get(name='population_2005')
        urban_objects = UrbanExtent.objects.filter(geometry__bboverlaps=bioregion.output_geom)
        urban_pop = 0
        for urban_object in urban_objects:
            urban_shape = urban_object.geometry
            # buffer(0) repairs invalid geometries before intersecting.
            if not urban_shape.valid:
                urban_shape = urban_shape.buffer(0)
            if urban_shape.intersects(bioregion.output_geom):
                urban_overlap = urban_shape.intersection(bioregion.output_geom)
                pop_stats = zonal_stats(urban_overlap, pop_geom)
                if pop_stats.sum:
                    urban_pop += pop_stats.sum
        if pop_2005 == 0:
            urban_perc = 0
        else:
            # NOTE(review): assumes float operands; with ints this would
            # truncate under Python 2 — confirm raster sum dtype.
            urban_perc = urban_pop / pop_2005
        create_report_cache(bioregion, dict(urban_population=(urban_pop, urban_perc)))
        return (urban_pop, urban_perc)
def stats_for_geom(request, raster_name):
    """Return JSON zonal statistics for *raster_name* over a client geometry.

    Requires a 'geom_txt' parameter (WKT or GeoJSON). Responds 404 for a
    missing/unparsable geometry or unknown raster name.
    """
    # Confirm that we have a valid polygon geometry
    if 'geom_txt' in request.REQUEST:
        geom_txt = str(request.REQUEST['geom_txt'])
    else:
        return HttpResponse("Must supply a geom_txt parameter", status=404)
    try:
        geom = fromstr(geom_txt)
    except Exception:  # narrowed from a bare except
        return HttpResponse("Must supply a parsable geom_txt parameter (wkt or json)", status=404)
    # Confirm a raster with that name exists
    try:
        raster = RasterDataset.objects.get(name=raster_name)
    except Exception:
        # BUG FIX: this message previously referenced an undefined `pk`,
        # raising NameError instead of returning the intended 404.
        return HttpResponse("No raster named %s" % raster_name, status=404)
    #TODO check if continuous
    zonal = zonal_stats(geom, raster)
    zonal.save()
    zqs = ZonalStatsCache.objects.filter(pk=zonal.pk)
    data = serializers.serialize(
        "json", zqs,
        fields=('avg', 'min', 'max', 'median', 'mode', 'stdev', 'nulls',
                'pixels', 'date_modified', 'raster'))
    return HttpResponse(data, mimetype='application/json')
def get_water_impacts(bioregion):
    """Precipitation-change (label, fraction) pairs, largest fraction first."""
    impacts_raster = RasterDataset.objects.get(name="water_impacts")
    impacts = zonal_stats(bioregion.output_geom, impacts_raster)
    if not impacts.pixels:
        return [default_value]
    total = float(impacts.pixels)
    per_category = {}
    for zc in impacts.categories.all():
        per_category[zc.category] = zc.count
    ranked = sorted(((n / total, c) for c, n in per_category.items()),
                    reverse=True)
    names = {
        0: "Less Precipitation",
        1: "No Significant Change in Precipitation",
        2: "More Precipitation",
    }
    # Categories outside 0-2 are skipped, matching the original branches.
    return [(names[c], frac) for frac, c in ranked if c in names]
def test_categories(self):
    """Per-category pixel counts must add up to the total pixel count."""
    zonal = zonal_stats(self.polygons[0], self.rast)
    total = sum(zc.count for zc in zonal.categories.all())
    self.assertEqual(zonal.pixels, total)
def get_infant_deaths(bioregion):
    """Total infant deaths over the bioregion (0 when the raster has no data).

    Sibling reports (e.g. get_underweight_children, get_projected_population)
    guard against a missing sum; without the guard, int(None) raised
    TypeError for bioregions outside the raster's coverage.
    """
    id_geom = RasterDataset.objects.get(name="infant_deaths")
    id_stats = zonal_stats(bioregion.output_geom, id_geom)
    if id_stats.sum:
        return int(id_stats.sum)
    return 0
def get_languages(bioregion):
    """(population, language name) tuples for languages in the bioregion.

    Sorted by descending population; a 'No Data' entry absorbs any
    shortfall versus the expected total population. Results are memoized
    in the report cache.
    """
    if report_cache_exists(bioregion, 'languages'):
        language_names = get_report_cache(bioregion, 'languages')
        return language_names
    else:
        pop_geom = RasterDataset.objects.get(name='population_2005')
        languages = Language.objects.filter(
            geometry__bboverlaps=bioregion.output_geom)
        language_dict = {}
        pop_total = 0
        for language in languages:
            #noticing issue on ninkasi and dionysus (not on local machine)
            #dionysus is using geos 3.1.0
            #local machine (which does not crash when intersecting) is using 3.2.2
            # select postgis_full_version(); gave me geos version
            #possible solutions could be to update geos (not really an option) or perhaps django has an update that catches that error...
            #the following error is output on dionysus when attempting intersection (after buffer(0)) on some problem geoemetries
            """ GEOS_NOTICE: Self-intersection at or near point 1.36725e+07 -289750 bufferOriginalPrecision failed (TopologyException: unable to assign hole to a shell), trying with reduced precision recomputing with precision scale factor = 10000 Scaler: offsetX,Y: 0,0 scaleFactor: 10000 ReScaler: offsetX,Y: 0,0 scaleFactor: 10000 recomputing with precision scale factor = 1000 Scaler: offsetX,Y: 0,0 scaleFactor: 1000 ReScaler: offsetX,Y: 0,0 scaleFactor: 1000 recomputing with precision scale factor = 100 Scaler: offsetX,Y: 0,0 scaleFactor: 100 ReScaler: offsetX,Y: 0,0 scaleFactor: 100 recomputing with precision scale factor = 10 Scaler: offsetX,Y: 0,0 scaleFactor: 10 python: ../../source/headers/geos/noding/SegmentString.h:175: void geos::noding::SegmentString::testInvariant() const: Assertion `pts->size() > 1' failed. 
            """
            try:
                if language.geometry.valid:
                    language_intersection = language.geometry.intersection(
                        bioregion.output_geom)
                else:
                    language_intersection = language.geometry.buffer(
                        0).intersection(bioregion.output_geom)
                area = language_intersection.area
            except:
                # Best-effort: GEOS can assert/raise on degenerate
                # geometries (see the captured log above); treat those as
                # zero overlap.
                area = 0
            if area > 0:
                if language.name_prop is None:
                    name = 'No Data'
                else:
                    #name = (language.nam_ansi, language.familyprop)
                    name = language.name_prop
                #area = geometry_area_in_display_units(language.geometry.intersection(bioregion.output_geom))
                if language_intersection.valid:
                    pop_stats = zonal_stats(language_intersection, pop_geom)
                else:
                    buffered_intersection = language_intersection.buffer(0)
                    pop_stats = zonal_stats(buffered_intersection, pop_geom)
                if pop_stats and pop_stats.sum:
                    pop = pop_stats.sum
                else:
                    pop = 0
                pop_total += pop
                if name in language_dict.keys():
                    language_dict[name] += pop
                else:
                    language_dict[name] = pop
        expected_pop = get_population(bioregion)
        if expected_pop > pop_total:
            # Attribute any shortfall vs the expected total to 'No Data'.
            language_dict['No Data'] = expected_pop - pop_total
        language_tuples = [(pop, name) for name, pop in language_dict.items()]
        language_tuples.sort(reverse=True)
        #language_names = [name for (pop, name) in language_tuples]
        language_names = language_tuples
        create_report_cache(bioregion, dict(languages=language_names))
        return language_names
def get_languages(bioregion):
    """Language (population, name) tuples for the bioregion, largest first.

    A 'No Data' entry absorbs any shortfall against the expected total
    population; the computed list is memoized in the report cache.
    """
    if report_cache_exists(bioregion, 'languages'):
        language_names = get_report_cache(bioregion, 'languages')
        return language_names
    else:
        pop_geom = RasterDataset.objects.get(name='population_2005')
        languages = Language.objects.filter(geometry__bboverlaps=bioregion.output_geom)
        language_dict = {}
        pop_total = 0
        for language in languages:
            #noticing issue on ninkasi and dionysus (not on local machine)
            #dionysus is using geos 3.1.0
            #local machine (which does not crash when intersecting) is using 3.2.2
            # select postgis_full_version(); gave me geos version
            #possible solutions could be to update geos (not really an option) or perhaps django has an update that catches that error...
            #the following error is output on dionysus when attempting intersection (after buffer(0)) on some problem geoemetries
            """ GEOS_NOTICE: Self-intersection at or near point 1.36725e+07 -289750 bufferOriginalPrecision failed (TopologyException: unable to assign hole to a shell), trying with reduced precision recomputing with precision scale factor = 10000 Scaler: offsetX,Y: 0,0 scaleFactor: 10000 ReScaler: offsetX,Y: 0,0 scaleFactor: 10000 recomputing with precision scale factor = 1000 Scaler: offsetX,Y: 0,0 scaleFactor: 1000 ReScaler: offsetX,Y: 0,0 scaleFactor: 1000 recomputing with precision scale factor = 100 Scaler: offsetX,Y: 0,0 scaleFactor: 100 ReScaler: offsetX,Y: 0,0 scaleFactor: 100 recomputing with precision scale factor = 10 Scaler: offsetX,Y: 0,0 scaleFactor: 10 python: ../../source/headers/geos/noding/SegmentString.h:175: void geos::noding::SegmentString::testInvariant() const: Assertion `pts->size() > 1' failed. 
            """
            try:
                if language.geometry.valid:
                    language_intersection = language.geometry.intersection(bioregion.output_geom)
                else:
                    language_intersection = language.geometry.buffer(0).intersection(bioregion.output_geom)
                area = language_intersection.area
            except:
                # Best-effort: GEOS may assert/raise on degenerate
                # geometries (see log above); treat them as zero overlap.
                area = 0
            if area > 0:
                if language.name_prop is None:
                    name = 'No Data'
                else:
                    #name = (language.nam_ansi, language.familyprop)
                    name = language.name_prop
                #area = geometry_area_in_display_units(language.geometry.intersection(bioregion.output_geom))
                if language_intersection.valid:
                    pop_stats = zonal_stats(language_intersection, pop_geom)
                else:
                    buffered_intersection = language_intersection.buffer(0)
                    pop_stats = zonal_stats(buffered_intersection, pop_geom)
                if pop_stats and pop_stats.sum:
                    pop = pop_stats.sum
                else:
                    pop = 0
                pop_total += pop
                if name in language_dict.keys():
                    language_dict[name] += pop
                else:
                    language_dict[name] = pop
        expected_pop = get_population(bioregion)
        if expected_pop > pop_total:
            # Any shortfall against the expected total becomes 'No Data'.
            language_dict['No Data'] = expected_pop - pop_total
        language_tuples = [(pop, name) for name, pop in language_dict.items()]
        language_tuples.sort(reverse=True)
        #language_names = [name for (pop, name) in language_tuples]
        language_names = language_tuples
        create_report_cache(bioregion, dict(languages=language_names))
        return language_names