def __load_geojson__(self, crs: int):
    """Load this layer's GeoJSON file and create one VectorGeometry per feature.

    Coordinates are read as EPSG:4326 (the GeoJSON default) and transformed
    to ``crs`` before being stored.

    :param crs: target spatial reference system id (EPSG code).
    """
    # Load geojson. `with` guarantees the descriptor is closed; the
    # original opened the file and never closed it (fd leak).
    with open(self.vectorlayer_file.path, 'r') as fd:
        geojson = json.load(fd)
    src_crs = 4326  # GeoJSON coordinates are WGS84 by specification
    # The source->target transform does not depend on the feature, so
    # build it once instead of once per feature.
    gcoord = SpatialReference(src_crs)
    mycoord = SpatialReference(crs)
    trans = CoordTransform(gcoord, mycoord)
    for item in geojson['features']:
        # get data
        _properties = item['properties']
        coords = item['geometry']['coordinates']
        # create multipolygon
        # NOTE(review): only each part's first ring (coord[0]) is used, so
        # interior rings (holes) are dropped — confirm this is intended.
        polygons = [Polygon(coord[0]) for coord in coords]
        mpolygon = MultiPolygon(polygons)
        # Transform geometry
        mpolygon.transform(trans)
        # Create vector geometry
        VectorGeometry.objects.create(vectorgeometry_layer=self, geom=mpolygon)
def _make_square_polygon(self, x, y):
    """Build a 0.1-degree square anchored at (x, y) in WGS84 and return it
    as a MultiPolygon reprojected to web mercator (EPSG:3857)."""
    d = 0.1
    corners = ((x, y), (x, y + d), (x + d, y + d), (x + d, y), (x, y))
    square = MultiPolygon(Polygon(corners, srid=4326), srid=4326)
    square.transform(3857)
    return square
def _make_square_polygon(self, x, y):
    """Return a MultiPolygon square of side 0.1 degrees with (x, y) as its
    lower-left corner, defined in EPSG:4326 and transformed to EPSG:3857."""
    side = 0.1
    ring = (
        (x, y),
        (x, y + side),
        (x + side, y + side),
        (x + side, y),
        (x, y),
    )
    result = MultiPolygon(Polygon(ring, srid=4326), srid=4326)
    result.transform(3857)
    return result
def create_country(coords, properties, name, polygon_type):
    """Build an (unsaved) Country from GeoJSON-style coordinates.

    `polygon_type` selects how `coords` is interpreted: 'Polygon' means a
    list of rings; anything else is treated as MultiPolygon coordinates,
    where each part's first entry is its outer ring.
    """
    if polygon_type == 'Polygon':
        rings = [coord_set for coord_set in coords]
    else:
        rings = [coord_set[0] for coord_set in coords]
    multipolygon_latlng = MultiPolygon([Polygon(r) for r in rings], srid=4326)
    # Keep the lat/lng original and derive the web-mercator copy from it.
    multipolygon_webmercator = multipolygon_latlng.transform(3857, clone=True)
    return Country(
        name=name,
        border_latlng=multipolygon_latlng,
        properties=properties,
        border_webmercator=multipolygon_webmercator,
    )
def get_data_bounds_polygon(self):
    """Return a bounding-box Polygon (srid METERS_SRID) enclosing every
    geometry found in self.data, a GeoJSON string."""
    parsed_geojson = json.loads(self.data)

    def collect_extent_polys(node):
        # Recursively gather one bbox Polygon per geometry/feature.
        found = []
        if "type" not in node:
            return found
        node_type = node["type"]
        if node_type == "FeatureCollection":
            for feature in node["features"]:
                found.extend(collect_extent_polys(feature))
        elif node_type == "Feature":
            # Parse the embedded geometry and keep only its extent.
            geom = GEOSGeometry(json.dumps(node["geometry"]))
            found.append(Polygon.from_bbox(geom.extent))
        elif node_type in ("Polygon", "LineString", "Point"):
            # Bare geometry object.
            geom = GEOSGeometry(json.dumps(node))
            found.append(Polygon.from_bbox(geom.extent))
        return found

    geojson_extent_polygons = []
    if isinstance(parsed_geojson, list):
        for node in parsed_geojson:
            geojson_extent_polygons.extend(collect_extent_polys(node))
    elif "type" in parsed_geojson and parsed_geojson["type"] in (
            "FeatureCollection", "Feature", "Polygon", "LineString", "Point"):
        geojson_extent_polygons.extend(collect_extent_polys(parsed_geojson))

    # Merge the per-geometry bboxes, reproject to meters, and take the
    # overall bbox of the result.
    mpoly = MultiPolygon(geojson_extent_polygons)
    mpoly.srid = WGS84_SRID  # Expected...
    mpoly.transform(METERS_SRID)
    bounds = Polygon.from_bbox(mpoly.extent)
    bounds.srid = METERS_SRID
    return bounds
def test_spatial_fields(self,):
    # Fixture bounding box (in africa); expected values mirror what the
    # job creation path should have stored.
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = MultiPolygon(GEOSGeometry(bbox, srid=4326), srid=4326)
    the_geog = MultiPolygon(GEOSGeometry(bbox), srid=4326)
    the_geom_webmercator = the_geom.transform(ct=3857, clone=True)
    job = Job.objects.all()[0]
    self.assertIsNotNone(job)
    # Each stored spatial field must round-trip unchanged.
    self.assertEqual(the_geom, job.the_geom)
    self.assertEqual(the_geog, job.the_geog)
    self.assertEqual(the_geom_webmercator, job.the_geom_webmercator)
def test_spatial_fields(self,):
    # Fixture bounding box (in africa). Note the_geog is deliberately
    # constructed without an explicit srid here.
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = MultiPolygon(GEOSGeometry(bbox, srid=4326), srid=4326)
    the_geog = MultiPolygon(GEOSGeometry(bbox))
    the_geom_webmercator = the_geom.transform(ct=3857, clone=True)
    job = Job.objects.all()[0]
    self.assertIsNotNone(job)
    expected_and_actual = (
        (the_geom, job.the_geom),
        (the_geog, job.the_geog),
        (the_geom_webmercator, job.the_geom_webmercator),
    )
    for expected, actual in expected_and_actual:
        self.assertEqual(expected, actual)
def remove_sliver_polygons(geom, srid=WEB_MERCATOR_SRID, minarea_sqm=10): """Routine to remove sliver polygons from a multipolygon object""" # Transform to projected coordinate system clean_geom = geom.transform(srid, clone=True) # Split into components components = [] while clean_geom: components.append(clean_geom.pop()) # Filter components by size big_components = [x for x in components if x.area > minarea_sqm] # If small area was found, update geom with larger components if (len(big_components) < len(components)): geom = MultiPolygon(big_components, srid=srid) geom.transform(WEB_MERCATOR_SRID) # Make sure its a proper multi polygon geom = convert_to_multipolygon(geom) # Return cleaned geometry return geom
def remove_sliver_polygons(geom, srid=WEB_MERCATOR_SRID, minarea_sqm=10): """Routine to remove sliver polygons from a multipolygon object""" # Transform to projected coordinate system clean_geom = geom.transform(srid, clone=True) # Split into components components = [] while clean_geom: components.append(clean_geom.pop()) # Filter components by size big_components = [x for x in components if x.area > minarea_sqm] # If small area was found, update geom with larger components if(len(big_components) < len(components)): geom = MultiPolygon(big_components, srid=srid) geom.transform(WEB_MERCATOR_SRID) # Make sure its a proper multi polygon geom = convert_to_multipolygon(geom) # Return cleaned geometry return geom
def setUp(self):
    self.loader = SIAStadsdeelLoader('sia-stadsdeel')
    # "sia-stadsdeel" type Areas are derived from "stadsdeel" type Areas,
    # here we create "stadsdeel" AreaType and provide some Area instances
    # using that type
    stadsdeel_area_type = AreaType.objects.create(
        name='Stadsdeel',
        code='stadsdeel',
        description='Stadsdeel voor tests.',
    )
    cbs_gemeente_type = AreaType.objects.create(
        name='Stadsdeel',
        code='cbs-gemeente-2019',
        description='CBS gemeentegrens voor tests.',
    )

    # Three 10x10 meter city districts stacked on top of each other at
    # City Hall. (Code stolen from the Gebieden API tests.)
    x, y = RD_STADHUIS
    width, height = 10, 10
    self.bbox_1 = [x, y, x + width, y + height]
    self.bbox_2 = [x, y + height, x + width, y + 2 * height]
    self.bbox_3 = [x, y + 2 * height, x + width, y + 3 * height]

    def to_wgs84(bbox):
        # Bounding boxes are specified in RD (EPSG:28992); Areas store WGS84.
        geometry = MultiPolygon([Polygon.from_bbox(bbox)])
        geometry.srid = 28992
        geometry.transform(ct=4326)
        return geometry

    Area.objects.create(
        name='Zuid',
        code='SOME_CBS_CODE_1',
        _type=stadsdeel_area_type,
        geometry=to_wgs84(self.bbox_1),
    )
    Area.objects.create(
        name='Noord',
        code='SOME_CBS_CODE_2',
        _type=stadsdeel_area_type,
        geometry=to_wgs84(self.bbox_2),
    )
    Area.objects.create(
        name='Weesp',
        code='SOME_CBS_CODE_3',
        _type=cbs_gemeente_type,
        geometry=to_wgs84(self.bbox_3),
    )

    self.assertEqual(Area.objects.count(), 3)
    self.assertEqual(Area.objects.filter(_type__code='stadsdeel').count(), 2)
def _elaborate_geo_filter(self, data):
    """Build a Q() restricting `geo_field_name` to rows within the union of
    the posted polygons.

    Expects `data` to carry 'geo_field_name', 'polygons_srid' and
    'polygons' (geometry strings parseable by GEOSGeometry).  Returns an
    empty Q() when anything is missing or the GEOS classes are not usable.
    """
    # Defensive check that the imported names really are GEOS geometry classes.
    if issubclass(GEOSGeometry, django.contrib.gis.geos.GEOSGeometry) and \
            issubclass(MultiPolygon, django.contrib.gis.geos.GEOSGeometry):
        if 'geo_field_name' in data and 'polygons_srid' in data and 'polygons' in data:
            geo_field_name = data.get('geo_field_name')
            polygons = []
            for geo_text in data['polygons']:
                polygons.append(GEOSGeometry(geo_text))
            geo = MultiPolygon(polygons)
            try:
                geo.srid = int(data.get('polygons_srid'))
            except ValueError:
                # NOTE(review): on a bad srid this only logs and continues
                # with geo.srid unset, so the transform below may misbehave
                # — confirm intended.
                print(_('ERROR: geo srid is incorrect'))
            geo_field = self.model._meta.get_field(geo_field_name)
            # buffer(0) repairs the geometry in case the union of the
            # multipolygon turns out to be invalid.
            _geo = geo.transform(geo_field.srid, clone=True).buffer(0)
            qset = Q(**{geo_field_name + '__within': _geo})
        else:
            qset = Q()
        return qset
    return Q()
class LeaseBlockSelection(Analysis):
    """Analysis over a user-selected set of lease blocks.

    Stores the comma-separated block ids and the dissolved (unioned)
    geometry of the selection, and serializes summary statistics for the
    client-side report popup.
    """

    # Comma-separated LeaseBlock.prot_numb values making up the selection.
    leaseblock_ids = models.TextField()
    description = models.TextField(null=True, blank=True)
    # Dissolved geometry of all selected blocks.
    geometry_actual = models.MultiPolygonField(srid=settings.GEOMETRY_DB_SRID,
                                               null=True, blank=True,
                                               verbose_name="Lease Block Selection Geometry")

    def serialize_attributes(self):
        """Summarize block statistics for the client-side report."""
        blocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))

        def mean(data):
            # Arithmetic mean; float() guards against Python 2 int division.
            return sum(data) / float(len(data))

        if not blocks.exists():
            # NOTE(review): 'attributes' here is a single dict while the
            # normal path returns a list of dicts — confirm the client
            # handles both shapes.
            return {'event': 'click',
                    'attributes': {'title': 'Number of blocks', 'data': 0},
                    'report_values': {}}
        report_values = {
            'wind-speed': {
                # Offsets widen the range by the half-bin used for wind speed.
                'min': self.reduce(min, [b.min_wind_speed_rev for b in blocks], digits=3, offset=-0.125),
                'max': self.reduce(max, [b.max_wind_speed_rev for b in blocks], digits=3, offset=0.125),
                'avg': self.reduce(mean, [b.avg_wind_speed for b in blocks], digits=3),
                'selection_id': self.uid
            },
            'distance-to-substation': {
                'min': self.reduce(min, [b.substation_min_distance for b in blocks], digits=0),
                'max': self.reduce(max, [b.substation_max_distance for b in blocks], digits=0),
                'avg': self.reduce(mean, [b.substation_mean_distance for b in blocks], digits=1),
                'selection_id': self.uid
            },
            'distance-to-awc': {
                'min': self.reduce(min, [b.awc_min_distance for b in blocks], digits=0),
                'max': self.reduce(max, [b.awc_max_distance for b in blocks], digits=0),
                'avg': self.reduce(mean, [b.awc_avg_distance for b in blocks], digits=1),
                'selection_id': self.uid
            },
            'distance-to-shipping': {
                'min': self.reduce(min, [b.tsz_min_distance for b in blocks], digits=0),
                'max': self.reduce(max, [b.tsz_max_distance for b in blocks], digits=0),
                'avg': self.reduce(mean, [b.tsz_mean_distance for b in blocks], digits=1),
                'selection_id': self.uid
            },
            'distance-to-shore': {
                'min': self.reduce(min, [b.min_distance for b in blocks], digits=0),
                'max': self.reduce(max, [b.max_distance for b in blocks], digits=0),
                'avg': self.reduce(mean, [b.avg_distance for b in blocks], digits=1),
                'selection_id': self.uid
            },
            'depth': {
                # note: accounting for the issue in which max_depth
                # is actually a lesser depth than min_depth
                # NOTE(review): depth values are computed from *_distance
                # fields here, while the comment (and the other
                # implementation of this class) uses *_depth fields —
                # confirm the field choice.
                'min': -1 * self.reduce(max, [b.max_distance for b in blocks], digits=0, handle_none=0),
                'max': -1 * self.reduce(min, [b.min_distance for b in blocks], digits=0, handle_none=0),
                'avg': -1 * self.reduce(mean, [b.avg_distance for b in blocks], digits=0, handle_none=0),
                'selection_id': self.uid
            }}
        # (title, preformatted value) pairs shown in the popup, in order.
        attrs = (
            ('Average Wind Speed Range', '%(min)s to %(max)s m/s' % report_values['wind-speed']),
            ('Average Wind Speed', '%(avg)s m/s' % report_values['wind-speed']),
            ('Distance to Coastal Substation', '%(min)s to %(max)s miles' % report_values['distance-to-substation']),
            ('Average Distance to Coastal Substation', '%(avg)s miles' % report_values['distance-to-substation']),
            ('Distance to Proposed AWC Hub', '%(min)s to %(max)s miles' % report_values['distance-to-awc']),
            ('Average Distance to Proposed AWC Hub', '%(avg)s miles' % report_values['distance-to-awc']),
            ('Distance to Ship Routing Measures', '%(min)s to %(max)s miles' % report_values['distance-to-shipping']),
            ('Average Distance to Ship Routing Measures', '%(avg)s miles' % report_values['distance-to-shipping']),
            ('Distance to Shore', '%(min)s to %(max)s miles' % report_values['distance-to-shore']),
            ('Average Distance to Shore', '%(avg)s miles' % report_values['distance-to-shore']),
            ('Depth', '%(min)s to %(max)s meters' % report_values['depth']),
            ('Average Depth', '%(avg)s meters' % report_values['depth']),
            ('Number of blocks', self.leaseblock_ids.count(',') + 1)
        )
        attributes = []
        for t, d in attrs:
            attributes.append({'title': t, 'data': d})
        return {'event': 'click', 'attributes': attributes, 'report_values': report_values}

    @staticmethod
    def reduce(func, data, digits=None, filter_null=True, handle_none='Unknown', offset=None):
        """LeaseBlock's custom reduce.

        Why not the built-in reduce()?  This one handles rounding, null
        values, and practical defaults.

        Input:
            func : function that aggregates data to a single value
            data : list of values

        Returns:
            a single value or a sensible default with some presentation
            logic intermingled

        In the case of `None` in your list, you can either filter them out
        with `filter_null` (default) or you can bail and use `handle_none`
        which either raises an exception or returns a default "null" value.
        Rounding and offsetting by constant values are also handled for
        backwards compatibility.
        """
        if filter_null:
            # Filter the incoming data to remove "nulls":
            # remove anything that is not a number.
            # (`long` exists because this codebase targets Python 2.)
            data = [x for x in data if isinstance(x, (int, long, float, complex))]
        # Deal with any remaining None values
        if not data or None in data:
            if isinstance(handle_none, Exception):
                # We raise the exception to be handled upstream
                raise handle_none
            else:
                # bail and return the `handle_none` object
                # used for presentation or upstream logic ("Unknown", 0 or None)
                return handle_none
        # Rounding and offsetting
        agg = func(data)
        if offset:
            agg = agg + offset
        if isinstance(digits, int):
            agg = round(agg, digits)
        return agg

    def run(self):
        """Dissolve the selected blocks into geometry_actual. Returns True."""
        leaseblocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))
        if not leaseblocks.exists():
            # We can't return False, because we'll get a collection without
            # any lease blocks, which doesn't work on the client side.
            # Throw an exception instead.
            # TODO: Make the client handle the "selection didn't work" case.
            # This is most likely because there are no lease blocks in the db.
            raise Exception("No lease blocks available with the current selection.")
        dissolved_geom = leaseblocks.aggregate(Union('geometry'))
        if dissolved_geom:
            dissolved_geom = dissolved_geom['geometry__union']
        else:
            raise Exception("No lease blocks available with the current filters.")
        if type(dissolved_geom) == MultiPolygon:
            self.geometry_actual = dissolved_geom
        else:
            # Wrap single-Polygon unions so the field always stores a MultiPolygon.
            self.geometry_actual = MultiPolygon(dissolved_geom, srid=dissolved_geom.srid)
        return True

    def geojson(self, srid):
        """Return a GeoJSON feature for the selection, reprojected to `srid`."""
        props = get_properties_json(self)
        props['absolute_url'] = self.get_absolute_url()
        json_geom = self.geometry_actual.transform(srid, clone=True).json
        return get_feature_json(json_geom, json.dumps(props))

    class Options:
        verbose_name = 'Lease Block Selection'
        form = 'scenarios.forms.LeaseBlockSelectionForm'
        form_template = 'selection/form.html'
class LeaseBlockSelection(Analysis):
    """Analysis over a user-selected set of lease blocks (older variant
    with per-statistic getter methods)."""

    #input_a = models.IntegerField()
    #input_b = models.IntegerField()
    #output_sum = models.IntegerField(blank=True, null=True)
    # Comma-separated LeaseBlock.prot_numb values making up the selection.
    leaseblock_ids = models.TextField()
    description = models.TextField(null=True, blank=True)
    #leaseblocks = models.ManyToManyField("LeaseBlock", null=True, blank=True)
    # Dissolved geometry of all selected blocks.
    geometry_actual = models.MultiPolygonField(srid=settings.GEOMETRY_DB_SRID,
                                               null=True, blank=True,
                                               verbose_name="Lease Block Selection Geometry")

    @property
    def serialize_attributes(self):
        """Build the attribute list and report values for the client popup."""
        from general.utils import format
        attributes = []
        report_values = {}
        leaseblocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))
        if (len(leaseblocks) > 0):
            #get wind speed range
            # NOTE(review): bare excepts below silently map any failure to
            # 'Unknown' — confirm this best-effort behavior is intended.
            try:
                min_wind_speed = format(self.get_min_wind_speed(leaseblocks),3)
                max_wind_speed = format(self.get_max_wind_speed(leaseblocks),3)
                wind_speed_range = '%s to %s m/s' %(min_wind_speed, max_wind_speed)
            except:
                min_wind_speed = 'Unknown'
                max_wind_speed = 'Unknown'
                wind_speed_range = 'Unknown'
            attributes.append({'title': 'Average Wind Speed Range', 'data': wind_speed_range})
            try:
                avg_wind_speed = format(self.get_avg_wind_speed(leaseblocks),3)
                avg_wind_speed_output = '%s m/s' %avg_wind_speed
            except:
                avg_wind_speed = 'Unknown'
                avg_wind_speed_output = 'Unknown'
            attributes.append({'title': 'Average Wind Speed', 'data': avg_wind_speed_output})
            report_values['wind-speed'] = {'min': min_wind_speed, 'max': max_wind_speed, 'avg': avg_wind_speed, 'selection_id': self.uid}
            #get distance to coastal substation
            min_distance_to_substation = format(self.get_min_distance_to_substation(leaseblocks), 0)
            max_distance_to_substation = format(self.get_max_distance_to_substation(leaseblocks), 0)
            distance_to_substation_range = '%s to %s miles' %(min_distance_to_substation, max_distance_to_substation)
            attributes.append({'title': 'Distance to Coastal Substation', 'data': distance_to_substation_range})
            avg_distance_to_substation = format(self.get_avg_distance_to_substation(leaseblocks), 1)
            avg_distance_to_substation_output = '%s miles' %avg_distance_to_substation
            attributes.append({'title': 'Average Distance to Coastal Substation', 'data': avg_distance_to_substation_output})
            report_values['distance-to-substation'] = {'min': min_distance_to_substation, 'max': max_distance_to_substation, 'avg': avg_distance_to_substation, 'selection_id': self.uid}
            #get distance to awc range
            min_distance_to_awc = format(self.get_min_distance_to_awc(leaseblocks), 0)
            max_distance_to_awc = format(self.get_max_distance_to_awc(leaseblocks), 0)
            distance_to_awc_range = '%s to %s miles' %(min_distance_to_awc, max_distance_to_awc)
            attributes.append({'title': 'Distance to Proposed AWC Hub', 'data': distance_to_awc_range})
            avg_distance_to_awc = format(self.get_avg_distance_to_awc(leaseblocks), 1)
            avg_distance_to_awc_output = '%s miles' %avg_distance_to_awc
            attributes.append({'title': 'Average Distance to Proposed AWC Hub', 'data': avg_distance_to_awc_output})
            report_values['distance-to-awc'] = {'min': min_distance_to_awc, 'max': max_distance_to_awc, 'avg': avg_distance_to_awc, 'selection_id': self.uid}
            #get distance to shipping lanes
            min_distance_to_shipping = format(self.get_min_distance_to_shipping(leaseblocks), 0)
            max_distance_to_shipping = format(self.get_max_distance_to_shipping(leaseblocks), 0)
            miles_to_shipping = '%s to %s miles' %(min_distance_to_shipping, max_distance_to_shipping)
            attributes.append({'title': 'Distance to Shipping Lanes', 'data': miles_to_shipping})
            avg_distance_to_shipping = format(self.get_avg_distance_to_shipping(leaseblocks),1)
            avg_distance_to_shipping_output = '%s miles' %avg_distance_to_shipping
            attributes.append({'title': 'Average Distance to Shipping Lanes', 'data': avg_distance_to_shipping_output})
            report_values['distance-to-shipping'] = {'min': min_distance_to_shipping, 'max': max_distance_to_shipping, 'avg': avg_distance_to_shipping, 'selection_id': self.uid}
            #get distance to shore range
            min_distance = format(self.get_min_distance(leaseblocks), 0)
            max_distance = format(self.get_max_distance(leaseblocks), 0)
            distance_to_shore = '%s to %s miles' %(min_distance, max_distance)
            attributes.append({'title': 'Distance to Shore', 'data': distance_to_shore})
            avg_distance = format(self.get_avg_distance(leaseblocks),1)
            avg_distance_output = '%s miles' %avg_distance
            attributes.append({'title': 'Average Distance to Shore', 'data': avg_distance_output})
            report_values['distance-to-shore'] = {'min': min_distance, 'max': max_distance, 'avg': avg_distance, 'selection_id': self.uid}
            #get depth range
            min_depth = format(self.get_min_depth(leaseblocks), 0)
            max_depth = format(self.get_max_depth(leaseblocks), 0)
            depth_range = '%s to %s meters' %(min_depth, max_depth)
            # A zero depth means the value was unavailable upstream.
            if min_depth == 0 or max_depth == 0:
                depth_range = 'Unknown'
            attributes.append({'title': 'Depth', 'data': depth_range})
            avg_depth = format(self.get_avg_depth(leaseblocks), 0)
            avg_depth_output = '%s meters' %avg_depth
            if avg_depth == 0:
                avg_depth_output = 'Unknown'
            attributes.append({'title': 'Average Depth', 'data': avg_depth_output})
            report_values['depth'] = {'min': min_depth, 'max': max_depth, 'avg': avg_depth, 'selection_id': self.uid}
            '''
            if self.input_filter_ais_density:
                attributes.append({'title': 'Excluding Areas with High Ship Traffic', 'data': ''})
            '''
            attributes.append({'title': 'Number of Leaseblocks', 'data': self.leaseblock_ids.count(',')+1})
        else:
            attributes.append({'title': 'Number of Leaseblocks', 'data': 0})
        return {
            'event': 'click',
            'attributes': attributes,
            'report_values': report_values
        }

    def get_min_wind_speed(self, leaseblocks):
        # Minimum of min_wind_speed_rev across blocks, widened by a 0.125 half-bin.
        min_wind_speed = leaseblocks[0].min_wind_speed_rev
        for lb in leaseblocks:
            if lb.min_wind_speed_rev < min_wind_speed:
                min_wind_speed = lb.min_wind_speed_rev
        return min_wind_speed - .125

    def get_max_wind_speed(self, leaseblocks):
        # Maximum of max_wind_speed_rev across blocks, widened by a 0.125 half-bin.
        max_wind_speed = leaseblocks[0].max_wind_speed_rev
        for lb in leaseblocks:
            if lb.max_wind_speed_rev > max_wind_speed:
                max_wind_speed = lb.max_wind_speed_rev
        return max_wind_speed + .125

    def get_avg_wind_speed(self, leaseblocks):
        # Mean of each block's (min_rev, max_rev) pair; 0 when no data.
        total = 0
        for lb in leaseblocks:
            total += lb.min_wind_speed_rev
            total += lb.max_wind_speed_rev
        if total > 0:
            return total / (len(leaseblocks) * 2)
        else:
            return 0

    def get_min_distance(self, leaseblocks):
        # Smallest min_distance (distance to shore) across blocks.
        min_distance = leaseblocks[0].min_distance
        for lb in leaseblocks:
            if lb.min_distance < min_distance:
                min_distance = lb.min_distance
        return min_distance

    def get_max_distance(self, leaseblocks):
        # Largest max_distance (distance to shore) across blocks.
        max_distance = leaseblocks[0].max_distance
        for lb in leaseblocks:
            if lb.max_distance > max_distance:
                max_distance = lb.max_distance
        return max_distance

    def get_avg_distance(self, leaseblocks):
        # Mean of avg_distance across blocks; 0 when no (positive) data.
        total = 0
        for lb in leaseblocks:
            total += lb.avg_distance
        if total > 0:
            return total / (len(leaseblocks))
        else:
            return 0

    # note: accounting for the issue in which min_depth is actually a greater depth than max_depth
    def get_max_depth(self, leaseblocks):
        min_depth = leaseblocks[0].min_depth
        for lb in leaseblocks:
            if lb.min_depth < min_depth:
                min_depth = lb.min_depth
        return -min_depth

    # note: accounting for the issue in which max_depth is actually a lesser depth than min_depth
    def get_min_depth(self, leaseblocks):
        max_depth = leaseblocks[0].max_depth
        for lb in leaseblocks:
            if lb.max_depth > max_depth:
                max_depth = lb.max_depth
        return -max_depth

    def get_avg_depth(self, leaseblocks):
        # Mean of avg_depth across blocks, sign-flipped like min/max above.
        total = 0
        for lb in leaseblocks:
            total += lb.avg_depth
        if total != 0:
            avg = -total / (len(leaseblocks))
            return avg
        else:
            return 0

    def get_min_distance_to_substation(self, leaseblocks):
        substation_min_distance = leaseblocks[0].substation_min_distance
        for lb in leaseblocks:
            if lb.substation_min_distance < substation_min_distance:
                substation_min_distance = lb.substation_min_distance
        return substation_min_distance

    def get_max_distance_to_substation(self, leaseblocks):
        substation_max_distance = leaseblocks[0].substation_max_distance
        for lb in leaseblocks:
            if lb.substation_max_distance > substation_max_distance:
                substation_max_distance = lb.substation_max_distance
        return substation_max_distance

    def get_avg_distance_to_substation(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.substation_mean_distance
        if total != 0:
            avg = total / len(leaseblocks)
            return avg
        else:
            return 0

    def get_min_distance_to_awc(self, leaseblocks):
        awc_min_distance = leaseblocks[0].awc_min_distance
        for lb in leaseblocks:
            if lb.awc_min_distance < awc_min_distance:
                awc_min_distance = lb.awc_min_distance
        return awc_min_distance

    def get_max_distance_to_awc(self, leaseblocks):
        awc_max_distance = leaseblocks[0].awc_max_distance
        for lb in leaseblocks:
            if lb.awc_max_distance > awc_max_distance:
                awc_max_distance = lb.awc_max_distance
        return awc_max_distance

    def get_avg_distance_to_awc(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.awc_avg_distance
        if total != 0:
            avg = total / len(leaseblocks)
            return avg
        else:
            return 0

    def get_min_distance_to_shipping(self, leaseblocks):
        tsz_min_distance = leaseblocks[0].tsz_min_distance
        for lb in leaseblocks:
            if lb.tsz_min_distance < tsz_min_distance:
                tsz_min_distance = lb.tsz_min_distance
        return tsz_min_distance

    def get_max_distance_to_shipping(self, leaseblocks):
        tsz_max_distance = leaseblocks[0].tsz_max_distance
        for lb in leaseblocks:
            if lb.tsz_max_distance > tsz_max_distance:
                tsz_max_distance = lb.tsz_max_distance
        return tsz_max_distance

    def get_avg_distance_to_shipping(self, leaseblocks):
        total = 0
        for lb in leaseblocks:
            total += lb.tsz_mean_distance
        if total != 0:
            return total / len(leaseblocks)
        else:
            return 0

    def run(self):
        """Union all selected block geometries into geometry_actual."""
        leaseblocks = LeaseBlock.objects.filter(prot_numb__in=self.leaseblock_ids.split(','))
        leaseblock_geoms = [lb.geometry for lb in leaseblocks]
        from django.contrib.gis.geos import MultiPolygon
        # NOTE(review): raises IndexError when the selection matches no
        # blocks, and the bare except silently skips any failed union —
        # confirm both behaviors are intended.
        dissolved_geom = leaseblock_geoms[0]
        for geom in leaseblock_geoms:
            try:
                dissolved_geom = dissolved_geom.union(geom)
            except:
                pass
        if type(dissolved_geom) == MultiPolygon:
            self.geometry_actual = dissolved_geom
        else:
            # Wrap single-Polygon unions so the field always stores a MultiPolygon.
            self.geometry_actual = MultiPolygon(dissolved_geom, srid=dissolved_geom.srid)
        return True

    def geojson(self, srid):
        """Return a GeoJSON feature for the selection, reprojected to `srid`."""
        props = get_properties_json(self)
        props['absolute_url'] = self.get_absolute_url()
        json_geom = self.geometry_actual.transform(srid, clone=True).json
        return get_feature_json(json_geom, json.dumps(props))

    class Options:
        verbose_name = 'Lease Block Selection'
        form = 'scenarios.forms.LeaseBlockSelectionForm'
        form_template = 'selection/form.html'
def map_tile(request, layer_slug, boundary_slug, tile_zoom, tile_x, tile_y, format): if not has_imaging_library: raise Http404("Cairo is not available.") layer = get_object_or_404(MapLayer, slug=layer_slug) # Load basic parameters. try: size = int(request.GET.get('size', '256' if format not in ('json', 'jsonp') else '64')) if size not in (64, 128, 256, 512, 1024): raise ValueError() srs = int(request.GET.get('srs', '3857')) except ValueError: raise Http404("Invalid parameter.") db_srs, out_srs = get_srs(srs) # Get the bounding box for the tile, in the SRS of the output. try: tile_x = int(tile_x) tile_y = int(tile_y) tile_zoom = int(tile_zoom) except ValueError: raise Http404("Invalid parameter.") # Guess the world size. We need to know the size of the world in # order to locate the bounding box of any viewport at zoom levels # greater than zero. if "radius" not in request.GET: p = Point( (-90.0, 0.0), srid=db_srs.srid ) p.transform(out_srs) world_left = p[0]*2 world_top = -world_left world_size = -p[0] * 4.0 else: p = Point((0,0), srid=out_srs.srid ) p.transform(db_srs) p1 = Point([p[0] + 1.0, p[1] + 1.0], srid=db_srs.srid) p.transform(out_srs) p1.transform(out_srs) world_size = math.sqrt(abs(p1[0]-p[0])*abs(p1[1]-p[1])) * float(request.GET.get('radius', '50')) world_left = p[0] - world_size/2.0 world_top = p[1] + world_size/2.0 tile_world_size = world_size / math.pow(2.0, tile_zoom) p1 = Point( (world_left + tile_world_size*tile_x, world_top - tile_world_size*tile_y) ) p2 = Point( (world_left + tile_world_size*(tile_x+1), world_top - tile_world_size*(tile_y+1)) ) bbox = Polygon( ((p1[0], p1[1]),(p2[0], p1[1]),(p2[0], p2[1]),(p1[0], p2[1]),(p1[0], p1[1])), srid=out_srs.srid ) # A function to convert world coordinates in the output SRS into # pixel coordinates. 
blon1, blat1, blon2, blat2 = bbox.extent bx = float(size)/(blon2-blon1) by = float(size)/(blat2-blat1) def viewport(coord): # Convert the world coordinates to image coordinates according to the bounding box # (in output SRS). return float(coord[0] - blon1)*bx, (size-1) - float(coord[1] - blat1)*by # Convert the bounding box to the database SRS. db_bbox = bbox.transform(db_srs, clone=True) # What is the width of a pixel in the database SRS? If it is smaller than # SIMPLE_SHAPE_TOLERANCE, load the simplified geometry from the database. shape_field = 'shape' pixel_width = (db_bbox.extent[2]-db_bbox.extent[0]) / size / 2 if pixel_width > boundaries_settings.SIMPLE_SHAPE_TOLERANCE: shape_field = 'simple_shape' # Query for any boundaries that intersect the bounding box. boundaries = Boundary.objects.filter(set=layer.boundaryset, shape__intersects=db_bbox)\ .values("id", "slug", "name", "label_point", shape_field) if boundary_slug: boundaries = boundaries.filter(slug=boundary_slug) boundary_id_map = dict( (b["id"], b) for b in boundaries ) if len(boundaries) == 0: if format == "svg": raise Http404("No boundaries here.") elif format in ("png", "gif"): # Send a 1x1 transparent image. Google is OK getting 404s for map tile images # but OpenLayers isn't. Maybe cache the image? im = cairo.ImageSurface(cairo.FORMAT_ARGB32, 1, 1) ctx = cairo.Context(im) buf = StringIO() im.write_to_png(buf) v = buf.getvalue() if format == "gif": v = convert_png_to_gif(v) r = HttpResponse(v, content_type='image/' + format) r["Content-Length"] = len(v) return r elif format == "json": # Send an empty "UTF-8 Grid"-like response. return HttpResponse('{"error":"nothing-here"}', content_type="application/json") elif format == "jsonp": # Send an empty "UTF-8 Grid"-like response. return HttpResponse(request.GET.get("callback", "callback") + '({"error":"nothing-here"})', content_type="text/javascript") # Query for layer style information and then set it on the boundary objects. 
styles = layer.boundaries.filter(boundary__in=boundary_id_map.keys()) for style in styles: boundary_id_map[style.boundary_id]["style"] = style # Create the image buffer. if format in ('png', 'gif'): im = cairo.ImageSurface(cairo.FORMAT_ARGB32, size, size) elif format == 'svg': buf = StringIO() im = cairo.SVGSurface(buf, size, size) elif format in ('json', 'jsonp'): # This is going to be a "UTF-8 Grid"-like response, but we generate that # info by first creating an actual image, with colors coded by index to # represent which boundary covers which pixels. im = cairo.ImageSurface(cairo.FORMAT_RGB24, size, size) # Color helpers. def get_rgba_component(c): return c if isinstance(c, float) else c/255.0 def get_rgba_tuple(clr, alpha=.25): # Colors are specified as tuples/lists with 3 (RGB) or 4 (RGBA) # components. Components that are float values must be in the # range 0-1, while all other values are in the range 0-255. # Because .gif does not support partial transparency, alpha values # are forced to 1. return (get_rgba_component(clr[0]), get_rgba_component(clr[1]), get_rgba_component(clr[2]), get_rgba_component(clr[3]) if len(clr) == 4 and format != 'gif' else (alpha if format != 'gif' else 1.0)) # Create the drawing surface. ctx = cairo.Context(im) ctx.select_font_face(maps_settings.MAP_LABEL_FONT, cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL) if format in ('json', 'jsonp'): # For the UTF-8 Grid response, turn off anti-aliasing since the color we draw to each pixel # is a code for what is there. ctx.set_antialias(cairo.ANTIALIAS_NONE) def max_extent(shape): a, b, c, d = shape.extent return max(c-a, d-b) # Transform the boundaries to output coordinates. draw_shapes = [] for bdry in boundaries: if not "style" in bdry: continue # Boundary had no corresponding MapLayerBoundary shape = bdry[shape_field] # Simplify to the detail that could be visible in the output. Although # simplification may be a little expensive, drawing a more complex # polygon is even worse. 
try: shape = shape.simplify(pixel_width, preserve_topology=True) except: # GEOSException pass # try drawing original # Make sure the results are all MultiPolygons for consistency. if shape.__class__.__name__ == 'Polygon': shape = MultiPolygon((shape,), srid=db_srs.srid) else: # Be sure to override SRS (for Google, see above). This code may # never execute? shape = MultiPolygon(list(shape), srid=db_srs.srid) # Is this shape too small to be visible? ext_dim = max_extent(shape) if ext_dim < pixel_width: continue # Convert the shape to the output SRS. shape.transform(out_srs) draw_shapes.append( (len(draw_shapes), bdry, shape, ext_dim) ) # Draw shading, for each linear ring of each polygon in the multipolygon. for i, bdry, shape, ext_dim in draw_shapes: if not bdry["style"].color and format not in ('json', 'jsonp'): continue for polygon in shape: for ring in polygon: # should just be one since no shape should have holes? color = bdry["style"].color if format in ('json', 'jsonp'): # We're returning a "UTF-8 Grid" indicating which feature is at # each pixel location on the grid. In order to compute the grid, # we draw to an image surface with a distinct color for each feature. # Then we convert the pixel data into the UTF-8 Grid format. ctx.set_source_rgb(*[ (((i+1)/(256**exp)) % 256)/255.0 for exp in xrange(3) ]) elif isinstance(color, (tuple, list)): # Specify a 3/4-tuple (or list) for a solid color. ctx.set_source_rgba(*get_rgba_tuple(color)) elif isinstance(color, dict): # Specify a dict of the form { "color1": (R,G,B), "color2": (R,G,B) } to # create a solid fill of color1 plus smaller stripes of color2. 
if color.get("color", None) != None: ctx.set_source_rgba(*get_rgba_tuple(color["color"])) elif color.get("color1", None) != None and color.get("color2", None) != None: pat = cairo.LinearGradient(0.0, 0.0, size, size) for x in xrange(0,size, 32): # divisor of the size so gradient ends at the end pat.add_color_stop_rgba(*([float(x)/size] + list(get_rgba_tuple(color["color1"], alpha=.3)))) pat.add_color_stop_rgba(*([float(x+28)/size] + list(get_rgba_tuple(color["color1"], alpha=.3)))) pat.add_color_stop_rgba(*([float(x+28)/size] + list(get_rgba_tuple(color["color2"], alpha=.4)))) pat.add_color_stop_rgba(*([float(x+32)/size] + list(get_rgba_tuple(color["color2"], alpha=.4)))) ctx.set_source(pat) else: continue # skip fill else: continue # Unknown color data structure. ctx.new_path() for pt in ring.coords: ctx.line_to(*viewport(pt)) ctx.fill() # Draw outlines, for each linear ring of each polygon in the multipolygon. for i, bdry, shape, ext_dim in draw_shapes: if format in ('json', 'jsonp'): continue if ext_dim < pixel_width * 3: continue # skip outlines if too small color = bdry["style"].color for polygon in shape: for ring in polygon: # should just be one since no shape should have holes? ctx.new_path() for pt in ring.coords: ctx.line_to(*viewport(pt)) if not isinstance(color, dict) or not "border" in color or not "width" in color["border"]: if ext_dim < pixel_width * 60: ctx.set_line_width(1) else: ctx.set_line_width(2.5) else: ctx.set_line_width(color["border"]["width"]) if not isinstance(color, dict) or not "border" in color or not "color" in color["border"]: ctx.set_source_rgba(.3,.3,.3, .75) # grey, semi-transparent else: ctx.set_source_rgba(*get_rgba_tuple(color["border"]["color"], alpha=.75)) ctx.stroke_preserve() # Draw labels. 
for i, bdry, shape, ext_dim in draw_shapes: if format in ('json', 'jsonp'): continue if ext_dim < pixel_width * 20: continue color = bdry["style"].color if isinstance(color, dict) and "label" in color and color["label"] == None: continue # Get the location of the label stored in the database, or fall back to # GDAL routine point_on_surface to get a point quickly. if bdry["style"].label_point: # Override the SRS on the point (for Google, see above). Then transform # it to world coordinates. pt = Point(tuple(bdry["style"].label_point), srid=db_srs.srid) pt.transform(out_srs) elif bdry["label_point"]: # Same transformation as above. pt = Point(tuple(bdry["label_point"]), srid=db_srs.srid) pt.transform(out_srs) else: # No label_point is specified so try to find one by using the # point_on_surface to find a point that is in the shape and # in the viewport's bounding box. try: pt = bbox.intersection(shape).point_on_surface except: # Don't know why this would fail. Bad geometry of some sort. # But we really don't want to leave anything unlabeled so # try the center of the bounding box. pt = bbox.centroid if not shape.contains(pt): continue # Transform to world coordinates and ensure it is within the bounding box. if not bbox.contains(pt): # If it's not in the bounding box and the shape occupies most of this # bounding box, try moving the point to somewhere in the current tile. try: inters = bbox.intersection(shape) if inters.area < bbox.area/3: continue pt = inters.point_on_surface except: continue pt = viewport(pt) txt = bdry["name"] if isinstance(bdry["style"].metadata, dict): txt = bdry["style"].metadata.get("label", txt) if ext_dim > size * pixel_width: ctx.set_font_size(18) else: ctx.set_font_size(12) x_off, y_off, tw, th = ctx.text_extents(txt)[:4] # Is it within the rough bounds of the shape and definitely the bounds of this tile? 
if tw < ext_dim/pixel_width/5 and th < ext_dim/pixel_width/5 \ and pt[0]-x_off-tw/2-4 > 0 and pt[1]-th-4 > 0 and pt[0]-x_off+tw/2+7 < size and pt[1]+6 < size: # Draw the background rectangle behind the text. ctx.set_source_rgba(0,0,0,.55) # black, some transparency ctx.new_path() ctx.line_to(pt[0]-x_off-tw/2-4,pt[1]-th-4) ctx.rel_line_to(tw+9, 0) ctx.rel_line_to(0, +th+8) ctx.rel_line_to(-tw-9, 0) ctx.fill() # Now a drop shadow (also is partially behind the first rectangle). ctx.set_source_rgba(0,0,0,.3) # black, some transparency ctx.new_path() ctx.line_to(pt[0]-x_off-tw/2-4,pt[1]-th-4) ctx.rel_line_to(tw+11, 0) ctx.rel_line_to(0, +th+10) ctx.rel_line_to(-tw-11, 0) ctx.fill() # Draw the text. ctx.set_source_rgba(1,1,1,1) # white ctx.move_to(pt[0]-x_off-tw/2,pt[1]) ctx.show_text(txt) if format in ("png", "gif"): # Convert the image buffer to raw bytes. buf = StringIO() im.write_to_png(buf) v = buf.getvalue() if format == "gif": v = convert_png_to_gif(v) # Form the response. r = HttpResponse(v, content_type='image/' + format) r["Content-Length"] = len(v) elif format == "svg": im.finish() v = buf.getvalue() r = HttpResponse(v, content_type='image/svg+xml') r["Content-Length"] = len(v) elif format in ('json', 'jsonp'): # Get the bytes, which are RGBA sequences. buf1 = list(im.get_data()) # Convert the 4-byte sequences back into integers that refer back to # the boundary list. Count the number of pixels for each shape. shapeidx = [] shapecount = { } for i in xrange(0, size*size): b = ord(buf1[i*4+2])*(256**0) + ord(buf1[i*4+1])*(256**1) + ord(buf1[i*4+0])*(256**2) shapeidx.append(b) if b > 0: shapecount[b] = shapecount.get(b, 0) + 1 # Assign low unicode code points to the most frequently occuring pixel values, # except always map zero to character 32. 
shapecode1 = { } shapecode2 = { } for k, count in sorted(shapecount.items(), key = lambda kv : kv[1]): b = len(shapecode1) + 32 + 1 if b >= 34: b += 1 if b >= 92: b += 1 shapecode1[k] = b shapecode2[b] = draw_shapes[k-1] buf = '' if format == 'jsonp': buf += request.GET.get("callback", "callback") + "(\n" buf += '{"grid":[' for row in xrange(size): if row > 0: buf += ",\n " buf += json.dumps(u"".join(unichr(shapecode1[k] if k != 0 else 32) for k in shapeidx[row*size:(row+1)*size])) buf += "],\n" buf += ' "keys":' + json.dumps([""] + [shapecode2[k][1]["slug"] for k in sorted(shapecode2)], separators=(',', ':')) + ",\n" buf += ' "data":' + json.dumps(dict( (shapecode2[k][1]["slug"], { "name": shapecode2[k][1]["name"], }) for k in sorted(shapecode2)), separators=(',', ':')) buf += "}" if format == 'jsonp': buf += ")" if format == "json": r = HttpResponse(buf, content_type='application/json') else: r = HttpResponse(buf, content_type='text/javascript') return r
class PlanningUnitSelection(Analysis):
    """A saved selection of planning units (lease blocks), dissolved into a
    single MultiPolygon stored in ``geometry_actual``."""

    # NOTE(review): the field name contains a typo ("palnning"). It is kept
    # as-is to avoid a database migration and aliased by the property below.
    palnningunit_ids = models.TextField()
    description = models.TextField(null=True, blank=True)
    geometry_actual = models.MultiPolygonField(
        srid=settings.GEOMETRY_DB_SRID, null=True, blank=True,
        verbose_name="Planning Unit Selection Geometry")

    @property
    def planningunit_ids(self):
        """Correctly-spelled alias for the misspelled ``palnningunit_ids``
        field.

        ``serialize_attributes()`` and ``run()`` read
        ``self.planningunit_ids``, which previously raised AttributeError
        because only the misspelled model field exists. Renaming the field
        itself would change the database column, so expose an alias instead.
        """
        return self.palnningunit_ids

    def serialize_attributes(self):
        """Return click-event attributes and report values for the selection.

        Returns a dict with keys ``event``, ``attributes`` and
        ``report_values``. Every individual metric (wind speed, distances,
        depth) is currently disabled, so ``attributes`` is empty and
        ``report_values`` is ``{}`` — unless no matching planning units
        exist, in which case a zero-count placeholder is returned.
        """
        blocks = PlanningUnit.objects.filter(
            prot_numb__in=self.planningunit_ids.split(','))

        def mean(data):
            # Arithmetic mean; kept for when the disabled report values
            # below are re-enabled.
            return sum(data) / float(len(data))

        if not blocks.exists():
            return {
                'event': 'click',
                'attributes': {'title': 'Number of blocks', 'data': 0},
                'report_values': {},
            }

        # All report metrics (wind-speed, distance-to-substation,
        # distance-to-awc, distance-to-shipping, distance-to-shore, depth)
        # were commented out; see VCS history for the original
        # self.reduce(...) aggregations if they need to be restored.
        report_values = {}
        attrs = ()

        attributes = [{'title': title, 'data': datum} for title, datum in attrs]
        return {
            'event': 'click',
            'attributes': attributes,
            'report_values': report_values,
        }

    @staticmethod
    def reduce(func, data, digits=None, filter_null=True,
               handle_none='Unknown', offset=None):
        """Aggregate ``data`` to a single value, with presentation-friendly
        handling of null values, rounding, and constant offsets.

        Why not the built-in ``reduce()``? This handles rounding, null
        values, and practical defaults.

        Arguments:
            func: aggregation function (``min``, ``max``, a mean, ...)
                applied to the cleaned list.
            data: list of values to aggregate.
            digits: if an int, round the result to this many digits.
            filter_null: if True (default), drop every non-numeric entry
                before aggregating.
            handle_none: returned — or raised, when it is an Exception
                instance — if no usable data remains ("Unknown", 0, None).
            offset: constant added to the aggregate before rounding; kept
                for backwards compatibility.

        Returns the aggregated value, or the ``handle_none`` default when
        the cleaned data is empty or still contains None.
        """
        if filter_null:
            # Keep only genuine numbers. ``long`` exists on Python 2 only;
            # fall back to the Python 3 tuple when it is undefined so this
            # method works on either interpreter.
            try:
                numeric_types = (int, long, float, complex)  # noqa: F821
            except NameError:
                numeric_types = (int, float, complex)
            data = [value for value in data
                    if isinstance(value, numeric_types)]

        # Deal with any remaining None values (or nothing left at all).
        if not data or None in data:
            if isinstance(handle_none, Exception):
                # Propagate the caller-supplied exception upstream.
                raise handle_none
            # Bail out with the presentation default.
            return handle_none

        agg = func(data)
        if offset:
            agg = agg + offset
        if isinstance(digits, int):
            agg = round(agg, digits)
        return agg

    def run(self):
        """Dissolve the selected planning units into ``geometry_actual``.

        Raises Exception when the selection matches no planning units, or
        when the union yields no geometry. Returns True on success.
        """
        planningunits = PlanningUnit.objects.filter(
            prot_numb__in=self.planningunit_ids.split(','))
        if not planningunits.exists():
            # We can't return False: the client would receive a collection
            # without any planning units, which it cannot handle.
            # TODO: Make the client handle the "selection didn't work" case.
            # This is most likely because there are no planning units in
            # the db.
            raise Exception(
                "No planning units available with the current selection.")

        # BUG FIX: aggregate() always returns a dict, so the previous
        # ``if dissolved_geom:`` truthiness test on the dict could never
        # take the error branch; inspect the unioned geometry itself.
        dissolved_geom = planningunits.aggregate(
            Union('geometry'))['geometry__union']
        if dissolved_geom is None:
            raise Exception(
                "No planning units available with the current filters.")

        if isinstance(dissolved_geom, MultiPolygon):
            self.geometry_actual = dissolved_geom
        else:
            # A union of contiguous units can come back as a plain Polygon;
            # wrap it so the MultiPolygonField accepts it.
            self.geometry_actual = MultiPolygon(
                dissolved_geom, srid=dissolved_geom.srid)
        return True

    def geojson(self, srid):
        """Return this selection as a GeoJSON feature string, with its
        geometry transformed to ``srid``."""
        props = get_properties_json(self)
        props['absolute_url'] = self.get_absolute_url()
        json_geom = self.geometry_actual.transform(srid, clone=True).json
        return get_feature_json(json_geom, json.dumps(props))

    class Options:
        verbose_name = 'Planning Unit Selection'
        form = 'scenarios.forms.PlanningUnitSelectionForm'
        form_template = 'selection/form.html'
if attribute_name in date_values[pr_id]: data[attribute_name] = date_values[pr_id][attribute_name] geometry = project.get("geometry") if geometry is not None: # Drop the Z dimension coords = geometry["coordinates"] if geometry["type"] == "Polygon": geometry["coordinates"] = [[[y[0], y[1]] for y in x] for x in coords] geometry = MultiPolygon(GEOSGeometry(json.dumps(geometry))) elif geometry["type"] == "MultiPolygon": geometry["coordinates"] = [ [[[z[0], z[1]] for z in y] for y in x] for x in coords ] geometry = GEOSGeometry(json.dumps(geometry)) geometry.transform(ct) obj.geometry = geometry obj.save() # Save ProjectPhaseLogs if pr_id in date_values: for phase_name in phase_names: if phase_name not in date_values[pr_id]: continue log_entry = None try: log_entry = ProjectPhaseLog.objects.get( project=obj, phase=phases[phase_name]