def compute_spatial_metadata(self, **kwargs): """Other keyword args get passed in as a matter of course, like BBOX, time, and elevation, but this basic driver ignores them""" super(OGRDriver, self).compute_spatial_metadata(**kwargs) # if we have a zip archive, we should expand it now archive_filename = self.get_filename('zip') if os.path.exists(archive_filename): archive = ZipFile(self.cached_basename + self.src_ext) os.mkdir(self.cached_basename) # we will put everything cached underneath the cached base directory archive.extractall(self.cached_basename) ds = ogr.Open(self.get_master_filename()) lyr = ds.GetLayerByIndex(0) if 'sublayer' not in kwargs else ds.GetLayerByName(kwargs['sublayer']) xmin, xmax, ymin, ymax = lyr.GetExtent() crs = lyr.GetSpatialRef() self.resource.spatial_metadata.native_srs = crs.ExportToProj4() e4326 = osr.SpatialReference() e4326.ImportFromEPSG(4326) crx = osr.CoordinateTransformation(crs, e4326) x04326, y04326, _ = crx.TransformPoint(xmin, ymin) x14326, y14326, _ = crx.TransformPoint(xmax, ymax) self.resource.spatial_metadata.bounding_box = Polygon.from_bbox((x04326, y04326, x14326, y14326)) self.resource.spatial_metadata.native_bounding_box = Polygon.from_bbox((xmin, ymin, xmax, ymax)) self.resource.spatial_metadata.three_d = False self.resource.spatial_metadata.save() self.resource.save()
def compute_spatial_metadata(self, **kwargs): """Other keyword args get passed in as a matter of course, like BBOX, time, and elevation, but this basic driver ignores them""" self.cache_data_file(True) ds = gdal.Open(self.cached_basename + '.tif') nx = ds.RasterXSize ny = ds.RasterYSize x0, dx, _, y0, _, dy = ds.GetGeoTransform() xmin, xmax, ymin, ymax = ( x0, x0+dx*nx, y0 if dy > 0 else y0 + dy*ny, y0 + dy*ny if dy > 0 else y0 ) crs = osr.SpatialReference() crs.ImportFromWkt(ds.GetProjection()) self.resource.spatial_metadata.native_srs = crs.ExportToProj4() e4326 = osr.SpatialReference() e4326.ImportFromEPSG(4326) crx = osr.CoordinateTransformation(crs, e4326) x04326, y04326, _ = crx.TransformPoint(xmin, ymin) x14326, y14326, _ = crx.TransformPoint(xmax, ymax) self.resource.spatial_metadata.bounding_box = Polygon.from_bbox((x04326, y04326, x14326, y14326)) self.resource.spatial_metadata.native_bounding_box = Polygon.from_bbox((xmin, ymin, xmax, ymax)) self.resource.spatial_metadata.save() self.resource.save()
def test_native_reader(self):
    xml = """
    <Metadata>
        <EOID>some_unique_id</EOID>
        <BeginTime>2013-08-27T10:00:00Z</BeginTime>
        <EndTime>2013-08-27T10:00:10Z</EndTime>
        <Footprint>
            <Polygon>
                <Exterior>0 0 20 0 20 10 0 10 0 0</Exterior>
                <!--<Interior></Interior>-->
            </Polygon>
            <Polygon>
                <Exterior>10 10 40 10 40 30 10 30 10 10</Exterior>
                <!--<Interior></Interior>-->
            </Polygon>
        </Footprint>
    </Metadata>
    """
    reader = native.NativeFormat(env)
    self.assertTrue(reader.test(xml))
    values = reader.read(xml)
    self.assertEqual({
        "identifier": "some_unique_id",
        "begin_time": datetime(2013, 8, 27, 10, 0, 0, tzinfo=utc),
        "end_time": datetime(2013, 8, 27, 10, 0, 10, tzinfo=utc),
        "footprint": MultiPolygon(
            Polygon.from_bbox((0, 0, 10, 20)),
            Polygon.from_bbox((10, 10, 30, 40))
        )
    }, values)
def build_filters(self, filters=None):
    if filters is None:
        filters = {}
    applicable_filters = {}

    # Normal filtering
    filter_params = dict([(x, filters[x]) for x in filters if not x.startswith('!')])
    applicable_filters['filter'] = super(HutSearchResource, self).build_filters(filter_params)

    # Exclude filtering
    exclude_params = dict([(x[1:], filters[x]) for x in filters if x.startswith('!')])
    applicable_filters['exclude'] = super(HutSearchResource, self).build_filters(exclude_params)

    # Custom bounds filter
    if 'bounds' in filters:
        bounds = filters['bounds']
        # latitude first from the request, longitude first for the database!
        lat_lo, lng_lo, lat_hi, lng_hi = [float(x) for x in bounds.split(',')]
        if lng_lo > 0 and lng_hi < 0:
            # bounds cross the antimeridian, so split into two polygons
            p1 = Polygon.from_bbox((lng_lo, lat_lo, 180, lat_hi))
            p2 = Polygon.from_bbox((-180, lat_lo, lng_hi, lat_hi))
            polygon = MultiPolygon(p1, p2)
        else:
            polygon = Polygon.from_bbox((lng_lo, lat_lo, lng_hi, lat_hi))
        applicable_filters['filter']['location__within'] = polygon

    return applicable_filters
def test_native_writer(self):
    values = {
        "identifier": "some_unique_id",
        "begin_time": datetime(2013, 8, 27, 10, 0, 0, tzinfo=utc),
        "end_time": datetime(2013, 8, 27, 10, 0, 10, tzinfo=utc),
        "footprint": MultiPolygon(
            Polygon.from_bbox((0, 0, 10, 20)),
            Polygon.from_bbox((10, 10, 30, 40))
        )
    }
    writer = native.NativeFormat(env)
    f = StringIO()
    writer.write(values, f, pretty=True)
    self.assertEqual(dedent("""\
        <Metadata>
          <EOID>some_unique_id</EOID>
          <BeginTime>2013-08-27T10:00:00Z</BeginTime>
          <EndTime>2013-08-27T10:00:10Z</EndTime>
          <Footprint>
            <Polygon>
              <Exterior>0.000000 0.000000 20.000000 0.000000 20.000000 10.000000 0.000000 10.000000 0.000000 0.000000</Exterior>
            </Polygon>
            <Polygon>
              <Exterior>10.000000 10.000000 40.000000 10.000000 40.000000 30.000000 10.000000 30.000000 10.000000 10.000000</Exterior>
            </Polygon>
          </Footprint>
        </Metadata>
        """), dedent(f.getvalue()))
def _extent(self, extents):
    union = None
    for extent in extents:
        if extent is not None:
            if union is None:
                union = Polygon.from_bbox(extent)
            else:
                # GEOS union() returns a new geometry, so keep the result
                union = union.union(Polygon.from_bbox(extent))
    return None if union is None else union.extent
def test_polygons_from_bbox(self):
    "Testing `from_bbox` class method."
    bbox = (-180, -90, 180, 90)
    p = Polygon.from_bbox(bbox)
    self.assertEqual(bbox, p.extent)

    # Testing numerical precision
    x = 3.14159265358979323
    bbox = (0, 0, 1, x)
    p = Polygon.from_bbox(bbox)
    y = p.extent[-1]
    self.assertEqual(format(x, ".13f"), format(y, ".13f"))
def compute_spatial_metadata(self, **kwargs): """Other keyword args get passed in as a matter of course, like BBOX, time, and elevation, but this basic driver ignores them""" super(KmzDriver, self).compute_spatial_metadata(**kwargs) # archive = ZipFile(self.cached_basename + self.src_ext) srs = osr.SpatialReference() srs.ImportFromEPSG(4326) self.resource.native_srs = srs.ExportToProj4() self.resource.bounding_box = Polygon.from_bbox((-180, -90, 180, 90)) self.resource.native_bounding_box = Polygon.from_bbox((-180, -90, 180, 90)) self.resource.three_d = False self.resource.save()
def compute_spatial_metadata(self, **kwargs): """Other keyword args get passed in as a matter of course, like BBOX, time, and elevation, but this basic driver ignores them""" super(PostGISDriver, self).compute_spatial_metadata(**kwargs) cfg = self.resource.driver_config connection = self._connection() xmin=ymin=float('inf') ymax=xmax=float('-inf') dataframe = self.get_filename('dfx') if os.path.exists(dataframe): os.unlink(dataframe) for entry in [cfg['table']] + cfg.get('tables', {}).values(): if isinstance(entry, list): table, geom_field = entry elif entry.startswith('#'): table, geom_field = self._table(sublayer=entry[1:]) else: table = entry geom_field = 'geometry' c = connection.cursor() c.execute("select AsText(st_extent({geom_field})) from {table}".format(geom_field=geom_field, table=table)) xmin0, ymin0, xmax0, ymax0 = GEOSGeometry(c.fetchone()[0]).extent xmin = xmin0 if xmin0 < xmin else xmin ymin = ymin0 if ymin0 < ymin else ymin xmax = xmax0 if xmax0 > xmax else xmax ymax = ymax0 if ymax0 > ymax else ymax crs = osr.SpatialReference() crs.ImportFromEPSG(cfg['srid']) self.resource.spatial_metadata.native_srs = crs.ExportToProj4() e4326 = osr.SpatialReference() e4326.ImportFromEPSG(4326) crx = osr.CoordinateTransformation(crs, e4326) x04326, y04326, _ = crx.TransformPoint(xmin, ymin) x14326, y14326, _ = crx.TransformPoint(xmax, ymax) print xmin, xmax, ymin, ymax print x04326, y04326, x14326, y14326 self.resource.spatial_metadata.bounding_box = Polygon.from_bbox((x04326, y04326, x14326, y14326)) self.resource.spatial_metadata.native_bounding_box = Polygon.from_bbox((xmin, ymin, xmax, ymax)) self.resource.spatial_metadata.three_d = False self.resource.spatial_metadata.save() self.resource.save()
def update_bounding_shape(self):
    ch = MultiPoint([e.location for e in self.photos.all()]).convex_hull

    if ch.geom_typeid == 0:
        # Point
        self.bounding_shape = Polygon.from_bbox((ch.coords[0], ch.coords[1], ch.coords[0], ch.coords[1]))
    elif len(ch.coords[0]) > 2:
        # Polygon
        self.bounding_shape = ch
    else:
        # Two-point line
        self.bounding_shape = Polygon.from_bbox((ch.coords[0][0], ch.coords[0][1], ch.coords[1][0], ch.coords[1][1]))

    self.bounding_shape_dirty = False
    self.save()
    return self.bounding_shape
def import_units(self):
    if not getattr(self, 'org_syncher', None):
        self.import_organizations(noop=True)
    if not getattr(self, 'dept_syncher', None):
        self.import_departments(noop=True)

    if self.options['single']:
        obj_id = self.options['single']
        obj_list = [self.pk_get('unit', obj_id)]
        queryset = Unit.objects.filter(id=obj_id)
    else:
        obj_list = self.pk_get('unit')
        queryset = Unit.objects.all().select_related('services')

    self.target_srid = settings.PROJECTION_SRID

    self.bounding_box = Polygon.from_bbox(settings.BOUNDING_BOX)
    self.bounding_box.set_srid(4326)
    gps_srs = SpatialReference(4326)
    target_srs = SpatialReference(self.target_srid)
    target_to_gps_ct = CoordTransform(target_srs, gps_srs)
    self.bounding_box.transform(target_to_gps_ct)
    self.gps_to_target_ct = CoordTransform(gps_srs, target_srs)

    syncher = ModelSyncher(queryset, lambda obj: obj.id)
    for idx, info in enumerate(obj_list):
        self._import_unit(syncher, info)
    syncher.finish()
def search_catalog(request, *args, **kwargs):
    """A spatial search for the DataResource catalog. In the future, this will be more thorough,
    but right now it looks for a filter parameter in the request, and inside that a JSON payload
    including a bbox four-tuple of minx, miny, maxx, maxy OR a geometry wkt and an optional srid.
    It then performs a broad overlap search and returns the results as a JSON or JSONP list of::

        [{
            "title": "title",
            "path": ["breadcrumbs", "to", "resource"],
            "url": "http://mydomain/ga_resources/path/to/resource/title"
        }]
    """
    flt = json.loads(request.REQUEST['filter'])
    if 'bbox' in flt:
        minx, miny, maxx, maxy = flt['bbox']
        geometry = Polygon.from_bbox((minx, miny, maxx, maxy))
    else:
        geometry = GEOSGeometry(flt['boundary'])

    if 'srid' in flt:
        geometry.set_srid(flt['srid'])

    results = DataResource.objects.filter(bounding_box__overlaps=geometry)
    ret = [{'title': r.title, 'path': r.slug.split('/')[:-1], 'url': r.get_absolute_url()} for r in results]

    callback = None
    if 'jsonCallback' in request.REQUEST:
        callback = request.REQUEST['jsonCallback']
    elif 'callback' in request.REQUEST:
        callback = request.REQUEST['callback']

    if callback:
        return HttpResponse(callback + '(' + json.dumps(ret) + ")", mimetype='text/plain')
    else:
        return HttpResponse(json.dumps(ret), mimetype='application/json')
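# Hedged sketch (not part of the original source): an illustrative value for the
# request's "filter" parameter accepted by search_catalog() above. The key names
# follow the docstring; the coordinates and srid are made-up example values.
import json

example_filter = json.dumps({
    "bbox": [-122.52, 37.70, -122.35, 37.83],  # minx, miny, maxx, maxy
    "srid": 4326,
})
# The view would then be requested as e.g. ?filter=<urlencoded example_filter>.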
def _get_query_geometry(query_obj):
    if query_obj.geometry is None or query_obj.geometry == '':
        return None

    geom = None
    query_obj.geometrytype = query_obj.geometrytype or DEFAULT_GEOMETRY_TYPE

    if query_obj.geometrytype == 'esriGeometryPoint':
        try:
            xy = [float(v) for v in query_obj.geometry.split(",")]
        except Exception:
            xy = DynamicObject(json.loads(query_obj.geometry))
            xy = [xy.x, xy.y]
        geom = Point(*xy)
    elif query_obj.geometrytype == 'esriGeometryPolygon':
        poly = DynamicObject(json.loads(query_obj.geometry))
        geom = Polygon(*poly.rings)
    elif query_obj.geometrytype == 'esriGeometryPolyline':
        line = DynamicObject(json.loads(query_obj.geometry))
        geom = LineString(*line.paths)
    elif query_obj.geometrytype == 'esriGeometryEnvelope':
        try:
            bbox = [float(v) for v in query_obj.geometry.split(",")]
        except Exception:
            bbox = DynamicObject(json.loads(query_obj.geometry))
            bbox = [bbox.xmin, bbox.ymin, bbox.xmax, bbox.ymax]
        geom = Polygon.from_bbox(bbox)

    # set the projection on the geometry
    if geom and query_obj.insr:
        geom.srid = int(query_obj.insr)
        if geom.srid == 102100:
            geom.srid = 900913

    return geom
def setUp(self):
    self.path = os.path.dirname(os.path.realpath(__file__))
    self.group, created = Group.objects.get_or_create(name="TestDefault")
    with patch('eventkit_cloud.jobs.signals.Group') as mock_group:
        mock_group.objects.get.return_value = self.group
        self.user = User.objects.create(
            username='******',
            email='*****@*****.**',
            password='******'
        )
    bbox = Polygon.from_bbox((-10.85, 6.25, -10.62, 6.40))
    tags = DatamodelPreset.objects.get(name='hdm').json_tags
    self.assertEqual(259, len(tags))
    the_geom = GEOSGeometry(bbox, srid=4326)
    self.job = Job.objects.create(
        name='TestJob',
        description='Test description',
        user=self.user,
        the_geom=the_geom,
        json_tags=tags
    )
    self.job.feature_save = True
    self.job.feature_pub = True
    self.job.save()
    self.run = ExportRun.objects.create(job=self.job, user=self.user)
def get(self, request, *args, **kwargs):
    if 'in_bbox' not in request.GET:
        return JSONResponse(message="missing search area", _code=422)

    # Get coordinates for the bbox from the GET parameter
    p1x, p1y, p2x, p2y = (
        float(n) for n in request.GET.get('in_bbox').split(',')
    )

    # Create min and max points with coordinates in EPSG:900913 for the bbox
    p1 = Point(p1x, p1y, srid=900913)
    p2 = Point(p2x, p2y, srid=900913)

    # Transform EPSG:900913 (from OpenLayers) to EPSG:4326 (WGS84)
    p1.transform(4326)
    p2.transform(4326)

    # Create a bbox which represents the visible map
    visible_map = Polygon.from_bbox(
        (p1.x, p1.y, p2.x, p2.y)
    )

    # If the visible map is too big, we don't return places
    if visible_map.area > 10:
        return JSONResponse(message="search area too big", _code=422)

    # Look for places which intersect our visible map, limited by the
    # GET parameter or the default max value
    limit = int(request.GET.get('limit', 500))
    places = KircheOsm.objects\
        .filter(mpoly__intersects=visible_map)[0:limit]

    # Create our json objects of places
    places_of_worship = []
    for place in places:
        # Use the GeoDjango Point type to transform the coordinates into
        # other EPSG formats
        if not place.point:
            continue
        try:
            place.point.transform(request.GET.get('epsg', 4326))
        except Exception:
            return JSONResponse(message='Error by epsg transformation', _code=422)
        _place = {
            'id': place.id,
            'name': place.name,
            'lon': place.point.x,
            'lat': place.point.y,
            'religion': place.religion,
            'denomination': place.denomination,
        }
        places_of_worship.append(_place)

    return JSONResponse(
        request_id=request.GET.get('request_id'),
        places_of_worship=places_of_worship,
        places_of_worship_count=places.count()
    )
def __init__(self, options):
    super(Importer, self).__init__()
    self._images = {obj.url: obj for obj in Image.objects.all()}
    self.options = options
    self.verbosity = options['verbosity']
    self.logger = logging.getLogger(__name__)

    importer_langs = set(self.supported_languages)
    configured_langs = set(l[0] for l in settings.LANGUAGES)
    # Intersection is all the languages possible for the importer to use.
    self.languages = {}
    for lang_code in importer_langs & configured_langs:
        # FIXME: get language name translations from Django
        lang_obj, _ = Language.objects.get_or_create(id=lang_code)
        self.languages[lang_code] = lang_obj

    self.target_srid = settings.PROJECTION_SRID
    gps_srs = SpatialReference(4326)
    target_srs = SpatialReference(self.target_srid)
    if getattr(settings, 'BOUNDING_BOX', None):
        self.bounding_box = Polygon.from_bbox(settings.BOUNDING_BOX)
        self.bounding_box.set_srid(self.target_srid)
        target_to_gps_ct = CoordTransform(target_srs, gps_srs)
        self.bounding_box.transform(target_to_gps_ct)
    else:
        self.bounding_box = None
    self.gps_to_target_ct = CoordTransform(gps_srs, target_srs)

    self.setup()
def __init__(self, options):
    super(Importer, self).__init__()
    self.options = options

    importer_langs = set(self.supported_languages)
    configured_langs = set(l[0] for l in settings.LANGUAGES)
    # Intersection is all the languages possible for the importer to use.
    self.languages = {}
    for lang_code in importer_langs & configured_langs:
        # FIXME: get language name translations from Django
        lang_obj, _ = Language.objects.get_or_create(id=lang_code)
        self.languages[lang_code] = lang_obj

    self.target_srid = settings.PROJECTION_SRID
    gps_srs = SpatialReference(4326)
    target_srs = SpatialReference(self.target_srid)
    if getattr(settings, 'BOUNDING_BOX', None):
        self.bounding_box = Polygon.from_bbox(settings.BOUNDING_BOX)
        self.bounding_box.srid = self.target_srid
        target_to_gps_ct = CoordTransform(target_srs, gps_srs)
        self.bounding_box.transform(target_to_gps_ct)
    else:
        self.bounding_box = None
    self.gps_to_target_ct = CoordTransform(gps_srs, target_srs)

    self.setup()

    # this has to be run after setup, as it relies on organization and data source being set
    self._images = {obj.url: obj for obj in Image.objects.filter(
        publisher=self.organization, data_source=self.data_source)}
def get_geofeatures(request):
    cache_key = _generate_cache_key(request)
    cache_val = cache.get(cache_key, cache_control.GEOFEATURES_CACHE_GROUP)
    if cache_val:
        return APIResponseOK(content=cache_val)

    bounds_param = get_param(request.GET, "within")
    query = request.GET.copy()

    if bounds_param:
        pnts = bounds_param.split(",")
        bbox = (float(pnts[0]), float(pnts[1]), float(pnts[2]), float(pnts[3]))
        poly = Polygon.from_bbox(bbox)
        poly.set_srid(4326)
        del query["within"]

    base_query = Q()
    if bounds_param:
        base_query = base_query & Q(location__within=poly)

    # casts within bounds
    cast_base_query = models.Cast.get_privacy_q(request) & base_query
    q = qstranslate.QueryTranslator(models.Cast, CastAPI.ruleset, cast_base_query)
    try:
        casts = q.filter(query).select_related("author").prefetch_related("media_set").prefetch_related("tags")
    except qstranslate.InvalidParameterException as e:
        raise exceptions.APIBadRequest(e.message)
def _str_to_bbox(val):
    try:
        # value is 'swlat,swlng,nelat,nelng'; from_bbox wants (xmin, ymin, xmax, ymax)
        swlat, swlng, nelat, nelng = [float(s) for s in val.split(',')]
        return Polygon.from_bbox([swlng, swlat, nelng, nelat])
    except Exception:
        return None
def next_row(self):
    response = requests.get('https://biodiv-sports.fr/api/v2/sportpractice/')
    if response.status_code != 200:
        msg = _(u"Failed to download https://biodiv-sports.fr/api/v2/sportpractice/. HTTP status code {status_code}")
        raise GlobalImportError(msg.format(url=response.url, status_code=response.status_code))
    for practice in response.json()['results']:
        defaults = {'name_' + lang: practice['name'][lang]
                    for lang in practice['name'].keys()
                    if lang in settings.MODELTRANSLATION_LANGUAGES}
        SportPractice.objects.get_or_create(id=practice['id'], defaults=defaults)

    bbox = Polygon.from_bbox(settings.SPATIAL_EXTENT)
    bbox.srid = settings.SRID
    bbox.transform(4326)  # WGS84
    url = self.url
    url += '&in_bbox={}'.format(','.join([str(coord) for coord in bbox.extent]))
    if self.practices:
        url += '&practices={}'.format(','.join([str(practice) for practice in self.practices]))
    response = requests.get(url)
    if response.status_code != 200:
        msg = _(u"Failed to download {url}. HTTP status code {status_code}")
        raise GlobalImportError(msg.format(url=response.url, status_code=response.status_code))
    self.root = response.json()
    self.nb = int(self.root['count'])
    for row in self.items:
        yield row
def filter_bbox(self, queryset, bbox):
    """
    Modify the queryset to limit it to data that intersects with the provided bbox.

    bbox - 4-tuple of floats representing 'southwest_lng,southwest_lat,northeast_lng,northeast_lat'

    returns the modified queryset
    """
    bbox = [float(coord) for coord in bbox.split(',')]
    search_box = Polygon.from_bbox(bbox)
    for model in ModelDescription.objects.all():
        django_model = model.get_django_model()
        sql = """SELECT "{layername}"."fid" FROM "{layername}"
                 WHERE ST_Intersects("{layername}"."the_geom",
                     ST_Transform(ST_GeomFromEWKT('srid=4326;{bbox_ewkt}'), {projection}))
                 LIMIT 1;""".format(
            layername=model.name,
            bbox_ewkt=search_box.ewkt,
            projection=django_model.objects.first().the_geom.srid
        )
        try:
            cursor.execute(sql)
            if not cursor.fetchone():
                queryset = queryset.exclude(id=model.layer_id)
        except Exception:
            pass
    return queryset
def validate_search_bbox(extents):
    """
    Validates the export extents.

    Args:
        extents: a tuple of export extents (xmin, ymin, xmax, ymax)

    Returns:
        a valid GEOSGeometry.

    Raises:
        ValidationError: if it's not possible to create a GEOSGeometry from the provided
            extents or if the resulting GEOSGeometry is invalid.
    """
    detail = OrderedDict()
    detail['id'] = _('invalid_bounds')
    detail['message'] = _('Invalid bounding box.')
    try:
        bbox = Polygon.from_bbox(extents)
        if bbox.valid:
            return bbox
        else:
            raise serializers.ValidationError(detail)
    except GEOSException:
        raise serializers.ValidationError(detail)
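# Hedged usage sketch (not part of the original source): one way a caller might
# feed request data to validate_search_bbox() above. The helper name and the
# "xmin"/"ymin"/"xmax"/"ymax" keys are illustrative assumptions, not the
# project's actual API.
def _bbox_from_request_data(data):
    extents = (data['xmin'], data['ymin'], data['xmax'], data['ymax'])
    # raises serializers.ValidationError if the extents are unusable
    return validate_search_bbox(extents)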
def mail_organizers_count(request):
    boroughs = request.GET.getlist('boroughs')
    lot_types = request.GET.getlist('lot_types')
    owner_names = request.GET.getlist('owner_names')
    user_types = request.GET.getlist('user_types')
    bbox = request.GET.get('bbox', None)

    lots = Lot.objects.filter(
        borough__in=boroughs,
        lotlayer__name__in=lot_types,
        owner__name__in=owner_names,
    )
    if bbox:
        p = Polygon.from_bbox(bbox.split(','))
        lots = lots.filter(centroid__within=p)

    organizers = 0
    if 'organizers' in user_types:
        # TODO in Django 1.4, could use distinct('organizer__email') ?
        organizers = len(set(lots.exclude(organizer=None).values_list('organizer__email', flat=True)))

    watchers = 0
    if 'watchers' in user_types:
        watchers = len(set(lots.exclude(watcher=None).values_list('watcher__email', flat=True)))

    counts = {
        'organizers': organizers,
        'watchers': watchers,
    }
    return HttpResponse(json.dumps(counts), mimetype='application/json')
def setUp(self):
    self.url = settings.OVERPASS_API_URL
    self.bbox = [-10.85, 6.25, -10.62, 6.4]  # [<long0>, <lat0>, <long1>, <lat1>]
    self.path = os.path.dirname(os.path.realpath(__file__))
    self.formats = ExportFormat.objects.all()  # pre-loaded by 'insert_export_formats' migration
    group, created = Group.objects.get_or_create(name='TestDefaultExportExtentGroup')
    with patch('eventkit_cloud.jobs.signals.Group') as mock_group:
        mock_group.objects.get.return_value = group
        self.user = User.objects.create(username='******', email='*****@*****.**', password='******')
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = GEOSGeometry(bbox, srid=4326)
    preset = DatamodelPreset.objects.get(name='hdm')
    tags = preset.json_tags
    self.assertEqual(259, len(tags))
    self.job = Job.objects.create(name='TestJob', description='Test description', event='Nepal activation',
                                  user=self.user, the_geom=the_geom, json_tags=tags)
    self.uid = self.job.uid
    # add the formats to the job
    self.job.formats = self.formats
    self.job.save()
    self.osm = self.path + '/files/query.osm'
    self.query = '[maxsize:2147483648][timeout:1600];(relation(6.25,-10.85,6.4,-10.62);way(6.25,-10.85,6.4,-10.62);node(6.25,-10.85,6.4,-10.62));<;(._;>;);out body;'
def neighborhoods(request):
    """Return GeoJSON-encoded neighborhood objects matching the query"""
    try:
        bounds = request.GET['bounds']
    except KeyError:
        return HttpResponseBadRequest('Missing `bounds` parameter')

    try:
        bbox = Polygon.from_bbox(bounds.split(','))
    except ValueError:
        return HttpResponseBadRequest('Illegal `bounds` parameter')

    diagonal_in_meters = Geod(ellps='WGS84').inv(*bbox.extent)[2]
    if diagonal_in_meters > MINIMUM_DIAGONAL_IN_METERS:
        return HttpResponseBadRequest('`bounds` parameter out of range')

    neighborhoods = Neighborhood.objects.filter(geom__bboverlaps=bbox)
    features = [neighborhood.feature for neighborhood in neighborhoods]
    feature_collection = {
        'type': 'FeatureCollection',
        'features': features,
    }
    return HttpResponse(json.dumps(feature_collection), content_type='application/json')
def getBbox(long, lat, zoom_level, pixels=5):
    '''
    Given a point (lon/lat), a zoom level (in spherical mercator) and a pixel tolerance,
    compute and return a BBOX geometry
    '''
    try:
        p = Point(long, lat, srid=4326)
        logger.debug('Got point of %s', p.coords)
        p.transform(3857)
        x, y = p.coords
        logger.debug('Transformed %s to 3857', p.coords)

        # Get the number of meters per pixel...
        # At zoom level 0 (at the equator) there are ~156543 m/pixel, then it halves for
        # each zoom level beyond that...
        resolution = 156543.03392804062 / (2 ** zoom_level)
        spacing = pixels * resolution

        p = Point(x - spacing, y - spacing, srid=3857)
        p.transform(4326)
        xv1, yv1 = p.coords
        p = Point(x + spacing, y + spacing, srid=3857)
        p.transform(4326)
        xv2, yv2 = p.coords

        xmin = min(xv1, xv2)
        xmax = max(xv1, xv2)
        ymin = min(yv1, yv2)
        ymax = max(yv1, yv2)
        return Polygon.from_bbox((xmin, ymin, xmax, ymax))
    except OverflowError as e:
        logger.exception('Overflow reverse-mapping from mercator to lat/long')
        return None
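# Hedged usage sketch (illustrative values only, not from the original source):
# at zoom 15 the resolution above is 156543.03392804062 / 2**15 ≈ 4.78 m/pixel,
# so pixels=5 buffers the point by roughly 24 m on each side before the box is
# transformed back to lon/lat.
tolerance_bbox = getBbox(-122.4194, 37.7749, zoom_level=15, pixels=5)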
def setUp(self):
    Group.objects.create(name='TestDefaultExportExtentGroup')
    self.user1 = User.objects.create_user(
        username='******', email='*****@*****.**', password='******'
    )
    self.user2 = User.objects.create_user(
        username='******', email='*****@*****.**', password='******'
    )
    extents = (-3.9, 16.1, 7.0, 27.6)
    bbox = Polygon.from_bbox(extents)
    the_geom = GEOSGeometry(bbox, srid=4326)
    self.job1 = Job.objects.create(name='TestJob1', description='Test description',
                                   user=self.user1, the_geom=the_geom)
    self.job2 = Job.objects.create(name='TestJob2', description='Test description',
                                   user=self.user2, the_geom=the_geom)
    format = ExportFormat.objects.get(slug='obf')
    self.job1.formats.add(format)
    self.job2.formats.add(format)
    token = Token.objects.create(user=self.user1)
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key,
                            HTTP_ACCEPT='application/json; version=1.0',
                            HTTP_ACCEPT_LANGUAGE='en',
                            HTTP_HOST='testserver')
def setUp(self):
    self.path = os.path.dirname(os.path.realpath(__file__))
    parser = presets.PresetParser(self.path + '/files/hdm_presets.xml')
    self.tags = parser.parse()
    self.assertIsNotNone(self.tags)
    self.assertEquals(256, len(self.tags))
    self.formats = ExportFormat.objects.all()  # pre-loaded by 'insert_export_formats' migration
    Group.objects.create(name='DefaultExportExtentGroup')
    self.user = User.objects.create(username='******', email='*****@*****.**', password='******')
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = GEOSGeometry(bbox, srid=4326)
    self.job = Job.objects.create(name='TestJob', description='Test description',
                                  event='Nepal activation', user=self.user, the_geom=the_geom)
    self.uid = self.job.uid
    # add the formats to the job
    self.job.formats = self.formats
    self.job.save()
    for tag in self.tags:
        Tag.objects.create(
            key=tag['key'],
            value=tag['value'],
            job=self.job,
            data_model='osm',
            geom_types=tag['geom_types']
        )
    self.categories = self.job.categorised_tags
def setUp(self):
    self.url = 'http://localhost/interpreter'
    self.bbox = '6.25,-10.85,6.40,-10.62'  # monrovia
    self.path = os.path.dirname(os.path.realpath(__file__))
    self.formats = ExportFormat.objects.all()  # pre-loaded by 'insert_export_formats' migration
    Group.objects.create(name='TestDefaultExportExtentGroup')
    self.user = User.objects.create(username='******', email='*****@*****.**', password='******')
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = GEOSGeometry(bbox, srid=4326)
    self.job = Job.objects.create(name='TestJob', description='Test description',
                                  event='Nepal activation', user=self.user, the_geom=the_geom)
    self.uid = self.job.uid
    # add the formats to the job
    self.job.formats = self.formats
    self.job.save()
    self.osm = self.path + '/files/query.osm'
    self.query = '(node(6.25,-10.85,6.40,-10.62);<;);out body;'
    self.job.tags.all().delete()
    parser = presets.PresetParser(self.path + '/files/hdm_presets.xml')
    tags = parser.parse()
    self.assertIsNotNone(tags)
    self.assertEquals(256, len(tags))
    # save all the tags from the preset
    for tag_dict in tags:
        tag = Tag.objects.create(
            key=tag_dict['key'],
            value=tag_dict['value'],
            job=self.job,
            data_model='osm',
            geom_types=tag_dict['geom_types']
        )
    self.assertEquals(256, self.job.tags.all().count())
def setUp(self):
    self.path = os.path.dirname(os.path.realpath(__file__))
    self.formats = ExportFormat.objects.all()  # pre-loaded by 'insert_export_formats' migration
    self.group, created = Group.objects.get_or_create(name='TestDefaultExportExtentGroup')
    with patch('eventkit_cloud.jobs.signals.Group') as mock_group:
        mock_group.objects.get.return_value = self.group
        self.user = User.objects.create(username='******', email='*****@*****.**', password='******')
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = GEOSGeometry(bbox, srid=4326)
    export_provider = DataProvider.objects.get(slug='osm-generic')
    provider_task = DataProviderTask.objects.create(provider=export_provider)
    self.tags = [
        {'key': 'building', 'value': 'yes'},
        {'key': 'place', 'value': 'city'},
        {'key': 'highway', 'value': 'service'},
        {'key': 'aeroway', 'value': 'helipad'}
    ]
    self.job = Job(name='TestJob', description='Test description', event='Nepal activation',
                   user=self.user, the_geom=the_geom, json_tags=self.tags)
    self.job.save()
    self.uid = self.job.uid
    # add the formats to the job
    provider_task.formats.add(*self.formats)
    self.job.provider_tasks.add(provider_task)
    self.job.save()
def _get_box(self, value):
    bbox = json.loads(value)
    points = (bbox['_southWest']['lng'], bbox['_northEast']['lat'],
              bbox['_northEast']['lng'], bbox['_southWest']['lat'])
    box = Polygon.from_bbox(points)
    box.srid = SRID_WSG84
    box.transform(SRID_RD)
    return box
def bbox_to_geojson(bbox=None):
    """
    :param bbox: A list [xmin, ymin, xmax, ymax]
    :returns: A geojson of the bbox.
    """
    bbox = Polygon.from_bbox(bbox)
    geometry = json.loads(GEOSGeometry(bbox, srid=4326).geojson)
    return {"type": "Feature", "geometry": geometry}
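# Hedged usage sketch (example values only, not from the original source):
# bbox_to_geojson() above wraps the bbox polygon in a GeoJSON Feature, so the
# result can be dumped straight to JSON.
feature = bbox_to_geojson([-10.85, 6.25, -10.62, 6.40])
assert feature["type"] == "Feature"
assert feature["geometry"]["type"] == "Polygon"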
class CityFactory(factory.DjangoModelFactory):
    class Meta:
        model = models.City

    code = factory.Sequence(lambda n: u"#%s" % n)  # id (!) with max_length=6
    name = factory.Sequence(lambda n: u"City name %s" % n)
    geom = factory.Sequence(lambda _: MultiPolygon(
        Polygon.from_bbox(next(geom_city_iter)), srid=settings.SRID))
def test_job_outside_region(self):
    job = Job.objects.all()[0]
    bbox = Polygon.from_bbox((2.74, 47.66, 21.61, 60.24))  # outside any region
    the_geom = GEOSGeometry(bbox, srid=4326)
    job.the_geom = the_geom
    job.save()
    regions = Region.objects.filter(the_geom__intersects=job.the_geom).intersection(
        job.the_geom, field_name='the_geom').order_by('-intersection')
    self.assertEquals(0, len(regions))
def compute_fields(self, **kwargs):
    """Other keyword args get passed in as a matter of course, like BBOX, time, and elevation,
    but this basic driver ignores them."""
    super(KmzDriver, self).compute_fields(**kwargs)
    # archive = ZipFile(self.cached_basename + self.src_ext)

    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)

    self.resource.spatial_metadata.native_srs = srs.ExportToProj4()
    self.resource.spatial_metadata.bounding_box = Polygon.from_bbox((-180, -90, 180, 90))
    self.resource.spatial_metadata.native_bounding_box = Polygon.from_bbox((-180, -90, 180, 90))
    self.resource.spatial_metadata.three_d = False
    self.resource.spatial_metadata.save()
    self.resource.save()
def encode_objects_for_tile(xyz):
    tile_bounds = Polygon.from_bbox(mercantile.bounds(*xyz))
    tile_bounds.srid = SRID_LNGLAT
    tile_bounds.transform(SRID_SPHERICAL_MERCATOR)
    objects = Place.objects.filter(location__intersects=tile_bounds).select_related('building').all()[:20]
    return encode(objects, tile_bounds)
def _parse_mapkeys(self, map_keys):
    # Create a bounding box from the given longitude & latitude.
    lng = (float(map_keys['ne_lng']), float(map_keys['sw_lng']))
    lat = (float(map_keys['ne_lat']), float(map_keys['sw_lat']))
    box = Polygon.from_bbox((min(lng), min(lat), max(lng), max(lat)))
    return Q(location__geo_point__intersects=box)
def get_geom(self):
    entries = self.entry_set.exclude(geom=None)
    if entries:
        poly = entries.extent()
        geom = Polygon.from_bbox(poly)
    else:
        geom = None
    return geom
def q(z, x, y, track):
    bbox = Polygon.from_bbox(
        (*tile_to_3857(z, x, y + 1), *tile_to_3857(z, x + 1, y)))
    bbox.srid = 3857
    q = Sounding.objects.filter(coord__coveredby=bbox)
    if track is not None:
        q = q.filter(track=track)
    return q
def in_bbox_filter(self, qs, name, value):
    bbox_values, err = bbox.valid_bbox(value)
    # check for errors before using the parsed values
    if err:
        raise ValidationError(f"bbox invalid {err}:{bbox_values}")
    lon1, lat1, lon2, lat2 = bbox_values
    poly_bbox = Polygon.from_bbox((lon1, lat1, lon2, lat2))
    return qs.filter(signal__location__geometrie__bboverlaps=poly_bbox)
def filter(self, qs, value):
    try:
        bbox = value.split(",")
        geom = Polygon.from_bbox(bbox)
    except (ValueError, GEOSException):
        return qs
    qs = qs.filter(coordinates__within=geom)
    return qs
def serve_lac_json(request):
    qs = models.O3gmLac.objects.all()
    try:
        bbox = request.GET.get('bbox').split(',')
        geom = Polygon.from_bbox(bbox)
        qs = qs.filter(geometry__intersects=geom)
    except Exception as e:
        log.error(e)
def poly_from_bbox(bbox_val):
    points = bbox_val.split(',')
    if len(points) != 4:
        raise InvalidFilterError("bbox must be in format 'left,bottom,right,top'")
    try:
        return Polygon.from_bbox([float(p) for p in points])
    except ValueError:
        raise InvalidFilterError("bbox values must be floating point")
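# Hedged usage sketch (example values only, not from the original source):
# poly_from_bbox() above accepts a comma-separated 'left,bottom,right,top'
# string and raises InvalidFilterError for anything else.
poly = poly_from_bbox("-10.85,6.25,-10.62,6.40")
assert poly.extent == (-10.85, 6.25, -10.62, 6.4)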
def test_gis_query_as_string(self):
    """GIS queries can be represented as strings."""
    query = City.objects.filter(point__within=Polygon.from_bbox((0, 0, 2, 2)))
    self.assertIn(
        connection.ops.quote_name(City._meta.db_table),
        str(query.query),
    )
class RestrictedAreaFactory(factory.DjangoModelFactory):
    class Meta:
        model = models.RestrictedArea

    name = factory.Sequence(lambda n: u"Restricted area name %s" % n)
    geom = factory.Sequence(lambda _: MultiPolygon(
        Polygon.from_bbox(next(geom_area_iter)), srid=settings.SRID))
    area_type = factory.SubFactory(RestrictedAreaTypeFactory)
def image(request, id=None):
    @logged_in_or_basicauth()
    def handle_update(request, image):
        data = simplejson.loads(request.raw_post_data)
        image.from_json(data, request.user)
        image.save()
        return json_response(request, image)

    if id is None and request.method == "POST":
        i = Image()
        return handle_update(request, i)
    elif id is not None:
        i = Image.objects.get(pk=id)
        if request.method == "DELETE":
            i.delete()
            return json_response(request, "")
        elif request.method == "POST":
            return handle_update(request, i)
        return json_response(request, i)
    else:
        images = Image.objects.all().select_related()
        output = request.GET.get('output', 'simple')

        if 'archive' in request.GET and request.GET['archive'].lower() in ("true", "t", "1"):
            images = images.filter(archive=True)
        else:
            images = images.filter(archive=False)

        if 'layer' in request.GET:
            images = images.filter(layers__id=request.GET['layer'])

        if 'bbox' in request.GET:
            left, bottom, right, top = map(float, request.GET['bbox'].split(","))
            box = Polygon.from_bbox([left, bottom, right, top])
            images = images.filter(bbox__intersects=box)

        limit = min(int(request.GET.get("limit", 1000)), 10000)
        start = int(request.GET.get("start", 0))
        end = start + limit

        images = images.order_by("-id")

        # Instantiating full image objects for thousands of images is slow;
        # instead, just use .values and make our own dict here. Adding more
        # properties here should be done with consideration.
        if output == 'simple':
            data = {
                'images': [
                    dict(x) for x in images[start:end].values(
                        "id", "width", "height", "url", "bbox")
                ]
            }
            for i in data['images']:
                i['bbox'] = list(i['bbox'].extent)
        else:
            data = {
                'images': [i.to_json(output=output) for i in images[start:end]]
            }
        return json_response(request, data)
def get_geofeatures(request):
    bounds_param = get_param(request.GET, 'within')
    query = request.GET.copy()

    if bounds_param:
        pnts = bounds_param.split(',')
        bbox = (float(pnts[0]), float(pnts[1]), float(pnts[2]), float(pnts[3]))
        poly = Polygon.from_bbox(bbox)
        poly.set_srid(4326)
        del query['within']

    base_query = Q()
    if bounds_param:
        base_query = base_query & Q(location__within=poly)

    # casts within bounds
    cast_base_query = models.Cast.get_privacy_q(request) & base_query
    q = qstranslate.QueryTranslator(models.Cast, CastAPI.ruleset, cast_base_query)
    casts = q.filter(query)
    cast_arr = []
    for c in casts:
        if c.location:
            cast_arr.append(geojson_serialize(c, c.location, request))

    # events within bounds
    events = models.Event.objects.filter(base_query)
    event_arr = []
    for e in events:
        if e.location:
            event_arr.append(geojson_serialize(e, e.location, request))

    # itineraries intersecting bounds
    if bounds_param:
        base_query = Q(path__intersects=poly)
    itins = models.Itinerary.objects.filter(base_query)
    itin_arr = []
    for i in itins:
        if i.path:
            itin_arr.append(geojson_serialize(i, i.path, request))

    features_dict = {}
    features_dict['casts'] = dict(type='FeatureCollection', features=cast_arr)
    features_dict['events'] = dict(type='FeatureCollection', features=event_arr)
    features_dict['itineraries'] = dict(type='FeatureCollection', features=itin_arr)

    return APIResponseOK(content=features_dict)
def getdmadetail(request):
    print(request.GET)
    print(request.POST)
    dma_no = request.GET.get("dma_no")
    # NOTE: left, top, right, bottom are assumed to be defined elsewhere
    # (e.g. parsed from the request); they are not defined in this snippet.
    # (u'118.28575800964357', u'29.8010417315232', u'118.53518199035648', u'29.924899835516314')
    # left = 118.28575800964357
    # top = 29.8010417315232
    # right = 118.53518199035648
    # bottom = 29.924899835516314
    print('1.12...', left, top, right, bottom)
    bbox = (float(left), float(top), float(right), float(bottom))
    print(bbox)
    geom = Polygon.from_bbox(bbox)
    print('geom:', geom)
    # pgeojson = {"type":"FeatureCollection","features":[{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[13178892.355395831,3489944.851357296],[13181281.012238158,3490454.4314972665],[13181822.4413829,3488777.063506157],[13180803.280811375,3487460.647792236],[13178648.181393484,3487938.3792190184],[13178202.298825681,3489021.2372169197],[13178892.355395831,3489944.851357296]]]},"properties":"null"}]}
    # return JsonResponse(pgeojson)
    # geodata = FenceShape.objects.filter(geomdata__intersects=geom)
    rsql = '''
        SELECT id, geomdata, geomjson
        FROM `virvo_fenceshape`
        WHERE MBRWITHIN(geomdata, ST_GEOMFROMTEXT('{}', 0));
    '''.format(geom)
    geodata = FenceShape.objects.raw(rsql)

    data = []
    data_property = []
    for q in geodata:
        f = FenceShape.objects.get(name=q.name)
        print(' \t\n:', q.id, '#########', f.geomdata.geojson, '-----', q.geomjson)
        # tmp = q.geojsondata()
        # print('tmp:', tmp)
        print('\r\n')
        # data.append(json.dumps(tmp))
        # data.append(json.loads(f.geomjson))
        properties = {
            "strokeColor": q.strokeColor,
            "fillColor": q.fillColor,
            "name": q.name
        }
        data.append(json.loads(f.geomdata.geojson))
        data_property.append(properties)

    # return return_feature_collection(data)
    ret = build_feature_collection(data, data_property)
    print('\r\n')
    # print('ere&*^*&^*&:::::', ret)
    # print(json.loads(ret))
    tmp = JsonResponse(ret, safe=False)
    print('jsontmp:', tmp.content)
    return JsonResponse(ret)
def test_job_region(self):
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))  # africa
    region = Region.objects.filter(the_geom__contains=bbox)[0]
    self.assertIsNotNone(region)
    self.assertEquals('Africa', region.name)
    self.job.region = region
    self.job.save()
    saved_job = Job.objects.all()[0]
    self.assertEqual(saved_job.region, region)
def get(self, request, x0=None, y0=None, x1=None, y1=None):
    bbox = Polygon.from_bbox([x0, y0, x1, y1]) if x0 else None
    geom = get_geom_from_cache()
    ocorrencias = Ocorrencia.objects.filter_by_geom_and_bbox(geom, bbox)
    if not get_user_agent(request).is_mobile:
        ocorrencias = SearchForm(request.GET or None, queryset=ocorrencias).get_queryset()
    response = StreamingHttpResponse(streaming_ocorrencias(ocorrencias),
                                     content_type='application/json')
    response['Cache-Control'] = 'max-age=0, no-cache, no-store'
    return response
def points(request):
    BB = request.REQUEST.get('BB')
    ret = DataPoint.objects.filter(position__contained=Polygon.from_bbox(
        [float(bb) for bb in BB.split(',')]))
    return HttpResponse(
        json.dumps({
            'type': 'FeatureCollection',
            'features': [geoj(r) for r in ret]
        }))
def make_unformated_data(feature):
    _dict = feature["properties"]
    if 'geometry' in feature:
        geom = {self.Meta.geo_field: feature["geometry"]}
        _dict.update(geom)
    if self.Meta.bbox_geo_field and 'bbox' in feature:
        # build a polygon from the bbox
        _dict.update({self.Meta.bbox_geo_field: Polygon.from_bbox(feature['bbox'])})
    return _dict
def handle(self, *args, **options):
    with open('./fixtures/langs.json') as fixture:
        features = json.loads(fixture.read())['features']
    for rec in features:
        lang = Language.objects.get(name=rec['properties']['title'])
        print(rec['properties']['title'], 'geom being loaded.')
        lang.geom = GEOSGeometry(json.dumps(rec['geometry']))
        lang.bbox = Polygon.from_bbox(lang.geom.extent)
        lang.color = rec['properties']['color'].replace(' ', '')
        lang.save()
def filter_locality(extent=None, country=None, timestamp_from=None, timestamp_to=None, place=None):
    """
    Filter OSM localities by extent, country, place and timestamps.

    :param extent: extent of data
    :type extent: str (with comma separator)

    :param country: specific country
    :type country: str

    :param place: place name to search by geoname
    :type place: str

    :param timestamp_from: start time
    :type timestamp_from: timestamp

    :param timestamp_to: end time
    :type timestamp_to: timestamp

    :return: LocalityOSMView
    """
    queryset = all_locality()

    # check extent data
    if extent:
        try:
            polygon = parse_bbox(extent)
        except (ValueError, IndexError):
            raise Exception('extent is incorrect format')
        queryset = queryset.in_polygon(polygon)

    # check by country
    if country:
        try:
            # getting country's polygon
            country = Country.objects.get(name__iexact=country)
            if country:
                polygons = country.polygon_geometry
                queryset = queryset.in_polygon(polygons)
        except Country.DoesNotExist:
            raise Exception('%s is not found or not a country.' % country)

    if place:
        try:
            geo = search_by_geoname(place)
            bbox = (geo['southwest']['lng'], geo['southwest']['lat'],
                    geo['northeast']['lng'], geo['northeast']['lat'])
            geom = Polygon.from_bbox(bbox)
            queryset = queryset.filter(geometry__within=geom)
        except Exception:
            pass

    if timestamp_from:
        queryset = queryset.filter(changeset_timestamp__gte=timestamp_from)
    if timestamp_to:
        queryset = queryset.filter(changeset_timestamp__lte=timestamp_to)

    return queryset
def test_job_outside_region(self):
    job = Job.objects.all()[0]
    bbox = Polygon.from_bbox(
        (2.74, 47.66, 21.61, 60.24))  # outside any region
    the_geom = MultiPolygon(GEOSGeometry(bbox, srid=4326))
    job.the_geom = the_geom
    job.save()
    regions = Region.objects.filter(the_geom__intersects=job.the_geom)
    self.assertEqual(0, len(regions))
def service(self, request, nelat, nelng, swlat, swlng):
    boundsRect = Polygon.from_bbox((min(nelng, swlng), min(nelat, swlat),
                                    max(nelng, swlng), max(nelat, swlat)))
    stopGeoQuerySet = Stop.objects.filter(point__within=boundsRect)[:50]
    jsonOut = json.dumps({'stops': list(stopGeoQuerySet)}, cls=ModelJSONEncoder)
    print('Area', boundsRect.area)
    return HttpResponse(
        jsonOut, content_type='application/javascript; charset=utf8')
def __sanitize(self, qdict):
    '''
    Convert the values into python types and sanitize them using the rule set.
    All values become lists.
    '''
    # iterate over a copy so keys can be deleted while looping
    for k, v in list(qdict.items()):
        field, modifier = self.__get_field(k)

        # If it's not a valid field, or not a field that should be exposed, remove it
        if field not in self.ruleset:
            del qdict[k]
        else:
            # Check the declared type and coerce the value accordingly
            type = self.ruleset[field]['type']
            if type == 'string':
                qdict[k] = [str(v)]
            elif type == 'int':
                qdict[k] = [int(v)]
            elif type == 'bool':
                qdict[k] = [(v.lower() == 'true')]
            elif type == 'list':
                qdict[k] = v.split(',')
            elif type == 'datetime':
                qdict[k] = [strtodate(v)]
            elif type == 'geo_distance':
                # The value should be a tuple like this: (point, distance)
                # where point is a Point object and distance a Distance object
                dist = v.split(',')
                pnt = Point(float(dist[0]), float(dist[1]))
                # TODO: make the measurement type specifiable i.e. 30mi, 20km
                # right now it defaults to meters.
                dist = D(m=float(dist[2]))
                qdict[k] = [(pnt, dist)]
            elif type == 'geo_polygon':
                pnts = v.split(',')
                bbox = (float(pnts[0]), float(pnts[1]), float(pnts[2]), float(pnts[3]))
                poly = Polygon.from_bbox(bbox)
                # TODO: 4326 - this should be a setting?
                poly.set_srid(4326)
                qdict[k] = [poly]
            else:
                qdict[k] = [str(v)]
def test_fields(self):
    size_request = SizeIncreaseRequest.objects.all()[0]
    bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
    the_geom = MultiPolygon(GEOSGeometry(bbox, srid=4326), srid=4326)
    self.assertEqual(size_request.the_geom, the_geom)
    self.assertEqual(size_request.requested_aoi_size, 5000)
    self.assertEqual(size_request.requested_data_size, 1000)
    self.assertEqual(size_request.status, "pending")
    self.assertEqual(size_request.user, self.user)
def get_queryset(self):
    collection_id = self.kwargs['collection_id']
    params = self.request.GET.dict()
    qset = Feature.objects.filter(collection_id=collection_id)
    if 'bbox' in params and params['bbox']:
        bbox_filter = params['bbox']
        bbox = Polygon.from_bbox((float(b) for b in bbox_filter.split(',')))
        qset = qset.filter(geometry__bboverlaps=bbox)
    return qset
def get_queryset(self):
    box = self.request.query_params.get("box", None)
    if box:
        [xmin, ymin, xmax, ymax] = box.split(',')
        poly = Polygon.from_bbox((xmin, ymin, xmax, ymax))
        return Product.objects.select_related('creator').filter(
            creator__address__point__within=poly).filter(quantity__gt=0)
    else:
        return Product.objects.filter(quantity__gt=0)