def __init__(self, colinfos, row, srid):
    '''
    Convert a SQL result set row and a list of SQL result set
    colinfos to a properties array and a geometry geojson string.

    :param colinfos: A list of PEP-249 colinfo tuples.
    :param row: An SQL result set row with as many members as ``colinfos``
    :param srid: The target spatial reference ID to convert the geometry to.
    '''
    self.props = {}
    self.id = None
    self.geometry = None
    # Walk the row cell-by-cell alongside its column descriptions.
    for colinfo, cell in zip(colinfos, row):
        if _is_geom_column(colinfo):
            geom = GEOSGeometry(cell)
            # Reproject only when a target SRID was requested and differs.
            if srid is not None and srid != geom.srid:
                geom.transform(srid)
            self.geometry = geom
            continue
        # Non-geometry columns become properties; the ID column is
        # additionally remembered separately (and still kept in props).
        if _is_id_column(colinfo):
            self.id = cell
        self.props[colinfo[0]] = cell
def __init__(self, colinfos, row, srid):
    '''
    Convert a SQL result set row and a list of SQL result set
    colinfos to a properties array and a geometry geojson string.

    :param colinfos: A list of PEP-249 colinfo tuples.
    :param row: An SQL result set row with as many members as ``colinfos``
    :param srid: The target spatial reference ID to convert the geometry to.
    '''
    self.props = {}
    self.id = None
    self.geometry = None
    for idx in range(len(colinfos)):
        colinfo = colinfos[idx]
        cell = row[idx]
        if not _is_geom_column(colinfo):
            # Plain column: record it as a property; the designated ID
            # column is also exposed as ``self.id``.
            if _is_id_column(colinfo):
                self.id = cell
            self.props[colinfo[0]] = cell
        else:
            geom = GEOSGeometry(cell)
            # Transform when a different target SRID was requested.
            if srid is not None and geom.srid != srid:
                geom.transform(srid)
            self.geometry = geom
def bulk_geocode(self, codes, srid=None):
    """
    Yield ``(code, feature)`` pairs for every code found in the store.

    :param codes: An iterable of feature codes to look up (queried in chunks).
    :param srid: Optional target SRID; when given, each feature's geometry
                 is reprojected from the store's native SRID before yielding.
    """
    for chunk in _chunk(codes):
        matches = self.code_store.find_features(
            spec={"_id": {"$in": chunk}})
        for feat in matches:
            if srid:
                # Round-trip through GEOS to reproject the GeoJSON geometry.
                geom = GEOSGeometry(json.dumps(feat['geometry']),
                                    srid=self.code_store.srid)
                geom.transform(srid)
                feat['geometry'] = json.loads(geom.json)
            yield feat['_id'], feat
def __getitem__(self, code):
    """
    Look up a single feature by code.

    :param code: The feature code, or a ``(code, srid)`` tuple to have the
                 geometry reprojected to ``srid`` before returning.
    :raises KeyError: if no feature matches ``code``.
    """
    target_srid = None
    if isinstance(code, tuple):
        code, target_srid = code
    doc = self.code_store.coll.find_one(code)
    if not doc:
        raise KeyError(code)
    if target_srid:
        # Reproject the stored GeoJSON geometry via GEOS.
        geom = GEOSGeometry(json.dumps(doc['geometry']),
                            srid=self.code_store.srid)
        geom.transform(target_srid)
        doc['geometry'] = json.loads(geom.json)
    return doc
def _handle_geom(self, geometry):
    """Apply the configured geometry options and return the result."""
    opts = self.options
    # Optionally drop the Z dimension by round-tripping through 2D WKB.
    if opts.get('force2d'):
        writer = WKBWriter()
        writer.outdim = 2
        geometry = GEOSGeometry(writer.write(geometry), srid=geometry.srid)
    # Optionally simplify while preserving topology.
    tolerance = opts.get('simplify')
    if tolerance is not None:
        geometry = geometry.simplify(tolerance=tolerance,
                                     preserve_topology=True)
    # Reproject when the geometry is not already in the target SRS.
    if geometry.srid != self.srid:
        geometry.transform(self.srid)
    return geometry
def _handle_geom(self, value):
    """
    Geometry processing (in place), depending on options.

    Accepts ``None``, a GeoJSON-like dict (``'type'`` key present), a
    ``GEOSGeometry`` instance, or any string representation GEOS can
    parse (e.g. ewkt, bwkt). Stores the processed geometry in
    ``self._current['geometry']``.

    :raises SerializationError: if ``value`` cannot be parsed as a geometry.
    """
    if value is None:
        geometry = None
    elif isinstance(value, dict) and 'type' in value:
        # Already GeoJSON-like; passed through untouched.
        geometry = value
    elif isinstance(value, GEOSGeometry):
        geometry = value
    else:
        try:
            # this will handle string representations (e.g. ewkt, bwkt)
            geometry = GEOSGeometry(value)
        except ValueError:
            # if the geometry couldn't be parsed,
            # we can't generate valid geojson
            error_msg = 'The field ["%s", "%s"] could not be parsed as a valid geometry' % (
                self.geometry_field, value)
            raise SerializationError(error_msg)

    # BUG FIX: the option pipeline below only makes sense for real GEOS
    # geometries. Previously a None value crashed unconditionally on
    # ``geometry.srid`` (AttributeError), and a GeoJSON dict crashed in
    # WKBWriter/simplify/srid/extent whenever any option was enabled.
    if isinstance(geometry, GEOSGeometry):
        # Optional force 2D
        if self.options.get('force2d'):
            wkb_w = WKBWriter()
            wkb_w.outdim = 2
            geometry = GEOSGeometry(wkb_w.write(geometry),
                                    srid=geometry.srid)
        # Optional geometry simplification
        simplify = self.options.get('simplify')
        if simplify is not None:
            geometry = geometry.simplify(tolerance=simplify,
                                         preserve_topology=True)
        # Optional geometry reprojection (only when a source SRID is known)
        if geometry.srid and geometry.srid != self.srid:
            geometry.transform(self.srid)
        # Optional bbox
        if self.options.get('bbox_auto'):
            self._current['bbox'] = geometry.extent
    self._current['geometry'] = geometry
def _handle_geom(self, value):
    """
    Geometry processing (in place), depending on options.

    ``value`` may be ``None``, a GeoJSON-like dict, a ``GEOSGeometry``,
    or a string representation GEOS can parse (e.g. ewkt, bwkt). The
    processed geometry is stored in ``self._current['geometry']``.

    :raises SerializationError: if ``value`` cannot be parsed as a geometry.
    """
    if value is None:
        geometry = None
    elif isinstance(value, dict) and 'type' in value:
        # Looks like GeoJSON already; keep it as-is.
        geometry = value
    elif isinstance(value, GEOSGeometry):
        geometry = value
    else:
        try:
            # this will handle string representations (e.g. ewkt, bwkt)
            geometry = GEOSGeometry(value)
        except ValueError:
            # if the geometry couldn't be parsed,
            # we can't generate valid geojson
            error_msg = 'The field ["%s", "%s"] could not be parsed as a valid geometry' % (
                self.geometry_field, value
            )
            raise SerializationError(error_msg)

    # BUG FIX: guard the GEOS-only steps. The old code crashed with
    # AttributeError on ``geometry.srid`` when the value was None, and
    # crashed in force2d/simplify/extent when it was a GeoJSON dict.
    if isinstance(geometry, GEOSGeometry):
        # Optional force 2D
        if self.options.get('force2d'):
            wkb_w = WKBWriter()
            wkb_w.outdim = 2
            geometry = GEOSGeometry(wkb_w.write(geometry),
                                    srid=geometry.srid)
        # Optional geometry simplification
        simplify = self.options.get('simplify')
        if simplify is not None:
            geometry = geometry.simplify(tolerance=simplify,
                                         preserve_topology=True)
        # Optional geometry reprojection (needs a known source SRID)
        if geometry.srid and geometry.srid != self.srid:
            geometry.transform(self.srid)
        # Optional bbox
        if self.options.get('bbox_auto'):
            self._current['bbox'] = geometry.extent
    self._current['geometry'] = geometry
def __getitem__(self, code):
    """
    Returns a feature if an exact match is found or a feature
    collection if an approximate match is found.

    :param code: The feature code, or a ``(code, srid)`` tuple to have
                 an exact match's geometry reprojected to ``srid``.
    :raises KeyError: if neither an exact nor an approximate match exists.
    """
    srid = None
    if isinstance(code, tuple):
        code, srid = code
    val = self.code_store.coll.find_one(code)
    if val:
        if srid:
            g = GEOSGeometry(json.dumps(val['geometry']),
                             srid=self.code_store.srid)
            g.transform(srid)
            val['geometry'] = json.loads(g.json)
        return val
    # No exact match: score candidate codes by shared n-grams and return
    # the candidates as a FeatureCollection, best match first.
    val = {'type': "FeatureCollection"}
    features = []
    ngrams = self.parser(code)
    scores = {}
    for ngram in ngrams:
        counts = self.ngram_store.find({'ngram': ngram})
        for count in counts:
            # BUG FIX: the old code did ``scores[c] += n`` only when the
            # key was *missing* — raising KeyError on every new candidate
            # and never accumulating existing ones. Accumulate instead.
            scores[count['code']] = \
                scores.get(count['code'], 0) + count['count']
    # BUG FIX: the old ``key=lambda x,y: y`` raised TypeError (sort keys
    # take one argument). Rank by score, highest first. Also renamed the
    # loop variable: it used to shadow the ``code`` parameter, corrupting
    # the KeyError below.
    for candidate, _score in sorted(scores.items(),
                                    key=lambda item: item[1],
                                    reverse=True):
        features.append(self.code_store.coll.find_one(candidate))
    if len(features):
        val['features'] = features
        return val
    raise KeyError(code)
def validate_analysis_extent(request):
    """
    Validate a proposed InaSAFE analysis extent against hazard/exposure.

    POST params: ``hazard_id``, ``exposure_id`` (Layer PKs) and
    ``view_extent`` (comma-separated ``x0,y0,x1,y1`` bbox in EPSG:4326).
    Returns a JSON response with ``is_valid``, ``is_warned``, ``extent``,
    ``reason`` and (when computed) ``area``.
    """
    if request.method != 'POST':
        return HttpResponseBadRequest()
    try:
        hazard_id = request.POST.get('hazard_id')
        exposure_id = request.POST.get('exposure_id')
        view_extent = request.POST.get('view_extent')
        hazard_layer = Layer.objects.get(id=hazard_id)
        exposure_layer = Layer.objects.get(id=exposure_id)
    except Exception as e:
        # Missing/invalid parameters or unknown layer ids.
        LOGGER.exception(e)
        return HttpResponseBadRequest()
    # calculate extent
    try:
        # Check hazard and exposure intersected.
        # geographic_bounding_box is an EWKT-style "SRID=...;<wkt>" string;
        # the numeric SRID is pulled out with a regex.
        hazard_srid, hazard_wkt = hazard_layer.geographic_bounding_box.split(
            ';')
        hazard_srid = re.findall(r'\d+', hazard_srid)
        hazard_geom = GEOSGeometry(hazard_wkt, srid=int(hazard_srid[0]))
        hazard_geom.transform(4326)
        exposure_srid, exposure_wkt = exposure_layer.geographic_bounding_box.\
            split(';')
        exposure_srid = re.findall(r'\d+', exposure_srid)
        exposure_geom = GEOSGeometry(exposure_wkt, srid=int(exposure_srid[0]))
        exposure_geom.transform(4326)
        analysis_geom = exposure_geom.intersection(hazard_geom)
        if not analysis_geom:
            # hazard and exposure doesn't intersect
            message = _("Hazard and exposure does not intersect.")
            retval = {
                'is_valid': False,
                'is_warned': False,
                'extent': view_extent,
                'reason': message
            }
            return HttpResponse(json.dumps(retval),
                                content_type="application/json")
        # This bbox is in the format [x0,y0,x1,y1]
        x0, y0, x1, y1 = [float(n) for n in view_extent.split(',')]
        view_geom = GEOSGeometry(bbox_to_wkt(x0, x1, y0, y1), srid=4326)
        analysis_geom = view_geom.intersection(analysis_geom)
        if not analysis_geom:
            # previous hazard and exposure intersection doesn't intersect
            # view extent
            message = _("View extent does not intersect hazard and exposure.")
            retval = {
                'is_valid': False,
                'is_warned': False,
                'extent': view_extent,
                'reason': message
            }
            return HttpResponse(json.dumps(retval),
                                content_type="application/json")
        # Check the size of the extent
        # convert to EPSG:3410 for equal area projection
        # NOTE(review): the SRID is passed as the *string* '3410'/'4326';
        # GEOSGeometry.transform normally takes an int SRID — confirm the
        # project's Django/GDAL version accepts a digit string here.
        analysis_geom.transform('3410')
        area = analysis_geom.area
        # Transform back to EPSG:4326
        analysis_geom.transform('4326')
        # Area limit is configured in square metres (see conversion below).
        area_limit = settings.INASAFE_ANALYSIS_AREA_LIMIT
        if area > area_limit:
            # Area exceeded designated area limit.
            # Don't allow analysis when exceeding area limit
            message = _(
                'Analysis extent exceeded area limit: {limit} km<sup>2</sup>.'
                '<br /> Analysis might take a long time to complete. '
                '<br /> Please reduce extent and try again')
            # Convert m2 into km2.
            area_limit = area_limit / 1000000
            message = message.format(limit=area_limit)
            retval = {
                'is_valid': False,
                'is_warned': False,
                'extent': view_extent,
                'area': area,
                'reason': message
            }
            return HttpResponse(json.dumps(retval),
                                content_type="application/json")
        # convert analysis extent to bbox string again
        view_extent = ','.join([str(f) for f in analysis_geom.extent])
        message = _("Analysis will be performed on this given view extent.")
        retval = {
            'is_valid': True,
            'is_warned': False,
            'extent': view_extent,
            'area': area,
            'reason': message
        }
        return HttpResponse(json.dumps(retval),
                            content_type="application/json")
    except Exception as e:
        # Any unexpected failure in the geometry pipeline is a server error.
        LOGGER.exception(e)
        return HttpResponseServerError()
def validate_analysis_extent(request):
    """
    Validate a proposed InaSAFE analysis extent against hazard, exposure
    and (optionally) an aggregation layer.

    POST params: ``hazard_id``, ``exposure_id``, optional
    ``aggregation_id`` (Layer PKs) and ``view_extent`` (comma-separated
    ``x0,y0,x1,y1`` bbox in EPSG:4326). Returns a JSON response with
    ``is_valid``, ``is_warned``, ``extent``, ``reason`` and ``area``.
    """
    if request.method != 'POST':
        return HttpResponseBadRequest()
    try:
        hazard_id = request.POST.get('hazard_id')
        exposure_id = request.POST.get('exposure_id')
        aggregation_id = request.POST.get('aggregation_id')
        view_extent = request.POST.get('view_extent')
        hazard_layer = Layer.objects.get(id=hazard_id)
        exposure_layer = Layer.objects.get(id=exposure_id)
        aggregation_layer = None
        if aggregation_id:
            aggregation_layer = Layer.objects.get(id=aggregation_id)
    except Exception as e:
        # Missing/invalid parameters or unknown layer ids.
        LOGGER.exception(e)
        return HttpResponseBadRequest()
    # calculate extent
    try:
        # Check hazard and exposure intersected.
        # geographic_bounding_box is "SRID=...;<wkt>"; pull the SRID out.
        hazard_srid, hazard_wkt = hazard_layer.geographic_bounding_box.split(
            ';')
        hazard_srid = re.findall(r'\d+', hazard_srid)
        hazard_geom = GEOSGeometry(hazard_wkt, srid=int(hazard_srid[0]))
        hazard_geom.transform(4326)
        exposure_srid, exposure_wkt = exposure_layer.geographic_bounding_box.\
            split(';')
        exposure_srid = re.findall(r'\d+', exposure_srid)
        exposure_geom = GEOSGeometry(exposure_wkt, srid=int(exposure_srid[0]))
        exposure_geom.transform(4326)
        analysis_geom = exposure_geom.intersection(hazard_geom)
        if aggregation_layer:
            # Further clip the analysis area by the aggregation layer bbox.
            aggregation_srid, aggregation_wkt = aggregation_layer.\
                geographic_bounding_box.split(';')
            aggregation_srid = re.findall(r'\d+', aggregation_srid)
            aggregation_geom = GEOSGeometry(aggregation_wkt,
                                            srid=int(aggregation_srid[0]))
            aggregation_geom.transform(4326)
            analysis_geom = analysis_geom.intersection(aggregation_geom)
        if not analysis_geom:
            # hazard and exposure doesn't intersect
            message = _("Hazard and exposure does not intersect.")
            retval = {
                'is_valid': False,
                'is_warned': False,
                'extent': view_extent,
                'reason': message
            }
            return HttpResponse(json.dumps(retval),
                                content_type="application/json")
        # This bbox is in the format [x0,y0,x1,y1]
        x0, y0, x1, y1 = [float(n) for n in view_extent.split(',')]
        view_geom = GEOSGeometry(bbox_to_wkt(x0, x1, y0, y1), srid=4326)
        analysis_geom = view_geom.intersection(analysis_geom)
        if not analysis_geom:
            # previous hazard and exposure intersection doesn't intersect
            # view extent
            message = _("View extent does not intersect hazard and exposure.")
            retval = {
                'is_valid': False,
                'is_warned': False,
                'extent': view_extent,
                'reason': message
            }
            return HttpResponse(json.dumps(retval),
                                content_type="application/json")
        # Check the size of the extent
        # convert to EPSG:3410 for equal area projection
        # NOTE(review): SRIDs are passed as *strings* here — confirm the
        # project's Django/GDAL version accepts digit strings in transform().
        # NOTE(review): unlike the sibling implementation, the computed
        # area is returned but never compared against the configured
        # area limit — verify this omission is intentional.
        analysis_geom.transform('3410')
        area = analysis_geom.area
        # Transform back to EPSG:4326
        analysis_geom.transform('4326')
        # convert analysis extent to bbox string again
        view_extent = ','.join([str(f) for f in analysis_geom.extent])
        message = _("Analysis will be performed on this given view extent.")
        retval = {
            'is_valid': True,
            'is_warned': False,
            'extent': view_extent,
            'area': area,
            'reason': message
        }
        return HttpResponse(json.dumps(retval),
                            content_type="application/json")
    except Exception as e:
        # Any unexpected failure in the geometry pipeline is a server error.
        LOGGER.exception(e)
        return HttpResponseServerError()
def importData(file, characterEncoding, format, user, folder): cursor = connection.cursor() start_time = time.time() #manage zipfile fd, fname = tempfile.mkstemp(suffix=fileExt_dic[format]) os.close(fd) f = open(fname, "wb") for chunk in file.chunks(): f.write(chunk) f.close() if not zipfile.is_zipfile(fname): os.remove(fname) return "Not a valid zip archive.", None zip = zipfile.ZipFile(fname) hasSuffix = {} required_suffixes = suffixes_dic[format] for suffix in required_suffixes: hasSuffix[suffix] = False for info in zip.infolist(): extension = os.path.splitext(info.filename)[1].lower() if extension in required_suffixes: hasSuffix[extension] = True for suffix in required_suffixes: if not hasSuffix[suffix]: zip.close() os.remove(fname) return "Archive missing required %s file." % suffix, None zip = zipfile.ZipFile(fname) shapefileName = None dirname = tempfile.mkdtemp() for info in zip.infolist(): if info.filename.lower().endswith(filenameExt_dic[format]): shapefileName = info.filename dstFile = os.path.join(dirname, info.filename) f = open(dstFile, "wb") f.write(zip.read(info.filename)) f.close() zip.close() #verify if shapefile is valid try: srcPath = os.path.join(dirname, shapefileName) srcLayers = fiona.listlayers(srcPath) shapefileOK = True except: traceback.print_exc() shapefileOK = False if not shapefileOK: os.remove(fname) shutil.rmtree(dirname) return "Not a valid vector file.", None #add shapefile object to database try: for i in srcLayers: with fiona.open(srcPath) as c: srcSpatialRef = to_string(c.crs) print srcSpatialRef project = CoordTransform(SpatialReference(srcSpatialRef), SpatialReference(3857)) geometryType = c.schema['geometry'] shapefile = Shapefile.objects.create( filename=c.name, parent=folder, srs_wkt=srcSpatialRef, geom_type=geometryType, encoding=characterEncoding, created_by=user) #define shapefile's attributes for keys, values in c.schema['properties'].iteritems(): dict = {} dict['name'] = keys props = re.split('\W+', values) 
dict['type'] = utils.fionaTypeToInt(props[0]) try: dict['width'] = int(props[1]) except IndexError: dict['width'] = 0 if dict['type'] == 2: try: dict['precision'] = int(props[2]) except IndexError: dict['precision'] = 15 else: dict['precision'] = 0 attr = Attribute.objects.create(shapefile=shapefile, **dict) #store shapefile's features for srcFeature in c: try: wkt = dumps(srcFeature['geometry']) geosGeometry = GEOSGeometry(wkt) geosGeometry.srid = SpatialReference( srcSpatialRef).srid geosGeometry.transform(project) except TypeError: geosGeometry = None geometryField = utils.calcGeometryField(geometryType) args = {} args['shapefile'] = shapefile args[geometryField] = geosGeometry args['attribute_value'] = srcFeature['properties'] args['id_relat'] = srcFeature['id'] feature = Feature.objects.create(**args) print("Temps final: --- %s seconds ---" % str(time.time() - start_time)) return None, shapefile except BaseException, e: #cleaning up os.remove(fname) shutil.rmtree(dirname, ignore_errors=False, onerror=handleRemoveReadonly) shapefile.delete() return e, None
def importData(file, characterEncoding, format, user, folder): cursor = connection.cursor() start_time = time.time() #manage zipfile fd,fname = tempfile.mkstemp(suffix=fileExt_dic[format]) os.close(fd) f = open(fname, "wb") for chunk in file.chunks(): f.write(chunk) f.close() if not zipfile.is_zipfile(fname): os.remove(fname) return "Not a valid zip archive.", None zip = zipfile.ZipFile(fname) hasSuffix = {} required_suffixes = suffixes_dic[format] for suffix in required_suffixes: hasSuffix[suffix] = False for info in zip.infolist(): extension = os.path.splitext(info.filename)[1].lower() if extension in required_suffixes: hasSuffix[extension] = True for suffix in required_suffixes: if not hasSuffix[suffix]: zip.close() os.remove(fname) return "Archive missing required %s file." % suffix, None zip = zipfile.ZipFile(fname) shapefileName = None dirname = tempfile.mkdtemp() for info in zip.infolist(): if info.filename.lower().endswith(filenameExt_dic[format]): shapefileName = info.filename dstFile = os.path.join(dirname, info.filename) f = open(dstFile, "wb") f.write(zip.read(info.filename)) f.close() zip.close() #verify if shapefile is valid try: srcPath = os.path.join(dirname,shapefileName) srcLayers = fiona.listlayers(srcPath) shapefileOK = True except: traceback.print_exc() shapefileOK = False if not shapefileOK: os.remove(fname) shutil.rmtree(dirname) return "Not a valid vector file.", None #add shapefile object to database try: for i in srcLayers: with fiona.open(srcPath) as c: srcSpatialRef = to_string(c.crs) print srcSpatialRef project = CoordTransform(SpatialReference(srcSpatialRef),SpatialReference(3857)) geometryType = c.schema['geometry'] shapefile = Shapefile.objects.create(filename=c.name, parent=folder, srs_wkt=srcSpatialRef, geom_type=geometryType, encoding=characterEncoding, created_by=user) #define shapefile's attributes for keys, values in c.schema['properties'].iteritems(): dict = {} dict['name'] = keys props = re.split('\W+', values) dict['type'] = 
utils.fionaTypeToInt(props[0]) try: dict['width'] = int(props[1]) except IndexError: dict['width'] = 0 if dict['type'] == 2: try: dict['precision'] = int(props[2]) except IndexError: dict['precision'] = 15 else: dict['precision'] = 0 attr = Attribute.objects.create(shapefile=shapefile, **dict) #store shapefile's features for srcFeature in c: try: wkt = dumps(srcFeature['geometry']) geosGeometry = GEOSGeometry(wkt) geosGeometry.srid = SpatialReference(srcSpatialRef).srid geosGeometry.transform(project) except TypeError: geosGeometry = None geometryField = utils.calcGeometryField(geometryType) args = {} args['shapefile'] = shapefile args[geometryField] = geosGeometry args['attribute_value'] = srcFeature['properties'] args['id_relat'] = srcFeature['id'] feature = Feature.objects.create(**args) print("Temps final: --- %s seconds ---" % str(time.time() - start_time)) return None, shapefile except BaseException, e: #cleaning up os.remove(fname) shutil.rmtree(dirname, ignore_errors=False, onerror=handleRemoveReadonly) shapefile.delete() return e, None