def extractPolygonesWaterZones(iface, dlg, assb, dir_dest, layersName):
    """Part 2: extract the water zones from the polygonized shape layer.

    Copies every polygon whose ``DN`` attribute equals 1 into a new memory
    layer and a shapefile on disk, styles the layer blue, refreshes the
    canvas, reports the total processing time and finishes the treatment.

    :param iface: QGIS interface instance.
    :param dlg: plugin dialog used for user feedback (messInfo/messErreur).
    :param assb: opaque state forwarded unchanged to endTreatment().
    :param dir_dest: destination directory for the output shapefile.
    :param layersName: dict mapping logical roles ('polygonize', 'filtre',
        'eau', 'seuil') to actual layer names.
    """
    canvas = iface.mapCanvas()
    li = layerList()
    layer = None
    if layersName['polygonize'] in li:
        layer = li[layersName['polygonize']]
        setLayerVisible(li[layersName['polygonize']], False)
    if layersName['filtre'] in li:
        setLayerVisible(li[layersName['filtre']], False)

    layerWaterName = layersName['eau']
    layerWaterPath = dir_dest + os.sep + layerWaterName + EXT_VECTOR

    # Remove any stale output file; abort with a user message if the file
    # is locked or the directory is not writable.
    if os.path.exists(layerWaterPath):
        try:
            os.remove(layerWaterPath)
        except OSError:
            # Narrowed from a bare `except:`: only file-system errors are
            # expected from os.remove().
            QMessageBox.information(
                None, "Attention !!!",
                layerWaterPath + " ne peut pas être effacé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
                QMessageBox.Ok, QMessageBox.NoButton)
            messErreur(dlg, layerWaterPath + " ne peut pas être effacé.")
            return

    messInfo(dlg, "Création de la couche: " + layerWaterName + ".")
    messInfo(dlg, "")

    if layer is None:
        messErreur(dlg, layerWaterName + " ne peut pas être chargé.")
        return

    # In-memory layer sharing the polygonized layer's CRS.
    crs = layer.crs()
    crsWkt = crs.toWkt()
    layerWater = QgsVectorLayer("Polygon?crs=" + crsWkt, layerWaterName,
                                "memory")
    if layerWater:
        QgsProject.instance().addMapLayer(layerWater)
    else:
        messErreur(dlg, layerWaterName + " ne peut pas être chargé.")
        return

    li = layerList()
    symbol = li[layerWaterName].renderer().symbol()
    symbol.setColor(QColor.fromRgb(0, 0, 255))  # water drawn in blue
    provider = li[layerWaterName].dataProvider()

    # Mirror the source layer's schema on both the memory layer and the
    # on-disk writer.
    fields = layer.fields()
    wfields = QgsFields()
    for f in fields:
        provider.addAttributes([QgsField(f.name(), f.type())])
        wfields.append(QgsField(f.name(), f.type()))

    writer = QgsVectorFileWriter(layerWaterPath, "CP1250", wfields,
                                 QgsWkbTypes.Polygon, crs, FORMAT_VECT)
    if writer.hasError() != QgsVectorFileWriter.NoError:
        messErreur(dlg, layerWaterPath + " ne peut pas être créé.")
        return

    li[layerWaterName].startEditing()

    # Water zones: collect every polygon flagged DN == 1.
    for elem in layer.getFeatures():
        if elem['DN'] == 1:
            messInfo(dlg, "----> Ajout du polygone de Fid: " + str(elem.id()))
            geom = elem.geometry()
            feature = QgsFeature(fields)
            feature.setGeometry(geom)
            feature.setAttributes(elem.attributes())
            provider.addFeatures([feature])
            writer.addFeature(feature)

    del writer  # flush the shapefile to disk

    li[layerWaterName].commitChanges()
    setLayerVisible(li[layerWaterName], False)
    node = QgsProject.instance().layerTreeRoot().findLayer(
        li[layerWaterName].id())
    iface.layerTreeView().layerTreeModel().refreshLayerLegend(node)
    li[layerWaterName].triggerRepaint()
    canvas.refresh()
    setLayerVisible(li[layersName['seuil']], False)
    li[layersName['seuil']].triggerRepaint()

    # We now have the polygons of the submerged zones; processing stops here.
    setLayerVisible(li[layerWaterName], True)
    li[layerWaterName].triggerRepaint()
    canvas.refresh()
    extent = li[layerWaterName].extent()
    canvas.setExtent(extent)

    messInfo(dlg, "Temps total de traitement: " +
             str(round(time.time() - start_time)) + " secondes.")
    messInfo(dlg, "")
    endTreatment(iface, dlg, assb, layersName)
    return
lon_field = 'Longitude' # set the name for the field containing the latitude lat_field = 'Latitude' #ok if records have slightly different crs (ex. WGS84 and GDA94) crs = 4283 Output_Layer = filepath + shp spatRef = QgsCoordinateReferenceSystem(crs, QgsCoordinateReferenceSystem.EpsgCrsId) inp_tab = QgsVectorLayer(Input_Table, "Input_Table", "ogr") fields = inp_tab.fields() #creates the vector layer into which the data will be saved outLayer = QgsVectorFileWriter(Output_Layer, None, fields, QgsWkbTypes.Point, spatRef, "ESRI Shapefile") pt = QgsPointXY() outFeature = QgsFeature() #puts data in the shape file using the info in the csv file for feat in inp_tab.getFeatures(): attrs = feat.attributes() pt.setX(float(feat[lon_field])) pt.setY(float(feat[lat_field])) outFeature.setAttributes(attrs) outFeature.setGeometry(QgsGeometry.fromPointXY(pt)) outLayer.addFeature(outFeature) del outLayer #split layer into one layer per species
def __init__(self, destination, encoding, fields, geometryType, crs,
             options=None):
    """Create a vector writer for *destination*, dispatching on its prefix.

    Supports four back-ends: in-memory layers, PostGIS tables, Spatialite
    tables and OGR file formats (the default). For the database back-ends
    the table is created via SQL and `self.writer` is the layer's data
    provider; for files `self.writer` is a QgsVectorFileWriter.

    NOTE(review): this is Python-2-era code (`print` statements, `unicode`);
    it will not run under Python 3 as-is.

    :param destination: output URI; prefix selects the back-end.
    :param encoding: file encoding, or None to read the Processing setting.
    :param fields: output fields (converted via _toQgsField).
    :param geometryType: QGIS WKB geometry type of the output.
    :param crs: coordinate reference system of the output.
    :param options: unused here — presumably reserved for subclasses/callers;
        verify against the rest of the class.
    """
    self.destination = destination
    self.isNotFileBased = False
    self.layer = None
    self.writer = None
    if encoding is None:
        # Fall back to the encoding configured in Processing settings.
        settings = QSettings()
        encoding = settings.value('/Processing/encoding', 'System', type=str)
    if self.destination.startswith(self.MEMORY_LAYER_PREFIX):
        # --- In-memory layer back-end ---
        self.isNotFileBased = True
        # Unique uuid so repeated runs never collide on the same layer URI.
        uri = GEOM_TYPE_MAP[geometryType] + "?uuid=" + unicode(uuid.uuid4())
        if crs.isValid():
            uri += '&crs=' + crs.authid()
        fieldsdesc = []
        for f in fields:
            qgsfield = _toQgsField(f)
            fieldsdesc.append('field=%s:%s'
                              % (qgsfield.name(),
                                 TYPE_MAP_MEMORY_LAYER.get(qgsfield.type(),
                                                           "string")))
        if fieldsdesc:
            uri += '&' + '&'.join(fieldsdesc)
        self.layer = QgsVectorLayer(uri, self.destination, 'memory')
        self.writer = self.layer.dataProvider()
    elif self.destination.startswith(self.POSTGIS_LAYER_PREFIX):
        # --- PostGIS back-end: create the table then open it as a layer ---
        self.isNotFileBased = True
        uri = QgsDataSourceURI(
            self.destination[len(self.POSTGIS_LAYER_PREFIX):])
        connInfo = uri.connectionInfo()
        (success, user, passwd) = QgsCredentials.instance().get(connInfo,
                                                                None, None)
        if success:
            # Cache the credentials so later connections don't re-prompt.
            QgsCredentials.instance().put(connInfo, user, passwd)
        else:
            raise GeoAlgorithmExecutionException(
                "Couldn't connect to database")
        print uri.uri()
        try:
            db = postgis_utils.GeoDB(host=uri.host(), port=int(uri.port()),
                                     dbname=uri.database(), user=user,
                                     passwd=passwd)
        except postgis_utils.DbError as e:
            raise GeoAlgorithmExecutionException(
                "Couldn't connect to database:\n%s" % e.message)

        def _runSQL(sql):
            # Execute one statement and commit, wrapping DB errors in the
            # Processing exception type.
            try:
                db._exec_sql_and_commit(unicode(sql))
            except postgis_utils.DbError as e:
                raise GeoAlgorithmExecutionException(
                    'Error creating output PostGIS table:\n%s' % e.message)

        fields = [_toQgsField(f) for f in fields]
        fieldsdesc = ",".join('%s %s'
                              % (f.name(),
                                 TYPE_MAP_POSTGIS_LAYER.get(f.type(),
                                                            "VARCHAR"))
                              for f in fields)
        _runSQL("CREATE TABLE %s.%s (%s)" % (uri.schema(),
                                             uri.table().lower(), fieldsdesc))
        _runSQL("SELECT AddGeometryColumn('{schema}', '{table}', 'the_geom', {srid}, '{typmod}', 2)"
                .format(table=uri.table().lower(), schema=uri.schema(),
                        srid=crs.authid().split(":")[-1],
                        typmod=GEOM_TYPE_MAP[geometryType].upper()))
        self.layer = QgsVectorLayer(uri.uri(), uri.table(), "postgres")
        self.writer = self.layer.dataProvider()
    elif self.destination.startswith(self.SPATIALITE_LAYER_PREFIX):
        # --- Spatialite back-end: drop/create the table then open it ---
        self.isNotFileBased = True
        uri = QgsDataSourceURI(
            self.destination[len(self.SPATIALITE_LAYER_PREFIX):])
        print uri.uri()
        try:
            db = spatialite_utils.GeoDB(uri=uri)
        except spatialite_utils.DbError as e:
            raise GeoAlgorithmExecutionException(
                "Couldn't connect to database:\n%s" % e.message)

        def _runSQL(sql):
            # Execute one statement and commit, wrapping DB errors in the
            # Processing exception type.
            try:
                db._exec_sql_and_commit(unicode(sql))
            except spatialite_utils.DbError as e:
                raise GeoAlgorithmExecutionException(
                    'Error creating output Spatialite table:\n%s' % unicode(e))

        fields = [_toQgsField(f) for f in fields]
        fieldsdesc = ",".join('%s %s'
                              % (f.name(),
                                 TYPE_MAP_SPATIALITE_LAYER.get(f.type(),
                                                               "VARCHAR"))
                              for f in fields)
        _runSQL("DROP TABLE IF EXISTS %s" % uri.table().lower())
        _runSQL("CREATE TABLE %s (%s)" % (uri.table().lower(), fieldsdesc))
        _runSQL("SELECT AddGeometryColumn('{table}', 'the_geom', {srid}, '{typmod}', 2)"
                .format(table=uri.table().lower(),
                        srid=crs.authid().split(":")[-1],
                        typmod=GEOM_TYPE_MAP[geometryType].upper()))
        self.layer = QgsVectorLayer(uri.uri(), uri.table(), "spatialite")
        self.writer = self.layer.dataProvider()
    else:
        # --- OGR file back-end ---
        # Build a map of file extension -> OGR driver code from the list of
        # supported filters ("Description (*.ext)" strings).
        formats = QgsVectorFileWriter.supportedFiltersAndFormats()
        OGRCodes = {}
        for (key, value) in formats.items():
            extension = unicode(key)
            extension = extension[extension.find('*.') + 2:]
            extension = extension[:extension.find(' ')]
            OGRCodes[extension] = value
        extension = self.destination[self.destination.rfind('.') + 1:]
        if extension not in OGRCodes:
            # Unknown extension: default to an ESRI Shapefile.
            extension = 'shp'
            self.destination = self.destination + '.shp'
        qgsfields = QgsFields()
        for field in fields:
            qgsfields.append(_toQgsField(field))
        self.writer = QgsVectorFileWriter(self.destination, encoding,
                                          qgsfields, geometryType, crs,
                                          OGRCodes[extension])
def _clip_vector_layer(layer, extent, extra_keywords=None, explode_flag=True,
                       hard_clip_flag=False, explode_attribute=None):
    """Clip a Hazard or Exposure layer to the extents provided.

    The layer must be a vector layer or an exception will be thrown.
    The output layer will always be in WGS84/Geographic.

    :param layer: A valid QGIS vector or raster layer
    :type layer:

    :param extent: Either an array representing the exposure layer extents
        in the form [xmin, ymin, xmax, ymax]. It is assumed that the
        coordinates are in EPSG:4326 although currently no checks are made
        to enforce this. or: A QgsGeometry of type polygon.
        **Polygon clipping is currently only supported for vector datasets.**
    :type extent: list(float, float, float, float)

    :param extra_keywords: Optional keywords dictionary to be added to
        output layer.
    :type extra_keywords: dict

    :param explode_flag: A bool specifying whether multipart features
        should be 'exploded' into singleparts.
        **This parameter is ignored for raster layer clipping.**
    :type explode_flag: bool

    :param hard_clip_flag: A bool specifying whether line and polygon
        features that extend beyond the extents should be clipped such that
        they are reduced in size to the part of the geometry that intersects
        the extent only. Default is False.
        **This parameter is ignored for raster layer clipping.**
    :type hard_clip_flag: bool

    :param explode_attribute: A str specifying to which attribute #1, #2 and
        so on will be added in case of explode_flag being true. The
        attribute is modified only if there are at least 2 parts.
    :type explode_attribute: str

    :returns: Clipped layer (placed in the system temp dir). The output
        layer will be reprojected to EPSG:4326 if needed.
    :rtype: QgsVectorLayer
    """
    if not layer or not extent:
        message = tr('Layer or Extent passed to clip is None.')
        raise InvalidParameterError(message)

    if layer.type() != QgsMapLayer.VectorLayer:
        message = tr('Expected a vector layer but received a %s.'
                     % str(layer.type()))
        raise InvalidParameterError(message)

    # handle, file_name = tempfile.mkstemp('.sqlite', 'clip_',
    #    temp_dir())
    handle, file_name = tempfile.mkstemp('.shp', 'clip_', temp_dir())

    # Ensure the file is deleted before we try to write to it
    # fixes windows specific issue where you get a message like this
    # ERROR 1: c:\temp\inasafe\clip_jpxjnt.shp is not a directory.
    # This is because mkstemp creates the file handle and leaves
    # the file open.
    os.close(handle)
    os.remove(file_name)

    # Get the clip extents in the layer's native CRS
    geo_crs = QgsCoordinateReferenceSystem()
    geo_crs.createFromSrid(4326)
    transform = QgsCoordinateTransform(geo_crs, layer.crs())
    allowed_clip_values = [QGis.WKBPolygon, QGis.WKBPolygon25D]
    if isinstance(extent, list):
        rectangle = QgsRectangle(extent[0], extent[1], extent[2], extent[3])
        # noinspection PyCallByClass
        # noinspection PyTypeChecker
        polygon = QgsGeometry.fromRect(rectangle)
    elif (isinstance(extent, QgsGeometry) and
          extent.wkbType() in allowed_clip_values):
        # FIX: wkbType is a method — the original compared the bound method
        # itself to the list, which is always False, making this branch
        # unreachable for polygon extents.
        # FIX: transformBoundingBox() expects a QgsRectangle; the original
        # converted it to a QRectF with .toRectF().
        rectangle = extent.boundingBox()
        polygon = extent
    else:
        raise InvalidClipGeometryError(
            tr('Clip geometry must be an extent or a single part '
               'polygon based geometry.'))
    projected_extent = transform.transformBoundingBox(rectangle)

    # Get vector layer
    provider = layer.dataProvider()
    if provider is None:
        message = tr('Could not obtain data provider from '
                     'layer "%s"' % layer.source())
        raise Exception(message)

    # Get the layer field list, select by our extent then write to disk
    # .. todo:: FIXME - for different geometry types we should implement
    #    different clipping behaviour e.g. reject polygons that
    #    intersect the edge of the bbox. Tim
    request = QgsFeatureRequest()
    if not projected_extent.isEmpty():
        request.setFilterRect(projected_extent)
        request.setFlags(QgsFeatureRequest.ExactIntersect)

    field_list = provider.fields()

    writer = QgsVectorFileWriter(
        file_name,
        None,
        field_list,
        layer.wkbType(),
        geo_crs,
        # 'SQLite')  # FIXME (Ole): This works but is far too slow
        'ESRI Shapefile')
    if writer.hasError() != QgsVectorFileWriter.NoError:
        message = tr('Error when creating shapefile: <br>Filename:'
                     '%s<br>Error: %s' % (file_name, writer.hasError()))
        raise Exception(message)

    # Reverse the coordinate xform now so that we can convert
    # geometries from layer crs to geocrs.
    transform = QgsCoordinateTransform(layer.crs(), geo_crs)
    # Retrieve every feature with its geometry and attributes
    count = 0
    has_multipart = False

    for feature in provider.getFeatures(request):
        geometry = feature.geometry()

        # Loop through the parts adding them to the output file
        # we write out single part features unless explode_flag is False
        if explode_flag:
            geometry_list = explode_multipart_geometry(geometry)
        else:
            geometry_list = [geometry]

        for part_index, part in enumerate(geometry_list):
            part.transform(transform)
            if hard_clip_flag:
                # Remove any dangling bits so only intersecting area is
                # kept.
                part = clip_geometry(polygon, part)
            if part is None:
                continue

            feature.setGeometry(part)
            # There are multiple parts and we want to show it in the
            # explode_attribute
            if part_index > 0 and explode_attribute is not None:
                has_multipart = True

            writer.addFeature(feature)
            count += 1
    del writer  # Flush to disk

    if count < 1:
        message = tr(
            'No features fall within the clip extents. Try panning / zooming '
            'to an area containing data and then try to run your analysis '
            'again. If hazard and exposure data doesn\'t overlap at all, it '
            'is not possible to do an analysis. Another possibility is that '
            'the layers do overlap but because they may have different '
            'spatial references, they appear to be disjointed. If this is the '
            'case, try to turn on reproject on-the-fly in QGIS.')
        raise NoFeaturesInExtentError(message)

    keyword_io = KeywordIO()
    if extra_keywords is None:
        extra_keywords = {}
    extra_keywords['had multipart polygon'] = has_multipart
    keyword_io.copy_keywords(layer, file_name,
                             extra_keywords=extra_keywords)
    base_name = '%s clipped' % layer.name()
    layer = QgsVectorLayer(file_name, base_name, 'ogr')

    return layer
def processAlgorithm(self, parameters, context, model_feedback):
    """Detect obstructed (underground) stream cross-profiles against a DEM.

    For each stream (identified by the LINE attribute), samples elevations
    along every cross-profile and flags profiles whose minimum elevation
    jumps above the running "flowing" level by more than ``seuil_diff``.
    Writes all profiles to OUTPUT with two attributes:
    ``id`` (group id of contiguous obstructed profiles) and ``obstruct``
    (1 if obstructed, else 0).
    """
    # inputs
    profils_l = self.parameterAsVectorLayer(parameters, 'profils', context)
    mnt = self.parameterAsRasterLayer(parameters, 'mnt', context)
    # outputs
    output = self.parameterAsOutputLayer(parameters, 'OUTPUT', context)
    # parameters
    echantillons_nb = parameters['echantillons_nb']  # samples per profile
    seuil_diff = parameters['seuil_diff']  # elevation-jump threshold
    seuil_rug = parameters['seuil_rug']    # roughness (flatness) threshold
    # Processing-specific variables
    feedback = QgsProcessingMultiStepFeedback(
        profils_l.featureCount() * 2, model_feedback)
    status = 0
    results = {}

    # The SAGA Cross Profiles algorithm adds a LINE attribute to each
    # profile identifying the stream it was generated from; it lets us
    # process profiles stream by stream.
    if profils_l.fields().indexOf('LINE') < 0:
        feedback.reportError("Les profils en entrée doivent contenir un attribut numérique LINE qui identifie chaque cours d'eau de manière unique !", True)
        return {}

    # output preparation
    group_id = 1  # renamed from `id` (shadowed the builtin)
    fields = QgsFields()
    fields.append(QgsField("id", QVariant.Int))
    fields.append(QgsField("obstruct", QVariant.Int))
    writer = QgsVectorFileWriter(output, "System", fields,
                                 QgsWkbTypes.LineString,
                                 QgsCoordinateReferenceSystem(2154),
                                 "ESRI Shapefile")

    # Unique LINE ids so profiles are handled one stream at a time.
    lines_ids = profils_l.uniqueValues(profils_l.fields().indexOf('LINE'))

    for line_id in lines_ids:
        # per-stream state
        low = None   # last non-obstructed elevation
        ids = []     # feature ids of obstructed profiles
        plist = []   # feature ids of the last flowing profiles
        p2 = None    # id of the second profile processed
        count = 0

        for profil_f in profils_l.getFeatures("LINE = %s" % line_id):
            # Sample points along the profile: both endpoints plus
            # (echantillons_nb - 2) evenly spaced interpolated points.
            profil_g = profil_f.geometry()
            freq = profil_g.length() / (echantillons_nb - 1)
            echantillons_g = [QgsGeometry().fromPointXY(
                profil_g.asMultiPolyline()[0][0])]
            for i in range(1, echantillons_nb - 1):
                echantillons_g.append(profil_g.interpolate(freq * i))
            echantillons_g.append(QgsGeometry().fromPointXY(
                profil_g.asMultiPolyline()[0][-1]))

            # Elevation of each sample from the DEM.
            elevations = []
            for echantillon_g in echantillons_g:
                elevation = mnt.dataProvider().sample(
                    echantillon_g.asPoint(), 1)[0]
                elevations.append(elevation)

            # First profile establishes the initial reference elevation.
            if low is None:  # FIX: was `== None`
                low = min(elevations)
                # FIX: store the feature id (the original appended the
                # feature object here while every other append stores an
                # id, breaking the membership tests below).
                plist.append(profil_f.id())
            # obstruction detection
            else:
                # Threshold filters out sharp breaks and small detections.
                if min(elevations) <= low + seuil_diff:
                    # On long underground sections the flow condition can
                    # hold while the stream is still underground; if the
                    # terrain is nearly flat (low roughness) we keep the
                    # obstruction going. Problematic on shallow streams.
                    if not plist and (max(elevations) - min(elevations)) < seuil_rug:
                        ids.append(profil_f.id())
                    # flowing
                    else:
                        low = min(elevations)
                        plist.append(profil_f.id())
                else:
                    # not flowing
                    ids.append(profil_f.id())
                    # A group of 5 or fewer flowing profiles surrounded by
                    # obstructed ones is folded into the obstruction.
                    if len(plist) <= 5:
                        ids += plist
                        del plist[:]

            # remember the id of the second profile
            if count == 1:
                p2 = profil_f.id()
            status += 1
            count += 1
            feedback.setCurrentStep(status)
            if feedback.isCanceled():
                return {}

        # If the second profile is obstructed we assume an error and skip
        # the stream (usually a stream absent from the DEM).
        if p2 in ids:
            del ids[:]

        # Post-processing: extend the detection `ext` profiles up/downstream
        # to allow interpolation.
        ext = 2
        prev = []
        count = 0
        for profil_f in profils_l.getFeatures("LINE = %s" % line_id):
            if len(prev) > 0:
                if count == 0:
                    if profil_f.id() in ids and prev[-1] not in ids:
                        ids += prev
                    if profil_f.id() not in ids and prev[-1] in ids:
                        ids.append(profil_f.id())
                        count += 1
                else:
                    if count < ext:
                        ids.append(profil_f.id())
                        count += 1
                    else:
                        count = 0
            prev.append(profil_f.id())
            if len(prev) > ext:
                del prev[0]

        # Write every profile with two attributes:
        #   id:       groups contiguous obstructed profiles
        #   obstruct: 1 if the profile is obstructed, else 0
        for profil_f in profils_l.getFeatures("LINE = %s" % line_id):
            if profil_f.id() not in ids:
                profil_f.setAttributes([0, 0])
                group_id += 1
            else:
                profil_f.setAttributes([group_id, 1])
            writer.addFeature(profil_f)
            status += 1
            feedback.setCurrentStep(status)
            if feedback.isCanceled():
                return {}

    del writer  # FIX: flush the shapefile (the original never released it)
    results['OUTPUT'] = output
    return results
def run(bar, buildings_layer_path, diffraction_points_layer_path):
    """Extract candidate diffraction points from building footprints.

    Loads the buildings layer, drops pseudo-vertices (collinear points)
    from each ring, removes vertices shared by more than one building, and
    writes the remaining unique corner points to a point shapefile.

    :param bar: progress-bar widget updated via setValue (0..100).
    :param buildings_layer_path: path to the buildings polygon shapefile.
    :param diffraction_points_layer_path: path of the output point shapefile.
    """
    buildings_layer_name = os.path.splitext(
        os.path.basename(buildings_layer_path))[0]
    buildings_layer = QgsVectorLayer(buildings_layer_path,
                                     buildings_layer_name, "ogr")
    diffraction_points_fields = QgsFields()
    diffraction_points_writer = QgsVectorFileWriter(
        diffraction_points_layer_path, "System", diffraction_points_fields,
        QgsWkbTypes.Point, buildings_layer.crs(), "ESRI Shapefile")

    # gets features from layer
    buildings_feat_all = buildings_layer.dataProvider().getFeatures()
    buildings_feat_total = buildings_layer.dataProvider().featureCount()
    buildings_feat_number = 0

    all_coord_points = []

    for buildings_feat in buildings_feat_all:
        buildings_feat_number = buildings_feat_number + 1
        barValue = buildings_feat_number / float(buildings_feat_total) * 100
        bar.setValue(barValue)

        building_geom = buildings_feat.geometry()
        if building_geom.isMultipart():
            building_geom.convertToSingleType()
        buildings_pt = building_geom.asPolygon()
        if len(buildings_pt) > 0:
            for i in range(0, len(buildings_pt)):
                buildings_pts = buildings_pt[i]
                ####
                # start part to delete pseudo vertex
                # this part it's different from the diffraction delete
                # pseudo vertex part
                pts_index_to_delete_list = []
                m_delta = 0.01  # slope tolerance for collinearity
                for ii in range(0, len(buildings_pts) - 1):
                    # ii - 1 deliberately wraps to the last point: the ring
                    # is closed (first == last vertex).
                    x1 = buildings_pts[ii - 1][0]
                    x2 = buildings_pts[ii][0]
                    x3 = buildings_pts[ii + 1][0]
                    y1 = buildings_pts[ii - 1][1]
                    y2 = buildings_pts[ii][1]
                    y3 = buildings_pts[ii + 1][1]
                    # particular cases: first point to delete! (remember
                    # that the first and the last have the same coordinates)
                    if ii == 0 and (x2 == x1 and y2 == y1):
                        x1 = buildings_pts[ii - 2][0]
                        y1 = buildings_pts[ii - 2][1]
                    # angular coefficient to find pseudo vertex
                    if x2 - x1 != 0 and x3 - x1 != 0:
                        m1 = (y2 - y1) / (x2 - x1)
                        m2 = (y3 - y1) / (x3 - x1)
                        if m1 <= m2 + m_delta and m1 >= m2 - m_delta:
                            pts_index_to_delete_list.append(ii)
                            # particular cases: first point to delete!
                            # here we delete the last and add x3,y3
                            # (buildings_pts[ii+1] - the new last point)
                            if ii == 0:
                                pts_index_to_delete_list.append(
                                    len(buildings_pts) - 1)
                                buildings_pts.append(buildings_pts[ii + 1])
                # del pseudo vertex (descending order keeps indices valid)
                pts_index_to_delete_list = sorted(pts_index_to_delete_list,
                                                  reverse=True)
                for pt_index_to_del in pts_index_to_delete_list:
                    del buildings_pts[pt_index_to_del]
                # remove duplicates from the single buildings
                buildings_pts = list(set(buildings_pts))
                for pt in buildings_pts:
                    all_coord_points.append(pt)

    # remove duplicates from vertex of different buildings: keep only
    # coordinates seen exactly once across all footprints.
    all_coord_points = collections.Counter(all_coord_points)
    for coord in list(all_coord_points.keys()):
        if all_coord_points[coord] == 1:
            pt = QgsFeature()
            pt.setGeometry(
                QgsGeometry.fromPointXY(QgsPointXY(coord[0], coord[1])))
            diffraction_points_writer.addFeature(pt)

    # FIX: was commented out — without releasing the writer the output
    # shapefile is never flushed/closed.
    del diffraction_points_writer
def run(self):
    """Risk plugin for classified polygon hazard on polygon population.

    Counts population in an area exposed to hazard zones and then
    computes the proportion of each area that is affected. The
    population in each area is then calculated as the proportion
    of the original population to the affected area.

    :returns: Impact layer
    :rtype: Vector
    """
    self.validate()
    self.prepare()

    self.provenance.append_step(
        'Calculating Step',
        'Impact function is calculating the impact.')

    # Identify hazard and exposure layers
    hazard = self.hazard.layer
    exposure = self.exposure.layer

    # prepare objects for re-projection of geometries
    crs_wgs84 = QgsCoordinateReferenceSystem("EPSG:4326")
    hazard_to_exposure = QgsCoordinateTransform(
        hazard.crs(), exposure.crs())
    wgs84_to_hazard = QgsCoordinateTransform(
        crs_wgs84, hazard.crs())
    wgs84_to_exposure = QgsCoordinateTransform(
        crs_wgs84, exposure.crs())

    # Requested extent is given in WGS84; project it into each layer's CRS.
    extent = QgsRectangle(
        self.requested_extent[0], self.requested_extent[1],
        self.requested_extent[2], self.requested_extent[3])
    extent_hazard = wgs84_to_hazard.transformBoundingBox(extent)
    extent_exposure = wgs84_to_exposure.transformBoundingBox(extent)
    extent_exposure_geom = QgsGeometry.fromRect(extent_exposure)

    # make spatial index of hazard; features are reprojected into the
    # exposure CRS before being indexed and copied.
    hazard_index = QgsSpatialIndex()
    hazard_features = {}
    for feature in hazard.getFeatures(QgsFeatureRequest(extent_hazard)):
        feature.geometry().transform(hazard_to_exposure)
        hazard_index.insertFeature(feature)
        hazard_features[feature.id()] = QgsFeature(feature)

    # create impact layer schema: exposure fields + the target field.
    filename = unique_filename(suffix='.shp')
    impact_fields = exposure.dataProvider().fields()
    impact_fields.append(QgsField(self.target_field, QVariant.Int))
    unaffected_fields = exposure.dataProvider().fields()
    unaffected_fields.append(QgsField(self.target_field, QVariant.Int))

    writer = QgsVectorFileWriter(
        filename, "utf-8", impact_fields, QGis.WKBPolygon, exposure.crs())

    # Evaluating the impact (writes features through `writer`).
    self.evaluate_impact(
        exposure, extent_exposure, extent_exposure_geom,
        hazard_index, hazard_features, writer, unaffected_fields,
        impact_fields)

    del writer  # flush the impact shapefile to disk
    impact_layer = QgsVectorLayer(filename, "Impacted People", "ogr")

    # Generate the report of affected populations in the areas
    # To avoid Null
    for value in self.all_areas_population.values():
        if isinstance(value, QPyNullVariant):
            value = 0
        self.total_population += value
    self.areas = self.all_areas_ids
    self.affected_areas = self.all_affected_areas
    self.areas_population = self.all_areas_population

    # Calculating number of people affected
    # This will help area report mixin to know how
    # to calculate the all row values before other
    # rows values in the report table
    self.evaluate_affected_people()
    impact_summary = self.html_report()

    # Define style for the impact layer
    transparent_color = QColor()
    transparent_color.setAlpha(0)

    # Retrieve the classification that is used by the hazard layer.
    vector_hazard_classification = self.hazard.keyword(
        'vector_hazard_classification')
    # Get the dictionary that contains the definition of the classification
    vector_hazard_classification = definition(vector_hazard_classification)
    # Get the list classes in the classification
    vector_hazard_classes = vector_hazard_classification['classes']

    classes = self.hazard_class_mapping
    classes_colours = {}
    color_mapping = {
        'wet': '#F31A1C',
        'low': '#1EFC7C',
        'medium': '#FFA500',
        'high': '#F31A1C'
    }
    classes_values = {
        'wet': 1,
        'low': 1,
        'medium': 2,
        'high': 3
    }
    # Assigning colors: only classes present in both the mapping and the
    # colour table are styled.
    for vector_hazard_class in vector_hazard_classes:
        key = vector_hazard_class['key']
        if key in classes.keys() and key in color_mapping.keys():
            classes_colours[key] = color_mapping[key]

    # Define style info for output polygons showing population counts
    style_classes = []
    index = 0
    for class_key, colour in classes_colours.items():
        style_class = dict()
        if class_key in classes.keys():
            label = classes[class_key][0]
        else:
            continue
        transparency = 0
        style_class['label'] = label
        style_class['value'] = classes_values[class_key]
        style_class['colour'] = colour
        style_class['transparency'] = transparency
        style_classes.append(style_class)
        index = index + 1

    style_info = dict(
        target_field=self.target_field,
        style_classes=style_classes,
        style_type='categorizedSymbol')

    extra_keywords = {
        'impact_summary': impact_summary,
        'target_field': self.target_field,
        'map_title': tr('Affected People'),
    }
    self.set_if_provenance()
    impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

    # Create vector layer and return
    impact_layer = Vector(
        data=impact_layer,
        name=tr('People affected by each hazard zone'),
        keywords=impact_layer_keywords,
        style_info=style_info)
    self._impact = impact_layer
    return impact_layer
def do_operation(self):
    """Count building footprints (and total floor area) per zone.

    Intersects the footprint layer with the zone layer, accumulates per-zone
    footprint counts and area*height sums, then writes a new zone shapefile
    carrying GID, zone value, count and area attributes. Results are stored
    in self.outputs[0] (layer) and self.outputs[1] (file path).
    """
    # input/output verification already performed during set input/ouput
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    zone_count_field = self.inputs[2].value
    fp_layer = self.inputs[3].value

    # merge with zone
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # count footprint in each zone
    gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
    area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
    ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
    stats = {}
    for _feature in layer_features(tmp_join_layer):
        gid = _feature.attributeMap()[gid_idx].toString()
        # FIX: was `ht_idx > 0`, which wrongly treated a valid field at
        # index 0 as missing (missing fields are signalled by a negative
        # index elsewhere in this codebase).
        if ht_idx >= 0:
            ht = _feature.attributeMap()[ht_idx].toDouble()[0]
        else:
            ht = 0
        # if height is not defined, it is set to 0
        # this will cause the system to ignore area generated without
        # having to remove the field
        area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
        if gid not in stats:
            stats[gid] = (1, area)
        else:
            stat = stats[gid]
            stats[gid] = (stat[0] + 1, stat[1] + area)

    output_layername = 'zone_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    logAPICall.log('create outputfile %s ... ' % output_file,
                   logAPICall.DEBUG)
    try:
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Int),
            3: QgsField(AREA_FIELD_NAME, QVariant.Int),
        }
        writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                     QGis.WKBPolygon, self._crs,
                                     "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(zone_layer):
            # write to file
            f.setGeometry(_f.geometry())
            f.addAttribute(0, _f.attributeMap()[0])
            f.addAttribute(1, _f.attributeMap()[1])

            # retrieve count from statistic
            try:
                gid = _f.attributeMap()[0].toString()
                stat = stats[gid]
                bldg_count = stat[0]
                area = stat[1]
            except KeyError:
                # FIX: narrowed from a bare except — only a missing gid in
                # `stats` is expected here.
                bldg_count, area = 0, 0
            f.addAttribute(2, QVariant(bldg_count))
            f.addAttribute(3, QVariant(area))
            writer.addFeature(f)
        del writer, f
    except Exception as err:
        remove_shapefile(output_file)
        raise OperatorError("error creating zone: %s" % err, self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    # store data in output
    output_layer = load_shapefile(output_file, output_layername)
    if not output_layer:
        # FIX: the original message had no %s placeholder (so the `%`
        # raised a formatting TypeError) and named the wrong file.
        raise OperatorError('Error loading zone file %s' % output_file,
                            self.__class__)
    self.outputs[0].value = output_layer
    self.outputs[1].value = output_file
def do_operation(self):
    """Estimate building counts per zone from a population grid.

    Intersects the population grid with the zone layer, converts population
    to building counts via `pop_to_bldg`, and writes a new zone shapefile
    carrying GID, zone value and building count. Results are stored in
    self.outputs[0] (layer) and self.outputs[1] (file path).
    """
    # input/output verification already performed during set input/ouput
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    popgrid_layer = self.inputs[2].value
    pop_to_bldg = float(self.inputs[3].value)  # persons per building

    # merge with zone
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # accumulate population-derived building count per zone gid
    stats = {}
    _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
    _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
    for _f in layer_features(tmp_join_layer):
        # retrieve count from statistic
        _gid = _f.attributeMap()[_gid_idx].toString()
        _count = _f.attributeMap()[_cnt_idx].toString()
        # FIX: dict.has_key() is Python-2-only; `in` behaves identically
        # and works on both versions.
        if _gid in stats:
            stats[_gid] += float(_count) / pop_to_bldg
        else:
            stats[_gid] = float(_count) / pop_to_bldg

    output_layername = 'zone_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    logAPICall.log('create outputfile %s ... ' % output_file,
                   logAPICall.DEBUG)
    try:
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Int),
        }
        writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                     QGis.WKBPolygon, self._crs,
                                     "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(zone_layer):
            # write to file
            f.setGeometry(_f.geometry())
            f.addAttribute(0, _f.attributeMap()[0])
            f.addAttribute(1, _f.attributeMap()[1])

            # retrieve count from statistic
            try:
                gid = _f.attributeMap()[0].toString()
                bldg_count = stats[gid]
            except KeyError:
                # FIX: narrowed from a bare except — only a missing gid in
                # `stats` is expected here.
                bldg_count = 0
            f.addAttribute(2, QVariant(bldg_count))
            writer.addFeature(f)
        del writer, f
    except Exception as err:
        remove_shapefile(output_file)
        raise OperatorError("error creating zone: %s" % err, self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    # store data in output
    output_layer = load_shapefile(output_file, output_layername)
    if not output_layer:
        # FIX: the original message had no %s placeholder (so the `%`
        # raised a formatting TypeError) and named the wrong file.
        raise OperatorError('Error loading zone file %s' % output_file,
                            self.__class__)
    self.outputs[0].value = output_layer
    self.outputs[1].value = output_file
def loadCsv(self):
    """Convert the origin/destination CSV into a point shapefile.

    Reads the CSV path from the dialog's line edit, writes one point
    feature per origin and one per destination for every row into a
    timestamped shapefile under an ``output`` subdirectory next to the CSV.

    :returns: path of the shapefile that was written.
    :rtype: str
    """
    import time  # hoisted here from mid-function (was a stray inline import)

    Input_Table = self.lnEditCsvPath.text()  # filepath for the input CSV
    lon_field = 'origin_lon'  # field containing the origin longitude
    lat_field = 'origin_lat'  # field containing the origin latitude
    lon_dest = 'dest_lon'     # field containing the destination longitude
    lat_dest = 'dest_lat'     # field containing the destination latitude
    crs = 4326  # WGS 84 (GPS data)
    # FIX: was "EPSG" + str(4326), which silently ignored `crs`; derive the
    # label from the variable so the two can never drift apart.
    strCRS = "EPSG" + str(crs)

    directory = os.path.dirname(Input_Table) + "/output"
    ts = time.strftime("%Y%m%dT%H%M%S", time.gmtime())
    filename = ntpath.basename(self.lnEditCsvPath.text()).replace(
        ".csv", "") + "_" + ts + "_" + strCRS + ".shp"
    if not os.path.exists(directory):
        os.makedirs(directory)
    # set the filepath for the output shapefile
    outputLayerPath = directory + "//" + filename

    spatRef = QgsCoordinateReferenceSystem(
        crs, QgsCoordinateReferenceSystem.EpsgCrsId)
    inp_tab = QgsVectorLayer(Input_Table, 'Input_Table', 'ogr')
    fields = inp_tab.fields()
    outLayer = QgsVectorFileWriter(outputLayerPath, None, fields,
                                   QgsWkbTypes.Point, spatRef,
                                   "ESRI Shapefile")

    pt = QgsPointXY()
    pt_dest = QgsPointXY()
    outFeature = QgsFeature()
    outFeature_dest = QgsFeature()
    for feat in inp_tab.getFeatures():
        attrs = feat.attributes()
        # origin point
        pt.setX(float(feat[lon_field]))
        pt.setY(float(feat[lat_field]))
        outFeature.setAttributes(attrs)
        outFeature.setGeometry(QgsGeometry.fromPointXY(pt))
        outLayer.addFeature(outFeature)
        # destination point (same attributes as the origin row)
        pt_dest.setX(float(feat[lon_dest]))
        pt_dest.setY(float(feat[lat_dest]))
        outFeature_dest.setAttributes(attrs)
        outFeature_dest.setGeometry(QgsGeometry.fromPointXY(pt_dest))
        outLayer.addFeature(outFeature_dest)

    del outLayer  # flush and close the writer
    return outputLayerPath
def do_operation(self):
    """ Create footprint centroids joined with zone attributes.

    Intersects the footprint layer with the zone layer, then writes one
    point feature (the footprint centroid) per joined footprint carrying
    its longitude, latitude and zone value.

    Inputs (self.inputs): [0] zone layer, [1] zone field name,
    [2] footprint layer.
    Outputs (self.outputs): [0] centroid point layer, [1] its file path.
    Raises OperatorError on any failure.
    """
    # input/output verification already performed during set input/ouput
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    fp_layer = self.inputs[2].value

    # merge with zone to get assignment
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                               [zone_field])
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # output schema: lon, lat and zone value per centroid
    fields = {
        0: QgsField(self._lon_field, QVariant.Double),
        1: QgsField(self._lat_field, QVariant.Double),
        2: QgsField(zone_field, QVariant.String),
    }
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    fp_layername = 'fpc_%s' % get_unique_filename()
    fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
    try:
        writer = QgsVectorFileWriter(fp_file, "utf-8", fields,
                                     QGis.WKBPoint, self._crs,
                                     "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(tmp_join_layer):
            centroid = _f.geometry().centroid().asPoint()
            lon = centroid.x()
            lat = centroid.y()
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
            f.addAttribute(0, QVariant(lon))
            f.addAttribute(1, QVariant(lat))
            f.addAttribute(2, QVariant(zone_str))
            writer.addFeature(f)
        del writer
    except Exception as err:
        logAPICall.log(err, logAPICall.ERROR)
        remove_shapefile(fp_file)
        raise OperatorError("error creating joined grid: %s" % err,
                            self.__class__)

    # load shapefile as layer
    fp_layer = load_shapefile(fp_file, fp_layername)
    if not fp_layer:
        # BUG FIX: the original format string had no %s placeholder, so
        # applying "%" raised TypeError instead of this OperatorError.
        raise OperatorError('Error loading footprint centroid file %s'
                            % fp_file, self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    self.outputs[0].value = fp_layer
    self.outputs[1].value = fp_file
def processAlgorithm(self, progress):
    """Here is where the processing itself takes place."""
    # Retrieve the parameter values entered by the user.
    base_filename = self.getParameterValue(self.BASE_LAYER)
    displaced_filename = self.getParameterValue(self.DISPLACED_LAYER)
    output = self.getOutputValue(self.OUTPUT_LAYER)

    # Input layer values are strings with the layer location; turn them
    # into QgsVectorLayer objects via processing.getObjectFromUri().
    base_layer = dataobjects.getObjectFromUri(base_filename)
    displayed_layer = dataobjects.getObjectFromUri(displaced_filename)

    # ID field index for each layer.
    base_id_idx = base_layer.fieldNameIndex(
        self.getParameterValue(self.BASE_ID_FIELD))
    displaced_id_idx = displayed_layer.fieldNameIndex(
        self.getParameterValue(self.DISPLACED_ID_FIELD))

    # Output schema: just the displaced-point ID field.
    fields = QgsFields()
    fields.append(displayed_layer.fields()[displaced_id_idx])

    # Map feature ID -> point geometry for both layers.
    displaced_points = {
        f[displaced_id_idx]: f.geometry().asPoint()
        for f in vector.features(displayed_layer)
    }
    base_points = {
        f[base_id_idx]: f.geometry().asPoint()
        for f in vector.features(base_layer)
    }

    # Build the output line layer.
    settings = QSettings()
    systemEncoding = settings.value('/UI/encoding', 'System')
    provider = displayed_layer.dataProvider()
    writer = QgsVectorFileWriter(output, systemEncoding, fields,
                                 QGis.WKBLineString, provider.crs())

    # Link every displaced point back to its base point.
    for id, endpoint in displaced_points.iteritems():
        try:
            startpoint = base_points[id]
        except KeyError:
            ProcessingLog.addToLog(
                ProcessingLog.LOG_WARNING,
                "Couldn't find input feature with ID {}".format(id))
        else:
            feature = QgsFeature()
            feature.setGeometry(
                QgsGeometry.fromPolyline([startpoint, endpoint]))
            feature.setAttributes([id, ])
            writer.addFeature(feature)
def processAlgorithm(self, progress):
    """Here is where the processing itself takes place."""
    # Retrieve the parameter values entered by the user.
    inputFilename = self.getParameterValue(self.INPUT_LAYER)
    radius = float(self.getParameterValue(self.PROTECTION_DISTANCE))
    base_epsilon = float(
        ProcessingConfig.getSetting(
            DifferentialPrivacyUtils.DIFFERENTIAL_EPSILON))
    limit_nine_five = self.getParameterValue(self.LIMIT_NINETY_FIVE)

    # Polar displacement generators: gamma-distributed distance whose
    # scale is radius / base epsilon, and a uniform angle in [0, 2*pi).
    r_generator = gamma(2., scale=radius / base_epsilon)
    theta_generator = uniform(scale=2 * np.pi)

    output = self.getOutputValue(self.OUTPUT_LAYER)

    # Input layer value is a string location; convert it to a
    # QgsVectorLayer via processing.getObjectFromUri().
    vectorLayer = dataobjects.getObjectFromUri(inputFilename)

    # Create the output layer with the same schema, geometry type and
    # CRS as the input.
    settings = QSettings()
    systemEncoding = settings.value('/UI/encoding', 'System')
    provider = vectorLayer.dataProvider()
    writer = QgsVectorFileWriter(output, systemEncoding,
                                 provider.fields(),
                                 provider.geometryType(),
                                 provider.crs())

    # Displace every feature (honouring the layer selection settings)
    # and write it to the output, optionally capping the displacement at
    # the 95th-percentile distance.
    nine_five_distance = r_generator.ppf(0.95)
    for f in vector.features(vectorLayer):
        r = r_generator.rvs()
        if limit_nine_five and r > nine_five_distance:
            r = nine_five_distance
        theta = theta_generator.rvs()
        g = f.geometryAndOwnership()
        g.translate(np.cos(theta) * r, np.sin(theta) * r)
        f.setGeometry(g)
        writer.addFeature(f)

    ProcessingLog.addToLog(
        ProcessingLog.LOG_INFO,
        "95% confiedence distance: {}".format(nine_five_distance))
    self.setOutputValue(self.NINETY_FIVE_DISTANCE, nine_five_distance)
def processing(options, f, progressBar, progressMessage):
    '''
    Select trees which are on the contour of the forest and isolated trees.
    '''
    # Paths derived from the destination directory and the tile name.
    forestSelectedPath = options['dst'] + 'tif/' + f + \
        '_forest_selected.tif'
    crownsPath = options['dst'] + 'shp/' + f + '_crowns.shp'

    outputDir = options["dst"]
    logFile = open(outputDir + "/log.txt", "a")
    logFile.write("gridstatisticsforpolygons started\n")
    logFile.close()

    # Max of the forest-selection raster per crown polygon ('_max' field).
    crowns = QgsVectorLayer(crownsPath, "crowns", "ogr")
    inputStatRaster = QgsRasterLayer(forestSelectedPath, "forestSelected")
    z_stat = QgsZonalStatistics(crowns, inputStatRaster, '_', 1,
                                QgsZonalStatistics.Max)
    result_z_stat = z_stat.calculateStatistics(QgsFeedback())

    outputDir = options["dst"]
    logFile = open(outputDir + "/log.txt", "a")
    logFile.write("gridstatisticsforpolygons passed\n")
    logFile.close()

    # Keep only crowns overlapping the selected forest mask; delete the
    # rest together with their matching treetops.
    crowns.selectByExpression('"_max"=1.0')
    selected_array = crowns.getValues("N", True)
    crowns.invertSelection()
    unselected_array = crowns.getValues("N", True)
    unselected_crowns_ids = crowns.getValues("$id", True)
    unselected_top_ids = crowns.getValues('"N" - 1', True)
    crowns.dataProvider().deleteFeatures(unselected_crowns_ids[0])

    treetopsPath = options['dst'] + 'shp/' + f + '_treetops.shp'
    treetops = QgsVectorLayer(treetopsPath, 'Tree tops', 'ogr')
    treetops.dataProvider().deleteFeatures(unselected_top_ids[0])

    treetopsSelectedPath = options['dst'] + 'shp/' + f + \
        '_treetops_selected.shp'
    crownsSelectedPath = options['dst'] + 'shp/' + f + '_crowns_selected.shp'
    treetopsTrianglesPath = options['dst'] + 'shp/' + f + \
        '_treetops_triangles.shp'

    outputDir = options["dst"]
    logFile = open(outputDir + "/log.txt", "a")
    logFile.write("advancedpythonfieldcalculator started\n")
    logFile.close()

    # Renumber treetops: field N <- feature id.
    treetops.dataProvider().addAttributes([QgsField('N', QVariant.Int)])
    treetops.updateFields()
    treetops.startEditing()
    for treetop in treetops.getFeatures():
        treetops.changeAttributeValue(treetop.id(),
                                      treetop.fieldNameIndex('N'),
                                      treetop.id())
    treetops.commitChanges()

    outputDir = options["dst"]
    logFile = open(outputDir + "/log.txt", "a")
    logFile.write("joinattributesbylocation started\n")
    logFile.close()

    # Spatial join: store in each crown the id of the treetop it contains.
    # Adapted from https://github.com/qgis/QGIS-Processing
    # TODO: replace by native QGIS c++ algo when available...
    crowns.dataProvider().addAttributes([QgsField('tid', QVariant.Int)])
    crowns.updateFields()
    crowns.startEditing()
    fcount = crowns.featureCount()
    counter = 0
    for crown in crowns.getFeatures():
        counter += 1
        progressBar.setValue(100 + int(counter * (600 / fcount)))
        progressMessage.setText('Joining crown ' + str(counter) + '/' +
                                str(fcount))
        request = QgsFeatureRequest()
        request.setFilterRect(crown.geometry().boundingBox())
        dp = treetops.dataProvider()
        for r in dp.getFeatures(request):
            if crown.geometry().intersects(r.geometry()):
                crowns.changeAttributeValue(crown.id(),
                                            crown.fieldNameIndex('tid'),
                                            r.id())
    crowns.commitChanges()

    logFile = open(outputDir + "/log.txt", "a")
    logFile.write("delaunaytriangulation started\n")
    logFile.close()

    # Delaunay triangulation, adapted from the official Python plugin.
    # TODO: replace by native QGIS c++ algo when available...
    fields = QgsFields()
    fields.append(QgsField('POINTA', QVariant.Double, '', 24, 15))
    fields.append(QgsField('POINTB', QVariant.Double, '', 24, 15))
    fields.append(QgsField('POINTC', QVariant.Double, '', 24, 15))
    crs = QgsCoordinateReferenceSystem('EPSG:2056')
    triangleFile = QgsVectorFileWriter(treetopsTrianglesPath, 'utf-8',
                                       fields, QgsWkbTypes.Polygon, crs,
                                       'ESRI Shapefile')

    # Collect every treetop vertex; ptDict maps point index ->
    # (feature id, part index).
    pts = []
    ptDict = {}
    ptNdx = -1
    c = voronoi.Context()
    features = treetops.getFeatures()
    total = 100.0 / treetops.featureCount() if treetops.featureCount() else 0
    progressMessage.setText('Starting triangulation...')
    for current, inFeat in enumerate(features):
        geom = QgsGeometry(inFeat.geometry())
        if geom.isNull():
            continue
        if geom.isMultipart():
            points = geom.asMultiPoint()
        else:
            points = [geom.asPoint()]
        for n, point in enumerate(points):
            pts.append((point.x(), point.y()))
            ptNdx += 1
            ptDict[ptNdx] = (inFeat.id(), n)
    progressMessage.setText('Triangulation step 1 ok')

    if len(pts) < 3:
        raise QgsProcessingException(
            'Input file should contain at least 3 points. Choose '
            'another file and try again.')

    uniqueSet = set(item for item in pts)
    ids = [pts.index(item) for item in uniqueSet]
    sl = voronoi.SiteList([voronoi.Site(*i) for i in uniqueSet])
    c.triangulate = True
    voronoi.voronoi(sl, c)
    triangles = c.triangles
    feat = QgsFeature()
    total = 100.0 / len(triangles) if triangles else 1
    for current, triangle in enumerate(triangles):
        # Close the ring and fetch the three corner treetops.
        indices = list(triangle)
        indices.append(indices[0])
        polygon = []
        attrs = []
        step = 0
        for index in indices:
            fid, n = ptDict[ids[index]]
            request = QgsFeatureRequest().setFilterFid(fid)
            inFeat = next(treetops.getFeatures(request))
            geom = QgsGeometry(inFeat.geometry())
            polygon.append(QgsPoint(geom.asPoint()))
            if step <= 3:
                attrs.append(ids[index])
            step += 1
        linestring = QgsLineString(polygon)
        poly = QgsPolygon()
        poly.setExteriorRing(linestring)
        feat.setAttributes(attrs)
        feat.setGeometry(QgsGeometry().fromWkt(poly.asWkt()))
        triangleFile.addFeature(feat)
    progressMessage.setText('Triangulation terminated')

    # Remove triangles with perimeter higher than the threshold.
    triangles = QgsVectorLayer(treetopsTrianglesPath, 'triangles', 'ogr')
    maxPeri = str(options['MaxTrianglePerimeter'])
    triangles.selectByExpression('$perimeter > ' + maxPeri)
    triangles_to_delete_ids = triangles.getValues("$id", True)
    triangles.dataProvider().deleteFeatures(triangles_to_delete_ids[0])

    outputDir = options["dst"]
    logFile = open(outputDir + "/log.txt", "a")
    logFile.write("treeSelector passed\n")
    logFile.close()
    progressMessage.setText('Starting convexhull computing...')
def do_operation(self):
    """ perform create mapping scheme operation """
    # input/output verification already performed during set input/ouput
    svy_layer = self.inputs[0].value

    # These checks cannot be performed at set-input time because the
    # data layer may not be loaded yet.
    self._test_layer_loaded(svy_layer)

    # Choose tally storage: on-disk bsddb for very large layers, a
    # plain dict otherwise.
    total_features = svy_layer.dataProvider().featureCount()
    if total_features > MAX_FEATURES_IN_MEMORY:
        tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
        db = bsddb.btopen(tmp_db_file, 'c')
    else:
        db = {}

    # Tally statistics for each grid_id/building-type combination.
    tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
    for f in layer_features(svy_layer):
        centroid = f.geometry().centroid().asPoint()
        grid_id = latlon_to_grid(centroid.y(), centroid.x())
        tax_str = str(f.attributeMap()[tax_idx].toString())
        key = '%s %s' % (tax_str, grid_id)
        if db.has_key(key):
            db[key] = str(int(db[key]) + 1)  # value as string required by bsddb
        else:
            db[key] = '1'  # value as string required by bsddb

    # Write one exposure feature per tally entry.
    exposure_layername = 'exp_%s' % get_unique_filename()
    exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)
    try:
        writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields,
                                     self._outputGeometryType(),
                                     self._crs, "ESRI Shapefile")
        f = QgsFeature()
        gid = 0
        for key, val in db.iteritems():
            (tax_str, grid_id) = key.split(' ')
            lon, lat = grid_to_latlon(int(grid_id))
            f.setGeometry(self._outputGeometryFromGridId(grid_id))
            f.addAttribute(0, QVariant(grid_id))
            f.addAttribute(1, QVariant(lon))
            f.addAttribute(2, QVariant(lat))
            f.addAttribute(3, QVariant(tax_str))
            f.addAttribute(4, QVariant(''))
            f.addAttribute(5, QVariant(val))
            writer.addFeature(f)
            gid += 1
        del writer, f
    except Exception as err:
        remove_shapefile(exposure_file)
        raise OperatorError("error creating exposure file: %s" % err,
                            self.__class__)

    # load shapefile as layer
    exposure_layer = load_shapefile(exposure_file, exposure_layername)
    if not exposure_layer:
        raise OperatorError(
            'Error loading exposure file %s' % (exposure_file),
            self.__class__)

    # store data in output
    self.outputs[0].value = exposure_layer
    self.outputs[1].value = exposure_file
def do_operation(self):
    """ Distribute zone building counts onto grid points.

    Intersects the grid layer with the zone layer, tallies how many grid
    points fall in each zone, then writes a point shapefile in which each
    grid point carries its zone name and a proportional share of the
    zone's building count.  Zones that receive no grid point get a single
    synthetic point near their centroid.

    Inputs (self.inputs): [0] zone layer, [1] zone field, [2] count
    field, [3] grid layer.
    Outputs (self.outputs): [0] grid point layer, [1] its file path.
    Raises OperatorError on any failure.
    """
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    count_field = self.inputs[2].value
    grid_layer = self.inputs[3].value

    # per-zone tallies: gid -> grid-point count / building count
    zone_stats = {}
    zone_count_stats = {}
    gid_idx = layer_field_index(zone_layer, self._gid_field)
    count_idx = layer_field_index(zone_layer, count_field)
    for _f in layer_features(zone_layer):
        gid = _f.attributeMap()[gid_idx].toString()
        zone_stats[gid] = 0
        zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]

    # create storage for temporary output data
    use_grid_db = grid_layer.dataProvider().featureCount(
    ) > MAX_FEATURES_IN_MEMORY
    # NOTE(review): the on-disk bsddb branch is disabled ("if False");
    # it was presumably meant to test use_grid_db — confirm before
    # re-enabling, as that would change runtime behavior.
    if False:
        tmp_grid_db_file = '%sdb_%s.db' % (self._tmp_dir,
                                           get_unique_filename())
        grid_points = bsddb.btopen(tmp_grid_db_file, 'c')
    else:
        grid_points = {}

    # merge to create stats
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                               [zone_field, count_field])
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    stats = layer_multifields_stats(tmp_join_layer,
                                    [zone_field, count_field])
    if stats == False:
        raise OperatorError(
            "error creating statistic based on input files",
            self.__class__)

    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    count_idx = layer_field_index(tmp_join_layer, count_field)
    lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
    lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
    gid_idx = layer_field_index(tmp_join_layer, self._gid_field)
    try:
        for _f in layer_features(tmp_join_layer):
            lon = _f.attributeMap()[lon_idx].toDouble()[0]
            lat = _f.attributeMap()[lat_idx].toDouble()[0]
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            count_val = _f.attributeMap()[count_idx].toDouble()[0]
            gid = _f.attributeMap()[gid_idx].toString()

            # update stats
            zone_stats[gid] += 1
            grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
    except Exception as err:
        # BUG FIX: format string lacked its %s placeholder, so the "%"
        # operator itself raised TypeError instead of OperatorError.
        raise OperatorError("error processing joined layer: %s" % err,
                            self.__class__)

    # test for zones without a grid point assigned
    count_idx = layer_field_index(zone_layer, count_field)
    gid_idx = layer_field_index(zone_layer, self._gid_field)
    zone_idx = layer_field_index(zone_layer, zone_field)
    _x_off, _y_off = self._x_off / 2.0, self._y_off / 2.0
    try:
        for _f in layer_features(zone_layer):
            centroid = _f.geometry().centroid().asPoint()
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            count_val = _f.attributeMap()[count_idx].toDouble()[0]
            gid = _f.attributeMap()[gid_idx].toString()
            if zone_stats[gid] == 0:
                # synthesize one point at the cell corner nearest the
                # zone centroid.
                # NOTE(review): lon uses DEFAULT_GRID_SIZE while lat
                # uses self._y_off — asymmetric; confirm intended.
                lon = int(centroid.x() /
                          DEFAULT_GRID_SIZE) * self._x_off + _x_off
                lat = int(
                    centroid.y() / self._y_off) * self._y_off + _y_off
                zone_stats[gid] += 1
                grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
    except Exception as err:
        # BUG FIX: added the missing %s placeholder (was a TypeError).
        raise OperatorError("error processing missing points: %s" % err,
                            self.__class__)

    # output result
    fields = {
        0: QgsField(self._lon_field, QVariant.Double),
        1: QgsField(self._lat_field, QVariant.Double),
        2: QgsField(zone_field, QVariant.String),
        3: QgsField(count_field, QVariant.Double),
    }
    grid_layername = 'grid_%s' % (get_unique_filename())
    grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
    try:
        f = QgsFeature()
        writer = QgsVectorFileWriter(grid_file, "utf-8", fields,
                                     QGis.WKBPoint, self._crs,
                                     "ESRI Shapefile")
        for key, value in grid_points.iteritems():
            [zone, zone_gid, lon, lat] = self._parse_key(key)
            f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
            # distribute the zone's building count evenly over its points
            value = float(
                value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
            self._write_feature(writer, f, lon, lat, zone, value)
        del writer
    except Exception as err:
        # BUG FIX: added the missing %s placeholder (was a TypeError).
        raise OperatorError("error creating joined grid file: %s" % err,
                            self.__class__)

    # load result layer
    grid_layer = load_shapefile(grid_file, grid_layername)
    if not grid_layer:
        # BUG FIX: added the missing %s placeholder (was a TypeError).
        raise OperatorError('Error loading joined grid file %s' % grid_file,
                            self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    self.outputs[0].value = grid_layer
    self.outputs[1].value = grid_file
def do_operation(self):
    """ Apply a mapping scheme to zone features to build an exposure layer.

    For every input feature carrying a building count, the mapping
    scheme assigns building-type samples; one output feature is written
    per sample with its type, count, size and replacement cost.

    Inputs (self.inputs): [0] source layer, [1] zone field, [2] count
    field, [3] mapping scheme.
    Outputs (self.outputs): [0] exposure layer, [1] its file path.
    Raises OperatorError on any failure.
    """
    src_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    count_field = self.inputs[2].value
    ms = self.inputs[3].value

    # These checks cannot be performed at set-input time because the
    # data layer may not be loaded yet.
    self._test_layer_loaded(src_layer)
    self._test_layer_field_exists(src_layer, zone_field)
    self._test_layer_field_exists(src_layer, count_field)

    # output file names
    exposure_layername = 'exp_%s' % get_unique_filename()
    exposure_file = '%sexp_%s.shp' % (self._tmp_dir, exposure_layername)

    # resolve all required field indices up-front
    provider = src_layer.dataProvider()
    if provider is None:
        raise OperatorError("input layer not correctly loaded",
                            self.__class__)
    zone_idx = layer_field_index(src_layer, zone_field)
    if zone_idx == -1:
        raise OperatorError(
            "field %s not found in input layer" % zone_field,
            self.__class__)
    count_idx = layer_field_index(src_layer, count_field)
    if count_idx == -1:
        raise OperatorError(
            "field %s not found in input layer" % count_field,
            self.__class__)
    gid_idx = layer_field_index(src_layer, GID_FIELD_NAME)
    if gid_idx == -1:
        raise OperatorError(
            "field %s not found in input layer" % GID_FIELD_NAME,
            self.__class__)
    area_idx = layer_field_index(src_layer, AREA_FIELD_NAME)

    provider.select(provider.attributeIndexes(), provider.extent())
    provider.rewind()
    try:
        writer = QgsVectorFileWriter(exposure_file, "utf-8",
                                     self._fields,
                                     provider.geometryType(),
                                     self._crs, "ESRI Shapefile")
        out_feature = QgsFeature()
        gid = 0
        for in_feature in layer_features(src_layer):
            geom = in_feature.geometry()
            centroid = geom.centroid().asPoint()
            gid = in_feature.attributeMap()[gid_idx]
            zone_str = str(in_feature.attributeMap()[zone_idx].toString())
            count = in_feature.attributeMap()[count_idx].toDouble()[0]
            if area_idx > 0:
                area = in_feature.attributeMap()[area_idx].toDouble()[0]
            else:
                area = 0

            count = int(count + 0.5)
            if count == 0:
                continue
            stats = ms.get_assignment_by_name(zone_str)

            # use default stats if missing
            if stats is None:
                raise Exception("no mapping scheme found for zone %s"
                                % zone_str)
            for _sample in stats.get_samples(count,
                                             self._extrapolationOption):
                # write out if there are structures assigned
                _type = _sample[0]
                _cnt = _sample[1]
                if area > 0:
                    # use area provided by footprint/zone if defined
                    _size = area * (float(_sample[1]) / count)
                    if _sample[3] > 0 and _sample[2] > 0:
                        _cost = (_sample[3] / _sample[2]) * area
                    else:
                        _cost = 0
                else:
                    # use mapping scheme generic area otherwise
                    _size = _sample[2]
                    _cost = _sample[3]
                if _cnt > 0:
                    out_feature.setGeometry(geom)
                    out_feature.addAttribute(0, gid)
                    out_feature.addAttribute(1, QVariant(centroid.x()))
                    out_feature.addAttribute(2, QVariant(centroid.y()))
                    out_feature.addAttribute(3, QVariant(_type))
                    out_feature.addAttribute(4, QVariant(zone_str))
                    out_feature.addAttribute(5, QVariant(_cnt))
                    out_feature.addAttribute(6, QVariant(_size))
                    out_feature.addAttribute(7, QVariant(_cost))
                    writer.addFeature(out_feature)
        del writer, out_feature
    except Exception as err:
        remove_shapefile(exposure_file)
        raise OperatorError("error creating exposure file: %s" % err,
                            self.__class__)
    del src_layer

    # load shapefile as layer
    exposure_layer = load_shapefile(exposure_file, exposure_layername)
    if not exposure_layer:
        # BUG FIX: the original format string had no %s placeholder, so
        # applying "%" raised TypeError instead of this OperatorError.
        raise OperatorError(
            'Error loading exposure file %s' % exposure_file,
            self.__class__)

    # store data in output
    self.outputs[0].value = exposure_layer
    self.outputs[1].value = exposure_file
def run(self):
    """Risk plugin for classified polygon hazard on land cover.

    Counts area of land cover types exposed to hazard zones.

    :returns: Impact layer
    :rtype: Vector
    """
    # Hazard and exposure layers plus their classification keywords.
    hazard = self.hazard.layer
    exposure = self.exposure.layer
    type_attr = self.exposure.keyword('field')
    self.hazard_class_attribute = self.hazard.keyword('field')
    self.hazard_class_mapping = self.hazard.keyword('value_map')
    hazard_value_to_class = {}
    for key, values in self.hazard_class_mapping.items():
        for value in values:
            hazard_value_to_class[value] = self.hazard_columns[key]

    # Transforms used to re-project geometries and extents.
    crs_wgs84 = QgsCoordinateReferenceSystem('EPSG:4326')
    hazard_to_exposure = QgsCoordinateTransform(hazard.crs(),
                                                exposure.crs())
    wgs84_to_hazard = QgsCoordinateTransform(crs_wgs84, hazard.crs())
    wgs84_to_exposure = QgsCoordinateTransform(crs_wgs84, exposure.crs())

    extent = QgsRectangle(self.requested_extent[0],
                          self.requested_extent[1],
                          self.requested_extent[2],
                          self.requested_extent[3])
    extent_hazard = wgs84_to_hazard.transformBoundingBox(extent)
    extent_exposure = wgs84_to_exposure.transformBoundingBox(extent)
    extent_exposure_geom = QgsGeometry.fromRect(extent_exposure)

    # Spatial index of hazard features, re-projected to the exposure CRS.
    hazard_index = QgsSpatialIndex()
    hazard_features = {}
    for f in hazard.getFeatures(QgsFeatureRequest(extent_hazard)):
        f.geometry().transform(hazard_to_exposure)
        hazard_index.insertFeature(f)
        hazard_features[f.id()] = QgsFeature(f)

    # Impact layer: exposure schema plus the target classification field.
    filename = unique_filename(suffix='.shp')
    impact_fields = exposure.dataProvider().fields()
    impact_fields.append(QgsField(self.target_field, QVariant.String))
    writer = QgsVectorFileWriter(filename, 'utf-8', impact_fields,
                                 QGis.WKBPolygon, exposure.crs())

    # Iterate over all exposure polygons and calculate the impact.
    _calculate_landcover_impact(exposure, extent_exposure,
                                extent_exposure_geom,
                                self.hazard_class_attribute,
                                hazard_features, hazard_index,
                                hazard_value_to_class,
                                impact_fields, writer)
    del writer

    impact_layer = QgsVectorLayer(filename, 'Impacted Land Cover', 'ogr')
    if impact_layer.featureCount() == 0:
        raise ZeroImpactException()

    zone_field = None
    if self.aggregator:
        zone_field = self.aggregator.exposure_aggregation_field

    # This is not the standard way to use mixins - normally it is used
    # with multiple inheritance; here it is called directly, so the
    # notes function must be monkey patched.
    mixin = LandCoverReportMixin(
        question=self.question,
        impact_layer=impact_layer,
        target_field=self.target_field,
        ordered_columns=self.hazard_columns.values(),
        affected_columns=self.affected_hazard_columns,
        land_cover_field=type_attr,
        zone_field=zone_field)
    mixin.notes = self.notes
    impact_data = mixin.generate_data()

    # Define style for the impact layer: one class per hazard column.
    style_classes = [
        dict(label=self.hazard_columns['low'],
             value=self.hazard_columns['low'],
             colour='#acffb6', border_color='#000000',
             transparency=0, size=0.5),
        dict(label=self.hazard_columns['medium'],
             value=self.hazard_columns['medium'],
             colour='#ffe691', border_color='#000000',
             transparency=0, size=0.5),
        dict(label=self.hazard_columns['high'],
             value=self.hazard_columns['high'],
             colour='#F31A1C', border_color='#000000',
             transparency=0, size=0.5),
    ]
    style_info = dict(target_field=self.target_field,
                      style_classes=style_classes,
                      style_type='categorizedSymbol')

    extra_keywords = {
        'map_title': self.map_title(),
        'target_field': self.target_field,
    }
    impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

    # Create vector layer and return
    impact_layer = Vector(data=impact_layer,
                          name=self.map_title(),
                          keywords=impact_layer_keywords,
                          style_info=style_info)
    impact_layer.impact_data = impact_data
    self._impact = impact_layer
    return impact_layer
def run_blur(self): self.progressBar_blur.setValue(0) self.label_progress.setText('') # Get all the fields. layer_to_blur = self.comboBox_layerToBlur.currentLayer() radius = self.spinBox_radius.value() display = self.checkBox_addToMap.isChecked() selected_features_only = self.checkBox_selectedOnlyFeatures.isChecked() file_name = self.lineEdit_outputFile.text() export_radius = self.checkBox_exportRadius.isChecked() export_centroid = self.checkBox_exportCentroid.isChecked() if self.checkBox_envelope.isChecked(): layer_envelope = self.comboBox_envelope.currentLayer() else: layer_envelope = None # Test values try: if not layer_to_blur: raise NoLayerProvidedException if not file_name and not display: raise NoFileNoDisplayException if layer_to_blur.crs().mapUnits() != 0: msg = tr('The projection of the map or of the layer is not ' 'in meters. These parameters should be in meters.') display_message_bar(msg, level=QgsMessageBar.WARNING, duration=5) if not file_name: file_name = getTempFilenameInTempFolder('blurring.shp') if layer_envelope: if layer_to_blur.crs() != layer_envelope.crs(): raise DifferentCrsException( epsg1=layer_to_blur.crs().authid(), epsg2=layer_envelope.crs().authid()) self.label_progress.setText('Creating index ...') layer_envelope = LayerIndex(layer_envelope) self.progressBar_blur.setValue(0) self.label_progress.setText('Blurring ...') if selected_features_only: features = layer_to_blur.selectedFeatures() nb_features = layer_to_blur.selectedFeatureCount() else: features = layer_to_blur.getFeatures() nb_features = layer_to_blur.featureCount() # Fields fields = layer_to_blur.pendingFields() if export_radius: fields.append(QgsField(u"Radius", QVariant.Int)) if export_centroid: fields.append(QgsField(u"X centroid", QVariant.Int)) fields.append(QgsField(u"Y centroid", QVariant.Int)) # Creating the output shapefile file_writer = QgsVectorFileWriter(file_name, 'utf-8', fields, QGis.WKBPolygon, layer_to_blur.crs(), 'ESRI Shapefile') if file_writer.hasError() != 
QgsVectorFileWriter.NoError: raise CreatingShapeFileException(suffix=file_writer.hasError()) # Creating the algorithm with radius algo = Blur(radius, layer_envelope, export_radius, export_centroid) for j, feature in enumerate(features): feature = algo.blur(feature) file_writer.addFeature(feature) # Update progress bar percent = int((j + 1) * 100 / nb_features) self.progressBar_blur.setValue(percent) # Write all features in the file del file_writer if display: old_default_projection = self.settings.value( '/Projections/defaultBehaviour') self.settings.setValue('/Projections/defaultBehaviour', 'useProject') layer_name = basename(file_name) new_layer = QgsVectorLayer(file_name, layer_name, 'ogr') new_layer.commitChanges() new_layer.clearCacheImage() # noinspection PyArgumentList QgsMapLayerRegistry.instance().addMapLayers([new_layer]) self.settings.setValue('/Projections/defaultBehaviour', old_default_projection) msg = tr('Successful export in %s' % file_name) iface.messageBar().pushMessage(msg, level=QgsMessageBar.INFO, duration=5) self.signalAskCloseWindow.emit() except GeoHealthException, e: self.label_progress.setText('') display_message_bar(msg=e.msg, level=e.level, duration=e.duration)
def createVectorWriter(destination, encoding, fields, geometryType, crs,
                       context, options=None):
    """Create a feature sink for *destination*.

    Supports four destination flavours, selected by the destination
    prefix: in-memory layers, PostGIS tables, Spatialite tables and
    (the fallback) OGR files.  Returns a ``(sink, destination, layer)``
    tuple; ``layer`` is None for file outputs.
    """
    layer = None
    sink = None

    # Fall back to the configured processing encoding.
    if encoding is None:
        settings = QgsSettings()
        encoding = settings.value('/Processing/encoding', 'System', str)

    if destination.startswith(MEMORY_LAYER_PREFIX):
        # Memory layer described entirely by its URI.
        uri = QgsWkbTypes.displayString(geometryType) + "?uuid=" + str(uuid.uuid4())
        if crs.isValid():
            uri += '&crs=' + crs.authid()
        fieldsdesc = []
        for f in fields:
            qgsfield = _toQgsField(f)
            fieldsdesc.append(
                'field=%s:%s' % (qgsfield.name(),
                                 TYPE_MAP_MEMORY_LAYER.get(qgsfield.type(), "string")))
        if fieldsdesc:
            uri += '&' + '&'.join(fieldsdesc)

        layer = QgsVectorLayer(uri, destination, 'memory')
        sink = layer.dataProvider()
        context.temporaryLayerStore().addMapLayer(layer, False)
    elif destination.startswith(POSTGIS_LAYER_PREFIX):
        uri = QgsDataSourceUri(destination[len(POSTGIS_LAYER_PREFIX):])
        connInfo = uri.connectionInfo()
        (success, user, passwd) = QgsCredentials.instance().get(connInfo, None, None)
        if success:
            QgsCredentials.instance().put(connInfo, user, passwd)
        else:
            raise GeoAlgorithmExecutionException("Couldn't connect to database")
        try:
            db = postgis.GeoDB(host=uri.host(), port=int(uri.port()),
                               dbname=uri.database(), user=user,
                               passwd=passwd)
        except postgis.DbError as e:
            raise GeoAlgorithmExecutionException(
                "Couldn't connect to database:\n%s" % e.message)

        def _runSQL(sql):
            # Execute and commit one statement, wrapping DB errors.
            try:
                db._exec_sql_and_commit(str(sql))
            except postgis.DbError as e:
                raise GeoAlgorithmExecutionException(
                    'Error creating output PostGIS table:\n%s' % e.message)

        fields = [_toQgsField(f) for f in fields]
        fieldsdesc = ",".join(
            '%s %s' % (f.name(),
                       TYPE_MAP_POSTGIS_LAYER.get(f.type(), "VARCHAR"))
            for f in fields)

        _runSQL("CREATE TABLE %s.%s (%s)" % (uri.schema(), uri.table().lower(), fieldsdesc))
        if geometryType != QgsWkbTypes.NullGeometry:
            _runSQL("SELECT AddGeometryColumn('{schema}', '{table}', 'the_geom', {srid}, '{typmod}', 2)".format(
                table=uri.table().lower(), schema=uri.schema(),
                srid=crs.authid().split(":")[-1],
                typmod=QgsWkbTypes.displayString(geometryType).upper()))

        layer = QgsVectorLayer(uri.uri(), uri.table(), "postgres")
        sink = layer.dataProvider()
        context.temporaryLayerStore().addMapLayer(layer, False)
    elif destination.startswith(SPATIALITE_LAYER_PREFIX):
        uri = QgsDataSourceUri(destination[len(SPATIALITE_LAYER_PREFIX):])
        try:
            db = spatialite.GeoDB(uri=uri)
        except spatialite.DbError as e:
            raise GeoAlgorithmExecutionException(
                "Couldn't connect to database:\n%s" % e.message)

        def _runSQL(sql):
            # Execute and commit one statement, wrapping DB errors.
            try:
                db._exec_sql_and_commit(str(sql))
            except spatialite.DbError as e:
                raise GeoAlgorithmExecutionException(
                    'Error creating output Spatialite table:\n%s' % str(e))

        fields = [_toQgsField(f) for f in fields]
        fieldsdesc = ",".join(
            '%s %s' % (f.name(),
                       TYPE_MAP_SPATIALITE_LAYER.get(f.type(), "VARCHAR"))
            for f in fields)

        _runSQL("DROP TABLE IF EXISTS %s" % uri.table().lower())
        _runSQL("CREATE TABLE %s (%s)" % (uri.table().lower(), fieldsdesc))
        if geometryType != QgsWkbTypes.NullGeometry:
            _runSQL("SELECT AddGeometryColumn('{table}', 'the_geom', {srid}, '{typmod}', 2)".format(
                table=uri.table().lower(),
                srid=crs.authid().split(":")[-1],
                typmod=QgsWkbTypes.displayString(geometryType).upper()))

        layer = QgsVectorLayer(uri.uri(), uri.table(), "spatialite")
        sink = layer.dataProvider()
        context.temporaryLayerStore().addMapLayer(layer, False)
    else:
        # OGR file output: map file extensions to OGR driver names.
        formats = QgsVectorFileWriter.supportedFiltersAndFormats()
        OGRCodes = {}
        for (key, value) in list(formats.items()):
            extension = str(key)
            extension = extension[extension.find('*.') + 2:]
            extension = extension[:extension.find(' ')]
            OGRCodes[extension] = value
        OGRCodes['dbf'] = "DBF file"

        extension = destination[destination.rfind('.') + 1:]
        if extension not in OGRCodes:
            extension = 'shp'
            destination = destination + '.shp'

        if geometryType == QgsWkbTypes.NoGeometry:
            if extension == 'shp':
                extension = 'dbf'
                destination = destination[:destination.rfind('.')] + '.dbf'
            if extension not in NOGEOMETRY_EXTENSIONS:
                raise GeoAlgorithmExecutionException(
                    "Unsupported format for tables with no geometry")

        qgsfields = QgsFields()
        for field in fields:
            qgsfields.append(_toQgsField(field))

        # use default dataset/layer options
        dataset_options = QgsVectorFileWriter.defaultDatasetOptions(OGRCodes[extension])
        layer_options = QgsVectorFileWriter.defaultLayerOptions(OGRCodes[extension])

        sink = QgsVectorFileWriter(destination, encoding, qgsfields,
                                   geometryType, crs, OGRCodes[extension],
                                   dataset_options, layer_options)

    return sink, destination, layer
def compute(self, bound, xOffset, yOffset, polygon):
    """Generate a regular grid over *bound* and write it to a shapefile.

    :param bound: extent (QgsRectangle) to cover; rotated first if the
        dialog's angle spinbox is non-zero.
    :param xOffset: horizontal spacing between grid lines / cell width.
    :param yOffset: vertical spacing between grid lines / cell height.
    :param polygon: if True write rectangular polygon cells with
        X_MIN/X_MAX/Y_MIN/Y_MAX attributes; if False write horizontal and
        vertical grid lines with a COORD attribute.

    Writes to ``self.shapefileName`` (existing file is deleted first) and
    drives ``self.progressBar``. Returns None.
    """
    crs = None
    layer = ftools_utils.getMapLayerByName(
        unicode(self.inShape.currentText()))
    if self.angle.value() != 0.0:
        bound = self.initRotation(bound)
    # CRS comes from the selected layer, or the canvas when none is chosen.
    if layer is None:
        crs = self.iface.mapCanvas().mapRenderer().destinationCrs()
    else:
        crs = layer.crs()
    if not crs.isValid():
        crs = None
    fields = QgsFields()
    fields.append(QgsField("ID", QVariant.Int))
    fieldCount = 1
    if polygon:
        fields.append(QgsField("X_MIN", QVariant.Double))
        fields.append(QgsField("X_MAX", QVariant.Double))
        fields.append(QgsField("Y_MIN", QVariant.Double))
        fields.append(QgsField("Y_MAX", QVariant.Double))
        fieldCount = 5
        check = QFile(self.shapefileName)
        if check.exists():
            if not QgsVectorFileWriter.deleteShapeFile(self.shapefileName):
                return
        writer = QgsVectorFileWriter(self.shapefileName, self.encoding,
                                     fields, QGis.WKBPolygon, crs)
    else:
        fields.append(QgsField("COORD", QVariant.Double))
        fieldCount = 2
        check = QFile(self.shapefileName)
        if check.exists():
            if not QgsVectorFileWriter.deleteShapeFile(self.shapefileName):
                return
        writer = QgsVectorFileWriter(self.shapefileName, self.encoding,
                                     fields, QGis.WKBLineString, crs)
    outFeat = QgsFeature()
    outFeat.initAttributes(fieldCount)
    outFeat.setFields(fields)
    outGeom = QgsGeometry()
    idVar = 0
    self.progressBar.setValue(0)
    if not polygon:
        # counters for progressbar - update every 10%
        count = 0
        count_max = (bound.yMaximum() - bound.yMinimum()) / yOffset
        count_update = count_max * 0.10
        # Horizontal lines, top to bottom: 0-50% of the progress bar.
        y = bound.yMaximum()
        while y >= bound.yMinimum():
            pt1 = QgsPoint(bound.xMinimum(), y)
            pt2 = QgsPoint(bound.xMaximum(), y)
            if self.angle.value() != 0.0:
                self.rotatePoint(pt1)
                self.rotatePoint(pt2)
            line = [pt1, pt2]
            outFeat.setGeometry(outGeom.fromPolyline(line))
            outFeat.setAttribute(0, idVar)
            outFeat.setAttribute(1, y)
            writer.addFeature(outFeat)
            y = y - yOffset
            idVar = idVar + 1
            count += 1
            if int(math.fmod(count, count_update)) == 0:
                prog = int(count / count_max * 50)
                self.progressBar.setValue(prog)
        self.progressBar.setValue(50)
        # counters for progressbar - update every 10%
        count = 0
        count_max = (bound.xMaximum() - bound.xMinimum()) / xOffset
        count_update = count_max * 0.10
        # Vertical lines, left to right: 50-100% of the progress bar.
        x = bound.xMinimum()
        while x <= bound.xMaximum():
            pt1 = QgsPoint(x, bound.yMaximum())
            pt2 = QgsPoint(x, bound.yMinimum())
            if self.angle.value() != 0.0:
                self.rotatePoint(pt1)
                self.rotatePoint(pt2)
            line = [pt1, pt2]
            outFeat.setGeometry(outGeom.fromPolyline(line))
            outFeat.setAttribute(0, idVar)
            outFeat.setAttribute(1, x)
            writer.addFeature(outFeat)
            x = x + xOffset
            idVar = idVar + 1
            count += 1
            if int(math.fmod(count, count_update)) == 0:
                prog = 50 + int(count / count_max * 50)
                self.progressBar.setValue(prog)
    else:
        # counters for progressbar - update every 5%
        count = 0
        count_max = (bound.yMaximum() - bound.yMinimum()) / yOffset
        count_update = count_max * 0.05
        # One closed 5-point ring per cell (first point repeated last).
        y = bound.yMaximum()
        while y >= bound.yMinimum():
            x = bound.xMinimum()
            while x <= bound.xMaximum():
                pt1 = QgsPoint(x, y)
                pt2 = QgsPoint(x + xOffset, y)
                pt3 = QgsPoint(x + xOffset, y - yOffset)
                pt4 = QgsPoint(x, y - yOffset)
                pt5 = QgsPoint(x, y)
                if self.angle.value() != 0.0:
                    self.rotatePoint(pt1)
                    self.rotatePoint(pt2)
                    self.rotatePoint(pt3)
                    self.rotatePoint(pt4)
                    self.rotatePoint(pt5)
                polygon = [[pt1, pt2, pt3, pt4, pt5]]
                outFeat.setGeometry(outGeom.fromPolygon(polygon))
                outFeat.setAttribute(0, idVar)
                outFeat.setAttribute(1, x)
                outFeat.setAttribute(2, x + xOffset)
                outFeat.setAttribute(3, y - yOffset)
                outFeat.setAttribute(4, y)
                writer.addFeature(outFeat)
                idVar = idVar + 1
                x = x + xOffset
            y = y - yOffset
            count += 1
            if int(math.fmod(count, count_update)) == 0:
                # BUG FIX: the computed percentage was discarded and the bar
                # was forced to 100 on every tick; report the real progress.
                prog = int(count / count_max * 100)
                self.progressBar.setValue(prog)
    del writer
def points_along_line(layerout, startpoint, endpoint, distance, label, layer,
                      selected_only=True, force=False, fo_fila=False,
                      divide=0, decimal=2):
    """Create a point layer with chainage points along each line of *layer*.

    A memory layer named *layerout* is created, points are generated by
    ``create_points_at`` for every (selected) line feature, and the layer is
    added to the current QGIS project. Optionally enables PAL labeling of
    the chainage field.

    :param layerout: name for the new point layer.
    :param startpoint: chainage start offset (passed to create_points_at).
    :param endpoint: chainage end offset (passed to create_points_at).
    :param distance: spacing between generated points.
    :param label: if truthy, enable labeling on the output layer.
    :param layer: input line layer (QgsVectorLayer).
    :param selected_only: use only selected features when True.
    :param force: forwarded to create_points_at.
    :param fo_fila: forwarded to create_points_at.
    :param divide: forwarded to create_points_at.
    :param decimal: number of decimals used when formatting labels.
    :returns: None.
    """
    crs = layer.crs().authid()
    # TODO check for virtual or shapelayer and set virt_layer according to it
    # NOTE: 'shape' is hard-coded to False, so the whole shapefile branch
    # below is dead scaffolding kept as a template for a future feature.
    shape = False
    if shape:
        # define fields for feature attributes. A list of QgsField objects is needed
        fields = [
            QgsField("first", QVariant.Int),
            QgsField("second", QVariant.String)
        ]
        # create an instance of vector file writer, which will create the vector file.
        # Arguments:
        # 1. path to new file (will fail if exists already)
        # 2. encoding of the attributes
        # 3. field map
        # 4. geometry type - from WKBTYPE enum
        # 5. layer's spatial reference (instance of
        #    QgsCoordinateReferenceSystem) - optional
        # 6. driver name for the output file
        writer = QgsVectorFileWriter("my_shapes.shp", "CP1250", fields,
                                     Qgis.WKBPoint, crs, "ESRI Shapefile")
        if writer.hasError() != QgsVectorFileWriter.NoError:
            # fix_print_with_import
            print("Error when creating shapefile: ", writer.hasError())
        # add a feature
        fet = QgsFeature()
        fet.setGeometry(QgsGeometry.fromPoint(QgsPoint(10, 10)))
        fet.setAttributes([1, "text"])
        writer.addFeature(fet)
        # delete the writer to flush features to disk (optional)
        del writer
        layer_type = "Shapefile"
        # TODO Add Shapefile functionality here
    else:
        layer_type = "memory"
    virt_layer = QgsVectorLayer("Point?crs=%s" % crs, layerout, layer_type)
    provider = virt_layer.dataProvider()
    virt_layer.startEditing()   # actually writes attributes
    units = layer.crs().mapUnits()
    unitname = QgsUnitTypes.toString(units)
    # "cng<unit>" carries the chainage value in the layer's map units.
    provider.addAttributes([
        QgsField("fid", QVariant.Int),
        QgsField("cng" + unitname, QVariant.Double)
    ])

    def get_features():
        """Return selected features or all features, per selected_only."""
        if selected_only:
            return layer.selectedFeatures()
        else:
            return layer.getFeatures()

    # Loop through all (selected) features
    for feature in get_features():
        geom = feature.geometry()
        # Add feature ID of selected feature
        fid = feature.id()
        if not geom:
            QgsMessageLog.logMessage("No geometry", "QChainage")
            continue
        features = create_points_at(startpoint, endpoint, distance, geom,
                                    fid, force, fo_fila, divide)
        provider.addFeatures(features)
        virt_layer.updateExtents()
    proj = QgsProject.instance()
    proj.addMapLayers([virt_layer])
    virt_layer.commitChanges()
    virt_layer.reload()

    # generic labeling properties
    if label:
        virt_layer.setCustomProperty("labeling", "pal")
        virt_layer.setCustomProperty("labeling/enabled", "true")
        virt_layer.setCustomProperty("labeling/fieldName", "cng")
        virt_layer.setCustomProperty("labeling/fontSize", "10")
        virt_layer.setCustomProperty("labeling/multiLineLabels", "true")
        virt_layer.setCustomProperty("labeling/formatNumbers", "true")
        virt_layer.setCustomProperty("labeling/decimals", decimal)
        virt_layer.setCustomProperty("labeling/Size", "5")
    # symbol = QgsMarkerSymbol.createSimple({"name": "capital"})
    # virt_layer.setRenderer(QgsSingleSymbolRenderer(symbol))
    virt_layer.triggerRepaint()
    return
def parse(self):
    """
    Start parsing the osm file.

    Opens each OGR layer of the OSM file, collects the tag keys found in
    the hstore-encoded ``other_tags`` column (filtered by the per-layer
    whitelist), optionally drops empty layers, then dumps every layer to a
    temporary GeoJSON file with one attribute column per collected tag.

    NOTE(review): this block is Python 2 (`print` statement, `iteritems`,
    `unicode`) while other parts of the file are Python 3 — confirm the
    target interpreter before touching it.

    :returns: dict of per-layer metadata (vectorLayer, tags, geomType,
        featureCount, geojsonFile), or plain QgsVectorLayer objects when
        ``self.__loadOnly`` is set.
    """
    # Configuration for OGR
    gdal.SetConfigOption('OSM_CONFIG_FILE', self._osm_conf)
    gdal.SetConfigOption('OSM_USE_CUSTOM_INDEXING', 'NO')
    if not isfile(self.__osmFile):
        raise GeoAlgorithmExecutionException("File doesn't exist")
    uri = self.__osmFile + "|layername="
    layers = {}
    # If loadOnly, no parsing required:
    # It's used only when we ask to open an osm file
    if self.__loadOnly:
        file_name = basename(self.__osmFile)
        for layer in self.__layers:
            layers[layer] = QgsVectorLayer(uri + layer,
                                           file_name + " " + layer,
                                           "ogr")
            if not layers[layer].isValid():
                print "Error on the layer", layers[layer].lastError()
        return layers
    # Check if the order is node before way,relation
    # We don't check way before relation,
    # because we can have only nodes and relations
    with open(self.__osmFile) as f:
        for line in f:
            if re.search(r'node', line):
                break
            if re.search(r'(way|relation)', line):
                raise WrongOrderOSMException
    # Foreach layers
    for layer in self.__layers:
        self.signalText.emit(tr("OSMData", u"Parsing layer : " + layer))
        layers[layer] = {}
        # Reading it with a QgsVectorLayer
        layers[layer]['vectorLayer'] = QgsVectorLayer(
            uri + layer, "test_" + layer, "ogr")
        if not layers[layer]['vectorLayer'].isValid():
            msg = "Error on the layer : " + \
                  layers[layer]['vectorLayer'].lastError()
            raise GeoAlgorithmExecutionException(msg)
        # Set some default tags
        layers[layer]['tags'] = ['full_id', 'osm_id', 'osm_type']
        # Save the geometry type of the layer
        layers[layer]['geomType'] = layers[layer]['vectorLayer'].wkbType()
        # Set a featureCount
        layers[layer]['featureCount'] = 0
        # Get the other_tags
        fields = layers[layer]['vectorLayer'].pendingFields()
        field_names = [field.name() for field in fields]
        other_tags_index = field_names.index('other_tags')
        features = layers[layer]['vectorLayer'].getFeatures()
        for i, feature in enumerate(features):
            layers[layer]['featureCount'] += 1
            # Improve the parsing if comma in whitelist,
            # we skip the parsing of tags, but featureCount is needed
            if self.__whiteListColumn[layer] == ',':
                continue
            # Get the "others_tags" field
            attributes = feature.attributes()[other_tags_index]
            if attributes:
                h_store = pghstore.loads(attributes)
                for key in h_store:
                    if key not in layers[layer]['tags']:
                        # If the key in OSM is not already in the table
                        if self.__whiteListColumn[layer]:
                            if key in self.__whiteListColumn[layer]:
                                layers[layer]['tags'].append(key)
                        else:
                            layers[layer]['tags'].append(key)
            percent = int(100 / len(self.__layers) * (i + 1))
            self.signalPercentage.emit(percent)
    # Delete empty layers if this option is set to True
    if self.__deleteEmptyLayers:
        delete_layers = []
        for keys, values in layers.iteritems():
            if values['featureCount'] < 1:
                delete_layers.append(keys)
        for layer in delete_layers:
            del layers[layer]
    # Creating GeoJSON files for each layers
    for layer in self.__layers:
        msg = tr("OSMData", u"Creating GeoJSON file : " + layer)
        self.signalText.emit(msg)
        self.signalPercentage.emit(0)
        # Creating the temp file
        tf = tempfile.NamedTemporaryFile(delete=False,
                                         suffix="_" + layer + ".geojson")
        layers[layer]['geojsonFile'] = tf.name
        tf.flush()
        tf.close()
        # Adding the attribute table
        fields = QgsFields()
        for key in layers[layer]['tags']:
            fields.append(QgsField(key, QVariant.String))
        encoding = get_default_encoding()
        file_writer = QgsVectorFileWriter(
            layers[layer]['geojsonFile'],
            encoding,
            fields,
            layers[layer]['geomType'],
            layers[layer]['vectorLayer'].crs(),
            'GeoJSON')
        # Foreach feature in the layer
        features = layers[layer]['vectorLayer'].getFeatures()
        for i, feature in enumerate(features):
            fet = QgsFeature()
            fet.setGeometry(feature.geometry())
            new_attributes = []
            attributes = feature.attributes()
            # First three attributes are full_id / osm_id / osm_type;
            # the remaining columns come from the hstore 'other_tags'.
            if layer in ['points', 'lines', 'multilinestrings']:
                if layer == 'points':
                    osm_type = "node"
                elif layer == 'lines':
                    osm_type = "way"
                elif layer == 'multilinestrings':
                    osm_type = 'relation'
                new_attributes.append(self.DIC_OSM_TYPE[osm_type] +
                                      str(attributes[0]))
                new_attributes.append(attributes[0])
                new_attributes.append(osm_type)
                if attributes[1]:
                    h_store = pghstore.loads(attributes[1])
                    for tag in layers[layer]['tags'][3:]:
                        if unicode(tag) in h_store:
                            new_attributes.append(h_store[tag])
                        else:
                            new_attributes.append("")
                    fet.setAttributes(new_attributes)
                    file_writer.addFeature(fet)
            elif layer == 'multipolygons':
                # Multipolygons may come from a relation (osm_id set) or a
                # closed way (osm_way_id set).
                if attributes[0]:
                    osm_type = "relation"
                    new_attributes.append(self.DIC_OSM_TYPE[osm_type] +
                                          str(attributes[0]))
                    new_attributes.append(str(attributes[0]))
                else:
                    osm_type = "way"
                    new_attributes.append(self.DIC_OSM_TYPE[osm_type] +
                                          str(attributes[1]))
                    new_attributes.append(attributes[1])
                new_attributes.append(osm_type)
                h_store = pghstore.loads(attributes[2])
                for tag in layers[layer]['tags'][3:]:
                    if unicode(tag) in h_store:
                        new_attributes.append(h_store[tag])
                    else:
                        new_attributes.append("")
                fet.setAttributes(new_attributes)
                file_writer.addFeature(fet)
            percentage = int(100 / layers[layer]['featureCount'] * (i + 1))
            self.signalPercentage.emit(percentage)
        del file_writer
    return layers
def generate_sampling_points(self, pixel_values, number_of_samples,
                             min_distance, neighbor_aggregation,
                             attempts_by_sampling, progress_bar, random_seed):
    """Generate random sampling points inside the thematic raster extent.

    Draws candidate points until the requested count is reached or the
    attempt budget is exhausted, validates each candidate with
    ``self.check_sampling_point`` (which presumably enforces pixel-value
    and min-distance constraints — confirm in its definition), shuffles
    the accepted points, and writes them to ``self.output_file``.

    Some code base from (by Alexander Bruy):
    https://github.com/qgis/QGIS/blob/release-2_18/python/plugins/processing/algs/qgis/RandomPointsExtent.py

    :param pixel_values: raster values eligible for sampling.
    :param number_of_samples: desired count (int for "simple" sampling,
        list per category for "stratified").
    :param min_distance: minimum distance between samples.
    :param neighbor_aggregation: neighbour-check setting stored on self.
    :param attempts_by_sampling: attempts allowed per requested sample;
        falsy means unlimited.
    :param progress_bar: Qt progress bar updated with the accepted count.
    :param random_seed: seed for the random module (reproducibility).
    """
    self.pixel_values = pixel_values
    self.number_of_samples = number_of_samples  # desired
    self.total_of_samples = None  # total generated
    self.min_distance = min_distance
    self.neighbor_aggregation = neighbor_aggregation
    progress_bar.setValue(0)  # init progress bar
    self.ThematicR_boundaries = QgsGeometry().fromRect(
        self.ThematicR.extent())
    fields = QgsFields()
    fields.append(QgsField('id', QVariant.Int, '', 10, 0))
    thematic_CRS = self.ThematicR.qgs_layer.crs()
    # Output driver is inferred from the file extension.
    file_format = \
        "GPKG" if self.output_file.endswith(".gpkg") else "ESRI Shapefile" if self.output_file.endswith(".shp") else None
    writer = QgsVectorFileWriter(self.output_file, "System", fields,
                                 QgsWkbTypes.Point, thematic_CRS, file_format)
    if self.sampling_type == "simple":
        total_of_samples = self.number_of_samples
    if self.sampling_type == "stratified":
        total_of_samples = sum(self.number_of_samples)
        self.samples_in_categories = [0] * len(
            self.number_of_samples)  # total generated by categories
    nPoints = 0
    nIterations = 0
    self.index = QgsSpatialIndex()
    if attempts_by_sampling:
        maxIterations = total_of_samples * attempts_by_sampling
    else:
        maxIterations = float('Inf')
    # init the random sampling seed
    self.random_seed = random_seed
    random.seed(self.random_seed)
    points_generated = []
    while nIterations < maxIterations and nPoints < total_of_samples:
        random_sampling_point = RandomPoint(self.ThematicR.extent())
        # checks to the sampling point, else discard and continue
        if not self.check_sampling_point(random_sampling_point):
            nIterations += 1
            continue
        if self.sampling_type == "stratified":
            self.samples_in_categories[
                random_sampling_point.index_pixel_value] += 1
        points_generated.append(random_sampling_point)
        # it requires tmp save the point to check min distance for the next sample
        f = QgsFeature(nPoints)
        f.setGeometry(random_sampling_point.QgsGeom)
        self.index.insertFeature(f)
        self.points[nPoints] = random_sampling_point.QgsPnt
        nPoints += 1
        nIterations += 1
        # update progress bar
        progress_bar.setValue(int(nPoints))
    # guarantee the random order for the classification
    random.shuffle(points_generated)
    self.points = dict()  # restart
    for num_point, point_generated in enumerate(points_generated):
        # random sampling point passed the checks, save it
        f = QgsFeature()
        f.initAttributes(1)
        f.setFields(fields)
        f.setAttribute('id', num_point + 1)
        f.setGeometry(point_generated.QgsGeom)
        writer.addFeature(f)
        self.points[num_point] = point_generated.QgsPnt
    # save the total point generated
    self.total_of_samples = len(points_generated)
    # deleting the writer flushes features to disk
    del writer, self.index
def open_file(
        dialog=None,
        osm_file=None,
        output_geom_types=None,
        white_list_column=None,
        layer_name="OsmFile",
        config_outputs=None,
        output_dir=None,
        final_query=None,
        prefix_file=None):
    """
    Open an osm file.

    Memory layer if no output directory is set, or Geojson in the output
    directory.

    Parses the OSM file with OsmParser, then for each requested geometry
    type either keeps the parser's memory layer or rewrites it as GeoJSON
    in *output_dir*, applies styling/actions, and adds the layer to the
    current QGIS project.

    :param final_query: The query where the file comes from. Might be empty
    if it's a local OSM file.
    :type final_query: basestring

    :returns: number of layers added to the project.
    :raises FileOutPutException: when a target GeoJSON file already exists.
    """
    outputs = {}
    if output_dir:
        for layer in ['points', 'lines', 'multilinestrings',
                      'multipolygons']:
            if not prefix_file:
                prefix_file = layer_name
            outputs[layer] = join(
                output_dir, prefix_file + "_" + layer + ".geojson")
            if isfile(outputs[layer]):
                raise FileOutPutException(suffix='(' + outputs[layer] + ')')
    # Legacy, waiting to remove the OsmParser for QGIS >= 3.6
    # Change in osm_file_dialog.py L131 too
    output_geom_legacy = [l.value.lower() for l in output_geom_types]
    if not white_list_column:
        white_list_column = {}
    white_list_legacy = (
        {l.value.lower(): csv for l, csv in white_list_column.items()}
    )
    LOGGER.info('The OSM file is: {}'.format(osm_file))
    # Parsing the file
    osm_parser = OsmParser(
        osm_file=osm_file,
        layers=output_geom_legacy,
        white_list_column=white_list_legacy)
    osm_parser.signalText.connect(dialog.set_progress_text)
    osm_parser.signalPercentage.connect(dialog.set_progress_percentage)
    start_time = time.time()
    layers = osm_parser.parse()
    elapsed_time = time.time() - start_time
    parser_time = time.strftime("%Hh %Mm %Ss", time.gmtime(elapsed_time))
    LOGGER.info('The OSM parser took: {}'.format(parser_time))
    # Finishing the process with geojson or memory layer
    num_layers = 0
    for i, (layer, item) in enumerate(layers.items()):
        dialog.set_progress_percentage(i / len(layers) * 100)
        QApplication.processEvents()
        # Only keep layers that have features and were requested.
        if item['featureCount'] and (
                LayerType(layer.capitalize()) in output_geom_types):
            final_layer_name = layer_name
            # If configOutputs is not None (from My Queries)
            if config_outputs:
                if config_outputs[layer]['namelayer']:
                    final_layer_name = config_outputs[layer]['namelayer']
            if output_dir:
                dialog.set_progress_text(
                    tr('From memory layer to GeoJSON: ' + layer))
                # Transforming the vector file
                osm_geometries = {
                    'points': QgsWkbTypes.Point,
                    'lines': QgsWkbTypes.LineString,
                    'multilinestrings': QgsWkbTypes.MultiLineString,
                    'multipolygons': QgsWkbTypes.MultiPolygon}
                memory_layer = item['vector_layer']
                encoding = get_default_encoding()
                writer = QgsVectorFileWriter(
                    outputs[layer],
                    encoding,
                    memory_layer.fields(),
                    osm_geometries[layer],
                    memory_layer.crs(),
                    "GeoJSON")
                for f in memory_layer.getFeatures():
                    writer.addFeature(f)
                # deleting the writer flushes features to disk
                del writer
                # Loading the final vector file
                new_layer = QgsVectorLayer(
                    outputs[layer], final_layer_name, "ogr")
            else:
                new_layer = item['vector_layer']
                new_layer.setName(final_layer_name)
            # Try to set styling if defined
            if config_outputs and config_outputs[layer]['style']:
                new_layer.loadNamedStyle(config_outputs[layer]['style'])
            else:
                # Loading default styles
                if layer == "multilinestrings" or layer == "lines":
                    if "colour" in item['tags']:
                        new_layer.loadNamedStyle(
                            join(dirname(dirname(abspath(__file__))),
                                 "styles", layer + "_colour.qml"))
            # Add action about OpenStreetMap
            add_actions(new_layer, item['tags'])
            if final_query:
                QgsExpressionContextUtils.setLayerVariable(
                    new_layer, 'quickosm_query', final_query)
            QgsProject.instance().addMapLayer(new_layer)
            num_layers += 1
    return num_layers
def _clipVectorLayer(theLayer, theExtent,
                     theExtraKeywords=None,
                     explodeMultipart=True):
    """Clip a Hazard or Exposure layer to the extents of the current
    view frame. The layer must be a vector layer or an exception will
    be thrown.

    The output layer will always be in WGS84/Geographic.

    Args:

        * theLayer - a valid QGIS vector layer in EPSG:4326
        * theExtent - an array representing the exposure layer
           extents in the form [xmin, ymin, xmax, ymax]. It is assumed
           that the coordinates are in EPSG:4326 although currently
           no checks are made to enforce this.
        * theExtraKeywords - any additional keywords over and above the
          original keywords that should be associated with the cliplayer.
        * explodeMultipart - a bool describing if to convert multipart
          features into singleparts

    Returns:
        Path to the output clipped layer (placed in the
        system temp dir).

    Raises:
       None

    NOTE(review): uses the deprecated QGIS 1.x provider API
    (provider.select / nextFeature) — do not port without retesting.
    """
    if not theLayer or not theExtent:
        myMessage = tr('Layer or Extent passed to clip is None.')
        raise InvalidParameterException(myMessage)
    if theLayer.type() != QgsMapLayer.VectorLayer:
        myMessage = tr('Expected a vector layer but received a %s.' %
                       str(theLayer.type()))
        raise InvalidParameterException(myMessage)
    #myHandle, myFilename = tempfile.mkstemp('.sqlite', 'clip_',
    #    temp_dir())
    myHandle, myFilename = tempfile.mkstemp('.shp', 'clip_',
                                            temp_dir())
    # Ensure the file is deleted before we try to write to it
    # fixes windows specific issue where you get a message like this
    # ERROR 1: c:\temp\inasafe\clip_jpxjnt.shp is not a directory.
    # This is because mkstemp creates the file handle and leaves
    # the file open.
    os.close(myHandle)
    os.remove(myFilename)
    # Get the clip extents in the layer's native CRS
    myGeoCrs = QgsCoordinateReferenceSystem()
    myGeoCrs.createFromId(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
    myXForm = QgsCoordinateTransform(myGeoCrs, theLayer.crs())
    myRect = QgsRectangle(theExtent[0], theExtent[1],
                          theExtent[2], theExtent[3])
    myProjectedExtent = myXForm.transformBoundingBox(myRect)
    # Get vector layer
    myProvider = theLayer.dataProvider()
    if myProvider is None:
        myMessage = tr('Could not obtain data provider from '
                       'layer "%s"' % theLayer.source())
        raise Exception(myMessage)
    # Get the layer field list, select by our extent then write to disk
    # .. todo:: FIXME - for different geometry types we should implement
    #           different clipping behaviour e.g. reject polygons that
    #           intersect the edge of the bbox. Tim
    myAttributes = myProvider.attributeIndexes()
    myFetchGeometryFlag = True
    myUseIntersectFlag = True
    myProvider.select(myAttributes,
                      myProjectedExtent,
                      myFetchGeometryFlag,
                      myUseIntersectFlag)
    myFieldList = myProvider.fields()
    myWriter = QgsVectorFileWriter(myFilename,
                                   'UTF-8',
                                   myFieldList,
                                   theLayer.wkbType(),
                                   myGeoCrs,
                                   #'SQLite')  # FIXME (Ole): This works but is far too slow
                                   'ESRI Shapefile')
    if myWriter.hasError() != QgsVectorFileWriter.NoError:
        myMessage = tr('Error when creating shapefile: <br>Filename:'
                       '%s<br>Error: %s' %
                       (myFilename, myWriter.hasError()))
        raise Exception(myMessage)
    # Reverse the coordinate xform now so that we can convert
    # geometries from layer crs to geocrs.
    myXForm = QgsCoordinateTransform(theLayer.crs(), myGeoCrs)
    # Retrieve every feature with its geometry and attributes
    myFeature = QgsFeature()
    myCount = 0
    while myProvider.nextFeature(myFeature):
        myGeometry = myFeature.geometry()
        # Loop through the parts adding them to the output file
        # we write out single part features unless explodeMultipart is False
        if explodeMultipart:
            myGeometryList = explodeMultiPartGeometry(myGeometry)
        else:
            myGeometryList = [myGeometry]
        for myPart in myGeometryList:
            myPart.transform(myXForm)
            myFeature.setGeometry(myPart)
            myWriter.addFeature(myFeature)
        myCount += 1
    del myWriter  # Flush to disk
    if myCount < 1:
        myMessage = tr('No features fall within the clip extents. '
                       'Try panning / zooming to an area containing data '
                       'and then try to run your analysis again.')
        raise NoFeaturesInExtentException(myMessage)
    myKeywordIO = KeywordIO()
    myKeywordIO.copyKeywords(theLayer, myFilename,
                             theExtraKeywords=theExtraKeywords)
    return myFilename  # Filename of created file
def run(self):
    """Thread worker: count points in polygons and compute statistics.

    For every polygon of ``self.layerPoly``, counts the points of
    ``self.layerPoints`` that intersect it and, for each selected point
    attribute, appends the chosen statistic (sum/mean/min/max/stddev) to
    the polygon's attributes. Writes the result to ``self.outPath`` and
    emits progress/finish signals. Can be interrupted via ``self.stopMe``.
    """
    self.mutex.lock()
    self.stopMe = 0
    self.mutex.unlock()
    interrupted = False
    polyProvider = self.layerPoly.dataProvider()
    pointProvider = self.layerPoints.dataProvider()
    fieldList = ftools_utils.getFieldList(self.layerPoly)
    index = polyProvider.fieldNameIndex(unicode(self.fieldName))
    if index == -1:
        index = polyProvider.fields().count()
        fieldList.append(
            QgsField(unicode(self.fieldName), QVariant.Int, "int", 10, 0,
                     self.tr("point count field")))
    # Add the selected vector fields to the output polygon vector layer
    selectedItems = self.attributeList.selectedItems()
    for item in selectedItems:
        global typeDouble
        columnName = unicode(item.text() + "_" + self.statistics)
        index = polyProvider.fieldNameIndex(unicode(columnName))
        if index == -1:
            # mean/stddev always need a double column, regardless of input type
            if item.type() == typeDouble or self.statistics == "mean" or \
                    self.statistics == "stddev":
                fieldList.append(
                    QgsField(columnName, QVariant.Double, "double",
                             24, 15, "Value"))
            else:
                fieldList.append(
                    QgsField(columnName, QVariant.Int, "int",
                             10, 0, "Value"))
    sRs = polyProvider.crs()
    if QFile(self.outPath).exists():
        if not QgsVectorFileWriter.deleteShapeFile(self.outPath):
            return
    writer = QgsVectorFileWriter(self.outPath, self.encoding, fieldList,
                                 polyProvider.geometryType(), sRs)
    # Spatial index over the points for fast bbox pre-filtering.
    spatialIndex = ftools_utils.createIndex(pointProvider)
    self.emit(SIGNAL("rangeChanged(int)"), polyProvider.featureCount())
    polyFeat = QgsFeature()
    pntFeat = QgsFeature()
    outFeat = QgsFeature()
    inGeom = QgsGeometry()
    polyFit = polyProvider.getFeatures()
    while polyFit.nextFeature(polyFeat):
        inGeom = polyFeat.geometry()
        atMap = polyFeat.attributes()
        outFeat.setAttributes(atMap)
        outFeat.setGeometry(inGeom)
        count = 0
        pointList = []
        hasIntersection = True
        # bbox candidates first, exact intersection test below
        pointList = spatialIndex.intersects(inGeom.boundingBox())
        if len(pointList) > 0:
            hasIntersection = True
        else:
            hasIntersection = False
        if hasIntersection:
            valueList = {}
            for item in selectedItems:
                valueList[item.text()] = []
            for p in pointList:
                pointProvider.getFeatures(
                    QgsFeatureRequest().setFilterFid(p)).nextFeature(pntFeat)
                tmpGeom = QgsGeometry(pntFeat.geometry())
                if inGeom.intersects(tmpGeom):
                    count += 1
                    for item in selectedItems:
                        valueList[item.text()].append(
                            pntFeat.attribute(item.text()))
                self.mutex.lock()
                s = self.stopMe
                self.mutex.unlock()
                if s == 1:
                    interrupted = True
                    break
            atMap.append(count)
            # Compute the statistical values for selected vector attributes
            for item in selectedItems:
                values = valueList[item.text()]
                # Check if the input contains non-numeric values
                non_numeric_values = False
                for value in values:
                    if not isinstance(value, type(float())) and \
                            not isinstance(value, type(int())):
                        non_numeric_values = True
                        break
                # Jump over invalid values
                # NOTE(review): this 'continue' appends nothing for this
                # column, so subsequent attributes shift left relative to
                # fieldList — verify attribute alignment in the output.
                if non_numeric_values is True:
                    continue
                if values and len(values) > 0:
                    if self.statistics == "sum":
                        value = reduce(myAdder, values)
                    elif self.statistics == "mean":
                        value = reduce(myAdder, values) / float(len(values))
                    elif self.statistics == "min":
                        values.sort()
                        value = values[0]
                    elif self.statistics == "max":
                        values.sort()
                        value = values[-1]
                    elif self.statistics == "stddev":
                        value = two_pass_variance(values)
                        value = math.sqrt(value)
                    atMap.append(value)
        else:
            # no intersection - store at least the zero count
            atMap.append(0)
        outFeat.setAttributes(atMap)
        writer.addFeature(outFeat)
        self.emit(SIGNAL("updateProgress()"))
        self.mutex.lock()
        s = self.stopMe
        self.mutex.unlock()
        if s == 1:
            interrupted = True
            break
    del writer
    if not interrupted:
        self.emit(SIGNAL("processingFinished()"))
    else:
        self.emit(SIGNAL("processingInterrupted()"))
def processAlgorithm(self, progress):
    """Here is where the processing itself takes place.

    Reads the input vector layer, converts every vertex of every feature
    with ``wgs2bd`` (WGS-84 to Baidu coordinates), and writes the
    transformed features to the output layer, reporting progress in
    percent.

    NOTE(review): the start log is tagged 'WGS2GCJ' while the finish log
    is tagged 'WGS2BD' and the transform used is wgs2bd — one of the tags
    looks stale; confirm the intended target datum.
    """
    # The first thing to do is retrieve the values of the parameters
    # entered by the user
    inputFilename = self.getParameterValue(self.INPUT_LAYER)
    output = self.getOutputValue(self.OUTPUT_LAYER)
    # Input layers vales are always a string with its location.
    # That string can be converted into a QGIS object (a
    # QgsVectorLayer in this case) using the
    # processing.getObjectFromUri() method.
    vectorLayer = dataobjects.getObjectFromUri(inputFilename)
    # And now we can process
    # First we create the output layer. The output value entered by
    # the user is a string containing a filename, so we can use it
    # directly
    provider = vectorLayer.dataProvider()
    settings = QSettings()
    systemEncoding = settings.value('/UI/encoding', 'System')
    writer = QgsVectorFileWriter(output, systemEncoding,
                                 provider.fields(),
                                 provider.geometryType(), provider.crs())
    # Do the transform
    QgsMessageLog.logMessage("Start processing ...", 'WGS2GCJ',
                             QgsMessageLog.INFO)
    # engine = OffsetWGS84Engine()
    features = vector.features(vectorLayer)
    total_cnt = len(features)
    cnt = 0
    # progress is reported roughly every 1% of features
    step = int(total_cnt / 100)
    if step == 0:
        step = 1
    for f in features:
        attrs = f.attributes()
        geom = f.geometry()
        geom_type = geom.wkbType()
        new_f = QgsFeature()
        # Each WKB type needs its own vertex walk; every vertex is passed
        # through wgs2bd and the geometry is rebuilt.
        if geom_type == QGis.WKBPoint:
            vertices = geom.asPoint()
            new_vert = wgs2bd(vertices[0], vertices[1])
            new_f.setGeometry(
                QgsGeometry.fromPoint(QgsPoint(new_vert[0], new_vert[1])))
        elif geom_type == QGis.WKBMultiPoint:
            vertices = geom.asMultiPoint()
            new_vert = []
            for pt in vertices:
                new_pt = wgs2bd(pt[0], pt[1])
                new_vert.append(QgsPoint(new_pt[0], new_pt[1]))
            new_f.setGeometry(QgsGeometry.fromMultiPoint(new_vert))
        elif geom_type == QGis.WKBLineString:
            vertices = geom.asPolyline()
            new_vert = []
            for pt in vertices:
                new_pt = wgs2bd(pt[0], pt[1])
                new_vert.append(QgsPoint(new_pt[0], new_pt[1]))
            new_f.setGeometry(QgsGeometry.fromPolyline(new_vert))
        elif geom_type == QGis.WKBMultiLineString:
            vertices = geom.asMultiPolyline()
            new_vert = []
            for part in vertices:
                linestring = []
                for pt in part:
                    new_pt = wgs2bd(pt[0], pt[1])
                    linestring.append(QgsPoint(new_pt[0], new_pt[1]))
                new_vert.append(linestring)
            new_f.setGeometry(QgsGeometry.fromMultiPolyline(new_vert))
        elif geom_type == QGis.WKBPolygon:
            vertices = geom.asPolygon()
            new_vert = []
            for ring in vertices:
                ring_vert = []
                for pt in ring:
                    new_pt = wgs2bd(pt[0], pt[1])
                    ring_vert.append(QgsPoint(new_pt[0], new_pt[1]))
                new_vert.append(ring_vert)
            new_f.setGeometry(QgsGeometry.fromPolygon(new_vert))
        elif geom_type == QGis.WKBMultiPolygon:
            vertices = geom.asMultiPolygon()
            new_vert = []
            for part in vertices:
                ply = []
                for ring in part:
                    ring_vert = []
                    for pt in ring:
                        new_pt = wgs2bd(pt[0], pt[1])
                        ring_vert.append(QgsPoint(new_pt[0], new_pt[1]))
                    ply.append(ring_vert)
                new_vert.append(ply)
            new_f.setGeometry(QgsGeometry.fromMultiPolygon(new_vert))
        else:
            # unsupported geometry type: silently skipped
            continue
        new_f.setAttributes(attrs)
        writer.addFeature(new_f)
        cnt = cnt + 1
        if (cnt % step == 0):
            progress.setPercentage((float(cnt) / float(total_cnt) * 100))
    QgsMessageLog.logMessage("Successful finished.", 'WGS2BD',
                             QgsMessageLog.INFO)
def compute(self, inName, weightField="", times=1, uniqueField=""):
    """Compute the (weighted) mean center or standard-distance circle.

    For each unique value of *uniqueField* (or once for the whole layer
    when the field is absent), computes the weighted mean of the feature
    centroids and writes either a point (mean center) or, when
    ``self.function == 2``, a buffer polygon whose radius is the standard
    distance scaled by *times*.

    :param inName: name of the input vector layer.
    :param weightField: optional field carrying per-feature weights
        (weight defaults to 1.0 when the field is missing).
    :param times: multiplier applied to the standard distance radius.
    :param uniqueField: optional field grouping features into cases.
    """
    vlayer = ftools_utils.getVectorLayerByName(inName)
    provider = vlayer.dataProvider()
    weightIndex = provider.fieldNameIndex(weightField)
    uniqueIndex = provider.fieldNameIndex(uniqueField)
    feat = QgsFeature()
    sRs = provider.crs()
    check = QFile(self.shapefileName)
    if check.exists():
        if not QgsVectorFileWriter.deleteShapeFile(self.shapefileName):
            return
    if uniqueIndex != -1:
        uniqueValues = ftools_utils.getUniqueValues(provider,
                                                    int(uniqueIndex))
        single = False
    else:
        # no grouping field: one pass over all features
        uniqueValues = [1]
        single = True
    if self.function == 2:
        # standard-distance output is a polygon (buffered circle)
        fieldList = QgsFields()
        fieldList.append(QgsField("STD_DIST", QVariant.Double))
        fieldList.append(QgsField("UID", QVariant.String))
        writer = QgsVectorFileWriter(self.shapefileName, self.encoding,
                                     fieldList, QGis.WKBPolygon, sRs)
    else:
        fieldList = QgsFields()
        fieldList.append(QgsField("MEAN_X", QVariant.Double))
        fieldList.append(QgsField("MEAN_Y", QVariant.Double))
        fieldList.append(QgsField("UID", QVariant.String))
        writer = QgsVectorFileWriter(self.shapefileName, self.encoding,
                                     fieldList, QGis.WKBPoint, sRs)
    outfeat = QgsFeature()
    outfeat.setFields(fieldList)
    points = []
    weights = []
    nFeat = provider.featureCount() * len(uniqueValues)
    nElement = 0
    self.progressBar.setValue(0)
    self.progressBar.setRange(0, nFeat)
    for j in uniqueValues:
        cx = 0.00
        cy = 0.00
        points = []
        weights = []
        fit = provider.getFeatures()
        while fit.nextFeature(feat):
            nElement += 1
            self.progressBar.setValue(nElement)
            if single:
                check = unicode(j).strip()
            else:
                check = unicode(feat[uniqueIndex]).strip()
            if check == unicode(j).strip():
                cx = 0.00
                cy = 0.00
                if weightIndex == -1:
                    weight = 1.00
                else:
                    weight = float(feat[weightIndex])
                geom = QgsGeometry(feat.geometry())
                geom = ftools_utils.extractPoints(geom)
                # per-feature centroid: average of its extracted vertices
                for i in geom:
                    cx += i.x()
                    cy += i.y()
                points.append(QgsPoint((cx / len(geom)), (cy / len(geom))))
                weights.append(weight)
        sumWeight = sum(weights)
        cx = 0.00
        cy = 0.00
        item = 0
        # weighted mean of the feature centroids
        for item, i in enumerate(points):
            cx += i.x() * weights[item]
            cy += i.y() * weights[item]
        cx = cx / sumWeight
        cy = cy / sumWeight
        meanPoint = QgsPoint(cx, cy)
        if self.function == 2:
            # standard distance: RMS deviation of centroid distances
            # from the mean point
            values = []
            md = 0.00
            sd = 0.00
            dist = QgsDistanceArea()
            item = 0
            for i in points:
                tempDist = dist.measureLine(i, meanPoint)
                values.append(tempDist)
                item += 1
                md += tempDist
            md = md / item
            for i in values:
                sd += (i - md) * (i - md)
            sd = sqrt(sd / item)
            outfeat.setGeometry(
                QgsGeometry.fromPoint(meanPoint).buffer(sd * times, 10))
            outfeat.setAttribute(0, sd)
            outfeat.setAttribute(1, j)
        else:
            outfeat.setGeometry(QgsGeometry.fromPoint(meanPoint))
            outfeat.setAttribute(0, cx)
            outfeat.setAttribute(1, cy)
            outfeat.setAttribute(2, j)
        writer.addFeature(outfeat)
        if single:
            break
    del writer
def runThresholding(iface, dlg, conf, layersName, dir_raster_src, dir_dest,
                    ficRaster, seuilStr, fromActiveLayerRaster):
    """Run the water-thresholding step on the source raster.

    Validates the input raster and the delta value, builds a memory/shapefile
    polygon layer covering the raster extent (used later to drop the image
    frame when tracing contours), optionally derives an intermediate raster
    (NDVI/NDWI2 for optical images, Lee/Gamma despeckling for radar), then
    computes the thresholded water mask via ``computeMaskThreshold`` and
    styles the resulting layers.

    Parameters
    ----------
    iface : QGIS interface, used for the map canvas and layer tree.
    dlg : plugin dialog (radio buttons, delta field, message log).
    conf : configuration dialog (optical/radar choice, OTB option).
    layersName : dict of working-layer names; its 'seuil' entry is updated
        in place with the threshold suffix before being returned.
    dir_raster_src, dir_dest : source raster directory and output directory.
    ficRaster : raster file path, or a layer name when
        ``fromActiveLayerRaster`` is True.
    seuilStr : threshold value as a string.
    fromActiveLayerRaster : take the raster from the loaded layers instead
        of from disk.

    Returns
    -------
    The updated ``layersName`` dict, or None on any error (a message box
    and/or the dialog log explains the failure).
    """
    # --- Input validation --------------------------------------------------
    if fromActiveLayerRaster:
        if ficRaster == "":
            QMessageBox.information(
                None, "Attention !!!",
                "Le fichier raster est inexistant ou incorrect ou le format n'est pas supporté par le plugin !",
                QMessageBox.Ok, QMessageBox.NoButton)
            return None
    else:
        if os.path.isfile(ficRaster):
            # gdal_polygonize rejects non-ASCII directory names.
            # BUGFIX: the original called str.decode('ascii'), which does not
            # exist in Python 3 — it always raised AttributeError, which the
            # bare except swallowed, so every disk-raster path was rejected.
            # encode('ascii') raises UnicodeEncodeError exactly for accented
            # characters, which is the intended check.
            try:
                dir_raster_src.encode('ascii')
                dir_dest.encode('ascii')
            except UnicodeEncodeError:
                QMessageBox.information(
                    None, "Attention !!!",
                    "Certaines fonctions comme gdal_polygonize n'acceptent pas les dossiers avec des caractères accentués. Le chemin d'accès au fichier raster n'est pas valable.",
                    QMessageBox.Ok, QMessageBox.NoButton)
                return None
            # OTB's Band Math rejects accented file names on Linux.
            if platform.system() == "Linux" and conf.rbOTB.isChecked():
                try:
                    ficRaster.encode('ascii')
                except UnicodeEncodeError:
                    QMessageBox.information(
                        None, "Attention !!!",
                        "Certaines fonctions comme Band Math (OTB) n'acceptent pas les caractères accentués. Le nom du raster n'est pas valable.",
                        QMessageBox.Ok, QMessageBox.NoButton)
                    return None
        else:
            QMessageBox.information(
                None, "Attention !!!",
                "Le fichier raster est inexistant ou incorrect ou le format n'est pas supporté par le plugin !",
                QMessageBox.Ok, QMessageBox.NoButton)
            return None
    if dlg.rbSeuil.isChecked():
        # Delta must be a non-empty, non-zero signed number.
        if dlg.delta.text() in ('', '+', '-') or float(dlg.delta.text()) == 0:
            QMessageBox.information(None, "Attention !!!",
                                    "Valeur de delta incorrecte !",
                                    QMessageBox.Ok, QMessageBox.NoButton)
            dlg.delta.setFocus()
            return None

    # --- Start thresholding -------------------------------------------------
    messInfo(dlg, "Seuillage en cours...")
    messInfo(dlg, "")
    canvas = iface.mapCanvas()
    li = layerList()
    # Resolve the raster source (URI of the active layer, or the file path).
    if fromActiveLayerRaster:
        if ficRaster in li:
            layerRaster = li[ficRaster]
            rasterAssembly = layerRaster.dataProvider().dataSourceUri()
        else:
            QMessageBox.information(
                None, "Attention !!!",
                ficRaster + " n'existe plus dans la liste des couches disponible. Vérifiez et réinitialisez la liste des couches d'entrée.",
                QMessageBox.Ok, QMessageBox.NoButton)
            messErreur(dlg, ficRaster + " n'existe plus dans la liste.")
            return None
    else:
        rasterAssembly = ficRaster
    extension_input_raster = os.path.splitext(
        os.path.basename(rasterAssembly))[1]
    messInfo(dlg, "Raster en entrée: " + layersName['raster'])
    li = layerList()
    canvas.refresh()
    # start_time is shared with the rest of the plugin for timing reports.
    global start_time
    raster = None
    # Reuse the already-loaded raster layer, or load it from disk.
    # NOTE(review): when fromActiveLayerRaster is set and the layer is gone,
    # raster stays None and the error branch below fires.
    if fromActiveLayerRaster:
        if layersName['raster'] in li:
            raster = li[layersName['raster']]
    else:
        raster = loadRaster(dlg, ficRaster, layersName['raster'])
    if not raster:
        messErreur(dlg, "Le raster ne peut pas être chargé.")
        return None
    start_time = time.time()
    setLayerVisible(raster, True)

    # --- Raster-extent polygon layer ----------------------------------------
    # Used later to remove the image frame when tracing contours.
    LayerRasterExtendName = layersName['emprise']
    LayerRasterExtendPath = dir_dest + os.sep + LayerRasterExtendName + EXT_VECTOR
    if os.path.exists(LayerRasterExtendPath):
        try:
            os.remove(LayerRasterExtendPath)
        except OSError:
            QMessageBox.information(
                None, "Attention !!!",
                LayerRasterExtendPath + " ne peut pas être effacé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
                QMessageBox.Ok, QMessageBox.NoButton)
            messErreur(dlg, LayerRasterExtendPath + " ne peut pas être effacé.")
            return None
    messInfo(dlg, "Création de la couche: " + LayerRasterExtendName + ".")
    messInfo(dlg, "")
    crs = raster.crs()
    crsWkt = crs.toWkt()
    layerExtend = QgsVectorLayer("Polygon?crs=" + crsWkt,
                                 LayerRasterExtendName, "memory")
    if not layerExtend.isValid():
        messErreur(dlg, LayerRasterExtendPath + " ne peut pas être chargé.")
        return None
    QgsProject.instance().addMapLayer(layerExtend)
    li = layerList()
    symbol = li[LayerRasterExtendName].renderer().symbol()
    symbol.setColor(QColor.fromRgb(0, 0, 255))
    symbol.setOpacity(0.4)
    provider = li[LayerRasterExtendName].dataProvider()
    fields = QgsFields()
    fields.append(QgsField("HEIGHT", QVariant.Double))
    fields.append(QgsField("WIDTH", QVariant.Double))
    for f in fields:
        provider.addAttributes([f])
    writer = QgsVectorFileWriter(LayerRasterExtendPath, "CP1250", fields,
                                 QgsWkbTypes.Polygon, crs, FORMAT_VECT)
    if writer.hasError() != QgsVectorFileWriter.NoError:
        messErreur(dlg, LayerRasterExtendPath + " ne peut pas être créé.")
        return None
    li[LayerRasterExtendName].startEditing()
    extent = raster.extent()
    minx = extent.xMinimum()
    miny = extent.yMinimum()
    maxx = extent.xMaximum()
    maxy = extent.yMaximum()
    height = raster.height()
    width = raster.width()
    # (Removed dead locals cntx/cnty/area/perim: they were never read and
    # mixed pixel counts with map coordinates.)
    # Closed ring covering the whole raster extent.
    rect = [
        QgsPointXY(minx, miny), QgsPointXY(minx, maxy),
        QgsPointXY(maxx, maxy), QgsPointXY(maxx, miny),
        QgsPointXY(minx, miny)
    ]
    geometry = QgsGeometry().fromPolygonXY([rect])
    feat = QgsFeature()
    feat.setGeometry(geometry)
    feat.setAttributes([height, width])
    writer.addFeature(feat)
    provider.addFeatures([feat])
    del writer  # flush and close the shapefile
    li[LayerRasterExtendName].commitChanges()
    setLayerVisible(li[LayerRasterExtendName], False)
    node = QgsProject.instance().layerTreeRoot().findLayer(
        li[LayerRasterExtendName].id())
    iface.layerTreeView().layerTreeModel().refreshLayerLegend(node)
    li[LayerRasterExtendName].triggerRepaint()
    canvas.refresh()

    # --- Optional pre-processing (index computation / despeckling) ----------
    rasterTreatName = ""
    if conf.rbOptique.isChecked():
        # Optical image: optionally derive NDVI or NDWI2 first.
        if dlg.rbComputeNdvi.isChecked():
            rasterTreatName = layersName['ndvi']
            dir_raster_treat = dir_dest
            layer = computeNdvi(dlg, conf, dir_raster_src, dir_dest,
                                layersName["raster"], layersName["ndvi"],
                                extension_input_raster)
            if layer is None:
                return None
            QgsProject.instance().addMapLayer(layer)
            setLayerVisible(layer, False)
            extension_input_raster = EXT_RASTER
        elif dlg.rbComputeNdwi2.isChecked():
            rasterTreatName = layersName['ndwi2']
            dir_raster_treat = dir_dest
            layer = computeNdwi2(dlg, conf, dir_raster_src, dir_dest,
                                 layersName["raster"], layersName["ndwi2"],
                                 extension_input_raster)
            if layer is None:
                return None
            QgsProject.instance().addMapLayer(layer)
            setLayerVisible(layer, False)
            extension_input_raster = EXT_RASTER
        else:
            # Threshold the source raster directly.
            rasterTreatName = layersName['raster']
            dir_raster_treat = dir_raster_src
    elif conf.rbRadar.isChecked():
        # Radar image: optionally despeckle with a Lee or Gamma filter first.
        if dlg.rbDespeckLee.isChecked():
            rasterTreatName = layersName['lee']
            dir_raster_treat = dir_dest
            layer = despeckeleLee(dlg, conf, dir_raster_src, dir_dest,
                                  layersName["raster"], layersName["lee"],
                                  extension_input_raster)
            if layer is None:
                return None
            QgsProject.instance().addMapLayer(layer)
            setLayerVisible(layer, False)
            extension_input_raster = EXT_RASTER
        elif dlg.rbDespeckGamma.isChecked():
            rasterTreatName = layersName['gamma']
            dir_raster_treat = dir_dest
            layer = despeckeleGamma(dlg, conf, dir_raster_src, dir_dest,
                                    layersName["raster"], layersName["gamma"],
                                    extension_input_raster)
            if layer is None:
                return None
            QgsProject.instance().addMapLayer(layer)
            setLayerVisible(layer, False)
            extension_input_raster = EXT_RASTER
        else:
            rasterTreatName = layersName['raster']
            dir_raster_treat = dir_raster_src
    li = layerList()

    # --- Water mask from the estimated threshold ----------------------------
    deltaStr = dlg.delta.text()
    layers_list = computeMaskThreshold(dlg, conf, dir_raster_treat, dir_dest,
                                       rasterTreatName, layersName['seuil'],
                                       seuilStr, deltaStr,
                                       extension_input_raster)
    if layers_list is None:
        return None
    # Style each mask layer: value 1 rendered as semi-transparent blue.
    for layer in layers_list:
        QgsProject.instance().addMapLayer(layer)
        fcn = QgsColorRampShader()
        fcn.setColorRampType(QgsColorRampShader.Type.Exact)
        lst = [QgsColorRampShader.ColorRampItem(1, QColor(QColor(0, 0, 255)))]
        fcn.setColorRampItemList(lst)
        shader = QgsRasterShader()
        shader.setRasterShaderFunction(fcn)
        renderer = QgsSingleBandPseudoColorRenderer(layer.dataProvider(), 1,
                                                    shader)
        if renderer:
            layer.setRenderer(renderer)
        if layer.renderer():
            layer.renderer().setOpacity(0.5)
        layer.triggerRepaint()
        setLayerVisible(layer, False)
    li = layerList()
    messInfo(dlg,
             "Temps de calcul: " + str(round(time.time() - start_time)) +
             " secondes.")
    messInfo(dlg, "")
    # Timing origin for the vectorization step that follows this function.
    global start_timeVect
    start_timeVect = time.time()
    layerName = li[layersName['raster']]
    setLayerVisible(layerName, False)
    layerSeuilName = layersName['seuil'] + seuilStr
    layerSeuil = li[layerSeuilName]
    setLayerVisible(layerSeuil, True)
    li[layersName['raster']].triggerRepaint()
    canvas.refresh()
    extent = li[layersName['raster']].extent()
    canvas.setExtent(extent)
    # Propagate the suffixed threshold-layer name back to the caller.
    layersName['seuil'] = layerSeuilName
    return layersName