def createContours(canvas, dhm):
    """Toggle contour lines for the given DHM raster.

    If ``dhm['contour']`` already holds a contour layer it is removed from
    the project and ``None`` is returned.  Otherwise contours are loaded
    from an existing shapefile next to the DHM, or generated with
    ``gdal:contour`` (20 m interval), added to the project and returned.
    """
    contourLyr = dhm['contour']
    contourName = "Hoehenlinien_" + dhm['name']

    # Get current CRS of qgis project; remember the projection prompt
    # behaviour so it can be restored before returning.
    s = QSettings()
    oldValidation = s.value("/Projections/defaultBehaviour")
    crs = canvas.mapSettings().destinationCrs()
    crsEPSG = crs.authid()

    # If project and raster CRS are equal and set correctly
    if crsEPSG == dhm['spatialRef'] and "USER" not in crsEPSG:
        s.setValue("/Projections/defaultBehaviour", "useProject")
    else:
        crs = dhm['layer'].crs()

    # If contours exist, remove them
    if contourLyr:
        QgsProject.instance().removeMapLayer(contourLyr.id())
        contourLyr = None
    # If no contours exist, create them
    else:
        outputPath = os.path.join(os.path.dirname(dhm['path']),
                                  contourName + '.shp')
        if os.path.exists(outputPath):
            # Reuse a previously generated contour shapefile
            contourLyr = QgsVectorLayer(outputPath, contourName, "ogr")
        else:
            processingParams = {
                'INPUT': dhm['layer'],
                'BAND': 1,
                'INTERVAL': 20,
                'FIELD_NAME': "Hoehe",
                'OUTPUT': outputPath
            }
            algOutput = run("gdal:contour", processingParams)
            contourLyr = QgsVectorLayer(algOutput['OUTPUT'], contourName,
                                        "ogr")
        # Give contourLyr the same CRS as the qgis project
        contourLyr.setCrs(crs)
        QgsProject.instance().addMapLayer(contourLyr)

    # Restore the previous projection prompt behaviour
    s.setValue("/Projections/defaultBehaviour", oldValidation)
    return contourLyr
def generalize(prjpath, inlayer, outlayer, size):
    """Generalize polygon layer to simplify its geometry."""
    inlayerpath = os.path.join(prjpath, inlayer + ".shp")
    outlayerpath = os.path.join(prjpath, outlayer + ".shp")
    errlayerpath = os.path.join(prjpath, "error.shp")

    # Parameters for grass7:v.generalize (Douglas-Peucker style threshold
    # with GRASS defaults for everything else).
    params = {
        'input': inlayerpath,
        'threshold': size,
        'output': outlayerpath,
        'error': errlayerpath,
        '-l': True,
        '-t': False,
        'GRASS_MIN_AREA_PARAMETER': 0.0001,
        'GRASS_OUTPUT_TYPE_PARAMETER': 0,
        'GRASS_REGION_PARAMETER': None,
        'GRASS_SNAP_TOLERANCE_PARAMETER': -1,
        'GRASS_VECTOR_DSCO': '',
        'GRASS_VECTOR_LCO': '',
        'alpha': 1,
        'angle_thresh': 3,
        'beta': 1,
        'betweeness_thresh': 0,
        'cats': '',
        'closeness_thresh': 0,
        'degree_thresh': 0,
        'reduction': 50,
        'slide': 0.5,
        'type': [0, 1, 2],
        'where': ''
    }
    try:
        processing.run('grass7:v.generalize', params)
    except Exception as e:
        print("Generalizing " + inlayerpath + "!")
        print(str(e))
        return False
    return True
def testRun(self):
    """Check processing.run ownership semantics: results belong to the
    caller by default, to the context when is_child_algorithm=True."""
    context = QgsProcessingContext()

    # Run an alg via processing.run - ownership of the result layer is
    # transferred back to the caller.
    res = processing.run('qgis:buffer',
                         {'DISTANCE': 1,
                          'INPUT': points(),
                          'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT},
                         context=context)
    self.assertIn('OUTPUT', res)
    output_layer = res['OUTPUT']
    # The output is the layer instance itself, and Python owns it.
    self.assertIsInstance(output_layer, QgsVectorLayer)
    self.assertTrue(sip.ispyowned(output_layer))
    # Destroying the context must NOT destroy the caller-owned layer.
    del context
    gc.collect()
    self.assertFalse(sip.isdeleted(output_layer))

    # With is_child_algorithm=True ownership stays with the context.
    context = QgsProcessingContext()
    res = processing.run('qgis:buffer',
                         {'DISTANCE': 1,
                          'INPUT': points(),
                          'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT},
                         context=context, is_child_algorithm=True)
    self.assertIn('OUTPUT', res)
    # Here the output is a layer string reference, NOT the layer itself.
    self.assertIsInstance(res['OUTPUT'], str)
    layer = context.temporaryLayerStore().mapLayer(res['OUTPUT'])
    self.assertIsInstance(layer, QgsVectorLayer)
    self.assertFalse(sip.ispyowned(layer))
    # Destroying the context destroys the context-owned layer.
    del context
    gc.collect()
    self.assertTrue(sip.isdeleted(layer))
def processAlgorithm(self, parameters, context, feedback):
    """
    Run the external Muslic multimodal assignment binary on the input
    network/matrix/parameter files and write results to ``sortie``.
    """
    # Retrieve the input/output file parameters.
    reseau = self.parameterAsFile(parameters, self.RESEAU, context)
    matrice = self.parameterAsFile(parameters, self.MATRICE, context)
    parametres = self.parameterAsFile(parameters, self.PARAMETRES, context)
    penalites = self.parameterAsFile(parameters, self.PENALITES, context)
    # Muslic expects the output path without its extension.
    sortie = os.path.splitext(
        self.parameterAsFileOutput(parameters, self.SORTIE, context))[0]
    download = self.parameterAsBool(parameters, self.DOWNLOAD, context)

    if download:
        feedback.setProgressText(self.tr("Downloading Muslic binary"))
        processing.run("native:filedownloader", {
            'URL': 'https://github.com/crocovert/muslic/raw/master/Muslic/bin/Release/Muslic.exe',
            'OUTPUT': os.path.dirname(__file__) + "/Muslic.exe"})
        feedback.setProgressText(self.tr("Muslic downloaded succesfully"))

    # Build the command for the current platform.  subprocess.call with a
    # list does not use a shell, so on Linux "mono" and the exe path must
    # be separate argv entries (previously they were concatenated into a
    # single string, which could never be executed).
    if sys.platform.startswith('win'):
        cmd = [os.path.dirname(__file__) + "/Muslic.exe"]
    elif sys.platform.startswith('linux'):
        cmd = ["mono", os.path.dirname(__file__) + "/Muslic.exe"]
    else:
        # Previously `prog` was simply left unbound here, producing a
        # confusing NameError later instead of a clear message.
        raise Exception("Muslic is not supported on platform: "
                        + sys.platform)

    feedback.setProgressText(
        self.tr("Multimodal calculations... That could take some time"))
    args = cmd + [reseau, matrice, sortie, parametres]
    # The penalty file is optional; append it only when provided.
    if len(penalites) != 0:
        args.append(penalites)
    musliw = subprocess.call(args)
    return {'musliw': 'OK'}
def layer_geom(layer):
    """Export the layer's features as GeoJSON, sampling a random subset
    when the layer exceeds the configured maximum feature count."""
    feature_list = []
    if layer.featureCount() > settings.PROJESTIONS_MAX_FEATURES:
        # Too many features: randomly select a subset instead.
        import processing
        processing.run('qgis:randomselection', {
            'INPUT': layer,
            'METHOD': 0,
            'NUMBER': settings.PROJESTIONS_MAX_FEATURES,
        }, feedback=QgsProcessingFeedback())
        feature_list = layer.selectedFeatures()
        layer.removeSelection()
    else:
        # Small layer: take every feature, with simplified geometry.
        feat = QgsFeature()
        feature_iter = layer.getFeatures()
        while feature_iter.nextFeature(feat):
            feat.setGeometry(feat.geometry().simplify(0.1))
            feature_list.append(feat)
            feat = QgsFeature()

    exporter = QgsJsonExporter(layer, 6)
    # Geometry only - strip all attributes from the export.
    exporter.setExcludedAttributes(layer.attributeList())
    return exporter.exportFeatures(feature_list)
def fixgeometry(prjpath, fixlayer, outlayer):
    """Fix the geometry of a layer."""
    src = os.path.join(prjpath, fixlayer + ".shp")
    dst = os.path.join(prjpath, outlayer + ".shp")
    try:
        processing.run('native:fixgeometries',
                       {'INPUT': src, 'OUTPUT': dst})
    except Exception as e:
        # Report which layer failed, then signal failure to the caller.
        print("fixgeometries of %s to %s!" % (src, dst))
        print(str(e))
        return False
    return True
def dissolve(projectpath, dissolve_layer, dissolve_field, outlayerName):
    """Dissolve a polygon layer.

    Runs qgis:dissolve on ``dissolve_layer`` grouped by ``dissolve_field``.
    Returns True on success, False when the processing call raises.
    """
    # NOTE: the previous version also looked the layer up with
    # QgsProject.instance().mapLayersByName() but never used the result;
    # that dead lookup has been removed.
    inlayer = os.path.join(projectpath, dissolve_layer + ".shp")
    outlayer = os.path.join(projectpath, outlayerName + ".shp")
    try:
        processing.run('qgis:dissolve',
                       {'FIELD': [dissolve_field],
                        'INPUT': inlayer,
                        'OUTPUT': outlayer})
    except Exception as e:
        print("Dissolving " + inlayer + "!")
        print(str(e))
        return False
    return True
def test_parameterAs_ScriptMode(self):
    """
    This test will pass an instance of QgsCoordinateReferenceSystem for
    'epsg' parameter of otb::Rasterization. There is same test in
    otb_algorithm_tests.yaml which passes an instance of str for epsg
    parameter.
    """
    outdir = tempfile.mkdtemp()
    self.cleanup_paths.append(outdir)

    context = QgsProcessingContext()
    context.setProject(QgsProject.instance())
    feedback = QgsProcessingFeedback()

    # Load the test vector layer to be rasterized.
    vector_path = os.path.join(AlgorithmsTestBase.processingTestDataPath(),
                               'polys.gml')
    vector_layer = QgsProcessingUtils.mapLayerFromString(vector_path, context)

    parameters = {
        'in': vector_layer,
        'epsg': QgsCoordinateReferenceSystem('EPSG:4326'),
        'spx': 1.0,
        'spy': 1.0,
        'outputpixeltype': 1,
        'out': os.path.join(outdir, 'raster.tif')
    }
    results = processing.run('otb:Rasterization', parameters, None, feedback)

    # The produced raster must load as a valid layer.
    output_layer = QgsProcessingUtils.mapLayerFromString(results['out'],
                                                         context)
    self.assertTrue(output_layer.isValid())
def union(union_a, union_b):
    """Union of two vector layers.

    Issue https://github.com/inasafe/inasafe/issues/3186

    :param union_a: The vector layer for the union.
    :type union_a: QgsVectorLayer

    :param union_b: The vector layer for the union.
    :type union_b: QgsVectorLayer

    :return: The clip vector layer.
    :rtype: QgsVectorLayer

    .. versionadded:: 4.0
    """
    output_layer_name = union_steps['output_layer_name']
    output_layer_name = output_layer_name % (
        union_a.keywords['layer_purpose'],
        union_b.keywords['layer_purpose']
    )

    keywords_union_1 = union_a.keywords
    keywords_union_2 = union_b.keywords
    inasafe_fields_union_1 = keywords_union_1['inasafe_fields']
    inasafe_fields_union_2 = keywords_union_2['inasafe_fields']
    # BUG FIX: copy before merging. Previously the first layer's
    # 'inasafe_fields' dict was updated in place, silently mutating
    # union_a.keywords as a side effect of calling this function.
    inasafe_fields = dict(inasafe_fields_union_1)
    inasafe_fields.update(inasafe_fields_union_2)

    parameters = {'INPUT': union_a,
                  'OVERLAY': union_b,
                  'OUTPUT': 'memory:'}

    # TODO implement callback through QgsProcessingFeedback object
    initialize_processing()
    feedback = create_processing_feedback()
    context = create_processing_context(feedback=feedback)
    result = processing.run('native:union', parameters, context=context)

    if result is None:
        raise ProcessingInstallationError

    union_layer = result['OUTPUT']
    union_layer.setName(output_layer_name)
    # use to avoid modifying original source
    union_layer.keywords = dict(union_a.keywords)
    union_layer.keywords['inasafe_fields'] = inasafe_fields
    union_layer.keywords['title'] = output_layer_name
    union_layer.keywords['layer_purpose'] = 'aggregate_hazard'
    union_layer.keywords['hazard_keywords'] = keywords_union_1.copy()
    union_layer.keywords['aggregation_keywords'] = keywords_union_2.copy()
    fill_hazard_class(union_layer)
    check_layer(union_layer)
    return union_layer
def createSubbasinHRU(projectpath):
    """Use Subbasin polygons to intersect the HRU polygons to create a new
    layer that links Subbasin with HRU."""
    # Work on the fixed (undissolved) HRU layer.
    hru_fixed = h_const.HRULayerName + "_f"
    subbas_hru_unfixed = h_const.subbasHRULayerName + "_u"

    # Delete existing shapefile SubbasinHRU
    h_utils.unloadShapefile(h_const.subbasHRULayerName)
    if h_utils.shapefileExists(projectpath, h_const.subbasHRULayerName):
        if not h_utils.delExistingShapefile(projectpath,
                                            h_const.subbasHRULayerName):
            return False

    # Check Subbasin and HRU geometry types
    if not h_utils.layerNameTypeOK(h_const.subbasLayerName,
                                   h_const.subbasGeomType):
        return False
    if not h_utils.layerNameTypeOK(hru_fixed, h_const.HRUGeomType):
        return False

    # Intersect Subbasin with HRU
    subbas_path = os.path.join(projectpath, h_const.subbasLayerName + ".shp")
    hru_path = os.path.join(projectpath, hru_fixed + ".shp")
    out_path = os.path.join(projectpath, subbas_hru_unfixed + ".shp")
    try:
        processing.run('qgis:intersection',
                       {'INPUT': subbas_path,
                        'INPUT_FIELDS': [],
                        'OUTPUT': out_path,
                        'OVERLAY': hru_path,
                        'OVERLAY_FIELDS': []})
    except Exception as e:
        print("intersecting %s with %s!" % (subbas_path, hru_path))
        print(str(e))
        return False

    # Clean geometry of the intersection result
    if not h_utils.fixgeometry(projectpath, subbas_hru_unfixed,
                               h_const.subbasHRULayerName):
        return False

    # Update the area values of the SubbasinHRU polygons in the attr. table
    h_utils.loadShapefileToCanvas(projectpath, h_const.subbasHRULayerName)
    ok = h_utils.addMeasureToAttrTable(h_const.subbasHRULayerName,
                                       h_const.subbasHRUFieldArea)
    h_utils.unloadShapefile(h_const.subbasHRULayerName)
    return ok
def createGroundwaterSubbasinHRU(prjpath):
    """Use groundwater cells to intersect the subbasinHRU polygons to create
    a SubGroundHRU layer."""
    subbasHRUfixedLayerName = h_const.subbasHRULayerName + "_f"

    # Add to the attr. table of Groundwater a field that keeps the cells' id
    ok = h_utils.addShapeIdsToAttrTable(h_const.grdwatLayerName,
                                        h_const.grdwatFieldId)
    if not ok:
        return False

    # Delete existing shapefile SubGroundHRU
    h_utils.unloadShapefile(h_const.grdwatSubbasHRULayerName)
    if h_utils.shapefileExists(prjpath, h_const.grdwatSubbasHRULayerName):
        ok = h_utils.delExistingShapefile(prjpath,
                                          h_const.grdwatSubbasHRULayerName)
        if not ok:
            return False

    # Before intersect, fix SubbasinHRU geometry
    ok = h_utils.fixgeometry(prjpath, h_const.subbasHRULayerName,
                             subbasHRUfixedLayerName)
    if not ok:
        return False

    # Intersect Groundwater with SubbasinHRU
    subhrufixlayerpath = os.path.join(prjpath,
                                      subbasHRUfixedLayerName + ".shp")
    grdwatlayerpath = os.path.join(prjpath, h_const.grdwatLayerName + ".shp")
    outlayerpath = os.path.join(prjpath,
                                h_const.grdwatSubbasHRULayerName + ".shp")
    try:
        processing.run('qgis:intersection',
                       {'INPUT': subhrufixlayerpath,
                        'INPUT_FIELDS': [],
                        'OUTPUT': outlayerpath,
                        'OVERLAY': grdwatlayerpath,
                        'OVERLAY_FIELDS': []})
    except Exception as e:
        print(str(e))
        print("Intersecting %s with %s" % (subhrufixlayerpath,
                                           grdwatlayerpath))
        # BUG FIX: this handler used to test the stale `ok` flag, which is
        # always True at this point, so intersection failures were
        # silently ignored and the function carried on.
        return False

    # Load SubGroundHRU
    h_utils.loadShapefileToCanvas(prjpath, h_const.grdwatSubbasHRULayerName)
    # Update the area values of the SubGroundHRU polygons in the attr. table
    ok = h_utils.addMeasureToAttrTable(h_const.grdwatSubbasHRULayerName,
                                       h_const.grdwatSubbasHRUFieldArea)
    # Unload SubGroundHRU layer
    h_utils.unloadShapefile(h_const.grdwatSubbasHRULayerName)
    return ok
def rasterstatistics(prjpath, DEMlayer, masklayer, outlayer):
    """Calculate the average value of a raster layer over the provided
    polygons in a mask layer."""
    raster_path = os.path.join(prjpath, DEMlayer + ".tif")
    mask_path = os.path.join(prjpath, masklayer + ".shp")
    result_path = os.path.join(prjpath, outlayer + ".shp")

    # Only the MEAN statistic is computed; everything else is disabled.
    params = {
        'COUNT': False,
        'GRIDS': [raster_path],
        'MAX': False,
        'MEAN': True,
        'METHOD': 1,
        'MIN': False,
        'NAMING': 1,
        'POLYGONS': mask_path,
        'QUANTILE': 0,
        'RANGE': False,
        'RESULT': result_path,
        'STDDEV': False,
        'SUM': False,
        'VAR': False
    }
    try:
        processing.run('saga:rasterstatisticsforpolygons', params)
    except Exception as e:
        print("rasterstatistics of %s on %s!" % (masklayer, raster_path))
        print(str(e))
        return False
    return True
def createRiverGroundwater(prjpath):
    """Use groundwater cells to clip the river segments to create a new
    layer."""
    # Add to the attr. table of Groundwater a field that keeps the cells id
    ok = h_utils.addShapeIdsToAttrTable(h_const.grdwatLayerName,
                                        h_const.grdwatFieldId)
    if not ok:
        return False

    # Add to the attr. table of River a field that keeps the segments id
    ok = h_utils.addShapeIdsToAttrTable(h_const.riverLayerName,
                                        h_const.riverFieldId)
    if not ok:
        return False

    # Delete existing shapefile
    h_utils.unloadShapefile(h_const.riverGrdwatLayerName)
    if h_utils.shapefileExists(prjpath, h_const.riverGrdwatLayerName):
        ok = h_utils.delExistingShapefile(prjpath,
                                          h_const.riverGrdwatLayerName)
        if not ok:
            return False

    # Intersect river with Groundwater
    riverlayerpath = os.path.join(prjpath, h_const.riverLayerName + ".shp")
    grdwatlayerpath = os.path.join(prjpath, h_const.grdwatLayerName + ".shp")
    outlayerpath = os.path.join(prjpath,
                                h_const.riverGrdwatLayerName + ".shp")
    try:
        processing.run('qgis:intersection',
                       {'INPUT': riverlayerpath,
                        'INPUT_FIELDS': [],
                        'OUTPUT': outlayerpath,
                        'OVERLAY': grdwatlayerpath,
                        'OVERLAY_FIELDS': []})
    except Exception as e:
        print("Intersecting %s with %s" % (riverlayerpath, grdwatlayerpath))
        print(str(e))
        # BUG FIX: this handler used to test the stale `ok` flag, which is
        # always True at this point, so intersection failures were
        # silently ignored and the function carried on.
        return False

    # Load RiverGroundwater
    h_utils.loadShapefileToCanvas(prjpath, h_const.riverGrdwatLayerName)
    # Update the length of the segments to the RiverGroundwater attr. table
    ok = h_utils.addMeasureToAttrTable(h_const.riverGrdwatLayerName,
                                       h_const.riverGrdwatFieldLength)
    # Unload the layer
    h_utils.unloadShapefile(h_const.riverGrdwatLayerName)
    return ok
def intersection(source, mask):
    """Intersect two layers.

    Issue https://github.com/inasafe/inasafe/issues/3186

    :param source: The vector layer to clip.
    :type source: QgsVectorLayer

    :param mask: The vector layer to use for clipping.
    :type mask: QgsVectorLayer

    :return: The clip vector layer.
    :rtype: QgsVectorLayer

    .. versionadded:: 4.0
    """
    layer_name = intersection_steps['output_layer_name'] % (
        source.keywords['layer_purpose'])

    parameters = {'INPUT': source, 'OVERLAY': mask, 'OUTPUT': 'memory:'}

    # TODO implement callback through QgsProcessingFeedback object
    initialize_processing()
    feedback = create_processing_feedback()
    context = create_processing_context(feedback=feedback)
    result = processing.run('native:intersection', parameters,
                            context=context)

    if result is None:
        raise ProcessingInstallationError

    intersect = result['OUTPUT']
    intersect.setName(layer_name)

    # Assemble the output keywords from copies so the input layers'
    # metadata is never shared with the result.
    keywords = dict(source.keywords)
    keywords['title'] = layer_name
    keywords['layer_purpose'] = layer_purpose_exposure_summary['key']
    merged_fields = dict(source.keywords['inasafe_fields'])
    merged_fields.update(mask.keywords['inasafe_fields'])
    keywords['inasafe_fields'] = merged_fields
    keywords['hazard_keywords'] = dict(mask.keywords['hazard_keywords'])
    keywords['exposure_keywords'] = dict(source.keywords)
    keywords['aggregation_keywords'] = dict(
        mask.keywords['aggregation_keywords'])
    intersect.keywords = keywords

    check_layer(intersect)
    return intersect
def checkGrass7IsInstalled(ignorePreviousState=False):
    """Return a translated error message when GRASS GIS 7 appears missing
    or misconfigured; return None (and cache the positive result) when the
    installation is usable."""
    if isWindows():
        path = Grass7Utils.grassPath()
        if path == '':
            return Grass7Utils.tr(
                'GRASS GIS 7 folder is not configured. Please configure '
                'it before running GRASS GIS 7 algorithms.')
        # Probe for one known module binary to validate the folder.
        cmdpath = os.path.join(path, 'bin', 'r.out.gdal.exe')
        if not os.path.exists(cmdpath):
            return Grass7Utils.tr(
                'The specified GRASS 7 folder "{}" does not contain '
                'a valid set of GRASS 7 modules.\nPlease, go to the '
                'Processing settings dialog, and check that the '
                'GRASS 7\nfolder is correctly configured'.format(os.path.join(path, 'bin')))

    # Skip the (slow) smoke test when a previous run already succeeded.
    if not ignorePreviousState:
        if Grass7Utils.isGrass7Installed:
            return
    try:
        from processing import run
        # Smoke test: run a trivial GRASS algorithm and check its output.
        result = run(
            'grass7:v.voronoi',
            points(),
            False,
            False,
            None,
            -1,
            0.0001,
            0,
            None,
        )
        if not os.path.exists(result['output']):
            return Grass7Utils.tr(
                'It seems that GRASS GIS 7 is not correctly installed and '
                'configured in your system.\nPlease install it before '
                'running GRASS GIS 7 algorithms.')
    except Exception:
        # BUG FIX: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt and SystemExit.
        return Grass7Utils.tr(
            'Error while checking GRASS GIS 7 installation. GRASS GIS 7 '
            'might not be correctly configured.\n')

    Grass7Utils.isGrass7Installed = True
def processAlgorithm(self, parameters, context, feedback):
    """
    Geocode the address field of each input feature with the HERE
    geocoder API and write one point feature per address to the sink.
    """
    # urllib is only needed to URL-encode the free-text address.
    from urllib.parse import quote

    # Retrieve the feature source and the name of the address field.
    source = self.parameterAsSource(parameters, self.INPUT, context)
    addressField = self.parameterAsString(parameters, self.AddressField,
                                          context)
    feedback.pushInfo(addressField)

    # If source was not found, throw an exception to indicate that the
    # algorithm encountered a fatal error.
    if source is None:
        raise QgsProcessingException(
            self.invalidSourceError(parameters, self.INPUT))

    # Output schema: original id/address plus the geocoder's response.
    fields = QgsFields()
    fields.append(QgsField("id", QVariant.Int))
    fields.append(QgsField("oldAddress", QVariant.String))
    fields.append(QgsField("lat", QVariant.Double))
    fields.append(QgsField("lng", QVariant.Double))
    fields.append(QgsField("address", QVariant.String))
    fields.append(QgsField("country", QVariant.String))
    fields.append(QgsField("state", QVariant.String))
    fields.append(QgsField("county", QVariant.String))
    fields.append(QgsField("city", QVariant.String))
    fields.append(QgsField("district", QVariant.String))
    fields.append(QgsField("street", QVariant.String))
    fields.append(QgsField("number", QVariant.String))
    fields.append(QgsField("zip", QVariant.String))
    fields.append(QgsField("relevance", QVariant.Double))
    fields.append(QgsField("qu_country", QVariant.Double))
    fields.append(QgsField("qu_city", QVariant.Double))
    fields.append(QgsField("qu_street", QVariant.Double))
    fields.append(QgsField("qu_number", QVariant.Double))
    fields.append(QgsField("matchtype", QVariant.String))

    (sink, dest_id) = self.parameterAsSink(
        parameters,
        self.OUTPUT,
        context,
        fields,
        QgsWkbTypes.Point,
        QgsCoordinateReferenceSystem(4326)
    )

    # Send some information to the user
    feedback.pushInfo('{} addresses to geocode'.format(source.featureCount()))

    # If sink was not created, throw an exception to indicate that the
    # algorithm encountered a fatal error.
    if sink is None:
        raise QgsProcessingException(
            self.invalidSinkError(parameters, self.OUTPUT))

    # Compute the number of steps to display within the progress bar and
    # get features from source
    total = 100.0 / source.featureCount() if source.featureCount() else 0
    features = source.getFeatures()

    # get the keys:
    credFile = os.path.dirname(os.path.realpath(__file__)) + os.sep + 'creds' + os.sep + 'credentials.json'
    feedback.pushInfo('{} as the file for credentials'.format(credFile))
    creds = self.loadCredFunctionAlg()

    for current, feature in enumerate(features):
        # Stop the algorithm if cancel button has been clicked
        if feedback.isCanceled():
            break

        # Get the location from the API.  BUG FIX: the free-text address
        # is now URL-encoded; previously it was concatenated raw, so
        # spaces, '&' or non-ASCII characters corrupted the query string.
        ApiUrl = ("https://geocoder.api.here.com/6.2/geocode.json?app_id="
                  + creds["id"] + "&app_code=" + creds["code"]
                  + "&searchtext=" + quote(str(feature[addressField])))
        r = requests.get(ApiUrl)
        responseAddress = json.loads(r.text)["Response"]["View"][0]["Result"][0]
        geocodeResponse = self.convertGeocodeResponse(responseAddress)
        lat = responseAddress["Location"]["DisplayPosition"]["Latitude"]
        lng = responseAddress["Location"]["DisplayPosition"]["Longitude"]

        # Add a feature to the sink
        fet = QgsFeature()
        fet.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(lng, lat)))
        fet.setAttributes([
            feature.id(),
            feature[addressField],
            lat,
            lng,
            geocodeResponse["Label"],
            geocodeResponse["Country"],
            geocodeResponse["State"],
            geocodeResponse["County"],
            geocodeResponse["City"],
            geocodeResponse["District"],
            geocodeResponse["Street"],
            geocodeResponse["HouseNumber"],
            geocodeResponse["PostalCode"],
            geocodeResponse["Relevance"],
            geocodeResponse["CountryQuality"],
            geocodeResponse["CityQuality"],
            geocodeResponse["StreetQuality"],
            geocodeResponse["NumberQuality"],
            geocodeResponse["MatchType"]
        ])
        sink.addFeature(fet, QgsFeatureSink.FastInsert)

        # Update the progress bar
        feedback.setProgress(int(current * total))

    # (The template's dead `if False:` buffer example has been removed.)
    return {self.OUTPUT: dest_id}
def fwdet(inundation_polygon, dem, water_depth_output_filename=None):
    '''
    Calculate water depth from a flood extent polygon (e.g. from remote
    sensing analysis) based on an underlying DEM.

    Program procedure:
    1. Extract a clipping DEM using the inundPolygon and elevation DEM
    2. Extract polyline from inundPolygon
    3. Convert polyline to raster
    4. Associate raster line values with underlying DEM values
    5. Use grass7 Grow Distance function to calculate approximated flooding water level
    6. Run saga gaussian filter to smooth the the grass7 Grow Distance output

    Publication: Cohen et al., https://doi.org/10.1111/1752-1688.12609
    '''
    # Only the names actually used below are imported (os.system,
    # shutil.copy2 and several qgis classes were imported but unused).
    from os.path import dirname, join, splitext
    from shutil import copyfile

    # Qgis imports
    import processing
    from qgis.core import QgsRasterLayer

    dem_layer = QgsRasterLayer(dem, 'dem_extent')
    dem_size_x = dem_layer.rasterUnitsPerPixelX()
    dem_size_y = dem_layer.rasterUnitsPerPixelY()

    # Function input parameter dictionaries
    inudation_polygon_input = {
        'INPUT': inundation_polygon,
        'OUTPUT': 'TEMPORARY_OUTPUT',
    }
    clip_input = {
        'INPUT': dem,
        'POLYGONS': inundation_polygon,
        'OUTPUT': 'TEMPORARY_OUTPUT',
    }
    # End function input parameter dictionaries

    # Begin processing
    # Fix inundation polygon geometries
    flood_extent_polygon = processing.run(
        "native:fixgeometries", inudation_polygon_input)['OUTPUT']

    polygons_to_lines_input = {
        'INPUT': flood_extent_polygon,
        'OUTPUT': 'TEMPORARY_OUTPUT',
    }
    # Polygons to polylines proceedure
    flood_extent_polyline = processing.run(
        "native:polygonstolines", polygons_to_lines_input)['OUTPUT']

    # Clip dem to inundation polygon
    clip_dem = processing.run(
        "saga:cliprasterwithpolygon", clip_input)['OUTPUT']

    rasterize_input = {
        'INPUT': flood_extent_polyline,
        'FIELD': '',
        'BURN': 1,
        'UNITS': 1,
        'WIDTH': dem_size_x,
        'HEIGHT': dem_size_y,
        'EXTENT': float_extent(flood_extent_polyline),
        'NODATA': 0,
        'OPTIONS': '',
        'DATA_TYPE': 0,
        'INIT': None,
        'INVERT': False,
        'EXTRA': '',
        'OUTPUT': 'TEMPORARY_OUTPUT'
    }
    flood_extent_rasterized = processing.run(
        "gdal:rasterize", rasterize_input)['OUTPUT']

    # associate underlying dem values to lineRaster
    extracted_elevation = raster_calculator(
        [flood_extent_rasterized, dem], '({0} * {1}) / {0}', 0,
        'extracted_elevation.tif')

    grow_distance_input = {
        'input': extracted_elevation,
        'metric': 0,
        '-m': False,
        '-': False,  # NOTE(review): suspicious flag key - confirm against grass7:r.grow.distance
        'distance': 'TEMPORARY_OUTPUT',
        # BUG FIX: 'value' appeared twice in this dict literal; the first
        # ('TEMPORARY_OUTPUT') entry was dead and has been removed.
        'value': join(dirname(extracted_elevation), 'euclidean_nearest.tif'),
        'GRASS_REGION_PARAMETER': None,
        'GRASS_REGION_CELLSIZE_PARAMETER': 0,
        'GRASS_RASTER_FORMAT_OPT': '',
        'GRASS_RASTER_FORMAT_META': '',
    }
    euclidean_nearest = processing.run(
        "grass7:r.grow.distance", grow_distance_input)['value']

    # clip grow distance output using clipDEM
    flood_water_depth = raster_calculator(
        [clip_dem, euclidean_nearest], '(({1} - {0}) > 0) * ({1} - {0})', 0,
        'waterDepth.tif')

    low_pass_filter_input = {
        'INPUT': flood_water_depth,
        'SIGMA': 1,
        'MODE': 0,
        'RADIUS': 3,
        'RESULT': 'TEMPORARY_OUTPUT'
    }
    low_pass_filter = processing.run(
        "saga:gaussianfilter", low_pass_filter_input)['RESULT']

    # Persist the results when an output filename was requested.
    if water_depth_output_filename:
        copyfile(flood_water_depth, water_depth_output_filename)
        low_pass_water_depth_output_filename = '{}_low_pass.{}'.format(
            splitext(water_depth_output_filename)[0], 'tif')
        low_pass_outfile_input = {
            'INPUT': low_pass_filter,
            'TARGET_CRS': None,
            'NODATA': None,
            'COPY_SUBDATASETS': False,
            'OPTIONS': '',
            'EXTRA': '',
            'DATA_TYPE': 0,
            'OUTPUT': low_pass_water_depth_output_filename
        }
        processing.run("gdal:translate", low_pass_outfile_input)
poly_contours.selectByExpression("ID = " + str(player) + "and T = " + str(contours[-2]) + ' and "T Stamp" = ' + str(T0)) intersection_lines.selectByExpression("Player1 = " + str(player) + ' and "T Stamp" = ' + str(T0)) algresult1 = processing.run( "native:splitwithlines", { 'INPUT': QgsProcessingFeatureSourceDefinition( poly_contours.id(), True), 'LINES': QgsProcessingFeatureSourceDefinition( intersection_lines.id(), True), 'OUTPUT': 'TEMPORARY_OUTPUT' }) output1 = algresult1["OUTPUT"] algresult2 = processing.run( "native:selectbylocation", { 'INPUT': output1, 'PREDICATE': [1], 'INTERSECT': QgsProcessingFeatureSourceDefinition( centroids.id(), True),
def processAlgorithm(self, parameters, context, model_feedback):
    """Model pipeline: drop attribute fields, reproject to an equal-area
    CRS, fix geometries, compute area in km2, then reproject the result
    back to WGS84."""
    # Use a multi-step feedback, so that individual child algorithm
    # progress reports are adjusted for the overall progress of the model.
    feedback = QgsProcessingMultiStepFeedback(5, model_feedback)
    results = {}
    outputs = {}

    # Drop field(s)
    drop_params = {
        'COLUMN': [
            'ne_10m_adm', 'ScaleRank', 'LabelRank', 'FeatureCla', 'OID_',
            'SOVEREIGNT', 'SOV_A3', 'ADM0_DIF', 'LEVEL', 'TYPE', 'ADM0_A3',
            'GEOU_DIF', 'GEOUNIT', 'GU_A3', 'SU_DIF', 'SUBUNIT', 'SU_A3',
            'NAME', 'ABBREV', 'POSTAL', 'NAME_FORMA', 'TERR_', 'NAME_SORT',
            'MAP_COLOR', 'POP_EST', 'GDP_MD_EST', 'FIPS_10_', 'ISO_A2',
            'ISO_N3'
        ],
        'INPUT': 'C:/Users/se.4537/Dropbox/PoliteconGIS/LBS_2020/PhD/lecture_2/gis_data/ne_10m_admin_0_countries/ne_10m_admin_0_countries.shp',
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['DropFields'] = processing.run(
        'qgis:deletecolumn', drop_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(1)
    if feedback.isCanceled():
        return {}

    # Reproject layer (equal-area CRS so the area computation is valid)
    reproject_params = {
        'INPUT': outputs['DropFields']['OUTPUT'],
        'OPERATION': '',
        'TARGET_CRS': QgsCoordinateReferenceSystem('ESRI:54034'),
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['ReprojectLayer'] = processing.run(
        'native:reprojectlayer', reproject_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(2)
    if feedback.isCanceled():
        return {}

    # Fix geometries
    fix_params = {
        'INPUT': outputs['ReprojectLayer']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['FixGeometries'] = processing.run(
        'native:fixgeometries', fix_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(3)
    if feedback.isCanceled():
        return {}

    # Field calculator (area in square kilometres)
    calc_params = {
        'FIELD_LENGTH': 10,
        'FIELD_NAME': 'km2area',
        'FIELD_PRECISION': 3,
        'FIELD_TYPE': 0,
        'FORMULA': 'area($geometry)/1000000',
        'INPUT': outputs['FixGeometries']['OUTPUT'],
        'NEW_FIELD': True,
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['FieldCalculator'] = processing.run(
        'qgis:fieldcalculator', calc_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(4)
    if feedback.isCanceled():
        return {}

    # Reproject layer (back to WGS84 for the final output)
    final_params = {
        'INPUT': outputs['FieldCalculator']['OUTPUT'],
        'OPERATION': '',
        'TARGET_CRS': QgsCoordinateReferenceSystem('EPSG:4326'),
        'OUTPUT': parameters['Test123']
    }
    outputs['ReprojectLayer'] = processing.run(
        'native:reprojectlayer', final_params, context=context,
        feedback=feedback, is_child_algorithm=True)
    results['Test123'] = outputs['ReprojectLayer']['OUTPUT']
    return results
def processAlgorithm(self, parameters, context, feedback):
    """Append every non-empty layer of a GeoPackage to the matching
    PostGIS table.

    The target schema name must contain the word 'reamb'.  For each
    geometry table of the GeoPackage that holds at least one row, the
    features are appended to the table of the same name in the target
    schema via the 'publibase:appendfeaturestolayer' script.

    :raises QgsProcessingException: when the schema name is invalid or
        a source/target layer cannot be opened.
    """

    # Dummy function to enable running an alg inside an alg without
    # triggering its post-processing.
    def no_post_process(alg, context, feedback):
        pass

    # Retrieving parameters; the connection API changed in QGIS 3.14.
    if qgs_version < 31400:
        connection_name = self.parameterAsString(parameters, self.DATABASE, context)
        db = postgis.GeoDB.from_name(connection_name)
        uri = db.uri
        schema = self.parameterAsString(parameters, self.SCHEMA, context)
    else:
        connection_name = self.parameterAsConnectionName(parameters, self.DATABASE, context)
        md = QgsProviderRegistry.instance().providerMetadata('postgres')
        conn = md.createConnection(connection_name)
        uri = QgsDataSourceUri(conn.uri())
        schema = self.parameterAsSchema(parameters, self.SCHEMA, context)
    geopackage = parameters[self.GEOPACKAGE]

    # Raise error if reamb isn't in schema name.
    if 'reamb' not in schema:
        raise QgsProcessingException('A palavra reamb precisa fazer parte do nome do esquema')

    # Connect with Geopackage and collect the non-empty geometry tables.
    feedback.pushInfo('Listing non-empty layers from geopackage')
    with lite.connect(geopackage) as con:
        feedback.pushInfo('Con = ' + str(con))
        layers_import = []  # will store the non-empty tables
        cur = con.cursor()
        cur.execute("SELECT table_name FROM gpkg_geometry_columns")
        layer_names = [camada[0] for camada in cur.fetchall()]
        feedback.pushInfo('Layers = ' + str(layer_names))
        for layer in layer_names:
            # Fix: quote the identifier so unusual table names cannot
            # break (or inject into) the SQL statement.
            cur.execute('SELECT COUNT(1) FROM "{}"'.format(layer))
            rows_count = cur.fetchall()[0][0]
            if rows_count > 0:
                layers_import.append(layer)
    feedback.pushInfo('Non-empty tables = ' + str(layers_import))
    feedback.pushInfo('')

    # Connect with PostGIS database.
    con = psycopg2.connect(user=uri.username(),
                           password=uri.password(),
                           host=uri.host(),
                           port=uri.port(),
                           database=uri.database())
    feedback.pushInfo('Uri = ' + str(uri))
    feedback.pushInfo('Uri text = ' + uri.uri())
    feedback.pushInfo('Connection = ' + str(con))

    # Clean PostGIS schema if marked (currently disabled).
    # cleanSchema = self.parameterAsBool(parameters, self.CLEAN_SCHEMA, context)
    cleanSchema = False
    if cleanSchema:
        with con:
            select_schema_tables = "SELECT table_name FROM information_schema.tables " \
                                   "WHERE table_type = '{}' AND table_schema = '{}'".format('BASE TABLE', schema)
            cur = con.cursor()
            cur.execute(select_schema_tables)
            rows = cur.fetchall()
            schema_tables = [table[0] for table in rows]
            for table in schema_tables:
                feedback.pushInfo("Deleting from {}.{}".format(schema, table))
                cur.execute("DELETE FROM {}.{}".format(schema, table))
                con.commit()
            cur.close()
    # Fix: close the psycopg2 connection on every path; it is only used
    # by the (disabled) clean-schema step above.
    con.close()
    feedback.pushInfo('')

    # Import layers
    for layer in layers_import:
        feedback.pushInfo("Importing {}.{}".format(schema, layer))

        # QGIS Vector Layer from source geopackage table.
        uri_geopackage = geopackage + '|layername=' + layer
        source = QgsVectorLayer(uri_geopackage, 'geopackage_layer', 'ogr')
        if not source.isValid():
            raise QgsProcessingException('Source layer not valid')

        # QGIS Vector Layer from target PostGIS table.
        # NOTE(review): this aliases (and mutates) the shared `uri`
        # object on every iteration; kept as-is for parity.
        uri_table = uri
        uri_table.setDataSource(schema, layer, 'geom')
        uri_table.setWkbType(source.wkbType())
        uri_table.setSrid(str(source.sourceCrs().postgisSrid()))
        target = QgsVectorLayer(uri_table.uri(), 'schema_table', 'postgres')
        if not target.isValid():
            raise QgsProcessingException('Target layer not valid')

        # Run QGIS script for importing.
        processing.run("publibase:appendfeaturestolayer",
                       {'SOURCE_LAYER': source,
                        'TARGET_LAYER': target,
                        'ACTION_ON_DUPLICATE': 0},
                       context=context, feedback=feedback,
                       onFinish=no_post_process)
        feedback.pushInfo('')

    return {'Result': 'Layers imported'}
def run(self):
    """Run method that performs all the real work.

    Shows the dialog, lets the user pick a clip (overlay) vector layer,
    then clips every checked layer of the project (vectors with
    native:clip, rasters with gdal:cliprasterbymasklayer) into the
    plugin's output folder, optionally loading the results.

    :raises FileNotFoundError: when the output folder does not exist.
    """
    # show the dialog and populate the combo box with vector layers
    self.dlg.show()
    self.dlg.comboBox.clear()
    layers = QgsProject.instance().mapLayers().values()
    for layer in layers:
        if layer.type() == QgsMapLayer.VectorLayer:
            self.dlg.comboBox.addItem(layer.name(), layer)

    # Run the dialog event loop
    result = self.dlg.exec_()
    # See if OK was pressed
    if result:
        index = self.dlg.comboBox.currentIndex()
        selection = self.dlg.comboBox.itemData(index)
        checkedLayers = QgsProject.instance().layerTreeRoot().checkedLayers()

        # search existence of output folder, if not create the subfolders
        if not os.path.isdir(self.folderName):
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), self.folderName)
        directory = self.folderName + "/vectors"
        if not os.path.exists(directory):
            os.makedirs(directory)
        directory = self.folderName + "/rasters"
        if not os.path.exists(directory):
            os.makedirs(directory)

        # Progress bar
        progressMessageBar = iface.messageBar().createMessage("Clipping...")
        progress = QProgressBar()
        # Fix: the maximum is the number of layers to process; the
        # original used len(checkedLayers) - 1 while values go up to
        # len(checkedLayers), overflowing the bar on the last layer.
        progress.setMaximum(len(checkedLayers))
        progress.setAlignment(Qt.AlignLeft | Qt.AlignVCenter)
        progressMessageBar.layout().addWidget(progress)
        iface.messageBar().pushWidget(progressMessageBar, Qgis.Info)
        progression = 0

        # clip part
        for layer in checkedLayers:
            out = None
            # clip vector layer (if displayed); never clip the overlay itself
            if layer.type() == QgsMapLayer.VectorLayer and layer != selection:
                output = self.folderName + "/vectors/clip_" + layer.name() + ".shp"
                # check file isn't opened and is writable
                version = 0
                while self.isFileOpened(output):
                    output = self.folderName + "/vectors/clip_" + layer.name() + "(" + str(version) + ").shp"
                    version += 1
                processing.run("native:clip", {"INPUT": layer.id(),
                                               "OVERLAY": selection.id(),
                                               "OUTPUT": output})
                # load layer
                if self.dlg.checkBox.isChecked():
                    out = iface.addVectorLayer(output, "", "ogr")
                    if not out:
                        iface.messageBar().pushMessage("Error", "Could not load " + output, level=Qgis.Warning)
            # clip raster layer (if displayed)
            if layer.type() == QgsMapLayer.RasterLayer:
                # get extension about the raster
                filename, file_extension = os.path.splitext(layer.source())
                output = self.folderName + "/rasters/clip_" + layer.name() + file_extension
                # check file isn't opened and is writable
                version = 0
                while self.isFileOpened(output):
                    output = self.folderName + "/rasters/clip_" + layer.name() + "(" + str(version) + ")" + file_extension
                    version += 1
                processing.run("gdal:cliprasterbymasklayer", {"INPUT": layer.id(),
                                                              "MASK": selection.id(),
                                                              "CROP_TO_CUTLINE": True,
                                                              "OUTPUT": output})
                # load layer
                if self.dlg.checkBox.isChecked():
                    out = iface.addRasterLayer(output, "")
                    # Fix: addRasterLayer can return None; the original
                    # called out.isValid() unguarded and could crash.
                    if out is None or not out.isValid():
                        iface.messageBar().pushMessage("Error", "Could not load " + output, level=Qgis.Warning)
            # Update progression
            time.sleep(1)  # give the message bar time to refresh
            progress.setValue(progression + 1)
            progression += 1
        iface.messageBar().clearWidgets()
def processAlgorithm(self, parameters, context, model_feedback):
    """Join the flower observations to the Schleswig-Holstein layer and
    normalise the resulting attribute table.

    Steps: load both shapefiles, spatially join them, refactor the
    fields, then replace NULL District and State values.
    """
    # Scale child-algorithm progress over the 6 steps of the model.
    feedback = QgsProcessingMultiStepFeedback(6, model_feedback)
    results = {}
    outputs = {}

    def run_step(alg_id, params):
        # Thin wrapper: every child algorithm is invoked the same way.
        return processing.run(alg_id, params, context=context,
                              feedback=feedback, is_child_algorithm=True)

    def field_spec(name, length, precision, ftype):
        # One entry of the refactor-fields mapping; the expression is
        # simply the quoted source field name.
        return {'expression': '"{}"'.format(name), 'length': length,
                'name': name, 'precision': precision, 'type': ftype}

    # Load flower layer into project
    outputs['LoadFlowerLayerIntoProject'] = run_step('native:loadlayer', {
        'INPUT': '/home/gift/Documents/git_projects/qgis/QGIS_Flower_Analysis/output_shapefiles/Flower.shp',
        'NAME': 'f'
    })

    feedback.setCurrentStep(1)
    if feedback.isCanceled():
        return {}

    # Load schleswig holstein layer into project
    outputs['LoadSchleswigHolsteinLayerIntoProject'] = run_step('native:loadlayer', {
        'INPUT': '/home/gift/Documents/git_projects/qgis/QGIS_Flower_Analysis/output_shapefiles/Schleswig_Holstein.shp',
        'NAME': 'sh'
    })

    feedback.setCurrentStep(2)
    if feedback.isCanceled():
        return {}

    # Join attributes by location
    outputs['JoinAttributesByLocation'] = run_step('qgis:joinattributesbylocation', {
        'DISCARD_NONMATCHING': False,
        'INPUT': outputs['LoadFlowerLayerIntoProject']['OUTPUT'],
        'JOIN': outputs['LoadSchleswigHolsteinLayerIntoProject']['OUTPUT'],
        'JOIN_FIELDS': None,
        'METHOD': 1,
        'PREDICATE': [0],
        'PREFIX': '',
        'OUTPUT': parameters['J']
    })
    results['J'] = outputs['JoinAttributesByLocation']['OUTPUT']

    feedback.setCurrentStep(3)
    if feedback.isCanceled():
        return {}

    # Refactor fields
    outputs['RefactorFields'] = run_step('qgis:refactorfields', {
        'FIELDS_MAPPING': [
            field_spec('Flo_color', 254, 0, 10),
            field_spec('Flo_count', 11, 0, 10),
            field_spec('Flo_shape', 254, 0, 10),
            field_spec('Month', 200, 0, 10),
            field_spec('Day', 200, 0, 10),
            field_spec('Year', 200, 0, 10),
            field_spec('Key', 200, 0, 10),
            field_spec('Matured', 254, 0, 10),
            field_spec('Latitude', 30, 15, 6),
            field_spec('Longitude', 30, 15, 6),
            field_spec('Location', 254, 0, 10),
            field_spec('Date', 10, 0, 14),
            field_spec('Code', 4, 0, 10),
            field_spec('State', 50, 0, 10),
            field_spec('District', 50, 0, 10),
        ],
        'INPUT': outputs['JoinAttributesByLocation']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    })

    feedback.setCurrentStep(4)
    if feedback.isCanceled():
        return {}

    # Replace null values in District
    outputs['ReplaceNullValuesInDistrict'] = run_step('qgis:fieldcalculator', {
        'FIELD_LENGTH': 10,
        'FIELD_NAME': 'District',
        'FIELD_PRECISION': 3,
        'FIELD_TYPE': 2,
        'FORMULA': 'CASE \nWHEN "District" IS NULL THEN \'None\'\nELSE "District"\nEND',
        'INPUT': outputs['RefactorFields']['OUTPUT'],
        'NEW_FIELD': False,
        'OUTPUT': parameters['D']
    })
    results['D'] = outputs['ReplaceNullValuesInDistrict']['OUTPUT']

    feedback.setCurrentStep(5)
    if feedback.isCanceled():
        return {}

    # Replace null values in state
    outputs['ReplaceNullValuesInState'] = run_step('qgis:fieldcalculator', {
        'FIELD_LENGTH': 10,
        'FIELD_NAME': 'State',
        'FIELD_PRECISION': 3,
        'FIELD_TYPE': 2,
        'FORMULA': 'CASE \nWHEN "State" IS NULL THEN \'Outside\' \nELSE "State"\nEND',
        'INPUT': outputs['ReplaceNullValuesInDistrict']['OUTPUT'],
        'NEW_FIELD': False,
        'OUTPUT': parameters['S']
    })
    results['S'] = outputs['ReplaceNullValuesInState']['OUTPUT']
    return results
def _getWater(self, airspace, buffer):
    """Get the water layer.

    Builds a single-part 'Water' layer: clips the GSHHS coastline and
    lake shapefiles to *buffer*, simplifies the coastline, removes tiny
    islands, inverts land into sea, merges sea with lakes, re-clips to
    *airspace*, adds an 'elevation' attribute of 0 and styles the layer
    cyan.
    """
    gshhs_dir = self._config.gshhsPath

    print("Loading coastlines and lakes")
    coast = QgsVectorLayer(
        os.path.join(gshhs_dir, 'GSHHS_shp/f/GSHHS_f_L1.shp'), 'Coastline')
    lake = QgsVectorLayer(
        os.path.join(gshhs_dir, 'GSHHS_shp/f/GSHHS_f_L2.shp'), 'Lakes')

    # Clip both datasets down to the working buffer.
    print("Clipping coastlines to buffer")
    clipped_coast = processing.run('qgis:clip', {
        'INPUT': coast,
        'OVERLAY': buffer,
        'OUTPUT': _MEMORY_OUTPUT
    })['OUTPUT']
    clipped_lakes = processing.run('qgis:clip', {
        'INPUT': lake,
        'OVERLAY': buffer,
        'OUTPUT': _MEMORY_OUTPUT
    })['OUTPUT']

    print("Simplify coastline geometries")
    land = processing.run('qgis:simplifygeometries', {
        'INPUT': clipped_coast,
        'TOLERANCE': 0.002,
        'OUTPUT': _MEMORY_OUTPUT
    })['OUTPUT']

    # Drop islands below the area threshold.
    print("Deleting small islands")
    tiny = land.getFeatures(
        QgsFeatureRequest().setFilterExpression("$area < 0.0005"))
    land.dataProvider().deleteFeatures([feat.id() for feat in tiny])

    # The difference between the buffer and the land is the sea.
    print("Inverting coastline")
    sea = processing.run('qgis:difference', {
        'INPUT': buffer,
        'OVERLAY': land,
        'OUTPUT': _MEMORY_OUTPUT
    })['OUTPUT']

    print("Combining lakes and sea")
    all_water = processing.run('qgis:mergevectorlayers', {
        'LAYERS': [sea, clipped_lakes],
        'OUTPUT': _MEMORY_OUTPUT
    })['OUTPUT']

    print("Clipping water to airspace")
    bounded = processing.run('qgis:clip', {
        'INPUT': all_water,
        'OVERLAY': airspace,
        'OUTPUT': _MEMORY_OUTPUT
    })['OUTPUT']

    print("Converting water to single part")
    water = processing.run('qgis:multiparttosingleparts', {
        'INPUT': bounded,
        'OUTPUT': self.getOgrString('Water')
    })['OUTPUT']
    water.setName('Water')

    # Remove tiny patches of water left over after splitting.
    print("Deleting small areas of water")
    tiny = water.getFeatures(
        QgsFeatureRequest().setFilterExpression("$area < 0.0005"))
    water.dataProvider().deleteFeatures([feat.id() for feat in tiny])

    # Every water feature gets elevation 0.
    print("Adding height data")
    water.startEditing()
    water.addAttribute(QgsField("elevation", QVariant.Double))
    elevation_idx = water.fields().indexFromName('elevation')
    for feat in water.getFeatures():
        feat[elevation_idx] = 0
        water.updateFeature(feat)
    water.commitChanges()

    # Styling
    water.renderer().symbol().setColor(QColor.fromRgb(0x00, 0xff, 0xff))
    return water
# timorBiomes = QgsVectorLayer('./tmp_output/timorBiomes.gpkg|layername=final', "Biomes", 'ogr') if not timorBiomes.isValid(): print("Layer failed to load!") else: project.addMapLayer(timorBiomes) # native:joinattributestable timorBiomesESval = processing.run( "native:joinattributestable", { 'DISCARD_NONMATCHING': False, 'FIELD': 'BIOME', 'FIELDS_TO_COPY': [], 'FIELD_2': 'Biome', 'INPUT': timorBiomes, 'INPUT_2': ESval, 'METHOD': 0, 'OUTPUT': 'TEMPORARY_OUTPUT', 'PREFIX': '' })['OUTPUT'] project.addMapLayer(timorBiomesESval) # background uri = "url=http://basemaps.cartocdn.com/light_all/%7Bz%7D/%7Bx%7D/%7By%7D.png&zmax=19&zmin=0&type=xyz" mts_layer = QgsRasterLayer(uri, 'Background: CartoDb Positron', 'wms') bckgr = project.addMapLayer(mts_layer) # wdpa wdpa = QgsVectorLayer(
def processAlgorithm(self, parameters, context, feedback):
    """Import the source vector layer features into gobs.spatial_object.

    Checks geometry-type compatibility between the source layer and the
    chosen target spatial layer, imports the source into a temporary
    PostGIS table, copies the rows into gobs.spatial_object (updating
    on conflict) and finally drops the temporary table.

    Returns a dict with OUTPUT_STATUS (1 on success, 0 on failure) and
    OUTPUT_STRING (human readable message).
    """
    # Database connection name: project variable first, then environment.
    connection_name = QgsExpressionContextUtils.projectScope(
        context.project()).variable('gobs_connection_name')
    if not connection_name:
        connection_name = os.environ.get("GOBS_CONNECTION_NAME")

    spatiallayer = self.SPATIALLAYERS[parameters[self.SPATIALLAYER]]
    sourcelayer = self.parameterAsVectorLayer(parameters, self.SOURCELAYER, context)
    uniqueid = self.parameterAsString(parameters, self.UNIQUEID, context)
    uniquelabel = self.parameterAsString(parameters, self.UNIQUELABEL, context)
    date_validity_min = self.parameterAsString(parameters, self.DATE_VALIDITY_MIN, context)
    manual_date_validity_min = self.parameterAsString(
        parameters, self.MANUAL_DATE_VALIDITY_MIN, context)
    date_validity_max = self.parameterAsString(parameters, self.DATE_VALIDITY_MAX, context)
    manual_date_validity_max = self.parameterAsString(
        parameters, self.MANUAL_DATE_VALIDITY_MAX, context)

    msg = ''
    status = 1

    # The spatial layer id is the last dash-separated token of the label.
    id_spatial_layer = spatiallayer.split('-')[-1].strip()

    feedback.pushInfo(
        tr('CHECK COMPATIBILITY BETWEEN SOURCE AND TARGET GEOMETRY TYPES'))

    # Get spatial layer geometry type.
    sql = '''
        SELECT sl_geometry_type
        FROM gobs.spatial_layer
        WHERE id = {0}
        LIMIT 1
    '''.format(id_spatial_layer)
    target_type = None
    [header, data, rowCount, ok, error_message] = fetchDataFromSqlQuery(connection_name, sql)
    if not ok:
        status = 0
        msg = tr('* The following error has been raised') + ' %s' % error_message
        feedback.reportError(msg)
        raise QgsProcessingException(msg)
    for line in data:
        target_type = line[0].lower()

    # Check multi type.
    target_is_multi = target_type.startswith('multi')

    # Get vector layer geometry type and compare it with the
    # spatial_layer type.  (The original also toggled a dead `ok` flag
    # here; both mismatch branches raise, so the flag was unused.)
    source_type = QgsWkbTypes.geometryDisplayString(
        int(sourcelayer.geometryType())).lower()
    source_wtype = QgsWkbTypes.displayString(int(sourcelayer.wkbType())).lower()
    if not target_type.endswith(source_type):
        msg = tr('Source vector layer and target spatial layer do not have compatible geometry types')
        msg += ' - SOURCE: {}, TARGET: {}'.format(source_type, target_type)
        feedback.pushInfo(msg)
        raise QgsProcessingException(msg)
    source_is_multi = source_wtype.startswith('multi')
    # Cannot import multi type into single type target spatial layer.
    if source_is_multi and not target_is_multi:
        msg = tr('Cannot import a vector layer with multi geometries into a target spatial layer with a simple geometry type defined')
        msg += ' - SOURCE: {}, TARGET: {}'.format(source_wtype, target_type)
        feedback.pushInfo(msg)
        raise QgsProcessingException(msg)

    # Import data to temporary table.
    feedback.pushInfo(tr('IMPORT SOURCE LAYER INTO TEMPORARY TABLE'))
    temp_schema = 'public'
    # Unique-ish table name derived from the current timestamp.
    temp_table = 'temp_' + str(time.time()).replace('.', '')
    processing.run("qgis:importintopostgis", {
        'INPUT': parameters[self.SOURCELAYER],
        'DATABASE': connection_name,
        'SCHEMA': temp_schema,
        'TABLENAME': temp_table,
        'PRIMARY_KEY': 'gobs_id',
        'GEOMETRY_COLUMN': 'geom',
        'ENCODING': 'UTF-8',
        'OVERWRITE': True,
        'CREATEINDEX': False,
        'LOWERCASE_NAMES': False,
        'DROP_STRING_LENGTH': True,
        'FORCE_SINGLEPART': False
    }, context=context, feedback=feedback)
    feedback.pushInfo(
        tr('* Source layer has been imported into temporary table'))

    # Wrap geometries in ST_Multi() when the target is a multi type.
    st_multi_left = ''
    st_multi_right = ''
    if target_is_multi:
        st_multi_left = 'ST_Multi('
        st_multi_right = ')'

    # Target geometry type as the integer used by ST_CollectionExtract.
    geometry_type_integer = 1
    if target_type.replace('multi', '') == 'linestring':
        geometry_type_integer = 2
    if target_type.replace('multi', '') == 'polygon':
        geometry_type_integer = 3

    # Format validity timestamp fields: a manual value wins over a field.
    if manual_date_validity_min.strip():
        manualdate = manual_date_validity_min.strip().replace('/', '-')
        casted_timestamp_min = '''
            '{0}'::timestamp
        '''.format(manualdate)
    else:
        casted_timestamp_min = '''
            s."{0}"::timestamp
        '''.format(date_validity_min)
    has_max_validity = False
    if manual_date_validity_max.strip() or date_validity_max:
        has_max_validity = True
        if manual_date_validity_max.strip():
            manualdate = manual_date_validity_max.strip().replace('/', '-')
            casted_timestamp_max = '''
                '{0}'::timestamp
            '''.format(manualdate)
        else:
            casted_timestamp_max = '''
                s."{0}"::timestamp
            '''.format(date_validity_max)

    # Copy data to spatial_object.
    feedback.pushInfo(tr('COPY IMPORTED DATA TO spatial_object'))
    sql = '''
        INSERT INTO gobs.spatial_object (
            so_unique_id, so_unique_label, geom, fk_id_spatial_layer,
            so_valid_from
    '''
    if has_max_validity:
        sql += ', so_valid_to'
    sql += '''
        )
        SELECT "{so_unique_id}", "{so_unique_label}",
        {st_multi_left}ST_Transform(ST_CollectionExtract(ST_MakeValid(geom),{geometry_type_integer}), 4326){st_multi_right} AS geom,
        {id_spatial_layer},
        {casted_timestamp_min}
    '''.format(so_unique_id=uniqueid,
               so_unique_label=uniquelabel,
               st_multi_left=st_multi_left,
               geometry_type_integer=geometry_type_integer,
               st_multi_right=st_multi_right,
               id_spatial_layer=id_spatial_layer,
               casted_timestamp_min=casted_timestamp_min)
    if has_max_validity:
        sql += ', {casted_timestamp_max}'.format(
            casted_timestamp_max=casted_timestamp_max)
    sql += '''
        FROM "{temp_schema}"."{temp_table}" AS s
        -- Update line if data already exists
        -- i.e. Same external ids for the same layer and the same start validity date
        -- so_unique_id, fk_id_spatial_layer AND so_valid_from are the same
        -- This is considered as the same object as the one already in database
        -- We update the geometry, label, and end date of validity
        ON CONFLICT ON CONSTRAINT spatial_object_unique_key
        DO UPDATE
        SET (geom, so_unique_label, so_valid_to) = (EXCLUDED.geom, EXCLUDED.so_unique_label, EXCLUDED.so_valid_to)
        WHERE True
        ;
    '''.format(temp_schema=temp_schema, temp_table=temp_table)
    try:
        [header, data, rowCount, ok, error_message] = fetchDataFromSqlQuery(connection_name, sql)
        if not ok:
            status = 0
            msg = tr('* The following error has been raised') + ' %s' % error_message
            feedback.reportError(msg)
            feedback.pushInfo(sql)
        else:
            status = 1
            msg = tr('* Source data has been successfully imported !')
            feedback.pushInfo(msg)
    except Exception as e:
        status = 0
        msg = tr('* An unknown error occured while adding features to spatial_object table')
        msg += ' ' + str(e)

    # Check there is no issues with related observation data
    # For each series related to the chosen spatial layer
    # SELECT gobs.find_observation_with_wrong_spatial_object({fk_id_series})
    # v1/ Only check and display warning
    # v2/ Check and try to update with gobs.update_observations_with_wrong_spatial_objects
    # v3/ Find orphans

    # Remove temporary table.
    feedback.pushInfo(tr('DROP TEMPORARY DATA'))
    sql = '''
        DROP TABLE IF EXISTS "%s"."%s"
        ;
    ''' % (temp_schema, temp_table)
    [header, data, rowCount, ok, error_message] = fetchDataFromSqlQuery(connection_name, sql)
    if ok:
        feedback.pushInfo(tr('* Temporary data has been deleted.'))
    else:
        feedback.reportError(
            tr('* An error occured while droping temporary table')
            + ' "%s"."%s"' % (temp_schema, temp_table))

    # Fix: only report success when the import actually succeeded; the
    # original overwrote the failure message unconditionally, so a
    # status of 0 was returned with a success string.
    if status == 1:
        msg = tr('SPATIAL LAYER HAS BEEN SUCCESSFULLY IMPORTED !')
    return {self.OUTPUT_STATUS: status, self.OUTPUT_STRING: msg}
def _getCleanContours(self, contours, perimeter, airspace):
    """Get the cleaned contours.

    Simplifies the contour lines, closes them against the perimeter,
    polygonises the result, eliminates/deletes tiny polygons, clips to
    the airspace and converts to single-part features.
    """
    # Reduce vertex count before any polygon work.
    print("Simplify contours")
    simplified = processing.run('qgis:simplifygeometries', {
        'INPUT': contours,
        'TOLERANCE': 0.002,
        'OUTPUT': _MEMORY_OUTPUT
    })['OUTPUT']
    simplified.setName('Contours - Simplified')

    # Close the contour lines against the perimeter so polygonize
    # produces closed areas.
    print("Merging contours with perimter")
    merged = processing.run('qgis:mergevectorlayers', {
        'LAYERS': [simplified, perimeter],
        'OUTPUT': _MEMORY_OUTPUT
    })['OUTPUT']
    merged.setName('Contours - Merged')

    print("Polygonise contours")
    polygons = processing.run('qgis:polygonize', {
        'INPUT': merged,
        'OUTPUT': _MEMORY_OUTPUT,
    })['OUTPUT']
    polygons.setName('Contours - Polygons')

    # Select all polygons below the 0.00005 sq degree threshold and
    # merge each into the neighbour with the largest common boundary.
    print("Eliminating small contour polygons")
    tiny = polygons.getFeatures(
        QgsFeatureRequest().setFilterExpression('$area < 0.00005'))
    polygons.selectByIds([feat.id() for feat in tiny])
    cleaned = processing.run('qgis:eliminateselectedpolygons', {
        'INPUT': polygons,
        'OUTPUT': _MEMORY_OUTPUT,
        'MODE': 2  # Largest common boundary
    })['OUTPUT']
    cleaned.setName('Contours - Cleaned')

    # Delete any features that weren't eliminated (outside a common boundary).
    print("Deleting remaining small contour polygons")
    tiny = cleaned.getFeatures(
        QgsFeatureRequest().setFilterExpression('$area < 0.00005'))
    cleaned.dataProvider().deleteFeatures([feat.id() for feat in tiny])

    print("Clipping contours to bounds")
    clipped = processing.run('qgis:clip', {
        'INPUT': cleaned,
        'OUTPUT': _MEMORY_OUTPUT,
        'OVERLAY': airspace
    })['OUTPUT']
    clipped.setName('Contours - Clipped')

    print("Converting contours to single part")
    final = processing.run('qgis:multiparttosingleparts', {
        'INPUT': clipped,
        'OUTPUT': self.getOgrString('Contours - Final')
    })['OUTPUT']
    final.setName('Contours - Final')

    # Styling
    final.renderer().symbol().setColor(QColor.fromRgb(0xff, 0x9e, 0x17))
    return final
def unionespacial(self, event):
    """Classify dwellings by population nucleus.

    Joins the cadastre buildings (GML) with the EIEL nucleus perimeters,
    re-joins the unclassified remainder against a 200 m buffer of the
    nuclei, marks anything still unmatched as 'DISEMINADO', merges the
    two classified sets, styles the result by nucleus and loads the
    per-nucleus statistics and the duplicated references into the
    dialog widgets.
    """
    # Datos de viviendas de catastro (cadastre dwellings, GML).
    bu = self.input_gml.filePath()
    # Datos de perimetros de nucleos de poblacion EIEL.
    nucleo = self.input_shp.filePath()

    # Join attributes by location: dwellings vs nuclei.
    bu_output = 'C:/V_EIEL/viviendasclasificadas.shp'
    processing.run("qgis:joinattributesbylocation", {
        'INPUT': bu,
        'JOIN': nucleo,
        'PREDICATE': 0,
        'JOIN_FIELDS': ['CODIGO', 'DENOMINACI'],
        'METHOD': 0,
        'PREFIX': 'NU_',
        'OUTPUT': bu_output
    })
    joinat = QgsVectorLayer(bu_output, "Viviendas clasificadas nucleo", "ogr")
    QgsProject.instance().addMapLayers([joinat])

    # Select the dwellings that got no nucleus code.
    expresion = "NU_CODIGO is NULL"
    joinat.selectByExpression(expresion, QgsVectorLayer.SetSelection)

    # Buffer the nuclei by 200 m.
    nucleo = self.input_shp.filePath()
    file_output = 'C:/V_EIEL/buffernucleo.shp'
    processing.run("native:buffer", {
        'INPUT': nucleo,
        'DISTANCE': 200,
        'SEGMENTS': 10,
        'DISSOLVE': False,
        'END_CAP_STYLE': 0,
        'JOIN_STYLE': 0,
        'MITER_LIMIT': 1,
        'OUTPUT': file_output
    })
    lyrBuffer = QgsVectorLayer(file_output, "Buffer nucleo", "ogr")
    QgsProject.instance().addMapLayers([lyrBuffer])

    # Spatial join of the selected (unclassified) dwellings with the buffer.
    bu_output_2 = 'C:/V_EIEL/viviendasclasificadas_2.shp'
    processing.run("qgis:joinattributesbylocation", {
        'INPUT': QgsProcessingFeatureSourceDefinition(
            'Viviendas clasificadas nucleo', True),
        'JOIN': lyrBuffer,
        'PREDICATE': 0,
        'JOIN_FIELDS': ['CODIGO', 'DENOMINACI'],
        'METHOD': 0,
        'PREFIX': 'NU_',
        'OUTPUT': bu_output_2
    })
    joinat_2 = QgsVectorLayer(bu_output_2, "Viviendas clasificadas buffer", "ogr")
    QgsProject.instance().addMapLayers([joinat_2])
    joinat.removeSelection()
    joinat.commitChanges()

    # Dwellings still unmatched after the buffer join become 'DISEMINADO'.
    # NOTE(review): the original relied on iface.activeLayer() being the
    # just-added layer; kept for behavioural parity.
    joinat_2 = iface.activeLayer()
    expresion_2 = "NU_CODIGO_ is NULL"
    joinat_2.selectByExpression(expresion_2, QgsVectorLayer.SetSelection)
    joinat_2.startEditing()
    # Fix: fetch the selection once instead of re-querying it on every
    # loop iteration, and write each attribute exactly once (the
    # original set each value twice and called updateFeature twice).
    for viv_diseminado in joinat_2.selectedFeatures():
        viv_diseminado["NU_CODIGO_"] = "99"
        viv_diseminado["NU_DENOM_1"] = "DISEMINADO"
        joinat_2.updateFeature(viv_diseminado)
    joinat_2.commitChanges()
    joinat_2.removeSelection()

    # Copy the buffer-join fields back into the canonical NU_ fields.
    joinat_2.startEditing()
    for feature in joinat_2.getFeatures():
        feature.setAttribute(feature.fieldNameIndex('NU_CODIGO'),
                             feature['NU_CODIGO_'])
        feature.setAttribute(feature.fieldNameIndex('NU_DENOMIN'),
                             feature['NU_DENOM_1'])
        joinat_2.updateFeature(feature)
    joinat_2.commitChanges()
    joinat_2.removeSelection()

    # Drop NU_CODIGO_/NU_DENOM_1 so both joined layers share the same
    # attribute structure before merging.
    joinat_2.startEditing()
    joinat_2.deleteAttributes([27, 28])
    joinat_2.updateFields()
    joinat_2.commitChanges()

    # Extract the dwellings classified by the nucleus join...
    expresion_3 = "NU_CODIGO is not NULL"
    joinat.selectByExpression(expresion_3, QgsVectorLayer.SetSelection)
    joinat.startEditing()
    seleccion = 'C:/V_EIEL/viviendasclasificadas_seleccion.shp'
    processing.run("native:saveselectedfeatures", {
        'INPUT': joinat,
        'OUTPUT': seleccion
    })
    nucleo_seleccion = QgsVectorLayer(
        seleccion, "Viviendas clasificadas nucleo seleccion", "ogr")
    QgsProject.instance().addMapLayers([nucleo_seleccion])
    joinat.removeSelection()

    # ...and merge them with the buffer-classified layer.
    resultado = 'C:/V_EIEL/viviendasclasificadas_resultado.shp'
    processing.run("native:mergevectorlayers", {
        'LAYERS': [nucleo_seleccion, joinat_2],
        'OUTPUT': resultado
    })
    resultado_merge = QgsVectorLayer(resultado, "Viviendas clasificadas", "ogr")
    QgsProject.instance().addMapLayers([resultado_merge])

    # Remove all the intermediate layers from the project.
    QgsProject.instance().removeMapLayer(nucleo_seleccion)
    QgsProject.instance().removeMapLayer(joinat_2)
    QgsProject.instance().removeMapLayer(joinat)
    QgsProject.instance().removeMapLayer(lyrBuffer)

    # Categorised rendering of the result, one random colour per nucleus.
    resultado_merge = iface.activeLayer()
    provider = resultado_merge.dataProvider()
    # Renamed from `id` to avoid shadowing the builtin.
    campo_idx = provider.fields().indexFromName('NU_DENOMIN')
    valoresnucleo = provider.uniqueValues(campo_idx)
    categorias = []
    for valornucleo in valoresnucleo:
        symbol = QgsSymbol.defaultSymbol(resultado_merge.geometryType())
        # Fix: random.randint is inclusive on both ends, so the original
        # randint(0, 256) could emit the out-of-range component 256.
        layer_style = {
            'color': '%d, %d, %d' % (random.randint(0, 255),
                                     random.randint(0, 255),
                                     random.randint(0, 255)),
            'outline': '#000000'
        }
        symbol_layer = QgsSimpleFillSymbolLayer.create(layer_style)
        if symbol_layer is not None:
            symbol.changeSymbolLayer(0, symbol_layer)
        categorias.append(
            QgsRendererCategory(valornucleo, symbol, str(valornucleo)))
    renderer = QgsCategorizedSymbolRenderer('NU_DENOMIN', categorias)
    if renderer is not None:
        resultado_merge.setRenderer(renderer)
    resultado_merge.triggerRepaint()

    # Dwelling-distribution statistics per nucleus.
    resultado = iface.activeLayer()
    estadisticas = 'C:/V_EIEL/estadisticas.csv'
    processing.run("qgis:statisticsbycategories", {
        'CATEGORIES_FIELD_NAME': ['NU_DENOMIN', 'NU_CODIGO'],
        'INPUT': 'Viviendas clasificadas',
        'OUTPUT': 'C:/V_EIEL/estadisticas.csv',
        'VALUES_FIELD_NAME': 'numberOfDw',
    })

    # Load the statistics CSV into the tbl_resultados QTableWidget.
    with open(estadisticas, 'r') as leer_estadisticas:
        registros = leer_estadisticas.read().splitlines()
    # Fix: the original reset the row index to 0 on every record, so all
    # rows were inserted at the top (in reverse order); keep a running
    # index instead.  The first line holds the headers and is skipped.
    r = 0
    for registro in registros[1:]:
        campos = registro.split(',')
        # The code field is stored quoted; strip the quotes for display.
        sc = campos[1].lstrip('"').rstrip('"')
        self.tbl_resultados.insertRow(r)
        self.tbl_resultados.setItem(r, 0, QTableWidgetItem(str(sc)))
        self.tbl_resultados.setItem(r, 1, QTableWidgetItem(str(campos[0])))
        self.tbl_resultados.setItem(r, 2, QTableWidgetItem(str(campos[7])))
        r += 1

    # Track duplicated references (a dwelling intersecting two buffers
    # or two nuclei appears more than once in the merged layer).
    referencias = []
    referencias_dup = []
    for f in resultado_merge.getFeatures():
        idr = f.fieldNameIndex('reference')
        referencia = f.attribute(idr)
        if referencia not in referencias:
            referencias.append(referencia)
        else:
            referencias_dup.append(referencia)
    self.lst_duplicados.addItems(referencias_dup)
    total_duplicados = self.lst_duplicados.count()
    self.text_duplicados.append(str(total_duplicados))
class county_vector_aggregate(QgsProcessingAlgorithm):
    """Aggregate per-county soil-productivity (nccpi2cs) statistics per US state.

    NOTE(review): all of the work below runs in the *class body*, i.e. at import
    time, not inside processAlgorithm() — presumably intentional for a one-off
    batch run, but confirm before reusing this as a real Processing algorithm.
    Paths are hard-coded to an F:/ drive layout.
    """

    # Init: state abbreviation -> directory-name mapping.
    abbr_state_dict = {
        'WY': 'wyoming', 'WV': 'westvirginia', 'WA': 'washington',
        'WI': 'wisconsin', 'VT': 'vermont', 'VA': 'virginia',
        'UT': 'utah', 'SD': 'southdakota', 'TX': 'texas',
        'SC': 'southcarolina', 'RI': 'rhodeisland', 'PA': 'pennsylvania',
        'OR': 'oregon', 'NY': 'newyork', 'OK': 'oklahoma',
        'OH': 'ohio', 'NV': 'nevada', 'NM': 'newmexico',
        'NJ': 'newjersey', 'NH': 'newhampshire', 'NE': 'nebraska',
        'ND': 'northdakota', 'MT': 'montana', 'MO': 'missouri',
        'NC': 'northcarolina', 'MS': 'mississippi', 'MI': 'michigan',
        'MN': 'minnesota', 'ME': 'maine', 'MA': 'massachusetts',
        'KY': 'kentucky', 'LA': 'louisiana', 'KS': 'kansas',
        'IN': 'indiana', 'ID': 'idaho', 'IL': 'illinois',
        'IA': 'iowa', 'GA': 'georgia', 'FL': 'florida',
        'DE': 'delaware', 'DC': 'dc', 'CT': 'connecticut',
        'CO': 'colorado', 'CA': 'california', 'AZ': 'arizona',
        'AR': 'arkansas', 'AL': 'alabama', 'MD': 'maryland'
    }
    Processing.initialize()
    feedback = QgsProcessingFeedback()
    field = 'nccpi2cs'
    # Iterate over every state.
    for state_abbr in abbr_state_dict:
        state_name = abbr_state_dict[state_abbr]
        # Skip states that have been processed previously.
        if os.path.exists('F:/DoA Modeling/DoA/' + state_name +
                          '_soil/nccpi2cs.csv') or os.path.exists(
                'F:/DoA Modeling/DoA/' + state_name + '_soil/' + field +
                '_' + state_abbr + '.csv'):
            continue
        # NOTE(review): bare open() without try/finally or a context manager —
        # the file leaks if any county iteration raises. os.mkdir also fails if
        # temp_files/ already exists from an interrupted run.
        f = open(
            'F:/DoA Modeling/DoA/' + state_name + '_soil/' + field + '_' +
            state_abbr + '.csv', 'w')
        os.mkdir('F:/DoA Modeling/DoA/' + state_name + '_soil/temp_files/')
        # County numbers: odd values 001..297 (AREASYMBOL county codes).
        for i in range(1, 298, 2):
            county_num = str(i).zfill(3)
            # Find County: extract its polygon from the gSSURGO geodatabase.
            extract_params = {
                'EXPRESSION': '\"AREASYMBOL\" = \'' + state_abbr + county_num + '\'',
                'INPUT': 'F:/DoA Modeling/DoA/' + state_name +
                         '_soil/soils/gSSURGO_' + state_abbr +
                         '.gdb|layername=SAPOLYGON',
                #'OUTPUT': 'memory:'
                'OUTPUT': 'F:/DoA Modeling/DoA/' + state_name +
                          '_soil/temp_files/extract_' + state_abbr +
                          county_num + '.shp',
            }
            extract_res = processing.run('native:extractbyexpression',
                                         extract_params, feedback=feedback)
            QgsMessageLog.logMessage(
                'buffer_layer: ' + str(extract_res['OUTPUT']), "Sanity Check")
            # Create Buffer (distance 0 — presumably used to clean geometries;
            # TODO confirm).
            buffer_params = {
                'DISSOLVE': False,
                'DISTANCE': 0,
                'END_CAP_STYLE': 0,
                'INPUT': QgsVectorLayer(extract_res['OUTPUT'], "new_buffer"),
                'JOIN_STYLE': 0,
                'MITER_LIMIT': 2,
                #'OUTPUT': 'memory:',
                'OUTPUT': 'F:/DoA Modeling/DoA/' + state_name +
                          '_soil/temp_files/buffer_' + state_abbr +
                          county_num + '.shp',
                'SEGMENTS': 5
            }
            bf_res = processing.run('native:buffer', parameters=buffer_params,
                                    feedback=feedback)
            buffer_layer = QgsVectorLayer(bf_res['OUTPUT'], "new_clip")
            # Clip Raster: cut the state's rasterized soil map to the county.
            clip_params = {
                'ALPHA_BAND': False,
                'CROP_TO_CUTLINE': True,
                'DATA_TYPE': 5,
                'INPUT': 'F:/DoA Modeling/DoA/' + state_name +
                         '_soil/rasterized_soil_' + state_abbr + '.tif',
                'KEEP_RESOLUTION': False,
                'MASK': buffer_layer,
                'NODATA': None,
                'OPTIONS': '',
                'OUTPUT': 'F:/DoA Modeling/DoA/' + state_name +
                          '_soil/temp_files/clip_' + state_abbr +
                          county_num + '.tif',
            }
            clip_res = processing.run('gdal:cliprasterbymasklayer',
                                      parameters=clip_params,
                                      feedback=feedback)
            clip_layer = QgsRasterLayer(clip_res['OUTPUT'], "new_stats")
            # Get Statistics for band 1 of the clipped raster.
            stats_params = {
                'BAND': 1,
                'INPUT': clip_layer,
                'OUTPUT_HTML_FILE': 'F:/DoA Modeling/DoA/' + state_name +
                                    '_soil/temp_files/stats_' + state_abbr +
                                    county_num + '.html'
            }
            stats_res = processing.run('qgis:rasterlayerstatistics',
                                       parameters=stats_params)
            # Write one CSV row: "<AREASYMBOL>,<mean nccpi2cs>".
            f.write(state_abbr + county_num + ',' + str(stats_res['MEAN']) + '\n')
        f.close()
def processAlgorithm(self, parameters, context, ofeedback): """ Here is where the processing itself takes place. """ # Use a multi-step feedback, so that individual child algorithm progress reports are adjusted for the # overall progress through the model ret=dict() ret['QLSC']=parameters['qlsc'] ret['OUTPUT']='' # Algorithm will open a new project, some verifications: if QgsProject.instance().isDirty(): iface.messageBar().pushMessage(self.tr("Unsaved project:"), self.tr( "Save your works before run this algorithm"), level=Qgis.Critical) return ret actualProject=QgsProject.instance().absoluteFilePath() feedback=QgsProcessingMultiStepFeedback(2, ofeedback) results={} outputs={} with tempfile.TemporaryDirectory() as tmpdirname: project_path=os.path.join(tmpdirname, 'project.qgz') csv_path=os.path.join(tmpdirname, 'codification.csv') shutil.copyfile(parameters['project'], project_path) shutil.copyfile(os.path.join(os.path.dirname( parameters['project']), 'logo.svg'), os.path.join(tmpdirname, 'logo.svg')) alg_params={ 'QLSC': parameters['qlsc'], 'OUTPUT': csv_path, 'source':True, 'pathname':True, 'dirname':True, 'basename':True, 'layername':True, 'internaltype':True, 'displaytype':True, 'geometrytype':True, 'description':True, 'attributes':True } outputs['ConvertQlscFileToCsvFile']=processing.run( 'landsurvey:qlsc2csv', alg_params, context=context, feedback=feedback, is_child_algorithm=True) feedback.setCurrentStep(1) if feedback.isCanceled(): return {} r=self.generatePDF( project_path, parameters['layout'], parameters['outputpdf']) if actualProject != '': QgsProject.instance().read(actualProject) else: QgsProject.instance().clear() if r[0]: ret['OUTPUT']=parameters['outputpdf'] else: iface.messageBar().pushMessage( self.tr("Cannot print layout"), r[1], level=Qgis.Critical) return ret
##Select by attribute=name ##Tests=group #inputs ##INPUT_LAYER=vector ##OUTPUT_LAYER=vectorOut #outputs ##OUTPUT_LAYER=output outputVector import processing result = processing.run("qgis:selectbyattribute", {'INPUT': INPUT_LAYER, 'FIELD': "id2", 'OPERATOR': 0, 'VALUE': "2"}, context=context, feedback=feedback) result = processing.run("qgis:saveselectedfeatures", {'INPUT': result["OUTPUT"], 'OUTPUT': parameters['OUTPUT_LAYER']}, context=context, feedback=feedback) OUTPUT_LAYER = result['OUTPUT']
def processAlgorithm(self, parameters, context, model_feedback):
    """
    Process algorithm.

    QGIS2FDS export: samples a DEM (and optionally a landuse raster) over the
    requested extent, builds terrain geometry, and writes an FDS case file
    plus a texture image. Returns {'sampling_layer': ...} in results.
    """
    feedback = QgsProcessingMultiStepFeedback(8, model_feedback)
    results = {}
    outputs = {}
    project = QgsProject.instance()
    feedback.setCurrentStep(1)
    if feedback.isCanceled():
        return {}
    feedback.pushInfo("Starting...")

    # Get some parameters; each is also persisted into the project file so the
    # dialog can be re-populated on the next run.
    chid = self.parameterAsString(parameters, "chid", context)
    project.writeEntry("QGIS2FDS", "chid", parameters["chid"])
    path = self.parameterAsFile(parameters, "path", context)
    project.writeEntry("QGIS2FDS", "path", parameters["path"])
    landuse_type = self.parameterAsEnum(parameters, "landuse_type", context)
    project.writeEntry("QGIS2FDS", "landuse_type", parameters["landuse_type"])

    # Get layers in their respective crs
    dem_layer = self.parameterAsRasterLayer(parameters, "dem_layer", context)
    project.writeEntry("QGIS2FDS", "dem_layer", parameters["dem_layer"])
    if parameters["landuse_layer"] is None:  # it is optional
        landuse_layer = None
    else:
        landuse_layer = self.parameterAsRasterLayer(
            parameters, "landuse_layer", context
        )
    project.writeEntry("QGIS2FDS", "landuse_layer", parameters["landuse_layer"])

    # Prepare CRS and their transformations
    project_crs = QgsProject.instance().crs()
    project.writeEntry(
        "QGIS2FDS", "project_crs", project_crs.description()
    )  # save to check if changed
    wgs84_crs = QgsCoordinateReferenceSystem("EPSG:4326")
    dem_crs = dem_layer.crs()
    project_to_wgs84_tr = QgsCoordinateTransform(
        project_crs, wgs84_crs, QgsProject.instance()
    )

    # Get extent in WGS84 CRS
    wgs84_extent = self.parameterAsExtent(
        parameters, "extent", context, crs=wgs84_crs
    )
    project.writeEntry("QGIS2FDS", "extent", parameters["extent"])

    # Get origin in WGS84 CRS; defaults to the extent centroid when the user
    # did not supply one.
    if parameters["origin"] is not None:
        wgs84_origin = QgsPoint(
            self.parameterAsPoint(parameters, "origin", context)
        )
        wgs84_origin.transform(project_to_wgs84_tr)
        feedback.pushInfo(f"Using user origin: <{wgs84_origin}> WGS84")
    else:
        wgs84_origin = QgsPoint(
            (wgs84_extent.xMinimum() + wgs84_extent.xMaximum()) / 2.0,
            (wgs84_extent.yMinimum() + wgs84_extent.yMaximum()) / 2.0,
        )
        feedback.pushInfo(
            f"Using terrain extent centroid as origin: <{wgs84_origin}> WGS84"
        )
    project.writeEntry("QGIS2FDS", "origin", parameters["origin"])

    # Get fire origin in WGS84 CRS; defaults to the terrain origin.
    if parameters["fire_origin"] is not None:
        wgs84_fire_origin = QgsPoint(
            self.parameterAsPoint(parameters, "fire_origin", context)
        )
        wgs84_fire_origin.transform(project_to_wgs84_tr)
        feedback.pushInfo(f"Using user fire origin: <{wgs84_fire_origin}> WGS84")
    else:
        wgs84_fire_origin = QgsPoint(wgs84_origin.x(), wgs84_origin.y())
        feedback.pushInfo(
            f"Using origin as fire origin: <{wgs84_fire_origin}> WGS84"
        )
    project.writeEntry("QGIS2FDS", "fire_origin", parameters["fire_origin"])

    # Get UTM CRS from origin
    utm_epsg = utils.lonlat_to_epsg(lon=wgs84_origin.x(), lat=wgs84_origin.y())
    utm_crs = QgsCoordinateReferenceSystem(utm_epsg)
    feedback.pushInfo(f"Using UTM CRS: <{utm_crs.description()}>")

    # Get extent in UTM CRS and DEM CRS
    utm_extent = self.parameterAsExtent(parameters, "extent", context, crs=utm_crs,)
    dem_extent = self.parameterAsExtent(parameters, "extent", context, crs=dem_crs)

    # Get origin in UTM CRS
    wgs84_to_utm_tr = QgsCoordinateTransform(
        wgs84_crs, utm_crs, QgsProject.instance()
    )
    utm_origin = QgsPoint(wgs84_origin.x(), wgs84_origin.y())
    utm_origin.transform(wgs84_to_utm_tr)
    if utm_origin == wgs84_origin:  # check for QGIS bug
        raise QgsProcessingException(
            f"QGIS bug: UTM Origin <{utm_origin} and WGS84 Origin <{wgs84_origin}> cannot be the same!"
        )

    # Get fire origin in UTM CRS
    utm_fire_origin = QgsPoint(wgs84_fire_origin.x(), wgs84_fire_origin.y())
    utm_fire_origin.transform(wgs84_to_utm_tr)

    # Save texture
    feedback.pushInfo("Saving texture image...")
    utils.write_image(
        destination_crs=utm_crs,
        extent=utm_extent,
        filepath=f"{path}/{chid}_texture.png",
        imagetype="png",
    )
    feedback.setCurrentStep(2)
    if feedback.isCanceled():
        return {}

    # QGIS geographic transformations
    # Creating sampling grid in DEM crs
    feedback.pushInfo("Creating sampling grid layer from DEM...")
    xspacing = dem_layer.rasterUnitsPerPixelX()
    yspacing = dem_layer.rasterUnitsPerPixelY()
    x0, y0, x1, y1 = (  # terrain extent in DEM CRS
        dem_extent.xMinimum(),
        dem_extent.yMinimum(),
        dem_extent.xMaximum(),
        dem_extent.yMaximum(),
    )
    xd0, yd1 = (  # DEM extent in DEM CRS
        dem_layer.extent().xMinimum(),
        dem_layer.extent().yMaximum(),
    )
    # align terrain extent to DEM grid (gridding starts from top left corner)
    x0 = xd0 + round((x0 - xd0) / xspacing) * xspacing + xspacing / 2.0
    y1 = yd1 + round((y1 - yd1) / yspacing) * yspacing - yspacing / 2.0
    dem_extent = QgsRectangle(x0, y0, x1, y1)  # terrain extent in DEM CRS
    alg_params = {
        "CRS": dem_crs,
        "EXTENT": dem_extent,
        "HOVERLAY": 0,
        "HSPACING": xspacing,
        "TYPE": 0,  # Points
        "VOVERLAY": 0,
        "VSPACING": yspacing,
        "OUTPUT": QgsProcessing.TEMPORARY_OUTPUT,
    }
    outputs["CreateGrid"] = processing.run(
        "native:creategrid",
        alg_params,
        context=context,
        feedback=feedback,
        is_child_algorithm=True,
    )
    feedback.setCurrentStep(3)
    if feedback.isCanceled():
        return {}

    # QGIS geographic transformations
    # Draping Z values to sampling grid in DEM crs
    feedback.pushInfo("Setting Z values from DEM...")
    alg_params = {
        "BAND": 1,
        "INPUT": outputs["CreateGrid"]["OUTPUT"],
        "NODATA": 0,
        "RASTER": dem_layer,
        "SCALE": 1,
        "OUTPUT": QgsProcessing.TEMPORARY_OUTPUT,
    }
    outputs["DrapeSetZValueFromRaster"] = processing.run(
        "native:setzfromraster",
        alg_params,
        context=context,
        feedback=feedback,
        is_child_algorithm=True,
    )
    feedback.setCurrentStep(4)
    if feedback.isCanceled():
        return {}

    # QGIS geographic transformations
    # Reprojecting sampling grid to UTM CRS
    feedback.pushInfo("Reprojecting sampling grid layer to UTM CRS...")
    alg_params = {
        "INPUT": outputs["DrapeSetZValueFromRaster"]["OUTPUT"],
        "TARGET_CRS": utm_crs,
        "OUTPUT": QgsProcessing.TEMPORARY_OUTPUT,
    }
    outputs["ReprojectLayer"] = processing.run(
        "native:reprojectlayer",
        alg_params,
        context=context,
        feedback=feedback,
        is_child_algorithm=True,
    )
    feedback.setCurrentStep(5)
    if feedback.isCanceled():
        return {}

    # QGIS geographic transformations
    # Adding geom attributes (x, y, z) to sampling grid in UTM CRS
    feedback.pushInfo("Adding geometry attributes to sampling grid layer...")
    alg_params = {
        "CALC_METHOD": 0,  # Layer CRS
        "INPUT": outputs["ReprojectLayer"]["OUTPUT"],
        "OUTPUT": QgsProcessing.TEMPORARY_OUTPUT,
    }
    outputs["AddGeometryAttributes"] = processing.run(
        "qgis:exportaddgeometrycolumns",
        alg_params,
        context=context,
        feedback=feedback,
        is_child_algorithm=True,
    )
    feedback.setCurrentStep(6)
    if feedback.isCanceled():
        return {}

    # QGIS geographic transformations
    # Sampling landuse layer with sampling grid in UTM CRS
    if landuse_layer:
        feedback.pushInfo("Sampling landuse...")
        alg_params = {
            "COLUMN_PREFIX": "landuse",
            "INPUT": outputs["AddGeometryAttributes"]["OUTPUT"],
            "RASTERCOPY": parameters["landuse_layer"],
            "OUTPUT": parameters["sampling_layer"],
        }
        outputs["sampling_layer"] = processing.run(
            "qgis:rastersampling",
            alg_params,
            context=context,
            feedback=feedback,
            is_child_algorithm=True,
        )
        results["sampling_layer"] = outputs["sampling_layer"]["OUTPUT"]
        point_layer = context.getMapLayer(results["sampling_layer"])
    else:
        feedback.pushInfo("No landuse sampling.")
        results["sampling_layer"] = outputs["AddGeometryAttributes"]["OUTPUT"]
        point_layer = context.getMapLayer(results["sampling_layer"])
        # add fake landuse so downstream geometry code always finds the field
        point_layer.dataProvider().addAttributes(
            (QgsField("landuse", QVariant.Int),)
        )
        point_layer.updateFields()
    feedback.setCurrentStep(7)
    if feedback.isCanceled():
        return {}

    # Prepare geometry
    feedback.pushInfo("Building lists of vertices and faces with landuses...")
    verts, faces, landuses, landuses_set = geometry.get_geometry(
        layer=point_layer, utm_origin=utm_origin,
    )
    feedback.setCurrentStep(8)
    if feedback.isCanceled():
        return {}

    # Write the FDS case file
    feedback.pushInfo("Writing the FDS case file...")
    content = fds.get_case(
        dem_layer=dem_layer,
        landuse_layer=landuse_layer,
        chid=chid,
        wgs84_origin=wgs84_origin,
        utm_origin=utm_origin,
        wgs84_fire_origin=wgs84_fire_origin,
        utm_fire_origin=utm_fire_origin,
        utm_crs=utm_crs,
        verts=verts,
        faces=faces,
        landuses=landuses,
        landuse_type=landuse_type,
        landuses_set=landuses_set,
        utm_extent=utm_extent,
    )
    utils.write_file(filepath=f"{path}/{chid}.fds", content=content)
    return results
def clip_by_extent(layer, extent):
    """Clip a raster using a bounding box using processing.

    Issue https://github.com/inasafe/inasafe/issues/3183

    :param layer: The layer to clip.
    :type layer: QgsRasterLayer

    :param extent: The extent.
    :type extent: QgsRectangle

    :return: Clipped layer.
    :rtype: QgsRasterLayer

    .. versionadded:: 4.0
    """
    parameters = dict()
    # noinspection PyBroadException
    try:
        output_layer_name = quick_clip_steps['output_layer_name'] % (
            layer.keywords['layer_purpose'])
        output_raster = unique_filename(suffix='.tif', dir=temp_dir())

        # We make one pixel size buffer on the extent to cover every pixels.
        # See https://github.com/inasafe/inasafe/issues/3655
        extent = extent.buffered(
            max(layer.rasterUnitsPerPixelX(), layer.rasterUnitsPerPixelY()))

        x_min = str(extent.xMinimum())
        x_max = str(extent.xMaximum())
        y_min = str(extent.yMinimum())
        y_max = str(extent.yMaximum())
        if is_raster_y_inverted(layer):
            # The raster is Y inverted. We need to switch Y min and Y max.
            bbox = [x_min, x_max, y_max, y_min]
        else:
            # The raster is normal.
            bbox = [x_min, x_max, y_min, y_max]

        # These values are all from the processing algorithm.
        # https://github.com/qgis/QGIS/blob/master/python/plugins/processing/
        # algs/gdal/ClipByExtent.py
        # Please read the file to know these parameters.
        parameters.update({
            'INPUT': layer.source(),
            'NO_DATA': '',
            'PROJWIN': ','.join(bbox),
            'DATA_TYPE': 5,
            'COMPRESS': 4,
            'JPEGCOMPRESSION': 75,
            'ZLEVEL': 6,
            'PREDICTOR': 1,
            'TILED': False,
            'BIGTIFF': 0,
            'TFW': False,
            'EXTRA': '',
            'OUTPUT': output_raster,
        })

        initialize_processing()
        feedback = create_processing_feedback()
        context = create_processing_context(feedback=feedback)
        result = processing.run(
            "gdal:cliprasterbyextent", parameters, context=context)
        if result is None:
            raise ProcessingInstallationError

        clipped = QgsRasterLayer(result['OUTPUT'], output_layer_name)

        # We transfer keywords to the output.
        clipped.keywords = layer.keywords.copy()
        clipped.keywords['title'] = output_layer_name
        check_layer(clipped)
    except Exception as e:
        # This step clip_raster_by_extent was nice to speedup the analysis.
        # As we got an exception because the layer is invalid, we are not going
        # to stop the analysis. We will return the original raster layer.
        # It will take more processing time until we clip the vector layer.
        # Check https://github.com/inasafe/inasafe/issues/4026 why we got some
        # exceptions with this step.
        LOGGER.exception(parameters)
        LOGGER.exception(
            'Error from QGIS clip raster by extent. Please check the QGIS '
            'logs too !')
        LOGGER.info(
            'Even if we got an exception, we are continuing the analysis. The '
            'layer was not clipped.')
        LOGGER.exception(str(e))
        LOGGER.exception(get_error_message(e).to_text())
        clipped = layer
    return clipped
symbol = renderer.symbol() symbol.setColor(QColor(255, 0, 0, 255)) atbn_map.triggerRepaint() iface.layerTreeView().refreshLayerSymbology(atbn_map.id()) param = { 'INPUT': atbn_map, 'DISTANCE': 20, 'SEGMENTS': 5, 'END_CAP_STYLE': 0, 'JOIN_STYLE': 0, 'MITER_LIMIT': 2, 'DISSOLVE': False, 'OUTPUT': 'memory:' } styles = processing.run("qgis:buffer", param) autobahnSpace = QgsProject.instance().addMapLayer(styles['OUTPUT']) layer = iface.activeLayer() atbn20 = layer layer.setName("Autobahn 20") renderer = atbn20.renderer() symbol = renderer.symbol() symbol.setColor(QColor(58, 26, 52)) """Next, we add 2 more buffers, 1 100m away from the autobahn, and the other 300m away from the autobahn. These are used to illustrate that the impact of constructing an autobahn is far more than just the physical space that it uses.""" atbn_map = iface.activeLayer() param = { 'INPUT': atbn_map, 'DISTANCE': 100, 'SEGMENTS': 5, 'END_CAP_STYLE': 0,
atlas_dataset) layer_csv = QgsVectorLayer(uri, 'somename', 'delimitedtext') if not layer_csv.isValid(): print('atlas dataset failed to load') uri2 = shapefile overlay_er = QgsVectorLayer(uri2, 'somename2', 'ogr') if not overlay_er.isValid(): print('sa2 shapefile layer failed to load') if shape_file_year == "2016": params = { 'INPUT_FIELDS' : [],\ 'OUTPUT' : output_file,\ 'OVERLAY' : overlay_er,\ 'OVERLAY_FIELDS' : ['SA2_MAIN16','SA2_5DIG16','SA2_NAME16'],\ 'INPUT' : layer_csv} elif shape_file_year == "2011": params = { 'INPUT_FIELDS' : [],\ 'OUTPUT' : output_file,\ 'OVERLAY' : overlay_er,\ 'OVERLAY_FIELDS' : ['SA2_MAIN11','SA2_5DIG11','SA2_NAME11'],\ 'INPUT' : layer_csv} else: print("shape file year is wrong") processing.run("native:intersection", params) app.exitQgis()
def processAlgorithm(self, parameters, context, model_feedback):
    """Fill obstructed DEM areas by interpolating between surveyed profiles.

    Splits the obstructed profiles by obstruction id, interpolates each group
    into a raster patch, re-aligns each patch to the source DEM grid, then
    incrementally merges the patches back into the DEM.
    """
    # Use a multi-step feedback, so that individual child algorithm progress
    # reports are adjusted for the overall progress through the model.
    feedback = QgsProcessingMultiStepFeedback(3, model_feedback)
    results = {}
    outputs = {}

    # Extract by attribute:
    # keep the profiles whose id != 0 (only the obstructed profiles).
    alg_params = {
        'FIELD': 'id',
        'INPUT': parameters['profils'],
        'OPERATOR': 1,
        'VALUE': '0',
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['ExtraireParAttribut'] = processing.run(
        'native:extractbyattribute', alg_params, context=context,
        is_child_algorithm=True)

    # Split vector layer:
    # divide the layer into X layers, one per obstruction id.
    alg_params = {
        'FIELD': 'id',
        'INPUT': outputs['ExtraireParAttribut']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['SparerUneCoucheVecteur'] = processing.run(
        'qgis:splitvectorlayer', alg_params, context=context,
        is_child_algorithm=True)
    layers = outputs['SparerUneCoucheVecteur']['OUTPUT_LAYERS']
    # Now that the layer count is known, rebuild the feedback with the real
    # number of steps.
    feedback = QgsProcessingMultiStepFeedback(
        len(layers) + 4, model_feedback)
    status = 0
    to_merge = []
    for layer in layers:  # for each group of obstructed profiles
        # Interpolate the values of a series of profiles:
        # linear interpolation between the first and the last profile.
        alg_params = {
            'echantillons_nb': 20,
            'mnt': parameters['MNT'],
            'profils': layer,
            'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
        }
        outputs['InterpolerLesValeursDuneSrieDeProfils'] = processing.run(
            'script:Interpoler les valeurs d\'une série de profils',
            alg_params, context=context, feedback=feedback,
            is_child_algorithm=True)

        # TIN interpolation:
        # turn the point cloud from the previous step into a raster.
        alg_params = {
            'EXTENT': parameters['MNT'],
            # undocumented INTERPOLATION_DATA format
            'INTERPOLATION_DATA':
                outputs['InterpolerLesValeursDuneSrieDeProfils']['OUTPUT']
                + '::~::0::~::0::~::0',
            'METHOD': 0,
            'PIXEL_SIZE': parameters['rsolution'],
            'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
        }
        outputs['InterpolationTin'] = processing.run(
            'qgis:tininterpolation', alg_params, context=context,
            feedback=feedback, is_child_algorithm=True)

        # Re-align the raster:
        # the previous raster is not aligned on the original DEM pixels.
        alg_params = {
            'Rasterrecaler': outputs['InterpolationTin']['OUTPUT'],
            'Rastersource': parameters['MNT'],
            'rsolutionpixellaire': parameters['rsolution'],
            'gdal:warpreproject_1:Recalé': QgsProcessing.TEMPORARY_OUTPUT
        }
        outputs['RecalerUnRaster'] = processing.run(
            'model:Caler un raster sur une référence', alg_params,
            context=context, feedback=feedback, is_child_algorithm=True)

        # Queue the DEM patch for the final merge stage.
        to_merge.append(
            outputs['RecalerUnRaster']['gdal:warpreproject_1:Recalé'])
        status += 1
        feedback.setCurrentStep(status)
        if feedback.isCanceled():
            return {}
    status += 1
    feedback.setCurrentStep(status)
    if feedback.isCanceled():
        return {}

    # Merge a DEM patch:
    # incrementally fold the intermediate DEMs computed above into the DEM.
    mnt = parameters['MNT']
    for layer in to_merge:
        outval = QgsProcessing.TEMPORARY_OUTPUT
        if layer == to_merge[-1]:
            # When we reach the last DEM to merge, its output is the final one.
            outval = parameters['OUTPUT']
        alg_params = {
            'MNTintgrer': layer,
            'MNToriginal': mnt,
            'seuildediffrence': 1,
            'gdal:rastercalculator_1:Fusionné': outval
        }
        outputs['IntgrerUnMnt'] = processing.run(
            'model:Fusionner deux MNT', alg_params, context=context,
            feedback=feedback, is_child_algorithm=True)
        mnt = outputs['IntgrerUnMnt']['gdal:rastercalculator_1:Fusionné']
        status += 1
        feedback.setCurrentStep(status)
        if feedback.isCanceled():
            return {}
    results[
        'OUTPUT'] = mnt  #outputs['IntgrerUnMnt']['gdal:rastercalculator_1:MNT intégré']
    return results
def filter_advanced(self, expression, from_layer, field_name):
    """Apply an advanced filter to `from_layer` and propagate it to siblings.

    Builds a subset string from `expression`, combined with any active
    NRO/ZPM/ZPA/commune zone filters, then — depending on the filter mode —
    either propagates a field-based filter to all layers, or runs a spatial
    (optionally buffered) selection.

    NOTE(review): structure reconstructed from a collapsed source line —
    nesting of the filter_from / filter_geo branches should be confirmed
    against the original file.
    """
    # Choose the subset-string dialect from the provider URI
    # (database providers vs. shapefiles).
    if 'dbname' in from_layer.dataProvider().dataSourceUri():
        layer_type = 'sql'
    else:
        layer_type = 'shape'
    print(expression)
    self.field_id = self.dockwidget.comboBox_field_id.currentText()
    exp = QgsExpression(expression)
    if exp.hasParserError():
        # The expression is not valid provider SQL: fall back to selecting by
        # expression and filtering on the ids of the selected features.
        features_list = []
        from_layer.selectByExpression(expression)
        field_idx = from_layer.fields().indexFromName(self.field_id)
        if field_idx != -1:
            for feat in from_layer.selectedFeatures():
                features_list.append(feat[self.field_id])
        # NOTE(review): the comprehension variable shadows the builtin `int`.
        string_ints = [str(int) for int in features_list]
        if len(string_ints) > 0:
            self.filter = '"{}" IN (\''.format(
                self.field_id) + '\',\''.join(string_ints) + '\')'
        elif len(string_ints) == 0:
            # No match: force an empty result.
            self.filter = '"{}" is null'.format(self.field_id)
        if len(self.selected_za_nro_data) > 0 and len(
                self.selected_za_zpm_data) < 1:
            from_layer.setSubsetString('(' + self.filter_za_nro['sql'] +
                                       ') AND ' + self.filter)
        elif len(self.selected_za_zpm_data) > 0:
            from_layer.setSubsetString('(' + self.filter_za_zpm['sql'] +
                                       ') AND ' + self.filter)
        else:
            from_layer.setSubsetString(self.filter)
    else:
        # The expression is valid: use it directly, AND-ed with zone filters.
        self.filter = expression
        if len(self.selected_za_nro_data) > 0 and len(
                self.selected_za_zpm_data) < 1:
            from_layer.setSubsetString('(' + self.filter_za_nro['sql'] +
                                       ') AND ' + self.filter)
        elif len(self.selected_za_zpm_data) > 0:
            from_layer.setSubsetString('(' + self.filter_za_zpm['sql'] +
                                       ') AND ' + self.filter)
        else:
            from_layer.setSubsetString(self.filter)
    if self.filter_from == 2 and field_name is not None:
        # Field-based propagation mode: filter the source layer, collect the
        # distinct values of `field_name`, and apply a matching filter to
        # every other layer.
        if len(self.selected_za_nro_data) > 0:
            from_layer.setSubsetString('(' + self.filter_za_nro[layer_type] +
                                       ') AND ' + expression)
        elif len(self.selected_za_zpm_data) > 0:
            from_layer.setSubsetString('(' + self.filter_za_zpm[layer_type] +
                                       ') AND ' + expression)
        elif len(self.selected_za_zpa_data) > 0:
            from_layer.setSubsetString('(' + self.filter_za_zpa[layer_type] +
                                       ') AND ' + expression)
        else:
            from_layer.setSubsetString(expression)
        idx = from_layer.fields().indexFromName(field_name)
        list_items = []
        selected_items = {}
        selected_items['sql'] = []
        selected_items['shape'] = []
        self.filter_items = {}
        self.filter_items['sql'] = ''
        self.filter_items['shape'] = ''
        # Collect the distinct values; the field may hold comma-separated
        # multi-values, which are split into individual items.
        for feature in from_layer.getFeatures():
            feature_field = feature.attributes()[idx]
            if feature_field != NULL:
                if ',' in feature_field:
                    feature_array = feature_field.split(',')
                    for feat_field in feature_array:
                        if feat_field not in list_items:
                            list_items.append(feat_field)
                else:
                    if feature_field not in list_items:
                        list_items.append(feature_field)
        # Build both dialects: a regex match for SQL providers (value at end
        # of string or followed by a comma), a LIKE for shapefiles.
        for item in list_items:
            selected_items['sql'].append('"' + str(field_name) + '" ~ \'' +
                                         str(item) + '$\'' + ' OR "' +
                                         str(field_name) + '" ~ \'' +
                                         str(item) + ',\'')
            selected_items['shape'].append('"' + str(field_name) +
                                           '" LIKE \'' + str(item) + '\'')
        self.filter_items['sql'] = ' OR '.join(selected_items['sql'])
        self.filter_items['shape'] = ' OR '.join(selected_items['shape'])
        # Apply the propagated filter to every other layer of each kind.
        for layer in self.layers['sql']:
            if layer.name() != from_layer.name():
                field_idx = layer.fields().indexFromName(field_name)
                if field_idx == -1:
                    print('Le champ ' + field_name +
                          ' non présent dans la couche ' + layer.name())
                else:
                    layer.setSubsetString(self.filter_items['sql'])
        for layer in self.layers['shape']:
            if layer.name() != from_layer.name():
                field_idx = layer.fields().indexFromName(field_name)
                if field_idx == -1:
                    print('Le champ ' + field_name +
                          ' non présent dans la couche ' + layer.name())
                else:
                    layer.setSubsetString(self.filter_items['shape'])
    elif self.filter_geo == 2:
        # Spatial mode: select features of every other layer that satisfy the
        # chosen predicates against (optionally buffered) from_layer.
        with_tampon = self.dockwidget.checkBox_tampon.checkState()
        predicats = self.dockwidget.mComboBox_filter_geo.checkedItems()
        if len(self.selected_za_nro_data) > 0:
            from_layer.setSubsetString('(' + self.filter_za_nro[layer_type] +
                                       ') AND ' + self.filter)
        elif len(self.selected_za_zpm_data) > 0:
            from_layer.setSubsetString('(' + self.filter_za_zpm[layer_type] +
                                       ') AND ' + self.filter)
        else:
            from_layer.setSubsetString(self.filter)
        if with_tampon == 2:  # Qt.Checked: buffer the reference layer first
            distance = float(self.dockwidget.lineEdit_tampon.text())
            print(distance)
            outputs = {}
            alg_params_buffer = {
                'DISSOLVE': True,
                'DISTANCE': distance,
                'END_CAP_STYLE': 2,
                'INPUT': from_layer,
                'JOIN_STYLE': 2,
                'MITER_LIMIT': 2,
                'SEGMENTS': 5,
                'OUTPUT': 'TEMPORARY_OUTPUT'
            }
            outputs['alg_params_buffer'] = processing.run(
                'qgis:buffer', alg_params_buffer)
            from_layer = outputs['alg_params_buffer']['OUTPUT']
        for layer in self.layers['sql']:
            if layer.name() != from_layer.name():
                features_list = []
                alg_params_select = {
                    'INPUT': layer,
                    'INTERSECT': from_layer,
                    'METHOD': 0,
                    'PREDICATE':
                        [int(predicat[0]) for predicat in predicats]
                }
                field_idx = layer.fields().indexFromName(self.field_id)
                if field_idx != -1:
                    processing.run("qgis:selectbylocation",
                                   alg_params_select)
                    for feat in layer.selectedFeatures():
                        features_list.append(feat[self.field_id])
                    string_ints = [str(int) for int in features_list]
                    if len(string_ints) > 0:
                        self.filter = '"{}" IN (\''.format(
                            self.field_id) + '\',\''.join(
                                string_ints) + '\')'
                    elif len(string_ints) == 0:
                        self.filter = '"{}" is null'.format(self.field_id)
                    if len(self.selected_za_nro_data) > 0 and len(
                            self.selected_za_zpm_data) < 1:
                        layer.setSubsetString('(' +
                                              self.filter_za_nro['sql'] +
                                              ') AND ' + self.filter)
                    elif len(self.selected_za_zpm_data) > 0:
                        layer.setSubsetString('(' +
                                              self.filter_za_zpm['sql'] +
                                              ') AND ' + self.filter)
                    else:
                        layer.setSubsetString(self.filter)
                else:
                    print(
                        'Le champ "code_id" non présent dans la couche ' +
                        layer.name())
                layer.removeSelection()
        for layer in self.layers['shape']:
            if layer.name() != from_layer.name():
                features_list = []
                # Shapefile layers only support the intersect predicate (0).
                alg_params_select = {
                    'INPUT': layer,
                    'INTERSECT': from_layer,
                    'METHOD': 0,
                    'PREDICATE': [0]
                }
                processing.run("qgis:selectbylocation", alg_params_select)
                field_idx = layer.fields().indexFromName(self.field_id)
                if field_idx != -1:
                    for feat in layer.selectedFeatures():
                        features_list.append(feat[self.field_id])
                    string_ints = [str(int) for int in features_list]
                    if len(string_ints) > 0:
                        self.filter = '"{}" IN (\''.format(
                            self.field_id) + '\',\''.join(
                                string_ints) + '\')'
                    elif len(string_ints) == 0:
                        self.filter = '"{}" is null'.format(self.field_id)
                    if len(self.selected_za_nro_data) > 0 and len(
                            self.selected_za_zpm_data) < 1:
                        layer.setSubsetString('(' +
                                              self.filter_za_nro['shape'] +
                                              ') AND ' + self.filter)
                    elif len(self.selected_za_zpm_data) > 0:
                        layer.setSubsetString('(' +
                                              self.filter_za_zpm['shape'] +
                                              ') AND ' + self.filter)
                    else:
                        layer.setSubsetString(self.filter)
                else:
                    print(
                        'Le champ "code_id" non présent dans la couche ' +
                        layer.name())
                layer.removeSelection()
    else:
        # Plain mode: just AND the expression with the active zone filter.
        if len(self.selected_za_nro_data) > 0:
            from_layer.setSubsetString('(' + self.filter_za_nro[layer_type] +
                                       ') AND ' + expression)
        elif len(self.selected_za_zpm_data) > 0:
            # NOTE(review): uses `layer` (leaked loop variable), not
            # `from_layer` — likely a bug; confirm before changing.
            field_idx = layer.fields().indexFromName(field_name)
            if field_idx == -1:
                print('Le champ ' + field_name +
                      ' non présent dans la couche ' + layer.name())
            else:
                from_layer.setSubsetString('(' +
                                           self.filter_za_zpm[layer_type] +
                                           ') AND ' + expression)
        elif len(self.selected_za_zpa_data) > 0:
            # NOTE(review): same stale `layer` reference as above.
            field_zpm_idx = layer.fields().indexFromName('za_zpm')
            field_zpa_idx = layer.fields().indexFromName('za_zpa')
            if field_zpa_idx == -1 and field_zpm_idx != -1:
                from_layer.setSubsetString('(' +
                                           self.filter_za_zpm[layer_type] +
                                           ') AND ' + expression)
            elif field_zpm_idx == -1:
                from_layer.setSubsetString('(' +
                                           self.filter_za_nro[layer_type] +
                                           ') AND ' + expression)
            else:
                from_layer.setSubsetString('(' +
                                           self.filter_za_zpa[layer_type] +
                                           ') AND ' + expression)
        elif len(self.selected_commune_data) > 0:
            from_layer.setSubsetString(self.filter_commune[layer_type] +
                                       ' AND ' + expression)
        else:
            from_layer.setSubsetString(expression)
print( raster_temp.name()) shape_point = "/data/GIS/DATA/DATA_PHY/SORTIE_PHY/PHY3/vecteur/PHY3_POINTS_SANS_0_L93.shp" IDW_GDAL = { 'INPUT' : shape_point, 'Z_FIELD' : 'DIAM', 'POWER' : 2, 'SMOOTHING' : 0, 'RADIUS_1' : 3, 'RADIUS_2' : 3, 'ANGLE' : 0, 'MAX_POINTS' : 1, 'MIN_POINTS' : 1, 'NODATA' : -9999, 'OPTIONS' : '', 'DATA_TYPE' : 5, 'OUTPUT' : nom_raster_temp } #IDW_QGIS = {'INTERPOLATION_DATA': shape_point_attr, #'DISTANCE_COEFFICIENT':5, \ #'COLUMNS':650,'ROWS':418, #'EXTENT':'795825.2212942,796084.9317131,6674126.9668843,6674710.7083225 [EPSG:2154]', #'OUTPUT' : nom_raster_temp #} print( IDW_GDAL) retour_idw = processing.run("gdal:gridinversedistance", IDW_GDAL) print( "Fin IDW GDAL {0}".format( retour_idw)) # exemple gdal Warp # #{ 'INPUT' : '/tmp/processing_57ba05b6d2ff4b339c37717268eceb9e/46fd4e1f601d4d91945167ade6f9ad9a/OUTPUT.tif', 'MASK' : '/data/GIS/DATA/DATA_PHY/data/Contour_L93.shp', 'NODATA' : -9999, 'ALPHA_BAND' : False, 'CROP_TO_CUTLINE' : True, 'KEEP_RESOLUTION' : False, 'OPTIONS' : '', 'DATA_TYPE' : 5, 'OUTPUT' : '/tmp/processing_57ba05b6d2ff4b339c37717268eceb9e/487dbca5177c47d8871fc817e791212b/OUTPUT.tif' } # #GDAL command: #gdalwarp -ot Float32 -of GTiff -cutline /data/GIS/DATA/DATA_PHY/data/Contour_L93.shp -crop_to_cutline -dstnodata -9999.0 /tmp/processing_57ba05b6d2ff4b339c37717268eceb9e/46fd4e1f601d4d91945167ade6f9ad9a/OUTPUT.tif /tmp/processing_57ba05b6d2ff4b339c37717268eceb9e/487dbca5177c47d8871fc817e791212b/OUTPUT.tif
def processAlgorithm(self, context, feedback):
    """Compute a concave hull (alpha shape) of the input point layer.

    Legacy (pre-QGIS 3.4) processing API implementation.  Strategy:
    Delaunay-triangulate the points, discard triangles whose longest edge
    exceeds ``alpha`` times the longest edge overall, then dissolve the
    remaining triangles into the hull polygon(s).
    """
    layer = QgsProcessingUtils.mapLayerFromString(
        self.getParameterValue(ConcaveHull.INPUT), context)
    alpha = self.getParameterValue(self.ALPHA)
    holes = self.getParameterValue(self.HOLES)
    no_multigeom = self.getParameterValue(self.NO_MULTIGEOMETRY)

    # Delaunay triangulation from input point layer
    feedback.setProgressText(self.tr('Creating Delaunay triangles...'))
    delone_triangles = processing.run("qgis:delaunaytriangulation", layer,
                                      None, context=context)['OUTPUT']
    delaunay_layer = QgsProcessingUtils.mapLayerFromString(
        delone_triangles, context)

    # Get max edge length from Delaunay triangles
    feedback.setProgressText(self.tr('Computing edges max length...'))
    features = QgsProcessingUtils.getFeatures(delaunay_layer, context)
    count = QgsProcessingUtils.featureCount(delaunay_layer, context)
    if count == 0:
        raise GeoAlgorithmExecutionException(
            self.tr('No Delaunay triangles created.'))
    counter = 50. / count
    lengths = []
    edges = {}
    for feat in features:
        # First (outer) ring of the triangle polygon.
        line = feat.geometry().asPolygon()[0]
        for i in range(len(line) - 1):
            lengths.append(sqrt(line[i].sqrDist(line[i + 1])))
        # The triangle's three edges are the last three lengths appended.
        edges[feat.id()] = max(lengths[-3:])
        feedback.setProgress(feat.id() * counter)
    max_length = max(lengths)

    # Get features with longest edge longer than alpha*max_length
    feedback.setProgressText(self.tr('Removing features...'))
    counter = 50. / len(edges)
    i = 0
    ids = []
    for id, max_len in list(edges.items()):
        if max_len > alpha * max_length:
            ids.append(id)
        feedback.setProgress(50 + i * counter)
        i += 1

    # Remove features (the "too long" triangles) via an edit session.
    delaunay_layer.selectByIds(ids)
    delaunay_layer.startEditing()
    delaunay_layer.deleteSelectedFeatures()
    delaunay_layer.commitChanges()

    # Dissolve all Delaunay triangles
    feedback.setProgressText(self.tr('Dissolving Delaunay triangles...'))
    dissolved = processing.run("qgis:dissolve", delaunay_layer.id(), True,
                               None, None, context=context)['OUTPUT']
    dissolved_layer = QgsProcessingUtils.mapLayerFromString(
        dissolved, context)

    # Save result: the dissolve output contains a single (possibly multi-part)
    # hull feature.
    feedback.setProgressText(self.tr('Saving data...'))
    feat = QgsFeature()
    QgsProcessingUtils.getFeatures(dissolved_layer,
                                   context).nextFeature(feat)
    writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
        layer.fields(), QgsWkbTypes.Polygon, layer.crs(), context)
    geom = feat.geometry()
    if no_multigeom and geom.isMultipart():
        # Only singlepart geometries are allowed
        geom_list = geom.asMultiPolygon()
        for single_geom_list in geom_list:
            single_feature = QgsFeature()
            single_geom = QgsGeometry.fromPolygon(single_geom_list)
            if not holes:
                # Delete holes: deleteRing(1) removes the first interior ring;
                # loop until it reports there is nothing left to delete.
                deleted = True
                while deleted:
                    deleted = single_geom.deleteRing(1)
            single_feature.setGeometry(single_geom)
            writer.addFeature(single_feature)
    else:
        # Multipart geometries are allowed
        if not holes:
            # Delete holes
            deleted = True
            while deleted:
                deleted = geom.deleteRing(1)
        writer.addFeature(feat)
    # Releasing the writer flushes and closes the output.
    del writer
def processAlgorithm(self, parameters, context, feedback):
    """Compute a concave hull (alpha shape) of the input point layer.

    QGIS 3.x processing API implementation.  Strategy: Delaunay-triangulate
    the points, delete triangles whose longest edge exceeds ``alpha`` times
    the longest edge overall, then dissolve the remaining triangles into
    the hull polygon(s) written to the output sink.

    :raises QgsProcessingException: on invalid source/sink or when no
        Delaunay triangles could be created.
    """
    layer = self.parameterAsSource(parameters, ConcaveHull.INPUT, context)

    if layer is None:
        raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))

    alpha = self.parameterAsDouble(parameters, self.ALPHA, context)
    holes = self.parameterAsBool(parameters, self.HOLES, context)
    no_multigeom = self.parameterAsBool(parameters, self.NO_MULTIGEOMETRY, context)

    # Delaunay triangulation from input point layer
    feedback.setProgressText(QCoreApplication.translate('ConcaveHull', 'Creating Delaunay triangles…'))
    delaunay_layer = processing.run("qgis:delaunaytriangulation",
                                    {'INPUT': parameters[ConcaveHull.INPUT],
                                     'OUTPUT': 'memory:'},
                                    feedback=feedback,
                                    context=context)['OUTPUT']

    # Get max edge length from Delaunay triangles
    feedback.setProgressText(QCoreApplication.translate('ConcaveHull', 'Computing edges max length…'))
    features = delaunay_layer.getFeatures()
    count = delaunay_layer.featureCount()
    if count == 0:
        raise QgsProcessingException(self.tr('No Delaunay triangles created.'))

    counter = 50. / count
    lengths = []
    edges = {}
    for feat in features:
        if feedback.isCanceled():
            break

        # First (outer) ring of the triangle polygon.
        line = feat.geometry().asPolygon()[0]
        for i in range(len(line) - 1):
            lengths.append(sqrt(line[i].sqrDist(line[i + 1])))
        # The triangle's three edges are the last three lengths appended.
        edges[feat.id()] = max(lengths[-3:])
        feedback.setProgress(feat.id() * counter)
    max_length = max(lengths)

    # Get features with longest edge longer than alpha*max_length
    feedback.setProgressText(QCoreApplication.translate('ConcaveHull', 'Removing features…'))
    counter = 50. / len(edges)
    i = 0
    ids = []
    for id, max_len in edges.items():
        if feedback.isCanceled():
            break

        if max_len > alpha * max_length:
            ids.append(id)
        feedback.setProgress(50 + i * counter)
        i += 1

    # Remove features (the "too long" triangles) directly via the provider.
    delaunay_layer.dataProvider().deleteFeatures(ids)

    # Dissolve all Delaunay triangles
    feedback.setProgressText(QCoreApplication.translate('ConcaveHull', 'Dissolving Delaunay triangles…'))
    dissolved_layer = processing.run("native:dissolve",
                                     {'INPUT': delaunay_layer,
                                      'OUTPUT': 'memory:'},
                                     feedback=feedback,
                                     context=context)['OUTPUT']

    # Save result: the dissolve output contains a single (possibly
    # multi-part) hull feature.
    feedback.setProgressText(QCoreApplication.translate('ConcaveHull', 'Saving data…'))
    feat = QgsFeature()
    dissolved_layer.getFeatures().nextFeature(feat)

    # Not needed anymore, free up some resources
    del delaunay_layer
    del dissolved_layer

    (sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
                                           layer.fields(), QgsWkbTypes.Polygon, layer.sourceCrs())

    if sink is None:
        raise QgsProcessingException(self.invalidSinkError(parameters, self.OUTPUT))

    geom = feat.geometry()
    if no_multigeom and geom.isMultipart():
        # Only singlepart geometries are allowed
        geom_list = geom.asGeometryCollection()
        for single_geom in geom_list:
            if feedback.isCanceled():
                break

            single_feature = QgsFeature()
            if not holes:
                # Delete holes
                single_geom = single_geom.removeInteriorRings()
            single_feature.setGeometry(single_geom)
            sink.addFeature(single_feature, QgsFeatureSink.FastInsert)
    else:
        # Multipart geometries are allowed
        if not holes:
            # Delete holes
            geom = geom.removeInteriorRings()
        feat.setGeometry(geom)
        sink.addFeature(feat, QgsFeatureSink.FastInsert)
    return {self.OUTPUT: dest_id}
def processAlgorithm(self, parameters, context, feedback):
    """Import features from a vector layer into the G-Obs spatial_object table.

    Checks geometry-type compatibility between the source layer and the
    chosen target spatial layer, imports the source into a temporary
    PostGIS table, copies/upserts the rows into ``gobs.spatial_object``,
    and finally drops the temporary table.

    :raises QgsProcessingException: on SQL errors or incompatible
        geometry types.
    """
    # parameters
    # Database connection parameters
    connection_name = QgsExpressionContextUtils.globalScope().variable(
        'gobs_connection_name')
    spatiallayer = self.SPATIALLAYERS[parameters[self.SPATIALLAYER]]
    sourcelayer = self.parameterAsVectorLayer(parameters, self.SOURCELAYER,
                                              context)
    uniqueid = self.parameterAsString(parameters, self.UNIQUEID, context)
    uniquelabel = self.parameterAsString(parameters, self.UNIQUELABEL,
                                         context)
    msg = ''
    status = 1

    # Get chosen spatial layer id (combo entries are formatted "label - id").
    id_spatial_layer = spatiallayer.split('-')[-1].strip()

    feedback.pushInfo(
        tr('CHECK COMPATIBILITY BETWEEN SOURCE AND TARGET GEOMETRY TYPES'))

    # Get spatial layer geometry type
    sql = '''
        SELECT sl_geometry_type
        FROM gobs.spatial_layer
        WHERE id = {0}
        LIMIT 1
    '''.format(id_spatial_layer)
    target_type = None
    [header, data, rowCount, ok,
     error_message] = fetchDataFromSqlQuery(connection_name, sql)
    if not ok:
        status = 0
        msg = tr('* The following error has been raised'
                 ) + ' %s' % error_message
        feedback.reportError(msg)
        raise QgsProcessingException(msg)
    else:
        for line in data:
            target_type = line[0].lower()

    # Check multi type
    target_is_multi = target_type.startswith('multi')

    # Get vector layer geometry type
    # And compare it with the spatial_layer type
    source_type = QgsWkbTypes.geometryDisplayString(
        int(sourcelayer.geometryType())).lower()
    source_wtype = QgsWkbTypes.displayString(int(
        sourcelayer.wkbType())).lower()
    ok = True
    if not target_type.endswith(source_type):
        ok = False
        msg = tr(
            'Source vector layer and target spatial layer do not have compatible geometry types'
        )
        msg += ' - SOURCE: {}, TARGET: {}'.format(source_type, target_type)
        feedback.pushInfo(msg)
        raise QgsProcessingException(msg)
    source_is_multi = source_wtype.startswith('multi')
    # Cannot import multi type into single type target spatial layer
    if source_is_multi and not target_is_multi:
        ok = False
        msg = tr(
            'Cannot import a vector layer with multi geometries into a target spatial layer with a simple geometry type defined'
        )
        msg += ' - SOURCE: {}, TARGET: {}'.format(source_wtype, target_type)
        feedback.pushInfo(msg)
        raise QgsProcessingException(msg)

    # Import data to temporary table (unique name derived from the clock).
    feedback.pushInfo(tr('IMPORT SOURCE LAYER INTO TEMPORARY TABLE'))
    temp_schema = 'public'
    temp_table = 'temp_' + str(time.time()).replace('.', '')
    processing.run("qgis:importintopostgis", {
        'INPUT': parameters[self.SOURCELAYER],
        'DATABASE': connection_name,
        'SCHEMA': temp_schema,
        'TABLENAME': temp_table,
        'PRIMARY_KEY': 'gobs_id',
        'GEOMETRY_COLUMN': 'geom',
        'ENCODING': 'UTF-8',
        'OVERWRITE': True,
        'CREATEINDEX': False,
        'LOWERCASE_NAMES': False,
        'DROP_STRING_LENGTH': True,
        'FORCE_SINGLEPART': False
    }, context=context, feedback=feedback)
    feedback.pushInfo(
        tr('* Source layer has been imported into temporary table'))

    # Add ST_Multi if needed
    st_multi_left = ''
    st_multi_right = ''
    if target_is_multi:
        st_multi_left = 'ST_Multi('
        st_multi_right = ')'

    # Get target geometry type in integer
    # (ST_CollectionExtract codes: 1 = point, 2 = linestring, 3 = polygon).
    geometry_type_integer = 1
    if target_type.replace('multi', '') == 'linestring':
        geometry_type_integer = 2
    if target_type.replace('multi', '') == 'polygon':
        geometry_type_integer = 3

    # Copy data to spatial_object (upsert on unique id + spatial layer).
    feedback.pushInfo(tr('COPY IMPORTED DATA TO spatial_object'))
    sql = '''
        INSERT INTO gobs.spatial_object
        (so_unique_id, so_unique_label, geom, fk_id_spatial_layer)
        SELECT "{so_unique_id}", "{so_unique_label}",
        {st_multi_left}ST_Transform(ST_CollectionExtract(ST_MakeValid(geom),{geometry_type_integer}), 4326){st_multi_right} AS geom,
        {id_spatial_layer}
        FROM "{temp_schema}"."{temp_table}"
        -- Update line if data already exists
        ON CONFLICT ON CONSTRAINT spatial_object_so_unique_id_fk_id_spatial_layer_key
        DO UPDATE
        SET (geom, so_unique_label) = (EXCLUDED.geom, EXCLUDED.so_unique_label)
        WHERE True
        ;
    '''.format(so_unique_id=uniqueid,
               so_unique_label=uniquelabel,
               st_multi_left=st_multi_left,
               geometry_type_integer=geometry_type_integer,
               st_multi_right=st_multi_right,
               id_spatial_layer=id_spatial_layer,
               temp_schema=temp_schema,
               temp_table=temp_table)
    feedback.pushInfo(sql)
    try:
        [header, data, rowCount, ok,
         error_message] = fetchDataFromSqlQuery(connection_name, sql)
        if not ok:
            status = 0
            msg = tr('* The following error has been raised'
                     ) + ' %s' % error_message
            feedback.reportError(msg)
        else:
            status = 1
            msg = tr('* Source data has been successfully imported !')
            feedback.pushInfo(msg)
    except Exception as e:
        status = 0
        msg = tr(
            '* An unknown error occured while adding features to spatial_object table'
        )
        msg += ' ' + str(e)
    finally:
        # Remove temporary table (always, even on failure).
        feedback.pushInfo(tr('DROP TEMPORARY DATA'))
        sql = '''
            DROP TABLE IF EXISTS "%s"."%s"
            ;
        ''' % (temp_schema, temp_table)
        [header, data, rowCount, ok,
         error_message] = fetchDataFromSqlQuery(connection_name, sql)
        if ok:
            feedback.pushInfo(tr('* Temporary data has been deleted.'))
        else:
            feedback.reportError(
                tr('* An error occured while droping temporary table') +
                ' "%s"."%s"' % (temp_schema, temp_table))

    # NOTE(review): this unconditionally overwrites msg with the success
    # message even when status is 0 (the copy failed) — confirm whether the
    # error text should be preserved in the output string instead.
    msg = tr('SPATIAL LAYER HAS BEEN SUCCESSFULLY IMPORTED !')
    return {self.OUTPUT_STATUS: status, self.OUTPUT_STRING: msg}
##Select by expression=name
##Tests=group
##INPUT_LAYER=vector
##OUTPUT_LAYER=output vector

import processing

# Select the features of INPUT_LAYER matching the expression (method "1"),
# then write the selected features out to OUTPUT_LAYER.
selection = processing.run(
    "qgis:selectbyexpression",
    INPUT_LAYER,
    '"id2" = 0 and "id" > 7',
    "1",
)
processing.run(
    "qgis:saveselectedfeatures",
    selection["RESULT"],
    OUTPUT_LAYER,
)
def import_from_excel(self):
    """Run the Excel-to-LADM_COL ETL.

    Loads four sheets (agrupacion, interesado, predio, derecho) from the
    selected Excel file, loads the target LADM_COL tables from the
    database, then executes a 17-step chain of ETL-model runs and
    attribute joins to populate parties, group parties, parcels,
    administrative sources, rights and their relation tables.  Progress
    and a final per-table record-count summary are shown in the dialog.
    """
    steps = 18
    step = 0
    self.progress.setVisible(True)
    self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False)

    # Where to store the reports?
    excel_path = self.txt_excel_path.text()
    if not excel_path:
        self.show_message(
            QCoreApplication.translate(
                "DialogImportFromExcel",
                "You need to select an Excel file before continuing with the import."
            ), Qgis.Warning)
        self.progress.setVisible(False)
        self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(True)
        return
    if not os.path.exists(excel_path):
        self.show_message(
            QCoreApplication.translate(
                "DialogImportFromExcel",
                "The specified Excel file does not exist!"), Qgis.Warning)
        self.progress.setVisible(False)
        self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(True)
        return

    self.progress.setVisible(True)
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "Loading tables from the Excel file..."))

    # Now that we have the Excel file, build vrts to load its sheets appropriately
    layer_group_party = self.get_layer_from_excel_sheet(
        excel_path, 'agrupacion')
    layer_party = self.get_layer_from_excel_sheet(excel_path, 'interesado')
    layer_parcel = self.get_layer_from_excel_sheet(excel_path, 'predio')
    layer_right = self.get_layer_from_excel_sheet(excel_path, 'derecho')

    if not layer_group_party.isValid() or not layer_party.isValid(
    ) or not layer_parcel.isValid() or not layer_right.isValid():
        self.show_message(
            QCoreApplication.translate(
                "DialogImportFromExcel",
                "One of the sheets of the Excel file couldn't be loaded! Check the format again."
            ), Qgis.Warning)
        self.progress.setVisible(False)
        self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(True)
        return

    QgsProject.instance().addMapLayers(
        [layer_group_party, layer_party, layer_parcel, layer_right])

    self.txt_log.setText(
        QCoreApplication.translate("DialogImportFromExcel",
                                   "Loading LADM_COL tables..."))
    step += 1
    self.progress.setValue(step / steps * 100)

    # GET LADM LAYERS (all non-geometric tables, loaded into the project).
    res_layers = self.qgis_utils.get_layers(self._db, {
        COL_PARTY_TABLE: {
            'name': COL_PARTY_TABLE,
            'geometry': None
        },
        PARCEL_TABLE: {
            'name': PARCEL_TABLE,
            'geometry': None
        },
        RIGHT_TABLE: {
            'name': RIGHT_TABLE,
            'geometry': None
        },
        EXTFILE_TABLE: {
            'name': EXTFILE_TABLE,
            'geometry': None
        },
        RRR_SOURCE_RELATION_TABLE: {
            'name': RRR_SOURCE_RELATION_TABLE,
            'geometry': None
        },
        LA_GROUP_PARTY_TABLE: {
            'name': LA_GROUP_PARTY_TABLE,
            'geometry': None
        },
        MEMBERS_TABLE: {
            'name': MEMBERS_TABLE,
            'geometry': None
        },
        ADMINISTRATIVE_SOURCE_TABLE: {
            'name': ADMINISTRATIVE_SOURCE_TABLE,
            'geometry': None
        }
    }, load=True)

    # Abort (with a message) if any required LADM_COL table is missing.
    col_party_table = res_layers[COL_PARTY_TABLE]
    if col_party_table is None:
        self.iface.messageBar().pushMessage(
            "Asistente LADM_COL",
            QCoreApplication.translate(
                "QGISUtils",
                "Col_Party table couldn't be found... {}").format(
                    self._db.get_description()), Qgis.Warning)
        return
    parcel_table = res_layers[PARCEL_TABLE]
    if parcel_table is None:
        self.iface.messageBar().pushMessage(
            "Asistente LADM_COL",
            QCoreApplication.translate(
                "QGISUtils",
                "Parcel table couldn't be found... {}").format(
                    self._db.get_description()), Qgis.Warning)
        return
    right_table = res_layers[RIGHT_TABLE]
    if right_table is None:
        self.iface.messageBar().pushMessage(
            "Asistente LADM_COL",
            QCoreApplication.translate(
                "QGISUtils",
                "Right table couldn't be found... {}").format(
                    self._db.get_description()), Qgis.Warning)
        return
    extfile_table = res_layers[EXTFILE_TABLE]
    if extfile_table is None:
        self.iface.messageBar().pushMessage(
            "Asistente LADM_COL",
            QCoreApplication.translate(
                "QGISUtils",
                "EXT_FILE table couldn't be found... {}").format(
                    self._db.get_description()), Qgis.Warning)
        return
    rrr_source_table = res_layers[RRR_SOURCE_RELATION_TABLE]
    if rrr_source_table is None:
        self.iface.messageBar().pushMessage(
            "Asistente LADM_COL",
            QCoreApplication.translate(
                "QGISUtils",
                "RRR-SOURCE table couldn't be found... {}").format(
                    self._db.get_description()), Qgis.Warning)
        return
    group_party_table = res_layers[LA_GROUP_PARTY_TABLE]
    if group_party_table is None:
        self.iface.messageBar().pushMessage(
            "Asistente LADM_COL",
            QCoreApplication.translate(
                "QGISUtils",
                "Group party table couldn't be found... {}").format(
                    self._db.get_description()), Qgis.Warning)
        return
    members_table = res_layers[MEMBERS_TABLE]
    if members_table is None:
        self.iface.messageBar().pushMessage(
            "Asistente LADM_COL",
            QCoreApplication.translate(
                "QGISUtils",
                "Members table couldn't be found... {}").format(
                    self._db.get_description()), Qgis.Warning)
        return
    administrative_source_table = res_layers[ADMINISTRATIVE_SOURCE_TABLE]
    if administrative_source_table is None:
        self.iface.messageBar().pushMessage(
            "Asistente LADM_COL",
            QCoreApplication.translate(
                "QGISUtils",
                "Administrative Source table couldn't be found... {}").
            format(self._db.get_description()), Qgis.Warning)
        return

    # Get feature counts to compare after the ETL and know how many records were imported to each ladm_col table
    ladm_tables = [
        parcel_table, col_party_table, right_table,
        administrative_source_table, rrr_source_table, group_party_table,
        members_table
    ]
    ladm_tables_feature_count_before = {
        t.name(): t.featureCount()
        for t in ladm_tables
    }

    # Run the ETL
    # 1
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 1): Load col_interesado data..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    processing.run(
        "model:ETL-model", {
            'INPUT': layer_party,
            'mapping': [{
                'expression': '"numero de documento"',
                'length': 12,
                'name': 'documento_identidad',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"tipo documento"',
                'length': 255,
                'name': 'tipo_documento',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"organo_emisor"',
                'length': 20,
                'name': 'organo_emisor',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"fecha_emision"',
                'length': -1,
                'name': 'fecha_emision',
                'precision': -1,
                'type': 14
            }, {
                'expression': '"apellido1"',
                'length': 100,
                'name': 'primer_apellido',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"nombre1"',
                'length': 100,
                'name': 'primer_nombre',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"apellido2"',
                'length': 100,
                'name': 'segundo_apellido',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"nombre2"',
                'length': 100,
                'name': 'segundo_nombre',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"razon_social"',
                'length': 250,
                'name': 'razon_social',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"sexo persona"',
                'length': 255,
                'name': 'genero',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"tipo_interesado_juridico"',
                'length': 255,
                'name': 'tipo_interesado_juridico',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"nombre"',
                'length': 255,
                'name': 'nombre',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"tipo persona"',
                'length': 255,
                'name': 'tipo',
                'precision': -1,
                'type': 10
            }, {
                'expression': "'ANT_COL_INTERESADO'",
                'length': 255,
                'name': 'p_espacio_de_nombres',
                'precision': -1,
                'type': 10
            }, {
                'expression': '$id',
                'length': 255,
                'name': 'p_local_id',
                'precision': -1,
                'type': 10
            }, {
                'expression': 'now()',
                'length': -1,
                'name': 'comienzo_vida_util_version',
                'precision': -1,
                'type': 16
            }, {
                'expression': '"fin_vida_util_version"',
                'length': -1,
                'name': 'fin_vida_util_version',
                'precision': -1,
                'type': 16
            }],
            'output': col_party_table
        })

    # 2
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 2): Define group parties..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    # One record per distinct group id -> candidate group parties.
    pre_group_party_layer = processing.run(
        "qgis:statisticsbycategories", {
            'CATEGORIES_FIELD_NAME': 'id agrupación',
            'INPUT': layer_group_party,
            'OUTPUT': 'memory:',
            'VALUES_FIELD_NAME': None
        })['OUTPUT']

    # 3
    self.txt_log.setText(
        QCoreApplication.translate("DialogImportFromExcel",
                                   "ETL (step 3): Load group parties..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    processing.run(
        "model:ETL-model", {
            'INPUT': pre_group_party_layer,
            'mapping': [{
                'expression': "'Grupo_Civil'",
                'length': 255,
                'name': 'ai_tipo',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"nombre"',
                'length': 255,
                'name': 'nombre',
                'precision': -1,
                'type': 10
            }, {
                'expression': "'Otro'",
                'length': 255,
                'name': 'tipo',
                'precision': -1,
                'type': 10
            }, {
                'expression': "'ANT_Agrupacion_Interesados'",
                'length': 255,
                'name': 'p_espacio_de_nombres',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"id agrupación"',
                'length': 255,
                'name': 'p_local_id',
                'precision': -1,
                'type': 10
            }, {
                'expression': 'now()',
                'length': -1,
                'name': 'comienzo_vida_util_version',
                'precision': -1,
                'type': 16
            }, {
                'expression': '"fin_vida_util_version"',
                'length': -1,
                'name': 'fin_vida_util_version',
                'precision': -1,
                'type': 16
            }],
            'output': group_party_table
        })

    # 4
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 4): Join group parties t_id..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    group_party_tid_layer = processing.run(
        "native:joinattributestable", {
            'DISCARD_NONMATCHING': False,
            'FIELD': 'id agrupación',
            'FIELDS_TO_COPY': 't_id',
            'FIELD_2': 'p_local_id',
            'INPUT': layer_group_party,
            'INPUT_2': group_party_table,
            'METHOD': 1,
            'OUTPUT': 'memory:',
            'PREFIX': 'agrupacion_'
        })['OUTPUT']

    # 5
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 5): Join group parties with parties..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    group_party_party_tid_layer = processing.run(
        "native:joinattributestable", {
            'DISCARD_NONMATCHING': False,
            'FIELD': 'numero de documento',
            'FIELDS_TO_COPY': 't_id',
            'FIELD_2': 'documento_identidad',
            'INPUT': group_party_tid_layer,
            'INPUT_2': col_party_table,
            'METHOD': 1,
            'OUTPUT': 'memory:',
            'PREFIX': 'interesado_'
        })['OUTPUT']

    # 6
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 6): Load group party members..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    processing.run(
        "model:ETL-model", {
            'INPUT': group_party_party_tid_layer,
            'mapping': [{
                'expression': '"interesado_t_id"',
                'length': -1,
                'name': 'interesados_col_interesado',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"agrupacion_t_id"',
                'length': -1,
                'name': 'agrupacion',
                'precision': 0,
                'type': 4
            }],
            'output': members_table
        })

    # 7
    self.txt_log.setText(
        QCoreApplication.translate("DialogImportFromExcel",
                                   "ETL (step 7): Load parcels..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    processing.run(
        "model:ETL-model", {
            'INPUT': layer_parcel,
            'mapping': [{
                'expression': '"departamento"',
                'length': 2,
                'name': 'departamento',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"municipio"',
                'length': 3,
                'name': 'municipio',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"zona"',
                'length': 2,
                'name': 'zona',
                'precision': -1,
                'type': 10
            }, {
                'expression': '$id',
                'length': 20,
                'name': 'nupre',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"matricula predio"',
                'length': 80,
                'name': 'fmi',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"numero predial nuevo"',
                'length': 30,
                'name': 'numero_predial',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"numero predial viejo"',
                'length': 20,
                'name': 'numero_predial_anterior',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"avaluo"',
                'length': 16,
                'name': 'avaluo_predio',
                'precision': 1,
                'type': 6
            }, {
                'expression': '"copropiedad"',
                'length': -1,
                'name': 'copropiedad',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"nombre predio"',
                'length': 255,
                'name': 'nombre',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"tipo predio"',
                'length': 255,
                'name': 'tipo',
                'precision': -1,
                'type': 10
            }, {
                'expression': "'ANT_PREDIO'",
                'length': 255,
                'name': 'u_espacio_de_nombres',
                'precision': -1,
                'type': 10
            }, {
                'expression': '$id',
                'length': 255,
                'name': 'u_local_id',
                'precision': -1,
                'type': 10
            }, {
                'expression': 'now()',
                'length': -1,
                'name': 'comienzo_vida_util_version',
                'precision': -1,
                'type': 16
            }],
            'output': parcel_table
        })

    # 8
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 8): Concatenate Rights and Sources fields..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    # Synthetic key ('concat_') used later to join sources/rights back by row.
    concat_right_source_layer = processing.run(
        "qgis:fieldcalculator", {
            'FIELD_LENGTH': 100,
            'FIELD_NAME': 'concat_',
            'FIELD_PRECISION': 3,
            'FIELD_TYPE': 2,
            'FORMULA':
            'concat( \"número documento interesado\" , \"agrupacion\" , \"numero predial nuevo\" , \"tipo de fuente\" , \"Descricpión de la fuente\")',
            'INPUT': layer_right,
            'NEW_FIELD': True,
            'OUTPUT': 'memory:'
        })['OUTPUT']

    # 9
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 9): Load Administrative Sources..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    processing.run(
        "model:ETL-model", {
            'INPUT': concat_right_source_layer,
            'mapping': [{
                'expression': '"descripcion de la fuente"',
                'length': 255,
                'name': 'texto',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"tipo de fuente"',
                'length': 255,
                'name': 'tipo',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"codigo_registral_transaccion"',
                'length': 5,
                'name': 'codigo_registral_transaccion',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"nombre"',
                'length': 50,
                'name': 'nombre',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"fecha_aceptacion"',
                'length': -1,
                'name': 'fecha_aceptacion',
                'precision': -1,
                'type': 16
            }, {
                'expression': '"estado_disponibilidad de la fuente"',
                'length': 255,
                'name': 'estado_disponibilidad',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"sello_inicio_validez"',
                'length': -1,
                'name': 'sello_inicio_validez',
                'precision': -1,
                'type': 16
            }, {
                'expression': '"tipo_principal"',
                'length': 255,
                'name': 'tipo_principal',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"fecha_grabacion"',
                'length': -1,
                'name': 'fecha_grabacion',
                'precision': -1,
                'type': 16
            }, {
                'expression': '"fecha_entrega"',
                'length': -1,
                'name': 'fecha_entrega',
                'precision': -1,
                'type': 16
            }, {
                'expression': "'ANT_COLFUENTEADMINISTRATIVA'",
                'length': 255,
                'name': 's_espacio_de_nombres',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"concat_"',
                'length': 255,
                'name': 's_local_id',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"oficialidad"',
                'length': -1,
                'name': 'oficialidad',
                'precision': -1,
                'type': 1
            }],
            'output': administrative_source_table
        })

    # 10
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 10): Join concatenate source to administrative source t_id..."
        ))
    step += 1
    self.progress.setValue(step / steps * 100)
    source_tid_layer = processing.run(
        "native:joinattributestable", {
            'DISCARD_NONMATCHING': False,
            'FIELD': 'concat_',
            'FIELDS_TO_COPY': 't_id',
            'FIELD_2': 's_local_id',
            'INPUT': concat_right_source_layer,
            'INPUT_2': administrative_source_table,
            'METHOD': 1,
            'OUTPUT': 'memory:',
            'PREFIX': 'fuente_'
        })['OUTPUT']

    # 11
    self.txt_log.setText(
        QCoreApplication.translate("DialogImportFromExcel",
                                   "ETL (step 11): Load extarchivo..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    processing.run(
        "model:ETL-model", {
            'INPUT': source_tid_layer,
            'mapping': [{
                'expression': '"fecha_aceptacion"',
                'length': -1,
                'name': 'fecha_aceptacion',
                'precision': -1,
                'type': 14
            }, {
                'expression': '"Ruta de Almacenamiento de la fuente"',
                'length': 255,
                'name': 'datos',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"extraccion"',
                'length': -1,
                'name': 'extraccion',
                'precision': -1,
                'type': 14
            }, {
                'expression': '"fecha_grabacion"',
                'length': -1,
                'name': 'fecha_grabacion',
                'precision': -1,
                'type': 14
            }, {
                'expression': '"fecha_entrega"',
                'length': -1,
                'name': 'fecha_entrega',
                'precision': -1,
                'type': 14
            }, {
                'expression': "'ANT_EXTARCHIVO'",
                'length': 255,
                'name': 's_espacio_de_nombres',
                'precision': -1,
                'type': 10
            }, {
                'expression': '$id',
                'length': 255,
                'name': 's_local_id',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"fuente_t_id"',
                'length': -1,
                'name': 'col_fuenteadminstrtiva_ext_archivo_id',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"col_fuenteespacial_ext_archivo_id"',
                'length': -1,
                'name': 'col_fuenteespacial_ext_archivo_id',
                'precision': 0,
                'type': 4
            }],
            # NOTE(review): every other step outputs to a layer object
            # (e.g. extfile_table) but this one passes the EXTFILE_TABLE
            # name constant — confirm this is intentional.
            'output': EXTFILE_TABLE
        })

    # 12
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 12): Join source and party t_id..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    source_party_tid_layer = processing.run(
        "native:joinattributestable", {
            'DISCARD_NONMATCHING': False,
            'FIELD': 'número documento Interesado',
            'FIELDS_TO_COPY': 't_id',
            'FIELD_2': 'documento_identidad',
            'INPUT': source_tid_layer,
            'INPUT_2': col_party_table,
            'METHOD': 1,
            'OUTPUT': 'memory:',
            'PREFIX': 'interesado_'
        })['OUTPUT']

    # 13
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 13): Join source, party, group party t_id..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    source_party_group_tid_layer = processing.run(
        "native:joinattributestable", {
            'DISCARD_NONMATCHING': False,
            'FIELD': 'agrupación',
            'FIELDS_TO_COPY': 't_id',
            'FIELD_2': 'p_local_id',
            'INPUT': source_party_tid_layer,
            'INPUT_2': group_party_table,
            'METHOD': 1,
            'OUTPUT': 'memory:',
            'PREFIX': 'agrupacion_'
        })['OUTPUT']

    # 14
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 14): Join source, party, group party, parcel t_id..."
        ))
    step += 1
    self.progress.setValue(step / steps * 100)
    source_party_group_parcel_tid_layer = processing.run(
        "native:joinattributestable", {
            'DISCARD_NONMATCHING': False,
            'FIELD': 'numero predial nuevo',
            'FIELDS_TO_COPY': 't_id',
            'FIELD_2': 'numero_predial',
            'INPUT': source_party_group_tid_layer,
            'INPUT_2': parcel_table,
            'METHOD': 1,
            'OUTPUT': 'memory:',
            'PREFIX': 'predio_'
        })['OUTPUT']

    # 15
    self.txt_log.setText(
        QCoreApplication.translate("DialogImportFromExcel",
                                   "ETL (step 15): Load Rights..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    processing.run(
        "model:ETL-model", {
            'INPUT': source_party_group_parcel_tid_layer,
            'mapping': [{
                'expression': '"tipo"',
                'length': 255,
                'name': 'tipo',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"codigo_registral_derecho"',
                'length': 5,
                'name': 'codigo_registral_derecho',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"descripcion"',
                'length': 255,
                'name': 'descripcion',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"comprobacion_comparte"',
                'length': -1,
                'name': 'comprobacion_comparte',
                'precision': -1,
                'type': 1
            }, {
                'expression': '"uso_efectivo"',
                'length': 255,
                'name': 'uso_efectivo',
                'precision': -1,
                'type': 10
            }, {
                'expression': "'ANT_Col_Derecho'",
                'length': 255,
                'name': 'r_espacio_de_nombres',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"concat_"',
                'length': 255,
                'name': 'r_local_id',
                'precision': -1,
                'type': 10
            }, {
                'expression': '"agrupacion_t_id"',
                'length': -1,
                'name': 'interesado_la_agrupacion_interesados',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"interesado_t_id"',
                'length': -1,
                'name': 'interesado_col_interesado',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"unidad_la_baunit"',
                'length': -1,
                'name': 'unidad_la_baunit',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"predio_t_id"',
                'length': -1,
                'name': 'unidad_predio',
                'precision': 0,
                'type': 4
            }, {
                'expression': 'now()',
                'length': -1,
                'name': 'comienzo_vida_util_version',
                'precision': -1,
                'type': 16
            }, {
                'expression': '"fin_vida_util_version"',
                'length': -1,
                'name': 'fin_vida_util_version',
                'precision': -1,
                'type': 16
            }],
            'output': right_table
        })

    # 16
    self.txt_log.setText(
        QCoreApplication.translate(
            "DialogImportFromExcel",
            "ETL (step 16): Join source, party, group party, parcel, right t_id..."
        ))
    step += 1
    self.progress.setValue(step / steps * 100)
    source_party_group_parcel_right_tid_layer = processing.run(
        "native:joinattributestable", {
            'DISCARD_NONMATCHING': False,
            'FIELD': 'concat_',
            'FIELDS_TO_COPY': 't_id',
            'FIELD_2': 'r_local_id',
            'INPUT': source_party_group_parcel_tid_layer,
            'INPUT_2': right_table,
            'METHOD': 1,
            'OUTPUT': 'memory:',
            'PREFIX': 'derecho_'
        })['OUTPUT']

    # 17
    self.txt_log.setText(
        QCoreApplication.translate("DialogImportFromExcel",
                                   "ETL (step 17): Load rrrfuente..."))
    step += 1
    self.progress.setValue(step / steps * 100)
    processing.run(
        "model:ETL-model", {
            'INPUT': source_party_group_parcel_right_tid_layer,
            'mapping': [{
                'expression': '"fuente_t_id"',
                'length': -1,
                'name': 'rfuente',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"rrr_col_responsabilidad"',
                'length': -1,
                'name': 'rrr_col_responsabilidad',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"derecho_t_id"',
                'length': -1,
                'name': 'rrr_col_derecho',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"rrr_col_restriccion"',
                'length': -1,
                'name': 'rrr_col_restriccion',
                'precision': 0,
                'type': 4
            }, {
                'expression': '"rrr_col_hipoteca"',
                'length': -1,
                'name': 'rrr_col_hipoteca',
                'precision': 0,
                'type': 4
            }],
            'output': rrr_source_table
        })

    # Print summary getting feature count in involved LADM_COL tables...
    summary = """<html><head/><body><p>"""
    summary += QCoreApplication.translate("DialogImportFromExcel",
                                          "Import done!!!<br/>")
    for table in ladm_tables:
        summary += QCoreApplication.translate(
            "DialogImportFromExcel",
            "<br/><b>{count}</b> records loaded into table <b>{table}</b>"
        ).format(count=table.featureCount() -
                 ladm_tables_feature_count_before[table.name()],
                 table=table.name())
    summary += """</body></html>"""
    self.txt_log.setText(summary)
    self.qgis_utils.message_with_duration_emitted.emit(
        QCoreApplication.translate(
            "QGISUtils",
            "Data successfully imported to LADM_COL from intermediate structure (Excel file: '{}')!!!"
        ).format(excel_path), Qgis.Success, 0)
def computeNdvi(dlg, conf, dir_raster_src, dir_dest, rasterName, ndviName,
                extension_input_raster):
    """Compute an NDVI raster from a multiband input image.

    NDVI = (NIR - Red) / (NIR + Red). The computation is delegated either
    to the OTB ``BandMath`` algorithm or to the QGIS raster calculator,
    depending on the engine radio button selected in *conf*.

    :param dlg: plugin dialog, used for user feedback (messInfo/messErreur).
    :param conf: configuration object; provides ``channelOrderDic`` (band
        name -> band number) and the ``rbOTB`` engine radio button.
    :param dir_raster_src: directory containing the source raster.
    :param dir_dest: destination directory for the NDVI raster.
    :param rasterName: base name of the source raster (also its layer name
        in the QGIS layer list).
    :param ndviName: base name of the NDVI output.
    :param extension_input_raster: file extension of the source raster.
    :return: the loaded NDVI ``QgsRasterLayer``, or ``None`` on any failure.
    """
    li = layerList()
    messInfo(dlg, "Calcul du NDVI.")
    messInfo(dlg, "")
    rasterPath = dir_raster_src + os.sep + rasterName + extension_input_raster
    ndviPath = dir_dest + os.sep + ndviName + EXT_RASTER

    # NDVI requires a multiband image: Red and NIR must both be present.
    cols, rows, bands = getGeometryImage(rasterPath)
    if bands < 4:
        QMessageBox.information(
            None, "Attention !!!", ndviPath +
            " ne peut pas être créé. L'image raster d'entrée n'a pas un nombre de bande suffisant.",
            QMessageBox.Ok, QMessageBox.NoButton)
        messErreur(dlg, ndviPath + " ne peut pas être créé.")
        return None

    # Resolve the Red and NIR band numbers from the configured channel
    # order. 0 means "not configured" and aborts below.
    # (Was: unused alias `d = conf.channelOrderDic` and redundant `.keys()`.)
    num_channel_red = 0
    num_channel_nir = 0
    if "Red" in conf.channelOrderDic:
        num_channel_red = int(conf.channelOrderDic["Red"])
    if "NIR" in conf.channelOrderDic:
        num_channel_nir = int(conf.channelOrderDic["NIR"])
    if num_channel_red == 0 or num_channel_nir == 0:
        QMessageBox.information(
            None, "Attention !!!", ndviPath +
            " ne peut pas être créé. NDVI needs Red and NIR channels to be computed).",
            QMessageBox.Ok, QMessageBox.NoButton)
        messErreur(dlg, ndviPath + " ne peut pas être créé.")
        return None

    # Remove a stale output file first; both engines write to ndviPath.
    if os.path.exists(ndviPath):
        try:
            os.remove(ndviPath)
        except OSError:
            # Narrowed from a bare `except:`; only file-system errors are
            # expected here (locked file, missing write permission).
            QMessageBox.information(
                None, "Attention !!!", ndviPath +
                " ne peut pas être effacé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
                QMessageBox.Ok, QMessageBox.NoButton)
            messErreur(dlg, ndviPath + " ne peut pas être effacé.")
            return None

    if conf.rbOTB.isChecked():
        # --- OTB raster calculator (otb:BandMath) ---
        try:
            expression = '(im1b%s - im1b%s)/(im1b%s + im1b%s)' % (
                str(num_channel_nir), str(num_channel_red),
                str(num_channel_nir), str(num_channel_red))
            parameters = {
                "il": [rasterPath],
                "out": ndviPath,
                "exp": expression,
                "outputpixeltype": 2,
                "ram": 128
            }
            processing.run('otb:BandMath', parameters)
        except Exception:
            messErreur(dlg, "Erreur de traitement sur otb:BandMath ndvi.")
            return None
    else:
        # --- QGIS raster calculator ---
        entries = []
        raster = li[rasterName]
        extent = raster.extent()
        height = raster.height()
        width = raster.width()

        b_red = QgsRasterCalculatorEntry()
        b_red.ref = 'b@%s' % (str(num_channel_red))
        b_red.raster = raster
        b_red.bandNumber = num_channel_red
        entries.append(b_red)

        b_nir = QgsRasterCalculatorEntry()
        b_nir.ref = 'b@%s' % (str(num_channel_nir))
        b_nir.raster = raster
        b_nir.bandNumber = num_channel_nir
        entries.append(b_nir)

        expression = '(b@%s - b@%s)/(b@%s + b@%s)' % (
            str(num_channel_nir), str(num_channel_red),
            str(num_channel_nir), str(num_channel_red))
        calc = QgsRasterCalculator(expression, ndviPath, FORMAT_IMA, extent,
                                   width, height, entries)
        ret = calc.processCalculation()
        if ret != 0:
            QMessageBox.information(
                None, "Attention !!!",
                " Erreur d'exécution, cela peut être du à une insuffisance mémoire, image trop volumineuse.",
                QMessageBox.Ok, QMessageBox.NoButton)
            messErreur(dlg, "Erreur lors du lancement de QgsRasterCalculator.")
            return None

    # Load and validate the result.
    if os.path.exists(ndviPath):
        ndvi = QgsRasterLayer(ndviPath, ndviName)
    else:
        QMessageBox.information(
            None, "Attention !!!", ndviPath +
            " n'a pas été créé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
            QMessageBox.Ok, QMessageBox.NoButton)
        messErreur(dlg, ndviPath + " n'a pas été créé.")
        return None
    if not ndvi.isValid():
        messErreur(dlg, ndviPath + " ne peut pas être chargé.")
        return None
    return ndvi
def processAlgorithm(self, parameters, context, feedback):
    """Build a concave hull ("alpha shape") of the input point layer.

    Strategy: Delaunay-triangulate the points, drop every triangle whose
    longest edge exceeds ``alpha * max_edge_length``, then dissolve the
    remaining triangles into the hull polygon(s).
    """
    layer = self.parameterAsSource(parameters, ConcaveHull.INPUT, context)
    if layer is None:
        raise QgsProcessingException(
            self.invalidSourceError(parameters, self.INPUT))
    alpha = self.parameterAsDouble(parameters, self.ALPHA, context)
    holes = self.parameterAsBool(parameters, self.HOLES, context)
    no_multigeom = self.parameterAsBool(parameters, self.NO_MULTIGEOMETRY,
                                        context)

    # Delaunay triangulation from input point layer
    feedback.setProgressText(
        QCoreApplication.translate('ConcaveHull',
                                   'Creating Delaunay triangles…'))
    delaunay_layer = processing.run("qgis:delaunaytriangulation", {
        'INPUT': parameters[ConcaveHull.INPUT],
        'OUTPUT': 'memory:'
    }, feedback=feedback, context=context)['OUTPUT']

    # Get max edge length from Delaunay triangles
    feedback.setProgressText(
        QCoreApplication.translate('ConcaveHull',
                                   'Computing edges max length…'))
    features = delaunay_layer.getFeatures()
    count = delaunay_layer.featureCount()
    if count == 0:
        raise QgsProcessingException(
            self.tr('No Delaunay triangles created.'))

    counter = 50. / count
    lengths = []
    edges = {}
    for feat in features:
        if feedback.isCanceled():
            break
        line = feat.geometry().asPolygon()[0]
        for i in range(len(line) - 1):
            lengths.append(sqrt(line[i].sqrDist(line[i + 1])))
        # Each triangle ring contributes exactly three edges, so its
        # longest edge is the max of the last three appended lengths.
        edges[feat.id()] = max(lengths[-3:])
        feedback.setProgress(feat.id() * counter)

    # Fix: if the user cancelled before any triangle was read, `edges` and
    # `lengths` are empty and max() / the division below would raise
    # (ValueError / ZeroDivisionError). Bail out cleanly instead.
    if not edges:
        return {}
    max_length = max(lengths)

    # Get features with longest edge longer than alpha*max_length
    feedback.setProgressText(
        QCoreApplication.translate('ConcaveHull', 'Removing features…'))
    counter = 50. / len(edges)
    i = 0
    ids = []
    for id, max_len in list(edges.items()):
        if feedback.isCanceled():
            break
        if max_len > alpha * max_length:
            ids.append(id)
        feedback.setProgress(50 + i * counter)
        i += 1

    # Remove features
    delaunay_layer.dataProvider().deleteFeatures(ids)

    # Dissolve all Delaunay triangles
    feedback.setProgressText(
        QCoreApplication.translate('ConcaveHull',
                                   'Dissolving Delaunay triangles…'))
    dissolved_layer = processing.run("native:dissolve", {
        'INPUT': delaunay_layer,
        'OUTPUT': 'memory:'
    }, feedback=feedback, context=context)['OUTPUT']

    # Save result
    feedback.setProgressText(
        QCoreApplication.translate('ConcaveHull', 'Saving data…'))
    feat = QgsFeature()
    dissolved_layer.getFeatures().nextFeature(feat)

    # Not needed anymore, free up some resources
    del delaunay_layer
    del dissolved_layer

    (sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
                                           layer.fields(),
                                           QgsWkbTypes.Polygon,
                                           layer.sourceCrs())
    if sink is None:
        raise QgsProcessingException(
            self.invalidSinkError(parameters, self.OUTPUT))

    geom = feat.geometry()
    if no_multigeom and geom.isMultipart():
        # Only singlepart geometries are allowed
        geom_list = geom.asGeometryCollection()
        for single_geom in geom_list:
            if feedback.isCanceled():
                break
            single_feature = QgsFeature()
            if not holes:
                # Delete holes
                single_geom = single_geom.removeInteriorRings()
            single_feature.setGeometry(single_geom)
            sink.addFeature(single_feature, QgsFeatureSink.FastInsert)
    else:
        # Multipart geometries are allowed
        if not holes:
            # Delete holes
            geom = geom.removeInteriorRings()
        feat.setGeometry(geom)
        sink.addFeature(feat, QgsFeatureSink.FastInsert)
    return {self.OUTPUT: dest_id}
def exportRaster(layer, count, layersFolder, feedback, iface, matchCRS):
    """Export a raster *layer* as a styled PNG into *layersFolder*.

    The layer's renderer is first baked into a temporary GeoTIFF; unless
    *matchCRS* is set and the layer already matches the project CRS, the
    raster is warped to EPSG:3857 before being translated to PNG. Every
    processing step falls back to a plain file copy on failure, so a best-
    effort image is always produced.

    :param layer: the QgsRasterLayer to export.
    :param count: integer used to make the output file name unique.
    :param layersFolder: destination folder for the PNG.
    :param feedback: object with a showFeedback() progress method.
    :param iface: QGIS interface (used for the project CRS).
    :param matchCRS: when True and the layer CRS equals the project CRS,
        skip the reprojection to EPSG:3857.
    """
    feedback.showFeedback("Exporting %s to PNG..." % layer.name())
    name_ts = (safeName(layer.name()) + unicode(count) +
               unicode(int(time.time())))

    # We need to create a new file to export style
    piped_file = os.path.join(tempfile.gettempdir(), name_ts + '_piped.tif')
    piped_extent = layer.extent()
    # NOTE(review): width/height look swapped here (width = layer.height()).
    # Only piped_height is actually passed to writeRaster below, so this is
    # kept as-is; confirm against QgsRasterFileWriter.writeRaster docs
    # before changing.
    piped_width = layer.height()
    piped_height = layer.width()
    piped_crs = layer.crs()
    piped_renderer = layer.renderer()
    piped_provider = layer.dataProvider()
    pipe = QgsRasterPipe()
    pipe.set(piped_provider.clone())
    pipe.set(piped_renderer.clone())
    file_writer = QgsRasterFileWriter(piped_file)
    file_writer.writeRaster(pipe, piped_height, -1, piped_extent, piped_crs)

    # Export layer as PNG
    out_raster = os.path.join(layersFolder,
                              safeName(layer.name()) + "_" +
                              unicode(count) + ".png")
    projectCRS = iface.mapCanvas().mapSettings().destinationCrs()
    if not (matchCRS and layer.crs() == projectCRS):
        # Extent of the layer in EPSG:3857
        crsSrc = layer.crs()
        crsDest = QgsCoordinateReferenceSystem(3857)
        try:
            # QGIS 3 signature (requires a transform context).
            xform = QgsCoordinateTransform(crsSrc, crsDest,
                                           QgsProject.instance())
        except Exception:
            # Older QGIS signature.
            xform = QgsCoordinateTransform(crsSrc, crsDest)
        extentRep = xform.transformBoundingBox(layer.extent())
        extentRepNew = ','.join([unicode(extentRep.xMinimum()),
                                 unicode(extentRep.xMaximum()),
                                 unicode(extentRep.yMinimum()),
                                 unicode(extentRep.yMaximum())])

        # Reproject in 3857
        piped_3857 = os.path.join(tempfile.gettempdir(),
                                  name_ts + '_piped_3857.tif')
        qgis_version = Qgis.QGIS_VERSION

        # Capture algorithmHelp() stdout to discover which warp parameters
        # this QGIS version supports; only those are forwarded below.
        old_stdout = sys.stdout
        sys.stdout = mystdout = StringIO()
        try:
            processing.algorithmHelp("gdal:warpreproject")
        except Exception:
            pass
        sys.stdout = old_stdout

        params = {
            "INPUT": piped_file,
            "SOURCE_CRS": layer.crs().authid(),
            "TARGET_CRS": "EPSG:3857",
            "NODATA": 0,
            "TARGET_RESOLUTION": 0,
            "RESAMPLING": 2,
            "TARGET_EXTENT": extentRepNew,
            "EXT_CRS": "EPSG:3857",
            "TARGET_EXTENT_CRS": "EPSG:3857",
            "DATA_TYPE": 0,
            "COMPRESS": 4,
            "JPEGCOMPRESSION": 75,
            "ZLEVEL": 6,
            "PREDICTOR": 1,
            "TILED": False,
            "BIGTIFF": 0,
            "TFW": False,
            "MULTITHREADING": False,
            "COPY_SUBDATASETS": False,
            "EXTRA": "",
            "OUTPUT": piped_3857
        }
        warpArgs = {}
        lines = mystdout.getvalue()
        # Fix: the loop variable previously shadowed the `count` parameter.
        for idx, line in enumerate(lines.split("\n")):
            if idx != 0 and ":" in line:
                try:
                    k = line.split(":")[0]
                    warpArgs[k] = params[k]
                except KeyError:
                    # Help text mentions a parameter we have no value for.
                    pass
        try:
            processing.run("gdal:warpreproject", warpArgs)
        except Exception:
            # Warp failed: fall back to the unprojected raster.
            shutil.copyfile(piped_file, piped_3857)
        try:
            processing.run("gdal:translate", {"INPUT": piped_3857,
                                              "OUTSIZE": 100,
                                              "OUTSIZE_PERC": True,
                                              "NODATA": 0,
                                              "EXPAND": 0,
                                              "TARGET_CRS": "",
                                              "PROJWIN": extentRepNew,
                                              "SDS": False,
                                              "DATA_TYPE": 0,
                                              "COMPRESS": 4,
                                              "JPEGCOMPRESSION": 75,
                                              "ZLEVEL": 6,
                                              "PREDICTOR": 1,
                                              "TILED": False,
                                              "BIGTIFF": 0,
                                              "TFW": False,
                                              "COPY_SUBDATASETS": False,
                                              "OPTIONS": "",
                                              "OUTPUT": out_raster})
        except Exception:
            # Translate failed: ship the warped TIFF bytes unchanged.
            shutil.copyfile(piped_3857, out_raster)
    else:
        # CRS already matches: translate directly within the source extent.
        srcExtent = ','.join([unicode(piped_extent.xMinimum()),
                              unicode(piped_extent.xMaximum()),
                              unicode(piped_extent.yMinimum()),
                              unicode(piped_extent.yMaximum())])
        processing.run("gdal:translate", {"INPUT": piped_file,
                                          "OUTSIZE": 100,
                                          "OUTSIZE_PERC": True,
                                          "NODATA": 0,
                                          "EXPAND": 0,
                                          "TARGET_CRS": "",
                                          "PROJWIN": srcExtent,
                                          "SDS": False,
                                          "DATA_TYPE": 0,
                                          "COMPRESS": 4,
                                          "JPEGCOMPRESSION": 75,
                                          "ZLEVEL": 6,
                                          "PREDICTOR": 1,
                                          "TILED": False,
                                          "BIGTIFF": 0,
                                          "TFW": False,
                                          "COPY_SUBDATASETS": False,
                                          "OPTIONS": "",
                                          "OUTPUT": out_raster})
def clip_by_extent(layer, extent):
    """Clip a raster using a bounding box using processing.

    Issue https://github.com/inasafe/inasafe/issues/3183

    On any failure the original, unclipped *layer* is returned instead of
    raising, so the analysis can continue (slower, but correct).

    :param layer: The layer to clip.
    :type layer: QgsRasterLayer

    :param extent: The extent.
    :type extent: QgsRectangle

    :return: Clipped layer.
    :rtype: QgsRasterLayer

    .. versionadded:: 4.0
    """
    # Built up incrementally so the except-block can log whatever part of
    # the parameter dict was assembled before the failure.
    parameters = dict()
    # noinspection PyBroadException
    try:
        output_layer_name = quick_clip_steps['output_layer_name']
        output_layer_name = output_layer_name % layer.keywords['layer_purpose']

        output_raster = unique_filename(suffix='.tif', dir=temp_dir())

        # We make one pixel size buffer on the extent to cover every pixels.
        # See https://github.com/inasafe/inasafe/issues/3655
        pixel_size_x = layer.rasterUnitsPerPixelX()
        pixel_size_y = layer.rasterUnitsPerPixelY()
        buffer_size = max(pixel_size_x, pixel_size_y)
        extent = extent.buffered(buffer_size)

        if is_raster_y_inverted(layer):
            # The raster is Y inverted. We need to switch Y min and Y max.
            bbox = [
                str(extent.xMinimum()),
                str(extent.xMaximum()),
                str(extent.yMaximum()),
                str(extent.yMinimum())
            ]
        else:
            # The raster is normal.
            bbox = [
                str(extent.xMinimum()),
                str(extent.xMaximum()),
                str(extent.yMinimum()),
                str(extent.yMaximum())
            ]

        # These values are all from the processing algorithm.
        # https://github.com/qgis/QGIS/blob/master/python/plugins/processing/
        # algs/gdal/ClipByExtent.py
        # Please read the file to know these parameters.
        parameters['INPUT'] = layer.source()
        parameters['NO_DATA'] = ''
        parameters['PROJWIN'] = ','.join(bbox)
        parameters['DATA_TYPE'] = 5
        parameters['COMPRESS'] = 4
        parameters['JPEGCOMPRESSION'] = 75
        parameters['ZLEVEL'] = 6
        parameters['PREDICTOR'] = 1
        parameters['TILED'] = False
        parameters['BIGTIFF'] = 0
        parameters['TFW'] = False
        parameters['EXTRA'] = ''
        parameters['OUTPUT'] = output_raster

        initialize_processing()

        feedback = create_processing_feedback()
        context = create_processing_context(feedback=feedback)

        result = processing.run(
            "gdal:cliprasterbyextent", parameters, context=context)

        if result is None:
            raise ProcessingInstallationError

        clipped = QgsRasterLayer(result['OUTPUT'], output_layer_name)

        # We transfer keywords to the output.
        clipped.keywords = layer.keywords.copy()
        clipped.keywords['title'] = output_layer_name
        check_layer(clipped)
    except Exception as e:
        # This step clip_raster_by_extent was nice to speedup the analysis.
        # As we got an exception because the layer is invalid, we are not going
        # to stop the analysis. We will return the original raster layer.
        # It will take more processing time until we clip the vector layer.
        # Check https://github.com/inasafe/inasafe/issues/4026 why we got some
        # exceptions with this step.
        LOGGER.exception(parameters)
        LOGGER.exception(
            'Error from QGIS clip raster by extent. Please check the QGIS '
            'logs too !')
        LOGGER.info(
            'Even if we got an exception, we are continuing the analysis. The '
            'layer was not clipped.')
        LOGGER.exception(str(e))
        LOGGER.exception(get_error_message(e).to_text())
        clipped = layer
    return clipped
# for testing: path = "/Users/timo/Ruby/GetTweets/stored_tweets/2015-05-08.json" #path = "/Users/timo/Ruby/GetTweets/stored_tweets/*" # the rest of the path for where the tweets to be analyzed reside path = fs_path+"stored_tweets/2015-05-08.json" # load tweets into Apache SparkSQL (sqlContext) tweets = sqlContext.read.json(path) # register SparkSQL temptable called 'tweets' tweets.registerTempTable("tweets") # get the text content of each tweet tweet_texts = sqlContext.sql("SELECT text FROM tweets") # run the processing .run() in the processing.py for the texts # as output, we have pre-processed texts ready for gensim dictionary and gensim building texts = twpr.run(tweet_texts) # build Gensim dictionary and corpus with helper methods in processing.py dictionary = twpr.buildDictionaryFromTexts(texts) corpus = twpr.buildCorpusFromDictionaryAndTexts(texts, dictionary) # set LDA topic count parameter num_topics = 25 # in order to map LDA output and actual tweets for further analysis, select tweet IDs and texts tweet_ids = sqlContext.sql("SELECT id_str as id, text FROM tweets") # now we have all necesssary pre-processed data for LDA analysis # use the pre-processed inputs to do the LDA analysis distros = twpr.doLDA(corpus, dictionary, num_topics, tweet_ids) # now we have the Apache Spark RDD object we can either .take(5) or .collect() all
def processAlgorithm(self, parameters, context, feedback):
    """Build a concave hull ("alpha shape") of the input point layer.

    Legacy implementation mixing the QGIS 2 GeoAlgorithm API
    (getParameterValue / getOutputFromName) with QGIS 3 QgsProcessingUtils
    helpers. Delaunay-triangulates the points, removes every triangle whose
    longest edge exceeds alpha * (global max edge length), then dissolves
    the remainder into the hull polygon(s).
    """
    layer = QgsProcessingUtils.mapLayerFromString(
        self.getParameterValue(ConcaveHull.INPUT), context)
    alpha = self.getParameterValue(self.ALPHA)
    holes = self.getParameterValue(self.HOLES)
    no_multigeom = self.getParameterValue(self.NO_MULTIGEOMETRY)

    # Delaunay triangulation from input point layer
    feedback.setProgressText(self.tr('Creating Delaunay triangles...'))
    delone_triangles = processing.run(
        "qgis:delaunaytriangulation", layer, None, context=context)['OUTPUT']
    delaunay_layer = QgsProcessingUtils.mapLayerFromString(
        delone_triangles, context)

    # Get max edge length from Delaunay triangles
    feedback.setProgressText(self.tr('Computing edges max length...'))
    features = QgsProcessingUtils.getFeatures(delaunay_layer, context)
    count = QgsProcessingUtils.featureCount(delaunay_layer, context)
    if count == 0:
        raise GeoAlgorithmExecutionException(
            self.tr('No Delaunay triangles created.'))

    counter = 50. / count
    lengths = []
    edges = {}
    for feat in features:
        line = feat.geometry().asPolygon()[0]
        for i in range(len(line) - 1):
            lengths.append(sqrt(line[i].sqrDist(line[i + 1])))
        # Each triangle ring contributes exactly three edges, so the
        # triangle's longest edge is the max of the last three lengths.
        edges[feat.id()] = max(lengths[-3:])
        feedback.setProgress(feat.id() * counter)
    max_length = max(lengths)

    # Get features with longest edge longer than alpha*max_length
    feedback.setProgressText(self.tr('Removing features...'))
    counter = 50. / len(edges)
    i = 0
    ids = []
    for id, max_len in list(edges.items()):
        if max_len > alpha * max_length:
            ids.append(id)
        feedback.setProgress(50 + i * counter)
        i += 1

    # Remove features: select the over-long triangles and delete them via
    # an edit session (pre-dataProvider.deleteFeatures style).
    delaunay_layer.selectByIds(ids)
    delaunay_layer.startEditing()
    delaunay_layer.deleteSelectedFeatures()
    delaunay_layer.commitChanges()

    # Dissolve all Delaunay triangles
    feedback.setProgressText(self.tr('Dissolving Delaunay triangles...'))
    dissolved = processing.run(
        "qgis:dissolve", delaunay_layer.id(), True, None, None,
        context=context)['OUTPUT']
    dissolved_layer = QgsProcessingUtils.mapLayerFromString(dissolved, context)

    # Save result
    feedback.setProgressText(self.tr('Saving data...'))
    feat = QgsFeature()
    QgsProcessingUtils.getFeatures(dissolved_layer, context).nextFeature(feat)
    writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
        layer.fields(), QgsWkbTypes.Polygon, layer.crs(), context)
    geom = feat.geometry()
    if no_multigeom and geom.isMultipart():
        # Only singlepart geometries are allowed
        geom_list = geom.asMultiPolygon()
        for single_geom_list in geom_list:
            single_feature = QgsFeature()
            single_geom = QgsGeometry.fromPolygon(single_geom_list)
            if not holes:
                # Delete holes: strip interior rings one at a time until
                # deleteRing reports there is nothing left to remove.
                deleted = True
                while deleted:
                    deleted = single_geom.deleteRing(1)
            single_feature.setGeometry(single_geom)
            writer.addFeature(single_feature)
    else:
        # Multipart geometries are allowed
        if not holes:
            # Delete holes (same ring-stripping loop as above).
            deleted = True
            while deleted:
                deleted = geom.deleteRing(1)
        writer.addFeature(feat)
    # Closing the writer flushes the output file.
    del writer
##Select by attribute=name ##Tests=group ##INPUT_LAYER=vector ##OUTPUT_LAYER=output vector import processing result = processing.run("qgis:selectbyattribute", INPUT_LAYER, "id2", 0, "2") processing.run("qgis:saveselectedfeatures", result["OUTPUT"], OUTPUT_LAYER)
def processAlgorithm(self, parameters, context, feedback):
    """
    Here is where the processing itself takes place.
    """
    # Retrieve the feature source and sink. The 'dest_id' variable is used
    # to uniquely identify the feature sink, and must be included in the
    # dictionary returned by the processAlgorithm function.
    source = self.parameterAsSource(
        parameters,
        self.INPUT,
        context
    )

    # If source was not found, throw an exception to indicate that the algorithm
    # encountered a fatal error. The exception text can be any string, but in this
    # case we use the pre-built invalidSourceError method to return a standard
    # helper text for when a source cannot be evaluated
    if source is None:
        raise QgsProcessingException(
            self.invalidSourceError(parameters, self.INPUT))

    (sink, dest_id) = self.parameterAsSink(
        parameters,
        self.OUTPUT,
        context,
        source.fields(),
        source.wkbType(),
        source.sourceCrs()
    )

    # Send some information to the user
    feedback.pushInfo('CRS is {}'.format(source.sourceCrs().authid()))

    # If sink was not created, throw an exception to indicate that the algorithm
    # encountered a fatal error. The exception text can be any string, but in this
    # case we use the pre-built invalidSinkError method to return a standard
    # helper text for when a sink cannot be evaluated
    if sink is None:
        raise QgsProcessingException(
            self.invalidSinkError(parameters, self.OUTPUT))

    # Compute the number of steps to display within the progress bar and
    # get features from source (guard against empty sources: featureCount
    # of 0 would otherwise divide by zero).
    total = 100.0 / source.featureCount() if source.featureCount() else 0
    features = source.getFeatures()

    for current, feature in enumerate(features):
        # Stop the algorithm if cancel button has been clicked
        if feedback.isCanceled():
            break

        # Add a feature in the sink
        sink.addFeature(feature, QgsFeatureSink.FastInsert)

        # Update the progress bar
        feedback.setProgress(int(current * total))

    # To run another Processing algorithm as part of this algorithm, you can use
    # processing.run(...). Make sure you pass the current context and feedback
    # to processing.run to ensure that all temporary layer outputs are available
    # to the executed algorithm, and that the executed algorithm can send feedback
    # reports to the user (and correctly handle cancelation and progress reports!)
    if False:
        # Fix: the parameter dict must be passed positionally —
        # processing.run() has no `param` keyword argument, so the previous
        # `param={...}` form would raise TypeError if ever executed.
        buffered_layer = processing.run("native:buffer", {
            'INPUT': dest_id,
            'DISTANCE': 1.5,
            'SEGMENTS': 5,
            'END_CAP_STYLE': 0,
            'JOIN_STYLE': 0,
            'MITER_LIMIT': 2,
            'DISSOLVE': False,
            'OUTPUT': 'memory:'
        }, context=context, feedback=feedback)['OUTPUT']

    # Return the results of the algorithm. In this case our only result is
    # the feature sink which contains the processed features, but some
    # algorithms may return multiple feature sinks, calculated numeric
    # statistics, etc. These should all be included in the returned
    # dictionary, with keys matching the feature corresponding parameter
    # or output names.
    return {self.OUTPUT: dest_id}
import processing

# Console snippet: run the Contour plugin on the active layer, using field
# "z" as the contour source, and add the in-memory result to the project.
layer = iface.mapCanvas().currentLayer()
r = processing.run(
    "contourplugin:generatecontours",
    {'ContourInterval': 1,
     'ContourLevels': '',
     'ContourMethod': 1,
     'ContourType': 0,
     'ExtendOption': 0,
     'InputField': '"z"',
     'InputLayer': layer,
     'LabelTrimZeros': False,
     'LabelUnits': '',
     'MaxContourValue': None,   # None = use the data range
     'MinContourValue': None,
     'NContour': 20,
     'OutputLayer': 'memory:'}
)
layer = r['OutputLayer']
QgsProject.instance().addMapLayer(layer)
def computeMaskThreshold(dlg, conf, dir_raster_treat, dir_dest,
                         rasterTreatName, rasterSeuilName, seuilStr, deltaStr,
                         extension_input_raster):
    """Compute water-mask raster(s) by thresholding the index raster.

    When the "seuil" (threshold) radio button is checked, three masks are
    produced (threshold - delta, threshold, threshold + delta); otherwise a
    single mask at the given threshold. Thresholding runs either through
    OTB BandMath or the QGIS raster calculator depending on *conf*.

    Returns the list of loaded QgsRasterLayer masks, or None on failure.
    """
    seuil = float(seuilStr)
    if not dlg.rbSeuil.isChecked():
        delta = 0
        values_seuil_list = [0]
    else:
        delta = float(deltaStr)
        # Offsets applied to the base threshold: -delta, 0, +delta.
        values_seuil_list = [-1, 0, +1]
    messInfo(dlg, "Seuil: " + seuilStr)
    messInfo(dlg, "")

    # Direction of the comparison depends on which index was computed
    # (NDVI: water below threshold; NDWI2: water above threshold).
    if dlg.rbComputeNdvi.isChecked():
        direction = True
    elif dlg.rbComputeNdwi2.isChecked():
        direction = False
    else:
        direction = True
    if direction:
        direction_operator_str = "<"  # less-than operator
    else:
        direction_operator_str = ">"  # greater-than operator

    # Sentinel value for "no threshold processed yet"; any real threshold
    # differs from it, so the first iteration always runs.
    if conf.rbOTB.isChecked():
        # OTB calculator
        init = 41253
    else:
        # QGIS calculator
        init = 32526

    masks_list = []
    for i in values_seuil_list:
        newSeuil = seuil + i * delta
        # Build a normalized string form of the threshold (and a x10
        # variant used to work around a raster-calculator issue on Linux).
        if float(newSeuil) == 0:
            newSeuilStr = '0'
            newSeuil10Str = '0'
        else:
            newSeuilStr = str(newSeuil)
            newSeuil10Str = str(newSeuil * 10)
            # Strip leading zeros (keep "0.xxx" forms intact).
            while newSeuilStr[0] == '0' and len(
                    newSeuilStr) >= 2 and newSeuilStr[1] != '.':
                newSeuilStr = newSeuilStr[1:]
            # Strip trailing zeros and a dangling decimal point.
            if '.' in newSeuilStr:
                while newSeuilStr[-1] == '0':
                    newSeuilStr = newSeuilStr[:len(newSeuilStr) - 1]
                if newSeuilStr[-1] == '.':
                    newSeuilStr = newSeuilStr[:len(newSeuilStr) - 1]

        # Skip duplicate threshold values (e.g. when delta == 0).
        if newSeuil != init:
            init = newSeuil
            if delta == 0:
                layerSeuilName = rasterSeuilName + seuilStr
            else:
                layerSeuilName = rasterSeuilName + newSeuilStr
            layerSeuilPath = dir_dest + os.sep + layerSeuilName + EXT_RASTER

            # Remove a stale output file before recomputing it.
            if os.path.exists(layerSeuilPath):
                try:
                    os.remove(layerSeuilPath)
                except:
                    QMessageBox.information(
                        None, "Attention !!!", layerSeuilPath +
                        " ne peut pas être effacé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
                        QMessageBox.Ok, QMessageBox.NoButton)
                    messErreur(dlg,
                               layerSeuilPath + " ne peut pas être effacé.")
                    return None

            messInfo(dlg,
                     "Calcul du masque 'Eau' avec le seuil: " + newSeuilStr)
            messInfo(dlg, "")

            # --- OTB calculator branch ---
            if conf.rbOTB.isChecked():
                rasterTreatPath = (dir_raster_treat + os.sep +
                                   rasterTreatName + extension_input_raster)
                try:
                    # Pixel value 1 = water, 2 = not water.
                    expression = ('im1b1' + direction_operator_str +
                                  newSeuilStr + '?1:2')
                    parameters = {
                        "il": [rasterTreatPath],
                        "out": layerSeuilPath,
                        "exp": expression,
                        "outputpixeltype": 2,
                        "ram": 128
                    }
                    processing.run('otb:BandMath', parameters)
                except:
                    messErreur(
                        dlg,
                        "Erreur lors du lancement de otb:BandMath seuillage.")
                    return None
            # --- QGIS calculator branch ---
            else:
                entries = []
                li = layerList()
                raster = li[rasterTreatName]
                extent = raster.extent()
                height = raster.height()
                width = raster.width()
                s1 = QgsRasterCalculatorEntry()
                s1.ref = 's@1'
                s1.raster = raster
                s1.bandNumber = 1
                entries.append(s1)
                if platform.system() == "Linux":
                    # Workaround for a raster-calculator bug on Linux:
                    # compare 10*value against 10*threshold instead.
                    calc = QgsRasterCalculator(
                        '(10*s@1' + direction_operator_str + newSeuil10Str +
                        ')', layerSeuilPath, FORMAT_IMA, extent, width,
                        height, entries)
                else:
                    calc = QgsRasterCalculator(
                        '(s@1' + direction_operator_str + newSeuilStr + ')',
                        layerSeuilPath, FORMAT_IMA, extent, width, height,
                        entries)
                ret = calc.processCalculation()
                if ret != 0:
                    QMessageBox.information(
                        None, "Attention !!!",
                        " Erreur d'exécution, cela peut être du à une insuffisance mémoire, image trop volumineuse.",
                        QMessageBox.Ok, QMessageBox.NoButton)
                    messErreur(
                        dlg, "Erreur de traitement sur QgsRasterCalculator.")
                    return None

            # Load and validate the mask produced by either branch.
            if os.path.exists(layerSeuilPath):
                mask = QgsRasterLayer(layerSeuilPath, layerSeuilName)
            else:
                QMessageBox.information(
                    None, "Attention !!!", layerSeuilPath +
                    " n'a pas été créé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
                    QMessageBox.Ok, QMessageBox.NoButton)
                messErreur(dlg, layerSeuilPath + " n'a pas été créé.")
                return None
            if not mask.isValid():
                messErreur(dlg, layerSeuilPath + " ne peut pas être chargé.")
                return None

            # Collect the mask for the returned list.
            masks_list.append(mask)
    return masks_list
def spaced(bar, buildings_layer_path, receiver_points_layer_path,
           spaced_pts_distance):
    """Generate receiver points spaced along building facades.

    Buffers the building polygons slightly, converts the buffer outlines to
    lines, drops points every *spaced_pts_distance* along them, discards
    points that fall inside a building, tags each surviving point with the
    id of its nearest building, and writes the result to a shapefile at
    *receiver_points_layer_path* (then loads it into the project).

    :param bar: progress-bar widget driven via setValue(0..100).
    :param buildings_layer_path: path to the building polygons (OGR source).
    :param receiver_points_layer_path: output shapefile path.
    :param spaced_pts_distance: spacing between generated points.
    """
    # Offset of the receiver points from the facades (map units).
    distance_from_facades = 0.1

    buildings_layer_name = os.path.splitext(
        os.path.basename(buildings_layer_path))[0]
    buildings_layer = QgsVectorLayer(buildings_layer_path,
                                     buildings_layer_name, "ogr")

    # Copy building layer to a memory layer to delete all fields.
    buildings_memory_layer = QgsVectorLayer(
        "Polygon?crs=" + str(buildings_layer.crs().authid()),
        "polygon_memory_layer", "memory")
    buildings_memory_layer.dataProvider().addAttributes([])
    buildings_feat_all = buildings_layer.dataProvider().getFeatures()
    buildings_feat_list = []
    for buildings_feat in buildings_feat_all:
        buildings_feat_list.append(buildings_feat)
    buildings_memory_layer.dataProvider().addFeatures(buildings_feat_list)
    buildings_memory_layer.updateExtents()
    # Workaround (original author's note): the memory layer must be added
    # to the project or the first processing call below does not work.
    QgsProject.instance().addMapLayers([buildings_memory_layer])
    bar.setValue(1)

    # This processing alg has the result layer under output['OUTPUT'].
    output = processing.run("native:buffer",
                            {'INPUT': buildings_memory_layer,
                             'DISTANCE': distance_from_facades,
                             'DISSOLVE': False,
                             'OUTPUT': 'memory:'})
    # The memory layer can now be removed from the map again.
    QgsProject.instance().removeMapLayers([buildings_memory_layer.id()])
    bar.setValue(25)

    # Buffer outlines -> line layer.
    output = processing.run("qgis:polygonstolines",
                            {'INPUT': output['OUTPUT'],
                             'OUTPUT': 'memory:'})
    bar.setValue(50)

    # Drop points along the facade lines at the requested spacing.
    poly_to_lines = output['OUTPUT']
    output = processing.run("qgis:pointsalonglines",
                            {'INPUT': poly_to_lines,
                             'DISTANCE': spaced_pts_distance,
                             'START_OFFSET': 0,
                             'END_OFFSET': 0,
                             'OUTPUT': 'memory:'})
    bar.setValue(75)
    receiver_points_memory_layer = output['OUTPUT']
    del output

    ## Delete pts in buildings
    # Build a spatial index (plus an id -> feature lookup) over buildings.
    buildings_feat_all = buildings_layer.dataProvider().getFeatures()
    buildings_spIndex = QgsSpatialIndex()
    buildings_feat_all_dict = {}
    for buildings_feat in buildings_feat_all:
        buildings_spIndex.insertFeature(buildings_feat)
        buildings_feat_all_dict[buildings_feat.id()] = buildings_feat

    receiver_points_memory_layer_all = \
        receiver_points_memory_layer.dataProvider().getFeatures()
    receiver_points_layer_fields = QgsFields()
    receiver_points_layer_fields.append(QgsField("id_pt", QVariant.Int))
    receiver_points_layer_fields.append(QgsField("id_bui", QVariant.Int))
    receiver_points_layer_writer = QgsVectorFileWriter(
        receiver_points_layer_path, "System", receiver_points_layer_fields,
        QgsWkbTypes.Point, buildings_layer.crs(), "ESRI Shapefile")

    receiver_points_feat_id = 0
    receiver_memory_feat_total = \
        receiver_points_memory_layer.dataProvider().featureCount()
    receiver_memory_feat_number = 0
    for receiver_memory_feat in receiver_points_memory_layer_all:
        receiver_memory_feat_number = receiver_memory_feat_number + 1
        # Progress covers the 75..100 range of the bar.
        barValue = receiver_memory_feat_number / \
            float(receiver_memory_feat_total) * 25 + 75
        bar.setValue(barValue)

        # Candidate buildings: those whose index entry intersects a small
        # square around the point (side = 2 * distance_from_facades).
        rect = QgsRectangle()
        rect.setXMinimum(receiver_memory_feat.geometry().asPoint().x() -
                         distance_from_facades)
        rect.setXMaximum(receiver_memory_feat.geometry().asPoint().x() +
                         distance_from_facades)
        rect.setYMinimum(receiver_memory_feat.geometry().asPoint().y() -
                         distance_from_facades)
        rect.setYMaximum(receiver_memory_feat.geometry().asPoint().y() +
                         distance_from_facades)
        buildings_selection = buildings_spIndex.intersects(rect)

        # Discard the point if it lies inside any nearby building.
        to_add = True
        receiver_geom = receiver_memory_feat.geometry()
        building_id_correct = None
        for buildings_id in buildings_selection:
            building_geom = buildings_feat_all_dict[buildings_id].geometry()
            intersectBuilding = QgsGeometry.intersects(receiver_geom,
                                                       building_geom)
            building_id_correct = buildings_id
            if intersectBuilding:
                to_add = False
                building_id_correct = None
                break

        # picking the nearest building to the receiver point analysed
        nearestIds = buildings_spIndex.nearestNeighbor(
            receiver_geom.asPoint(), 1)
        building_fid = []
        for featureId in nearestIds:
            request = QgsFeatureRequest().setFilterFid(featureId)
            for feature in buildings_layer.getFeatures(request):
                dist = receiver_geom.distance(feature.geometry())
                building_fid.append((dist, feature.id()))
        # Smallest distance wins; [-1] extracts the feature id of that pair.
        building_fid_correct = min(building_fid, key=lambda x: x[0])[-1]

        if to_add:
            attributes = [receiver_points_feat_id, building_fid_correct]
            fet = QgsFeature()
            fet.setGeometry(receiver_memory_feat.geometry())
            fet.setAttributes(attributes)
            receiver_points_layer_writer.addFeature(fet)
            receiver_points_feat_id = receiver_points_feat_id + 1

    # Deleting the writer flushes the shapefile to disk.
    del receiver_points_layer_writer

    receiver_points_layer_name = os.path.splitext(
        os.path.basename(receiver_points_layer_path))[0]
    receiver_points_layer = QgsVectorLayer(receiver_points_layer_path,
                                           str(receiver_points_layer_name),
                                           "ogr")
    QgsProject.instance().addMapLayers([receiver_points_layer])
    QgsProject.instance().reloadAllLayers()
def _avertirValeurIncorrecte(message):
    """Show the standard warning box used for an invalid numeric input."""
    QMessageBox.information(None, "Attention !!!", message,
                            QMessageBox.Ok, QMessageBox.NoButton)


def _lireEntierBorne(texte, message, borne_min, borne_max, max_inclus=True):
    """Parse *texte* as an int and check it lies within the given bounds.

    Shows *message* in a warning box and returns None when the text is empty,
    not an integer, or out of range; otherwise returns the parsed int.
    Callers must test the result with ``is None`` (0 is a valid value).
    """
    if texte == '':
        _avertirValeurIncorrecte(message)
        return None
    try:
        valeur = int(texte)
    except ValueError:
        _avertirValeurIncorrecte(message)
        return None
    if max_inclus:
        dans_bornes = borne_min <= valeur <= borne_max
    else:
        dans_bornes = borne_min <= valeur < borne_max
    if not dans_bornes:
        _avertirValeurIncorrecte(message)
        return None
    return valeur


def filterRaster(dlg, conf, dir_dest, rasterSeuilName, rasterFilterName):
    """Filter minor artefacts (small water zones) out of the thresholded raster.

    Depending on the configuration, runs either OTB
    ClassificationMapRegularization (majority filter, radius in [0, 30]) or
    gdal:sieve (minimum patch size in [0, 10000)).

    Returns the filtered QgsRasterLayer, or None on any error (the user is
    informed through message boxes and the dialog log).
    """
    li = layerList()
    layerSeuil = li[rasterSeuilName]
    layerSeuilPath = dir_dest + os.sep + rasterSeuilName + EXT_RASTER
    layerFiltreIlotsPath = dir_dest + os.sep + rasterFilterName + EXT_RASTER
    # Remove a previously loaded result layer so its file can be rewritten.
    for elem in li:
        if elem == rasterFilterName:
            QgsProject.instance().removeMapLayer(li[elem].id())
    # Remove a stale output file; abort if it is locked.
    if os.path.exists(layerFiltreIlotsPath):
        try:
            os.remove(layerFiltreIlotsPath)
        except OSError:
            QMessageBox.information(
                None, "Attention !!!",
                layerFiltreIlotsPath + " ne peut pas être effacé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
                QMessageBox.Ok, QMessageBox.NoButton)
            messErreur(dlg, layerFiltreIlotsPath + " ne peut pas être effacé.")
            return None
    if conf.rbOTB.isChecked():
        # --- OTB: Classification Map Regularization ---
        seuilCMR = _lireEntierBorne(dlg.seuilCMR.text(),
                                    "Valeur de radius incorrecte !", 0, 30)
        if seuilCMR is None:
            return None
        messInfo(
            dlg,
            "Lancement du filtre 'Classification Map Regularization' sur le raster: "
            + rasterSeuilName)
        messInfo(dlg, "Radius: " + str(seuilCMR))
        messInfo(dlg, "")
        try:
            parameters = {
                "io.in": layerSeuilPath,
                "io.out": layerFiltreIlotsPath,
                "ip.radius": seuilCMR,
                "ip.suvbool": True,
                "ip.nodatalabel": 0,
                "ip.undecidedlabel": 0,
                "ip.onlyisolatedpixels": False,
                "ip.isolatedthreshold": 0,
                "outputpixeltype": 2,
                "ram": 128
            }
            processing.run('otb:ClassificationMapRegularization', parameters)
        except Exception:
            messErreur(
                dlg,
                "Erreur de traitement par (filtre Classification Map Regularization) de %s !!!"
                % (layerFiltreIlotsPath))
            return None
    else:
        # --- GDAL: sieve filter ---
        seuilTamiser = _lireEntierBorne(dlg.seuilTamiser.text(),
                                        "Valeur de seuil incorrecte !",
                                        0, 10000, max_inclus=False)
        if seuilTamiser is None:
            return None
        # 8-connectedness unless the 4-neighbour radio button is selected.
        conn = not dlg.rbTamiser4.isChecked()
        messInfo(dlg, "Lancement du filtrage sur le raster: " + rasterSeuilName)
        messInfo(dlg, "Seuil: " + str(seuilTamiser))
        messInfo(dlg, "")
        try:
            parameters = {
                "INPUT": layerSeuil,
                "THRESHOLD": seuilTamiser,
                "EIGHT_CONNECTEDNESS": conn,
                "NO_MASK": True,
                "MASK_LAYER": "",
                "OUTPUT": layerFiltreIlotsPath
            }
            processing.run('gdal:sieve', parameters)
        except Exception:
            messErreur(
                dlg,
                "Erreur de traitement par gdal:sieve (filtre) de %s !!!"
                % (layerFiltreIlotsPath))
            return None
    # Check the filtered raster was actually produced and loads correctly.
    if os.path.exists(layerFiltreIlotsPath):
        layer = QgsRasterLayer(layerFiltreIlotsPath, rasterFilterName)
    else:
        QMessageBox.information(
            None, "Attention !!!",
            layerFiltreIlotsPath + " n'a pas été créé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
            QMessageBox.Ok, QMessageBox.NoButton)
        messErreur(dlg, layerFiltreIlotsPath + " n'a pas été créé.")
        return None
    if not layer.isValid():
        messErreur(dlg, layerFiltreIlotsPath + " ne peut pas être chargé.")
        return None
    return layer
def polygonizeRaster(dlg, dir_dest, rasterToPolygonizeName, vectorPolygonName):
    """Vectorize a raster layer into polygons with gdal:polygonize.

    The raster value of each polygon is written to a 'DN' attribute.

    Returns the resulting QgsVectorLayer, or None on any error (the user is
    informed through message boxes and the dialog log).
    """
    li = layerList()
    rasterToPolygonize = li[rasterToPolygonizeName]
    messInfo(dlg, "Vectorisation du raster: " + rasterToPolygonizeName)
    messInfo(dlg, "")
    outputVectorPath = dir_dest + os.sep + vectorPolygonName + EXT_VECTOR
    # Remove a stale output file; abort if it is locked.
    if os.path.exists(outputVectorPath):
        try:
            os.remove(outputVectorPath)
        except OSError:
            QMessageBox.information(
                None, "Attention !!!",
                outputVectorPath + " ne peut pas être effacé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement.",
                QMessageBox.Ok, QMessageBox.NoButton)
            messErreur(dlg, outputVectorPath + " ne peut pas être effacé.")
            return None
    if rasterToPolygonize:
        try:
            parameters = {
                "INPUT": rasterToPolygonize,
                "BAND": 1,
                "FIELD": 'DN',
                "EIGHT_CONNECTEDNESS": False,
                "OUTPUT": outputVectorPath
            }
            processing.run('gdal:polygonize', parameters)
        except Exception:
            messErreur(dlg, "Erreur pendant l'exécution de gdal:polygonize.")
            return None
    else:
        messErreur(
            dlg,
            "fin de traitement sur gdal:polygonize, " + rasterToPolygonizeName
            + " n'est pas valide.")
        return None
    # Check the vector file was actually produced and loads correctly.
    if os.path.exists(outputVectorPath):
        layer = QgsVectorLayer(outputVectorPath, vectorPolygonName, "ogr")
    else:
        QMessageBox.information(
            None, "Attention !!!",
            outputVectorPath + " n'a pas été créé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
            QMessageBox.Ok, QMessageBox.NoButton)
        messErreur(dlg, outputVectorPath + " n'a pas été créé.")
        return None
    if not layer.isValid():
        messErreur(dlg, outputVectorPath + " ne peut pas être chargé.")
        return None
    messInfo(dlg, "Fin vectorisation du raster: " + rasterToPolygonizeName)
    messInfo(dlg, "")
    return layer
def processAlgorithm(self, parameters, context, model_feedback):
    """Fetch OSM features for an area and attach light-pollution raster values.

    Generated processing-model pipeline: builds an Overpass query, downloads
    the result, processes the multipolygon and point sub-layers in parallel
    branches (fix geometries, explode the OSM ``other_tags`` HStore, refactor
    fields), samples a virtual raster built from up to three light maps, and
    merges the branches into the final output ('Outputfinal2').

    NOTE(review): several ``outputs`` dict keys ('FixGeometries',
    'ExplodeHstoreField', 'RefactorFields') are reused across steps, so
    correctness depends on the exact step ordering below — see inline notes.
    """
    # Use a multi-step feedback, so that individual child algorithm progress
    # reports are adjusted for the overall progress through the model
    feedback = QgsProcessingMultiStepFeedback(18, model_feedback)
    results = {}
    outputs = {}

    # Build query inside an area (QuickOSM -> Overpass API URL)
    alg_params = {
        'AREA': parameters['inputarea'],
        'KEY': parameters['inputkeytype'],
        'SERVER': 'http://www.overpass-api.de/api/interpreter',
        'TIMEOUT': 999,
        'VALUE': parameters['inputvaluetype']
    }
    outputs['BuildQueryInsideAnArea'] = processing.run(
        'quickosm:buildqueryinsidearea', alg_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(1)
    if feedback.isCanceled():
        return {}

    # Build virtual raster from the three light-map inputs
    alg_params = {
        'ADD_ALPHA': False,
        'ASSIGN_CRS': None,
        'INPUT': [
            parameters['lightmap'], parameters['lightmap2'],
            parameters['lightmap3']
        ],
        'PROJ_DIFFERENCE': False,
        'RESAMPLING': 0,
        'RESOLUTION': 0,
        'SEPARATE': False,
        'SRC_NODATA': '',
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['BuildVirtualRaster'] = processing.run(
        'gdal:buildvirtualraster', alg_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(2)
    if feedback.isCanceled():
        return {}

    # Download file (the Overpass query result, an OSM/GPKG-style container)
    alg_params = {
        'URL': outputs['BuildQueryInsideAnArea']['OUTPUT_URL'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['DownloadFile'] = processing.run('native:filedownloader',
                                             alg_params,
                                             context=context,
                                             feedback=feedback,
                                             is_child_algorithm=True)

    feedback.setCurrentStep(3)
    if feedback.isCanceled():
        return {}

    # String concatenation - Multipolygon (build "<file>|layername=multipolygons" URI)
    alg_params = {
        'INPUT_1': outputs['DownloadFile']['OUTPUT'],
        'INPUT_2': '|layername=multipolygons'
    }
    outputs['StringConcatenationMultipolygon'] = processing.run(
        'native:stringconcatenation', alg_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(4)
    if feedback.isCanceled():
        return {}

    # Fix geometries (multipolygon branch)
    alg_params = {
        'INPUT': outputs['StringConcatenationMultipolygon']['CONCATENATION'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['FixGeometries'] = processing.run('native:fixgeometries',
                                              alg_params,
                                              context=context,
                                              feedback=feedback,
                                              is_child_algorithm=True)

    feedback.setCurrentStep(5)
    if feedback.isCanceled():
        return {}

    # String concatenation - Points (build "<file>|layername=points" URI)
    alg_params = {
        'INPUT_1': outputs['DownloadFile']['OUTPUT'],
        'INPUT_2': '|layername=points'
    }
    outputs['StringConcatenationPoints'] = processing.run(
        'native:stringconcatenation', alg_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(6)
    if feedback.isCanceled():
        return {}

    # Explode HStore Field (multipolygon branch: expand OSM 'other_tags')
    # Consumes the multipolygon FixGeometries output BEFORE it is overwritten
    # at step 8 below — ordering matters.
    alg_params = {
        'EXPECTED_FIELDS': '',
        'FIELD': 'other_tags',
        'INPUT': outputs['FixGeometries']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['ExplodeHstoreField'] = processing.run(
        'native:explodehstorefield', alg_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(7)
    if feedback.isCanceled():
        return {}

    # Fix geometries (points branch)
    # NOTE(review): overwrites outputs['FixGeometries'] from step 5; safe only
    # because the multipolygon fix was already consumed at step 7.
    alg_params = {
        'INPUT': outputs['StringConcatenationPoints']['CONCATENATION'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['FixGeometries'] = processing.run('native:fixgeometries',
                                              alg_params,
                                              context=context,
                                              feedback=feedback,
                                              is_child_algorithm=True)

    feedback.setCurrentStep(8)
    if feedback.isCanceled():
        return {}

    # Refactor fields (multipolygon branch: normalize osm_id/name/type)
    alg_params = {
        'FIELDS_MAPPING': [{
            'expression': 'if("osm_way_id" is NULL, -1, "osm_way_id")',
            'length': 0,
            'name': 'osm_id',
            'precision': 0,
            'type': 10
        }, {
            'expression': ' if( "name" is NULL, \'Unknown\', "name")',
            'length': 0,
            'name': 'name',
            'precision': 0,
            'type': 10
        }, {
            'expression': "concat(@inputkeytype,'-', @inputvaluetype )",
            'length': 0,
            'name': 'type',
            'precision': 0,
            'type': 10
        }],
        'INPUT': outputs['ExplodeHstoreField']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['RefactorFields'] = processing.run('qgis:refactorfields',
                                               alg_params,
                                               context=context,
                                               feedback=feedback,
                                               is_child_algorithm=True)

    feedback.setCurrentStep(9)
    if feedback.isCanceled():
        return {}

    # Zonal statistics (mean light value per polygon; STATS 2 + prefix gives
    # a 'light_polmean' attribute, read back at step 14)
    alg_params = {
        'COLUMN_PREFIX': 'light_pol',
        'INPUT_RASTER': outputs['BuildVirtualRaster']['OUTPUT'],
        'INPUT_VECTOR': outputs['RefactorFields']['OUTPUT'],
        'RASTER_BAND': 1,
        'STATS': 2
    }
    outputs['ZonalStatistics'] = processing.run('qgis:zonalstatistics',
                                                alg_params,
                                                context=context,
                                                feedback=feedback,
                                                is_child_algorithm=True)

    feedback.setCurrentStep(10)
    if feedback.isCanceled():
        return {}

    # Centroids
    # NOTE(review): reads the layer that was PASSED INTO zonal statistics
    # ('INPUT_VECTOR'), which suggests the legacy in-place zonal statistics
    # algorithm is relied upon — confirm against the QGIS version targeted.
    alg_params = {
        'ALL_PARTS': False,
        'INPUT': outputs['ZonalStatistics']['INPUT_VECTOR'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['Centroids'] = processing.run('native:centroids',
                                          alg_params,
                                          context=context,
                                          feedback=feedback,
                                          is_child_algorithm=True)

    feedback.setCurrentStep(11)
    if feedback.isCanceled():
        return {}

    # Explode HStore Field (points branch — overwrites the step-7 key)
    alg_params = {
        'EXPECTED_FIELDS': '',
        'FIELD': 'other_tags',
        'INPUT': outputs['FixGeometries']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['ExplodeHstoreField'] = processing.run(
        'native:explodehstorefield', alg_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(12)
    if feedback.isCanceled():
        return {}

    # Refactor fields (points branch — overwrites the step-9 key)
    alg_params = {
        'FIELDS_MAPPING': [{
            'expression': 'if("osm_id" is NULL, -1, "osm_id")',
            'length': 0,
            'name': 'osm_id',
            'precision': 0,
            'type': 2
        }, {
            'expression': ' if( "name" is NULL, \'Unknown\', "name")',
            'length': 0,
            'name': 'name',
            'precision': 0,
            'type': 10
        }, {
            'expression': "concat(@inputkeytype,'-', @inputvaluetype )",
            'length': 0,
            'name': 'type',
            'precision': 0,
            'type': 10
        }],
        'INPUT': outputs['ExplodeHstoreField']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['RefactorFields'] = processing.run('qgis:refactorfields',
                                               alg_params,
                                               context=context,
                                               feedback=feedback,
                                               is_child_algorithm=True)

    feedback.setCurrentStep(13)
    if feedback.isCanceled():
        return {}

    # Refactor fields (centroid branch: rename 'light_polmean' -> 'light_pol')
    alg_params = {
        'FIELDS_MAPPING': [{
            'expression': '"osm_id"',
            'length': 0,
            'name': 'osm_id',
            'precision': 0,
            'type': 2
        }, {
            'expression': '"name"',
            'length': 0,
            'name': 'name',
            'precision': 0,
            'type': 10
        }, {
            'expression': '"type"',
            'length': 0,
            'name': 'type',
            'precision': 0,
            'type': 10
        }, {
            'expression': '"light_polmean"',
            'length': 0,
            'name': 'light_pol',
            'precision': 0,
            'type': 6
        }],
        'INPUT': outputs['Centroids']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['RefactorFields'] = processing.run('qgis:refactorfields',
                                               alg_params,
                                               context=context,
                                               feedback=feedback,
                                               is_child_algorithm=True)

    feedback.setCurrentStep(14)
    if feedback.isCanceled():
        return {}

    # Sample raster values (light value at each point)
    alg_params = {
        'COLUMN_PREFIX': 'rvalue',
        'INPUT': outputs['RefactorFields']['OUTPUT'],
        'RASTERCOPY': outputs['BuildVirtualRaster']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['SampleRasterValues'] = processing.run('qgis:rastersampling',
                                                   alg_params,
                                                   context=context,
                                                   feedback=feedback,
                                                   is_child_algorithm=True)

    feedback.setCurrentStep(15)
    if feedback.isCanceled():
        return {}

    # Refactor fields (sampled points: rename 'rvalue_1' -> 'light_pol')
    alg_params = {
        'FIELDS_MAPPING': [{
            'expression': '"osm_id"',
            'length': 0,
            'name': 'osm_id',
            'precision': 0,
            'type': 2
        }, {
            'expression': '"name"',
            'length': 0,
            'name': 'name',
            'precision': 0,
            'type': 10
        }, {
            'expression': '"type"',
            'length': 0,
            'name': 'type',
            'precision': 0,
            'type': 10
        }, {
            'expression': '"rvalue_1"',
            'length': 0,
            'name': 'light_pol',
            'precision': 0,
            'type': 6
        }],
        'INPUT': outputs['SampleRasterValues']['OUTPUT'],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['RefactorFields'] = processing.run('qgis:refactorfields',
                                               alg_params,
                                               context=context,
                                               feedback=feedback,
                                               is_child_algorithm=True)

    feedback.setCurrentStep(16)
    if feedback.isCanceled():
        return {}

    # Merge vector layers
    # NOTE(review): both LAYERS entries reference outputs['RefactorFields'],
    # which at this point holds only the LAST refactor result (step 16) — the
    # same layer is merged with itself and the centroid-branch result from
    # step 14 appears to be lost. Distinct keys were likely intended; confirm
    # against the original model before changing.
    alg_params = {
        'CRS': None,
        'LAYERS': [
            outputs['RefactorFields']['OUTPUT'],
            outputs['RefactorFields']['OUTPUT']
        ],
        'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
    }
    outputs['MergeVectorLayers'] = processing.run(
        'native:mergevectorlayers', alg_params, context=context,
        feedback=feedback, is_child_algorithm=True)

    feedback.setCurrentStep(17)
    if feedback.isCanceled():
        return {}

    # Refactor fields (final schema, adds lat/lng from point coordinates)
    alg_params = {
        'FIELDS_MAPPING': [{
            'expression': '"osm_id"',
            'length': 16,
            'name': 'osm_id',
            'precision': 0,
            'type': 4
        }, {
            'expression': '"name"',
            'length': 33,
            'name': 'name',
            'precision': 0,
            'type': 10
        }, {
            'expression': '"type"',
            'length': 6,
            'name': 'type',
            'precision': 0,
            'type': 10
        }, {
            'expression': '"light_pol"',
            'length': 18,
            'name': 'light_pol',
            'precision': 10,
            'type': 6
        }, {
            'expression': ' $y ',
            'length': 18,
            'name': 'lat',
            'precision': 6,
            'type': 6
        }, {
            'expression': '$x',
            'length': 18,
            'name': 'lng',
            'precision': 6,
            'type': 6
        }],
        'INPUT': outputs['MergeVectorLayers']['OUTPUT'],
        'OUTPUT': parameters['Outputfinal2']
    }
    outputs['RefactorFields'] = processing.run('qgis:refactorfields',
                                               alg_params,
                                               context=context,
                                               feedback=feedback,
                                               is_child_algorithm=True)
    results['Outputfinal2'] = outputs['RefactorFields']['OUTPUT']
    return results
def EineDXF(uiParent, mLay_crs, bZielSave, sOutForm, grpProjekt, AktList, Kern, AktOpt, DXFDatNam, zielPfadOrDatei, qPrjDatName, sOrgCharSet, bLayer, bFormatText, bUseColor4Point, bUseColor4Line, bUseColor4Poly, dblFaktor, chkTransform, DreiPassPunkte):
    """Convert one DXF file to Shapefile or GeoPackage layers and load them into QGIS.

    For every entity type listed in *AktList* (entries of the form
    "name:geomtype:sqlfilter"), runs gdal/ogr 'convertformat' on the DXF,
    post-processes the attribute table, and adds the result to the layer tree
    group *grpProjekt* — either one layer per geometry type (bLayer False) or
    one sub-group per DXF layer name (bLayer True).  Supports both QGIS 2
    (myqtVersion == 4) and QGIS 3 (myqtVersion == 5) APIs.

    Returns True on normal completion, False when the conversion raised.
    """
    # 23.02.17
    # Load processing only here, to speed up QGIS startup
    import processing
    from processing.core.Processing import Processing
    sCharSet = sOrgCharSet
    myGroups = {}          # DXF layer name -> layer-tree group (bLayer mode)
    # ----------------------------------------------------------------------------
    # Adapt the source file
    # ----------------------------------------------------------------------------
    # (At least) under Windows there are problems when the file name contains
    # umlauts; the only clean approach found was to work on a copy of the file.
    # 21.02.17: always copy, because runalg locks the file and never releases it
    # 19.03.18: reconsider/test: some DXFs reach several GBytes through
    # dictionaries — copying those is not great
    if ifAscii(DXFDatNam):
        korrDXFDatNam = DXFDatNam
    else:
        uiParent.SetAktionGesSchritte(2)
        uiParent.SetAktionText(tr("Copy DXF-File"))
        uiParent.SetAktionAktSchritt(1)
        korrDXFDatNam = (EZUTempDir() + str(uuid.uuid4()) + '.dxf')
        copyfile(DXFDatNam, korrDXFDatNam)
    # Build the ogr2ogr "-gcp" option string from the ground control points
    # when a transformation is requested.
    optGCP = ""
    if chkTransform:
        for p in range(len(DreiPassPunkte)):
            optGCP = optGCP + " -gcp "
            for k in range(len(DreiPassPunkte[p])):
                optGCP = optGCP + str(DreiPassPunkte[p][k][0]) + " " + str(DreiPassPunkte[p][k][1]) + " "
    zE = 0
    uiParent.SetAktionGesSchritte(len(AktList))
    for p in AktList:
        zE = zE + 1
        v = p.split(":")  # v[0]=name, v[1]=ogr -nlt geometry, v[2]=SQL geometry filter
        if myqtVersion == 5:
            uiParent.SetAktionText(tr("Edit Entity: " + Kern + v[0]))
        else:
            uiParent.SetAktionText(tr("Edit Entity: " + Kern.encode("utf8") + v[0]))
        uiParent.SetAktionAktSchritt(zE)
        if sOutForm == "SHP":
            iOutForm = 0  # 0 — ESRI shapefile (value appears unused in this block)
            shpdat = zielPfadOrDatei + Kern + v[0] + '.shp'
            qmldat = zielPfadOrDatei + Kern + v[0] + '.qml'
        else:
            qmldat = EZUTempDir() + str(uuid.uuid4()) + '.qml'
            gpkgTable = Kern + v[0]
        # ----------------------------------------------------------------------------
        # Adapt the target file
        # ----------------------------------------------------------------------------
        # Target path/name must not contain umlauts -> convert to a temp file.
        # 21.02.17: spaces in the path do not work either, so always use a copy.
        if sOutForm == "SHP":
            if bZielSave:
                korrSHPDatNam = (EZUTempDir() + str(uuid.uuid4()) + '.shp')
            else:
                korrSHPDatNam = shpdat
        bKonvOK = False
        try:
            if sOutForm == "SHP":
                opt = ('-skipfailure %s -nlt %s %s -sql "select *, ogr_style from entities where OGR_GEOMETRY %s"') % (AktOpt, v[1], optGCP, v[2])
                hinweislog('convertformat' + ',' + korrDXFDatNam + ',' + '0' + ',' + opt + ',' + '"' + korrSHPDatNam + '"')
                if myqtVersion == 4:
                    pAntw = processing.runalg('gdalogr:convertformat', korrDXFDatNam, 0, opt, korrSHPDatNam)
                else:
                    # the output format to create is defined by the file extension
                    pList = {'INPUT': korrDXFDatNam, 'OPTIONS': opt, 'OUTPUT': korrSHPDatNam}
                    pAntw = processing.run('gdal:convertformat', pList)
                if os.path.exists(korrSHPDatNam):
                    bKonvOK = True
            else:
                # GeoPackage output — only defined for QGIS 3.x
                if sCharSet == "System":
                    ogrCharSet = locale.getdefaultlocale()[1]
                else:
                    ogrCharSet = sCharSet
                ogrCharSet = ogrCharSet.upper()
                opt = '-append -update --config DXF_ENCODING "' + ogrCharSet + '" '
                opt = opt + ('%s -nlt %s %s -sql "select *, ogr_style from entities where OGR_GEOMETRY %s" -nln %s ') % (AktOpt, v[1], optGCP, v[2], gpkgTable)
                hinweislog('convertformat' + ',' + korrDXFDatNam + ',' + '0' + ',' + opt + ',' + '"' + zielPfadOrDatei + '"')
                pList = {'INPUT': korrDXFDatNam, 'OPTIONS': opt, 'OUTPUT': zielPfadOrDatei}
                pAntw = processing.run('gdal:convertformat', pList)
                if os.path.exists(zielPfadOrDatei): bKonvOK = True
        except:
            addFehler(tr("Error processing: " + DXFDatNam))
            return False
        if pAntw is None:
            addFehler(tr("process 'gdalogr:convertformat' could not start please restart QGIS"))
        else:
            if myqtVersion == 5 and sOutForm == "SHP":
                # QGIS 3.0 currently has a nasty problem: writing the DBF
                # crashes when the encoding is cp1252
                # --> convert the shape (DBF) to UTF-8
                aktShapeName = korrSHPDatNam
                korrSHPDatNam = (EZUTempDir() + str(uuid.uuid4()) + '.shp')  # new file name
                ShapeCodepage2Utf8(aktShapeName, korrSHPDatNam, sOrgCharSet)  # 28.03.18 sOrgCharSet
                sCharSet = "utf-8"
            if bKonvOK:
                if sOutForm == "SHP":
                    attTableEdit(sOutForm, korrSHPDatNam, bFormatText, sCharSet)
                    if korrSHPDatNam != shpdat:
                        # rename the (possibly corrected) temp files back to the target name
                        move(korrSHPDatNam, shpdat)
                        for rest in glob(korrSHPDatNam[0:-4] + '.*'):
                            move(rest, shpdat[0:-4] + rest[-4:])
                    # ogr2ogr does not write the EPSG code into the .prj file,
                    # which leads to different EPSG codes on loading -> use a .qpj
                    copyfile(qPrjDatName, shpdat[0:-3] + "qpj")
                    Layer = QgsVectorLayer(shpdat, "entities" + v[0], "ogr")
                    # probably one of these two calls suffices
                    # unknown codepages become "System"
                    Layer.setProviderEncoding(sCharSet)
                    Layer.dataProvider().setEncoding(sCharSet)
                else:
                    attTableEdit(sOutForm, zielPfadOrDatei, bFormatText, sCharSet, gpkgTable)
                    sLayer = "%s|layername=%s" % (zielPfadOrDatei, gpkgTable)  # |geometrytype=Point"
                    Layer = QgsVectorLayer(sLayer, "entities" + v[0], "ogr")
                    Layer.setCrs(mLay_crs)
                if Layer.featureCount() < 0: Layer = None  # QGIS3 returns -2 on errors; "if Layer" would then misbehave
                if Layer:
                    # Check whether the layer holds anything useful — ogr often
                    # creates shapes without coordinates.
                    bLayerMitDaten = False
                    if Layer.featureCount() > 0:
                        koo = Layer.extent()
                        if koo.xMinimum() == 0 and koo.yMinimum() == 0 and koo.xMaximum() == 0 and koo.yMaximum() == 0:
                            # this looks like garbage
                            addHinweis("Empty coordinates for " + opt)
                        else:
                            bLayerMitDaten = True
                    else:
                        addHinweis("No entities for " + opt)
                    # the layer contains data
                    if bLayerMitDaten:
                        if not bLayer:
                            # "Group by layer" is disabled: one layer per geometry type
                            # 17.01.18: works on 2.18 and 2.99
                            # 28.03.17: this line is needed so "processing.runalg" runs cleanly !!???
                            if myqtVersion == 4:
                                QgsMapLayerRegistry.instance().addMapLayer(Layer, False)
                            else:
                                Layer = QgsProject.instance().addMapLayer(Layer, False)  # not in the legend
                            ml = grpProjekt.addLayer(Layer)
                            ml.setExpanded(False)
                            Rend = kat4Layer(Layer, bUseColor4Line, bUseColor4Poly)
                            if Rend is not None:
                                if myqtVersion == 4:
                                    Layer.setRendererV2(Rend)
                                else:
                                    Layer.setRenderer(Rend)
                            else:
                                addFehler("Categorization for " + opt + " could not be executed")
                            if Layer.geometryType() == 0:
                                labelingDXF(Layer, bFormatText, bUseColor4Point, dblFaktor)
                            if Layer.geometryType() == 0 and myqtVersion == 5:
                                Layer.saveNamedStyle(qmldat)
                                EditQML(qmldat)
                                Layer.loadNamedStyle(qmldat)
                        else:
                            # "Group by layer" is enabled: one extra group per DXF layer
                            if myqtVersion == 4:
                                fni = Layer.fieldNameIndex('Layer')
                            else:
                                fni = Layer.dataProvider().fieldNameIndex('Layer')
                            unique_values = Layer.dataProvider().uniqueValues(fni)
                            zL = 0
                            for AktLayerNam in unique_values:
                                if AktLayerNam == NULL: AktLayerNam = "Null"
                                uiParent.SetAktionGesSchritte(len(unique_values))
                                uiParent.SetAktionText("Edit Layer: " + AktLayerNam)
                                uiParent.SetAktionAktSchritt(zL)
                                zL = zL + 1
                                if sOutForm == "SHP":
                                    Layer = QgsVectorLayer(shpdat, AktLayerNam + '(' + v[0] + ')', "ogr")
                                    # probably one of these two calls suffices
                                    # unknown codepages become "System"
                                    Layer.setProviderEncoding(sCharSet)
                                    Layer.dataProvider().setEncoding(sCharSet)
                                    Layer.setSubsetString("Layer = '" + AktLayerNam + "'")
                                else:
                                    Layer = QgsVectorLayer(sLayer, AktLayerNam + '(' + v[0] + ')', "ogr")
                                    Layer.setCrs(mLay_crs)
                                    Layer.setSubsetString("Layer = '" + AktLayerNam + "'")
                                if Layer.featureCount() < 0: Layer = None  # QGIS3 returns -2 on errors
                                if myqtVersion == 4:
                                    QgsMapLayerRegistry.instance().addMapLayer(Layer, False)
                                else:
                                    Layer = QgsProject.instance().addMapLayer(Layer, False)  # not in the legend
                                if AktLayerNam not in myGroups:
                                    gL = grpProjekt.addGroup(AktLayerNam)
                                    myGroups[AktLayerNam] = gL
                                    gL.addLayer(Layer)
                                    gL.setExpanded(False)
                                else:
                                    myGroups[AktLayerNam].addLayer(Layer)
                                if Layer.geometryType() == 0:
                                    # point layer: single symbol + DXF labeling
                                    if myqtVersion == 4:
                                        symbol = QgsSymbolV2.defaultSymbol(Layer.geometryType())
                                        Layer.setRendererV2(QgsSingleSymbolRendererV2(symbol))
                                    else:
                                        symbol = QgsSymbol.defaultSymbol(Layer.geometryType())
                                        Layer.setRenderer(QgsSingleSymbolRenderer(symbol))
                                    symbol.setSize(0.1)
                                    labelingDXF(Layer, bFormatText, bUseColor4Point, dblFaktor)
                                if Layer.geometryType() == 0 and myqtVersion == 5:
                                    Layer.saveNamedStyle(qmldat)
                                    EditQML(qmldat)
                                    Layer.loadNamedStyle(qmldat)
                                if Layer.geometryType() == 1 and bUseColor4Line:
                                    # line layer: data-defined color from the "color" attribute
                                    if myqtVersion == 4:
                                        lineMeta = QgsSymbolLayerV2Registry.instance().symbolLayerMetadata("SimpleLine")
                                        symbol = QgsSymbolV2.defaultSymbol(Layer.geometryType())
                                        renderer = QgsRuleBasedRendererV2(symbol)
                                    else:
                                        registry = QgsSymbolLayerRegistry()
                                        lineMeta = registry.symbolLayerMetadata("SimpleLine")
                                        symbol = QgsSymbol.defaultSymbol(Layer.geometryType())
                                        renderer = QgsRuleBasedRenderer(symbol)
                                    root_rule = renderer.rootRule()
                                    rule = root_rule.children()[0].clone()
                                    symbol.deleteSymbolLayer(0)
                                    qmap = {}
                                    qmap["color_dd_active"] = "1"
                                    qmap["color_dd_expression"] = "\"color\""
                                    qmap["color_dd_field"] = "color"
                                    qmap["color_dd_useexpr"] = "0"
                                    lineLayer = lineMeta.createSymbolLayer(qmap)
                                    symbol.appendSymbolLayer(lineLayer)
                                    rule.setSymbol(symbol)
                                    # NOTE(review): appends the rule to itself —
                                    # root_rule.appendChild(rule) may have been intended; confirm
                                    rule.appendChild(rule)
                                    if myqtVersion == 4:
                                        Layer.setRendererV2(renderer)
                                    else:
                                        Layer.setRenderer(renderer)
                                if Layer.geometryType() == 2 and bUseColor4Poly:
                                    # polygon layer: data-defined fill color + 50% transparency
                                    if myqtVersion == 4:
                                        fillMeta = QgsSymbolLayerV2Registry.instance().symbolLayerMetadata("SimpleFill")
                                        symbol = QgsSymbolV2.defaultSymbol(Layer.geometryType())
                                        renderer = QgsRuleBasedRendererV2(symbol)
                                    else:
                                        registry = QgsSymbolLayerRegistry()
                                        fillMeta = registry.symbolLayerMetadata("SimpleFill")
                                        symbol = QgsSymbol.defaultSymbol(Layer.geometryType())
                                        renderer = QgsRuleBasedRenderer(symbol)
                                    root_rule = renderer.rootRule()
                                    rule = root_rule.children()[0].clone()
                                    symbol.deleteSymbolLayer(0)
                                    qmap = {}
                                    qmap["color_dd_active"] = "1"
                                    # NOTE(review): expression field "fccolor" vs field name
                                    # "fcolor" — possibly a typo in one of them; confirm
                                    qmap["color_dd_expression"] = "\"fccolor\""
                                    qmap["color_dd_field"] = "fcolor"
                                    qmap["color_dd_useexpr"] = "0"
                                    lineLayer = fillMeta.createSymbolLayer(qmap)
                                    symbol.appendSymbolLayer(lineLayer)
                                    rule.setSymbol(symbol)
                                    # NOTE(review): appends the rule to itself —
                                    # root_rule.appendChild(rule) may have been intended; confirm
                                    rule.appendChild(rule)
                                    if myqtVersion == 4:
                                        Layer.setRendererV2(renderer)
                                        Layer.setLayerTransparency(50)
                                    else:
                                        Layer.setRenderer(renderer)
                                        Layer.setOpacity(0.5)
                                # 27.02.18: always save, and reload for point + qt5 (see above)
                                if sOutForm == "SHP":
                                    Layer.saveNamedStyle(qmldat)
                                else:
                                    Layer.saveStyleToDatabase(gpkgTable, gpkgTable, True, "")
                    else:
                        # no usable data: drop the layer reference so the files can be deleted
                        Layer = None
                        if not DelShapeDatBlock(shpdat):
                            DelShapeDatBlock(shpdat)
                else:
                    addHinweis(tr("Option '%s' could not be executed") % opt)
            else:
                if sOutForm == "SHP":
                    addFehler(tr("Creation '%s' failed. Please look to the QGIS log message panel (OGR)") % shpdat)
                else:
                    addFehler(tr("Creation '%s' failed. Please look to the QGIS log message panel (OGR)") % zielPfadOrDatei)
    uiParent.SetAktionGesSchritte(2)
    uiParent.SetAktionText(tr("Switch on the display"))
    uiParent.SetAktionAktSchritt(1)
    iface.mapCanvas().setRenderFlag(True)
    return True
##Select by expression=name ##Tests=group #inputs ##INPUT_LAYER=vector ##OUTPUT_LAYER=vectorDestination import processing result = processing.run("qgis:selectbyexpression", {'INPUT': INPUT_LAYER, 'EXPRESSION': '"id2" = 0 and "id" > 7', 'METHOD': 1}, context=context, feedback=feedback) result = processing.run("qgis:saveselectedfeatures", {'INPUT': result["OUTPUT"], 'OUTPUT': parameters['OUTPUT_LAYER']}, context=context, feedback=feedback) OUTPUT_LAYER = result['OUTPUT']
def despeckeleGamma(dlg, conf, dir_raster_src, dir_dest, rasterName, gammaName, extension_input_raster):
    """Run a Gamma-MAP despeckle filter on a radar raster.

    Radius and number-of-looks are read from the dialog's spin boxes.  With
    the OTB backend selected, runs otb:Despeckle (gammamap filter); the GRASS
    backend is still a stub that only prints debug output.

    Returns the filtered QgsRasterLayer, or None on any error (the user is
    informed through message boxes and the dialog log).
    """
    messInfo(dlg, "Calcul du despeckele Gamma.")
    messInfo(dlg, "")
    rasterPath = dir_raster_src + os.sep + rasterName + extension_input_raster
    gammaPath = dir_dest + os.sep + gammaName + EXT_RASTER
    radius = dlg.spinBoxRadius.value()
    nb_looks = dlg.doubleSpinBoxLooks.value()
    # Remove a stale output file; abort if it is locked.
    if os.path.exists(gammaPath):
        try:
            os.remove(gammaPath)
        except OSError:
            QMessageBox.information(
                None, "Attention !!!",
                gammaPath + " ne peut pas être effacé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
                QMessageBox.Ok, QMessageBox.NoButton)
            messErreur(dlg, gammaPath + " ne peut pas être effacé.")
            return None
    if conf.rbOTB.isChecked():
        # --- Gamma despeckle via OTB ---
        try:
            parameters = {
                "in": rasterPath,
                "out": gammaPath,
                "filter": 'gammamap',
                "filter.gammamap.rad": radius,
                "filter.gammamap.nblooks": nb_looks,
                "outputpixeltype": 2,
                "ram": 128
            }
            processing.run('otb:Despeckle', parameters)
        except Exception:
            messErreur(dlg, "Erreur de traitement sur otb:Despeckle Gamma.")
            return None
    else:
        # --- Gamma despeckle via GRASS: not wired up yet ---
        # The stub only prints debug output and the algorithm help; no output
        # file is produced, so the existence check below will report failure.
        try:
            print("DEBUG lancement grass:despeckle Gamma")
            processing.algorithmHelp("grass:i.despeckle")
            print("DEBUG fin grass:despeckle Gamma")
        except Exception:
            messErreur(dlg, "Erreur de traitement sur grass:despeckle.")
            return None
    # Check the filtered raster was actually produced and loads correctly.
    if os.path.exists(gammaPath):
        gamma = QgsRasterLayer(gammaPath, gammaName)
    else:
        QMessageBox.information(
            None, "Attention !!!",
            gammaPath + " n'a pas été créé. Vérifiez que le fichier n'est pas verrouillé par un autre utilisateur ou que le fichier peut être effacé manuellement (droits d'écriture sur le répertoire).",
            QMessageBox.Ok, QMessageBox.NoButton)
        messErreur(dlg, gammaPath + " n'a pas été créé.")
        return None
    if not gamma.isValid():
        messErreur(dlg, gammaPath + " ne peut pas être chargé.")
        return None
    return gamma