def processAlgorithm(self, parameters, context, feedback):
    """Run a free-form SQL query against a named PostGIS connection and
    expose the result as a vector layer scheduled for loading on completion.

    Returns a mapping of ``self.OUTPUT`` to the new layer's id.
    Raises QgsProcessingException when the resulting layer is invalid.
    """
    connection = self.parameterAsString(parameters, self.DATABASE, context)
    id_field = self.parameterAsString(parameters, self.ID_FIELD, context)
    geom_field = self.parameterAsString(parameters, self.GEOMETRY_FIELD, context)
    uri = postgis.uri_from_name(connection)

    # Collapse the query onto one line, strip any trailing semicolon, and
    # wrap it in parentheses so it acts as a sub-select data source.
    raw_sql = self.parameterAsString(parameters, self.SQL, context)
    one_line_sql = raw_sql.replace('\n', ' ')
    uri.setDataSource("", "(" + one_line_sql.rstrip(';') + ")", geom_field, "", id_field)

    vlayer = QgsVectorLayer(uri.uri(), "layername", "postgres")
    if not vlayer.isValid():
        raise QgsProcessingException(
            self.tr("""This layer is invalid! Please check the PostGIS log for error messages."""))

    # Keep the layer alive in the context store and ask for it to be added
    # to the project when the algorithm completes.
    context.temporaryLayerStore().addMapLayer(vlayer)
    context.addLayerToLoadOnCompletion(
        vlayer.id(),
        QgsProcessingContext.LayerDetails('SQL layer', context.project(), self.OUTPUT))
    return {self.OUTPUT: vlayer.id()}
def loadOnCompletion(self):
    """Register both result layers for loading once the algorithm completes.

    NOTE: the very same LayerDetails instance (named 'stations') is reused
    for both registrations, exactly as before.
    """
    details = QgsProcessingContext.LayerDetails('stations', QgsProject.instance())
    for param_key in ('CurrentStationsLayer', 'CurrentPredictionsLayer'):
        self.alg.context.addLayerToLoadOnCompletion(
            self.alg.parameters[param_key], details)
def processAlgorithm(self, parameters, context, feedback):
    """Load the result of an arbitrary SQL query on a registered PostgreSQL
    connection as a vector layer and schedule it for loading on completion.

    Returns a mapping of ``self.OUTPUT`` to the new layer's id.
    Raises QgsProcessingException when the connection cannot be resolved or
    the resulting layer is invalid.
    """
    connection_name = self.parameterAsConnectionName(parameters, self.DATABASE, context)
    id_field = self.parameterAsString(parameters, self.ID_FIELD, context)
    geom_field = self.parameterAsString(parameters, self.GEOMETRY_FIELD, context)

    # resolve connection details to uri
    try:
        metadata = QgsProviderRegistry.instance().providerMetadata('postgres')
        connection = metadata.createConnection(connection_name)
    except QgsProviderConnectionException:
        raise QgsProcessingException(
            self.tr('Could not retrieve connection details for {}').format(connection_name))
    uri = QgsDataSourceUri(connection.uri())

    # One-line the query and mount it as a parenthesised sub-select source.
    query = self.parameterAsString(parameters, self.SQL, context).replace('\n', ' ')
    uri.setDataSource("", "(" + query.rstrip(';') + ")", geom_field, "", id_field)

    vlayer = QgsVectorLayer(uri.uri(), "layername", "postgres")
    if not vlayer.isValid():
        raise QgsProcessingException(self.tr("""This layer is invalid! Please check the PostGIS log for error messages."""))

    context.temporaryLayerStore().addMapLayer(vlayer)
    context.addLayerToLoadOnCompletion(
        vlayer.id(),
        QgsProcessingContext.LayerDetails('SQL layer', context.project(), self.OUTPUT))
    return {self.OUTPUT: vlayer.id()}
def processAlgorithm(self, parameters, context, feedback):
    """Load the selected SQL layers into the project, attaching a QML style
    post-processor when one is configured for the layer.

    Returns ``{self.OUTPUT: [layer ids]}``.
    """
    files = self.parameterAsEnums(parameters, self.FILES, context)
    sql_layers = [self.options[i] for i in sorted(files)]
    result = {self.OUTPUT: []}
    total = len(files)
    for current, sql_layer in enumerate(sql_layers):
        feedback.pushInfo('Chargement de : {}'.format(sql_layer.name))
        layer = sql_layer.vector_layer()
        context.temporaryLayerStore().addMapLayer(layer)
        details = QgsProcessingContext.LayerDetails(
            sql_layer.name, context.project(), self.OUTPUT)
        context.addLayerToLoadOnCompletion(layer.id(), details)
        if sql_layer.qml:
            # Apply the layer's QML style once it is loaded in the project.
            post_processor = StylesPostProcessor.create(sql_layer.qml)
            context.layerToLoadOnCompletionDetails(layer.id()).setPostProcessor(post_processor)
        result[self.OUTPUT].append(layer.id())
        feedback.setProgress(int(current / total * 100))
    return result
def add_layer_details(lyrname):
    """Resolve *lyrname* to a map layer and register it for loading on
    completion under ``outputName``.

    Returns the resolved layer's display name, or None when the layer
    could not be resolved or an error occurred.

    NOTE(review): relies on ``destination_project``, ``outputName``,
    ``layerTypeHint``, ``context``, ``LOGGER`` and ``traceback`` from the
    enclosing scope — confirm against the surrounding function.
    """
    # Set empty name as we are calling setOutputLayerName
    details = QgsProcessingContext.LayerDetails("", destination_project, outputName, layerTypeHint)
    try:
        layer = QgsProcessingUtils.mapLayerFromString(
            lyrname, context, typeHint=details.layerTypeHint)
        if layer is not None:
            # Fix layer name
            # Because if details name is empty it will be set to the file name
            # see https://qgis.org/api/qgsprocessingcontext_8cpp_source.html#l00128
            # XXX Make sure that Processing/Configuration/PREFER_FILENAME_AS_LAYER_NAME
            # setting is set to false (see processfactory.py:129)
            details.setOutputLayerName(layer)
            LOGGER.debug("Layer name for '%s' set to '%s'", outputName, layer.name())
            context.addLayerToLoadOnCompletion(lyrname, details)
            return layer.name()
        else:
            LOGGER.warning("No layer found for %s", lyrname)
    except Exception:
        # Best-effort: log the full traceback and fall through to None.
        LOGGER.error(
            "Processing: Error loading result layer {}:\n{}".format(
                lyrname, traceback.format_exc()))
    return None
def XYZ(self, context, url, name):
    """Create an XYZ tile raster layer for *url* and schedule it for
    loading on completion; returns the new raster layer."""
    layer = QgsRasterLayer("type=xyz&url=" + url, name, "wms")
    context.temporaryLayerStore().addMapLayer(layer)
    details = QgsProcessingContext.LayerDetails(name, context.project(), self.OUTPUT)
    context.addLayerToLoadOnCompletion(layer.id(), details)
    return layer
def processAlgorithm(self, parameters, context, feedback):
    """Create a new GeoPackage project of type ``self.project_type``:
    builds the .gpkg, fills the metadata and glossary tables, and loads
    every produced layer into the project.

    Returns the geopackage path and the list of loaded layer ids.
    """
    base_name = self.parameterAsString(parameters, self.FILE_GPKG, context)
    project_name = self.parameterAsString(parameters, self.PROJECT_NAME, context)
    extent = self.parameterAsExtent(parameters, self.PROJECT_EXTENT, context)
    crs = self.parameterAsCrs(parameters, self.PROJECT_CRS, context)
    feedback.pushInfo('Création du projet de {type} : {name}'.format(
        type=self.project_type.label, name=project_name))
    # Force a .gpkg extension on the destination path.
    parent_base_name = str(Path(base_name).parent)
    if not base_name.endswith('.gpkg'):
        base_name = os.path.join(parent_base_name, Path(base_name).stem + '.gpkg')
    if os.path.exists(base_name):
        # Existing file is overwritten; only warn the user.
        feedback.reportError('Le fichier existe déjà. Ré-écriture du fichier…')
    self.create_geopackage(self.project_type, base_name, crs,
                           context.project().transformContext())
    output_layers = self.load_layers(self.project_type, base_name, feedback)
    # Add metadata
    feature = QgsFeature(output_layers['metadata'].fields())
    feature.setAttribute('project_name', project_name)
    feature.setAttribute('crs', str(crs.authid()))
    feature.setAttribute('extent', extent.asWktPolygon())
    feature.setAttribute('project_type', self.project_type.label)
    with edit(output_layers['metadata']):
        output_layers['metadata'].addFeature(feature)
    # Add glossary: one row per label, keys are 1-based.
    for table, labels in self.glossary.items():
        with edit(output_layers[table]):
            for i, label in enumerate(labels):
                feature = QgsFeature(output_layers[table].fields())
                feature.setAttribute('key', i + 1)
                feature.setAttribute('label', label)
                output_layers[table].addFeature(feature)
    # Load layers in the project
    output_id = []
    for layer in output_layers.values():
        context.temporaryLayerStore().addMapLayer(layer)
        context.addLayerToLoadOnCompletion(
            layer.id(),
            QgsProcessingContext.LayerDetails(layer.name(), context.project(),
                                              self.OUTPUT_LAYERS))
        # NOTE(review): setTitle is re-applied on every iteration — harmless
        # but could be hoisted out of the loop; confirm before changing.
        context.project().setTitle(project_name)
        output_id.append(layer.id())
    return {self.FILE_GPKG: base_name, self.OUTPUT_LAYERS: output_id}
def load_shapefile(self, shp_file_path, filename, table, context, feedback):
    """Load an exported COVADIS shapefile into the processing context and
    schedule it for loading on completion.

    Falls back to the sibling .dbf when the .shp is absent (geometry-less
    tables are exported as .dbf only).

    :param shp_file_path: path of the exported .shp file
    :param filename: display name for the loaded layer
    :param table: table name, used in the temporary layer name
    :returns: True on success, False when no file could be found
    """
    # Load the corresponding layer into the project.
    if not os.path.exists(shp_file_path):
        shp_file_path = shp_file_path.replace(".shp", ".dbf")
        if not os.path.exists(shp_file_path):
            feedback.reportError('Fichier introuvable lors du chargement')
            return False
    export_layer = QgsVectorLayer(shp_file_path, "Export COVADIS " + table, 'ogr')
    context.temporaryLayerStore().addMapLayer(export_layer)
    context.addLayerToLoadOnCompletion(
        export_layer.id(),
        QgsProcessingContext.LayerDetails(filename, context.project(), self.OUTPUT)
    )
    # Fix: the success path previously fell through and returned None while
    # the failure path returned False; make success explicitly truthy.
    return True
def addLayerToLoad(self, layer, outputName, destName, context, destinationProject):
    """ Add a layer to the context layer store so that it can be reused later """
    layer.setName(destName)
    context.temporaryLayerStore().addMapLayer(layer)
    if destinationProject:
        # Only register for project loading when a destination project exists.
        details = QgsProcessingContext.LayerDetails(destName, destinationProject, outputName)
        context.addLayerToLoadOnCompletion(layer.id(), details)
    return layer.id()
def initLayer(self, context, uri, table, geom, sql, id):
    """Build a PostGIS vector layer on the 'raepa' schema and register it
    for loading on completion; returns the layer."""
    uri.setDataSource("raepa", table, geom, sql, id)
    layer = QgsVectorLayer(uri.uri(), table, "postgres")
    context.temporaryLayerStore().addMapLayer(layer)
    details = QgsProcessingContext.LayerDetails(
        table, context.project(), self.OUTPUT_LAYERS)
    context.addLayerToLoadOnCompletion(layer.id(), details)
    return layer
def initLayer(self, context, uri, schema, table, geom, sql, pkey=None):
    """Build a PostGIS vector layer for ``schema.table`` and register it
    for loading on completion.

    :param pkey: optional key column passed to the data source
    :returns: the layer, or False when it is invalid
    """
    # Fix: setDataSource was previously always called without the key
    # column and then a second, redundant time with it when ``pkey`` was
    # given; configure the source exactly once instead.
    if pkey:
        uri.setDataSource(schema, table, geom, sql, pkey)
    else:
        uri.setDataSource(schema, table, geom, sql)
    layer = QgsVectorLayer(uri.uri(), table, "postgres")
    if not layer.isValid():
        return False
    context.temporaryLayerStore().addMapLayer(layer)
    context.addLayerToLoadOnCompletion(
        layer.id(),
        QgsProcessingContext.LayerDetails(table, context.project(), self.OUTPUT))
    return layer
def processAlgorithm(self, parameters, context, feedback):
    """Load every table listed in MAPPING from the given database/schema
    as a PostGIS layer and register it for loading on completion.

    MAPPING values are tuples where index 0 is truthy when the table has
    a geometry column and index 2 is the key column name (as used below).
    Returns ``{self.OUTPUT_LAYERS: [layer ids]}``.
    """
    destination = self.parameterAsFile(parameters, self.DATABASE, context)
    try:
        db = postgis.GeoDB.from_name(destination)
        schema = self.parameterAsFile(parameters, self.SCHEMA, context)
    except QgsProcessingException:
        raise QgsProcessingException(
            tr('* ERROR while getting database "{}"').format(destination))
    database_uri = db.uri
    output_layers = []
    for table, geom in MAPPING.items():
        uri = QgsDataSourceUri(database_uri)
        if Qgis.QGIS_VERSION_INT >= 31000:
            # QGIS >= 3.10 exposes proper setters on the URI.
            uri.setTable(table)
            if geom[0]:
                uri.setGeometryColumn('geom')
        else:
            # Older QGIS: patch the table (and geometry) directly into the
            # serialized URI string.
            uri_string = uri.uri(True)
            if geom[0]:
                uri_string = uri_string.replace(
                    'table=""', 'table="{}" (geom)'.format(table))
            else:
                uri_string = uri_string.replace('table=""', 'table="{}"'.format(table))
            uri = QgsDataSourceUri(uri_string)
        # Schema is updating the table name,
        # so after search&replace
        uri.setSchema(schema)
        uri.setKeyColumn(geom[2])
        dest_layer = QgsVectorLayer(uri.uri(False), table, 'postgres')
        if not dest_layer.isValid():
            raise QgsProcessingException(
                tr('* ERROR: Can\'t load table "{}" in URI "{}"').format(
                    table, uri.uri()))
        feedback.pushInfo('The layer {} has been loaded'.format(table))
        output_layers.append(dest_layer.id())
        # Add layer to project
        context.temporaryLayerStore().addMapLayer(dest_layer)
        context.addLayerToLoadOnCompletion(
            dest_layer.id(),
            QgsProcessingContext.LayerDetails(table, context.project(),
                                              self.OUTPUT_LAYERS))
    return {self.OUTPUT_LAYERS: output_layers}
def postProcessAlgorithm(self, context, feedback):
    """
    Add resulting layers to map

    :param qgis.core.QgsProcessingContext context: Threadsafe context in
        which a processing algorithm is executed
    :param qgis.core.QgsProcessingFeedback feedback: For providing feedback
        from a processing algorithm
    """
    # self.outputs maps layer id/source -> display name; may be None.
    for layer_id, layer_name in (self.outputs or {}).items():
        details = QgsProcessingContext.LayerDetails(layer_name, context.project())
        context.addLayerToLoadOnCompletion(layer_id, details)
    return {}  # Avoid NoneType can not be converted to a QMap instance
def processAlgorithm(self, parameters, context, feedback): """ Here is where the processing itself takes place. """ # Database connection parameters connection_name = QgsExpressionContextUtils.globalScope().variable( 'raepa_connection_name') msg = '' status = 1 # Set SQL self.setSql(parameters, context, feedback) # Set output layer name self.setLayerName(parameters, context, feedback) # Buid QGIS uri to load layer id_field = 'id' uri = postgis.uri_from_name(connection_name) uri.setDataSource("", "(" + self.SQL + ")", self.GEOM_FIELD, "", id_field) vlayer = QgsVectorLayer(uri.uri(), "layername", "postgres") if not vlayer.isValid(): feedback.pushInfo('SQL = \n' + self.SQL) raise QgsProcessingException("""Cette couche est invalide! Vérifier les logs de PostGIS pour des messages d\'erreurs.""") # Load layer context.temporaryLayerStore().addMapLayer(vlayer) context.addLayerToLoadOnCompletion( vlayer.id(), QgsProcessingContext.LayerDetails(self.LAYER_NAME, context.project(), self.OUTPUT_LAYER)) return { self.OUTPUT_STATUS: status, self.OUTPUT_STRING: msg, self.OUTPUT_LAYER: vlayer.id(), self.OUTPUT_LAYER_RESULT_NAME: self.LAYER_NAME }
def processAlgorithm(self, parameters, context, feedback):
    """Build a layer from ``self.SQL`` (set by ``setSql``) on the connection
    named by the project variable 'gobs_connection_name' and register it for
    loading on completion under ``self.LAYER_NAME``.
    """
    # Database connection parameters
    connection_name = QgsExpressionContextUtils.projectScope(
        context.project()).variable('gobs_connection_name')
    msg = ''
    status = 1
    # Set SQL
    self.setSql(parameters, context, feedback)
    # Set output layer name
    self.setLayerName(parameters, context, feedback)
    # Build QGIS uri to load layer
    id_field = 'id'
    uri = getPostgisConnectionUriFromName(connection_name)
    # Wrap the query as a parenthesised sub-select data source.
    uri.setDataSource("", "(" + self.SQL + ")", self.GEOM_FIELD, "", id_field)
    vlayer = QgsVectorLayer(uri.uri(), "layername", "postgres")
    if not vlayer.isValid():
        # Echo the failing SQL to help diagnose the problem.
        feedback.reportError('SQL = \n' + self.SQL)
        raise QgsProcessingException(
            tr("""This layer is invalid! Please check the PostGIS log for error messages."""))
    # Load layer
    context.temporaryLayerStore().addMapLayer(vlayer)
    context.addLayerToLoadOnCompletion(
        vlayer.id(),
        QgsProcessingContext.LayerDetails(self.LAYER_NAME, context.project(),
                                          self.OUTPUT_LAYER))
    return {
        self.OUTPUT_STATUS: status,
        self.OUTPUT_STRING: msg,
        self.OUTPUT_LAYER: vlayer.id(),
        self.OUTPUT_LAYER_RESULT_NAME: self.LAYER_NAME
    }
def processAlgorithm(self, parameters, context, feedback):
    """
    Here is where the processing itself takes place.

    For each unique value of the chosen field (or each feature id when no
    field is given), computes the upslope catchment of the point on the DEM
    (SAGA), polygonizes it (GDAL), and writes the DN=100 polygons to a
    GeoPackage layer or per-value shapefiles in ``out_directory``.
    """
    source_pts = self.parameterAsSource(parameters, self.INPUT_POINTS, context)
    input_field = self.parameterAsString(parameters, self.INPUT_FIELD, context)
    source_dem = self.parameterAsRasterLayer(parameters, self.INPUT_DEM, context)
    out_directory = self.parameterAsString(parameters, self.OUTPUT_DIR, context)
    out_type_nr = self.parameterAsInt(parameters, self.OUTPUT_TYPE, context)
    # Only the first two supported extensions are offered (gpkg / shp).
    out_type = QgsVectorFileWriter.supportedFormatExtensions()[:2][out_type_nr]
    to_gpkg = out_type == 'gpkg'
    load_results = self.parameterAsBool(parameters, self.LOAD_RESULTS, context)
    if source_pts is None:
        raise QgsProcessingException(
            self.invalidSourceError(parameters, self.INPUT_POINTS))
    if source_dem is None:
        raise QgsProcessingException(
            self.invalidSourceError(parameters, self.INPUT_DEM))
    feedback.pushInfo("Input data loaded! Creating catchments...")
    feedback.setProgress(1)
    unique_field = input_field if input_field else ""
    if unique_field:
        field_idx = source_pts.fields().lookupField(unique_field)
        unique_values = source_pts.uniqueValues(field_idx)
    else:
        # No field chosen: one catchment per feature, keyed by feature id.
        unique_values = [f.id() for f in source_pts.getFeatures()]
    feedback.pushInfo(f"Creating directory: {out_directory}")
    mkdir(out_directory)
    bname = f"catchment{'s' if to_gpkg else ''}"
    output_basename = os.path.join(out_directory, bname)
    # Compute the number of steps to display within the progress bar.
    total_nr = len(unique_values)
    total = 100. / total_nr if source_pts.featureCount() else 1
    output_layers = []
    for i, unique_value in enumerate(unique_values):
        # Stop the algorithm if cancel button has been clicked
        if feedback.isCanceled():
            break
        # GeoPackage: one file, one layer per value; shapefile: one file
        # per value.
        table = f"catchment_{unique_value}" if to_gpkg else ""
        file_mod = "" if to_gpkg else f"_{unique_value}"
        filename = f"{output_basename}{file_mod}"
        # Fix: the destination previously used a hard-coded placeholder
        # instead of the computed ``filename`` (which was never used), so
        # every result targeted the same bogus path.
        destination = f"{filename}.{out_type}"
        output_uri = destination + (f"|layername={table}" if to_gpkg else "")
        feedback.pushInfo(self.tr('Creating layer: {}').format(destination))
        if unique_field:
            req_filter = f"{QgsExpression.quotedColumnRef(unique_field)}={QgsExpression.quotedValue(unique_value)}"
            req = QgsFeatureRequest().setFilterExpression(req_filter)
        else:
            req = QgsFeatureRequest(unique_value)  # feature id
        for source_pt in source_pts.getFeatures(req):
            if feedback.isCanceled():
                break
            # Get x and y coordinate from point feature
            geom = source_pt.geometry()
            p = geom.asPoint()
            x = p.x()
            y = p.y()
            feedback.pushInfo(
                'Creating upslope area for point ({:.2f}, {:.2f}) - {} of {}'
                .format(x, y, i + 1, total_nr))
            # Calculate catchment raster from point feature
            catchraster = processing.run(
                "saga:upslopearea", {
                    'TARGET': None,
                    'TARGET_PT_X': x,
                    'TARGET_PT_Y': y,
                    'ELEVATION': source_dem,
                    'SINKROUTE': None,
                    'METHOD': 0,
                    'CONVERGE': 1.1,
                    'AREA': 'TEMPORARY_OUTPUT'
                },
                context=context,
                feedback=feedback,
            )
            # Polygonize raster catchment
            catchpoly = processing.run(
                "gdal:polygonize", {
                    'INPUT': catchraster["AREA"],
                    'BAND': 1,
                    'FIELD': 'DN',
                    'EIGHT_CONNECTEDNESS': False,
                    'OUTPUT': 'TEMPORARY_OUTPUT'
                },
                context=context,
                feedback=feedback,
            )
            # Select features having DN = 100 (the upslope area mask value)
            catchpoly_lyr = QgsProcessingUtils.mapLayerFromString(
                catchpoly["OUTPUT"], context=context)
            catchpoly_lyr.selectByExpression('"DN"=100')
            options = QgsVectorFileWriter.SaveVectorOptions()
            options.driverName = "GPKG" if to_gpkg else "ESRI Shapefile"
            options.layerName = table
            options.actionOnExistingFile = QgsVectorFileWriter.CreateOrOverwriteLayer
            options.onlySelectedFeatures = True
            trans_context = QgsCoordinateTransformContext()
            write_result, error_message = QgsVectorFileWriter.writeAsVectorFormatV2(
                catchpoly_lyr, destination, trans_context, options)
            if write_result != 0:
                # First write may fail when the dataset does not exist yet;
                # retry with the create-or-overwrite-file action.
                feedback.pushInfo(f"Initial write failed: {error_message}")
                options.actionOnExistingFile = QgsVectorFileWriter.CreateOrOverwriteFile
                write_result, error_message = QgsVectorFileWriter.writeAsVectorFormatV2(
                    catchpoly_lyr, destination, trans_context, options)
            feedback.pushInfo(
                f"Final write attempt: {write_result} == 0 -> SUCCESS or {error_message}"
            )
        output_layer = QgsProcessingUtils.mapLayerFromString(
            output_uri, context=context)
        output_layers.append(output_uri)
        if load_results:
            context.temporaryLayerStore().addMapLayer(output_layer)
            context.addLayerToLoadOnCompletion(
                output_layer.id(),
                QgsProcessingContext.LayerDetails(
                    table if to_gpkg else f"catchment {unique_value}",
                    context.project(), self.OUTPUT_LAYERS))
        feedback.setProgress(int((i + 1) * total))
    return {
        self.OUTPUT_DIR: out_directory,
        self.OUTPUT_LAYERS: output_layers
    }
def processAlgorithm(self, parameters, context, feedback):
    """Run the Chloe console command, collect its outputs, and optionally
    load the produced ASC raster, CSV table and/or per-file rasters of the
    output directory into the project.

    NOTE(review): assumes OUTPUT_ASC/OUTPUT_CSV/OUTPUT_DIR parameter values
    are dicts with 'openLayer' and 'data' keys — confirm against the custom
    parameter types used by this provider.
    """
    self.PreRun(parameters, context, feedback)
    commands = self.getConsoleCommands(parameters, context, feedback)
    ChloeUtils.runChloe(commands, feedback)
    print('parameters : {}'.format(str(parameters)))
    # Auto generate outputs: dict {'name parameter' : 'value', ...}
    for output in self.destinationParameterDefinitions():
        print(str(output) + " " + str(output.metadata()))
    results = {}
    for o in self.outputDefinitions():
        if o.name() in parameters:
            results[o.name()] = parameters[o.name()]
    # Values computed by the run itself override/extend the parameter copies.
    for k, v in self.output_values.items():
        results[k] = v
    # Load OUTPUT_ASC on temp layer on context id checked box checked
    # (it will be load after in the project)
    #
    #print('context : {}'.format(context))
    #print('parameterDefinitions : {}'.format(self.parameterDefinitions()))
    if ('OUTPUT_ASC' in parameters
            ) and 'openLayer' in parameters['OUTPUT_ASC'] and parameters[
                'OUTPUT_ASC']['openLayer'] == True:
        # Load OUTPUT_ASC on temp layer on context
        # (it will be load after in the project)
        output_asc = parameters['OUTPUT_ASC']["data"]
        rlayer = QgsRasterLayer(output_asc, "hillshade")
        if not rlayer.isValid():
            raise QgsProcessingException(
                self.tr("""Cannot load the output in the application"""))
        rLayerName = ChloeUtils.deduceLayerName(rlayer, self.name())
        ChloeUtils.setLayerSymbology(rlayer, 'continuous.qml')
        context.temporaryLayerStore().addMapLayer(rlayer)
        layerDetails = QgsProcessingContext.LayerDetails(
            rLayerName, context.project(), self.OUTPUT_ASC)
        #postProcess = ChloeOutputLayerPostProcessor()t
        #layerDetails.setPostProcessor(postProcess)
        context.addLayerToLoadOnCompletion(rlayer.id(), layerDetails)
        results[self.OUTPUT_ASC] = rlayer.id()
    if ('OUTPUT_CSV' in parameters
            ) and parameters['OUTPUT_CSV']['openLayer'] == True:
        # Build a delimited-text URI for the produced CSV (';' separated,
        # no geometry).
        uri = "file:///" + str(
            results['OUTPUT_CSV']
        ) + "?type=csv&delimiter=;&detectTypes=yes&geomType=none&subsetIndex=no&watchFile=no"
        output_csv = parameters['OUTPUT_CSV']["data"]
        if 'OUTPUT_ASC' in parameters:
            output_csv = ChloeUtils.adjustExtension(
                output_csv, parameters['OUTPUT_ASC']["data"])
        print("output_csv " + str(uri) + " " + str(output_csv))
        tLayerName = ChloeUtils.deduceLayerName(output_csv, self.name())
        tLayer = QgsVectorLayer(uri, tLayerName, 'delimitedtext')
        if not tLayer.isValid():
            raise QgsProcessingException(
                self.tr("""Cannot load the outpout in the application"""))
        context.temporaryLayerStore().addMapLayer(tLayer)
        layerDetails = QgsProcessingContext.LayerDetails(
            tLayerName, context.project(), self.OUTPUT_CSV)
        context.addLayerToLoadOnCompletion(tLayer.id(), layerDetails)
        results[self.OUTPUT_CSV] = tLayer.id()
    if ('OUTPUT_DIR' in parameters) and self.outputFilenames and parameters[
            'OUTPUT_DIR']['openLayer'] == True:
        # === import all asc for multi algorithm
        outputDir = self.parameterAsString(parameters, self.OUTPUT_DIR, context)
        if outputDir != None:
            self.prepareMultiProjectionFiles()
        for file in self.outputFilenames:
            print(file + " " + os.path.splitext(os.path.basename(file))[0])
            rlayer = QgsRasterLayer(
                file, os.path.splitext(os.path.basename(file))[0])
            #rlayer = QgsRasterLayer(load_it, "hillshade")
            if not rlayer.isValid():
                raise QgsProcessingException(
                    self.tr("""Cannot load the outpout in the application"""))
            rLayerName = ChloeUtils.deduceLayerName(rlayer, self.name())
            ChloeUtils.setLayerSymbology(rlayer, 'continuous.qml')
            context.temporaryLayerStore().addMapLayer(rlayer)
            layerDetails = QgsProcessingContext.LayerDetails(
                rLayerName, context.project(), self.OUTPUT_DIR)
            context.addLayerToLoadOnCompletion(rlayer.id(), layerDetails)
    return results
def processAlgorithm(self, parameters, context, feedback):
    """
    Here is where the processing itself takes place.

    Dumps the field descriptions (index, name, type, length, precision,
    comment, alias) of the input source to a CSV file (with a .csvt type
    sidecar), then reloads that CSV as an OGR layer registered for loading
    on completion.
    """
    source = self.parameterAsSource(parameters, self.INPUT, context)
    path = self.parameterAsFile(parameters, self.OUTPUT, context)
    # One output CSV column per field property.
    field_def = {
        'idx': QVariant.Int,
        'name': QVariant.String,
        'type': QVariant.Int,
        'typeName': QVariant.String,
        'length': QVariant.Int,
        'precision': QVariant.Int,
        'comment': QVariant.String,
        'alias': QVariant.String
    }
    # create virtual layer
    vl = QgsVectorLayer("None", "fields", "memory")
    pr = vl.dataProvider()
    # define fields
    fields = QgsFields()
    for n, t in field_def.items():
        fields.append(QgsField(name=n, type=t))
    # add fields
    pr.addAttributes(fields)
    vl.updateFields(
    )  # tell the vector layer to fetch changes from the provider
    # add feature based on field description
    field_index = 0
    for f in providerFields(source.fields()):
        field_index += 1
        feat = QgsFeature()
        feat.setAttributes([
            field_index,
            f.name(),
            f.type(),
            f.typeName(),
            f.length(),
            f.precision(),
            f.comment(),
            f.alias()
        ])
        pr.addFeatures([feat])
    # set create file layer options
    options = QgsVectorFileWriter.SaveVectorOptions()
    options.driverName = QgsVectorFileWriter.driverForExtension('csv')
    options.fileEncoding = 'UTF-8'
    options.actionOnExistingFile = QgsVectorFileWriter.CreateOrOverwriteFile
    # CREATE_CSVT=YES writes the .csvt sidecar describing column types.
    options.layerOptions = ['CREATE_CSVT=YES']
    # write file
    write_result, error_message = QgsVectorFileWriter.writeAsVectorFormat(
        vl, path, options)
    # result
    if write_result != QgsVectorFileWriter.NoError:
        raise QgsProcessingException(
            self.tr('* ERROR: {0}').format(error_message))
    # Drop the in-memory scaffolding before reloading from disk.
    del fields
    del pr
    del vl
    # create layer
    dest_layer = QgsVectorLayer(path, self.OUTPUT_LAYER, 'ogr')
    if not dest_layer.isValid():
        raise QgsProcessingException(
            self.tr('* ERROR: Can\'t load layer {1} in {0}').format(
                path, self.OUTPUT_LAYER))
    # Add layer to context
    context.temporaryLayerStore().addMapLayer(dest_layer)
    context.addLayerToLoadOnCompletion(
        dest_layer.id(),
        QgsProcessingContext.LayerDetails(self.OUTPUT_LAYER, context.project(),
                                          self.OUTPUT_LAYER))
    return {self.OUTPUT: path, self.OUTPUT_LAYER: dest_layer.id()}
def process_road(self, context, url):
    """Major step of the process

    Downloads the Overpass result for *url*, parses it into the four OSM
    geometry layers, decorates them and registers the tagged ones for
    loading on completion. Returns ``self.OUTPUT_CANCEL`` if the user
    cancels at any checkpoint, otherwise the output-name -> layer-id map.
    """
    if self.feedback.isCanceled():
        self.feedback.reportError(
            'The algorithm has been canceled during the building of the url.'
        )
        return self.OUTPUT_CANCEL
    self.feedback.setCurrentStep(1)
    self.feedback.pushInfo('Downloading data and OSM file.')
    connexion_overpass_api = ConnexionOAPI(url)
    osm_file = connexion_overpass_api.run()
    if self.feedback.isCanceled():
        self.feedback.reportError(
            'The algorithm has been canceled during the download.')
        return self.OUTPUT_CANCEL
    self.feedback.setCurrentStep(2)
    self.feedback.pushInfo('Processing downloaded file.')
    # Derive the output dir/prefix from self.file when set ([:-5] strips the
    # extension from the basename).
    out_dir = dirname(self.file) if self.file else None
    out_file = basename(self.file)[:-5] if self.file else None
    osm_parser = OsmParser(osm_file=osm_file,
                           output_format=Format.GeoPackage,
                           output_dir=out_dir,
                           prefix_file=out_file,
                           feedback_alg=True,
                           feedback=self.feedback)
    layers = osm_parser.processing_parse()
    if self.feedback.isCanceled():
        self.feedback.reportError(
            'The algorithm has been canceled during the parsing.')
        return self.OUTPUT_CANCEL
    self.feedback.setCurrentStep(7)
    self.feedback.pushInfo('Decorating the requested layers.')
    layer_output = []
    # Map each OSM geometry kind to its algorithm output name.
    OUTPUT = {
        'points': self.OUTPUT_POINTS,
        'lines': self.OUTPUT_LINES,
        'multilinestrings': self.OUTPUT_MULTILINESTRINGS,
        'multipolygons': self.OUTPUT_MULTIPOLYGONS
    }
    for layer in layers:
        layers[layer]['layer_decorated'] = processing.run(
            "quickosm:decoratelayer",
            {'LAYER': layers[layer]['vector_layer']},
            feedback=self.feedback)
        context.temporaryLayerStore().addMapLayer(
            layers[layer]['vector_layer'])
        layer_output.append(
            QgsProcessingUtils.mapLayerFromString(
                layers[layer]['vector_layer'].id(), context, True))
        if 'tags' in layers[layer]:
            layer_details = QgsProcessingContext.LayerDetails(
                'OSMQuery_' + layer, context.project(), OUTPUT[layer])
            if 'colour' in layers[layer]['tags']:
                # Style the layer from its 'colour' tag once loaded.
                layer_details.setPostProcessor(
                    SetColoringPostProcessor.create(layers[layer]['tags']))
            context.addLayerToLoadOnCompletion(
                layers[layer]['vector_layer'].id(), layer_details)
    if self.feedback.isCanceled():
        self.feedback.reportError(
            'The algorithm has been canceled during the post processing.')
        return self.OUTPUT_CANCEL
    # NOTE(review): assumes the parser always yields the four layers in the
    # order points/lines/multilinestrings/multipolygons — confirm.
    outputs = {
        self.OUTPUT_POINTS: layer_output[0].id(),
        self.OUTPUT_LINES: layer_output[1].id(),
        self.OUTPUT_MULTILINESTRINGS: layer_output[2].id(),
        self.OUTPUT_MULTIPOLYGONS: layer_output[3].id(),
    }
    return outputs
def processAlgorithm(self, parameters, context, feedback):
    """Run the selected change-detection algorithm between two rasters
    inside a buffered region of interest, and load the resulting changes
    raster on completion.

    Returns the changes layer id and the buffer sink id.
    """
    # extract input parameters
    alg = self.parameterAsEnum(parameters, self.INPUT_ALG_NAME, context)
    extent = self.parameterAsVectorLayer(parameters, self.INPUT_EXTENT, context)
    raster_1_id = self.parameterAsString(parameters, self.INPUT_RASTER_1, context)
    raster_1 = QgsProject.instance().mapLayer(raster_1_id)
    raster_2_id = self.parameterAsString(parameters, self.INPUT_RASTER_2, context)
    raster_2 = QgsProject.instance().mapLayer(raster_2_id)
    (sink, self.dest_id) = self.parameterAsSink(
        parameters,
        self.OUTPUT_BUFFER,
        context,
        QgsFields(),
        extent.wkbType(),
        extent.sourceCrs(),
    )
    # NOTE(review): this value is never used afterwards; kept for parity.
    outputFile = self.parameterAsRasterLayer(
        parameters, self.OUTPUT_CHANGES, context
    )
    # create a temporary vector layer for the region of interest
    tmp = tempfile.mkdtemp()
    path_roi = os.path.join(tmp, "roi.shp")
    writer = QgsVectorFileWriter(
        path_roi,
        "UTF-8",
        QgsFields(),
        QgsWkbTypes.Polygon,
        extent.sourceCrs(),
        "ESRI Shapefile",
    )
    # create buffered extent and update temporary shp for detector
    for feature in extent.getFeatures():
        geom = feature.geometry()
        buffer = geom.buffer(
            10, 100, QgsGeometry.CapFlat, QgsGeometry.JoinStyleMiter, 100
        )
        feature.setGeometry(buffer)
        feature.setFields(QgsFields())
        sink.addFeature(feature)
        writer.addFeature(feature)
    # Deleting the writer flushes the shapefile to disk.
    del writer
    # run change detector; index 0 (PCA) is the default
    path1 = raster_1.source()
    path2 = raster_2.source()
    detector = LittoDynChangeDetectorPca(path1, path2, path_roi)
    if alg == 1:
        detector = LittoDynChangeDetectorEvi(path1, path2, path_roi)
    elif alg == 2:
        detector = LittoDynChangeDetectorNdvi(path1, path2, path_roi)
    elif alg == 3:
        detector = LittoDynChangeDetectorNgrdi(path1, path2, path_roi)
    elif alg == 4:
        detector = LittoDynChangeDetectorNormEuclid(path1, path2, path_roi)
    elif alg == 5:
        detector = LittoDynChangeDetectorNormCorr(path1, path2, path_roi)
    elif alg == 6:
        detector = LittoDynChangeDetectorNormCos(path1, path2, path_roi)
    detector.detect()
    # store output layers in group (results-group naming needs QGIS >= 3.15)
    alg_name = self.options[alg].lower().replace(" ", "_")
    if Qgis.QGIS_VERSION_INT >= 31500:
        name = "{}_{}_{}".format(raster_1.name(), raster_2.name(), alg_name)
        ProcessingConfig.setSettingValue(ProcessingConfig.RESULTS_GROUP_NAME, name)
    # save result in temporary file
    tmp = tempfile.mkdtemp()
    path_changes = os.path.join(tmp, "{}_changes.tif".format(alg_name))
    detector.save(path_changes)
    rl = QgsRasterLayer(path_changes, "{}_changes".format(alg_name), "gdal")
    context.temporaryLayerStore().addMapLayer(rl)
    context.addLayerToLoadOnCompletion(
        rl.id(),
        QgsProcessingContext.LayerDetails(
            "{}_changes".format(alg_name), context.project(), self.OUTPUT_CHANGES
        ),
    )
    # Fix: the returned dict previously listed self.OUTPUT_CHANGES twice;
    # duplicate literal keys silently collapse, so the redundant entry is
    # removed.
    return {
        self.OUTPUT_CHANGES: rl.id(),
        self.OUTPUT_BUFFER: self.dest_id,
    }
def processAlgorithm(self, parameters, context, feedback):
    """Request isochrones/isodistances from the routing service for every
    input point and collect them into up to four memory layers (time,
    distance, snapped points, input points), each registered for loading
    on completion when it has features.
    """
    # Init ORS client
    providers = configmanager.read_config()['providers']
    provider = providers[self.parameterAsEnum(parameters, self.IN_PROVIDER,
                                              context)]
    clnt = client.Client(provider)
    clnt.overQueryLimit.connect(
        lambda: feedback.reportError("OverQueryLimit: Retrying..."))
    params = dict()
    geometry_param = self.GEOMETRY_TYPES[self.parameterAsEnum(
        parameters, self.IN_GEOMETRY, context)]
    params[
        self.IN_GEOMETRY] = True if geometry_param == 'Polygon' else False
    mode = self.MODE_TYPES[self.parameterAsEnum(parameters, self.IN_MODE,
                                                context)]
    source = self.parameterAsSource(parameters, self.IN_POINTS, context)
    # wkbType 4 == MultiPoint, which the service cannot handle.
    if source.wkbType() == 4:
        raise QgsProcessingException(
            "TypeError: Multipoint Layers are not accepted. Please convert to single geometry layer."
        )
    # Get ID field properties; fall back to the first field when none given.
    id_field_name = self.parameterAsString(parameters, self.IN_FIELD,
                                           context)
    id_field_id = source.fields().lookupField(id_field_name)
    if id_field_name == '':
        id_field_id = 0
        id_field_name = source.fields().field(id_field_id).name()
    id_field = source.fields().field(id_field_id)
    # Populate iso_layer instance with parameters
    self.isochrones.set_parameters(self.PROFILE, geometry_param,
                                   id_field.type(), id_field_name)
    # Four in-memory result layers, all EPSG:4326.
    layer_time = QgsVectorLayer(f'{geometry_param}?crs=EPSG:4326',
                                f'Isochrones {self.PROFILE.capitalize()}',
                                'memory')
    self.isos_time_id = layer_time.id()
    layer_time_pr = layer_time.dataProvider()
    layer_time_pr.addAttributes(self.isochrones.get_fields())
    layer_time.updateFields()
    layer_dist = QgsVectorLayer(
        f'{geometry_param}?crs=EPSG:4326',
        f'Isodistances {self.PROFILE.capitalize()}', 'memory')
    self.isos_dist_id = layer_dist.id()
    layer_dist_pr = layer_dist.dataProvider()
    layer_dist_pr.addAttributes(self.isochrones.get_fields())
    layer_dist.updateFields()
    layer_snapped_points = QgsVectorLayer(
        f'MultiPoint?crs=EPSG:4326',
        f'Snapped Points {self.PROFILE.capitalize()}', 'memory')
    self.points_snapped_id = layer_snapped_points.id()
    layer_snapped_points_pr = layer_snapped_points.dataProvider()
    layer_snapped_points_pr.addAttributes(
        self.isochrones.get_point_fields())
    layer_snapped_points.updateFields()
    layer_input_points = QgsVectorLayer(
        f'Point?crs=EPSG:4326',
        f'Input Points {self.PROFILE.capitalize()}', 'memory')
    self.points_input_id = layer_input_points.id()
    layer_input_points_pr = layer_input_points.dataProvider()
    layer_input_points_pr.addAttributes(self.isochrones.get_point_fields())
    layer_input_points.updateFields()
    # Optional request parameters are only sent when non-zero/non-empty.
    denoise = self.parameterAsDouble(parameters, self.IN_DENOISE, context)
    if denoise:
        params[self.IN_DENOISE] = denoise
    generalize = self.parameterAsDouble(parameters, self.IN_GENERALIZE,
                                        context)
    if generalize:
        params[self.IN_GENERALIZE] = generalize
    avoid_layer = self.parameterAsLayer(parameters, self.IN_AVOID, context)
    if avoid_layer:
        params['avoid_locations'] = get_avoid_locations(avoid_layer)
    show_locations = self.parameterAsBool(parameters,
                                          self.IN_SHOW_LOCATIONS, context)
    # Sets all advanced parameters as attributes of self.costing_options
    self.costing_options.set_costing_options(self, parameters, context)
    intervals_time = self.parameterAsString(parameters,
                                            self.IN_INTERVALS_TIME, context)
    intervals_distance = self.parameterAsString(parameters,
                                                self.IN_INTERVALS_DISTANCE,
                                                context)
    # Each feature is requested once per metric, hence *2 when both run.
    feat_count = source.featureCount(
    ) if not intervals_time else source.featureCount() * 2
    self.intervals = {
        "time": [{
            "time": int(x)
        } for x in intervals_time.split(',')] if intervals_time else [],
        "distance": [{
            "distance": int(x)
        } for x in intervals_distance.split(',')] if intervals_distance else []
    }
    counter = 0
    for metric, interv in self.intervals.items():
        if feedback.isCanceled():
            break
        if not interv:
            continue
        # Make the actual requests
        requests = []
        for properties in self.get_sorted_feature_parameters(source):
            if feedback.isCanceled():
                break
            r_params = deepcopy(params)
            r_params['contours'] = interv
            # Get transformed coordinates and feature
            locations, feat = properties
            r_params.update(
                get_directions_params(locations, self.PROFILE,
                                      self.costing_options, mode))
            r_params['id'] = feat[id_field_name]
            requests.append(r_params)
        # NOTE(review): this loop deliberately rebinds ``params`` to each
        # request dict; the shared template above is no longer needed here.
        for params in requests:
            counter += 1
            if feedback.isCanceled():
                break
            # If feature causes error, report and continue with next
            try:
                # Populate features from response
                response = clnt.request('/isochrone', post_json=params)
            except (exceptions.ApiError) as e:
                # Per-feature failure: report and move on.
                msg = "Feature ID {} caused a {}:\n{}".format(
                    params['id'], e.__class__.__name__, str(e))
                feedback.reportError(msg)
                logger.log(msg, 2)
                continue
            except (exceptions.InvalidKey,
                    exceptions.GenericServerError) as e:
                # Fatal failure: abort the whole algorithm.
                msg = "{}:\n{}".format(e.__class__.__name__, str(e))
                feedback.reportError(msg)
                logger.log(msg)
                raise
            options = {}
            if params.get('costing_options'):
                options = params['costing_options']
            self.isochrones.set_response(response)
            for isochrone in self.isochrones.get_features(
                    params['id'], options.get(self.PROFILE)):
                if metric == 'time':
                    layer_time_pr.addFeature(isochrone)
                elif metric == 'distance':
                    layer_dist_pr.addFeature(isochrone)
            if show_locations:
                for point_feat in self.isochrones.get_multipoint_features(
                        params['id']):
                    layer_snapped_points_pr.addFeature(point_feat)
                for point_feat in self.isochrones.get_point_features(
                        params['id']):
                    layer_input_points_pr.addFeature(point_feat)
            feedback.setProgress(int((counter / feat_count) * 100))
    # Collect (display name, output name, layer id) for each non-empty layer.
    temp = []
    if layer_time.hasFeatures():
        layer_time.updateExtents()
        context.temporaryLayerStore().addMapLayer(layer_time)
        temp.append(
            ("Isochrones " + self.PROFILE.capitalize(), self.OUT_TIME,
             layer_time.id()))
    if layer_dist.hasFeatures():
        layer_dist.updateExtents()
        context.temporaryLayerStore().addMapLayer(layer_dist)
        temp.append(
            ("Isochrones " + self.PROFILE.capitalize(), self.OUT_DISTANCE,
             layer_dist.id()))
    if show_locations:
        layer_snapped_points.updateExtents()
        context.temporaryLayerStore().addMapLayer(layer_snapped_points)
        temp.append(("Snapped Points " + self.PROFILE.capitalize(),
                     self.POINTS_SNAPPED, layer_snapped_points.id()))
        layer_input_points.updateExtents()
        context.temporaryLayerStore().addMapLayer(layer_input_points)
        temp.append(("Input Points " + self.PROFILE.capitalize(),
                     self.POINTS_INPUT, layer_input_points.id()))
    results = dict()
    for l_name, e_id, l_id in temp:
        results[e_id] = l_id
        # NOTE(review): LayerDetails' outputName argument is given l_name
        # here rather than e_id — looks suspicious; confirm intended.
        context.addLayerToLoadOnCompletion(
            l_id,
            QgsProcessingContext.LayerDetails(l_name, context.project(),
                                              l_name))
    return results
def processAlgorithm(self, parameters, context, feedback):
    """Load one cadastre WMS layer per commune found in the input layer.

    Reads the INSEE code and the commune name from every feature of the
    input vector layer, deduplicates and sorts the (insee, commune)
    pairs, then for each pair builds a WMS uri pointing at the French
    cadastre service (inspire.cadastre.gouv.fr) restricted to the
    small-roads layer (VOIE_COMMUNICATION). Valid layers are registered
    on the processing context so they are loaded into the project on
    completion.

    :param parameters: algorithm parameters (INPUT, INSEE_CODE,
        COMMUNE_NAME, EPSG_CODE).
    :param context: processing context receiving the WMS layers.
    :param feedback: feedback object for log messages.
    :return: an empty dict — layers are delivered through
        ``addLayerToLoadOnCompletion``, not as algorithm outputs.
    """
    source = self.parameterAsVectorLayer(parameters, self.INPUT, context)
    field_insee = self.parameterAsString(parameters, self.INSEE_CODE, context)
    field_commune = self.parameterAsString(parameters, self.COMMUNE_NAME, context)
    value_epsg = self.parameterAsString(parameters, self.EPSG_CODE, context)

    # CRS codes accepted by the cadastre WMS (metropolitan France + DOM).
    # BUGFIX: the original comparison chain tested "' 32631'" with a
    # leading space, so EPSG:32631 could never match; fixed here.
    allowed_epsg = {
        '2154', '3942', '3943', '3944', '3945', '3946', '3947', '3948',
        '3949', '3950', '32630', '32631', '32632', '3857', '4326', '4258',
        '32620', '2970', '2972', '2973', '2975', '32622', '32740', '32738',
        '4471', '32621',
    }
    if value_epsg not in allowed_epsg:
        feedback.pushInfo('Error EPSG code')
        return {}

    feedback.pushInfo('EPSG code' + value_epsg)

    # Collect one (insee, commune) pair per feature; commune names are
    # stripped of diacritics (NFD decomposition, drop combining marks)
    # so they can safely be used in layer names.
    tab = []
    for f in source.getFeatures():
        commune_ascii = ''.join(
            c for c in unicodedata.normalize('NFD', f[field_commune])
            if unicodedata.category(c) != 'Mn')
        tab.append((f[field_insee], commune_ascii))

    # Remove duplicates and sort for a deterministic layer order.
    for c_insee, n_couche in sorted(set(tab)):
        url_with_params = (
            "url=http://inspire.cadastre.gouv.fr/scpc/" + c_insee
            + ".wms?contextualWMSLegend=0&crs=EPSG:" + value_epsg
            + "&dpiMode=7&featureCount=10&format=image/png"
            + "&layers=VOIE_COMMUNICATION&styles=&maxHeight=1024&maxWidth=1280")
        layer_name = ('Petites_voies_de_communication_'
                      + n_couche + '_' + c_insee)
        rlayer = QgsRasterLayer(url_with_params, layer_name, 'wms')
        feedback.pushInfo('Category :' + n_couche + ' - ' + c_insee)
        feedback.pushInfo('Validity of WMS : %s' % rlayer.isValid())
        if not rlayer.isValid():
            # Report through feedback instead of print() so the message
            # reaches the processing log.
            feedback.reportError(layer_name + ' failed to load!')
            feedback.pushInfo(
                'WMS INVALID : Cadastre_' + n_couche + '_' + c_insee)
        else:
            # Source: https://gis.stackexchange.com/questions/342802/loading-openstreetmap-in-pyqgis
            context.temporaryLayerStore().addMapLayer(rlayer)
            context.addLayerToLoadOnCompletion(
                rlayer.id(),
                QgsProcessingContext.LayerDetails(
                    layer_name, context.project(), self.OUTPUT_LAYERS))

    # No feature-sink outputs: the layers are attached to the context.
    return {}
def processAlgorithm(self, parameters, context, feedback):
    """Create the plugin data model in a GeoPackage file or a PostGIS schema.

    The DESTINATION parameter is first tried as a PostGIS connection name
    (``postgis.GeoDB.from_name``); if that lookup raises, the destination
    is treated as a GeoPackage file path. For each (table, geom) entry of
    ``MAPPING`` a memory layer is built from the field definitions in the
    matching ``data_models/<table>.csv`` resource and exported to the
    destination with ``QgsVectorLayerExporter``. Finally a view
    (``self.VIEW_NAME``) joining ``regard`` and ``geom_regard`` is
    created, and every created layer is registered on the processing
    context so it gets loaded into the project on completion.

    :param parameters: algorithm parameters (DESTINATION, SCHEMA, CRS).
    :param context: processing context receiving the created layers.
    :param feedback: feedback object; every executed SQL statement is
        echoed to it with ``pushInfo``.
    :return: ``{self.DESTINATION: uri, self.OUTPUT_LAYERS: layer ids}``.
    :raises QgsProcessingException: if a CSV file cannot be loaded, a
        memory layer ends up with the wrong field count, an export fails,
        or a created table/view layer is invalid.
    """
    destination = self.parameterAsFile(parameters, self.DESTINATION, context)
    # Decide the backend: if the destination resolves to a PostGIS
    # connection use it, otherwise fall back to a GeoPackage file.
    try:
        db = postgis.GeoDB.from_name(destination)
        is_geopackage = False
        schema = self.parameterAsFile(parameters, self.SCHEMA, context)
    except QgsProcessingException:
        is_geopackage = True
        schema = None
    if is_geopackage:
        if not destination.lower().endswith('.gpkg'):
            destination += '.gpkg'
        uri = destination
    else:
        # PostGIS: open a psycopg2 connection and drop any stale view so
        # the tables it depends on can be re-created below.
        database_uri = db.uri
        info = database_uri.connectionInfo(True)
        conn = psycopg2.connect(info)
        cur = conn.cursor()
        sql = "DROP VIEW IF EXISTS {}.{};".format(schema, self.VIEW_NAME)
        feedback.pushInfo(sql)
        cur.execute(sql)
        conn.commit()
    crs = self.parameterAsCrs(parameters, self.CRS, context)
    # Exporter options shared by every exported table.
    options = dict()
    options['update'] = True
    if is_geopackage:
        options['layerOptions'] = ['FID=id']
    options['fileEncoding'] = 'UTF-8'
    output_layers = []
    for table, geom in MAPPING.items():
        # create virtual layer
        # geom[0] is the memory-provider geometry type string (falsy for
        # geometry-less tables) — TODO confirm against MAPPING definition.
        if geom[0]:
            vl_path = '{}?crs={}&'.format(geom[0], crs.authid())
        else:
            vl_path = 'None?'
        csv_path = resources_path('data_models', '{}.csv'.format(table))
        csv = QgsVectorLayer(csv_path, table, 'ogr')
        if not csv.isValid():
            # Recomputed only for the error message below.
            csv_path = resources_path('data_models', '{}.csv'.format(table))
            raise QgsProcessingException(
                tr('* ERROR: Can\'t load CSV {}').format(csv_path))
        # Each CSV row describes one field: its name and type name.
        fields = []
        for c_f in csv.getFeatures():
            fields.append('field={}:{}'.format(c_f['name'], c_f['typeName']))
        del csv
        vl_path += '&'.join(fields)
        LOGGER.debug('Memory layer "{}" created with {}'.format(
            table, vl_path))
        vl = QgsVectorLayer(vl_path, table, 'memory')
        if vl.fields().count() != len(fields):
            raise QgsProcessingException(
                tr('* ERROR while creating fields in layer "{}"').format(
                    table))
        # export layer
        options['layerName'] = vl.name()
        if not is_geopackage:
            # Build the destination uri for this table; QGIS >= 3.10 has
            # proper setters, older versions need a string search&replace.
            uri = QgsDataSourceUri(database_uri)
            if Qgis.QGIS_VERSION_INT >= 31000:
                uri.setTable(vl.name())
                if vl.isSpatial():
                    uri.setGeometryColumn('geom')
            else:
                uri_string = uri.uri(True)
                if vl.isSpatial():
                    uri_string = uri_string.replace(
                        'table=""', 'table="{}" (geom)'.format(vl.name()))
                else:
                    uri_string = uri_string.replace(
                        'table=""', 'table="{}"'.format(vl.name()))
                uri = QgsDataSourceUri(uri_string)
            # Schema is updating the table name,
            # so after search&replace
            uri.setSchema(schema)
            uri.setKeyColumn(vl.fields().at(0).name())
        exporter = QgsVectorLayerExporter(
            uri if is_geopackage else uri.uri(),
            'ogr' if is_geopackage else 'postgres',
            vl.fields(), vl.wkbType(), vl.crs(), True, options)
        # result
        if exporter.errorCode() != QgsVectorLayerExporter.NoError:
            source = uri if is_geopackage else uri.uri()
            raise QgsProcessingException(
                tr('* ERROR while exporting the layer to "{}":"{}"').
                format(source, exporter.errorMessage()))
        # Do create sequence
        # geom[2] is presumably the serial column name — verify against
        # MAPPING. GeoPackage manages its own FID, so sequences are only
        # needed on PostGIS.
        if geom[2] and not is_geopackage:
            cur = conn.cursor()
            sql = "CREATE SEQUENCE {}.{}_{}_seq;".format(
                schema, table, geom[2])
            cur.execute(sql)
            conn.commit()
            sql = ("ALTER TABLE {0}.{1} "
                   "ALTER COLUMN {2} "
                   "SET DEFAULT nextval('{0}.{1}_{2}_seq'::regclass);"
                   ).format(schema, table, geom[2])
            cur.execute(sql)
            conn.commit()
        # connection troncon_rereau_classif in geopackage
        if is_geopackage:
            dest_layer = QgsVectorLayer(
                '{}|layername={}'.format(uri, table), table, 'ogr')
        else:
            # Rebuild the uri (same 3.10 / pre-3.10 dance as above) to
            # open the freshly exported table via the postgres provider.
            uri = QgsDataSourceUri(database_uri)
            if Qgis.QGIS_VERSION_INT >= 31000:
                uri.setTable(vl.name())
                if vl.isSpatial():
                    uri.setGeometryColumn('geom')
            else:
                uri_string = uri.uri(True)
                if vl.isSpatial():
                    uri_string = uri_string.replace(
                        'table=""', 'table="{}" (geom)'.format(vl.name()))
                else:
                    uri_string = uri_string.replace(
                        'table=""', 'table="{}"'.format(vl.name()))
                uri = QgsDataSourceUri(uri_string)
            # Schema is updating the table name,
            # so after search&replace
            uri.setSchema(schema)
            uri.setKeyColumn(vl.fields().at(0).name())
            dest_layer = QgsVectorLayer(uri.uri(False), table, 'postgres')
        if not dest_layer.isValid():
            source = uri if is_geopackage else uri.uri()
            raise QgsProcessingException(
                tr('* ERROR: Can\'t load table "{}" in URI "{}"').format(
                    table, source))
        feedback.pushInfo('The layer {} has been created'.format(table))
        output_layers.append(dest_layer.id())
        # Add layer to project
        context.temporaryLayerStore().addMapLayer(dest_layer)
        context.addLayerToLoadOnCompletion(
            dest_layer.id(),
            QgsProcessingContext.LayerDetails(table, context.project(),
                                              self.OUTPUT_LAYERS))
    # Get connection
    # For GeoPackage the SQL below runs through spatialite; for PostGIS
    # the psycopg2 connection opened earlier is reused.
    if is_geopackage:
        conn = spatialite_connect(uri)
    # Do create view
    cur = conn.cursor()
    prefix = ''
    view_destination = self.VIEW_NAME
    if not is_geopackage:
        # On PostGIS the view (and the joined tables) are schema-qualified.
        prefix = '{}.'.format(schema)
        view_destination = '{}{}'.format(prefix, view_destination)
    sql = ("CREATE VIEW {0} AS "
           "SELECT r.id, r.caa, r.id_geom_regard, r.id_file, g.geom "
           "FROM {1}regard r, "
           "{1}geom_regard g "
           "WHERE r.id_geom_regard = g.id;").format(
               view_destination, prefix)
    feedback.pushInfo(sql)
    cur.execute(sql)
    conn.commit()
    if is_geopackage:
        # A view is not registered automatically: declare it in the
        # gpkg_contents / gpkg_geometry_columns metadata tables so QGIS
        # sees it as a POINT feature layer.
        sql = ("INSERT INTO gpkg_contents "
               "(table_name, identifier, data_type, srs_id) "
               "VALUES ( '{0}', '{0}', 'features', {1});").format(
                   self.VIEW_NAME, crs.postgisSrid())
        feedback.pushInfo(sql)
        cur.execute(sql)
        conn.commit()
        sql = (
            "INSERT INTO gpkg_geometry_columns "
            "(table_name, column_name, geometry_type_name, srs_id, z, m) "
            "VALUES ('{0}', 'geom', 'POINT', {1}, 0, 0);").format(
                self.VIEW_NAME, crs.postgisSrid())
        feedback.pushInfo(sql)
        cur.execute(sql)
        conn.commit()
        conn.close()
        # NOTE(review): the PostGIS connection does not appear to be
        # closed anywhere — confirm whether that is intentional.
    # Connect to the view_regard_localized layer in the GeoPackage
    if is_geopackage:
        view_layer = QgsVectorLayer(
            '{}|layername={}'.format(uri, self.VIEW_NAME),
            self.VIEW_NAME, 'ogr')
    else:
        uri = QgsDataSourceUri(database_uri)
        if Qgis.QGIS_VERSION_INT >= 31000:
            uri.setTable(self.VIEW_NAME)
            uri.setGeometryColumn('geom')
        else:
            uri_string = uri.uri(True)
            uri_string = uri_string.replace(
                'table=""', 'table="{}" (geom)'.format(self.VIEW_NAME))
            uri = QgsDataSourceUri(uri_string)
        # Schema is updating the table name,
        # so after search&replace
        uri.setSchema(schema)
        uri.setKeyColumn('id')
        view_layer = QgsVectorLayer(uri.uri(False), self.VIEW_NAME,
                                    'postgres')
    if not view_layer.isValid():
        source = uri if is_geopackage else uri.uri()
        raise QgsProcessingException(
            tr('* ERROR: Can\'t load layer {} in {}').format(
                self.VIEW_NAME, source))
    output_layers.append(view_layer.id())
    # Add layer to project
    context.temporaryLayerStore().addMapLayer(view_layer)
    context.addLayerToLoadOnCompletion(
        view_layer.id(),
        QgsProcessingContext.LayerDetails(self.VIEW_NAME,
                                          context.project(),
                                          self.OUTPUT_LAYERS))
    feedback.pushInfo('The data model has been created in {}'.format(uri))
    return {self.DESTINATION: uri, self.OUTPUT_LAYERS: output_layers}