def processAlgorithm(self, parameters, context, feedback):
    connection = self.parameterAsString(parameters, self.DATABASE, context)
    id_field = self.parameterAsString(parameters, self.ID_FIELD, context)
    geom_field = self.parameterAsString(
        parameters, self.GEOMETRY_FIELD, context)
    uri = postgis.uri_from_name(connection)
    sql = self.parameterAsString(parameters, self.SQL, context)
    sql = sql.replace('\n', ' ')
    uri.setDataSource("", "(" + sql + ")", geom_field, "", id_field)
    vlayer = QgsVectorLayer(uri.uri(), "layername", "postgres")
    if not vlayer.isValid():
        raise QgsProcessingException(self.tr("""This layer is invalid!
            Please check the PostGIS log for error messages."""))
    context.temporaryLayerStore().addMapLayer(vlayer)
    context.addLayerToLoadOnCompletion(
        vlayer.id(),
        QgsProcessingContext.LayerDetails('SQL layer', context.project(), self.OUTPUT))
    return {self.OUTPUT: vlayer.id()}
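A minimal standalone sketch of the query-layer trick used above, assuming a QGIS Python environment and a stored PostGIS connection named 'my_conn' (the connection name, SQL, and column names are illustrative only):

from qgis.core import QgsVectorLayer
from processing.tools import postgis

uri = postgis.uri_from_name('my_conn')  # hypothetical stored connection
sql = "SELECT id, geom FROM public.roads"  # illustrative query
# Wrapping the SELECT in parentheses makes the postgres provider treat it
# as a subquery layer; the last argument names the unique key column.
uri.setDataSource("", "(" + sql + ")", "geom", "", "id")
layer = QgsVectorLayer(uri.uri(), "sql_layer", "postgres")
print(layer.isValid())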
def getConsoleCommands(self):
    connection = self.DB_CONNECTIONS[self.getParameterValue(self.DATABASE)]
    uri = uri_from_name(connection)
    if self.processing:
        # to get credentials input when needed
        uri = GeoDB(uri=uri).uri
    inLayer = self.getParameterValue(self.INPUT_LAYER)
    ogrLayer = ogrConnectionString(inLayer)[1:-1]
    shapeEncoding = self.getParameterValue(self.SHAPE_ENCODING)
    ssrs = str(self.getParameterValue(self.S_SRS))
    tsrs = str(self.getParameterValue(self.T_SRS))
    asrs = str(self.getParameterValue(self.A_SRS))
    schema = str(self.getParameterValue(self.SCHEMA))
    table = str(self.getParameterValue(self.TABLE))
    pk = str(self.getParameterValue(self.PK))
    pkstring = "-lco FID=" + pk
    primary_key = self.getParameterValue(self.PRIMARY_KEY)
    geocolumn = str(self.getParameterValue(self.GEOCOLUMN))
    geocolumnstring = "-lco GEOMETRY_NAME=" + geocolumn
    dim = self.DIMLIST[self.getParameterValue(self.DIM)]
    dimstring = "-lco DIM=" + dim
    simplify = str(self.getParameterValue(self.SIMPLIFY))
    segmentize = str(self.getParameterValue(self.SEGMENTIZE))
    spat = self.getParameterValue(self.SPAT)
    clip = self.getParameterValue(self.CLIP)
    where = str(self.getParameterValue(self.WHERE))
    wherestring = '-where "' + where + '"'
    gt = str(self.getParameterValue(self.GT))
    overwrite = self.getParameterValue(self.OVERWRITE)
    append = self.getParameterValue(self.APPEND)
    addfields = self.getParameterValue(self.ADDFIELDS)
    launder = self.getParameterValue(self.LAUNDER)
    launderstring = "-lco LAUNDER=NO"
    index = self.getParameterValue(self.INDEX)
    indexstring = "-lco SPATIAL_INDEX=OFF"
    skipfailures = self.getParameterValue(self.SKIPFAILURES)
    promotetomulti = self.getParameterValue(self.PROMOTETOMULTI)
    precision = self.getParameterValue(self.PRECISION)
    options = str(self.getParameterValue(self.OPTIONS))

    arguments = []
    arguments.append('-progress')
    arguments.append('--config PG_USE_COPY YES')
    if shapeEncoding:
        arguments.append('--config')
        arguments.append('SHAPE_ENCODING')
        arguments.append('"' + shapeEncoding + '"')
    arguments.append('-f')
    arguments.append('PostgreSQL')
    arguments.append('PG:"')
    for token in uri.connectionInfo(self.processing).split(' '):
        arguments.append(token)
    arguments.append('active_schema={}'.format(schema or 'public'))
    arguments.append('"')
    arguments.append(dimstring)
    arguments.append(ogrLayer)
    arguments.append(ogrLayerName(inLayer))
    if index:
        arguments.append(indexstring)
    if launder:
        arguments.append(launderstring)
    if append:
        arguments.append('-append')
    if addfields:
        arguments.append('-addfields')
    if overwrite:
        arguments.append('-overwrite')
    if len(self.GEOMTYPE[self.getParameterValue(self.GTYPE)]) > 0:
        arguments.append('-nlt')
        arguments.append(self.GEOMTYPE[self.getParameterValue(self.GTYPE)])
    if len(geocolumn) > 0:
        arguments.append(geocolumnstring)
    if len(pk) > 0:
        arguments.append(pkstring)
    elif primary_key is not None:
        arguments.append("-lco FID=" + primary_key)
    if len(table) == 0:
        table = ogrLayerName(inLayer).lower()
    if schema:
        table = '{}.{}'.format(schema, table)
    arguments.append('-nln')
    arguments.append(table)
    if len(ssrs) > 0:
        arguments.append('-s_srs')
        arguments.append(ssrs)
    if len(tsrs) > 0:
        arguments.append('-t_srs')
        arguments.append(tsrs)
    if len(asrs) > 0:
        arguments.append('-a_srs')
        arguments.append(asrs)
    if len(spat) > 0:
        regionCoords = spat.split(',')
        arguments.append('-spat')
        arguments.append(regionCoords[0])
        arguments.append(regionCoords[2])
        arguments.append(regionCoords[1])
        arguments.append(regionCoords[3])
    if clip:
        arguments.append('-clipsrc spat_extent')
    if skipfailures:
        arguments.append('-skipfailures')
    if where:
        arguments.append(wherestring)
    if len(simplify) > 0:
        arguments.append('-simplify')
        arguments.append(simplify)
    if len(segmentize) > 0:
        arguments.append('-segmentize')
        arguments.append(segmentize)
    if len(gt) > 0:
        arguments.append('-gt')
        arguments.append(gt)
    if promotetomulti:
        arguments.append('-nlt PROMOTE_TO_MULTI')
    if precision is False:
        arguments.append('-lco PRECISION=NO')
    if len(options) > 0:
        arguments.append(options)

    commands = []
    if isWindows():
        commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
                    GdalUtils.escapeAndJoin(arguments)]
    else:
        commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]

    return commands
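Worth noting: the QGIS 2.x extent string used above is ordered 'xmin,xmax,ymin,ymax', while ogr2ogr's -spat flag expects xmin ymin xmax ymax, hence the [0], [2], [1], [3] indexing. A quick standalone check with an illustrative extent:

# Illustrative extent string in the QGIS 2.x 'xmin,xmax,ymin,ymax' order
spat = "10.0,20.0,45.0,55.0"
regionCoords = spat.split(',')
# Reordered for ogr2ogr: -spat xmin ymin xmax ymax
print(['-spat', regionCoords[0], regionCoords[2], regionCoords[1], regionCoords[3]])
# ['-spat', '10.0', '45.0', '20.0', '55.0']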
def getConsoleCommands(self, parameters):
    connection = self.DB_CONNECTIONS[self.getParameterValue(self.DATABASE)]
    uri = uri_from_name(connection)
    if self.processing:
        # to get credentials input when needed
        uri = GeoDB(uri=uri).uri
    inLayer = self.getParameterValue(self.INPUT_LAYER)
    ogrLayer = ogrConnectionString(inLayer)[1:-1]
    shapeEncoding = self.getParameterValue(self.SHAPE_ENCODING)
    schema = str(self.getParameterValue(self.SCHEMA))
    table = str(self.getParameterValue(self.TABLE))
    pk = str(self.getParameterValue(self.PK))
    pkstring = "-lco FID=" + pk
    primary_key = self.getParameterValue(self.PRIMARY_KEY)
    where = str(self.getParameterValue(self.WHERE))
    wherestring = '-where "' + where + '"'
    gt = str(self.getParameterValue(self.GT))
    overwrite = self.getParameterValue(self.OVERWRITE)
    append = self.getParameterValue(self.APPEND)
    addfields = self.getParameterValue(self.ADDFIELDS)
    launder = self.getParameterValue(self.LAUNDER)
    launderstring = "-lco LAUNDER=NO"
    skipfailures = self.getParameterValue(self.SKIPFAILURES)
    precision = self.getParameterValue(self.PRECISION)
    options = str(self.getParameterValue(self.OPTIONS))

    arguments = []
    arguments.append('-progress')
    arguments.append('--config PG_USE_COPY YES')
    if len(shapeEncoding) > 0:
        arguments.append('--config')
        arguments.append('SHAPE_ENCODING')
        arguments.append('"' + shapeEncoding + '"')
    arguments.append('-f')
    arguments.append('PostgreSQL')
    arguments.append('PG:"')
    for token in uri.connectionInfo(self.processing).split(' '):
        arguments.append(token)
    arguments.append('active_schema={}'.format(schema or 'public'))
    arguments.append('"')
    arguments.append(ogrLayer)
    arguments.append('-nlt NONE')
    arguments.append(ogrLayerName(inLayer))
    if launder:
        arguments.append(launderstring)
    if append:
        arguments.append('-append')
    if addfields:
        arguments.append('-addfields')
    if overwrite:
        arguments.append('-overwrite')
    if len(pk) > 0:
        arguments.append(pkstring)
    elif primary_key is not None:
        arguments.append("-lco FID=" + primary_key)
    if len(table) == 0:
        table = ogrLayerName(inLayer).lower()
    if schema:
        table = '{}.{}'.format(schema, table)
    arguments.append('-nln')
    arguments.append(table)
    if skipfailures:
        arguments.append('-skipfailures')
    if where:
        arguments.append(wherestring)
    if len(gt) > 0:
        arguments.append('-gt')
        arguments.append(gt)
    if not precision:
        arguments.append('-lco PRECISION=NO')
    if len(options) > 0:
        arguments.append(options)

    commands = []
    if isWindows():
        commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
                    GdalUtils.escapeAndJoin(arguments)]
    else:
        commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]

    return commands
def getConsoleCommands(self, parameters, context, feedback, executing=True):
    connection = self.DB_CONNECTIONS[self.getParameterValue(self.DATABASE)]
    uri = uri_from_name(connection)
    if executing:
        # to get credentials input when needed
        uri = GeoDB(uri=uri).uri
    inLayer = self.getParameterValue(self.INPUT_LAYER)
    ogrLayer = GdalUtils.ogrConnectionString(inLayer, context)
    shapeEncoding = self.getParameterValue(self.SHAPE_ENCODING)
    schema = str(self.getParameterValue(self.SCHEMA))
    table = str(self.getParameterValue(self.TABLE))
    pk = str(self.getParameterValue(self.PK))
    pkstring = "-lco FID=" + pk
    primary_key = self.getParameterValue(self.PRIMARY_KEY)
    where = str(self.getParameterValue(self.WHERE))
    wherestring = '-where "' + where + '"'
    gt = str(self.getParameterValue(self.GT))
    overwrite = self.getParameterValue(self.OVERWRITE)
    append = self.getParameterValue(self.APPEND)
    addfields = self.getParameterValue(self.ADDFIELDS)
    launder = self.getParameterValue(self.LAUNDER)
    launderstring = "-lco LAUNDER=NO"
    skipfailures = self.getParameterValue(self.SKIPFAILURES)
    precision = self.getParameterValue(self.PRECISION)
    options = str(self.getParameterValue(self.OPTIONS))

    arguments = []
    arguments.append('-progress')
    arguments.append('--config PG_USE_COPY YES')
    if len(shapeEncoding) > 0:
        arguments.append('--config')
        arguments.append('SHAPE_ENCODING')
        arguments.append('"' + shapeEncoding + '"')
    arguments.append('-f')
    arguments.append('PostgreSQL')
    arguments.append('PG:"')
    for token in uri.connectionInfo(executing).split(' '):
        arguments.append(token)
    arguments.append('active_schema={}'.format(schema or 'public'))
    arguments.append('"')
    arguments.append(ogrLayer)
    arguments.append('-nlt NONE')
    arguments.append(GdalUtils.ogrLayerName(inLayer))
    if launder:
        arguments.append(launderstring)
    if append:
        arguments.append('-append')
    if addfields:
        arguments.append('-addfields')
    if overwrite:
        arguments.append('-overwrite')
    if len(pk) > 0:
        arguments.append(pkstring)
    elif primary_key is not None:
        arguments.append("-lco FID=" + primary_key)
    if len(table) == 0:
        table = GdalUtils.ogrLayerName(inLayer).lower()
    if schema:
        table = '{}.{}'.format(schema, table)
    arguments.append('-nln')
    arguments.append(table)
    if skipfailures:
        arguments.append('-skipfailures')
    if where:
        arguments.append(wherestring)
    if len(gt) > 0:
        arguments.append('-gt')
        arguments.append(gt)
    if not precision:
        arguments.append('-lco PRECISION=NO')
    if len(options) > 0:
        arguments.append(options)

    commands = []
    if isWindows():
        commands = [
            'cmd.exe', '/C ', 'ogr2ogr.exe',
            GdalUtils.escapeAndJoin(arguments)
        ]
    else:
        commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]

    return commands
def getConsoleCommands(self, parameters, context, feedback, executing=True):
    connection = self.parameterAsString(parameters, self.DATABASE, context)
    uri = uri_from_name(connection)
    if executing:
        # to get credentials input when needed
        uri = GeoDB(uri=uri).uri
    ogrLayer, layername = self.getOgrCompatibleSource(
        self.INPUT, parameters, context, feedback, executing)
    shapeEncoding = self.parameterAsString(parameters, self.SHAPE_ENCODING, context)
    ssrs = self.parameterAsCrs(parameters, self.S_SRS, context)
    tsrs = self.parameterAsCrs(parameters, self.T_SRS, context)
    asrs = self.parameterAsCrs(parameters, self.A_SRS, context)
    table = self.parameterAsString(parameters, self.TABLE, context)
    schema = self.parameterAsString(parameters, self.SCHEMA, context)
    pk = self.parameterAsString(parameters, self.PK, context)
    pkstring = "-lco FID=" + pk
    primary_key = self.parameterAsString(parameters, self.PRIMARY_KEY, context)
    geocolumn = self.parameterAsString(parameters, self.GEOCOLUMN, context)
    geocolumnstring = "-lco GEOMETRY_NAME=" + geocolumn
    dim = self.DIMLIST[self.parameterAsEnum(parameters, self.DIM, context)]
    dimstring = "-lco DIM=" + dim
    simplify = self.parameterAsString(parameters, self.SIMPLIFY, context)
    segmentize = self.parameterAsString(parameters, self.SEGMENTIZE, context)
    spat = self.parameterAsExtent(parameters, self.SPAT, context)
    clip = self.parameterAsBool(parameters, self.CLIP, context)
    where = self.parameterAsString(parameters, self.WHERE, context)
    wherestring = '-where "' + where + '"'
    gt = self.parameterAsString(parameters, self.GT, context)
    overwrite = self.parameterAsBool(parameters, self.OVERWRITE, context)
    append = self.parameterAsBool(parameters, self.APPEND, context)
    addfields = self.parameterAsBool(parameters, self.ADDFIELDS, context)
    launder = self.parameterAsBool(parameters, self.LAUNDER, context)
    launderstring = "-lco LAUNDER=NO"
    index = self.parameterAsBool(parameters, self.INDEX, context)
    indexstring = "-lco SPATIAL_INDEX=OFF"
    skipfailures = self.parameterAsBool(parameters, self.SKIPFAILURES, context)
    promotetomulti = self.parameterAsBool(parameters, self.PROMOTETOMULTI, context)
    precision = self.parameterAsBool(parameters, self.PRECISION, context)
    options = self.parameterAsString(parameters, self.OPTIONS, context)

    arguments = []
    arguments.append('-progress')
    arguments.append('--config PG_USE_COPY YES')
    if shapeEncoding:
        arguments.append('--config')
        arguments.append('SHAPE_ENCODING')
        arguments.append('"' + shapeEncoding + '"')
    arguments.append('-f')
    arguments.append('PostgreSQL')
    arguments.append('PG:"')
    for token in uri.connectionInfo(executing).split(' '):
        arguments.append(token)
    arguments.append('active_schema={}'.format(schema or 'public'))
    arguments.append('"')
    arguments.append(dimstring)
    arguments.append(ogrLayer)
    arguments.append(layername)
    if index:
        arguments.append(indexstring)
    if launder:
        arguments.append(launderstring)
    if append:
        arguments.append('-append')
    if addfields:
        arguments.append('-addfields')
    if overwrite:
        arguments.append('-overwrite')
    if len(self.GEOMTYPE[self.parameterAsEnum(parameters, self.GTYPE, context)]) > 0:
        arguments.append('-nlt')
        arguments.append(self.GEOMTYPE[self.parameterAsEnum(parameters, self.GTYPE, context)])
    if len(geocolumn) > 0:
        arguments.append(geocolumnstring)
    if len(pk) > 0:
        arguments.append(pkstring)
    elif primary_key is not None:
        arguments.append("-lco FID=" + primary_key)
    if len(table) == 0:
        table = layername.lower()
    if schema:
        table = '{}.{}'.format(schema, table)
    arguments.append('-nln')
    arguments.append(table)
    if ssrs.isValid():
        arguments.append('-s_srs')
        arguments.append(GdalUtils.gdal_crs_string(ssrs))
    if tsrs.isValid():
        arguments.append('-t_srs')
        arguments.append(GdalUtils.gdal_crs_string(tsrs))
    if asrs.isValid():
        arguments.append('-a_srs')
        arguments.append(GdalUtils.gdal_crs_string(asrs))
    if not spat.isNull():
        arguments.append('-spat')
        # cast the QgsRectangle bounds to str so the final join step
        # receives strings rather than raw floats
        arguments.append(str(spat.xMinimum()))
        arguments.append(str(spat.yMinimum()))
        arguments.append(str(spat.xMaximum()))
        arguments.append(str(spat.yMaximum()))
    if clip:
        arguments.append('-clipsrc spat_extent')
    if skipfailures:
        arguments.append('-skipfailures')
    if where:
        arguments.append(wherestring)
    if len(simplify) > 0:
        arguments.append('-simplify')
        arguments.append(simplify)
    if len(segmentize) > 0:
        arguments.append('-segmentize')
        arguments.append(segmentize)
    if len(gt) > 0:
        arguments.append('-gt')
        arguments.append(gt)
    if promotetomulti:
        arguments.append('-nlt PROMOTE_TO_MULTI')
    if precision is False:
        arguments.append('-lco PRECISION=NO')
    if len(options) > 0:
        arguments.append(options)

    commands = []
    if isWindows():
        commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
                    GdalUtils.escapeAndJoin(arguments)]
    else:
        commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]

    return commands
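A hedged usage sketch for running an import of this style through the Processing API; the algorithm id and parameter values below are assumptions to verify against your own registry (processing.algorithmHelp prints the exact names):

import processing

# Assumed algorithm id and parameter keys - check with
# processing.algorithmHelp('gdal:importvectorintopostgisdatabaseavailableconnections')
params = {
    'INPUT': '/data/roads.shp',  # illustrative source path
    'DATABASE': 'my_conn',       # hypothetical stored connection name
    'SCHEMA': 'public',
    'TABLE': 'roads',
    'OVERWRITE': True,
}
processing.run('gdal:importvectorintopostgisdatabaseavailableconnections', params)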
def processAlgorithm(self, parameters, context, feedback): """ Here is where the processing itself takes place. """ ### RETRIEVE PARAMETERS ### # Retrieve the input vector layer = study area study_area = self.parameterAsSource(parameters, self.STUDY_AREA, context) # Retrieve the output PostGIS layer name and format it layer_name = self.parameterAsString(parameters, self.OUTPUT_NAME, context) ts = datetime.now() format_name = "{} {}".format(layer_name, str(ts.strftime('%Y%m%d_%H%M%S'))) # Retrieve the taxons filters groupe_taxo = [ self.db_variables.value('groupe_taxo')[i] for i in ( self.parameterAsEnums(parameters, self.GROUPE_TAXO, context)) ] regne = [ self.db_variables.value('regne')[i] for i in (self.parameterAsEnums(parameters, self.REGNE, context)) ] phylum = [ self.db_variables.value('phylum')[i] for i in (self.parameterAsEnums(parameters, self.PHYLUM, context)) ] classe = [ self.db_variables.value('classe')[i] for i in (self.parameterAsEnums(parameters, self.CLASSE, context)) ] ordre = [ self.db_variables.value('ordre')[i] for i in (self.parameterAsEnums(parameters, self.ORDRE, context)) ] famille = [ self.db_variables.value('famille')[i] for i in (self.parameterAsEnums(parameters, self.FAMILLE, context)) ] group1_inpn = [ self.db_variables.value('group1_inpn')[i] for i in ( self.parameterAsEnums(parameters, self.GROUP1_INPN, context)) ] group2_inpn = [ self.db_variables.value('group2_inpn')[i] for i in ( self.parameterAsEnums(parameters, self.GROUP2_INPN, context)) ] # Retrieve the datetime filter period_type = self.period_variables[self.parameterAsEnum( parameters, self.PERIOD, context)] # Retrieve the extra "where" conditions extra_where = self.parameterAsString(parameters, self.EXTRA_WHERE, context) ### CONSTRUCT "WHERE" CLAUSE (SQL) ### # Construct the sql array containing the study area's features geometry array_polygons = construct_sql_array_polygons(study_area) # Define the "where" clause of the SQL query, aiming to retrieve the output PostGIS layer = biodiversity data where = "is_valid and ST_within(geom, ST_union({}))".format( array_polygons) # Define a dictionnary with the aggregated taxons filters and complete the "where" clause thanks to it taxons_filters = { "groupe_taxo": groupe_taxo, "regne": regne, "phylum": phylum, "classe": classe, "ordre": ordre, "famille": famille, "obs.group1_inpn": group1_inpn, "obs.group2_inpn": group2_inpn } taxons_where = construct_sql_taxons_filter(taxons_filters) where += taxons_where # Complete the "where" clause with the datetime filter datetime_where = construct_sql_datetime_filter(self, period_type, ts, parameters, context) where += datetime_where # Complete the "where" clause with the extra conditions where += " " + extra_where ### EXECUTE THE SQL QUERY ### # Retrieve the data base connection name connection = self.parameterAsString(parameters, self.DATABASE, context) # URI --> Configures connection to database and the SQL query uri = postgis.uri_from_name(connection) # Define the SQL query query = """SELECT obs.* FROM src_lpodatas.v_c_observations obs LEFT JOIN taxonomie.taxref t ON obs.taxref_cdnom=t.cd_nom WHERE {}""".format(where) #feedback.pushInfo(query) # Format the URI with the query uri.setDataSource("", "(" + query + ")", "geom", "", "id_synthese") ### GET THE OUTPUT LAYER ### # Retrieve the output PostGIS layer = biodiversity data layer_obs = QgsVectorLayer(uri.uri(), format_name, "postgres") # Check if the PostGIS layer is valid check_layer_is_valid(feedback, layer_obs) # Load the PostGIS layer load_layer(context, 
layer_obs) ### MANAGE EXPORT ### # Create new valid fields for the sink new_fields = format_layer_export(layer_obs) # Retrieve the sink for the export (sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context, new_fields, layer_obs.wkbType(), layer_obs.sourceCrs()) if sink is None: # Return the PostGIS layer return {self.OUTPUT: layer_obs.id()} else: # Fill the sink and return it for feature in layer_obs.getFeatures(): sink.addFeature(feature) return {self.OUTPUT: dest_id}
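construct_sql_array_polygons is a plugin helper that is not shown here; a plausible minimal sketch of what the WHERE clause above expects (an assumption, not the plugin's actual code; the 2154 SRID is illustrative):

# Hypothetical re-implementation sketch of construct_sql_array_polygons:
# build "ARRAY[ST_GeomFromText('POLYGON(...)', 2154), ...]" from the
# study area's features so ST_union() can merge them server-side.
def construct_sql_array_polygons(layer, srid=2154):
    wkts = [f.geometry().asWkt() for f in layer.getFeatures()]
    items = ", ".join("ST_GeomFromText('{}', {})".format(w, srid) for w in wkts)
    return "ARRAY[{}]".format(items)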
def getConsoleCommands(self, parameters, context, feedback, executing=True):
    connection = self.parameterAsString(parameters, self.DATABASE, context)
    uri = uri_from_name(connection)
    if executing:
        # to get credentials input when needed
        uri = GeoDB(uri=uri).uri
    ogrLayer, layername = self.getOgrCompatibleSource(
        self.INPUT, parameters, context, feedback, executing)
    shapeEncoding = self.parameterAsString(parameters, self.SHAPE_ENCODING, context)
    ssrs = self.parameterAsCrs(parameters, self.S_SRS, context)
    tsrs = self.parameterAsCrs(parameters, self.T_SRS, context)
    asrs = self.parameterAsCrs(parameters, self.A_SRS, context)
    table = self.parameterAsString(parameters, self.TABLE, context)
    schema = self.parameterAsString(parameters, self.SCHEMA, context)
    pk = self.parameterAsString(parameters, self.PK, context)
    pkstring = "-lco FID=" + pk
    primary_key = self.parameterAsString(parameters, self.PRIMARY_KEY, context)
    geocolumn = self.parameterAsString(parameters, self.GEOCOLUMN, context)
    geocolumnstring = "-lco GEOMETRY_NAME=" + geocolumn
    dim = self.DIMLIST[self.parameterAsEnum(parameters, self.DIM, context)]
    dimstring = "-lco DIM=" + dim
    simplify = self.parameterAsString(parameters, self.SIMPLIFY, context)
    segmentize = self.parameterAsString(parameters, self.SEGMENTIZE, context)
    spat = self.parameterAsExtent(parameters, self.SPAT, context)
    clip = self.parameterAsBool(parameters, self.CLIP, context)
    where = self.parameterAsString(parameters, self.WHERE, context)
    wherestring = '-where "' + where + '"'
    gt = self.parameterAsString(parameters, self.GT, context)
    overwrite = self.parameterAsBool(parameters, self.OVERWRITE, context)
    append = self.parameterAsBool(parameters, self.APPEND, context)
    addfields = self.parameterAsBool(parameters, self.ADDFIELDS, context)
    launder = self.parameterAsBool(parameters, self.LAUNDER, context)
    launderstring = "-lco LAUNDER=NO"
    index = self.parameterAsBool(parameters, self.INDEX, context)
    indexstring = "-lco SPATIAL_INDEX=OFF"
    skipfailures = self.parameterAsBool(parameters, self.SKIPFAILURES, context)
    promotetomulti = self.parameterAsBool(parameters, self.PROMOTETOMULTI, context)
    precision = self.parameterAsBool(parameters, self.PRECISION, context)
    options = self.parameterAsString(parameters, self.OPTIONS, context)

    arguments = []
    arguments.append('-progress')
    arguments.append('--config PG_USE_COPY YES')
    if shapeEncoding:
        arguments.append('--config')
        arguments.append('SHAPE_ENCODING')
        arguments.append('"' + shapeEncoding + '"')
    arguments.append('-f')
    arguments.append('PostgreSQL')
    arguments.append('PG:"')
    for token in uri.connectionInfo(executing).split(' '):
        arguments.append(token)
    arguments.append('active_schema={}'.format(schema or 'public'))
    arguments.append('"')
    arguments.append(dimstring)
    arguments.append(ogrLayer)
    arguments.append(layername)
    if index:
        arguments.append(indexstring)
    if launder:
        arguments.append(launderstring)
    if append:
        arguments.append('-append')
    if addfields:
        arguments.append('-addfields')
    if overwrite:
        arguments.append('-overwrite')
    if len(self.GEOMTYPE[self.parameterAsEnum(parameters, self.GTYPE, context)]) > 0:
        arguments.append('-nlt')
        arguments.append(self.GEOMTYPE[self.parameterAsEnum(parameters, self.GTYPE, context)])
    if len(geocolumn) > 0:
        arguments.append(geocolumnstring)
    if pk:
        arguments.append(pkstring)
    elif primary_key:
        arguments.append("-lco FID=" + primary_key)
    if len(table) == 0:
        table = layername.lower()
    if schema:
        table = '{}.{}'.format(schema, table)
    arguments.append('-nln')
    arguments.append(table)
    if ssrs.isValid():
        arguments.append('-s_srs')
        arguments.append(GdalUtils.gdal_crs_string(ssrs))
    if tsrs.isValid():
        arguments.append('-t_srs')
        arguments.append(GdalUtils.gdal_crs_string(tsrs))
    if asrs.isValid():
        arguments.append('-a_srs')
        arguments.append(GdalUtils.gdal_crs_string(asrs))
    if not spat.isNull():
        arguments.append('-spat')
        # cast the QgsRectangle bounds to str so the final join step
        # receives strings rather than raw floats
        arguments.append(str(spat.xMinimum()))
        arguments.append(str(spat.yMinimum()))
        arguments.append(str(spat.xMaximum()))
        arguments.append(str(spat.yMaximum()))
    if clip:
        arguments.append('-clipsrc spat_extent')
    if skipfailures:
        arguments.append('-skipfailures')
    if where:
        arguments.append(wherestring)
    if len(simplify) > 0:
        arguments.append('-simplify')
        arguments.append(simplify)
    if len(segmentize) > 0:
        arguments.append('-segmentize')
        arguments.append(segmentize)
    if len(gt) > 0:
        arguments.append('-gt')
        arguments.append(gt)
    if promotetomulti:
        arguments.append('-nlt PROMOTE_TO_MULTI')
    if precision is False:
        arguments.append('-lco PRECISION=NO')
    if len(options) > 0:
        arguments.append(options)

    commands = []
    if isWindows():
        commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
                    GdalUtils.escapeAndJoin(arguments)]
    else:
        commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]

    return commands
def getConsoleCommands(self):
    connection = self.getParameterValue(self.DATABASE)
    uri = uri_from_name(connection)
    if self.processing:
        # to get credentials input when needed
        uri = GeoDB(uri=uri).uri
    inLayer = self.getParameterValue(self.INPUT_LAYER)
    ogrLayer = ogrConnectionString(inLayer)[1:-1]
    shapeEncoding = self.getParameterValue(self.SHAPE_ENCODING)
    ssrs = self.getParameterValue(self.S_SRS)
    tsrs = self.getParameterValue(self.T_SRS)
    asrs = self.getParameterValue(self.A_SRS)
    schema = self.getParameterValue(self.SCHEMA)
    table = self.getParameterValue(self.TABLE)
    pk = self.getParameterValue(self.PK)
    primary_key = self.getParameterValue(self.PRIMARY_KEY)
    geocolumn = self.getParameterValue(self.GEOCOLUMN)
    dim = self.DIMLIST[self.getParameterValue(self.DIM)]
    simplify = self.getParameterValue(self.SIMPLIFY)
    segmentize = self.getParameterValue(self.SEGMENTIZE)
    spat = self.getParameterValue(self.SPAT)
    clip = self.getParameterValue(self.CLIP)
    where = self.getParameterValue(self.WHERE)
    gt = self.getParameterValue(self.GT)
    overwrite = self.getParameterValue(self.OVERWRITE)
    append = self.getParameterValue(self.APPEND)
    addfields = self.getParameterValue(self.ADDFIELDS)
    launder = self.getParameterValue(self.LAUNDER)
    index = self.getParameterValue(self.INDEX)
    skipfailures = self.getParameterValue(self.SKIPFAILURES)
    promotetomulti = self.getParameterValue(self.PROMOTETOMULTI)
    precision = self.getParameterValue(self.PRECISION)
    options = self.getParameterValue(self.OPTIONS)

    arguments = []
    arguments.append('-progress')
    arguments.append('--config PG_USE_COPY YES')
    if shapeEncoding:
        arguments.append('--config')
        arguments.append('SHAPE_ENCODING')
        arguments.append('"' + shapeEncoding + '"')
    arguments.append('-f')
    arguments.append('PostgreSQL')
    arguments.append('PG:"')
    for token in uri.connectionInfo(self.processing).split(' '):
        arguments.append(token)
    arguments.append('active_schema={}'.format(schema or 'public'))
    arguments.append('"')
    arguments.append("-lco DIM=" + dim)
    arguments.append(ogrLayer)
    arguments.append(ogrLayerName(inLayer))
    if index:
        arguments.append("-lco SPATIAL_INDEX=OFF")
    if launder:
        arguments.append("-lco LAUNDER=NO")
    if append:
        arguments.append('-append')
    if addfields:
        arguments.append('-addfields')
    if overwrite:
        arguments.append('-overwrite')
    if len(self.GEOMTYPE[self.getParameterValue(self.GTYPE)]) > 0:
        arguments.append('-nlt')
        arguments.append(self.GEOMTYPE[self.getParameterValue(self.GTYPE)])
    if geocolumn:
        arguments.append("-lco GEOMETRY_NAME=" + geocolumn)
    if pk:
        arguments.append("-lco FID=" + pk)
    elif primary_key is not None:
        arguments.append("-lco FID=" + primary_key)
    if not table:
        table = ogrLayerName(inLayer).lower()
    if schema:
        table = '{}.{}'.format(schema, table)
    arguments.append('-nln')
    arguments.append(table)
    if ssrs:
        arguments.append('-s_srs')
        arguments.append(ssrs)
    if tsrs:
        arguments.append('-t_srs')
        arguments.append(tsrs)
    if asrs:
        arguments.append('-a_srs')
        arguments.append(asrs)
    if spat:
        regionCoords = spat.split(',')
        arguments.append('-spat')
        arguments.append(regionCoords[0])
        arguments.append(regionCoords[2])
        arguments.append(regionCoords[1])
        arguments.append(regionCoords[3])
    if clip:
        arguments.append('-clipsrc spat_extent')
    if skipfailures:
        arguments.append('-skipfailures')
    if where:
        arguments.append('-where "' + where + '"')
    if simplify:
        arguments.append('-simplify')
        arguments.append(simplify)
    if segmentize:
        arguments.append('-segmentize')
        arguments.append(segmentize)
    if gt:
        arguments.append('-gt')
        arguments.append(gt)
    if promotetomulti:
        arguments.append('-nlt PROMOTE_TO_MULTI')
    if precision is False:
        arguments.append('-lco PRECISION=NO')
    if options:
        arguments.append(options)

    commands = []
    if isWindows():
        commands = [
            'cmd.exe', '/C ', 'ogr2ogr.exe',
            GdalUtils.escapeAndJoin(arguments)
        ]
    else:
        commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]

    return commands
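All of these builders end with GdalUtils.escapeAndJoin; a rough standalone approximation of that join step (not QGIS's actual implementation, which also escapes backslashes):

# Rough approximation of the final join: stringify every token and quote
# the ones containing spaces, except flags and pre-quoted tokens.
def escape_and_join(tokens):
    out = []
    for t in tokens:
        t = str(t)
        if t and not t.startswith('-') and not t.startswith('"') and ' ' in t:
            t = '"' + t.replace('"', '\\"') + '"'
        out.append(t)
    return ' '.join(out)

print(escape_and_join(['-f', 'PostgreSQL', '-nln', 'public.my table']))
# -f PostgreSQL -nln "public.my table"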
def processAlgorithm(self, parameters, context, feedback):
    msg = ""
    connection = self.parameterAsString(parameters, self.DATABASE, context)
    metadata = QgsProviderRegistry.instance().providerMetadata('postgres')
    conn = metadata.findConnection(connection)
    schema = self.parameterAsString(parameters, self.SCHEMA, context)
    table = self.parameterAsString(parameters, self.TABLE, context)
    input_layer = self.parameterAsVectorLayer(parameters, self.INPUT, context)
    geom = None
    geomlayer = ["repere", "poi_tourisme", "poi_service", "liaison", "segment"]
    if table in geomlayer:
        geom = "geom"
    uri = uri_from_name(connection)
    uri.setDataSource(schema, table, geom, "")
    layer = QgsVectorLayer(uri.uri(), table, "postgres")
    layer_name = layer.name()

    # Build the field-mapping dictionary
    # Generic format of a single field mapping
    champs = {
        'expression': '',  # input field
        'length': 0,       # destination length
        'name': '',        # destination field
        'precision': 0,    # destination precision
        'type': 10         # destination type
    }
    matrix = self.parameterAsMatrix(parameters, self.MATRIX, context)
    field_map = []
    # Build the field mapping
    for field in layer.fields():
        # Fields supplied by the user
        name = field.displayName()
        if name in matrix[1::2]:
            i = len(matrix) - 1 - matrix[::-1].index(name)
            c = champs
            c['expression'] = matrix[i - 1]
            c['name'] = name
            c['precision'] = field.precision()
            c['length'] = field.length()
            ccopy = c.copy()
            field_map.append(ccopy)
        else:
            # Fields possibly not supplied by the user
            c = champs
            c['expression'] = ""
            c['name'] = name
            c['precision'] = field.precision()
            c['length'] = field.length()
            ccopy = c.copy()
            field_map.append(ccopy)
    if layer_name == 'portion':
        k = matrix.index('lien_itin')
        c_lien_itin = {
            'expression': matrix[k - 1],  # input field
            'length': 0,                  # destination length
            'name': 'lien_itin',          # destination field
            'precision': 0,               # destination precision
            'type': 2                     # destination type
        }
        field_map.append(c_lien_itin)
        if 'lien_segm' in matrix:
            m = matrix.index('lien_segm')
            c_lien_segm = {
                'expression': matrix[m - 1],  # input field
                'length': 0,                  # destination length
                'name': 'lien_segm',          # destination field
                'precision': 0,               # destination precision
                'type': 2                     # destination type
            }
            field_map.append(c_lien_segm)
    if layer_name in ['itineraire', 'portion', 'segment']:
        if 'id_import' in matrix:
            n = matrix.index('id_import')
            c_id_import = {
                'expression': matrix[n - 1],  # input field
                'length': 0,                  # destination length
                'name': 'id_import',          # destination field
                'precision': 0,               # destination precision
                'type': 2                     # destination type
            }
            field_map.append(c_id_import)

    # Refactor the fields
    refact_params = {
        'FIELDS_MAPPING': field_map,
        'INPUT': input_layer,
        'OUTPUT': 'memory:'
    }
    algresult = processing.run('qgis:refactorfields',
                               refact_params,
                               context=context,
                               feedback=feedback,
                               is_child_algorithm=True)
    feedback.pushInfo(tr("Refactoring des champs fait"))
    # Export to PostgreSQL
    self.to_postgresql(connection, layer_name, algresult['OUTPUT'], context, feedback)
    # Import the table into veloroutes
    self.update_to_veloroutes(conn, layer_name, feedback)
    return {self.OUTPUT_MSG: msg}
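The FIELDS_MAPPING entries above follow the dict shape expected by the core qgis:refactorfields algorithm; a minimal illustrative mapping ('type' takes QVariant enum values, e.g. 10 = String, 2 = Int; the field names and input are hypothetical):

# Illustrative single-field mapping for qgis:refactorfields
field_map = [{
    'expression': '"old_name"',  # source expression
    'name': 'new_name',          # destination field name
    'type': 10,                  # QVariant.String
    'length': 254,
    'precision': 0,
}]
refact_params = {
    'INPUT': 'memory_layer_or_path',  # illustrative input
    'FIELDS_MAPPING': field_map,
    'OUTPUT': 'memory:',
}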
def processAlgorithm(self, parameters, context, feedback):
    msg = ""
    output_layers = []
    layers_name_none = dict()
    layers_name_none["v_certificat"] = "id_view"
    layers_name_none["v_voie"] = "id_view"
    layers_name_none["v_section"] = "id_view"
    layers_name_none["v_parcelle"] = "id_view"
    # override = self.parameterAsBool(parameters, self.OVERRIDE, context)
    connection = self.parameterAsString(parameters, self.DATABASE, context)
    schema = self.parameterAsString(parameters, self.SCHEMA, context)
    data_update = self.parameterAsBool(parameters, self.TRUNCATE_PARCELLE, context)
    if data_update:
        feedback.pushInfo("## Mise à jour des données parcelles ##")
        feedback.pushInfo("## Rend id_parcelle = null dans adresse.point_adresse ##")
        sql = """
            UPDATE adresse.point_adresse pa SET id_parcelle = NULL;
        """
        _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
        if not ok:
            return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

        feedback.pushInfo("""
            ## Désactivation de la clé étrangère sur adresse.point_adresse
            pour pouvoir vider la table adresse.parcelle ##
        """)
        sql = """
            ALTER TABLE adresse.point_adresse
            DROP CONSTRAINT point_adresse_id_parcelle_fkey;
        """
        _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
        if not ok:
            return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

        feedback.pushInfo("## Vide la table adresse.parcelle ##")
        sql = """
            TRUNCATE adresse.parcelle RESTART IDENTITY;
        """
        _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
        if not ok:
            return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

        feedback.pushInfo("## Réactivation de la clé étrangère sur adresse.point_adresse ##")
        sql = """
            ALTER TABLE adresse.point_adresse
            ADD CONSTRAINT point_adresse_id_parcelle_fkey
            FOREIGN KEY (id_parcelle)
            REFERENCES adresse.parcelle (fid);
        """
        _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
        if not ok:
            return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

    feedback.pushInfo("## Remplissage de la table adresse.parcelle ##")
    sql = """
        INSERT INTO adresse.parcelle(id, commune, prefixe, section, numero,
                                     contenance, arpente, geom)
        SELECT p.idu, p.nomcommune, p1.ccopre, p1.ccosec, p1.dnupla, p.contenance,
               CASE WHEN p1.ccoarp = 'A' THEN True ELSE False END as arpente,
               p.geom
        FROM {}.parcelle_info p, {}.parcelle p1
        WHERE p.geo_parcelle = p1.parcelle
        AND p.idu not in(select pa.id from adresse.parcelle pa)
    """.format(schema, schema)
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    if not ok:
        return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

    feedback.pushInfo("## Mise à jour de id_parcelle dans adresse.point_adresse ##")
    sql = """
        UPDATE adresse.point_adresse pa
        SET id_parcelle = (SELECT p.fid FROM adresse.parcelle p
                           WHERE ST_intersects(pa.geom, p.geom));
    """
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    if not ok:
        return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

    feedback.pushInfo("## CREATION DES VUES ##")
    feedback.pushInfo("## Vue adresse.v_certificat ##")
    sql = "DROP VIEW IF EXISTS adresse.v_certificat"
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    sql = """
        CREATE VIEW adresse.v_certificat AS
        SELECT row_number() over (order by c.commune_nom) as id_view,
               pr.proprietaire as id_prop,
               pa.id_point,
               c.insee_code,
               c.commune_nom,
               c.code_postal,
               c.adresse_mairie,
               c.maire,
               trim(coalesce(pr.dqualp, '')) || ' ' ||
               CASE WHEN trim(pr.dnomus) != trim(pr.dnomlp)
                    THEN Coalesce(trim(pr.dnomus) || '/' || trim(pr.dprnus) || ', née ', '')
                    ELSE ''
               END || trim(coalesce(pr.ddenom, '')) AS p_nom,
               ltrim(trim(coalesce(pr.dlign4, '')), '0') || trim(coalesce(pr.dlign5, '')) AS p_adresse,
               trim(coalesce(pr.dlign6, '')) as p_adresse2,
               pc.ccosec,
               pi.tex,
               pa.adresse_complete
        FROM adresse.commune c
        JOIN adresse.point_adresse pa ON pa.id_commune = c.id_com
        JOIN adresse.parcelle p ON p.fid = pa.id_parcelle
        JOIN {}.parcelle_info pi ON pi.idu = p.id
        JOIN {}.parcelle pc ON pc.parcelle = pi.geo_parcelle
        JOIN {}.proprietaire pr ON pr.dnupro = pc.dnupro;
    """.format(schema, schema, schema)
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    if not ok:
        return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

    feedback.pushInfo("## Vue adresse.v_voie ##")
    sql = "DROP VIEW IF EXISTS adresse.v_voie"
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    sql = """
        CREATE VIEW adresse.v_voie AS
        SELECT row_number() OVER (ORDER BY v.nom) AS id_view,
               v.id_voie, v.nom_complet, c.id_com, cc.insee_code
        FROM adresse.voie v, adresse.appartenir_com c, adresse.commune cc
        WHERE c.id_voie = v.id_voie
        AND c.id_com = cc.id_com;
    """
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    if not ok:
        return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

    feedback.pushInfo("## Vue adresse.v_section ##")
    sql = "DROP VIEW IF EXISTS adresse.v_section"
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    sql = """
        CREATE VIEW adresse.v_section AS
        SELECT row_number() OVER (ORDER BY s.tex) AS id_view,
               concat(c.ccodep, c.ccocom) AS insee, s.tex, cc.id_com
        FROM {}.commune c, {}.geo_section s, adresse.commune cc
        WHERE c.commune = s.geo_commune
        AND concat(c.ccodep, c.ccocom) = cc.insee_code::text;
    """.format(schema, schema)
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    if not ok:
        return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

    feedback.pushInfo("## Vue adresse.v_parcelle ##")
    sql = "DROP VIEW IF EXISTS adresse.v_parcelle"
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    sql = """
        CREATE VIEW adresse.v_parcelle as
        SELECT row_number() OVER (ORDER BY s.tex) AS id_view,
               concat(c.ccodep, c.ccocom) as insee, s.tex as section,
               cc.id_com, p.tex as parcelle
        FROM {}.commune c, {}.geo_section s, {}.parcelle_info p, adresse.commune cc
        WHERE c.commune = s.geo_commune
        AND concat(c.ccodep, c.ccocom) = cc.insee_code
        AND p.geo_section = s.geo_section
        ORDER BY p.tex;
    """.format(schema, schema, schema)
    _, _, _, ok, error_message = fetch_data_from_sql_query(connection, sql)
    if not ok:
        return {self.OUTPUT_MSG: error_message, self.OUTPUT: output_layers}

    uri = uri_from_name(connection)
    is_host = uri.host() != ""
    if is_host:
        feedback.pushInfo("Connexion établie via l'hote")
    else:
        feedback.pushInfo("Connexion établie via le service")
    feedback.pushInfo("")
    feedback.pushInfo("## CHARGEMENT DES COUCHES ##")
    for x in layers_name_none:
        if not context.project().mapLayersByName(x):
            result = self.initLayer(context, uri, 'adresse', x, None, "",
                                    layers_name_none[x])
            if not result:
                feedback.pushInfo("La couche " + x + " ne peut pas être chargée")
            else:
                feedback.pushInfo("La couche " + x + " a pu être chargée")
                output_layers.append(result.id())
    msg = "success"
    return {self.OUTPUT_MSG: msg, self.OUTPUT: output_layers}
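fetch_data_from_sql_query is a plugin helper that is not shown here; outside QGIS, the same guarded-execution pattern could be sketched with psycopg2 (an assumption — the plugin may use QGIS's own connection API; the DSN is illustrative):

import psycopg2

# Run statements in order, stopping at the first failure, mirroring the
# ok/error_message pattern above. Assumes psycopg2 is installed.
def run_statements(dsn, statements):
    with psycopg2.connect(dsn) as conn:
        with conn.cursor() as cur:
            for sql in statements:
                try:
                    cur.execute(sql)
                except psycopg2.Error as e:
                    conn.rollback()
                    return False, str(e)
    return True, ""

ok, err = run_statements(
    "dbname=adresse user=postgres",  # illustrative DSN
    ["UPDATE adresse.point_adresse SET id_parcelle = NULL;"])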
def processAlgorithm(self, parameters, context, feedback): """ Here is where the processing itself takes place. """ ### RETRIEVE PARAMETERS ### # Retrieve the input vector layer = study area study_area = self.parameterAsSource(parameters, self.STUDY_AREA, context) # Retrieve the output PostGIS layer name and format it layer_name = self.parameterAsString(parameters, self.OUTPUT_NAME, context) ts = datetime.now() format_name = "{} {}".format(layer_name, str(ts.strftime('%Y%m%d_%H%M%S'))) # Retrieve the taxonomic rank taxonomic_ranks_labels = [ "Groupe taxo", "Règne", "Phylum", "Classe", "Ordre", "Famille", "Groupe 1 INPN", "Groupe 2 INPN" ] taxonomic_ranks_db = [ "groupe_taxo", "regne", "phylum", "classe", "ordre", "famille", "obs.group1_inpn", "obs.group2_inpn" ] taxonomic_rank_label = taxonomic_ranks_labels[self.parameterAsEnum( parameters, self.TAXONOMIC_RANK, context)] taxonomic_rank_db = taxonomic_ranks_db[self.parameterAsEnum( parameters, self.TAXONOMIC_RANK, context)] # Retrieve the taxons filters groupe_taxo = [ self.db_variables.value('groupe_taxo')[i] for i in ( self.parameterAsEnums(parameters, self.GROUPE_TAXO, context)) ] regne = [ self.db_variables.value('regne')[i] for i in (self.parameterAsEnums(parameters, self.REGNE, context)) ] phylum = [ self.db_variables.value('phylum')[i] for i in (self.parameterAsEnums(parameters, self.PHYLUM, context)) ] classe = [ self.db_variables.value('classe')[i] for i in (self.parameterAsEnums(parameters, self.CLASSE, context)) ] ordre = [ self.db_variables.value('ordre')[i] for i in (self.parameterAsEnums(parameters, self.ORDRE, context)) ] famille = [ self.db_variables.value('famille')[i] for i in (self.parameterAsEnums(parameters, self.FAMILLE, context)) ] group1_inpn = [ self.db_variables.value('group1_inpn')[i] for i in ( self.parameterAsEnums(parameters, self.GROUP1_INPN, context)) ] group2_inpn = [ self.db_variables.value('group2_inpn')[i] for i in ( self.parameterAsEnums(parameters, self.GROUP2_INPN, context)) ] # Retrieve the datetime filter period = self.period_variables[self.parameterAsEnum( parameters, self.PERIOD, context)] # Retrieve the extra "where" conditions extra_where = self.parameterAsString(parameters, self.EXTRA_WHERE, context) # Retrieve the histogram parameter histogram_variables = [ "Pas d'histogramme", "Nb de données", "Nb d'espèces", "Nb d'observateurs", "Nb de dates", "Nb de données de mortalité" ] histogram_option = histogram_variables[self.parameterAsEnum( parameters, self.HISTOGRAM_OPTIONS, context)] if histogram_option != "Pas d'histogramme": output_histogram = self.parameterAsFileOutput( parameters, self.OUTPUT_HISTOGRAM, context) if output_histogram == "": raise QgsProcessingException( "Veuillez renseigner un emplacement pour enregistrer votre histogramme !" 
) ### CONSTRUCT "WHERE" CLAUSE (SQL) ### # Construct the sql array containing the study area's features geometry array_polygons = construct_sql_array_polygons(study_area) # Define the "where" clause of the SQL query, aiming to retrieve the output PostGIS layer = summary table where = "is_valid and is_present and ST_within(obs.geom, ST_union({}))".format( array_polygons) # Define a dictionnary with the aggregated taxons filters and complete the "where" clause thanks to it taxons_filters = { "groupe_taxo": groupe_taxo, "regne": regne, "phylum": phylum, "classe": classe, "ordre": ordre, "famille": famille, "obs.group1_inpn": group1_inpn, "obs.group2_inpn": group2_inpn } taxons_where = construct_sql_taxons_filter(taxons_filters) where += taxons_where # Complete the "where" clause with the datetime filter datetime_where = construct_sql_datetime_filter(self, period, ts, parameters, context) where += datetime_where # Complete the "where" clause with the extra conditions where += " " + extra_where ### EXECUTE THE SQL QUERY ### # Retrieve the data base connection name connection = self.parameterAsString(parameters, self.DATABASE, context) # URI --> Configures connection to database and the SQL query uri = postgis.uri_from_name(connection) # Define the SQL query query = """WITH obs AS ( SELECT obs.* FROM src_lpodatas.v_c_observations obs LEFT JOIN taxonomie.taxref t ON obs.taxref_cdnom = t.cd_nom WHERE {}), communes AS ( SELECT DISTINCT obs.id_synthese, la.area_name FROM obs LEFT JOIN gn_synthese.cor_area_synthese cor ON obs.id_synthese = cor.id_synthese JOIN ref_geo.l_areas la ON cor.id_area = la.id_area WHERE la.id_type = (SELECT id_type FROM ref_geo.bib_areas_types WHERE type_code = 'COM')), total_count AS ( SELECT COUNT(*) AS total_count FROM obs) SELECT row_number() OVER () AS id, COALESCE({}, 'Pas de correspondance taxref') AS "{}", {} COUNT(*) AS "Nb de données", ROUND(COUNT(*)::decimal/total_count, 4)*100 AS "Nb données / Nb données TOTAL (%)", COUNT(DISTINCT t.cd_ref) FILTER (WHERE t.id_rang='ES') AS "Nb d'espèces", COUNT(DISTINCT observateur) AS "Nb d'observateurs", COUNT(DISTINCT date) AS "Nb de dates", SUM(CASE WHEN mortalite THEN 1 ELSE 0 END) AS "Nb de données de mortalité", max(nombre_total) AS "Nb d'individus max", min (date_an) AS "Année première obs", max(date_an) AS "Année dernière obs", string_agg(DISTINCT obs.nom_vern,', ') FILTER (WHERE t.id_rang='ES') AS "Liste des espèces", string_agg(DISTINCT com.area_name,', ') AS "Communes", string_agg(DISTINCT obs.source,', ') AS "Sources" FROM total_count, obs LEFT JOIN taxonomie.taxref t ON obs.taxref_cdnom=t.cd_nom LEFT JOIN communes com ON obs.id_synthese = com.id_synthese GROUP BY {}{}, total_count ORDER BY {}{}""".format( where, taxonomic_rank_db, taxonomic_rank_label, 'groupe_taxo AS "Groupe taxo", ' if taxonomic_rank_label in ['Ordre', 'Famille'] else "", "groupe_taxo, " if taxonomic_rank_label in ['Ordre', 'Famille'] else "", taxonomic_rank_db, "groupe_taxo, " if taxonomic_rank_label in ['Ordre', 'Famille'] else "", taxonomic_rank_db) #feedback.pushInfo(query) # Retrieve the boolean add_table add_table = self.parameterAsBool(parameters, self.ADD_TABLE, context) if add_table: # Define the name of the PostGIS summary table which will be created in the DB table_name = simplify_name(format_name) # Define the SQL queries queries = construct_queries_list(table_name, query) # Execute the SQL queries execute_sql_queries(context, feedback, connection, queries) # Format the URI uri.setDataSource(None, table_name, None, "", "id") else: # 
Format the URI with the query uri.setDataSource("", "(" + query + ")", None, "", "id") ### GET THE OUTPUT LAYER ### # Retrieve the output PostGIS layer = summary table layer_summary = QgsVectorLayer(uri.uri(), format_name, "postgres") # Check if the PostGIS layer is valid check_layer_is_valid(feedback, layer_summary) # Load the PostGIS layer load_layer(context, layer_summary) # Open the attribute table of the PostGIS layer iface.showAttributeTable(layer_summary) iface.setActiveLayer(layer_summary) ### CONSTRUCT THE HISTOGRAM ### if histogram_option != "Pas d'histogramme": plt.close() x_var = [ (feature[taxonomic_rank_label] if feature[taxonomic_rank_label] != 'Pas de correspondance taxref' else 'Aucune correspondance') for feature in layer_summary.getFeatures() ] y_var = [ int(feature[histogram_option]) for feature in layer_summary.getFeatures() ] if len(x_var) <= 20: plt.subplots_adjust(bottom=0.5) elif len(x_var) <= 80: plt.figure(figsize=(20, 8)) plt.subplots_adjust(bottom=0.3, left=0.05, right=0.95) else: plt.figure(figsize=(40, 16)) plt.subplots_adjust(bottom=0.2, left=0.03, right=0.97) plt.bar(range(len(x_var)), y_var, tick_label=x_var) plt.xticks(rotation='vertical') plt.xlabel(self.taxonomic_ranks_variables[self.parameterAsEnum( parameters, self.TAXONOMIC_RANK, context)]) plt.ylabel(histogram_option.replace("Nb", "Nombre")) plt.title('{} par {}'.format( histogram_option.replace("Nb", "Nombre"), taxonomic_rank_label[0].lower() + taxonomic_rank_label[1:].replace("taxo", "taxonomique"))) if output_histogram[-4:] != ".png": output_histogram += ".png" plt.savefig(output_histogram) #plt.show() return {self.OUTPUT: layer_summary.id()}
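The figure-sizing ladder above recurs in the next algorithm as well; a condensed standalone version of that heuristic (thresholds copied from the code, sample data and output path illustrative):

import matplotlib.pyplot as plt

def sized_bar_chart(labels, values):
    # Category-count thresholds (20 / 80) copied from the algorithm above
    if len(labels) <= 20:
        plt.subplots_adjust(bottom=0.5)
    elif len(labels) <= 80:
        plt.figure(figsize=(20, 8))
        plt.subplots_adjust(bottom=0.3, left=0.05, right=0.95)
    else:
        plt.figure(figsize=(40, 16))
        plt.subplots_adjust(bottom=0.2, left=0.03, right=0.97)
    plt.bar(range(len(labels)), values, tick_label=labels)
    plt.xticks(rotation='vertical')
    return plt

sized_bar_chart(['Oiseaux', 'Mammifères'], [120, 34]).savefig('histogram.png')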
def processAlgorithm(self, parameters, context, feedback): """ Here is where the processing itself takes place. """ ### RETRIEVE PARAMETERS ### # Retrieve the input vector layer = study area study_area = self.parameterAsSource(parameters, self.STUDY_AREA, context) # Retrieve the output PostGIS layer name and format it layer_name = self.parameterAsString(parameters, self.OUTPUT_NAME, context) format_name = "{} {}".format(layer_name, str(self.ts.strftime('%Y%m%d_%H%M%S'))) # Retrieve the time interval time_interval = self.interval_variables[self.parameterAsEnum( parameters, self.TIME_INTERVAL, context)] # Retrieve the period start_year = self.parameterAsInt(parameters, self.START_YEAR, context) end_year = self.parameterAsInt(parameters, self.END_YEAR, context) if end_year < start_year: raise QgsProcessingException( "Veuillez renseigner une année de fin postérieure à l'année de début !" ) # Retrieve the taxonomic rank taxonomic_rank = self.taxonomic_ranks_variables[self.parameterAsEnum( parameters, self.TAXONOMIC_RANK, context)] # Retrieve the aggregation type aggregation_type = 'Nombre de données' if taxonomic_rank == 'Groupes taxonomiques': aggregation_type = self.agg_variables[self.parameterAsEnum( parameters, self.AGG, context)] # Retrieve the taxons filters groupe_taxo = [ self.db_variables.value('groupe_taxo')[i] for i in ( self.parameterAsEnums(parameters, self.GROUPE_TAXO, context)) ] regne = [ self.db_variables.value('regne')[i] for i in (self.parameterAsEnums(parameters, self.REGNE, context)) ] phylum = [ self.db_variables.value('phylum')[i] for i in (self.parameterAsEnums(parameters, self.PHYLUM, context)) ] classe = [ self.db_variables.value('classe')[i] for i in (self.parameterAsEnums(parameters, self.CLASSE, context)) ] ordre = [ self.db_variables.value('ordre')[i] for i in (self.parameterAsEnums(parameters, self.ORDRE, context)) ] famille = [ self.db_variables.value('famille')[i] for i in (self.parameterAsEnums(parameters, self.FAMILLE, context)) ] group1_inpn = [ self.db_variables.value('group1_inpn')[i] for i in ( self.parameterAsEnums(parameters, self.GROUP1_INPN, context)) ] group2_inpn = [ self.db_variables.value('group2_inpn')[i] for i in ( self.parameterAsEnums(parameters, self.GROUP2_INPN, context)) ] # Retrieve the extra "where" conditions extra_where = self.parameterAsString(parameters, self.EXTRA_WHERE, context) # Retrieve the histogram parameter add_histogram = self.parameterAsEnums(parameters, self.ADD_HISTOGRAM, context) if len(add_histogram) > 0: output_histogram = self.parameterAsFileOutput( parameters, self.OUTPUT_HISTOGRAM, context) if output_histogram == "": raise QgsProcessingException( "Veuillez renseigner un emplacement pour enregistrer votre histogramme !" 
) ### CONSTRUCT "SELECT" CLAUSE (SQL) ### # Select data according to the time interval and the period select_data, x_var = construct_sql_select_data_per_time_interval( self, time_interval, start_year, end_year, aggregation_type, parameters, context) # Select species info (optional) select_species_info = """/*source_id_sp, */taxref_cdnom AS cd_nom, cd_ref, nom_rang as "Rang", groupe_taxo AS "Groupe taxo", obs.nom_vern AS "Nom vernaculaire", nom_sci AS "Nom scientifique\"""" # Select taxonomic groups info (optional) select_taxo_groups_info = 'groupe_taxo AS "Groupe taxo"' ### CONSTRUCT "WHERE" CLAUSE (SQL) ### # Construct the sql array containing the study area's features geometry array_polygons = construct_sql_array_polygons(study_area) # Define the "where" clause of the SQL query, aiming to retrieve the output PostGIS layer = summary table where = "is_valid and is_present and ST_within(obs.geom, ST_union({}))".format( array_polygons) # Define a dictionnary with the aggregated taxons filters and complete the "where" clause thanks to it taxons_filters = { "groupe_taxo": groupe_taxo, "regne": regne, "phylum": phylum, "classe": classe, "ordre": ordre, "famille": famille, "obs.group1_inpn": group1_inpn, "obs.group2_inpn": group2_inpn } taxons_where = construct_sql_taxons_filter(taxons_filters) where += taxons_where # Complete the "where" clause with the extra conditions where += " " + extra_where ### CONSTRUCT "GROUP BY" CLAUSE (SQL) ### # Group by species (optional) group_by_species = "/*source_id_sp, */taxref_cdnom, cd_ref, nom_rang, nom_sci, obs.nom_vern, " if taxonomic_rank == 'Espèces' else "" ### EXECUTE THE SQL QUERY ### # Retrieve the data base connection name connection = self.parameterAsString(parameters, self.DATABASE, context) # URI --> Configures connection to database and the SQL query uri = postgis.uri_from_name(connection) # Define the SQL query query = """SELECT row_number() OVER () AS id, {}{} FROM src_lpodatas.v_c_observations obs LEFT JOIN taxonomie.taxref t ON obs.taxref_cdnom = t.cd_nom LEFT JOIN taxonomie.bib_taxref_rangs r ON t.id_rang = r.id_rang WHERE {} GROUP BY {}groupe_taxo ORDER BY groupe_taxo{}""".format( select_species_info if taxonomic_rank == 'Espèces' else select_taxo_groups_info, select_data, where, group_by_species, ", obs.nom_vern" if taxonomic_rank == 'Espèces' else "") #feedback.pushInfo(query) # Retrieve the boolean add_table add_table = self.parameterAsBool(parameters, self.ADD_TABLE, context) if add_table: # Define the name of the PostGIS summary table which will be created in the DB table_name = simplify_name(format_name) # Define the SQL queries queries = construct_queries_list(table_name, query) # Execute the SQL queries execute_sql_queries(context, feedback, connection, queries) # Format the URI uri.setDataSource(None, table_name, None, "", "id") else: # Format the URI with the query uri.setDataSource("", "(" + query + ")", None, "", "id") ### GET THE OUTPUT LAYER ### # Retrieve the output PostGIS layer = summary table layer_summary = QgsVectorLayer(uri.uri(), format_name, "postgres") # Check if the PostGIS layer is valid check_layer_is_valid(feedback, layer_summary) # Load the PostGIS layer load_layer(context, layer_summary) # Open the attribute table of the PostGIS layer iface.showAttributeTable(layer_summary) iface.setActiveLayer(layer_summary) ### CONSTRUCT THE HISTOGRAM ### if len(add_histogram) > 0: plt.close() y_var = [] for x in x_var: y = 0 for feature in layer_summary.getFeatures(): y += feature[x] y_var.append(y) if len(x_var) <= 20: 
plt.subplots_adjust(bottom=0.4) elif len(x_var) <= 80: plt.figure(figsize=(20, 8)) plt.subplots_adjust(bottom=0.3, left=0.05, right=0.95) else: plt.figure(figsize=(40, 16)) plt.subplots_adjust(bottom=0.2, left=0.03, right=0.97) plt.bar(range(len(x_var)), y_var, tick_label=x_var) plt.xticks(rotation='vertical') x_label = time_interval.split(' ')[1].title() if x_label[-1] != 's': x_label += 's' plt.xlabel(x_label) plt.ylabel(aggregation_type) plt.title('{} {}'.format( aggregation_type, time_interval[0].lower() + time_interval[1:])) if output_histogram[-4:] != ".png": output_histogram += ".png" plt.savefig(output_histogram) #plt.show() return {self.OUTPUT: layer_summary.id()}
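construct_queries_list and execute_sql_queries are plugin helpers not shown here; a plausible sketch (an assumption, not the plugin's actual code) of the table materialization they perform when add_table is checked:

# Hypothetical sketch of construct_queries_list: drop any stale copy, then
# materialize the SELECT into a real table the URI can point at.
def construct_queries_list(table_name, query):
    return [
        'DROP TABLE IF EXISTS "{}"'.format(table_name),
        'CREATE TABLE "{}" AS ({})'.format(table_name, query),
    ]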