def selec_localizacion(capa_entrada1, capa_entrada2, predicado):
    """Select features of one layer by spatial relation with another layer.

    Args:
        capa_entrada1: layer whose features get selected.
        capa_entrada2: second layer taking part in the spatial test.
        predicado: spatial predicate code used by qgis:selectbylocation:
            0 - intersects, 1 - contains, 2 - disjoint, 3 - equals,
            4 - touches, 5 - overlaps, 6 - within, 7 - crosses.
    """
    parameter = {
        "INPUT": capa_entrada1,
        "PREDICATE": predicado,
        "INTERSECT": capa_entrada2,
        "OUTPUT": "TEMPORARY_OUTPUT"
    }
    processing.run("qgis:selectbylocation", parameter)
def extract_band(self, raster_layer, band_number, band_name, output_dir):
    """Extract one band of a multi-channel mosaic into a single-band GeoTIFF.

    Positive reflectance values are kept unchanged; non-positive pixels are
    mapped to the nodata value -10000.

    Args:
        raster_layer: source raster layer (e.g. 2-channel mosaic from pix4d).
        band_number: 1-based index of the band to extract.
        band_name: base name for the temporary output file.
        output_dir: directory where the temporary GeoTIFF is written.

    Returns:
        QgsRasterLayer wrapping the newly created single-band raster.
    """
    temp_out_path = os.path.join(output_dir, os.path.normpath(band_name + '_temp.tif'))
    input_path = raster_layer.dataProvider().dataSourceUri()
    one_band_params = {
        'BAND_A': band_number,
        'EXTRA': '',
        # Keep the value where A > 0, otherwise write the nodata value.
        'FORMULA': "A*(A>0) + -10000*(A<=0)",
        'INPUT_A': input_path,
        'NO_DATA': -10000,
        'OPTIONS': '',
        'RTYPE': 5,  # output data type code for the raster calculator
        'OUTPUT': temp_out_path
    }
    processing.run('gdal:rastercalculator', one_band_params)
    band_one_raster = QgsRasterLayer(temp_out_path, raster_layer.name())
    return band_one_raster
def _deleteLayer(self, layer_name):
    """Drop the table backing *layer_name* from the GeoPackage at self.gpkg_path."""
    try:
        # NOTE(review): the table name is interpolated directly into the SQL;
        # only trusted, internally generated layer names must be passed here.
        processing.run("native:spatialiteexecutesql",
                       {'DATABASE': '{0}|layername={1}'.format(self.gpkg_path, layer_name),
                        'SQL': 'drop table {0}'.format(layer_name)})
    except IndexError:
        # Layer/table not present — deleting is then a no-op.
        # (The original assigned layer_name = None here, a dead store.)
        pass
def runCreateSpatialIndex(self, inputLyr):
    """Build a spatial index on *inputLyr* with the native QGIS algorithm."""
    parameters = {'INPUT': inputLyr}
    processing.run("native:createspatialindex", parameters)
    return False
def processAlgorithm(self, parameters, context, feedback):
    """Download the requested product tiles for the selected years.

    When an extent filter is given, a 20x20 degree grid is built, the cells
    intersecting the extent are converted to tile codes (e.g. 'E020N40'),
    and every matching object is downloaded from the S3 bucket into the
    chosen directory.
    """
    anni = [self.years[id_anno] for id_anno in parameters['anno']]
    prodotti = [self.products[id_prod] for id_prod in parameters['prodotto']]
    estensione = parameters['estensione']
    tiles_selection = []
    if estensione:
        grid_params = {
            "TYPE": 2,
            "EXTENT": "-180,180,-80,80",
            "HSPACING": 20,
            "VSPACING": 20,
            "CRS": "EPSG:4326",
            "OUTPUT": QgsProcessing.TEMPORARY_OUTPUT
        }
        output1 = processing.run('native:creategrid', grid_params, context=context, feedback=None, is_child_algorithm=True)
        grid_layer = output1["OUTPUT"]
        select_params = {
            "INPUT": grid_layer,
            "EXTENT": estensione,
            "OUTPUT": QgsProcessing.TEMPORARY_OUTPUT
        }
        output2 = processing.run('native:extractbyextent', select_params, context=context, feedback=None, is_child_algorithm=True)
        grids_selected_layer = context.takeResultLayer(output2["OUTPUT"])
        for feat in grids_selected_layer.getFeatures():
            lon = int(feat["left"])
            lat = int(feat["top"])
            lon_prefix = "E" if lon >= 0 else "W"
            lat_prefix = "N" if lat >= 0 else "S"
            # Longitude zero-padded to 3 digits, latitude to 2.
            tile_code = lon_prefix + str(abs(lon)).zfill(3) + lat_prefix + str(abs(lat)).zfill(2)
            tiles_selection.append(tile_code)
    url_to_download_list = self.search_data(anni, tiles_selection, prodotti)
    # Report via feedback instead of the debug print() calls.
    feedback.pushInfo('{} file(s) to download'.format(len(url_to_download_list)))
    for d in url_to_download_list:
        output = os.path.join(parameters['Download directory'], d["filename"])
        self.s3client.download_file(BUCKET, d["Key"], output)
        feedback.pushInfo(d["filename"])
        if feedback.isCanceled():
            feedback.pushInfo("Terminated by user")
            return {}
    return {}
def _lang(self):
    """Lang simplification pipeline.

    Densify by count -> delete holes -> GRASS v.generalize, then convert the
    generalized result to a shapefile, load it and insert it at the top of
    the 'zones' layer-tree group.
    """
    densify_by_count = processing.run('qgis:densifygeometries',
                                      {'INPUT': self.gpkg_path + '|layername=zones',
                                       'VERTICES': 3,
                                       'OUTPUT': 'ogr:dbname=\'' + self.gpkg_path + '\' table=\"densifyByCount\" (geom)'})
    delete_holes = processing.run('qgis:deleteholes',
                                  {'INPUT': densify_by_count['OUTPUT'],
                                   'MIN_AREA': 30,
                                   'OUTPUT': 'ogr:dbname=\'' + self.gpkg_path + '\' table=\"deleteHoles\" (geom)'})
    # NOTE(review): method=2/threshold=1 presumably selects the Lang
    # algorithm in GRASS v.generalize — confirm against the GRASS docs.
    grass_generalize = processing.run('grass7:v.generalize',
                                      {'input': delete_holes['OUTPUT'],
                                       'method': 2,
                                       'threshold': 1,
                                       'output': 'TEMPORARY_OUTPUT',
                                       'error': 'TEMPORARY_OUTPUT'})
    grass_generalize_converted = processing.run('gdal:convertformat',
                                                {'INPUT': grass_generalize['output'],
                                                 'OPTIONS': '',
                                                 'OUTPUT': 'grass_generalize_converted.shp'})
    grass_generalize_converted_toGpkg = QgsVectorLayer(grass_generalize_converted['OUTPUT'], 'grass_generalize_lang', "ogr")
    # Add to the project without legend insertion, then place it manually
    # at the top of the 'zones' group.
    generalize_layer = QgsProject.instance().addMapLayer(grass_generalize_converted_toGpkg, False)
    root = QgsProject.instance().layerTreeRoot()
    group_gtfs = root.findGroup('zones')
    group_gtfs.insertChildNode(0, QgsLayerTreeLayer(generalize_layer))
def polyToLine(inputFile, outputFile):
    """Convert a polygon dataset into its boundary lines."""
    # Load the polygon layer and verify it is usable.
    polygon_layer = QgsVectorLayer(inputFile, "OutlinePolygon")
    checkLayer(polygon_layer)
    # Run the native polygons-to-lines conversion.
    processing.run("native:polygonstolines",
                   {'INPUT': polygon_layer, 'OUTPUT': outputFile})
def lineToVertex(inputFile, outputFile):
    """Extract the vertices of a line dataset."""
    # Load the line layer and verify it is usable.
    line_layer = QgsVectorLayer(inputFile, "OutlineLine")
    checkLayer(line_layer)
    # Run the native line-to-vertices extraction.
    processing.run("native:extractvertices",
                   {'INPUT': line_layer, 'OUTPUT': outputFile})
def make_deso_centroids(input_url: str, output_path: str = '/Users/laurentcazor/Documents/Trivector work/Work destination choice/Test_small/origins.shp') -> QgsVectorLayer:
    """Compute feature centroids and return them as a vector layer.

    Args:
        input_url: URI/path of the source polygon layer.
        output_path: destination shapefile; defaults to the historical
            hard-coded location for backward compatibility.

    Returns:
        QgsVectorLayer loaded from the written centroid shapefile.
        (The original was annotated to return a layer but returned None.)
    """
    result = processing.run(
        'native:centroids',
        {
            'INPUT': input_url,
            'ALL_PARTS': False,
            'OUTPUT': output_path,
        },
    )
    return QgsVectorLayer(result['OUTPUT'], 'deso_centroids', 'ogr')
def setupMasks(productType, layers):
    """Load the mask definitions for *productType* onto the given layers."""
    mask_file = Path(__file__).parent / 'produtos' / productType / 'masks.json'
    # Nothing to do when the product ships no mask definition.
    if not mask_file.exists():
        return
    processing.run(
        'ferramentasedicao:loadmasks',
        {'JSON_FILE': str(mask_file), 'INPUT_LAYERS': layers}
    )
def highestSpot(lyr, moldura):
    """Update 'cota_mais_alta' where a spot elevation is the highest in the frame."""
    params = {
        'INPUT_LAYER_P': lyr,
        'INPUT_SPOT_FIELD': 'cota',
        'INPUT_HIGHEST_SPOT_FIELD': 'cota_mais_alta',
        'INPUT_FRAME': moldura,
    }
    processing.run('ferramentasedicao:highestspotontheframe', params)
def test_load_structure(self):
    """Test load structure."""
    feedback = LoggerProcessingFeedBack()
    params = {
        'OVERRIDE': True,
        'ADD_AUDIT': True,
        'SRID': QgsCoordinateReferenceSystem('EPSG:32620'),
        'NOM': 'CC de Test',
        'SIREN': '123456789',
        'CODE': 'cat'
    }
    # Create the database structure, then list what actually got created.
    processing.run('raepa:create_database_structure', params, feedback=feedback)
    self.cursor.execute('SELECT table_name FROM information_schema.tables WHERE table_schema = \'raepa\'')
    records = self.cursor.fetchall()
    result = [r[0] for r in records]
    # Every table/view the structure algorithm is expected to create in
    # the 'raepa' schema.
    expected = [
        '_val_raepa_etat_canal_ass',
        '_val_raepa_forme_canal_ass',
        '_val_raepa_precision_annee',
        '_val_raepa_type_intervention_ass',
        'affleurant_pcrs',
        'commune',
        'raepa_apparaep_p',
        'raepa_apparass_p',
        'raepa_canalaep_l',
        'raepa_canalass_l',
        'raepa_ouvraep_p',
        'raepa_ouvrass_p',
        'raepa_reparaep_p',
        'raepa_reparass_p',
        'sys_liste_table',
        'sys_organisme_gestionnaire',
        'sys_structure_metadonnee',
        'v_canalisation_avec_z_manquant',
        'v_canalisation_avec_zaval_manquant',
        'v_canalisation_branchement',
        'v_canalisation_sans_ouvrage',
        'val_raepa_cat_canal_ae',
        'val_raepa_cat_canal_ass',
        'val_raepa_fonc_app_ae',
        'val_raepa_fonc_app_ass',
        'val_raepa_fonc_canal_ae',
        'val_raepa_fonc_canal_ass',
        'val_raepa_fonc_ouv_ae',
        'val_raepa_fonc_ouv_ass',
        'val_raepa_materiau',
        'val_raepa_mode_circulation',
        'val_raepa_qualite_anpose',
        'val_raepa_qualite_geoloc',
        'val_raepa_support_reparation',
        'val_raepa_typ_reseau_ass',
        'val_raepa_type_defaillance',
    ]
    # Order-insensitive comparison of the two table lists.
    self.assertCountEqual(expected, result)
def calcul_orientation_appareil(*args):
    """Run 'get_orientation_appareil' for the device id given in args[0].

    On processing failure a critical message is logged and shown in the
    QGIS message bar.
    """
    params = {'SOURCE_ID': args[0]}
    try:
        processing.run('raepa:get_orientation_appareil', params)
    except QgsProcessingException:
        QgsMessageLog.logMessage('Erreur dans les logs de Processing/PostGIS.', 'RAEPA', Qgis.Critical)
        iface.messageBar().pushMessage(
            'Erreur dans les logs de Processing/PostGIS.',
            level=Qgis.Critical,
            duration=2)
    return
def fixAndDissolveFeatures(input, context, feedback):
    """Repair invalid geometries, then dissolve the repaired features."""
    # Fix geometries first so the dissolve does not choke on invalid ones.
    repaired = processing.run("qgis:fixgeometries",
                              {'INPUT': input, 'OUTPUT': 'memory:'})['OUTPUT']
    dissolved = processing.run(
        "qgis:dissolve",
        {'INPUT': repaired, 'OUTPUT': 'memory:'},
        context=context,
        feedback=feedback,
    )
    return dissolved['OUTPUT']
def buffer(capa_entrada, capa_salida, distancia):
    """Buffer a vector layer and export the result directly as a shapefile.

    Args:
        capa_entrada: layer instance to buffer.
        capa_salida: output shapefile name, without extension.
        distancia: buffer distance in metres (the layer is projected in metres).
    """
    parameter = {
        "INPUT": capa_entrada,
        "DISTANCE": distancia,
        "DISSOLVE": True,
        "OUTPUT": capa_salida + ".shp",
    }
    processing.run("qgis:buffer", parameter)
def processAlgorithm(self, parameters, context, feedback):
    """Create world files (.tfw) for every GeoTIFF in the input folder.

    Each *.tif under INPUT_Pfad is run through gdal:translate with
    'tfw=yes' into OUTPUT_Pfad, producing a sidecar .tfw next to the
    copied raster.
    """
    feedback = QgsProcessingMultiStepFeedback(1, feedback)
    results = {}
    outputs = {}
    for raster in glob.glob(str(parameters[self.INPUT_Pfad]) + '/*.tif'):
        fileInfo = QFileInfo(raster)
        baseName = fileInfo.baseName()
        tiff = str(parameters[self.INPUT_Pfad]) + '/' + baseName + '.tif'
        tiff_tfw = str(parameters[self.OUTPUT_Pfad]) + '/' + baseName + '.tif'
        alg_params = {
            'COPY_SUBDATASETS': False,
            'DATA_TYPE': 0,  # keep the input data type
            'EXTRA': '',
            'INPUT': str(tiff),
            'NODATA': None,
            'OPTIONS': 'tfw=yes',  # ask GDAL to emit the world file
            'TARGET_CRS': None,
            'OUTPUT': str(tiff_tfw)
        }
        outputs['tiff_tfw'] = processing.run('gdal:translate', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
        results['Berechnung der TFWs abgeschlossen!'] = outputs['tiff_tfw']['OUTPUT']
    return results
def add_habitat_info(input_layer, habitat_layer, context, feedback) -> QgsVectorLayer:
    """Spatially join the habitat layer onto the export layer.

    Adds 'habitat_nom' and 'habitat_facies' attributes to every feature
    intersecting a habitat polygon; non-matching features are kept.
    """
    feedback.pushInfo('\n')
    feedback.pushInfo(
        "Jointure spatiale avec la couche 'habitat' pour le champ 'facies'"
    )
    join_params = {
        'INPUT': input_layer,
        'JOIN': habitat_layer,
        'PREDICATE': [0],  # intersects
        'JOIN_FIELDS': ['nom', 'facies'],
        'METHOD': 1,  # one-to-one: attributes of the first match only
        'DISCARD_NONMATCHING': False,
        'PREFIX': 'habitat_',
        'OUTPUT': 'TEMPORARY_OUTPUT'
    }
    results = run(
        'qgis:joinattributesbylocation',
        join_params,
        context=context,
        feedback=feedback,
        is_child_algorithm=True,
    )
    feedback.pushInfo('{} habitats ont été trouvés'.format(results['JOINED_COUNT']))
    return QgsProcessingUtils.mapLayerFromString(results['OUTPUT'], context, True)
def processAlgorithm(self, parameters, context, feedback):
    """Download every file found for the selected year, tile and product."""
    selected_year = self.yearlist[parameters['anno']]
    selected_tile = parameters['nome_tile']
    selected_product = self.services[parameters['prodotto']]
    downloads = self.search_Data(anno=selected_year,
                                 nome_tile=selected_tile,
                                 prodotto=selected_product)
    for url in downloads:
        file_name = os.path.basename(url)
        destination = parameters['Download directory'] + '/' + file_name
        processing.run('native:filedownloader',
                       {"URL": url, "OUTPUT": destination},
                       context=context,
                       feedback=feedback,
                       is_child_algorithm=True)
        feedback.pushInfo(file_name)
        # Stop early if the user cancelled the algorithm.
        if feedback.isCanceled():
            feedback.pushInfo("Terminated by user")
            return {}
    return {}
def processAlgorithm(self, parameters, context, feedback):
    """Find dangling endpoints: nodes touched by exactly one line geometry."""
    sql = ('WITH nodes AS '
           '(SELECT ST_StartPoint(geom) AS geom FROM '
           '{0} UNION ALL '
           'SELECT ST_EndPoint(geom) AS geom FROM {0}) '
           'SELECT geom FROM nodes '
           'GROUP BY geom HAVING count(*) = 1').format(parameters[self.TABLE])
    feedback.pushInfo(sql)
    query_result = processing.run(
        "gdal:executesql",
        {'INPUT': parameters['INPUT'],
         'SQL': sql,
         'OUTPUT': parameters['OUTPUT']},
        context=context,
        feedback=feedback,
        is_child_algorithm=True)
    return {self.OUTPUT: query_result['OUTPUT']}
def mergeEnergyLines(self, lyr, limit):
    """Merge energy line segments by angle, iterating at most *limit* times."""
    merged = processing.run('ferramentasedicao:mergelinesbyangle', {
        'INPUT': lyr,
        'MAX_ITERATION': limit,
        'OUTPUT': 'TEMPORARY_OUTPUT',
    })
    return merged['OUTPUT']
def export_uniques(self):
    """Report the unique values of band 1 of self.raster.

    Wraps 'native:rasterlayeruniquevaluesreport'; the value table is written
    to 'uniques.csv' inside the working directory.

    Returns:
        dict produced by the algorithm, with keys such as CRS_AUTHID,
        EXTENT, HEIGHT_IN_PIXELS, NODATA_PIXEL_COUNT, OUTPUT_HTML_FILE,
        TOTAL_PIXEL_COUNT and WIDTH_IN_PIXELS.
    """
    params = {
        "INPUT": self.raster,
        "BAND": 1,  # TODO add functionality to choose band
        "OUTPUT_TABLE": os.path.join(self.working_directory, "uniques.csv"),
    }
    return processing.run("native:rasterlayeruniquevaluesreport", params)
def parcourir_reseau_jusquaux_vannes_fermees(*args):
    """Trace the network from a clicked point up to the closed valves.

    args[0]/args[1] are the x/y map coordinates of the starting point. On
    success the resulting line layer is styled in red and added to the
    project; on failure a critical message is logged and shown.
    """
    start_point = QgsPointXY(float(args[0]), float(args[1]))
    crs = iface.mapCanvas().mapSettings().destinationCrs()
    params = {
        'OUTPUT_LAYER_NAME': '',
        'POINT': QgsReferencedPointXY(start_point, crs)
    }
    network = {}
    try:
        network = processing.run('raepa:get_network_to_vanne_ferme_from_point', params)
    except QgsProcessingException:
        # If the object is at the end of the network, the SQL does not
        # provide geometry, so the layer is invalid — record the failure
        # and keep going.
        QgsMessageLog.logMessage('Erreur dans les logs de Processing/PostGIS.', 'RAEPA', Qgis.Critical)
        iface.messageBar().pushMessage(
            'Erreur dans les logs de Processing/PostGIS.',
            level=Qgis.Critical,
            duration=2)
        network['OUTPUT_STATUS'] = 0
    if network['OUTPUT_STATUS'] != 1:
        return
    layer = network['OUTPUT_LAYER']
    layer.setName(network['OUTPUT_LAYER_RESULT_NAME'])
    red_line = QgsLineSymbol.createSimple({
        'line_color': '255,50,50,255',
        'line_style': 'solid',
        'line_width': '1.8',
    })
    layer.renderer().setSymbol(red_line)
    QgsProject.instance().addMapLayer(layer)
def processAlgorithm(self, parameters, context, feedback):
    """Detect holes in the dissolved union not explained by any source ring."""
    table = parameters[self.TABLE]
    sql = (
        'SELECT aneis_uniao.geom FROM '
        '(SELECT (ST_DumpRings(geom)).* FROM '
        f'(SELECT (ST_Dump(ST_Union(geom))).geom AS geom FROM {table}) AS uniao '
        ') AS aneis_uniao LEFT JOIN '
        '(SELECT geom FROM '
        f'(SELECT (ST_DumpRings((ST_Dump(geom)).geom)).* FROM {table}) AS dump '
        'WHERE path[1] != 0) AS interior_rings ON ST_Equals(aneis_uniao.geom, interior_rings.geom) '
        'WHERE path[1] != 0 AND ST_Area(aneis_uniao.geom) > 0.0000001 AND interior_rings.geom IS NULL'
    )
    feedback.pushInfo(sql)
    query_result = processing.run(
        "gdal:executesql",
        {'INPUT': parameters['INPUT'],
         'SQL': sql,
         'OUTPUT': parameters['OUTPUT']},
        context=context,
        feedback=feedback,
        is_child_algorithm=True)
    return {self.OUTPUT: query_result['OUTPUT']}
def network_to_vanne(*args):
    """Trace the network from the object id in args[0] up to the valves.

    On success the resulting line layer is styled in red and added to the
    project; on failure a critical message is logged and shown.
    """
    # Use alg get_downstream_route and get_upstream_route
    params = {
        'OUTPUT_LAYER_NAME': '',
        'SOURCE_ID': args[0]
    }
    down = {}
    try:
        down = processing.run('raepa:get_network_to_vanne', params)
    except QgsProcessingException:
        # If the object is at the end of the network, the SQL does not
        # provide geometry, so the layer is invalid — record the failure
        # and keep going.
        QgsMessageLog.logMessage('Erreur dans les logs de Processing/PostGIS.', 'RAEPA', Qgis.Critical)
        iface.messageBar().pushMessage(
            'Erreur dans les logs de Processing/PostGIS.',
            level=Qgis.Critical,
            duration=2)
        down['OUTPUT_STATUS'] = 0
    if down['OUTPUT_STATUS'] != 1:
        return
    layer = down['OUTPUT_LAYER']
    layer.setName(down['OUTPUT_LAYER_RESULT_NAME'])
    red_line = QgsLineSymbol.createSimple({
        'line_color': '255,50,50,255',
        'line_style': 'solid',
        'line_width': '1.8',
    })
    layer.renderer().setSymbol(red_line)
    QgsProject.instance().addMapLayer(layer)
def processAlgorithm(self, parameters, context, feedback):
    """Find line endpoints that do not lie on the boundary of the mass polygons."""
    table = parameters[self.TABLE]
    boundary_table = parameters[self.TABLE2]
    sql = (
        'WITH extremos AS '
        f'(SELECT ST_StartPoint(geom) AS geom FROM {table} UNION ALL SELECT ST_EndPoint(geom) AS geom '
        f'FROM {table}), '
        'fronteira_massa AS '
        f'(SELECT ST_Boundary(geom) AS geom FROM {boundary_table}) '
        'SELECT extremos.geom FROM extremos LEFT JOIN fronteira_massa ON ST_DWithin(extremos.geom, fronteira_massa.geom, 0.0000001) '
        'WHERE fronteira_massa.geom IS NULL ')
    feedback.pushInfo(sql)
    query_result = processing.run(
        "gdal:executesql",
        {'INPUT': parameters['INPUT'],
         'SQL': sql,
         'OUTPUT': parameters['OUTPUT']},
        context=context,
        feedback=feedback,
        is_child_algorithm=True)
    return {self.OUTPUT: query_result['OUTPUT']}
def snapping():
    """Snap polygon layers to the sites layer and write the geometries back.

    NOTE(review): this function reads `self` but declares no `self`
    parameter — presumably it is nested inside a method that provides
    `self` via closure; confirm before moving or calling it standalone.
    """
    for layer in self.layers:
        layer_type = self.layers[layer].wkbType()  # numeric WKB geometry type code
        layer_name = self.layers[layer].name()
        output = {}
        # NOTE(review): 5 is presumably the MultiLineString WKB code — confirm
        # against QgsWkbTypes before relying on this filter.
        if layer_type == 5:
            alg_params_snapgeometries = {
                'BEHAVIOR': 0,  # snap behaviour mode of qgis:snapgeometries
                'INPUT': self.layers[layer],
                'REFERENCE_LAYER': self.layer_sites_portee,
                'TOLERANCE': self.buff_adj,
                'OUTPUT': 'TEMPORARY_OUTPUT'
            }
            output['alg_params_snapgeometries'] = processing.run(
                'qgis:snapgeometries', alg_params_snapgeometries)
            # Copy each snapped geometry back onto the matching source
            # feature, pairing features by their 'code_id' attribute.
            for feature in self.layers[layer].getFeatures():
                for feat in output['alg_params_snapgeometries']['OUTPUT'].getFeatures():
                    if feature['code_id'] == feat['code_id']:
                        geom_wkt = feat.geometry().asWkt()
                        feature.setGeometry(QgsGeometry.fromWkt(geom_wkt))
                        self.layers[layer].updateFeature(feature)
def processAlgorithm(self, parameters, context, feedback):
    """Georeference the input raster to EPSG:4326 from its corner coordinates.

    Without rotation the bounds are applied directly via GDAL's '-a_ullr'.
    With rotation, the four corners are rotated about the raster centre and
    passed to gdal:translate as ground control points (GCPs).
    """
    # Gather user parameters.
    raster = self.parameterAsRasterLayer(parameters, self.PrmInput, context)
    out_path = self.parameterAsOutputLayer(parameters, self.PrmOutputRaster, context)
    north = self.parameterAsDouble(parameters, self.PrmNorthLatitude, context)
    south = self.parameterAsDouble(parameters, self.PrmSouthLatitude, context)
    east = self.parameterAsDouble(parameters, self.PrmEastLongitude, context)
    west = self.parameterAsDouble(parameters, self.PrmWestLongitude, context)
    rotation = self.parameterAsDouble(parameters, self.PrmRotation, context)
    if rotation == 0:
        # Simple case: assign the CRS and upper-left/lower-right bounds directly.
        status = processing.run("gdal:translate", {'INPUT': raster, 'EXTRA': '-a_srs EPSG:4326 -a_ullr {} {} {} {}'.format(west, north, east, south), 'DATA_TYPE': 0, 'OUTPUT': out_path})
    else:
        rwidth = raster.width()
        rheight = raster.height()
        # Rotate every corner about the geographic centre of the extent.
        center_x = (east + west) / 2.0
        center_y = (north + south)/ 2.0
        center_pt = QgsPointXY(center_x, center_y)
        ul_pt = QgsPointXY(west, north)
        ur_pt = QgsPointXY(east, north)
        lr_pt = QgsPointXY(east, south)
        ll_pt = QgsPointXY(west, south)
        # NOTE(review): the centre-to-corner distance is computed once and
        # reused for all four corners — valid because the corners are
        # symmetric about the centre.
        distance = center_pt.distance(ul_pt)
        az = center_pt.azimuth(ul_pt) - rotation
        pt1 = center_pt.project(distance, az)
        az = center_pt.azimuth(ur_pt) - rotation
        pt2 = center_pt.project(distance, az)
        az = center_pt.azimuth(lr_pt) - rotation
        pt3 = center_pt.project(distance, az)
        az = center_pt.azimuth(ll_pt) - rotation
        pt4 = center_pt.project(distance, az)
        # Map each pixel corner (col, row) to its rotated geographic corner.
        gcp1= '-gcp {} {} {} {}'.format(0,0, pt1.x(), pt1.y())
        gcp2= '-gcp {} {} {} {}'.format(rwidth,0, pt2.x(), pt2.y())
        gcp3= '-gcp {} {} {} {}'.format(rwidth, rheight, pt3.x(), pt3.y())
        gcp4= '-gcp {} {} {} {}'.format(0, rheight, pt4.x(), pt4.y())
        status = processing.run("gdal:translate", {'INPUT': raster, 'EXTRA': '-a_srs EPSG:4326 -a_nodata 0,0,0 {} {} {} {}'.format(gcp1, gcp2, gcp3, gcp4), 'DATA_TYPE': 0, 'OUTPUT': out_path})
    # Report the raw processing result for debugging.
    feedback.pushInfo('{}'.format(status))
    results = {}
    results[self.PrmOutputRaster] = out_path
    return (results)
def processAlgorithm(self, parameters, context, feedback):
    """Copy/convert the input raster with gdal:translate."""
    source_raster = self.parameterAsRasterLayer(parameters, self.INPUT_RASTER, context)
    translate_params = {'INPUT': source_raster, 'OUTPUT': parameters['OUTPUT']}
    translated = processing.run("gdal:translate", translate_params, context=context)
    return {self.OUTPUT: translated['OUTPUT']}
def process_layer(self, layerpath):
    """Store the layer at *layerpath* in PostGIS via the GDAL import algorithm."""
    params = {
        'DATABASE': self.options['connection'],
        'INPUT': layerpath,
        'SCHEMA': self.options['schema'],
        'TABLE': self.layer.lower(),
        # Append when not overwriting; the two flags are mutually exclusive.
        'APPEND': not self.options['overwrite'],
        'OVERWRITE': self.options['overwrite'],
    }
    processing.run('gdal:importvectorintopostgisdatabaseavailableconnections', params)
    self.setProgress(100)
def processAlgorithm(self, parameters, context, feedback):
    """Create an attribute index on every field of the input vector layer.

    Returns:
        dict mapping self.OUTPUT to the input layer id.

    Raises:
        QgsProcessingException: when the input layer cannot be resolved.
    """
    source = self.parameterAsVectorLayer(parameters, self.INPUT, context)
    if source is None:
        raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
    for field in source.fields().names():
        # Honour user cancellation between child runs.
        if feedback.isCanceled():
            break
        feedback.pushInfo('Indexing field {}'.format(field))
        # Forward context/feedback so progress and cancellation propagate
        # to the child algorithm (the original dropped both).
        processing.run("native:createattributeindex",
                       {'INPUT': source, 'FIELD': field},
                       context=context,
                       feedback=feedback,
                       is_child_algorithm=True)
    return {self.OUTPUT: source.id()}