def addConnectionConfig(cls, conn_name, uri):
    """Persist a PostgreSQL connection under QGIS settings.

    Writes the connection parameters parsed from *uri* below
    ``/PostgreSQL/connections/<conn_name>`` so that db_manager can
    discover the connection from settings.
    """
    parsed = QgsDataSourceUri(uri)
    settings = QgsSettings()
    base_key = "/PostgreSQL/connections/" + conn_name

    # Keys that are always written, even when empty.
    for suffix, value in (
        ("/service", parsed.service()),
        ("/host", parsed.host()),
        ("/port", parsed.port()),
        ("/database", parsed.database()),
    ):
        settings.setValue(base_key + suffix, value)

    # Optional keys: persisted only when the URI carries a value.
    for suffix, value in (
        ("/username", parsed.username()),
        ("/password", parsed.password()),
        ("/authcfg", parsed.authConfigId()),
        ("/sslmode", parsed.sslMode()),
    ):
        if value:
            settings.setValue(base_key + suffix, value)
def getConnectionParameterFromDbLayer(layer: QgsMapLayer) -> Dict[str, str]:
    """Get connection parameters from the layer datasource.

    :param layer: map layer whose datasource URI is parsed.
    :returns: dict of connection parameters (service, dbname, host, port,
        user, password, sslmode, key, estimatedmetadata,
        checkPrimaryKeyUnicity, srid, type, schema, table, geocol, sql,
        dbType). ``dbType`` is 'postgis' for the postgres provider,
        'spatialite' otherwise.
    """
    if layer.providerType() == 'postgres':
        dbType = 'postgis'
    else:
        dbType = 'spatialite'

    src = layer.source()
    try:
        uri = QgsDataSourceUri(src)
    except NameError:
        # Fallback for QGIS 2.x where the class was named QgsDataSourceURI.
        # Previously a bare `except:` hid every other failure here.
        uri = QgsDataSourceURI(src)

    # TODO Use immutable namedtuple
    connectionParams = {
        'service': uri.service(),
        'dbname': uri.database(),
        'host': uri.host(),
        'port': uri.port(),
        'user': uri.username(),
        'password': uri.password(),
        'sslmode': uri.sslMode(),
        'key': uri.keyColumn(),
        'estimatedmetadata': str(uri.useEstimatedMetadata()),
        'checkPrimaryKeyUnicity': '',
        'srid': uri.srid(),
        'type': uri.wkbType(),
        'schema': uri.schema(),
        'table': uri.table(),
        'geocol': uri.geometryColumn(),
        'sql': uri.sql(),
        'dbType': dbType,
    }

    return connectionParams
def toMapLayer(self):
    """Build a QgsRasterLayer for this raster table.

    If the first attempt is invalid (typically an authentication
    failure), prompt the user for credentials up to three times and
    retry. A valid layer gets a min/max contrast enhancement applied.
    """
    from qgis.core import QgsRasterLayer, QgsContrastEnhancement, QgsDataSourceUri, QgsCredentials

    layer = QgsRasterLayer(self.gdalUri(), self.name)

    if not layer.isValid():
        error_summary = layer.error().summary()
        src_uri = QgsDataSourceUri(self.database().uri())
        conn_info = src_uri.connectionInfo(False)
        user = src_uri.username()
        pwd = src_uri.password()

        for _attempt in range(3):
            ok, user, pwd = QgsCredentials.instance().get(conn_info, user, pwd, error_summary)
            if not ok:
                # Dialog cancelled; ask again on the next attempt.
                continue
            src_uri.setUsername(user)
            src_uri.setPassword(pwd)
            layer = QgsRasterLayer(self.gdalUri(src_uri), self.name)
            if layer.isValid():
                break

    if layer.isValid():
        layer.setContrastEnhancement(QgsContrastEnhancement.StretchToMinimumMaximum)
    return layer
def ogrConnectionString(uri):
    """Generates OGR connection string from layer source.

    Resolves *uri* to a layer and builds the provider-specific OGR
    datasource string (SQLite path, PG: connection info, OCI: string,
    or the plain file path). The result is returned wrapped in double
    quotes.
    """
    ogrstr = None

    layer = dataobjects.getObjectFromUri(uri, False)
    if layer is None:
        # Not a known layer: assume the uri is already usable as-is.
        return '"' + uri + '"'
    provider = layer.dataProvider().name()
    if provider == "spatialite":
        # dbname='/geodata/osm_ch.sqlite' table="places" (Geometry) sql=
        regex = re.compile("dbname='(.+)'")
        r = regex.search(str(layer.source()))
        ogrstr = r.groups()[0]
    elif provider == "postgres":
        # dbname='ktryjh_iuuqef' host=spacialdb.com port=9999
        # user='******' password='******' sslmode=disable
        # key='gid' estimatedmetadata=true srid=4326 type=MULTIPOLYGON
        # table="t4" (geom) sql=
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        conninfo = dsUri.connectionInfo()
        conn = None
        ok = False
        # Retry until a connection succeeds or the user cancels the
        # credentials dialog.
        while not conn:
            try:
                conn = psycopg2.connect(dsUri.connectionInfo())
            except psycopg2.OperationalError:
                (ok, user, passwd) = QgsCredentials.instance().get(conninfo, dsUri.username(), dsUri.password())
                if not ok:
                    break

                dsUri.setUsername(user)
                dsUri.setPassword(passwd)

        if not conn:
            raise RuntimeError("Could not connect to PostgreSQL database - check connection info")

        if ok:
            # Cache credentials that produced a working connection.
            QgsCredentials.instance().put(conninfo, user, passwd)

        ogrstr = "PG:%s" % dsUri.connectionInfo()
    elif provider == "oracle":
        # OCI:user/password@host:port/service:table
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        ogrstr = "OCI:"
        if dsUri.username() != "":
            ogrstr += dsUri.username()
            if dsUri.password() != "":
                ogrstr += "/" + dsUri.password()
            delim = "@"

        if dsUri.host() != "":
            ogrstr += delim + dsUri.host()
            delim = ""
            # 1521 is Oracle's default port; omit it.
            if dsUri.port() != "" and dsUri.port() != "1521":
                ogrstr += ":" + dsUri.port()
            ogrstr += "/"
            if dsUri.database() != "":
                ogrstr += dsUri.database()
        elif dsUri.database() != "":
            # No host: the "database" is treated as a TNS name.
            ogrstr += delim + dsUri.database()

        if ogrstr == "OCI:":
            raise RuntimeError("Invalid oracle data source - check connection info")

        ogrstr += ":"
        if dsUri.schema() != "":
            ogrstr += dsUri.schema() + "."

        ogrstr += dsUri.table()
    else:
        # File-based source: strip any "|layername=..." style options.
        ogrstr = str(layer.source()).split("|")[0]

    return '"' + ogrstr + '"'
class GeoDB(object):
    """Wrapper around a psycopg2 connection to a PostgreSQL/PostGIS
    database addressed by a QgsDataSourceUri.

    Provides schema/table introspection and DDL helper methods, with
    interactive credential retry via QgsCredentials on connect.
    """

    @classmethod
    def from_name(cls, conn_name):
        """Alternate constructor: build the URI from a stored QGIS
        connection name."""
        uri = uri_from_name(conn_name)
        return cls(uri=uri)

    def __init__(self, host=None, port=None, dbname=None, user=None, passwd=None, service=None, uri=None):
        # Regular expression for identifiers without need to quote them
        self.re_ident_ok = re.compile(r"^\w+$")
        # NOTE(review): str(None) yields the literal 'None' -- assumes a
        # real port is always supplied together with host; confirm callers.
        port = str(port)

        if uri:
            self.uri = uri
        else:
            self.uri = QgsDataSourceUri()
            if service:
                self.uri.setConnection(service, dbname, user, passwd)
            else:
                self.uri.setConnection(host, port, dbname, user, passwd)

        conninfo = self.uri.connectionInfo(False)
        err = None
        # Up to 4 attempts: on failure prompt the user for credentials
        # through QgsCredentials and retry.
        for i in range(4):
            expandedConnInfo = self.uri.connectionInfo(True)
            try:
                self.con = psycopg2.connect(expandedConnInfo)
                if err is not None:
                    # A retry succeeded: cache the working credentials.
                    QgsCredentials.instance().put(conninfo, self.uri.username(), self.uri.password())
                break
            except psycopg2.OperationalError as e:
                if i == 3:
                    raise QgsProcessingException(str(e))

                err = str(e)
                user = self.uri.username()
                password = self.uri.password()
                (ok, user, password) = QgsCredentials.instance().get(
                    conninfo, user, password, err)
                if not ok:
                    raise QgsProcessingException(
                        QCoreApplication.translate("PostGIS", 'Action canceled by user'))
                if user:
                    self.uri.setUsername(user)
                if password:
                    self.uri.setPassword(password)
            finally:
                # remove certs (if any) of the expanded connectionInfo
                expandedUri = QgsDataSourceUri(expandedConnInfo)

                sslCertFile = expandedUri.param("sslcert")
                if sslCertFile:
                    sslCertFile = sslCertFile.replace("'", "")
                    try:
                        os.remove(sslCertFile)
                    except OSError:
                        pass

                sslKeyFile = expandedUri.param("sslkey")
                if sslKeyFile:
                    sslKeyFile = sslKeyFile.replace("'", "")
                    try:
                        os.remove(sslKeyFile)
                    except OSError:
                        pass

                sslCAFile = expandedUri.param("sslrootcert")
                if sslCAFile:
                    sslCAFile = sslCAFile.replace("'", "")
                    try:
                        os.remove(sslCAFile)
                    except OSError:
                        pass

        self.has_postgis = self.check_postgis()

    def get_info(self):
        """Return the PostgreSQL server version string."""
        c = self.con.cursor()
        self._exec_sql(c, 'SELECT version()')
        return c.fetchone()[0]

    def check_postgis(self):
        """Check whether postgis_version is present in catalog.
        """
        c = self.con.cursor()
        self._exec_sql(
            c, "SELECT COUNT(*) FROM pg_proc WHERE proname = 'postgis_version'")
        return c.fetchone()[0] > 0

    def get_postgis_info(self):
        """Returns tuple about PostGIS support:
        - lib version
        - installed scripts version
        - released scripts version
        - geos version
        - proj version
        - whether uses stats
        """
        c = self.con.cursor()
        self._exec_sql(c, 'SELECT postgis_lib_version(), postgis_scripts_installed(), \
                postgis_scripts_released(), postgis_geos_version(), \
                postgis_proj_version(), postgis_uses_stats()')
        return c.fetchone()

    def list_schemas(self):
        """Get list of schemas in tuples: (oid, name, owner, perms).
        """
        c = self.con.cursor()
        sql = "SELECT oid, nspname, pg_get_userbyid(nspowner), nspacl \
                FROM pg_namespace \
                WHERE nspname !~ '^pg_' AND nspname != 'information_schema'"
        self._exec_sql(c, sql)
        return c.fetchall()

    def list_geotables(self, schema=None):
        """Get list of tables with schemas, whether user has privileges,
        whether table has geometry column(s) etc.

        Geometry_columns:
        - f_table_schema
        - f_table_name
        - f_geometry_column
        - coord_dimension
        - srid
        - type
        """
        c = self.con.cursor()
        if schema:
            schema_where = " AND nspname = '%s' " % self._quote_unicode(schema)
        else:
            schema_where = \
                " AND (nspname != 'information_schema' AND nspname !~ 'pg_') "

        # LEFT OUTER JOIN: like LEFT JOIN but if there are more matches,
        # for join, all are used (not only one)

        # First find out whether PostGIS is enabled
        if not self.has_postgis:
            # Get all tables and views
            sql = """SELECT pg_class.relname, pg_namespace.nspname,
                            pg_class.relkind, pg_get_userbyid(relowner),
                            reltuples, relpages, NULL, NULL, NULL, NULL
                    FROM pg_class
                    JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
                    WHERE pg_class.relkind IN ('v', 'r', 'm', 'p')""" \
                + schema_where + 'ORDER BY nspname, relname'
        else:
            # Discovery of all tables and whether they contain a
            # geometry column
            sql = """SELECT pg_class.relname, pg_namespace.nspname,
                            pg_class.relkind, pg_get_userbyid(relowner),
                            reltuples, relpages, pg_attribute.attname,
                            pg_attribute.atttypid::regtype, NULL, NULL
                    FROM pg_class
                    JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
                    LEFT OUTER JOIN pg_attribute ON
                        pg_attribute.attrelid = pg_class.oid AND
                        (pg_attribute.atttypid = 'geometry'::regtype
                        OR pg_attribute.atttypid IN (SELECT oid FROM pg_type
                        WHERE typbasetype='geometry'::regtype))
                    WHERE pg_class.relkind IN ('v', 'r', 'm', 'p') """ \
                + schema_where + 'ORDER BY nspname, relname, attname'

        self._exec_sql(c, sql)
        items = c.fetchall()

        # Get geometry info from geometry_columns if exists
        if self.has_postgis:
            sql = """SELECT relname, nspname, relkind,
                            pg_get_userbyid(relowner), reltuples, relpages,
                            geometry_columns.f_geometry_column,
                            geometry_columns.type,
                            geometry_columns.coord_dimension,
                            geometry_columns.srid
                    FROM pg_class
                    JOIN pg_namespace ON relnamespace=pg_namespace.oid
                    LEFT OUTER JOIN geometry_columns ON
                        relname=f_table_name AND nspname=f_table_schema
                    WHERE relkind IN ('r','v','m','p') """ \
                + schema_where + 'ORDER BY nspname, relname, \
                    f_geometry_column'
            self._exec_sql(c, sql)

            # Merge geometry info to "items"
            # NOTE(review): relies on both queries returning rows in the
            # same order/cardinality -- confirm.
            for (i, geo_item) in enumerate(c.fetchall()):
                if geo_item[7]:
                    items[i] = geo_item

        return items

    def get_table_rows(self, table, schema=None):
        """Return exact row count of a table (COUNT(*), may be slow)."""
        c = self.con.cursor()
        self._exec_sql(
            c, 'SELECT COUNT(*) FROM %s' % self._table_name(schema, table))
        return c.fetchone()[0]

    def get_table_fields(self, table, schema=None):
        """Return list of columns in table"""
        c = self.con.cursor()
        schema_where = (" AND nspname='%s' " % self._quote_unicode(schema)
                        if schema is not None else '')
        # NOTE(review): pg_attrdef.adsrc was removed in PostgreSQL 12 --
        # confirm supported server versions.
        sql = """SELECT a.attnum AS ordinal_position,
                        a.attname AS column_name,
                        t.typname AS data_type,
                        a.attlen AS char_max_len,
                        a.atttypmod AS modifier,
                        a.attnotnull AS notnull,
                        a.atthasdef AS hasdefault,
                        adef.adsrc AS default_value
                FROM pg_class c
                JOIN pg_attribute a ON a.attrelid = c.oid
                JOIN pg_type t ON a.atttypid = t.oid
                JOIN pg_namespace nsp ON c.relnamespace = nsp.oid
                LEFT JOIN pg_attrdef adef ON adef.adrelid = a.attrelid
                                         AND adef.adnum = a.attnum
                WHERE c.relname = '%s' %s AND a.attnum > 0
                ORDER BY a.attnum""" \
            % (self._quote_unicode(table), schema_where)

        self._exec_sql(c, sql)
        attrs = []
        for row in c.fetchall():
            attrs.append(TableAttribute(row))
        return attrs

    def get_table_indexes(self, table, schema=None):
        """Get info about table's indexes. ignore primary key and unique
        index, they get listed in constraints.
        """
        c = self.con.cursor()
        schema_where = (" AND nspname='%s' " % self._quote_unicode(schema)
                        if schema is not None else '')
        sql = """SELECT relname, indkey
                FROM pg_class, pg_index
                WHERE pg_class.oid = pg_index.indexrelid
                AND pg_class.oid IN (
                    SELECT indexrelid
                    FROM pg_index, pg_class
                    JOIN pg_namespace nsp ON pg_class.relnamespace = nsp.oid
                    WHERE pg_class.relname='%s' %s
                    AND pg_class.oid=pg_index.indrelid
                    AND indisunique != 't' AND indisprimary != 't'
                )""" \
            % (self._quote_unicode(table), schema_where)

        self._exec_sql(c, sql)
        indexes = []
        for row in c.fetchall():
            indexes.append(TableIndex(row))
        return indexes

    def get_table_constraints(self, table, schema=None):
        """Return the table's constraints as TableConstraint objects."""
        c = self.con.cursor()
        schema_where = (" AND nspname='%s' " % self._quote_unicode(schema)
                        if schema is not None else '')
        # NOTE(review): pg_constraint.consrc was removed in PostgreSQL 12 --
        # confirm supported server versions.
        sql = """SELECT c.conname, c.contype, c.condeferrable, c.condeferred,
                        array_to_string(c.conkey, ' '), c.consrc, t2.relname,
                        c.confupdtype, c.confdeltype, c.confmatchtype,
                        array_to_string(c.confkey, ' ')
                FROM pg_constraint c
                LEFT JOIN pg_class t ON c.conrelid = t.oid
                LEFT JOIN pg_class t2 ON c.confrelid = t2.oid
                JOIN pg_namespace nsp ON t.relnamespace = nsp.oid
                WHERE t.relname = '%s' %s """ \
            % (self._quote_unicode(table), schema_where)

        self._exec_sql(c, sql)
        constrs = []
        for row in c.fetchall():
            constrs.append(TableConstraint(row))
        return constrs

    def get_view_definition(self, view, schema=None):
        """Returns definition of the view."""
        schema_where = (" AND nspname='%s' " % self._quote_unicode(schema)
                        if schema is not None else '')
        sql = """SELECT pg_get_viewdef(c.oid)
                FROM pg_class c
                JOIN pg_namespace nsp ON c.relnamespace = nsp.oid
                WHERE relname='%s' %s AND relkind IN ('v','m')""" \
            % (self._quote_unicode(view), schema_where)
        c = self.con.cursor()
        self._exec_sql(c, sql)
        return c.fetchone()[0]

    def add_geometry_column(self, table, geom_type, schema=None,
                            geom_column='the_geom', srid=-1, dim=2):
        """Add a geometry column via PostGIS AddGeometryColumn()."""
        # Use schema if explicitly specified
        if schema:
            schema_part = "'%s', " % self._quote_unicode(schema)
        else:
            schema_part = ''
        sql = "SELECT AddGeometryColumn(%s'%s', '%s', %d, '%s', %d)" % (
            schema_part,
            self._quote_unicode(table),
            self._quote_unicode(geom_column),
            srid,
            self._quote_unicode(geom_type),
            dim,
        )
        self._exec_sql_and_commit(sql)

    def delete_geometry_column(self, table, geom_column, schema=None):
        """Use PostGIS function to delete geometry column correctly."""
        if schema:
            schema_part = "'%s', " % self._quote_unicode(schema)
        else:
            schema_part = ''
        sql = "SELECT DropGeometryColumn(%s'%s', '%s')" % (
            schema_part, self._quote_unicode(table),
            self._quote_unicode(geom_column))
        self._exec_sql_and_commit(sql)

    def delete_geometry_table(self, table, schema=None):
        """Delete table with one or more geometries using PostGIS function."""
        if schema:
            schema_part = "'%s', " % self._quote_unicode(schema)
        else:
            schema_part = ''
        sql = "SELECT DropGeometryTable(%s'%s')" % (schema_part,
                                                    self._quote_unicode(table))
        self._exec_sql_and_commit(sql)

    def create_table(self, table, fields, pkey=None, schema=None):
        """Create ordinary table.

        'fields' is array containing instances of TableField
        'pkey' contains name of column to be used as primary key
        """
        if len(fields) == 0:
            return False

        table_name = self._table_name(schema, table)

        sql = 'CREATE TABLE %s (%s' % (table_name, fields[0].field_def())
        for field in fields[1:]:
            sql += ', %s' % field.field_def()
        if pkey:
            sql += ', PRIMARY KEY (%s)' % self._quote(pkey)
        sql += ')'

        self._exec_sql_and_commit(sql)
        return True

    def delete_table(self, table, schema=None):
        """Delete table from the database."""
        table_name = self._table_name(schema, table)
        sql = 'DROP TABLE %s' % table_name
        self._exec_sql_and_commit(sql)

    def empty_table(self, table, schema=None):
        """Delete all rows from table."""
        table_name = self._table_name(schema, table)
        sql = 'DELETE FROM %s' % table_name
        self._exec_sql_and_commit(sql)

    def rename_table(self, table, new_table, schema=None):
        """Rename a table in database."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s RENAME TO %s' % (table_name,
                                               self._quote(new_table))
        self._exec_sql_and_commit(sql)

        # Update geometry_columns if PostGIS is enabled
        if self.has_postgis:
            sql = "UPDATE geometry_columns SET f_table_name='%s' \
                    WHERE f_table_name='%s'" \
                % (self._quote_unicode(new_table), self._quote_unicode(table))
            if schema is not None:
                sql += " AND f_table_schema='%s'" % self._quote_unicode(schema)
            self._exec_sql_and_commit(sql)

    def create_view(self, name, query, schema=None):
        """Create a view over the given SELECT query."""
        view_name = self._table_name(schema, name)
        sql = 'CREATE VIEW %s AS %s' % (view_name, query)
        self._exec_sql_and_commit(sql)

    def delete_view(self, name, schema=None):
        """Drop a view."""
        view_name = self._table_name(schema, name)
        sql = 'DROP VIEW %s' % view_name
        self._exec_sql_and_commit(sql)

    def rename_view(self, name, new_name, schema=None):
        """Rename view in database."""
        self.rename_table(name, new_name, schema)

    def create_schema(self, schema):
        """Create a new empty schema in database."""
        sql = 'CREATE SCHEMA %s' % self._quote(schema)
        self._exec_sql_and_commit(sql)

    def delete_schema(self, schema):
        """Drop (empty) schema from database."""
        sql = 'DROP SCHEMA %s' % self._quote(schema)
        self._exec_sql_and_commit(sql)

    def rename_schema(self, schema, new_schema):
        """Rename a schema in database."""
        sql = 'ALTER SCHEMA %s RENAME TO %s' % (self._quote(schema),
                                                self._quote(new_schema))
        self._exec_sql_and_commit(sql)

        # Update geometry_columns if PostGIS is enabled
        if self.has_postgis:
            sql = \
                "UPDATE geometry_columns SET f_table_schema='%s' \
                WHERE f_table_schema='%s'" \
                % (self._quote_unicode(new_schema), self._quote_unicode(schema))
            self._exec_sql_and_commit(sql)

    def table_add_column(self, table, field, schema=None):
        """Add a column to table (passed as TableField instance)."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ADD %s' % (table_name, field.field_def())
        self._exec_sql_and_commit(sql)

    def table_delete_column(self, table, field, schema=None):
        """Delete column from a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s DROP %s' % (table_name, self._quote(field))
        self._exec_sql_and_commit(sql)

    def table_column_rename(self, table, name, new_name, schema=None):
        """Rename column in a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s RENAME %s TO %s' % (
            table_name, self._quote(name), self._quote(new_name))
        self._exec_sql_and_commit(sql)

        # Update geometry_columns if PostGIS is enabled
        if self.has_postgis:
            sql = "UPDATE geometry_columns SET f_geometry_column='%s' \
                    WHERE f_geometry_column='%s' AND f_table_name='%s'" \
                % (self._quote_unicode(new_name), self._quote_unicode(name),
                   self._quote_unicode(table))
            if schema is not None:
                # NOTE(review): other methods use _quote_unicode here --
                # confirm _quote is intended.
                sql += " AND f_table_schema='%s'" % self._quote(schema)
            self._exec_sql_and_commit(sql)

    def table_column_set_type(self, table, column, data_type, schema=None):
        """Change column type."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ALTER %s TYPE %s' % (
            table_name, self._quote(column), data_type)
        self._exec_sql_and_commit(sql)

    def table_column_set_default(self, table, column, default, schema=None):
        """Change column's default value.

        If default=None drop default value.
        """
        table_name = self._table_name(schema, table)
        if default:
            sql = 'ALTER TABLE %s ALTER %s SET DEFAULT %s' % (
                table_name, self._quote(column), default)
        else:
            sql = 'ALTER TABLE %s ALTER %s DROP DEFAULT' % (
                table_name, self._quote(column))
        self._exec_sql_and_commit(sql)

    def table_column_set_null(self, table, column, is_null, schema=None):
        """Change whether column can contain null values."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ALTER %s ' % (table_name, self._quote(column))
        if is_null:
            sql += 'DROP NOT NULL'
        else:
            sql += 'SET NOT NULL'
        self._exec_sql_and_commit(sql)

    def table_add_primary_key(self, table, column, schema=None):
        """Add a primery key (with one column) to a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ADD PRIMARY KEY (%s)' % (table_name,
                                                       self._quote(column))
        self._exec_sql_and_commit(sql)

    def table_add_unique_constraint(self, table, column, schema=None):
        """Add a unique constraint to a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ADD UNIQUE (%s)' % (table_name,
                                                  self._quote(column))
        self._exec_sql_and_commit(sql)

    def table_delete_constraint(self, table, constraint, schema=None):
        """Delete constraint in a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s DROP CONSTRAINT %s' % (table_name,
                                                     self._quote(constraint))
        self._exec_sql_and_commit(sql)

    def table_move_to_schema(self, table, new_schema, schema=None):
        """Move a table to another schema (no-op when already there)."""
        if new_schema == schema:
            return
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s SET SCHEMA %s' % (table_name,
                                                self._quote(new_schema))
        self._exec_sql_and_commit(sql)

        # Update geometry_columns if PostGIS is enabled
        if self.has_postgis:
            sql = "UPDATE geometry_columns SET f_table_schema='%s' \
                    WHERE f_table_name='%s'" \
                % (self._quote_unicode(new_schema), self._quote_unicode(table))
            if schema is not None:
                sql += " AND f_table_schema='%s'" % self._quote_unicode(schema)
            self._exec_sql_and_commit(sql)

    def create_index(self, table, name, column, schema=None):
        """Create index on one column using default options."""
        table_name = self._table_name(schema, table)
        idx_name = self._quote(name)
        sql = 'CREATE INDEX %s ON %s (%s)' % (idx_name, table_name,
                                              self._quote(column))
        self._exec_sql_and_commit(sql)

    def create_spatial_index(self, table, schema=None,
                             geom_column='the_geom'):
        """Create a GiST index on the geometry column."""
        table_name = self._table_name(schema, table)
        idx_name = self._quote(u"sidx_%s_%s" % (table, geom_column))
        sql = 'CREATE INDEX %s ON %s USING GIST(%s)' % (
            idx_name, table_name, self._quote(geom_column))
        self._exec_sql_and_commit(sql)

    def delete_index(self, name, schema=None):
        """Drop an index."""
        index_name = self._table_name(schema, name)
        sql = 'DROP INDEX %s' % index_name
        self._exec_sql_and_commit(sql)

    def get_database_privileges(self):
        """DB privileges: (can create schemas, can create temp. tables).
        """
        sql = "SELECT has_database_privilege('%(d)s', 'CREATE'), \
                has_database_privilege('%(d)s', 'TEMP')" \
            % {'d': self._quote_unicode(self.uri.database())}
        c = self.con.cursor()
        self._exec_sql(c, sql)
        return c.fetchone()

    def get_schema_privileges(self, schema):
        """Schema privileges: (can create new objects, can access objects
        in schema)."""
        sql = "SELECT has_schema_privilege('%(s)s', 'CREATE'), \
                has_schema_privilege('%(s)s', 'USAGE')" \
            % {'s': self._quote_unicode(schema)}
        c = self.con.cursor()
        self._exec_sql(c, sql)
        return c.fetchone()

    def get_table_privileges(self, table, schema=None):
        """Table privileges: (select, insert, update, delete).
        """
        t = self._table_name(schema, table)
        sql = """SELECT has_table_privilege('%(t)s', 'SELECT'),
                        has_table_privilege('%(t)s', 'INSERT'),
                        has_table_privilege('%(t)s', 'UPDATE'),
                        has_table_privilege('%(t)s', 'DELETE')""" \
            % {'t': self._quote_unicode(t)}
        c = self.con.cursor()
        self._exec_sql(c, sql)
        return c.fetchone()

    def vacuum_analyze(self, table, schema=None):
        """Run VACUUM ANALYZE on a table."""
        t = self._table_name(schema, table)
        # VACUUM ANALYZE must be run outside transaction block - we
        # have to change isolation level
        self.con.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        c = self.con.cursor()
        self._exec_sql(c, 'VACUUM ANALYZE %s' % t)
        self.con.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)

    def sr_info_for_srid(self, srid):
        """Return a human-readable spatial-reference name for an SRID,
        or 'Unknown' when PostGIS is absent or the lookup fails."""
        if not self.has_postgis:
            return 'Unknown'

        try:
            c = self.con.cursor()
            self._exec_sql(
                c, "SELECT srtext FROM spatial_ref_sys WHERE srid = '%d'" % srid)
            srtext = c.fetchone()[0]

            # Try to extract just SR name (should be quoted in double
            # quotes)
            x = re.search('"([^"]+)"', srtext)
            if x is not None:
                srtext = x.group()
            return srtext
        except DbError:
            return 'Unknown'

    def insert_table_row(self, table, values, schema=None, cursor=None):
        """Insert a row with specified values to a table.

        If a cursor is specified, it doesn't commit (expecting that
        there will be more inserts) otherwise it commits immediately.
        """
        t = self._table_name(schema, table)
        sql = ''
        for value in values:
            # TODO: quote values?
            if sql:
                sql += ', '
            sql += value
        sql = 'INSERT INTO %s VALUES (%s)' % (t, sql)
        if cursor:
            self._exec_sql(cursor, sql)
        else:
            self._exec_sql_and_commit(sql)

    def _exec_sql(self, cursor, sql):
        """Execute *sql* on *cursor*, wrapping psycopg2 errors in
        QgsProcessingException with the failing query appended."""
        try:
            cursor.execute(sql)
        except psycopg2.Error as e:
            raise QgsProcessingException(
                str(e) + ' QUERY: ' + e.cursor.query.decode(e.cursor.connection.encoding))

    def _exec_sql_and_commit(self, sql):
        """Tries to execute and commit some action, on error it rolls
        back the change.
        """
        # NOTE(review): _exec_sql raises QgsProcessingException, not
        # DbError, so this rollback path may never trigger -- confirm.
        try:
            c = self.con.cursor()
            self._exec_sql(c, sql)
            self.con.commit()
        except DbError:
            self.con.rollback()
            raise

    def _quote(self, identifier):
        """Quote identifier if needed."""
        # Make sure it's python unicode string
        identifier = str(identifier)

        # Is it needed to quote the identifier?
        if self.re_ident_ok.match(identifier) is not None:
            return identifier

        # It's needed - let's quote it (and double the double-quotes)
        return u'"%s"' % identifier.replace('"', '""')

    def _quote_unicode(self, txt):
        """Make the string safe - replace ' with ''.
        """
        # make sure it's python unicode string
        txt = str(txt)
        return txt.replace("'", "''")

    def _table_name(self, schema, table):
        """Return a quoted, optionally schema-qualified table name."""
        if not schema:
            return self._quote(table)
        else:
            return u'%s.%s' % (self._quote(schema), self._quote(table))
def ogrConnectionStringAndFormatFromLayer(layer):
    """Return (connection string, quoted OGR driver name) for a layer.

    Note the asymmetry: only the driver name is wrapped in double
    quotes; the connection string is returned unquoted.
    """
    provider = layer.dataProvider().name()
    if provider == 'spatialite':
        # dbname='/geodata/osm_ch.sqlite' table="places" (Geometry) sql=
        regex = re.compile("dbname='(.+)'")
        r = regex.search(str(layer.source()))
        ogrstr = r.groups()[0]
        format = 'SQLite'
    elif provider == 'postgres':
        # dbname='ktryjh_iuuqef' host=spacialdb.com port=9999
        # user='******' password='******' sslmode=disable
        # key='gid' estimatedmetadata=true srid=4326 type=MULTIPOLYGON
        # table="t4" (geom) sql=
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        conninfo = dsUri.connectionInfo()
        conn = None
        ok = False
        # Retry until the connection succeeds or the user cancels the
        # credentials dialog.
        while not conn:
            try:
                conn = psycopg2.connect(dsUri.connectionInfo())
            except psycopg2.OperationalError:
                (ok, user, passwd) = QgsCredentials.instance().get(
                    conninfo, dsUri.username(), dsUri.password())
                if not ok:
                    break

                dsUri.setUsername(user)
                dsUri.setPassword(passwd)

        if not conn:
            raise RuntimeError(
                'Could not connect to PostgreSQL database - check connection info'
            )

        if ok:
            # Cache credentials that produced a working connection.
            QgsCredentials.instance().put(conninfo, user, passwd)

        ogrstr = "PG:%s" % dsUri.connectionInfo()
        format = 'PostgreSQL'
    elif provider == 'mssql':
        #'dbname=\'db_name\' host=myHost estimatedmetadata=true
        # srid=27700 type=MultiPolygon table="dbo"."my_table"
        # #(Shape) sql='
        dsUri = layer.dataProvider().uri()
        ogrstr = 'MSSQL:'
        ogrstr += 'database={0};'.format(dsUri.database())
        ogrstr += 'server={0};'.format(dsUri.host())
        if dsUri.username() != "":
            ogrstr += 'uid={0};'.format(dsUri.username())
        else:
            # No user given: fall back to Windows integrated auth.
            ogrstr += 'trusted_connection=yes;'
        if dsUri.password() != '':
            ogrstr += 'pwd={0};'.format(dsUri.password())
        ogrstr += 'tables={0}'.format(dsUri.table())
        format = 'MSSQL'
    elif provider == "oracle":
        # OCI:user/password@host:port/service:table
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        ogrstr = "OCI:"
        if dsUri.username() != "":
            ogrstr += dsUri.username()
            if dsUri.password() != "":
                ogrstr += "/" + dsUri.password()
            delim = "@"

        if dsUri.host() != "":
            ogrstr += delim + dsUri.host()
            delim = ""
            # 1521 is Oracle's default port; omit it.
            if dsUri.port() != "" and dsUri.port() != '1521':
                ogrstr += ":" + dsUri.port()
            ogrstr += "/"
            if dsUri.database() != "":
                ogrstr += dsUri.database()
        elif dsUri.database() != "":
            # No host: the "database" is treated as a TNS name.
            ogrstr += delim + dsUri.database()

        if ogrstr == "OCI:":
            raise RuntimeError(
                'Invalid oracle data source - check connection info')

        ogrstr += ":"
        if dsUri.schema() != "":
            ogrstr += dsUri.schema() + "."

        ogrstr += dsUri.table()
        format = 'OCI'
    else:
        # File-based source: strip "|layername=..." style options and
        # infer the driver from the file extension.
        ogrstr = str(layer.source()).split("|")[0]
        path, ext = os.path.splitext(ogrstr)
        format = QgsVectorFileWriter.driverForExtension(ext)

    return ogrstr, '"' + format + '"'
def ogrConnectionStringAndFormat(uri, context):
    """Generates OGR connection string and format string from layer source
    Returned values are a tuple of the connection string and format string

    Both returned values are wrapped in double quotes.
    """
    ogrstr = None
    format = None

    layer = QgsProcessingUtils.mapLayerFromString(uri, context, False)
    if layer is None:
        # Not a known layer: infer the driver from the file extension.
        path, ext = os.path.splitext(uri)
        format = QgsVectorFileWriter.driverForExtension(ext)
        return '"' + uri + '"', '"' + format + '"'
    provider = layer.dataProvider().name()
    if provider == 'spatialite':
        # dbname='/geodata/osm_ch.sqlite' table="places" (Geometry) sql=
        regex = re.compile("dbname='(.+)'")
        r = regex.search(str(layer.source()))
        ogrstr = r.groups()[0]
        format = 'SQLite'
    elif provider == 'postgres':
        # dbname='ktryjh_iuuqef' host=spacialdb.com port=9999
        # user='******' password='******' sslmode=disable
        # key='gid' estimatedmetadata=true srid=4326 type=MULTIPOLYGON
        # table="t4" (geom) sql=
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        conninfo = dsUri.connectionInfo()
        conn = None
        ok = False
        # Retry until the connection succeeds or the user cancels the
        # credentials dialog.
        while not conn:
            try:
                conn = psycopg2.connect(dsUri.connectionInfo())
            except psycopg2.OperationalError:
                (ok, user, passwd) = QgsCredentials.instance().get(
                    conninfo, dsUri.username(), dsUri.password())
                if not ok:
                    break

                dsUri.setUsername(user)
                dsUri.setPassword(passwd)

        if not conn:
            raise RuntimeError(
                'Could not connect to PostgreSQL database - check connection info'
            )

        if ok:
            # Cache credentials that produced a working connection.
            QgsCredentials.instance().put(conninfo, user, passwd)

        ogrstr = "PG:%s" % dsUri.connectionInfo()
        format = 'PostgreSQL'
    elif provider == "oracle":
        # OCI:user/password@host:port/service:table
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        ogrstr = "OCI:"
        if dsUri.username() != "":
            ogrstr += dsUri.username()
            if dsUri.password() != "":
                ogrstr += "/" + dsUri.password()
            delim = "@"

        if dsUri.host() != "":
            ogrstr += delim + dsUri.host()
            delim = ""
            # 1521 is Oracle's default port; omit it.
            if dsUri.port() != "" and dsUri.port() != '1521':
                ogrstr += ":" + dsUri.port()
            ogrstr += "/"
            if dsUri.database() != "":
                ogrstr += dsUri.database()
        elif dsUri.database() != "":
            # No host: the "database" is treated as a TNS name.
            ogrstr += delim + dsUri.database()

        if ogrstr == "OCI:":
            raise RuntimeError(
                'Invalid oracle data source - check connection info')

        ogrstr += ":"
        if dsUri.schema() != "":
            ogrstr += dsUri.schema() + "."

        ogrstr += dsUri.table()
        format = 'OCI'
    else:
        # File-based source: strip "|layername=..." style options and
        # infer the driver from the file extension.
        ogrstr = str(layer.source()).split("|")[0]
        path, ext = os.path.splitext(ogrstr)
        format = QgsVectorFileWriter.driverForExtension(ext)

    return '"' + ogrstr + '"', '"' + format + '"'
def on_accept(self):
    """Accept and run the Command.run_it method.

    For spatialite no connection check is needed. For postgres, keep
    trying to open a QPSQL connection, prompting the user for
    credentials until it succeeds or the dialog is cancelled.
    """
    db_key = self.database_combo.currentText()
    settings = self.databases[db_key]
    db_set = settings["db_settings"]

    if settings["db_type"] == "spatialite":
        pass
    else:  # postgres
        successful_connection = False
        uname = db_set["username"]
        passwd = db_set["password"]
        msg = "Log in"
        while not successful_connection:
            uri = QgsDataSourceUri()
            uri.setConnection(
                db_set["host"],
                db_set["port"],
                db_set["database"],
                db_set["username"],
                db_set["password"],
            )
            # try to connect
            # create a PostgreSQL connection using QSqlDatabase
            db = QSqlDatabase.addDatabase("QPSQL")
            # check to see if it is valid
            db.setHostName(uri.host())
            db.setDatabaseName(uri.database())
            db.setPort(int(uri.port()))
            db.setUserName(uri.username())
            db.setPassword(uri.password())
            # open (create) the connection
            if db.open():
                successful_connection = True
                break
            else:
                # todo - provide feedback what is wrong
                pass
            connInfo = uri.connectionInfo()
            (success, uname, passwd) = QgsCredentialDialog.instance().get(
                connInfo, uname, passwd, msg)
            if success:
                # Bug fix: the values were previously stored swapped
                # (username got the password and vice versa), so a retry
                # with corrected credentials could never succeed.
                db_set["username"] = uname
                db_set["password"] = passwd
            else:
                # User cancelled the credentials dialog.
                return
    self.command.run_it(self.filename, db_set, settings["db_type"])
    self.accept()
def ogrConnectionStringAndFormatFromLayer(layer):
    """Build an (OGR connection string, format string) pair for a map layer.

    Dispatches on the layer's data provider (spatialite, postgres, mssql,
    oracle, or generic OGR). Note that unlike the sibling
    ``ogrConnectionStringAndFormat``, the connection string returned here is
    NOT wrapped in double quotes.
    """
    provider = layer.dataProvider().name()
    if provider == 'spatialite':
        # dbname='/geodata/osm_ch.sqlite' table="places" (Geometry) sql=
        # Extract just the database file path from the datasource string.
        regex = re.compile("dbname='(.+)'")
        r = regex.search(str(layer.source()))
        ogrstr = r.groups()[0]
        format = 'SQLite'
    elif provider == 'postgres':
        # dbname='ktryjh_iuuqef' host=spacialdb.com port=9999
        # user='******' password='******' sslmode=disable
        # key='gid' estimatedmetadata=true srid=4326 type=MULTIPOLYGON
        # table="t4" (geom) sql=
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        conninfo = dsUri.connectionInfo()
        conn = None
        ok = False
        # Keep prompting for credentials until the connection succeeds or
        # the user cancels the credentials dialog.
        while not conn:
            try:
                conn = psycopg2.connect(dsUri.connectionInfo())
            except psycopg2.OperationalError:
                (ok, user, passwd) = QgsCredentials.instance().get(conninfo, dsUri.username(), dsUri.password())
                if not ok:
                    break
                dsUri.setUsername(user)
                dsUri.setPassword(passwd)

        if not conn:
            raise RuntimeError('Could not connect to PostgreSQL database - check connection info')

        if ok:
            # Cache the working credentials for later connections.
            QgsCredentials.instance().put(conninfo, user, passwd)

        ogrstr = "PG:%s" % dsUri.connectionInfo()
        format = 'PostgreSQL'
    elif provider == 'mssql':
        #'dbname=\'db_name\' host=myHost estimatedmetadata=true
        # srid=27700 type=MultiPolygon table="dbo"."my_table"
        # #(Shape) sql='
        dsUri = layer.dataProvider().uri()
        ogrstr = 'MSSQL:'
        ogrstr += 'database={0};'.format(dsUri.database())
        ogrstr += 'server={0};'.format(dsUri.host())
        if dsUri.username() != "":
            ogrstr += 'uid={0};'.format(dsUri.username())
        else:
            # No username: fall back to Windows integrated authentication.
            ogrstr += 'trusted_connection=yes;'
        if dsUri.password() != '':
            ogrstr += 'pwd={0};'.format(dsUri.password())
        ogrstr += 'tables={0}'.format(dsUri.table())
        format = 'MSSQL'
    elif provider == "oracle":
        # Build an OCI connection string of the form:
        # OCI:user/password@host:port/service:table
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        ogrstr = "OCI:"
        if dsUri.username() != "":
            ogrstr += dsUri.username()
            if dsUri.password() != "":
                ogrstr += "/" + dsUri.password()
            delim = "@"

        if dsUri.host() != "":
            # NOTE(review): `delim` is only assigned when a username is
            # present; host-without-username would raise NameError here —
            # confirm whether that combination can occur.
            ogrstr += delim + dsUri.host()
            delim = ""
            # 1521 is Oracle's default listener port and may be omitted.
            if dsUri.port() != "" and dsUri.port() != '1521':
                ogrstr += ":" + dsUri.port()
            ogrstr += "/"
            if dsUri.database() != "":
                ogrstr += dsUri.database()
        elif dsUri.database() != "":
            ogrstr += delim + dsUri.database()

        if ogrstr == "OCI:":
            raise RuntimeError('Invalid oracle data source - check connection info')

        ogrstr += ":"
        if dsUri.schema() != "":
            ogrstr += dsUri.schema() + "."

        ogrstr += dsUri.table()
        format = 'OCI'
    else:
        # Generic OGR source: strip any "|layerid=..." style suffix and
        # derive the driver from the extension.
        ogrstr = str(layer.source()).split("|")[0]
        path, ext = os.path.splitext(ogrstr)
        format = QgsVectorFileWriter.driverForExtension(ext)
    return ogrstr, '"' + format + '"'
def on_accept(self):
    """Accept and run the Command.run_it method.

    For PostgreSQL databases, first verifies the connection, prompting the
    user for (new) credentials until a connection succeeds or the dialog is
    cancelled; then runs the selected checks against the database.
    """
    db_key = self.database_combo.currentText()
    settings = self.databases[db_key]
    db_set = settings["db_settings"]
    if settings["db_type"] == "spatialite":
        pass
    else:  # postgres
        successful_connection = False
        uname = db_set["username"]
        passwd = db_set["password"]
        msg = "Log in"
        while not successful_connection:
            uri = QgsDataSourceUri()
            uri.setConnection(
                db_set["host"],
                db_set["port"],
                db_set["database"],
                db_set["username"],
                db_set["password"],
            )
            # try to connect
            # create a PostgreSQL connection using QSqlDatabase
            # NOTE(review): addDatabase() is called on every loop iteration
            # with the default connection name — Qt warns about replacing an
            # existing connection; consider creating it once before the loop.
            db = QSqlDatabase.addDatabase("QPSQL")
            # check to see if it is valid
            db.setHostName(uri.host())
            db.setDatabaseName(uri.database())
            try:
                # port can be an empty string, e.g. for spatialite db's
                db.setPort(int(uri.port()))
            except ValueError:
                # TODO: I've seen this uri.port() handling before in some
                # other file, this can probably be refactored.
                pass
            db.setUserName(uri.username())
            db.setPassword(uri.password())
            # open (create) the connection
            if db.open():
                successful_connection = True
                break
            else:
                # todo - provide feedback what is wrong
                pass
            connInfo = uri.connectionInfo()
            (success, uname, passwd) = QgsCredentialDialog.instance().get(
                connInfo, uname, passwd, msg)
            if success:
                # Persist the credentials the user just entered.
                db_set["username"] = uname
                db_set["password"] = passwd
            else:
                # User cancelled the credentials dialog.
                return
    # Collect which validation checks the user enabled.
    checks = []
    if self.check_manhole_indicator.isChecked():
        checks.append("manhole_indicator")
    if self.check_pipe_friction.isChecked():
        checks.append("pipe_friction")
    if self.check_manhole_area.isChecked():
        checks.append("manhole_area")
    self.command.run_it(
        checks,
        self.check_only_empty_fields.isChecked(),
        db_set,
        settings["db_type"],
    )
    self.accept()
def processAlgorithm(self, parameters, context, feedback):
    """Export a PostgreSQL schema to a GeoPackage via ogr2ogr, then rewrite
    ARRAY-typed columns inside the GeoPackage to PostgreSQL array syntax.

    :param parameters: algorithm parameter dict (DATABASE, SCHEMA,
        SHAPEFILE, GEOPACKAGE)
    :param context: QgsProcessingContext
    :param feedback: QgsProcessingFeedback used for progress/log output
    :raises QgsProcessingException: on invalid clip shape or ogr2ogr failure
    """
    # Retrieving parameters — connection handling differs before/after
    # QGIS 3.14 (the connections API was introduced in 3.14).
    if qgs_version < 31400:
        connection_name = self.parameterAsString(parameters, self.DATABASE, context)
        db = postgis.GeoDB.from_name(connection_name)
        uri = db.uri
        schema = self.parameterAsString(parameters, self.SCHEMA, context)
    else:
        connection_name = self.parameterAsConnectionName(
            parameters, self.DATABASE, context)
        md = QgsProviderRegistry.instance().providerMetadata('postgres')
        conn = md.createConnection(connection_name)
        uri = QgsDataSourceUri(conn.uri())
        schema = self.parameterAsSchema(parameters, self.SCHEMA, context)
    shape = self.parameterAsFile(parameters, self.SHAPEFILE, context)
    geopackage = parameters[self.GEOPACKAGE]
    # Execute ogr2ogr only in case the shape is Valid or no shape was provided
    shape_qgs = QgsVectorLayer(shape, 'test_valid', 'ogr')
    # Test is shape is valid
    if shape_qgs.isValid() or shape == '':
        feedback.pushInfo("Valid shape")
    else:
        raise QgsProcessingException("Invalid shape")
    # Text for ogr2ogr — when a clip shape is given, its extent is passed
    # as a -spat spatial filter.
    extension = shape_qgs.extent()
    if shape == '':
        target_ogr = "ogr2ogr -f GPKG {} -overwrite -forceNullable {} ".format(
            geopackage, shape)
    else:
        target_ogr = "ogr2ogr -f GPKG {} -overwrite -forceNullable -spat {} {} {} {} ".format(
            geopackage, str(extension.xMinimum()), str(extension.yMinimum()),
            str(extension.xMaximum()), str(extension.yMaximum()))
    # NOTE(review): the command string embeds the password and is later
    # echoed to the log and run with shell=True — both a credentials leak
    # and a shell-injection risk if any parameter contains shell metachars.
    source_ogr = 'PG:"host={} dbname={} schemas={} port={} user={} password={}" '.format(
        uri.host(), uri.database(), schema, uri.port(), uri.username(),
        uri.password())
    string_ogr = target_ogr + source_ogr
    feedback.pushInfo('Text for ogr2ogr = ' + string_ogr)
    # Export schema to geopackage
    try:
        processo = subprocess.run(string_ogr, shell=True, check=True,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE,
                                  stdin=subprocess.DEVNULL)
    except subprocess.CalledProcessError as e:
        # Surface ogr2ogr's stderr as the processing error message.
        raise QgsProcessingException(str(e.stderr.decode('utf-8')))
    # Connect with PostGIS database
    con = psycopg2.connect(user=uri.username(), password=uri.password(),
                           host=uri.host(), port=uri.port(),
                           database=uri.database())
    # NOTE(review): uri.uri() includes the password — avoid logging it.
    feedback.pushInfo('Uri = ' + str(uri))
    feedback.pushInfo('Uri text = ' + uri.uri())
    feedback.pushInfo('Connection = ' + str(con))
    feedback.pushInfo('')
    # Query columns with array type
    with con:
        select_schema_tables = "SELECT table_name FROM information_schema.tables " \
            "WHERE table_type = '{}' AND table_schema = '{}'".format('BASE TABLE', schema)
        feedback.pushInfo('consulta = ' + select_schema_tables)
        cur = con.cursor()
        cur.execute(select_schema_tables)
        rows = cur.fetchall()
        schema_tables = [table[0] for table in rows]
        feedback.pushInfo('schema tables = ' + str(schema_tables))
        feedback.pushInfo('')
        dict_table_arrcolumns = dict()
        feedback.pushInfo(
            'Loop para gerar dicionário. Uma lista contendo nome das colunas ARRAY para cada tabela'
        )
        # Map each table to the list of its ARRAY-typed column names.
        for table in schema_tables:
            select_columns = "SELECT column_name FROM information_schema.columns " \
                "WHERE data_type = 'ARRAY' AND table_schema = '{}' AND table_name = '{}'".format(schema, table)
            feedback.pushInfo('consulta para tabela ' + table + ': ' + select_columns)
            cur.execute(select_columns)
            rows = cur.fetchall()
            table_arrcolumns = [table[0] for table in rows]
            if table_arrcolumns != []:
                dict_table_arrcolumns[table] = table_arrcolumns
        feedback.pushInfo('dicionario tabelas - colunas tipo array = ' +
                          str(dict_table_arrcolumns))
        feedback.pushInfo('')
        cur.close()
    # `with con` only committed the transaction; close explicitly.
    con.close()
    # =============================================================================
    #     # Connect with Geopackage to UPDATE columns of array type
    #     feedback.pushInfo('Connecting to Geopackage to update array columns')
    #     with lite.connect(geopackage) as con:
    #         feedback.pushInfo('Con = ' + str(con))
    #
    #         # Create cursor
    #         cur = con.cursor()
    #
    #         # Loop in dictionary of table:array columns
    #         for table, arrcolumn in dict_table_arrcolumns.items():
    #             for column in arrcolumn:
    #                 # Select query = """SELECT tipoproduto, '{' || substr(replace(tipoproduto, ')', '}'), 4) FROM {}""".format(table)
    #                 update_query = """UPDATE {} SET {} = '{{' || substr(replace({}, ')', '}}'), 4)""".format(table, column, column)
    #                 feedback.pushInfo('Update query = ' + update_query)
    #                 cur.execute(update_query)
    #                 con.commit()
    # =============================================================================
    # UPDATE columns of array type inside Geopackage using PyQGIS
    for table, arrcolumn in dict_table_arrcolumns.items():
        uri_geopackage = geopackage + '|layername=' + table
        source = QgsVectorLayer(uri_geopackage, 'geopackage_layer', 'ogr')
        if not source.isValid():
            raise QgsProcessingException('Source layer not valid')
        features = source.getFeatures()
        for f in features:
            fid = f.id()
            for column in arrcolumn:
                fvalue = f[column]
                # NULL attributes come back as QVariant — skip them.
                if isinstance(fvalue, QVariant):
                    continue
                # Rewrite the serialized value into Postgres array syntax:
                # drop the first 3 chars and close with '}' instead of ')'.
                # NOTE(review): assumes the exported value starts with a
                # 3-char prefix (e.g. "(..") — confirm against ogr2ogr output.
                mfvalue = '{' + fvalue.replace(')', '}')[3:]
                findex = source.fields().indexFromName(column)
                attr = {findex: mfvalue}
                source.dataProvider().changeAttributeValues({fid: attr})
    return {'Result': 'Exported'}
def processAlgorithm(self, parameters, context, feedback):
    """Publish every table of a PostgreSQL schema as GeoServer feature types
    via the GeoServer REST API.

    an URL example is
    'http://localhost:8080/geoserver/rest/workspaces/cite/datastores/Publicacao/featuretypes'

    :param parameters: algorithm parameter dict (DATABASE, SCHEMA, URL,
        USER, PASSWORD, PREFIX)
    :param context: QgsProcessingContext
    :param feedback: QgsProcessingFeedback used for progress/log output
    """
    # Retrieving parameters — connection handling differs before/after
    # QGIS 3.14 (the connections API was introduced in 3.14).
    if qgs_version < 31400:
        connection_name = self.parameterAsString(parameters, self.DATABASE, context)
        db = postgis.GeoDB.from_name(connection_name)
        uri = db.uri
        schema = self.parameterAsString(parameters, self.SCHEMA, context)
    else:
        connection_name = self.parameterAsConnectionName(
            parameters, self.DATABASE, context)
        md = QgsProviderRegistry.instance().providerMetadata('postgres')
        conn = md.createConnection(connection_name)
        uri = QgsDataSourceUri(conn.uri())
        schema = self.parameterAsSchema(parameters, self.SCHEMA, context)
    url = parameters[self.URL]
    headers = {'Content-type': 'text/xml'}
    user = parameters[self.USER]
    password = parameters[self.PASSWORD]
    prefix = parameters[self.PREFIX]

    # Debugging info. The password is deliberately not logged.
    feedback.pushInfo('Dados de conexão')
    feedback.pushInfo('url = ' + url)
    feedback.pushInfo('user = ' + user)
    feedback.pushInfo('prefix = ' + prefix)
    feedback.pushInfo('')

    # BUG FIX: the original re-assigned `uri = db.uri` here, but `db` is
    # only bound in the `qgs_version < 31400` branch, which raised
    # NameError on QGIS >= 3.14. `uri` is already set in both branches.
    con = psycopg2.connect(user=uri.username(),
                           password=uri.password(),
                           host=uri.host(),
                           port=uri.port(),
                           database=uri.database())
    feedback.pushInfo('')
    feedback.pushInfo('Connection = ' + str(con) + '\n')

    # List every table of the target schema.
    with con:
        select_schema_tables = "SELECT table_name FROM information_schema.tables " \
            "WHERE table_schema = '{}'".format(schema)
        cur = con.cursor()
        cur.execute(select_schema_tables)
        rows = cur.fetchall()
        schema_tables = [table[0] for table in rows]
        feedback.pushInfo('Schema Tables = ' + str(schema_tables) + '\n')

    # GeoServer naming: geometry-suffix letter -> human readable label.
    dicio = {'P': '(Ponto)', 'L': '(Linha)', 'A': '(Área)'}
    # Names are with underline and with first letter in uppercase.
    # The name of the project goes first.
    names1 = [
        item[item.find('_') + 1:].replace('_', ' ').title().replace(' ', '_')
        for item in schema_tables
    ]
    names2 = [prefix + '_' + item for item in names1]
    titles = [
        prefix + ' ' + item.replace('_', ' ')[:-1] + dicio[item[-1]]
        if item[-1] in 'PLA' else prefix + ' ' + item.replace('_', ' ')
        for item in names1
    ]

    # EDGV 3 category dictionary (table prefix -> category title).
    dicio_cat = {
        'ENC': 'Energia e Comunicações',
        'ECO': 'Estrutura Econômica',
        'HID': 'Hidrografia',
        'LML': 'Limites e Localidades',
        'PTO': 'Pontos de Referência',
        'REL': 'Relevo',
        'SAB': 'Saneamento Básico',
        'TRA': 'Sistema de Transporte',
        'AER': 'Subsistema Aeroportuário',
        'DUT': 'Subsistema Dutos',
        'FER': 'Subsistema Ferroviário',
        'HDV': 'Subsistema Rodoviário',
        'ROD': 'Subsistema Rodoviário',
        'VEG': 'Vegetação',
        'VER': 'Área Verde',
        'CBGE': 'Classes Base do Mapeamento Topográfico em Grandes Escalas',
        'LAZ': 'Cultura e Lazer',
        'EDF': 'Edificações',
        'EMU': 'Estrutura de Mobilidade Urbana'
    }

    feedback.pushInfo(
        'Payloads dará informação sobre a nomenclatura exibida pelo Geoserver'
    )
    # Build one <featureType> XML payload per table.
    payloads = []
    for i in range(len(names1)):
        a = ("""<featureType> <name>""" + names2[i] + """</name> <nativeName>""" +
             schema_tables[i] + """</nativeName> <abstract>""" +
             "Camada representando a classe [" + names1[i][:names1[i].rfind('_')] +
             "] de primitiva geométrica [" + names1[i][-1] + ':' +
             dicio[names1[i][-1]][1:-1] + "] da categoria [" +
             schema_tables[i][:schema_tables[i].find('_')].upper() + ':' +
             dicio_cat[schema_tables[i][:schema_tables[i].find('_')].upper()] +
             "] da EDGV versão [3.0] "
             "para o projeto [" + prefix + "] da instituição/provedor [IBGE/Cartografia]." +
             """</abstract> <title>""" + titles[i] +
             """</title> <enabled>true</enabled> <advertised>false</advertised> </featureType>""")
        payloads.append(a)
        feedback.pushInfo('payload = ' + a)
    feedback.pushInfo('')

    # Publicação: POST each payload; GeoServer answers with an empty body
    # on success and an error description otherwise.
    feedback.pushInfo('')
    for i, payload in enumerate(payloads):
        try:
            resp = requests.post(url,
                                 auth=(user, password),
                                 data=payload.encode('utf-8'),
                                 headers=headers)
            if resp.text == '':
                feedback.pushInfo("Camada publicada foi " + names2[i])
            else:
                feedback.pushInfo("Erro na publicação: " + resp.text)
        except Exception as e:
            # Best-effort: keep publishing the remaining layers, but report
            # the failure instead of silently swallowing it (was `except: pass`).
            feedback.pushInfo('Erro na requisição: ' + str(e))
    return {'Result': 'Layers Published'}
def symbology_graduated(self, layer):
    """Export PQI data via pgsql2shp, reproject it to EPSG:3763 and load it
    with a 5-class equal-interval graduated renderer.

    NOTE(review): hard-coded localhost credentials ("postgres"/"postgres")
    and absolute Windows paths make this machine-specific; the SQL is built
    by string concatenation from combo-box values (injection risk).
    """
    # connect to PostGIS database with the municipalities and the data table
    layer = iface.activeLayer()
    uri = QgsDataSourceUri()
    uri.setConnection("localhost", "5432", "MED", "postgres", "postgres")
    uri.setDataSource("public", "distritos", "geom", '', "gid")
    # now query the table
    db = QSqlDatabase.addDatabase("QPSQL")
    db.setDatabaseName(uri.database())
    db.setPort(int(uri.port()))
    db.setUserName(uri.username())
    db.setPassword(uri.password())
    db.open()
    # User selections from the dialog combo boxes.
    attribute_choose = self.dlg.comboBox.currentText()
    level_choose = self.dlg.comboBox_5.currentText()
    pqi_choose = self.dlg.comboBox_6.currentText()
    ICD_choose = self.dlg.comboBox_8.currentText()
    year_choose = self.dlg.comboBox_7.currentText()
    #if self.dlg.comboBox_7.currentText()!='all':
    import os
    directory = 'C:\OSGeo4W64/apps\qgis\python\plugins\hcqi_tool'
    # Write a .bat file that runs pgsql2shp with the filtered PQI query.
    with open(os.path.join(directory, 'pqi.bat'), 'w') as OPATH:
        OPATH.writelines([
            'SET Path=%WINDIR%\system32;%WINDIR%;%WINDIR%\System32\Wbem;&',
            'C:/"Program Files"/PostgreSQL/12/bin/pgsql2shp.exe -f C:/Users/Lia/AppData/Roaming/QGIS/QGIS3/profiles/default/layer_pqi2.shp -u postgres -P postgres MED "SELECT distritos.distrito,data_pqi.valor_pqi,geom FROM data_pqi INNER JOIN distritos ON data_pqi.geo=distritos.distrito WHERE (data_pqi.pqi='
            + "'" + str(pqi_choose) + "'" + ' AND data_pqi.chrono=' + "'" +
            str(year_choose) + "'" + ' AND data_pqi.codificacao=' + "'" +
            str(ICD_choose) + "'" +
            ') GROUP BY distritos.distrito, data_pqi.valor_pqi, data_pqi.chrono, distritos.geom ORDER BY distritos.distrito"'
        ])
    # QMessageBox.about(self.dlg.checkBox, 'teste', str('yes'))
    #layer = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/layer_average.shp"
    #os.system(os.path.join(QGIS_DIR, "python/plugins/hcqi_tool/average.bat"))
    # Run the bat file synchronously to produce the shapefile.
    os.system(os.path.join(QGIS_DIR, "python/plugins/hcqi_tool/pqi.bat"))
    layer = "C:/Users/Lia/AppData/Roaming/QGIS/QGIS3/profiles/default/layer_pqi2.shp"
    #vdlayer = QgsVectorLayer(str(layer), "layer", "ogr")
    # QgsMapLayerRegistry.instance().addMapLayer(vdlayer)
    layer_proj = "C:/Users/Lia/AppData/Roaming/QGIS/QGIS3/profiles/default/layer_pqi_proj_etrs89.shp"
    # Reproject the exported shapefile to ETRS89 / Portugal TM06.
    proj_layer = Processing.runAlgorithm(
        "native:reprojectlayer", {
            'INPUT': str(layer),
            'TARGET_CRS': QgsCoordinateReferenceSystem('EPSG:3763'),
            'OPERATION':
            '+proj=pipeline +step +proj=unitconvert +xy_in=deg +xy_out=rad +step +proj=tmerc +lat_0=39.6682583333333 +lon_0=-8.13310833333333 +k=1 +x_0=0 +y_0=0 +ellps=GRS80',
            'OUTPUT': str(layer_proj)
        })
    vdlayer = QgsVectorLayer(str(layer_proj), "layer", "ogr")
    fields_at = vdlayer.fields()
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(fields_at))
    self.field_names_at = fields_at.names()
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(self.field_names_at))
    # Assumes the value to classify on is the second field of the export.
    mean = self.field_names_at[1]
    # self.field = self.dlg.comboBox.currentText()
    # self.classes = self.dlg.spinBox.value()
    # Build a 5-class equal-interval graduated renderer on that field.
    myRenderer = QgsGraduatedSymbolRenderer()
    myRenderer.setClassAttribute(mean)
    myRenderer.setMode(QgsGraduatedSymbolRenderer.EqualInterval)
    myRenderer.updateClasses(vdlayer, QgsGraduatedSymbolRenderer.EqualInterval, 5)
    myStyle = QgsStyle().defaultStyle()
    defaultColorRampNames = myStyle.colorRampNames()
    # Color ramp chosen by index into the default style's ramp list.
    ramp = myStyle.colorRamp(defaultColorRampNames[5])
    myRenderer.updateColorRamp(ramp)
    vdlayer.setRenderer(myRenderer)
    QgsProject.instance().addMapLayer(vdlayer)
def symbology_categorized(self, layer):
    """Export an aggregated query via pgsql2shp and load the result with a
    categorized renderer on the 'MODE' field.

    NOTE(review): hard-coded localhost credentials ("postgres"/"postgres")
    and absolute Windows paths make this machine-specific; the SQL is built
    by string concatenation from a combo-box value (injection risk).
    """
    # connect to PostGIS database with the municipalities and the data table
    layer = iface.activeLayer()
    uri = QgsDataSourceUri()
    uri.setConnection("localhost", "5432", "MED", "postgres", "postgres")
    uri.setDataSource("public", "distritos2", "geom", '', "gid")
    # now query the table
    db = QSqlDatabase.addDatabase("QPSQL")
    db.setDatabaseName(uri.database())
    db.setPort(int(uri.port()))
    db.setUserName(uri.username())
    db.setPassword(uri.password())
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(uri.database()))
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(uri.port()))
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(uri.username()))
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(uri.password()))
    db.open()
    # get attribute
    attribute = self.dlg.comboBox.currentText()
    # get values from attribute
    # n = len(self.field_names)
    # lista_attrib = []
    # for i in range(0,n):
    #     f = self.field_names[i]
    #     if f==str(attribute):
    #         number = i
    # for feat in layer.getFeatures():
    #     attrb = feat.attributes()
    #     attribute_read = attrb[number] #reads the attribute one by one
    #     lista_attrib = lista_attrib + [str(attribute_read)]
    # lista_attributes = lista_attrib
    ## list without duplicates
    # woduplicates = list(set(lista_attributes))
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(attribute))
    # Open the year-selection sub-dialog when the year attribute is chosen.
    if attribute == 'ANO':
        if self.ANO is None:
            self.ANO = year_attrib()
        self.ANO.show()
    # create tuple with the colors and the fields
    # list_color = ["yellow", "darkcyan", "red", "grey"]
    ##list_fields = woduplicates
    # list_fields = ['1', '2', '6', " "]
    # dicti = {'1':('yellow','1'), '2':('darkcyan','2'), '6':('red', '6'), '':("grey", ' ')}
    # tuples = zip(list_color, list_fields)
    ## get attributes
    # attrb = woduplicates
    ## create dict with the tuples and the attributes
    # attribute_sym = dict(zip(attrb, tuples))
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(attribute_sym))
    ## symbolize
    # categories = []
    ## get default style
    # myStyle = QgsStyleV2().defaultStyle()
    ##QMessageBox.about(self.dlg.checkBox, 'teste', str(myStyle))
    ### get a list of default color ramps [u'Blues', u'BrBG', u'BuGn'....]
    # defaultColorRampNames = myStyle.colorRampNames()
    ##QMessageBox.about(self.dlg.checkBox, 'teste', str(defaultColorRampNames))
    ### setting ramp to Blues, first index of defaultColorRampNames
    # ramp = myStyle.colorRamp(defaultColorRampNames[0])
    ##pr = ramp.properties()
    ##QMessageBox.about(self.dlg.checkBox, 'teste', str(pr))
    ## set up an empty categorized renderer and assign the color ramp
    # self.field = self.dlg.comboBox.currentText()
    # renderer = QgsCategorizedSymbolRendererV2(self.field, [])
    # renderer.setSourceColorRamp(ramp)
    # layer.setRendererV2(renderer)
    # converter string para numeric
    #subprocess.call('ALTER TABLE sample_reside_join ALTER COLUMN totdias TYPE integer USING (trim(totdias)::integer);')
    # subprocess.call('"C:\Program Files\PostgreSQL/12/bin/pgsql2shp.exe" -f ' + '"C:\Users\Lia\Desktop/categorized.shp" ' + '-h localhost -p 5432 -u postgres -P postgres LIA ' + '"SELECT freguesia, mode(ADM_TIP), geom FROM grande_porto LEFT OUTER JOIN sample_reside_join ON grande_porto.dicofre = sample_reside_join.reside GROUP BY freguesia, geom ORDER BY freguesia"')
    # Export the aggregated query to a shapefile in the user profile folder.
    shape = QFileInfo(
        QgsApplication.qgisUserDatabaseFilePath()).path() + "/result.shp"
    subprocess.call(
        '"C:\Program Files\PostgreSQL/12/bin/pgsql2shp.exe" -f ' + '"' +
        str(shape) + '" ' + '-h localhost -p ' + str(uri.port()) + ' -u ' +
        str(uri.username()) + ' -P ' + str(uri.password()) + ' ' +
        str(uri.database()) + ' "SELECT freguesia, sum(' + str(attribute) +
        '), geom FROM grande_porto LEFT OUTER JOIN sample_reside_join ON grande_porto.dicofre = sample_reside_join.reside GROUP BY freguesia, geom ORDER BY freguesia"'
    )
    # subprocess.call('"C:\Program Files\PostgreSQL/12/bin/pgsql2shp.exe" -f ' + '"C:\Users\Lia\Desktop/categorized2.shp" ' + '-h localhost -p 5432 -u postgres -P postgres LIA ' + '"SELECT freguesia,(cnt_2/total::float)*100 perc_adm FROM (SELECT freguesia, count(*) total, sum(CASE WHEN adm_tip::integer = 2 THEN 1 ELSE 0 END) cnt_2, geom FROM grande_porto LEFT OUTER JOIN sample_reside_join ON grande_porto.dicofre = sample_reside_join.reside GROUP BY freguesia, geom) x ORDER BY freguesia"')
    #QMessageBox.about(self.dlg.checkBox, 'teste',
    # '"C:\Program Files\PostgreSQL/12/bin/pgsql2shp.exe" -f ' + '"' + str(
    # shape) + '" ' + '-h localhost -p ' + str(uri.port()) + ' -u ' + str(
    # uri.username()) + ' -P ' + str(uri.password()) + str(
    # uri.database()) + ' "SELECT freguesia, mode(' + str(
    # attribute) + '), geom FROM grande_porto LEFT OUTER JOIN sample_reside_join ON grande_porto.dicofre = sample_reside_join.reside GROUP BY freguesia, geom ORDER BY freguesia"')
    # vdlayer = QgsVectorLayer(unicode("C:\Users\Lia\Desktop/categorized.shp").encode('utf8'), "average", "ogr")
    vdlayer = QgsVectorLayer(str(shape), "average", "ogr")
    fields_at = vdlayer.fields()
    self.field_names_at = fields_at.names()
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(self.field_names_at))
    mode = self.field_names_at[1]
    # get values from attribute
    n = len(self.field_names_at)
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(n))
    lista_attrib = []
    # Find the index of the 'MODE' field produced by the export.
    for i in range(0, n):
        f = self.field_names_at[i]
        # QMessageBox.about(self.dlg.checkBox, 'teste', str(f))
        if f == 'MODE':
            #QMessageBox.about(self.dlg.checkBox, 'teste', str(f))
            number = i
    # Collect every feature's value of that field.
    for feat in vdlayer.getFeatures():
        attrb = feat.attributes()
        QMessageBox.about(self.dlg.checkBox, 'teste', str(attrb))
        attribute_read = attrb[number]  # reads the attribute one by one
        lista_attrib = lista_attrib + [str(attribute_read)]
    lista_attributes = lista_attrib
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(lista_attributes))
    # list without duplicates
    woduplicates = list(set(lista_attributes))
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(woduplicates))
    # Replace NULL category labels with a blank so they still render.
    for index, ii in enumerate(woduplicates):
        if ii == 'NULL':
            woduplicates[index] = ' '
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(woduplicates))
    # 'SELECT freguesia, adm_tip, CASE WHEN adm_tip='2' THEN count(adm_tip)/17::float*100 ELSE 0 END FROM grande_porto LEFT OUTER JOIN sample_reside_join ON grande_porto.dicofre = sample_reside_join.reside GROUP BY freguesia, adm_tip ORDER BY freguesia'
    # create tuple with the colors and the fields
    # NOTE(review): only 4 colors are defined — with more than 4 distinct
    # values, zip() silently drops the extra categories.
    list_color = ["yellow", "darkcyan", "red", "grey"]
    # list_fields = woduplicates
    # list_fields = ['1', '2', '6', " "]
    # dicti = {'1':('yellow','1'), '2':('darkcyan','2'), '6':('red', '6'), '':("grey", ' ')}
    tuples = zip(list_color, woduplicates)
    # create dict with the tuples and the attributes
    attribute_sym = dict(zip(woduplicates, tuples))
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(attribute_sym))
    # symbolize
    categories = []
    for code, (color, fields) in attribute_sym.items():
        sym = QgsSymbol.defaultSymbol(vdlayer.geometryType())
        sym.setColor(QColor(color))
        category = QgsRendererCategory(code, sym, fields)
        categories.append(category)
    # QMessageBox.about(self.dlg.checkBox, 'teste', str(categories))
    # self.field = self.dlg.comboBox.currentText()
    renderer = QgsCategorizedSymbolRenderer('MODE', categories)
    # renderer.setSourceColorRamp(ramp)
    vdlayer.setRenderer(renderer)
    # layer.setRendererV2(renderer)
    QgsProject.instance().addMapLayer(vdlayer)
    self.canvas = QgsMapCanvas()
    # self.canvas.scene().addItem(vdlayer)
    # NOTE(review): the layer is added to the project twice.
    QgsProject.instance().addMapLayer(vdlayer)
    # self.canvas = QgsMapCanvas()
    # self.canvas.setExtent(layer.extent())
    # self.canvas.setLayerSet( [ QgsMapCanvasLayer(layer) ] )
    # self.setCentralWidget(self.canvas)
    if self.dlg.checkBox_2.isChecked():
        Processing.initialize()
        layer_limit = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()
                                ).path() + "/layer_limit.shp"
        # QMessageBox.about(self.dlg.checkBox, 'teste', str(layer.source()))
        # NOTE(review): this runAlgorithm call signature and
        # unicode/QgsMapLayerRegistry below are QGIS 2 / Python 2 remnants
        # and will fail on QGIS 3 — confirm target version.
        Processing.runAlgorithm("qgis:dissolve", None, layer, True, "None",
                                layer_limit)
        l_limit = QgsVectorLayer(
            unicode(layer_limit).encode('utf8'), "layer_limit", "ogr")
        QgsMapLayerRegistry.instance().addMapLayer(l_limit)
def createBar(self):
    """Export PQI values for one district via pgsql2shp, aggregate them per
    year and show a matplotlib line plot of hospitalizations.

    NOTE(review): hard-coded localhost credentials ("postgres"/"postgres")
    and absolute Windows paths make this machine-specific; the SQL is built
    by string concatenation from combo-box values (injection risk).
    """
    #get year list for xx
    #year_list = list(set(features_year))
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(year_list.sort()))
    layer = iface.activeLayer()
    uri = QgsDataSourceUri()
    uri.setConnection("localhost", "5432", "MED", "postgres", "postgres")
    uri.setDataSource("public", "distritos", "geom", '', "gid")
    # now query the table
    db = QSqlDatabase.addDatabase("QPSQL")
    db.setDatabaseName(uri.database())
    db.setPort(int(uri.port()))
    db.setUserName(uri.username())
    db.setPassword(uri.password())
    db.open()
    PQI_code = self.window.comboBox_2.currentText()
    dist = self.window.comboBox.currentText()
    import os
    directory = 'C:\OSGeo4W64/apps\qgis\python\plugins\hcqi_tool'
    # Write a .bat file that runs pgsql2shp with the filtered PQI query.
    with open(os.path.join(directory, 'plot.bat'), 'w') as OPATH:
        OPATH.writelines([
            'SET Path=%WINDIR%\system32;%WINDIR%;%WINDIR%\System32\Wbem;&',
            'C:/"Program Files"/PostgreSQL/12/bin/pgsql2shp.exe -f C:/Users/Lia/AppData/Roaming/QGIS/QGIS3/profiles/default/layer_plot.shp -u postgres -P postgres MED "SELECT distritos.distrito,data_pqi.valor_pqi,data_pqi.chrono,geom FROM data_pqi INNER JOIN distritos ON data_pqi.geo=distritos.distrito WHERE (data_pqi.pqi='
            + "'" + str(str(PQI_code)) + "'" + ' AND data_pqi.geo=' + "'" +
            str(dist) + "'" +
            ') GROUP BY distritos.distrito, data_pqi.valor_pqi, data_pqi.chrono, distritos.geom ORDER BY distritos.distrito"'
        ])
    # QMessageBox.about(self.dlg.checkBox, 'teste', str('yes'))
    # layer = QFileInfo(QgsApplication.qgisUserDatabaseFilePath()).path() + "/layer_average.shp"
    # os.system(os.path.join(QGIS_DIR, "python/plugins/hcqi_tool/average.bat"))
    # Run the bat file synchronously to produce the shapefile.
    os.system(os.path.join(QGIS_DIR, "python/plugins/hcqi_tool/plot.bat"))
    my_shp = 'C:/Users/Lia/AppData/Roaming/QGIS/QGIS3/profiles/default/layer_plot.shp'
    self.layer = QgsVectorLayer(my_shp, "layer_plot", "ogr")
    fields = self.layer.fields()
    self.field_names = [field.name() for field in fields]
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(self.field_names))
    # add pqi and year to filter
    features_pqi = []
    features_year = []
    for feat in self.layer.getFeatures():
        features_pqi = features_pqi + [feat['valor_pqi']]
        features_year = features_year + [feat['chrono']]
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(features_pqi))
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(features_year))
    # Build (year, value) pairs and sum the values per year.
    dictionary = [(i, j) for i, j in zip(features_year, features_pqi)]
    #dictionary = dict(zip(features_year, features_pqi))
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(dictionary))
    # find repeated elements in dictionary and sum
    d = {x: 0 for x, _ in dictionary}
    for name, num in dictionary:
        d[name] += num
    # using map
    Output = list(map(tuple, d.items()))
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(Output[0]))
    #sort tuples by key
    # NOTE(review): list.sort() sorts in place and returns None, so
    # sort_output is always None; `Output` itself is what ends up sorted.
    sort_output = Output.sort()
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(sort_output))
    # Flatten [(year, total), ...] into [year, total, year, total, ...].
    keys = [ele for key in Output for ele in key]
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(keys))
    # canvas = iface.mapCanvas()
    # layers = canvas.layers()
    # list_l = []
    # for l in layers:
    #     name = l.name()
    #     list_l.append(name)
    import matplotlib.pyplot as plt
    import numpy as np
    # Prepare the data: even indices are years (x), odd are totals (y).
    x = []
    y = []
    for u in range(0, len(keys), 2):
        #QMessageBox.about(self.dlg.checkBox, 'teste', str(keys[u]))
        x.append(keys[u])
        y.append(keys[u + 1])
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(x))
    #QMessageBox.about(self.dlg.checkBox, 'teste', str(y))
    # Plot the data
    plt.plot(x, y, label='PQI')
    plt.title('PQI02')
    plt.xlabel('Year')
    plt.ylabel('Hospitalizations per 1000 admissions')
    # Add a legend
    plt.legend()
    # Show the plot
    plt.show()
def run(self):
    """Runs the logic behind the dialogs.

    Shows the export dialog, validates the user's selections, then connects
    to the Oracle database behind the selected layer (prompting for
    credentials when needed) and runs the export task.
    """
    # Create the dialog with elements (after translation) and keep reference
    # Only create GUI ONCE in callback, so that it will only load when the plugin is started
    if self.first_start:
        self.first_start = False
        # Start a dialog window
        self.dlg = PRW_Data_OpvragerDialog()
        self.dlg.OutputLocation.setStorageMode(1)
        self.dlg.OutputLocation.setFilePath(
            self.dlg.OutputLocation.defaultRoot())
        # Set a validator on the filename lineEdit so that no random signs can be put in.
        rx2 = QRegExp(r"^[\w\-. ]+$")
        filename_validator = QRegExpValidator(rx2)
        self.dlg.FileName.setValidator(filename_validator)
    # Show the dialog
    self.dlg.show()
    # Run the dialog event loop
    result = self.dlg.exec_()
    # See if OK was pressed
    if result:
        # Extracting values from the dialog form
        self.selected_layer = self.dlg.cmb_layer.currentLayer()
        self.dateMax = self.dlg.DateMax.date().toString('yyyy-MM-dd')
        self.dateMin = self.dlg.DateMin.date().toString('yyyy-MM-dd')
        self.shpExportBool = self.dlg.exportShapefile.isChecked()
        self.fileName = self.dlg.FileName.text()
        self.outputLocation = self.dlg.OutputLocation.filePath()
        source = self.selected_layer.source()
        uri = QgsDataSourceUri(source)
        # NOTE(review): `assert` is stripped when Python runs with -O;
        # explicit raises would be more robust for input validation.
        try:
            # Check if a database is connected to the layer source
            assert uri.database(
            ), '"{layer}" heeft geen connectie met een database.'.format(
                layer=self.selected_layer.name())
            assert self.selected_layer.selectedFeatureCount(
            ) != 0, 'Geen Objecten zijn geselecteerd in laag: "{layer}".'.format(
                layer=self.selected_layer.name())
            assert self.fileName, 'Er is geen Excel uitvoernaam opgegeven.'
            assert self.outputLocation, 'Er is geen uitvoermap opgegeven.'
        except Exception as e:
            # level=2 — presumably Qgis.Critical; confirm against the enum.
            self.iface.messageBar().pushMessage("Error",
                                                str(e),
                                                level=2,
                                                duration=5)
            raise e
            # NOTE(review): unreachable — `raise e` above exits first;
            # confirm whether raising or returning was intended.
            return
        savedUsername = uri.hasParam('username')
        savedPassword = uri.hasParam('password')
        host = uri.host()
        port = uri.port()
        database = uri.database()
        self.username = uri.username()
        self.password = uri.password()
        self.dsn = cora.makedsn(host=host, port=port, service_name=database)
        errorMessage = None
        # If we have a username and password try to connect, otherwise ask for credentials
        # If the connection fails store the error and show dialog screen for credentials input
        # NOTE(review): the loops below use `self.database`, but only the
        # local `database` is assigned here — verify self.database is set
        # elsewhere, otherwise this raises AttributeError.
        # NOTE(review): `success` uses string flags 'true'/'false'/'exit';
        # real booleans or an enum would be less error-prone.
        if savedUsername is True and savedPassword is True:
            try:
                self.check_connection()
                self.run_task()
            except cora.DatabaseError as e:
                errorObj, = e.args
                errorMessage = errorObj.message
                success = 'false'
                # Credentials loop
                while success == 'false':
                    success, errorMessage = \
                        self.get_credentials(
                            host, port, self.database, message=errorMessage)
                    if success == 'exit':
                        pass
                    elif success == 'true':
                        self.run_task()
        else:
            success, errorMessage = \
                self.get_credentials(
                    host, port, self.database,
                    username=self.username, password=self.password)
            # Credentials loop
            while success == 'false':
                success, errorMessage = \
                    self.get_credentials(
                        host, port, self.database,
                        username=self.username, password=self.password,
                        message=errorMessage)
                if success == 'exit':
                    pass
                elif success == 'true':
                    self.run_task()
def ogrConnectionStringAndFormat(uri, context):
    """Generates OGR connection string and format string from layer source
    Returned values are a tuple of the connection string and format string

    :param uri: layer source string (file path or provider URI)
    :param context: QgsProcessingContext used to resolve the uri to a map layer
    :returns: tuple (connection string, format string), both wrapped in double quotes
    """
    ogrstr = None
    # NOTE(review): `format` shadows the builtin of the same name (local only).
    format = None
    layer = QgsProcessingUtils.mapLayerFromString(uri, context, False)
    if layer is None:
        # Not a known map layer: guess the OGR driver from the file extension.
        path, ext = os.path.splitext(uri)
        format = QgsVectorFileWriter.driverForExtension(ext)
        return '"' + uri + '"', '"' + format + '"'
    provider = layer.dataProvider().name()
    if provider == 'spatialite':
        # dbname='/geodata/osm_ch.sqlite' table="places" (Geometry) sql=
        # The OGR connection string is just the sqlite file path.
        regex = re.compile("dbname='(.+)'")
        r = regex.search(str(layer.source()))
        ogrstr = r.groups()[0]
        format = 'SQLite'
    elif provider == 'postgres':
        # dbname='ktryjh_iuuqef' host=spacialdb.com port=9999
        # user='******' password='******' sslmode=disable
        # key='gid' estimatedmetadata=true srid=4326 type=MULTIPOLYGON
        # table="t4" (geom) sql=
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        conninfo = dsUri.connectionInfo()
        conn = None
        ok = False
        # Try connecting; on auth failure keep asking QGIS for credentials
        # until a connection succeeds or the user cancels.
        while not conn:
            try:
                conn = psycopg2.connect(dsUri.connectionInfo())
            except psycopg2.OperationalError:
                (ok, user, passwd) = QgsCredentials.instance().get(conninfo, dsUri.username(), dsUri.password())
                if not ok:
                    break
                dsUri.setUsername(user)
                dsUri.setPassword(passwd)
        if not conn:
            raise RuntimeError('Could not connect to PostgreSQL database - check connection info')
        if ok:
            # Cache the credentials that worked for later reuse by QGIS.
            QgsCredentials.instance().put(conninfo, user, passwd)
        ogrstr = "PG:%s" % dsUri.connectionInfo()
        format = 'PostgreSQL'
    elif provider == "oracle":
        # OCI:user/password@host:port/service:table
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        ogrstr = "OCI:"
        if dsUri.username() != "":
            ogrstr += dsUri.username()
            if dsUri.password() != "":
                ogrstr += "/" + dsUri.password()
            delim = "@"
        # NOTE(review): `delim` is only assigned when a username is present;
        # a source with a host but no username would raise UnboundLocalError here.
        if dsUri.host() != "":
            ogrstr += delim + dsUri.host()
            delim = ""
            # 1521 is the default Oracle listener port, so it is omitted.
            if dsUri.port() != "" and dsUri.port() != '1521':
                ogrstr += ":" + dsUri.port()
            ogrstr += "/"
            if dsUri.database() != "":
                ogrstr += dsUri.database()
        elif dsUri.database() != "":
            ogrstr += delim + dsUri.database()
        if ogrstr == "OCI:":
            raise RuntimeError('Invalid oracle data source - check connection info')
        ogrstr += ":"
        if dsUri.schema() != "":
            ogrstr += dsUri.schema() + "."
        ogrstr += dsUri.table()
        format = 'OCI'
    else:
        # Generic OGR source: strip any "|layername=..." suffix and guess the driver.
        ogrstr = str(layer.source()).split("|")[0]
        path, ext = os.path.splitext(ogrstr)
        format = QgsVectorFileWriter.driverForExtension(ext)
    return '"' + ogrstr + '"', '"' + format + '"'
def ogrConnectionString(uri):
    """Generates OGR connection sting from layer source

    :param uri: layer source string (file path or provider URI)
    :returns: OGR connection string wrapped in double quotes
    :raises RuntimeError: when a PostgreSQL connection cannot be established
        or an Oracle source is invalid

    BUGFIX: replaced Python-2-only ``unicode(...)`` with ``str(...)`` —
    ``unicode`` is undefined on Python 3 and raised NameError for
    spatialite and generic OGR sources; the sibling
    ``ogrConnectionStringAndFormat`` already uses ``str``.
    """
    ogrstr = None
    layer = dataobjects.getObjectFromUri(uri, False)
    if layer is None:
        return '"' + uri + '"'
    provider = layer.dataProvider().name()
    if provider == 'spatialite':
        # dbname='/geodata/osm_ch.sqlite' table="places" (Geometry) sql=
        # The OGR connection string is just the sqlite file path.
        regex = re.compile("dbname='(.+)'")
        r = regex.search(str(layer.source()))
        ogrstr = r.groups()[0]
    elif provider == 'postgres':
        # dbname='ktryjh_iuuqef' host=spacialdb.com port=9999
        # user='******' password='******' sslmode=disable
        # key='gid' estimatedmetadata=true srid=4326 type=MULTIPOLYGON
        # table="t4" (geom) sql=
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        conninfo = dsUri.connectionInfo()
        conn = None
        ok = False
        # Try connecting; on auth failure keep asking QGIS for credentials
        # until a connection succeeds or the user cancels.
        while not conn:
            try:
                conn = psycopg2.connect(dsUri.connectionInfo())
            except psycopg2.OperationalError:
                (ok, user, passwd) = QgsCredentials.instance().get(conninfo, dsUri.username(), dsUri.password())
                if not ok:
                    break
                dsUri.setUsername(user)
                dsUri.setPassword(passwd)
        if not conn:
            raise RuntimeError('Could not connect to PostgreSQL database - check connection info')
        if ok:
            # Cache the credentials that worked for later reuse by QGIS.
            QgsCredentials.instance().put(conninfo, user, passwd)
        ogrstr = "PG:%s" % dsUri.connectionInfo()
    elif provider == "oracle":
        # OCI:user/password@host:port/service:table
        dsUri = QgsDataSourceUri(layer.dataProvider().dataSourceUri())
        ogrstr = "OCI:"
        if dsUri.username() != "":
            ogrstr += dsUri.username()
            if dsUri.password() != "":
                ogrstr += "/" + dsUri.password()
            delim = "@"
        # NOTE(review): `delim` is only assigned when a username is present;
        # a source with a host but no username would raise UnboundLocalError here.
        if dsUri.host() != "":
            ogrstr += delim + dsUri.host()
            delim = ""
            # 1521 is the default Oracle listener port, so it is omitted.
            if dsUri.port() != "" and dsUri.port() != '1521':
                ogrstr += ":" + dsUri.port()
            ogrstr += "/"
            if dsUri.database() != "":
                ogrstr += dsUri.database()
        elif dsUri.database() != "":
            ogrstr += delim + dsUri.database()
        if ogrstr == "OCI:":
            raise RuntimeError('Invalid oracle data source - check connection info')
        ogrstr += ":"
        if dsUri.schema() != "":
            ogrstr += dsUri.schema() + "."
        ogrstr += dsUri.table()
    else:
        # Generic OGR source: strip any "|layername=..." suffix.
        ogrstr = str(layer.source()).split("|")[0]
    return '"' + ogrstr + '"'
def read_form(self):
    """Extracts all inputs from the dialog form and provides run_task when
    succesful.

    The selected_layer object is used to find the connection info to the
    Oracle database. If the database connection is there but either the
    username or password is missing get_credentials() will be called until
    valid credentials are entered or the task is canceled.

    :raises AssertionError: when a form field is missing or inconsistent
        (messages are user-facing, in Dutch)

    BUGFIX: in the no-stored-credentials branch the retry loop unpacked the
    result into a local ``message`` instead of ``errorMessage``, so the
    credentials dialog kept showing the first (stale) error message forever.
    """
    filter_on_height = self.dlg.cb_filterOnHeight.isChecked()
    filter_on_volumetric_weight = self.dlg.cb_filterOnVolumetricWeight.isChecked(
    )
    selected_layer = self.dlg.cmb_layers.currentLayer()
    trx_bool = self.dlg.cb_TriaxiaalProeven.isChecked()
    sdp_bool = self.dlg.cb_SamendrukkingProeven.isChecked()
    output_location = self.dlg.fileWidget.filePath()
    output_name = self.dlg.le_outputName.text()
    # Arguments handed to run_task(); extended below depending on checkboxes.
    args = {
        'selected_layer': selected_layer,
        'output_location': output_location,
        'output_name': output_name,
        'trx_bool': trx_bool,
        'sdp_bool': sdp_bool
    }
    # General Asserts
    assert isinstance(
        selected_layer, QgsVectorLayer
    ), 'De geselecteerde laag \'{}\' is geen vector laag.'.format(
        selected_layer.name())
    assert output_name, 'Het veld \'uitvoernaam\' mag niet leeg zijn.'
    assert output_location, 'Het veld \'uitvoermap\' mag niet leeg zijn.'
    if trx_bool:
        # TRX Asserts
        assert any([
            self.dlg.cb_CU.isChecked(),
            self.dlg.cb_CD.isChecked(),
            self.dlg.cb_UU.isChecked()
        ]), 'Een van de drie Proeftypes moet aangekruisd worden.'
        proef_types = []
        if self.dlg.cb_CU.isChecked():
            proef_types.append('CU')
        if self.dlg.cb_CD.isChecked():
            proef_types.append('CD')
        if self.dlg.cb_UU.isChecked():
            proef_types.append('UU')
        args['proef_types'] = proef_types
        args['ea'] = self.dlg.sb_strain.value()
        args['save_plot'] = self.dlg.cb_savePlot.isChecked()
        if self.dlg.le_vg_trx.text():
            # Field holds something like "[16.5,17.2]"; parse to sorted floats.
            volG_trx = self.dlg.le_vg_trx.text().strip('[').strip(
                ']').split(',')
            volG_trx = [float(x) for x in volG_trx]
            volG_trx.sort()
            if len(volG_trx) < 2:
                # A single boundary is not an interval: warn and auto-generate.
                self.iface.messageBar().pushMessage(
                    "Warning",
                    'Maar 1 volumegewicht interval voor triaxiaalproeven is gegeven, het interval wordt automatisch gegenereerd.',
                    level=1,
                    duration=5)
                volG_trx = None
        else:
            volG_trx = None
        args['volG_trx'] = volG_trx
    if sdp_bool:
        if self.dlg.le_vg_sdp.text():
            # Same interval parsing for the compression-test field.
            volG_sdp = self.dlg.le_vg_sdp.text().strip('[').strip(
                ']').split(',')
            volG_sdp = [float(x) for x in volG_sdp]
            volG_sdp.sort()
            if len(volG_sdp) < 2:
                self.iface.messageBar().pushMessage(
                    "Warning",
                    'Maar 1 volumegewicht interval voor samendrukkingsproeven is gegeven, het interval wordt automatisch gegenereerd.',
                    level=1,
                    duration=5)
                volG_sdp = None
        else:
            volG_sdp = None
        args['volG_sdp'] = volG_sdp
    if filter_on_height:
        args['maxH'] = self.dlg.sb_maxHeight.value()
        args['minH'] = self.dlg.sb_minHeight.value()
        assert args['maxH'] > args[
            'minH'], 'Maximum hoogte moet hoger zijn dan minimum hoogte.'
    if filter_on_volumetric_weight:
        args['maxVg'] = self.dlg.sb_maxVolumetricWeight.value()
        args['minVg'] = self.dlg.sb_minVolumetricWeight.value()
        assert args['maxVg'] > args[
            'minVg'], 'Maximum volumegewicht moet hoger zijn dan het minimum volumegewicht.'
    source = selected_layer.source()
    uri = QgsDataSourceUri(source)
    # hasParam() tells whether the credentials were stored with the layer source.
    savedUsername = uri.hasParam('username')
    savedPassword = uri.hasParam('password')
    host = uri.host()
    port = uri.port()
    database = uri.database()
    username = uri.username()
    password = uri.password()
    errorMessage = None
    if savedUsername is True and savedPassword is True:
        try:
            qb = qgis_backend.QgisBackend(host=host,
                                          port=port,
                                          database=database,
                                          username=username,
                                          password=password)
            qb.check_connection()
            args['qb'] = qb
            self.run_task(args)
        except cx_Oracle.DatabaseError as e:
            errorObj, = e.args
            errorMessage = errorObj.message
            # get_credentials() returns 'true'/'false'/'exit' as strings.
            suc = 'false'
            while suc == 'false':
                suc, qb, errorMessage = self.get_credentials(
                    host,
                    port,
                    database,
                    username=username,
                    password=password,
                    message=errorMessage)
            if suc == 'exit':
                pass
            elif suc == 'true':
                args['qb'] = qb
                self.run_task(args)
    else:
        # No stored credentials: prompt immediately, then loop until
        # success or cancel.
        suc, qb, errorMessage = self.get_credentials(host,
                                                     port,
                                                     database,
                                                     username=username,
                                                     password=password)
        while suc == 'false':
            # BUGFIX: result was previously assigned to `message`, leaving
            # `errorMessage` stale for every retry.
            suc, qb, errorMessage = self.get_credentials(host,
                                                         port,
                                                         database,
                                                         message=errorMessage)
        if suc == 'exit':
            pass
        elif suc == 'true':
            args['qb'] = qb
            self.run_task(args)
def processAlgorithm(self, parameters, context, feedback):
    """Imports every non-empty layer of a GeoPackage into the matching
    tables of a PostGIS schema (schema name must contain 'reamb')."""
    # Dummy function to enable running an alg inside an alg
    def no_post_process(alg, context, feedback):
        pass

    # Retrieving parameters
    # QGIS < 3.14 has no connection/schema parameter types, so fall back
    # to plain strings and the legacy postgis.GeoDB helper.
    if qgs_version < 31400:
        connection_name = self.parameterAsString(parameters, self.DATABASE, context)
        db = postgis.GeoDB.from_name(connection_name)
        uri = db.uri
        schema = self.parameterAsString(parameters, self.SCHEMA, context)
    else:
        connection_name = self.parameterAsConnectionName(parameters, self.DATABASE, context)
        md = QgsProviderRegistry.instance().providerMetadata('postgres')
        conn = md.createConnection(connection_name)
        uri = QgsDataSourceUri(conn.uri())
        schema = self.parameterAsSchema(parameters, self.SCHEMA, context)
    geopackage = parameters[self.GEOPACKAGE]

    # Debugging info
    '''
    feedback.pushInfo('Input parameters:')
    feedback.pushInfo('connection = ' + connection)
    feedback.pushInfo('db = ' + str(db))
    feedback.pushInfo('schema = ' + schema)
    feedback.pushInfo('geopackage = ' + geopackage)
    feedback.pushInfo('')
    '''

    # Raise error if reamb isn't in schema name
    if not 'reamb' in schema:
        raise QgsProcessingException('A palavra reamb precisa fazer parte do nome do esquema')

    # Connect with Geopackage (a GeoPackage is a SQLite file; `lite` is
    # presumably the sqlite3 module — confirm against the file's imports).
    feedback.pushInfo('Listing non-empty layers from geopackage')
    with lite.connect(geopackage) as con:
        feedback.pushInfo('Con = ' + str(con))
        layers_import = []  # will store the non-empty tables
        # Create cursor
        cur = con.cursor()
        # Fetch layer names from the GeoPackage metadata table.
        cur.execute("SELECT table_name FROM gpkg_geometry_columns")
        rows = cur.fetchall()
        layer_names = [camada[0] for camada in rows]
        feedback.pushInfo('Layers = ' + str(layer_names))
        # Append non-empty geometry layers to list
        for layer in layer_names:
            # Count rows
            # NOTE(review): table name interpolated into SQL; names come from
            # the GeoPackage's own metadata, but this is not parameterized.
            cur.execute("SELECT COUNT(1) FROM {}".format(layer))
            rows = cur.fetchall()
            rows_count = rows[0][0]
            #feedback.pushInfo('Rows = ' + str(rows_count))
            # Append to list
            if rows_count > 0:
                #feedback.pushInfo('Table non-empty = ' + str(rows_count))
                layers_import.append(layer)

    feedback.pushInfo('Non-empty tables = ' + str(layers_import))
    feedback.pushInfo('')

    # Connect with PostGIS database
    con = psycopg2.connect(user = uri.username(),
                           password = uri.password(),
                           host = uri.host(),
                           port = uri.port(),
                           database = uri.database())
    feedback.pushInfo('Uri = ' + str(uri))
    feedback.pushInfo('Uri text = ' + uri.uri())
    feedback.pushInfo('Connection = ' + str(con))

    # Clean PostGIS schema if marked
    #cleanSchema = self.parameterAsBool(parameters, self.CLEAN_SCHEMA, context)
    # Hard-coded off; the block below is currently dead code.
    cleanSchema = False
    if cleanSchema:
        with con:
            select_schema_tables = "SELECT table_name FROM information_schema.tables " \
                "WHERE table_type = '{}' AND table_schema = '{}'".format('BASE TABLE', schema)
            cur = con.cursor()
            cur.execute(select_schema_tables)
            rows = cur.fetchall()
            schema_tables = [table[0] for table in rows]
            for table in schema_tables:
                feedback.pushInfo("Deleting from {}.{}".format(schema, table))
                # NOTE(review): schema/table interpolated into SQL, not parameterized.
                cur.execute("DELETE FROM {}.{}".format(schema, table))
            con.commit()
            cur.close()
            # NOTE(review): con is only closed on this (disabled) path; when
            # cleanSchema is False the psycopg2 connection is never closed here.
            con.close()
    feedback.pushInfo('')

    # =============================================================================
    #     # Testing
    #     nome = 'cbge_trecho_arruamento_l'
    #     # QGIS Vector Layer from geopackage layer
    #     uri_geopackage = geopackage + '|layername=' + nome
    #     vlayer = QgsVectorLayer(uri_geopackage, 'geopackage_layer', 'ogr')
    #
    #     # Use database table as QGIS Vector Layer
    #     uri_tabela = uri
    #     uri_tabela.setDataSource(schema, nome, 'geom')
    #     uri_tabela.setWkbType(vlayer.wkbType())
    #     uri_tabela.setSrid(str(vlayer.sourceCrs().postgisSrid()))
    #     target = QgsVectorLayer(uri_tabela.uri(), 'teste', 'postgres')
    #     feedback.pushInfo(uri_tabela.uri())
    #     feedback.pushInfo('Validade = ' + str(target.isValid()))
    #
    #     # processing.run("script:appendfeaturestolayer", {'SOURCE_LAYER':vlayer, 'TARGET_LAYER':target, 'ACTION_ON_DUPLICATE':0}, context=context, feedback=feedback, onFinish=no_post_process)
    # =============================================================================

    # Import layers
    for layer in layers_import:
        feedback.pushInfo("Importing {}.{}".format(schema, layer))
        # QGIS Vector Layer from source
        uri_geopackage = geopackage + '|layername=' + layer
        source = QgsVectorLayer(uri_geopackage, 'geopackage_layer', 'ogr')
        if not source.isValid():
            raise QgsProcessingException('Source layer not valid')
        # QGIS Vector Layer from target
        # NOTE(review): `uri_table = uri` aliases (does not copy) the shared
        # QgsDataSourceUri — setDataSource mutates `uri` on every iteration.
        uri_table = uri
        uri_table.setDataSource(schema, layer, 'geom')
        uri_table.setWkbType(source.wkbType())
        uri_table.setSrid(str(source.sourceCrs().postgisSrid()))
        target = QgsVectorLayer(uri_table.uri(), 'schema_table', 'postgres')
        if not target.isValid():
            raise QgsProcessingException('Target layer not valid')
        # Run QGIS script for importing
        processing.run("publibase:appendfeaturestolayer", {'SOURCE_LAYER':source, 'TARGET_LAYER':target, 'ACTION_ON_DUPLICATE':0}, context=context, feedback=feedback, onFinish=no_post_process)
        feedback.pushInfo('')
    return {'Result':'Layers imported'}
class GeoDB(object):
    """Thin psycopg2-backed helper for inspecting and altering a
    PostgreSQL/PostGIS database reachable through a QgsDataSourceUri.

    SQL statements are composed by string interpolation; identifiers go
    through _quote()/_quote_unicode() for escaping, so callers should still
    treat the inputs as trusted (this mirrors the QGIS Processing helper).
    """

    @classmethod
    def from_name(cls, conn_name):
        # Alternate constructor: build the URI from a stored QGIS connection name.
        uri = uri_from_name(conn_name)
        return cls(uri=uri)

    def __init__(self, host=None, port=None, dbname=None, user=None,
                 passwd=None, service=None, uri=None):
        # Regular expression for identifiers without need to quote them
        self.re_ident_ok = re.compile(r"^\w+$")
        port = str(port)

        if uri:
            self.uri = uri
        else:
            self.uri = QgsDataSourceUri()
            if service:
                self.uri.setConnection(service, dbname, user, passwd)
            else:
                self.uri.setConnection(host, port, dbname, user, passwd)

        conninfo = self.uri.connectionInfo(False)
        err = None
        # Up to 4 attempts: initial connect plus credential re-prompts.
        for i in range(4):
            expandedConnInfo = self.uri.connectionInfo(True)
            try:
                self.con = psycopg2.connect(expandedConnInfo)
                if err is not None:
                    # A retry succeeded: cache the working credentials in QGIS.
                    QgsCredentials.instance().put(conninfo,
                                                  self.uri.username(),
                                                  self.uri.password())
                break
            except psycopg2.OperationalError as e:
                if i == 3:
                    raise QgsProcessingException(str(e))

                err = str(e)
                user = self.uri.username()
                password = self.uri.password()
                (ok, user, password) = QgsCredentials.instance().get(conninfo, user, password, err)
                if not ok:
                    raise QgsProcessingException(QCoreApplication.translate("PostGIS", 'Action canceled by user'))
                if user:
                    self.uri.setUsername(user)
                if password:
                    self.uri.setPassword(password)
            finally:
                # remove certs (if any) of the expanded connectionInfo
                expandedUri = QgsDataSourceUri(expandedConnInfo)

                sslCertFile = expandedUri.param("sslcert")
                if sslCertFile:
                    sslCertFile = sslCertFile.replace("'", "")
                    os.remove(sslCertFile)

                sslKeyFile = expandedUri.param("sslkey")
                if sslKeyFile:
                    sslKeyFile = sslKeyFile.replace("'", "")
                    os.remove(sslKeyFile)

                sslCAFile = expandedUri.param("sslrootcert")
                if sslCAFile:
                    sslCAFile = sslCAFile.replace("'", "")
                    os.remove(sslCAFile)

        self.has_postgis = self.check_postgis()

    def get_info(self):
        """Return the server's version() string."""
        c = self.con.cursor()
        self._exec_sql(c, 'SELECT version()')
        return c.fetchone()[0]

    def check_postgis(self):
        """Check whether postgis_version is present in catalog.
        """
        c = self.con.cursor()
        self._exec_sql(c, "SELECT COUNT(*) FROM pg_proc WHERE proname = 'postgis_version'")
        return c.fetchone()[0] > 0

    def get_postgis_info(self):
        """Returns tuple about PostGIS support:
        - lib version
        - installed scripts version
        - released scripts version
        - geos version
        - proj version
        - whether uses stats
        """
        c = self.con.cursor()
        self._exec_sql(c, 'SELECT postgis_lib_version(), postgis_scripts_installed(), \
                postgis_scripts_released(), postgis_geos_version(), \
                postgis_proj_version(), postgis_uses_stats()')
        return c.fetchone()

    def list_schemas(self):
        """Get list of schemas in tuples: (oid, name, owner, perms).
        """
        c = self.con.cursor()
        sql = "SELECT oid, nspname, pg_get_userbyid(nspowner), nspacl \
               FROM pg_namespace \
               WHERE nspname !~ '^pg_' AND nspname != 'information_schema'"
        self._exec_sql(c, sql)
        return c.fetchall()

    def list_geotables(self, schema=None):
        """Get list of tables with schemas, whether user has privileges,
        whether table has geometry column(s) etc.

        Geometry_columns:
        - f_table_schema
        - f_table_name
        - f_geometry_column
        - coord_dimension
        - srid
        - type
        """
        c = self.con.cursor()
        if schema:
            schema_where = " AND nspname = '%s' " % self._quote_unicode(schema)
        else:
            schema_where = \
                " AND (nspname != 'information_schema' AND nspname !~ 'pg_') "

        # LEFT OUTER JOIN: like LEFT JOIN but if there are more matches,
        # for join, all are used (not only one)

        # First find out whether PostGIS is enabled
        if not self.has_postgis:
            # Get all tables and views
            sql = """SELECT pg_class.relname, pg_namespace.nspname,
                    pg_class.relkind, pg_get_userbyid(relowner),
                    reltuples, relpages, NULL, NULL, NULL, NULL
                    FROM pg_class
                    JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
                    WHERE pg_class.relkind IN ('v', 'r', 'm', 'p')""" \
                + schema_where + 'ORDER BY nspname, relname'
        else:
            # Discovery of all tables and whether they contain a
            # geometry column
            sql = """SELECT pg_class.relname, pg_namespace.nspname,
                    pg_class.relkind, pg_get_userbyid(relowner),
                    reltuples, relpages, pg_attribute.attname,
                    pg_attribute.atttypid::regtype, NULL, NULL
                    FROM pg_class
                    JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
                    LEFT OUTER JOIN pg_attribute ON
                    pg_attribute.attrelid = pg_class.oid AND
                    (pg_attribute.atttypid = 'geometry'::regtype
                    OR pg_attribute.atttypid IN
                    (SELECT oid FROM pg_type
                    WHERE typbasetype='geometry'::regtype))
                    WHERE pg_class.relkind IN ('v', 'r', 'm', 'p') """ \
                + schema_where + 'ORDER BY nspname, relname, attname'

        self._exec_sql(c, sql)
        items = c.fetchall()

        # Get geometry info from geometry_columns if exists
        if self.has_postgis:
            sql = """SELECT relname, nspname, relkind,
                    pg_get_userbyid(relowner), reltuples, relpages,
                    geometry_columns.f_geometry_column,
                    geometry_columns.type,
                    geometry_columns.coord_dimension,
                    geometry_columns.srid
                    FROM pg_class
                    JOIN pg_namespace ON relnamespace=pg_namespace.oid
                    LEFT OUTER JOIN geometry_columns ON
                    relname=f_table_name AND nspname=f_table_schema
                    WHERE relkind IN ('r','v','m','p') """ \
                + schema_where + 'ORDER BY nspname, relname, \
                f_geometry_column'
            self._exec_sql(c, sql)

            # Merge geometry info to "items"
            # NOTE(review): assumes both queries return rows in the same
            # order so positional merge by index is valid.
            for (i, geo_item) in enumerate(c.fetchall()):
                if geo_item[7]:
                    items[i] = geo_item

        return items

    def get_table_rows(self, table, schema=None):
        """Return COUNT(*) for the given table."""
        c = self.con.cursor()
        self._exec_sql(c, 'SELECT COUNT(*) FROM %s' % self._table_name(schema, table))
        return c.fetchone()[0]

    def get_table_fields(self, table, schema=None):
        """Return list of columns in table"""
        c = self.con.cursor()
        schema_where = (" AND nspname='%s' " % self._quote_unicode(schema) if schema is not None else ''
                        )
        # NOTE(review): pg_attrdef.adsrc was removed in PostgreSQL 12+;
        # this query requires an older server (or pg_get_expr instead).
        sql = """SELECT a.attnum AS ordinal_position,
                a.attname AS column_name,
                t.typname AS data_type,
                a.attlen AS char_max_len,
                a.atttypmod AS modifier,
                a.attnotnull AS notnull,
                a.atthasdef AS hasdefault,
                adef.adsrc AS default_value
                FROM pg_class c
                JOIN pg_attribute a ON a.attrelid = c.oid
                JOIN pg_type t ON a.atttypid = t.oid
                JOIN pg_namespace nsp ON c.relnamespace = nsp.oid
                LEFT JOIN pg_attrdef adef ON adef.adrelid = a.attrelid
                AND adef.adnum = a.attnum
                WHERE c.relname = '%s' %s AND a.attnum > 0
                ORDER BY a.attnum""" \
            % (self._quote_unicode(table), schema_where)
        self._exec_sql(c, sql)
        attrs = []
        for row in c.fetchall():
            attrs.append(TableAttribute(row))
        return attrs

    def get_table_indexes(self, table, schema=None):
        """Get info about table's indexes.

        ignore primary key and unique index, they get listed in constraints.
        """
        c = self.con.cursor()
        schema_where = (" AND nspname='%s' " % self._quote_unicode(schema) if schema is not None else ''
                        )
        sql = """SELECT relname, indkey
                FROM pg_class, pg_index
                WHERE pg_class.oid = pg_index.indexrelid
                AND pg_class.oid IN (
                SELECT indexrelid
                FROM pg_index, pg_class
                JOIN pg_namespace nsp ON pg_class.relnamespace = nsp.oid
                WHERE pg_class.relname='%s' %s
                AND pg_class.oid=pg_index.indrelid
                AND indisunique != 't' AND indisprimary != 't' )""" \
            % (self._quote_unicode(table), schema_where)
        self._exec_sql(c, sql)
        indexes = []
        for row in c.fetchall():
            indexes.append(TableIndex(row))
        return indexes

    def get_table_constraints(self, table, schema=None):
        """Return constraints on the table as TableConstraint objects."""
        c = self.con.cursor()
        schema_where = (" AND nspname='%s' " % self._quote_unicode(schema) if schema is not None else ''
                        )
        sql = """SELECT c.conname, c.contype, c.condeferrable, c.condeferred,
                array_to_string(c.conkey, ' '), c.consrc, t2.relname,
                c.confupdtype, c.confdeltype, c.confmatchtype,
                array_to_string(c.confkey, ' ')
                FROM pg_constraint c
                LEFT JOIN pg_class t ON c.conrelid = t.oid
                LEFT JOIN pg_class t2 ON c.confrelid = t2.oid
                JOIN pg_namespace nsp ON t.relnamespace = nsp.oid
                WHERE t.relname = '%s' %s """ \
            % (self._quote_unicode(table), schema_where)
        self._exec_sql(c, sql)
        constrs = []
        for row in c.fetchall():
            constrs.append(TableConstraint(row))
        return constrs

    def get_view_definition(self, view, schema=None):
        """Returns definition of the view."""
        schema_where = (" AND nspname='%s' " % self._quote_unicode(schema) if schema is not None else ''
                        )
        sql = """SELECT pg_get_viewdef(c.oid)
                FROM pg_class c
                JOIN pg_namespace nsp ON c.relnamespace = nsp.oid
                WHERE relname='%s' %s AND relkind IN ('v','m')""" \
            % (self._quote_unicode(view), schema_where)
        c = self.con.cursor()
        self._exec_sql(c, sql)
        return c.fetchone()[0]

    def add_geometry_column(self, table, geom_type, schema=None,
                            geom_column='the_geom', srid=-1, dim=2):
        """Add a geometry column via the PostGIS AddGeometryColumn() function."""
        # Use schema if explicitly specified
        if schema:
            schema_part = "'%s', " % self._quote_unicode(schema)
        else:
            schema_part = ''
        sql = "SELECT AddGeometryColumn(%s'%s', '%s', %d, '%s', %d)" % (
            schema_part,
            self._quote_unicode(table),
            self._quote_unicode(geom_column),
            srid,
            self._quote_unicode(geom_type),
            dim,
        )
        self._exec_sql_and_commit(sql)

    def delete_geometry_column(self, table, geom_column, schema=None):
        """Use PostGIS function to delete geometry column correctly."""
        if schema:
            schema_part = "'%s', " % self._quote_unicode(schema)
        else:
            schema_part = ''
        sql = "SELECT DropGeometryColumn(%s'%s', '%s')" % (schema_part,
                                                           self._quote_unicode(table), self._quote_unicode(geom_column))
        self._exec_sql_and_commit(sql)

    def delete_geometry_table(self, table, schema=None):
        """Delete table with one or more geometries using PostGIS function."""
        if schema:
            schema_part = "'%s', " % self._quote_unicode(schema)
        else:
            schema_part = ''
        sql = "SELECT DropGeometryTable(%s'%s')" % (schema_part,
                                                    self._quote_unicode(table))
        self._exec_sql_and_commit(sql)

    def create_table(self, table, fields, pkey=None, schema=None):
        """Create ordinary table.

        'fields' is array containing instances of TableField
        'pkey' contains name of column to be used as primary key
        """
        if len(fields) == 0:
            return False

        table_name = self._table_name(schema, table)

        sql = 'CREATE TABLE %s (%s' % (table_name, fields[0].field_def())
        for field in fields[1:]:
            sql += ', %s' % field.field_def()
        if pkey:
            sql += ', PRIMARY KEY (%s)' % self._quote(pkey)
        sql += ')'
        self._exec_sql_and_commit(sql)
        return True

    def delete_table(self, table, schema=None):
        """Delete table from the database."""
        table_name = self._table_name(schema, table)
        sql = 'DROP TABLE %s' % table_name
        self._exec_sql_and_commit(sql)

    def empty_table(self, table, schema=None):
        """Delete all rows from table."""
        table_name = self._table_name(schema, table)
        sql = 'DELETE FROM %s' % table_name
        self._exec_sql_and_commit(sql)

    def rename_table(self, table, new_table, schema=None):
        """Rename a table in database."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s RENAME TO %s' % (table_name,
                                               self._quote(new_table))
        self._exec_sql_and_commit(sql)

        # Update geometry_columns if PostGIS is enabled
        if self.has_postgis:
            sql = "UPDATE geometry_columns SET f_table_name='%s' \
                   WHERE f_table_name='%s'" \
                % (self._quote_unicode(new_table), self._quote_unicode(table))
            if schema is not None:
                sql += " AND f_table_schema='%s'" % self._quote_unicode(schema)
            self._exec_sql_and_commit(sql)

    def create_view(self, name, query, schema=None):
        """Create a view from the given SELECT query."""
        view_name = self._table_name(schema, name)
        sql = 'CREATE VIEW %s AS %s' % (view_name, query)
        self._exec_sql_and_commit(sql)

    def delete_view(self, name, schema=None):
        """Drop a view."""
        view_name = self._table_name(schema, name)
        sql = 'DROP VIEW %s' % view_name
        self._exec_sql_and_commit(sql)

    def rename_view(self, name, new_name, schema=None):
        """Rename view in database."""
        self.rename_table(name, new_name, schema)

    def create_schema(self, schema):
        """Create a new empty schema in database."""
        sql = 'CREATE SCHEMA %s' % self._quote(schema)
        self._exec_sql_and_commit(sql)

    def delete_schema(self, schema):
        """Drop (empty) schema from database."""
        sql = 'DROP SCHEMA %s' % self._quote(schema)
        self._exec_sql_and_commit(sql)

    def rename_schema(self, schema, new_schema):
        """Rename a schema in database."""
        sql = 'ALTER SCHEMA %s RENAME TO %s' % (self._quote(schema),
                                                self._quote(new_schema))
        self._exec_sql_and_commit(sql)

        # Update geometry_columns if PostGIS is enabled
        if self.has_postgis:
            sql = \
                "UPDATE geometry_columns SET f_table_schema='%s' \
                 WHERE f_table_schema='%s'" \
                % (self._quote_unicode(new_schema), self._quote_unicode(schema))
            self._exec_sql_and_commit(sql)

    def table_add_column(self, table, field, schema=None):
        """Add a column to table (passed as TableField instance)."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ADD %s' % (table_name, field.field_def())
        self._exec_sql_and_commit(sql)

    def table_delete_column(self, table, field, schema=None):
        """Delete column from a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s DROP %s' % (table_name, self._quote(field))
        self._exec_sql_and_commit(sql)

    def table_column_rename(self, table, name, new_name, schema=None):
        """Rename column in a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s RENAME %s TO %s' % (table_name,
                                                  self._quote(name), self._quote(new_name))
        self._exec_sql_and_commit(sql)

        # Update geometry_columns if PostGIS is enabled
        if self.has_postgis:
            sql = "UPDATE geometry_columns SET f_geometry_column='%s' \
                   WHERE f_geometry_column='%s' AND f_table_name='%s'" \
                % (self._quote_unicode(new_name), self._quote_unicode(name),
                   self._quote_unicode(table))
            if schema is not None:
                sql += " AND f_table_schema='%s'" % self._quote(schema)
            self._exec_sql_and_commit(sql)

    def table_column_set_type(self, table, column, data_type, schema=None):
        """Change column type."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ALTER %s TYPE %s' % (table_name,
                                                   self._quote(column), data_type)
        self._exec_sql_and_commit(sql)

    def table_column_set_default(self, table, column, default, schema=None):
        """Change column's default value.

        If default=None drop default value.
        """
        table_name = self._table_name(schema, table)
        if default:
            sql = 'ALTER TABLE %s ALTER %s SET DEFAULT %s' % (table_name,
                                                              self._quote(column), default)
        else:
            sql = 'ALTER TABLE %s ALTER %s DROP DEFAULT' % (table_name,
                                                            self._quote(column))
        self._exec_sql_and_commit(sql)

    def table_column_set_null(self, table, column, is_null, schema=None):
        """Change whether column can contain null values."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ALTER %s ' % (table_name, self._quote(column))
        if is_null:
            sql += 'DROP NOT NULL'
        else:
            sql += 'SET NOT NULL'
        self._exec_sql_and_commit(sql)

    def table_add_primary_key(self, table, column, schema=None):
        """Add a primery key (with one column) to a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ADD PRIMARY KEY (%s)' % (table_name,
                                                       self._quote(column))
        self._exec_sql_and_commit(sql)

    def table_add_unique_constraint(self, table, column, schema=None):
        """Add a unique constraint to a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s ADD UNIQUE (%s)' % (table_name,
                                                  self._quote(column))
        self._exec_sql_and_commit(sql)

    def table_delete_constraint(self, table, constraint, schema=None):
        """Delete constraint in a table."""
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s DROP CONSTRAINT %s' % (table_name,
                                                     self._quote(constraint))
        self._exec_sql_and_commit(sql)

    def table_move_to_schema(self, table, new_schema, schema=None):
        """Move a table to another schema (no-op when schemas are equal)."""
        if new_schema == schema:
            return
        table_name = self._table_name(schema, table)
        sql = 'ALTER TABLE %s SET SCHEMA %s' % (table_name,
                                                self._quote(new_schema))
        self._exec_sql_and_commit(sql)

        # Update geometry_columns if PostGIS is enabled
        if self.has_postgis:
            sql = "UPDATE geometry_columns SET f_table_schema='%s' \
                   WHERE f_table_name='%s'" \
                % (self._quote_unicode(new_schema),
                   self._quote_unicode(table))
            if schema is not None:
                sql += " AND f_table_schema='%s'" % self._quote_unicode(schema)
            self._exec_sql_and_commit(sql)

    def create_index(self, table, name, column, schema=None):
        """Create index on one column using default options."""
        table_name = self._table_name(schema, table)
        idx_name = self._quote(name)
        sql = 'CREATE INDEX %s ON %s (%s)' % (idx_name, table_name,
                                              self._quote(column))
        self._exec_sql_and_commit(sql)

    def create_spatial_index(self, table, schema=None,
                             geom_column='the_geom'):
        """Create a GIST index on the geometry column."""
        table_name = self._table_name(schema, table)
        idx_name = self._quote(u"sidx_%s_%s" % (table, geom_column))
        sql = 'CREATE INDEX %s ON %s USING GIST(%s)' % (idx_name,
                                                        table_name, self._quote(geom_column))
        self._exec_sql_and_commit(sql)

    def delete_index(self, name, schema=None):
        """Drop an index."""
        index_name = self._table_name(schema, name)
        sql = 'DROP INDEX %s' % index_name
        self._exec_sql_and_commit(sql)

    def get_database_privileges(self):
        """DB privileges: (can create schemas, can create temp. tables).
        """
        sql = "SELECT has_database_privilege('%(d)s', 'CREATE'), \
               has_database_privilege('%(d)s', 'TEMP')" \
            % {'d': self._quote_unicode(self.uri.database())}
        c = self.con.cursor()
        self._exec_sql(c, sql)
        return c.fetchone()

    def get_schema_privileges(self, schema):
        """Schema privileges: (can create new objects, can access objects
        in schema)."""
        sql = "SELECT has_schema_privilege('%(s)s', 'CREATE'), \
               has_schema_privilege('%(s)s', 'USAGE')" \
            % {'s': self._quote_unicode(schema)}
        c = self.con.cursor()
        self._exec_sql(c, sql)
        return c.fetchone()

    def get_table_privileges(self, table, schema=None):
        """Table privileges: (select, insert, update, delete).
        """
        t = self._table_name(schema, table)
        sql = """SELECT has_table_privilege('%(t)s', 'SELECT'),
                has_table_privilege('%(t)s', 'INSERT'),
                has_table_privilege('%(t)s', 'UPDATE'),
                has_table_privilege('%(t)s', 'DELETE')""" \
            % {'t': self._quote_unicode(t)}
        c = self.con.cursor()
        self._exec_sql(c, sql)
        return c.fetchone()

    def vacuum_analyze(self, table, schema=None):
        """Run VACUUM ANALYZE on a table."""
        t = self._table_name(schema, table)
        # VACUUM ANALYZE must be run outside transaction block - we
        # have to change isolation level
        self.con.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        c = self.con.cursor()
        self._exec_sql(c, 'VACUUM ANALYZE %s' % t)
        self.con.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)

    def sr_info_for_srid(self, srid):
        """Return a human-readable spatial reference name for the srid,
        or 'Unknown' when PostGIS is absent or lookup fails."""
        if not self.has_postgis:
            return 'Unknown'

        try:
            c = self.con.cursor()
            self._exec_sql(c,
                           "SELECT srtext FROM spatial_ref_sys WHERE srid = '%d'" % srid)
            srtext = c.fetchone()[0]
            # Try to extract just SR name (should be quoted in double
            # quotes)
            x = re.search('"([^"]+)"', srtext)
            if x is not None:
                srtext = x.group()
            return srtext
        except DbError:
            return 'Unknown'

    def insert_table_row(self, table, values, schema=None, cursor=None):
        """Insert a row with specified values to a table.

        If a cursor is specified, it doesn't commit (expecting that
        there will be more inserts) otherwise it commits immediately.
        """
        t = self._table_name(schema, table)
        sql = ''
        for value in values:
            # TODO: quote values?
            if sql:
                sql += ', '
            sql += value
        sql = 'INSERT INTO %s VALUES (%s)' % (t, sql)
        if cursor:
            self._exec_sql(cursor, sql)
        else:
            self._exec_sql_and_commit(sql)

    def _exec_sql(self, cursor, sql):
        # Execute and convert psycopg2 errors into processing exceptions,
        # including the failing query text for diagnostics.
        try:
            cursor.execute(sql)
        except psycopg2.Error as e:
            raise QgsProcessingException(str(e) + ' QUERY: ' + e.cursor.query.decode(e.cursor.connection.encoding))

    def _exec_sql_and_commit(self, sql):
        """Tries to execute and commit some action, on error it rolls
        back the change.
        """
        try:
            c = self.con.cursor()
            self._exec_sql(c, sql)
            self.con.commit()
        except DbError:
            self.con.rollback()
            raise

    def _quote(self, identifier):
        """Quote identifier if needed."""

        # Make sure it's python unicode string
        identifier = str(identifier)

        # Is it needed to quote the identifier?
        if self.re_ident_ok.match(identifier) is not None:
            return identifier

        # It's needed - let's quote it (and double the double-quotes)
        return u'"%s"' % identifier.replace('"', '""')

    def _quote_unicode(self, txt):
        """Make the string safe - replace ' with ''.
        """

        # make sure it's python unicode string
        txt = str(txt)
        return txt.replace("'", "''")

    def _table_name(self, schema, table):
        # Fully-qualified, quoted table name; schema part optional.
        if not schema:
            return self._quote(table)
        else:
            return u'%s.%s' % (self._quote(schema), self._quote(table))
class SitnDB(object):
    """Small helper bundling a QgsDataSourceUri and a QGIS provider key,
    producing QgsVectorLayer objects and (for SQL Server) a QtSql handle.

    BUGFIX: createQtMSDB() previously returned a bare ``None`` (implicit)
    when the QODBC driver was unavailable, while every other exit path
    returns a ``(db, success)`` 2-tuple — callers unpacking two values
    crashed. That path now returns ``(None, False)``.
    """

    def __init__(self, dbname, host, port, user, password, providerkey, iface):
        """ Defines the db connexion parameters and the qgis provider key """
        self.uri = QgsDataSourceUri()
        self.uri.setConnection(host, port, dbname, user, password)
        self.providerkey = providerkey
        self.errorMessage = ''
        self.messageBar = iface.messageBar()

    def getLayer(self, shema, table, geomfieldname, whereclause, layername,
                 uniqueidfield):
        """ Returns a layer or a table.
        If no geometry is available, geomfieldname must be set to None.
        Returns None when the resulting layer is not valid. """
        self.uri.setDataSource(shema, table, geomfieldname, whereclause,
                               uniqueidfield)
        layer = QgsVectorLayer(self.uri.uri(), layername, self.providerkey)
        if not layer.isValid():
            return None
        else:
            return layer

    def getStyleUri(self, shema, table, geomfieldname, whereclause, layername,
                    uniqueidfield):
        """ Returns a style URI """
        # NOTE: mutates the shared self.uri, like getLayer().
        self.uri.setDataSource(shema, table, geomfieldname, whereclause,
                               uniqueidfield)
        return self.uri.uri()

    def createQtMSDB(self):
        """ Returns a db Connection to a MSSQL (SQL Server database) using
        QtSql. This is required in order to create views with SQL Server.

        :returns: tuple (QSqlDatabase or None, bool success)
        """
        db = QSqlDatabase.addDatabase("QODBC")
        if db.isValid():
            db.setDatabaseName("DRIVER={SQL Server};SERVER=" +
                               self.uri.host() + ";DATABASE=" +
                               self.uri.database())
            db.setUserName(self.uri.username())
            db.setPassword(self.uri.password())
            if db.open():
                return db, True
            else:
                self.messageBar.pushCritical("Connection SQl Server",
                                             db.lastError().text())
                db.close()
                db.removeDatabase(db.databaseName())
                db = None
                return db, False
        else:
            self.messageBar.pushCritical("Connection SQL Server",
                                         'QODBC db is NOT valid')
            # BUGFIX: keep the (db, success) contract on this path too.
            return None, False