def __init__(self, **kwargs):
    """Initialize the dialect; warn when the installed pysqlite2 is too old.

    Versions of pysqlite2 older than 2.1.3 are known to misbehave, so we
    emit a warning (but still proceed) when an outdated DBAPI is detected.
    """
    SQLiteDialect.__init__(self, **kwargs)
    if self.dbapi is None:
        # No DBAPI available (e.g. dialect created for compilation only);
        # nothing to version-check.
        return
    installed = self.dbapi.version_info
    if installed < (2, 1, 3):
        version_str = '.'.join(str(part) for part in installed)
        util.warn(
            ("The installed version of pysqlite2 (%s) is out-dated "
             "and will cause errors in some cases. Version 2.1.3 "
             "or greater is recommended.") % version_str)
def __init__(self, **kwargs):
    """Initialize the dialect and probe the DBAPI for capability flags.

    Warns when the installed pysqlite2 is older than 2.1.3, disables
    ``DEFAULT VALUES`` support for SQLite < 3.3.8, and enables CAST
    support only for SQLite >= 3.2.3.
    """
    SQLiteDialect.__init__(self, **kwargs)

    def vers(num):
        # "3.2.3" -> (3, 2, 3) so versions compare correctly as tuples.
        return tuple([int(x) for x in num.split('.')])

    if self.dbapi is not None:
        sqlite_ver = self.dbapi.version_info
        # BUG FIX: was compared against (2, 1, '3'); comparing an int
        # element with a str raises TypeError on Python 3 and compares
        # by type name on Python 2, so the check never worked as intended.
        if sqlite_ver < (2, 1, 3):
            util.warn(
                ("The installed version of pysqlite2 (%s) is out-dated "
                 "and will cause errors in some cases. Version 2.1.3 "
                 "or greater is recommended.") %
                '.'.join([str(subver) for subver in sqlite_ver]))
        if self.dbapi.sqlite_version_info < (3, 3, 8):
            self.supports_default_values = False
    self.supports_cast = (
        self.dbapi is None
        or vers(self.dbapi.sqlite_version) >= vers("3.2.3"))
def test_load_dialect_impl(self):
    """MySQL gets a dialect-specific impl; SQLite falls back to plain Text."""
    mysql_impl = self.sqltype.load_dialect_impl(MySQLDialect())
    self.assertNotEqual(types.Text, type(mysql_impl))

    sqlite_impl = self.sqltype.load_dialect_impl(SQLiteDialect())
    self.assertEqual(types.Text, type(sqlite_impl))
def test_unique_params_only(self):
    """Compile ConditionalInsert (unique values only) for MySQL and SQLite.

    Checks the emitted SQL, the positional parameter order, and the bound
    parameter values for both dialects.
    """
    metadata = MetaData()
    table = Table(
        'table', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', Unicode(16), nullable=False, unique=True),
    )
    clause = ConditionalInsert(table, {table.c.name: 'asdf'})
    # there is a bug in upstream in pylint so we have to disable it for
    # SQLAlchemy 0.9.
    # https://bitbucket.org/logilab/astroid/issue/39/support-for-sqlalchemy
    #pylint: disable=E1120
    compiled = clause.compile(dialect=MySQLDialect())
    # FIX: assertEquals is a deprecated alias, removed in Python 3.12;
    # use the canonical assertEqual throughout.
    self.assertEqual(
        str(compiled),
        'INSERT INTO `table` (name)\n'
        'SELECT %s\n'
        'FROM DUAL\n'
        'WHERE NOT (EXISTS (SELECT 1 \n'
        'FROM `table` \n'
        'WHERE `table`.name = %s FOR UPDATE))')
    self.assertEqual(compiled.positiontup, ['name', 'name_1'])
    self.assertEqual(compiled.params, {'name': 'asdf', 'name_1': 'asdf'})

    #pylint: disable=E1120
    compiled = clause.compile(dialect=SQLiteDialect())
    self.assertEqual(
        str(compiled),
        'INSERT INTO "table" (name)\n'
        'SELECT ?\n'
        'WHERE NOT (EXISTS (SELECT 1 \n'
        'FROM "table" \n'
        'WHERE "table".name = ?))')
    self.assertEqual(compiled.positiontup, ['name', 'name_1'])
    self.assertEqual(compiled.params, {'name': 'asdf', 'name_1': 'asdf'})
def test_with_extra_params(self):
    """Compile ConditionalInsert with extra (non-unique) values as well.

    The extra column appears in the INSERT column list and SELECT values,
    but not in the uniqueness check of the WHERE NOT EXISTS clause.
    """
    metadata = MetaData()
    table = Table(
        'table', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', Unicode(16), nullable=False, unique=True),
        Column('extra', Unicode(16), nullable=False),
    )
    clause = ConditionalInsert(
        table, {table.c.name: 'asdf'}, {table.c.extra: 'something'})
    #pylint: disable=E1120
    compiled = clause.compile(dialect=MySQLDialect())
    # FIX: assertEquals is a deprecated alias, removed in Python 3.12;
    # use the canonical assertEqual throughout.
    self.assertEqual(
        str(compiled),
        'INSERT INTO `table` (name, extra)\n'
        'SELECT %s, %s\n'
        'FROM DUAL\n'
        'WHERE NOT (EXISTS (SELECT 1 \n'
        'FROM `table` \n'
        'WHERE `table`.name = %s FOR UPDATE))')
    self.assertEqual(compiled.positiontup, ['name', 'extra', 'name_1'])
    self.assertEqual(
        compiled.params,
        {'name': 'asdf', 'extra': 'something', 'name_1': 'asdf'})

    #pylint: disable=E1120
    compiled = clause.compile(dialect=SQLiteDialect())
    self.assertEqual(
        str(compiled),
        'INSERT INTO "table" (name, extra)\n'
        'SELECT ?, ?\n'
        'WHERE NOT (EXISTS (SELECT 1 \n'
        'FROM "table" \n'
        'WHERE "table".name = ?))')
    self.assertEqual(compiled.positiontup, ['name', 'extra', 'name_1'])
    self.assertEqual(
        compiled.params,
        {'name': 'asdf', 'extra': 'something', 'name_1': 'asdf'})
def test_get_spatial_dialect(self):
    """Each SQLAlchemy dialect maps to its spatial dialect, cached per type."""
    first = DialectManager.get_spatial_dialect(PGDialect_psycopg2())
    ok_(isinstance(first, PGSpatialDialect))

    # Table-driven check of the remaining dialect -> spatial-dialect pairs.
    pairings = [
        (MySQLDialect(), MySQLSpatialDialect),
        (SQLiteDialect(), SQLiteSpatialDialect),
        (OracleDialect(), OracleSpatialDialect),
        (MSDialect(), MSSpatialDialect),
    ]
    for dialect, spatial_cls in pairings:
        ok_(isinstance(DialectManager.get_spatial_dialect(dialect), spatial_cls))

    second = DialectManager.get_spatial_dialect(PGDialect_psycopg2())
    ok_(first is second, "only one instance per dialect should be created")
def go():
    # Compile the cast expression against a freshly constructed SQLite dialect.
    cast.compile(dialect=SQLiteDialect())
class Db_GPKG(BaseDb):
    """Functionality for using sqlalchemy to connect to a GPKG database."""

    # Page-cache budget handed to SQLite via PRAGMA cache_size, in MiB.
    GPKG_CACHE_SIZE_MiB = 200

    preparer = SQLiteIdentifierPreparer(SQLiteDialect())

    @classmethod
    def create_engine(cls, path, **kwargs):
        """Create a sqlalchemy engine for the GPKG file at ``path``.

        Each new low-level connection loads the spatialite extension,
        enables GPKG mode and foreign keys, and sets the cache size.
        """

        def _on_connect(pysqlite_conn, connection_record):
            # Autocommit mode; transactions are managed explicitly.
            pysqlite_conn.isolation_level = None
            pysqlite_conn.enable_load_extension(True)
            pysqlite_conn.load_extension(spatialite_path)
            pysqlite_conn.enable_load_extension(False)
            dbcur = pysqlite_conn.cursor()
            dbcur.execute("SELECT EnableGpkgMode();")
            dbcur.execute("PRAGMA foreign_keys = ON;")
            # Negative cache_size is interpreted by SQLite as KiB (not pages).
            dbcur.execute(f"PRAGMA cache_size = -{cls.GPKG_CACHE_SIZE_MiB * 1024};")

        path = os.path.expanduser(path)
        engine = sqlalchemy.create_engine(f"sqlite:///{path}", module=sqlite, **kwargs)
        sqlalchemy.event.listen(engine, "connect", _on_connect)
        return engine

    @classmethod
    def list_tables(cls, sess, db_schema=None):
        """Return {table_name: identifier_or_None} for user tables.

        Raises RuntimeError if ``db_schema`` is given - GPKG has no schemas.
        """
        if db_schema is not None:
            raise RuntimeError("GPKG files don't have a db_schema")

        gpkg_contents_exists = sess.scalar(
            "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='gpkg_contents';",
        )
        if gpkg_contents_exists:
            # Pull human-readable identifiers from gpkg_contents when present.
            r = sess.execute(
                """
                SELECT SM.name, GC.identifier FROM sqlite_master SM
                LEFT OUTER JOIN gpkg_contents GC ON GC.table_name = SM.name
                WHERE SM.type='table'
                AND SM.name NOT LIKE 'sqlite%'
                AND SM.name NOT LIKE 'gpkg%'
                AND SM.name NOT LIKE 'rtree%'
                AND SM.name != 'ogr_empty_table'
                ORDER BY SM.name;
                """
            )
            return {row["name"]: row["identifier"] for row in r}

        r = sess.execute(
            """
            SELECT name FROM sqlite_master SM
            WHERE type='table'
            AND name NOT LIKE 'sqlite%'
            AND name NOT LIKE 'gpkg%'
            AND name NOT LIKE 'rtree%'
            AND name != 'ogr_empty_table'
            ORDER BY name;
            """
        )
        return {row["name"]: None for row in r}

    @classmethod
    def pk_name(cls, sess, db_schema=None, table=None):
        """Find the primary key for a GeoPackage table."""
        # Requirement 150:
        # A feature table or view SHALL have a column that uniquely identifies the
        # row. For a feature table, the column SHOULD be a primary key. If there
        # is no primary key column, the first column SHALL be of type INTEGER and
        # SHALL contain unique values for each row.
        if db_schema is not None:
            raise RuntimeError("GPKG files don't have a db_schema")

        r = sess.execute(f"PRAGMA table_info({cls.quote(table)});")
        fields = []
        for field in r:
            if field["pk"]:
                return field["name"]
            fields.append(field)

        # FIX: guard against a table with no columns before indexing fields[0].
        if fields and fields[0]["type"].upper() == "INTEGER":
            return fields[0]["name"]
        else:
            raise RuntimeError("No valid GeoPackage primary key field found")

    @classmethod
    def pk_names(cls, sess, db_schema=None, table=None):
        # GPKG only ever has one primary key.
        # BUG FIX: previously called cls.pk_names (itself) instead of
        # cls.pk_name, causing infinite recursion on every call.
        return [cls.pk_name(sess, db_schema, table)]
def __init__(self, *args, **kwargs):
    # Initialize both bases explicitly (cooperative super() is not used here);
    # the connector is set up first, then the base SQLite dialect.
    ZxJDBCConnector.__init__(self, *args, **kwargs)
    SQLiteDialect.__init__(self, *args, **kwargs)
def test_select_star(mocker: MockFixture, app_context: AppContext) -> None:
    """
    Test the ``select_star`` method.

    The method removes pseudo-columns from structures inside arrays. While these
    pseudo-columns show up as "columns" for metadata reasons, we can't select
    them in the query, as opposed to fields from non-array structures.
    """
    from superset.db_engine_specs.bigquery import BigQueryEngineSpec

    # Attributes shared by every column entry; only name/type vary.
    shared_attrs = {
        "nullable": True,
        "comment": None,
        "default": None,
        "precision": None,
        "scale": None,
        "max_length": None,
    }
    cols = [
        {"name": "trailer", "type": sqltypes.ARRAY(sqltypes.JSON()), **shared_attrs},
        {"name": "trailer.key", "type": sqltypes.String(), **shared_attrs},
        {"name": "trailer.value", "type": sqltypes.String(), **shared_attrs},
        {"name": "trailer.email", "type": sqltypes.String(), **shared_attrs},
    ]

    # mock the database so we can compile the query
    database = mocker.MagicMock()
    database.compile_sqla_query = lambda query: str(
        query.compile(dialect=SQLiteDialect())
    )  # use SQLite dialect so we don't need the BQ dependency

    engine = mocker.MagicMock()
    engine.dialect = SQLiteDialect()

    sql = BigQueryEngineSpec.select_star(
        database=database,
        table_name="my_table",
        engine=engine,
        schema=None,
        limit=100,
        show_cols=True,
        indent=True,
        latest_partition=False,
        cols=cols,
    )
    assert (
        sql
        == """SELECT trailer AS trailer
FROM my_table
LIMIT ?
OFFSET ?"""
    )

    # BigQuery-specific SQL
    try:
        from pybigquery.sqlalchemy_bigquery import BigQueryDialect
    except ModuleNotFoundError:
        return

    database.compile_sqla_query = lambda query: str(
        query.compile(dialect=BigQueryDialect())
    )
    engine.dialect = BigQueryDialect()

    sql = BigQueryEngineSpec.select_star(
        database=database,
        table_name="my_table",
        engine=engine,
        schema=None,
        limit=100,
        show_cols=True,
        indent=True,
        latest_partition=False,
        cols=cols,
    )
    assert (
        sql
        == """SELECT `trailer` AS `trailer`
FROM `my_table`
LIMIT :param_1"""
    )