def test_sqla_schema_conversion(self):
    """Round-trip a SQLAlchemy table through ``alch.schema_from_table``.

    Builds a ``sa.Table`` whose columns cover the common integer, float,
    boolean, and timestamp types, then asserts the recovered ibis schema
    matches the expected ibis types (including per-column nullability).
    """
    # NOTE(review): an identical double-quoted copy of this test appears
    # later in this file; if both live in the same class, the later
    # definition shadows this one — confirm and drop one.
    typespec = [
        # column name, sqlalchemy type, nullable, expected ibis type
        ('smallint', sat.SmallInteger, False, dt.int16),
        ('int', sat.Integer, True, dt.int32),
        ('integer', sat.INTEGER(), True, dt.int64),
        ('bigint', sat.BigInteger, False, dt.int64),
        ('real', sat.REAL, True, dt.double),
        ('bool', sat.Boolean, True, dt.boolean),
        ('timestamp', sat.DateTime, True, dt.timestamp),
    ]

    sqla_types = []
    ibis_types = []
    for name, t, nullable, ibis_type in typespec:
        sqla_types.append(sa.Column(name, t, nullable=nullable))
        # Pass nullable by keyword for clarity and for consistency with
        # the other schema-conversion tests in this file.
        ibis_types.append((name, ibis_type(nullable=nullable)))

    table = sa.Table('tname', self.meta, *sqla_types)

    schema = alch.schema_from_table(table)
    expected = ibis.schema(ibis_types)

    assert_equal(schema, expected)
def test_sqla_schema_conversion(self):
    """Round-trip a SQLAlchemy table through ``alch.schema_from_table``.

    Builds a ``sa.Table`` whose columns cover the common integer, float,
    boolean, and timestamp types, then asserts the recovered ibis schema
    matches the expected ibis types (including per-column nullability).
    """
    # NOTE(review): an identical single-quoted copy of this test appears
    # earlier in this file; if both live in the same class, this later
    # definition shadows the other — confirm and drop one.
    typespec = [
        # column name, sqlalchemy type, nullable, expected ibis type
        ("smallint", sat.SmallInteger, False, dt.int16),
        ("int", sat.Integer, True, dt.int32),
        ("integer", sat.INTEGER(), True, dt.int64),
        ("bigint", sat.BigInteger, False, dt.int64),
        ("real", sat.REAL, True, dt.double),
        ("bool", sat.Boolean, True, dt.boolean),
        ("timestamp", sat.DateTime, True, dt.timestamp),
    ]

    sqla_types = []
    ibis_types = []
    for name, t, nullable, ibis_type in typespec:
        sqla_types.append(sa.Column(name, t, nullable=nullable))
        # Pass nullable by keyword for clarity and for consistency with
        # the other schema-conversion tests in this file.
        ibis_types.append((name, ibis_type(nullable=nullable)))

    table = sa.Table("tname", self.meta, *sqla_types)

    schema = alch.schema_from_table(table)
    expected = ibis.schema(ibis_types)

    assert_equal(schema, expected)
def test_schema_type_conversion():
    """Postgres JSON/JSONB/UUID columns convert to the matching ibis types.

    Creates a table against a (never-connected) postgres engine so the
    dialect-specific column types resolve, then checks that
    ``alch.schema_from_table`` recovers the expected ibis schema.
    """
    # column name, sqlalchemy type, nullable, ibis type constructor
    typespec = [
        ('json', sa.dialects.postgresql.JSON, True, dt.JSON),
        ('jsonb', sa.dialects.postgresql.JSONB, True, dt.JSONB),
        ('uuid', sa.dialects.postgresql.UUID, True, dt.UUID),
    ]

    sqla_types = [
        sa.Column(name, typ, nullable=nullable)
        for name, typ, nullable, _ in typespec
    ]
    ibis_types = [
        (name, ibis_type(nullable=nullable))
        for name, _, nullable, ibis_type in typespec
    ]

    # Bind the metadata to a postgres engine so dialect types are in play.
    engine = sa.create_engine('postgresql://')
    table = sa.Table('tname', sa.MetaData(bind=engine), *sqla_types)

    # The schema recovered from the table must match the expected types.
    schema = alch.schema_from_table(table)
    expected = ibis.schema(ibis_types)

    assert_equal(schema, expected)
def test_sa_default_numeric_precision_and_scale(
    con, backend, dialects, default_precisions, default_scales
):
    """NUMERIC columns missing precision/scale get the backend defaults.

    For each NUMERIC variant (no args, precision only, scale only, both),
    ``alch.schema_from_table`` should fill in the backend's default
    precision and/or scale wherever the column definition omitted them.
    """
    # TODO: find a better way to access ibis.sql.alchemy
    import ibis.sql.alchemy as alch

    dialect = dialects[backend.name]
    default_precision = default_precisions[backend.name]
    default_scale = default_scales[backend.name]

    # column name, sqlalchemy NUMERIC variant, expected ibis decimal type
    typespec = [
        ('n1', dialect.NUMERIC, dt.Decimal(default_precision, default_scale)),
        ('n2', dialect.NUMERIC(5), dt.Decimal(5, default_scale)),
        ('n3', dialect.NUMERIC(None, 4), dt.Decimal(default_precision, 4)),
        ('n4', dialect.NUMERIC(10, 2), dt.Decimal(10, 2)),
    ]

    sqla_types = []
    ibis_types = []
    for colname, sa_type, expected_type in typespec:
        sqla_types.append(sa.Column(colname, sa_type, nullable=True))
        ibis_types.append((colname, expected_type(nullable=True)))

    # Create the table against the live connection's engine.
    table_name = 'test_sa_default_param_decimal'
    engine = con.con
    table = sa.Table(table_name, sa.MetaData(bind=engine), *sqla_types)

    # Check that the default precision and scale were recovered correctly.
    schema = alch.schema_from_table(table)
    expected = ibis.schema(ibis_types)
    assert_equal(schema, expected)

    con.drop_table(table_name, force=True)