def test_register_conn(hstore, conn):
    """Registering hstore on a single connection makes it load hstore values."""
    type_info = TypeInfo.fetch(conn, "hstore")
    register_hstore(type_info, conn)
    assert conn.adapters.types[type_info.oid].name == "hstore"

    cursor = conn.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
    # NULL, empty, and single-pair hstores must decode to None / {} / dict.
    assert cursor.fetchone() == (None, {}, {"a": "b"})
def test_fetch_not_found(conn, name, status):
    """Fetching an unknown type returns None and leaves the transaction status alone."""
    status = getattr(TransactionStatus, status)
    if status == TransactionStatus.INTRANS:
        # Open a transaction so the connection really is in the INTRANS state.
        conn.execute("select 1")

    assert conn.info.transaction_status == status
    type_info = TypeInfo.fetch(conn, name)
    # The lookup must not have started or ended a transaction.
    assert conn.info.transaction_status == status
    assert type_info is None
def test_register_globally(hstore, dsn, svcconn, global_adapters):
    """Global hstore registration affects new connections, not the fetching one."""
    type_info = TypeInfo.fetch(svcconn, "hstore")
    register_hstore(type_info)

    # Registered on the global adapters map only...
    assert psycopg.adapters.types[type_info.oid].name == "hstore"
    # ...the service connection used for the fetch is untouched.
    assert svcconn.adapters.types.get(type_info.oid) is None

    conn = psycopg.connect(dsn)
    # A fresh connection inherits the global registration.
    assert conn.adapters.types[type_info.oid].name == "hstore"
    cursor = conn.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
    assert cursor.fetchone() == (None, {}, {"a": "b"})
def shapely_conn(conn, svcconn):
    """Fixture: a connection with shapely geometry adaptation registered.

    Skips the test if the postgis extension cannot be created.
    """
    try:
        with svcconn.transaction():
            svcconn.execute("create extension if not exists postgis")
    except psycopg.Error as e:
        pytest.skip(f"can't create extension postgis: {e}")

    geom_info = TypeInfo.fetch(conn, "geometry")
    assert geom_info
    register_shapely(geom_info, conn)
    return conn
def test_fetch_by_schema_qualified_string(conn, name):
    """A schema-qualified name resolves to the right oid/array oid pair."""
    conn.execute("create schema if not exists testschema")
    conn.execute("create type testschema.testtype as (foo text)")

    type_info = TypeInfo.fetch(conn, name)
    assert type_info.name == "testtype"
    # assert type_info.schema == "testschema"

    cursor = conn.execute(
        """
        select oid, typarray from pg_type
        where oid = 'testschema.testtype'::regtype
        """
    )
    assert cursor.fetchone() == (type_info.oid, type_info.array_oid)
def test_array_register(conn):
    """Registering a composite's TypeInfo enables loading of its array type."""
    conn.execute("create table mytype (data text)")

    query = """select '(foo)'::mytype, '{"(foo)"}'::mytype[]"""

    # Before registration both values come back as raw text.
    scalar, array = conn.execute(query).fetchone()
    assert scalar == "(foo)"
    assert array == "{(foo)}"

    TypeInfo.fetch(conn, "mytype").register(conn)

    # After registration the array form is parsed into a Python list.
    scalar, array = conn.execute(query).fetchone()
    assert scalar == "(foo)"
    assert array == ["(foo)"]
def test_set_custom_type(conn, hstore):
    """copy's set_types() can use a custom type registered on the cursor."""
    command = """copy (select '"a"=>"1", "b"=>"2"'::hstore) to stdout"""
    cur = conn.cursor()

    # Without registration the hstore arrives as its text representation.
    with cur.copy(command) as copy:
        assert list(copy.rows()) == [('"a"=>"1", "b"=>"2"',)]

    register_hstore(TypeInfo.fetch(conn, "hstore"), cur)

    # With registration, set_types("hstore") decodes it into a dict.
    with cur.copy(command) as copy:
        copy.set_types(["hstore"])
        assert list(copy.rows()) == [({"a": "1", "b": "2"},)]
def test_fetch(conn, name, status):
    """Fetching a known type works in any transaction status and returns text's oids."""
    status = getattr(TransactionStatus, status)
    if status == TransactionStatus.INTRANS:
        # Open a transaction so the connection really is in the INTRANS state.
        conn.execute("select 1")

    assert conn.info.transaction_status == status
    type_info = TypeInfo.fetch(conn, name)
    # The lookup must not have changed the transaction status.
    assert conn.info.transaction_status == status

    assert type_info.name == "text"
    # TODO: add the schema?
    # assert type_info.schema == "pg_catalog"
    builtin = psycopg.adapters.types["text"]
    assert type_info.oid == builtin.oid
    assert type_info.array_oid == builtin.array_oid
    assert type_info.alt_name == "text"
def test_roundtrip_array(hstore, conn):
    """An array of hstore samples survives a dump/load round trip unchanged."""
    register_hstore(TypeInfo.fetch(conn, "hstore"), conn)
    roundtripped = conn.execute("select %s", (samp,)).fetchone()[0]
    assert roundtripped == samp
def test_roundtrip(hstore, conn, d):
    """A single hstore dict survives a dump/load round trip unchanged."""
    register_hstore(TypeInfo.fetch(conn, "hstore"), conn)
    roundtripped = conn.execute("select %s", [d]).fetchone()[0]
    assert roundtripped == d
def main() -> None:
    """Benchmark several strategies for transferring/parsing jsonb data.

    Builds a random table, then times each query/loader combination three
    times and reports the best total time per strategy.
    """
    opt = parse_cmdline()
    make_random_table(opt)

    with psycopg.connect(opt.dsn, autocommit=True) as conn:
        # One query per strategy; the key doubles as the report title.
        queries = {
            "jsonb-unparsed": "select data from test_jsonb",
            "jsonb": "select data from test_jsonb",
            "orjson": "select data from test_jsonb",
            "bytea": "select data::bytea from test_jsonb",
            "jsonb-disk": "select data::bytea from test_jsonb",
            "ubjson": "select data::ubjson from test_jsonb",
            "ubjson-unparsed": "select data::ubjson from test_jsonb",
        }
        timings = defaultdict(list)

        def test(cur: psycopg.Cursor[Any], title: str) -> None:
            # Time the transfer (execute) and the parse (fetch) separately.
            t0 = time.time()
            cur.execute(queries[title])
            t1 = time.time()
            for row in cur:
                pass
            t2 = time.time()
            logger.info(
                f"time {title}: {t1-t0:f} xfer, {t2-t1:f} parsing, {t2-t0:f} total"
            )
            timings[title].append((t0, t1, t2))

        with conn.cursor() as cur:
            logger.info("warming up")
            cur.execute(
                """
                select count(*), pg_size_pretty(pg_total_relation_size('test_jsonb'))
                from test_jsonb"""
            )
            nrecs, size = cur.fetchone()  # type: ignore
            # Pull the data once so later runs hit warm caches.
            cur.execute("select data from test_jsonb")
            logger.info(f"number of records: {nrecs}, table size {size}")

        ubjson_info = TypeInfo.fetch(conn, "ubjson")
        if ubjson_info:
            conn.adapters.types.add(ubjson_info)
        else:
            logger.warning("ubjson extension not found, not including it")

        for _ in range(3):
            # Jsonb sent as varlena, not parsed
            cur = conn.cursor(binary=True)
            test(cur, "bytea")

            # Jsonb sent as text, not parsed
            cur = conn.cursor()
            cur.adapters.register_loader("jsonb", UnparsedLoader)
            test(cur, "jsonb-unparsed")

            # Jsonb sent as text, parsed with stdlib json
            cur = conn.cursor()
            test(cur, "jsonb")

            # Jsonb sent as text, parsed with orjson parser
            cur = conn.cursor()
            cur.adapters.register_loader("jsonb", ORJsonLoader)
            test(cur, "orjson")

            # Jsonb sent as varlena, parsed on the client
            cur = conn.cursor(binary=True)
            cur.adapters.register_loader("bytea", JsonbByteaLoader)
            test(cur, "jsonb-disk")

            if ubjson_info:
                # Jsonb sent as ubjson, parsed on the client
                cur = conn.cursor(binary=True)
                cur.adapters.register_loader("ubjson", UBJsonBinaryLoader)
                test(cur, "ubjson")

                # Jsonb sent as ubjson, not parsed on the client
                cur = conn.cursor(binary=True)
                test(cur, "ubjson-unparsed")

    # Report the best (minimum) total wall time per strategy, fastest first.
    bests = sorted(
        (min(t2 - t0 for t0, _, t2 in timings[title]), title) for title in queries
    )
    for t, title in bests:
        logger.info(f"best for {title}: {t:f} sec")
def _type_info_fetch(self, connection, name):
    """Fetch a psycopg TypeInfo for *name*, driving the async connection.

    The wrapped driver connection exposes ``await_`` to run the coroutine
    returned by ``TypeInfo.fetch`` against its underlying async connection.
    """
    from psycopg.types import TypeInfo

    driver_conn = connection.connection
    return driver_conn.await_(TypeInfo.fetch(driver_conn._connection, name))
def _type_info_fetch(self, connection, name):
    """Fetch a psycopg TypeInfo for *name* over the sync driver connection."""
    from psycopg.types import TypeInfo

    driver_conn = connection.connection
    return TypeInfo.fetch(driver_conn, name)