def test_connect_badargs(monkeypatch, pgconn, args, kwargs):
    def fake_connect(conninfo):
        return pgconn
        yield  # unreachable: makes fake_connect a generator, as connect() expects

    monkeypatch.setattr(psycopg3.connection, "connect", fake_connect)
    with pytest.raises((TypeError, psycopg3.ProgrammingError)):
        psycopg3.connect(*args, **kwargs)
def test_connect_args(monkeypatch, pgconn, args, kwargs, want):
    the_conninfo = None

    def fake_connect(conninfo):
        nonlocal the_conninfo
        the_conninfo = conninfo
        return pgconn
        yield  # unreachable: makes fake_connect a generator, as connect() expects

    monkeypatch.setattr(psycopg3.connection, "connect", fake_connect)
    psycopg3.connect(*args, **kwargs)
    assert conninfo_to_dict(the_conninfo) == conninfo_to_dict(want)
def test_get_params_env(self, dsn, monkeypatch):
    dsn = conninfo_to_dict(dsn)
    dsn.pop("application_name", None)

    monkeypatch.delenv("PGAPPNAME", raising=False)
    with psycopg3.connect(**dsn) as conn:
        assert "application_name" not in conn.info.get_parameters()

    monkeypatch.setenv("PGAPPNAME", "hello test")
    with psycopg3.connect(**dsn) as conn:
        assert conn.info.get_parameters()["application_name"] == "hello test"
def test_psycopg3_tail_copy(self) -> None:
    """Test tail with psycopg3 via its new binary COPY decoding support."""
    with psycopg3.connect(MATERIALIZED_URL) as conn:
        conn.autocommit = True
        with conn.cursor() as cur:
            # Create a table with one row of data.
            cur.execute("CREATE TABLE psycopg3_tail_copy (a int, b text)")
            cur.execute("INSERT INTO psycopg3_tail_copy VALUES (1, 'a')")
            conn.autocommit = False

            # Start a tail using the binary copy protocol.
            with cur.copy(
                "COPY (TAIL psycopg3_tail_copy) TO STDOUT (FORMAT BINARY)"
            ) as copy:
                copy.set_types([
                    builtins["numeric"].oid,  # timestamp
                    builtins["int8"].oid,  # diff
                    builtins["int4"].oid,  # a column
                    builtins["text"].oid,  # b column
                ])

                # Validate the first row, but ignore the timestamp column.
                (ts, diff, a, b) = copy.read_row()
                self.assertEqual(diff, 1)
                self.assertEqual(a, 1)
                self.assertEqual(b, "a")

                # Insert another row from another connection to simulate an
                # update arriving.
                with psycopg3.connect(MATERIALIZED_URL) as conn2:
                    conn2.autocommit = True
                    with conn2.cursor() as cur2:
                        cur2.execute(
                            "INSERT INTO psycopg3_tail_copy VALUES (2, 'b')"
                        )

                # Validate the new row, again ignoring the timestamp column.
                (ts, diff, a, b) = copy.read_row()
                self.assertEqual(diff, 1)
                self.assertEqual(a, 2)
                self.assertEqual(b, "b")

                # The tail won't end until we send a cancel request.
                conn.cancel()
                with self.assertRaises(Exception) as context:
                    copy.read_row()
                self.assertTrue(
                    "canceling statement due to user request"
                    in str(context.exception)
                )
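# A minimal sketch of the same binary COPY read pattern against a plain
# table rather than a TAIL; the table "t" and the "dsn" string are
# illustrative assumptions. set_types() also accepts type names (as the
# leak tests below do), and read_row() returns None once the copy ends.
import psycopg3

def read_binary_copy(dsn):
    with psycopg3.connect(dsn) as conn:
        with conn.cursor() as cur:
            with cur.copy("COPY t TO STDOUT (FORMAT BINARY)") as copy:
                copy.set_types(["int4"])
                while (row := copy.read_row()) is not None:
                    print(row)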
def start(self):
    if self.proc:
        raise ValueError("proxy already running")

    logging.info("starting proxy")
    pproxy = which("pproxy")
    if not pproxy:
        raise ValueError("pproxy program not found")

    cmdline = [pproxy, "--reuse"]
    cmdline.extend(["-l", f"tunnel://:{self.client_port}"])
    cmdline.extend(["-r", f"tunnel://{self.server_host}:{self.server_port}"])

    self.proc = sp.Popen(cmdline, stdout=sp.DEVNULL)
    logging.info("proxy started")
    self._wait_listen()

    # verify that the proxy works
    # TODO: investigate why it doesn't on Travis
    try:
        with psycopg3.connect(self.client_dsn):
            pass
    except Exception as e:
        pytest.xfail(f"failed to create a working proxy: {e}")
def test_weakref(dsn):
    conn = psycopg3.connect(dsn)
    w = weakref.ref(conn)
    conn.close()
    del conn
    gc.collect()
    assert w() is None
def test_encoding_env_var(dsn, monkeypatch, enc, out, codec):
    monkeypatch.setenv("PGCLIENTENCODING", enc)
    conn = psycopg3.connect(dsn)
    assert (
        conn.pgconn.parameter_status(b"client_encoding").decode("utf-8")
        == out
    )
    assert conn.client_encoding == codec
def test_custom_types(self) -> None:
    with psycopg3.connect(MATERIALIZED_URL, autocommit=True) as conn:
        # Text encoding of lists and maps is supported...
        with conn.cursor() as cur:
            cur.execute("SELECT LIST[1, 2, 3]")
            row = cur.fetchone()
            self.assertEqual(row, ("{1,2,3}",))

            cur.execute("SELECT '{a => 1, b => 2}'::map[text => int]")
            row = cur.fetchone()
            self.assertEqual(row, ("{a=>1,b=>2}",))

        # ...but binary encoding is not.
        with conn.cursor(binary=True) as cur:
            with self.assertRaisesRegex(
                psycopg3.errors.ProtocolViolation,
                "binary encoding of list types is not implemented",
            ):
                cur.execute("SELECT LIST[1, 2, 3]")

            with self.assertRaisesRegex(
                psycopg3.errors.ProtocolViolation,
                "binary encoding of map types is not implemented",
            ):
                cur.execute("SELECT '{a => 1, b => 2}'::map[text => int]")
def run_query(query_command):
    """Runs the query on the database"""
    # libpq expects "dbname"; the psycopg2-era "database" alias is not accepted
    connection = psycopg3.connect(dbname=DBNAME)
    cursor = connection.cursor()
    cursor.execute(query_command)
    data = cursor.fetchall()
    connection.close()
    return data
def scrape_load_and_check_data(
    target_db: str,
    table_config: TableConfig,
    pipeline_instance: "_PandasPipelineWithPollingSupport",
    **kwargs,
):
    create_temp_tables(target_db, *table_config.tables, **kwargs)
    temp_table = get_temp_table(table_config.table, suffix=kwargs['ts_nodash'])
    data_frames = pipeline_instance.__class__.data_getter()
    parsed_uri = urlparse(os.environ['AIRFLOW_CONN_DATASETS_DB'])
    host, port, dbname, user, password = (
        parsed_uri.hostname,
        parsed_uri.port or 5432,
        parsed_uri.path.strip('/'),
        parsed_uri.username,
        parsed_uri.password,
    )
    # Psycopg3 is still under active development, but crucially has support
    # for generating data and pushing it to postgres efficiently via
    # `cursor.copy` and the COPY protocol.
    with psycopg3.connect(
        f'host={host} port={port} dbname={dbname} user={user} password={password}'
    ) as connection:
        with connection.cursor() as cursor:
            logger.info("Starting streaming copy to DB")
            records_num = 0
            df_num = 0
            with cursor.copy(
                f'COPY "{temp_table.schema}"."{temp_table.name}" FROM STDIN'
            ) as copy:
                for data_frame in data_frames:
                    df_num += 1
                    df_len = len(data_frame)
                    records_num += df_len
                    logger.info(
                        "Copying data frame #%s (records %s - %s)",
                        df_num,
                        records_num - df_len,
                        records_num,
                    )
                    copy.write(
                        data_frame.to_csv(
                            index=False,
                            header=False,
                            sep='\t',
                            na_rep=r'\N',
                            columns=[
                                data_column
                                for data_column, sa_column in table_config.columns
                            ],
                        )
                    )
                    del data_frame
    logger.info("Copy complete.")
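# A hedged alternative sketch: instead of serializing each DataFrame to CSV,
# psycopg3's Copy.write_row (used in test_copy_from_leaks below) can stream
# tuples directly and leaves escaping and NULL handling to psycopg3. The
# table "my_table" and the "dsn" string are illustrative assumptions.
import psycopg3

def copy_rows(dsn, rows):
    with psycopg3.connect(dsn) as connection:
        with connection.cursor() as cursor:
            with cursor.copy('COPY my_table (a, b) FROM STDIN') as copy:
                for row in rows:
                    copy.write_row(row)  # psycopg3 adapts each value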
def __init__(self, sql: Sql, dbname: str):
    self.sql = sql
    self.conn = psycopg3.connect(dbname=dbname)
    self.cursor = self.conn.cursor()
    if sql.select_sql != '':
        self.cursor.execute(sql.select_sql)
        self.select_result = Some(self.cursor.fetchall())
    else:
        self.select_result = Nothing()
def conn(dsn):
    """Return a `Connection` connected to the ``--test-dsn`` database.

    The connection is autocommit and the database will contain a jsonb to
    bytea cast that can be used in the tests.
    """
    conn = psycopg3.connect(dsn)
    conn.autocommit = True
    ensure_jsonb_bytea_cast(conn)
    yield conn
    conn.close()
def test_context_rollback_no_clobber(conn, dsn, recwarn):
    with pytest.raises(ZeroDivisionError):
        with psycopg3.connect(dsn) as conn2:
            conn2.execute("select 1")
            conn.execute(
                "select pg_terminate_backend(%s::int)",
                [conn2.pgconn.backend_pid],
            )
            1 / 0

    assert "rolling back" in str(recwarn.pop(RuntimeWarning).message)
def test_copy_to_leaks(dsn, faker, fmt, method):
    faker.format = PgFormat.from_pq(fmt)
    faker.choose_schema(ncols=20)
    faker.make_records(20)

    n = []
    for i in range(3):
        with psycopg3.connect(dsn) as conn:
            with conn.cursor(binary=fmt) as cur:
                cur.execute(faker.drop_stmt)
                cur.execute(faker.create_stmt)
                cur.executemany(faker.insert_stmt, faker.records)

                stmt = sql.SQL(
                    "copy (select {} from {} order by id) to stdout (format {})"
                ).format(
                    sql.SQL(", ").join(faker.fields_names),
                    faker.table_name,
                    sql.SQL(fmt.name),
                )
                with cur.copy(stmt) as copy:
                    types = [
                        t.as_string(conn).replace('"', "")
                        for t in faker.types_names
                    ]
                    copy.set_types(types)

                    if method == "read":
                        while 1:
                            tmp = copy.read()
                            if not tmp:
                                break
                    elif method == "iter":
                        list(copy)
                    elif method == "row":
                        while 1:
                            tmp = copy.read_row()
                            if tmp is None:
                                break
                    elif method == "rows":
                        list(copy.rows())

        tmp = None
        del cur, conn
        gc.collect()
        gc.collect()
        n.append(len(gc.get_objects()))

    assert n[0] == n[1] == n[2], f"objects leaked: {n[1] - n[0]}, {n[2] - n[1]}"
def test_context_commit(conn, dsn):
    with conn:
        with conn.cursor() as cur:
            cur.execute("drop table if exists textctx")
            cur.execute("create table textctx ()")

    assert conn.closed

    with psycopg3.connect(dsn) as conn:
        with conn.cursor() as cur:
            cur.execute("select * from textctx")
            assert cur.fetchall() == []
def test_psycopg3_tail_stream(self) -> None:
    """Test tail with psycopg3 via its new streaming query support."""
    with psycopg3.connect(MATERIALIZED_URL) as conn:
        conn.autocommit = True
        with conn.cursor() as cur:
            # Create a table with one row of data.
            cur.execute("CREATE TABLE psycopg3_tail_stream (a int, b text)")
            cur.execute("INSERT INTO psycopg3_tail_stream VALUES (1, 'a')")
            conn.autocommit = False

            # Start a tail using the streaming query API.
            stream = cur.stream("TAIL psycopg3_tail_stream")

            # Validate the first row, but ignore the timestamp column.
            (ts, diff, a, b) = next(stream)
            self.assertEqual(diff, 1)
            self.assertEqual(a, 1)
            self.assertEqual(b, "a")

            # Insert another row from another connection to simulate an
            # update arriving.
            with psycopg3.connect(MATERIALIZED_URL) as conn2:
                conn2.autocommit = True
                with conn2.cursor() as cur2:
                    cur2.execute(
                        "INSERT INTO psycopg3_tail_stream VALUES (2, 'b')")

            # Validate the new row, again ignoring the timestamp column.
            (ts, diff, a, b) = next(stream)
            self.assertEqual(diff, 1)
            self.assertEqual(a, 2)
            self.assertEqual(b, "b")

            # The tail won't end until we send a cancel request.
            conn.cancel()
            self.assertEqual(next(stream, None), None)
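# cur.stream() is not TAIL-specific: it yields rows of any query as they
# arrive instead of buffering the whole result set. A short sketch, assuming
# only a "dsn" connection string.
import psycopg3

def stream_series(dsn):
    with psycopg3.connect(dsn) as conn:
        with conn.cursor() as cur:
            for row in cur.stream("SELECT generate_series(1, 3)"):
                print(row)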
def test_context_rollback(conn, dsn):
    with conn.cursor() as cur:
        cur.execute("drop table if exists textctx")
    conn.commit()

    with pytest.raises(ZeroDivisionError):
        with conn:
            with conn.cursor() as cur:
                cur.execute("create table textctx ()")
                1 / 0

    assert conn.closed

    with psycopg3.connect(dsn) as conn:
        with conn.cursor() as cur:
            with pytest.raises(UndefinedTable):
                cur.execute("select * from textctx")
def test_leak(dsn, faker, fmt, fetch, row_factory):
    faker.format = fmt
    faker.choose_schema(ncols=5)
    faker.make_records(10)
    row_factory = getattr(rows, row_factory)

    n = []
    for i in range(3):
        with psycopg3.connect(dsn) as conn:
            with conn.cursor(
                binary=fmt == Format.BINARY, row_factory=row_factory
            ) as cur:
                cur.execute(faker.drop_stmt)
                cur.execute(faker.create_stmt)
                cur.executemany(faker.insert_stmt, faker.records)
                cur.execute(faker.select_stmt)

                if fetch == "one":
                    while 1:
                        tmp = cur.fetchone()
                        if tmp is None:
                            break
                elif fetch == "many":
                    while 1:
                        tmp = cur.fetchmany(3)
                        if not tmp:
                            break
                elif fetch == "all":
                    cur.fetchall()
                elif fetch == "iter":
                    for rec in cur:
                        pass

        tmp = None
        del cur, conn
        gc.collect()
        gc.collect()
        n.append(len(gc.get_objects()))

    assert n[0] == n[1] == n[2], f"objects leaked: {n[1] - n[0]}, {n[2] - n[1]}"
def test_identify_closure(conn, dsn):
    conn2 = psycopg3.connect(dsn)

    def closer():
        time.sleep(0.3)
        conn2.execute(
            "select pg_terminate_backend(%s)", [conn.pgconn.backend_pid]
        )

    t0 = time.time()
    sel = selectors.DefaultSelector()
    sel.register(conn, selectors.EVENT_READ)
    t = threading.Thread(target=closer)
    t.start()
    assert sel.select(timeout=1.0)
    with pytest.raises(psycopg3.OperationalError):
        conn.execute("select 1")
    t1 = time.time()
    assert 0.3 < t1 - t0 < 0.5
def test_notifies(conn, dsn):
    nconn = psycopg3.connect(dsn, autocommit=True)
    npid = nconn.pgconn.backend_pid

    def notifier():
        time.sleep(0.25)
        nconn.cursor().execute("notify foo, '1'")
        time.sleep(0.25)
        nconn.cursor().execute("notify foo, '2'")

    conn.autocommit = True
    conn.cursor().execute("listen foo")

    t0 = time.time()
    t = threading.Thread(target=notifier)
    t.start()

    ns = []
    gen = conn.notifies()
    for n in gen:
        ns.append((n, time.time()))
        if len(ns) >= 2:
            gen.close()

    assert len(ns) == 2

    n, t1 = ns[0]
    assert isinstance(n, psycopg3.Notify)
    assert n.pid == npid
    assert n.channel == "foo"
    assert n.payload == "1"
    assert t1 - t0 == pytest.approx(0.25, abs=0.05)

    n, t1 = ns[1]
    assert n.pid == npid
    assert n.channel == "foo"
    assert n.payload == "2"
    assert t1 - t0 == pytest.approx(0.5, abs=0.05)
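# A condensed usage sketch of the notifies() generator exercised above,
# assuming only a "dsn" string: each yielded item is a psycopg3.Notify
# carrying .channel, .payload, and .pid, and closing the generator stops
# the wait.
import psycopg3

def print_two_notifications(dsn):
    conn = psycopg3.connect(dsn, autocommit=True)
    conn.cursor().execute("listen foo")
    gen = conn.notifies()
    received = 0
    for n in gen:
        print(n.channel, n.payload, n.pid)
        received += 1
        if received >= 2:
            gen.close()  # stop listening after two notifications
    conn.close()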
def test_copy_from_leaks(dsn, faker, fmt):
    faker.format = PgFormat.from_pq(fmt)
    faker.choose_schema(ncols=20)
    faker.make_records(20)

    n = []
    for i in range(3):
        with psycopg3.connect(dsn) as conn:
            with conn.cursor(binary=fmt) as cur:
                cur.execute(faker.drop_stmt)
                cur.execute(faker.create_stmt)

                stmt = sql.SQL("copy {} ({}) from stdin (format {})").format(
                    faker.table_name,
                    sql.SQL(", ").join(faker.fields_names),
                    sql.SQL(fmt.name),
                )
                with cur.copy(stmt) as copy:
                    for row in faker.records:
                        copy.write_row(row)

                cur.execute(faker.select_stmt)
                recs = cur.fetchall()
                for got, want in zip(recs, faker.records):
                    faker.assert_record(got, want)
                del recs

        del cur, conn
        gc.collect()
        gc.collect()
        n.append(len(gc.get_objects()))

    assert n[0] == n[1] == n[2], f"objects leaked: {n[1] - n[0]}, {n[2] - n[1]}"
# Main entry point for the writer service
import os
import sys
import json

import psycopg3

from observatory.writer import Writer

connection = psycopg3.connect(
    'postgresql://*****:*****@localhost/postgres', autocommit=True)

Writer.start(
    connection=connection,
    consumer=map(json.loads, map(str.rstrip, sys.stdin)),
)
import psycopg3

conn = psycopg3.connect(CONNINFO)
cur = conn.cursor()
cur.execute("SELECT * FROM table")
print(cur.fetchall())
cur.close()
conn.close()
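# The same query written with context managers, as most snippets here do;
# this closes the cursor and connection even if the query raises. CONNINFO
# is the same assumed connection string as above, and "mytable" stands in
# for a real (non-reserved) table name.
with psycopg3.connect(CONNINFO) as conn:
    with conn.cursor() as cur:
        cur.execute("SELECT * FROM mytable")
        print(cur.fetchall())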
def logsAnalysis():
    db = psycopg3.connect("dbname=news")
def get_connection():
    return psycopg3.connect(CONNECTION_STRING)
def get_connection_sync():
    return psycopg3.connect(DB_CONNECTION_STRING)
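# Opening a fresh connection per call is costly; a pool (the async variant
# appears in test_putconn_no_pool below) amortizes the setup. A sketch
# assuming a synchronous pool.ConnectionPool with the same getconn/putconn
# API as the async one, and the DB_CONNECTION_STRING defined above.
from psycopg3 import pool

_pool = pool.ConnectionPool(DB_CONNECTION_STRING, min_size=1)

def get_pooled_connection():
    return _pool.getconn()  # hand back with _pool.putconn(conn) when done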
# Main entry point for the writer service
import os
import json

import psycopg3
from kafka import KafkaConsumer

from observatory.writer import Writer

connection = psycopg3.connect(
    os.getenv('PG_CONNECTION', 'dbname=test user=postgres'))

consumer = KafkaConsumer(
    os.getenv('KAFKA_TOPIC', 'localhost:9092'),
    bootstrap_servers=[os.getenv('KAFKA_HOST', 'localhost:9092')],
    auto_offset_reset='earliest',
    enable_auto_commit=True,
    group_id='my-group',
    value_deserializer=lambda x: json.loads(x.decode('utf-8')),
)

Writer.start(connection=connection, consumer=consumer)
import sys

# Assumed imports for a self-contained snippet: PyQt6 (app.exec() without an
# underscore suggests Qt 6) and psycopg3 itself.
from PyQt6.QtWidgets import QApplication, QMainWindow

import psycopg3


def application():
    app = QApplication(sys.argv)
    window = QMainWindow()
    window.setWindowTitle("My program")
    window.setGeometry(150, 100, 500, 500)
    window.show()
    sys.exit(app.exec())


# Connect to an existing database
with psycopg3.connect(
    user="******",
    password="******",
    host="localhost",
    dbname="shop",
    port="5432",
) as conn:
    # Open a cursor to perform database operations
    with conn.cursor() as cur:
        # Execute a command: this creates a new table
        cur.execute("""
            CREATE TABLE test (
                id serial PRIMARY KEY,
                num integer,
                data text)
            """)

        # Pass data to fill the query placeholders and let Psycopg perform
        # the correct conversion (no SQL injections!)
        cur.execute(
            "INSERT INTO test (num, data) VALUES (%s, %s)",
            (100, "abc'def"))
async def test_putconn_no_pool(dsn):
    async with pool.AsyncConnectionPool(dsn, min_size=1) as p:
        conn = psycopg3.connect(dsn)
        with pytest.raises(ValueError):
            await p.putconn(conn)
def worker():
    cnn = psycopg3.connect(dsn)
    cur = cnn.cursor()
    cur.execute("select pg_sleep(0.5)")
    cur.close()
    cnn.close()
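# A sketch of how such a worker might be driven concurrently, mirroring the
# threading.Thread usage in the tests above; "dsn" is assumed to be defined
# in the enclosing scope, and each thread opens its own connection since
# psycopg3 connections are not meant to be shared mid-query.
import threading

threads = [threading.Thread(target=worker) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()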