def connect(dsn=None, *, timeout=TIMEOUT, loop=None, enable_json=True,
            enable_hstore=True, echo=False, **kwargs):
    """A factory for connecting to PostgreSQL.

    The coroutine accepts all parameters that psycopg2.connect() does
    plus optional keyword-only `loop` and `timeout` parameters.

    Returns instantiated Connection object.

    Raises whatever ``Connection._poll`` raises if the connection cannot
    be established; the half-open connection is closed first.
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    # loop.create_future() is the recommended way to create a Future bound
    # to a specific loop; asyncio.Future(loop=loop) is discouraged and the
    # explicit loop argument was later deprecated.
    waiter = loop.create_future()
    conn = Connection(dsn, loop, timeout, waiter, bool(echo), **kwargs)
    try:
        # Drive the psycopg2 async handshake until the connection is ready.
        yield from conn._poll(waiter, timeout)
    except Exception:
        # Never leak a half-open connection on failure.
        conn.close()
        raise
    if enable_json:
        extras.register_default_json(conn._conn)
    if enable_hstore:
        oids = yield from _enable_hstore(conn)
        # oids is None when the hstore extension is not installed on the server.
        if oids is not None:
            oid, array_oid = oids
            extras.register_hstore(conn._conn, oid=oid, array_oid=array_oid)
    return conn
def on_connect(conn):
    # Per-connection hook: install the configured JSON deserializer for
    # json/jsonb columns, but only where the server natively supports them.
    deserializer = self._json_deserializer
    if self._has_native_json:
        extras.register_default_json(conn, loads=deserializer)
    if self._has_native_jsonb:
        extras.register_default_jsonb(conn, loads=deserializer)
def on_connect(conn):
    # Hook run on every fresh connection: wire up the custom JSON decoder
    # for whichever of json/jsonb the backend natively understands.
    loads = self._json_deserializer
    if self._has_native_json:
        extras.register_default_json(conn, loads=loads)
    if self._has_native_jsonb:
        extras.register_default_jsonb(conn, loads=loads)
def on_connect(conn):
    # Per-connection hook: unconditionally register the custom deserializer
    # for both json and jsonb column types.
    loads = self._json_deserializer
    extras.register_default_json(conn, loads=loads)
    extras.register_default_jsonb(conn, loads=loads)
def _connect(dsn=None, *, timeout=TIMEOUT, loop=None, enable_json=True,
             enable_hstore=True, enable_uuid=True, echo=False, **kwargs):
    """Open a PostgreSQL connection and register optional type adapters.

    Accepts everything psycopg2.connect() does, plus keyword-only
    ``timeout``, ``loop``, ``enable_json``, ``enable_hstore``,
    ``enable_uuid`` and ``echo``.  On a polling failure the half-open
    connection is closed and the exception re-raised.
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    waiter = create_future(loop)
    connection = Connection(dsn, loop, timeout, waiter, bool(echo), **kwargs)
    try:
        # Complete the async connection handshake.
        yield from connection._poll(waiter, timeout)
    except Exception:
        connection.close()
        raise
    if enable_json:
        extras.register_default_json(connection._conn)
    if enable_uuid:
        extras.register_uuid(conn_or_curs=connection._conn)
    if enable_hstore:
        hstore_oids = yield from _enable_hstore(connection)
        # None means the hstore extension is absent on the server.
        if hstore_oids is not None:
            oid, array_oid = hstore_oids
            extras.register_hstore(connection._conn, oid=oid,
                                   array_oid=array_oid)
    return connection
def setUpTestDb(self,y):
    """Create the users/games tables and seed two test accounts.

    ``y`` appears to be a sequence of at least two plaintext passwords,
    one per seeded user — TODO confirm against callers.
    """
    from UserLoginPackage import encryptString
    # Have json columns come back as parsed Python objects on this connection.
    register_default_json(self.conn)
    self.writeQuery("create table users (username text, password text, games text, friends text, friend_requests text, locked_out boolean default false, attempts integer default 4)")
    self.writeQuery("create table games (game_id text, game_data json, players text, turn text)")
    # Debug output: dump the schema of the freshly created games table.
    print(self.getQuery("select column_name, data_type, character_maximum_length from INFORMATION_SCHEMA.COLUMNS where table_name = 'games'"))
    # NOTE(review): the encrypted password is interpolated straight into the
    # SQL text; this is only safe if encryptString() never emits a single
    # quote — prefer parameterized queries if writeQuery supports them.
    self.writeQuery("""insert into users (username, password) values ('test1','{}')""".format(encryptString(y[0])))
    self.writeQuery("""insert into users (username, password) values ('test2','{}')""".format(encryptString(y[1])))
def test_manual_wrap_extension_types(self):
    """Psycopg2 extension-type registration must accept our wrapped connection."""
    conn = self._get_conn()
    # Each registration below raises
    #   TypeError: argument 2 must be a connection, cursor or None
    # if the wrapper is not transparent to psycopg2's C-level checks,
    # so simply reaching the end of the test is the success condition.
    extras.register_uuid(conn_or_curs=conn)
    extras.register_default_json(conn)
def test_manual_wrap_extension_types(self):
    """Extension-type registration must work on a traced connection."""
    conn, _ = self._get_conn_and_tracer()
    # Both calls crash with
    #   TypeError: argument 2 must be a connection, cursor or None
    # when the traced connection is not a genuine psycopg2 connection,
    # so no explicit assertions are needed here.
    extras.register_uuid(conn_or_curs=conn)
    extras.register_default_json(conn)
def setUpTestDb(self, y):
    """Create the users/games tables and insert two seed accounts.

    ``y`` appears to be a sequence holding at least two plaintext
    passwords — TODO confirm against callers.
    """
    from UserLoginPackage import encryptString
    # Decode json columns into Python objects on this connection.
    register_default_json(self.conn)
    self.writeQuery(
        "create table users (username text, password text, games text, friends text, friend_requests text, locked_out boolean default false, attempts integer default 4)"
    )
    self.writeQuery(
        "create table games (game_id text, game_data json, players text, turn text)"
    )
    # NOTE(review): values are interpolated into the SQL text; this assumes
    # encryptString() output never contains a quote character — verify, or
    # switch to parameterized queries if writeQuery supports them.
    self.writeQuery(
        """insert into users (username, password) values ('test1','{}')""".
        format(encryptString(y[0])))
    self.writeQuery(
        """insert into users (username, password) values ('test2','{}')""".
        format(encryptString(y[1])))
def patch_ujson_into_psycopg2(): # Use fast json parser. try: import ujson except ImportError: return # patch decoding from psycopg2.extras import register_default_json, register_default_jsonb register_default_json(globally=True, loads=ujson.loads) register_default_jsonb(globally=True, loads=ujson.loads) # patch encoding from psycopg2 import _json _json.json = ujson
async def _connect(self):
    """Finish establishing the async connection and register adapters.

    Waits for the underlying psycopg2 connection to become ready, then
    installs json / uuid / hstore type adapters according to the
    ``_enable_*`` flags set on this instance.

    Returns ``self`` so the coroutine can be awaited fluently.  On any
    polling failure the connection is closed and the exception re-raised.
    """
    try:
        await self._poll(self._waiter, self._timeout)
    except Exception:
        # Never leak a half-open connection.
        self.close()
        raise
    if self._enable_json:
        extras.register_default_json(self._conn)
    if self._enable_uuid:
        extras.register_uuid(conn_or_curs=self._conn)
    if self._enable_hstore:
        oids = await self._get_oids()
        # oids is None when the hstore extension is unavailable.
        if oids is not None:
            oid, array_oid = oids
            extras.register_hstore(self._conn, oid=oid, array_oid=array_oid)
    return self
def setUpTestDb(self, y):
    """Create the users/games tables, dump the games schema, and seed
    two test accounts.

    ``y`` appears to be a sequence holding at least two plaintext
    passwords — TODO confirm against callers.
    """
    from UserLoginPackage import encryptString
    # Decode json columns into Python objects on this connection.
    register_default_json(self.conn)
    self.writeQuery(
        "create table users (username text, password text, games text, friends text, friend_requests text, locked_out boolean default false, attempts integer default 4)"
    )
    self.writeQuery(
        "create table games (game_id text, game_data json, players text, turn text)"
    )
    # Debug output: show the schema of the freshly created games table.
    print(
        self.getQuery(
            "select column_name, data_type, character_maximum_length from INFORMATION_SCHEMA.COLUMNS where table_name = 'games'"
        ))
    # NOTE(review): values are interpolated into the SQL text; this assumes
    # encryptString() output never contains a quote character — verify, or
    # switch to parameterized queries if writeQuery supports them.
    self.writeQuery(
        """insert into users (username, password) values ('test1','{}')""".
        format(encryptString(y[0])))
    self.writeQuery(
        """insert into users (username, password) values ('test2','{}')""".
        format(encryptString(y[1])))
def _connect(dsn=None, *, timeout=TIMEOUT, loop=None, enable_json=True,
             enable_hstore=True, echo=False, **kwargs):
    """Connect to PostgreSQL and return a ready Connection.

    Accepts the same parameters as psycopg2.connect() plus keyword-only
    ``timeout``, ``loop``, ``enable_json``, ``enable_hstore`` and ``echo``.
    Closes the connection and re-raises if the initial poll fails.
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    # Prefer loop.create_future() over asyncio.Future(loop=loop); the
    # explicit Future(loop=...) form is discouraged and later deprecated.
    waiter = loop.create_future()
    conn = Connection(dsn, loop, timeout, waiter, bool(echo), **kwargs)
    try:
        # Drive the psycopg2 async handshake until the connection is ready.
        yield from conn._poll(waiter, timeout)
    except Exception:
        # Never leak a half-open connection on failure.
        conn.close()
        raise
    if enable_json:
        extras.register_default_json(conn._conn)
    if enable_hstore:
        oids = yield from _enable_hstore(conn)
        # oids is None when the hstore extension is not installed.
        if oids is not None:
            oid, array_oid = oids
            extras.register_hstore(conn._conn, oid=oid, array_oid=array_oid)
    return conn
state = conn.poll() if state == extensions.POLL_OK: break elif state == extensions.POLL_READ: wait_read(conn.fileno(), timeout=timeout) elif state == extensions.POLL_WRITE: wait_write(conn.fileno(), timeout=timeout) else: raise OperationalError("Bad result from poll: %r" % state) extensions.set_wait_callback(gevent_wait_callback) # End Gevent Monkey patching # Set JSON to Pyon default simplejson to get str instead of unicode in deserialization register_default_json(None, globally=True, loads=json.loads) # THREAD (GEVENT) LOCAL - Holds current transaction and per request stats db_context = threading.local() class DatabaseConnectionPool(object): """ Gevent compliant database connection pool """ def __init__(self, maxsize=100): if not isinstance(maxsize, (int, long)): raise TypeError('Expected integer, got %r' % (maxsize, )) self.maxsize = maxsize # Maximum connections (pool + checkout out) self.pool = Queue() # Open connection pool self.size = 0 # Number of open connections def get(self):
'ELSE last_value + %s ' 'END ' 'FROM "%s"' % (self.flavor.param, name), (increment, )) else: cursor.execute('SELECT CASE WHEN NOT is_called THEN last_value ' 'ELSE last_value + increment_by ' 'END ' 'FROM "%s"' % name) return cursor.fetchone()[0] register_type(UNICODE) if PYDATE: register_type(PYDATE) if PYDATETIME: register_type(PYDATETIME) if PYTIME: register_type(PYTIME) if PYINTERVAL: register_type(PYINTERVAL) register_adapter(float, lambda value: AsIs(repr(value))) register_adapter(Decimal, lambda value: AsIs(str(value))) def convert_json(value): return json.loads(value, object_hook=JSONDecoder()) register_default_json(loads=convert_json) register_default_jsonb(loads=convert_json)
def on_connect(conn):
    # Per-connection hook: decode json columns with the configured deserializer.
    json_loads = self._json_deserializer
    extras.register_default_json(conn, loads=json_loads)
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Custom field definitions for Peewee and Postgresql.""" import enum import ipaddress from datetime import timedelta # Need to register alternate JSON codecs early from devtest import json from psycopg2 import extras extras.register_default_json(globally=True, loads=json.loads) extras.register_default_jsonb(globally=True, loads=json.loads) del extras, json from peewee import Field from playhouse.postgres_ext import ( BinaryJSONField, TSVectorField, # noqa DateTimeTZField, ArrayField) from devtest.core import types JSONField = BinaryJSONField _PEEWEE_FIELDS = [
# Abort quickly if no DNS if not self.dns_results: logger.error("Aborting test, no addresses found") return_value = 8 else: self.run_ping_tests() return_value = self.run_browser_tests() # Set all other "latest" flags to false Measurement.objects.filter(url=self.url, latest=True).update(latest=False) self.latest = True self.finished = timezone.now() self.save() return return_value # Proper representation with OrderedDict register_default_json( globally=True, loads=lambda s: json.loads(s, object_pairs_hook=OrderedDict)) register_default_jsonb( globally=True, loads=lambda s: json.loads(s, object_pairs_hook=OrderedDict)) yaml.add_representer( OrderedDict, lambda self, data: self.represent_mapping( 'tag:yaml.org,2002:map', data.items()))
import urllib import traceback if len(sys.argv) > 2: PORT = int(sys.argv[2]) I = sys.argv[1] elif len(sys.argv) > 1: PORT = int(sys.argv[1]) I = "" else: PORT = 8000 I = "" # this will prevent the GeoJSON we load from PostGIS to be parsed into a Python object, # since we want to send it straight to the client extras.register_default_json(loads=lambda x: x) # some content headers we'll need a few times: geojsonHeader = 'application/vnd.geo+json; charset=utf-8' jsonHeader = 'application/json; charset=utf-8' try: conn = psycopg2.connect("dbname='cpt' host='localhost'") except Exception as e: logging.error(e) cur = conn.cursor() class ServerHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def do_GET(self):
state = conn.poll() if state == extensions.POLL_OK: break elif state == extensions.POLL_READ: wait_read(conn.fileno(), timeout=timeout) elif state == extensions.POLL_WRITE: wait_write(conn.fileno(), timeout=timeout) else: raise OperationalError( "Bad result from poll: %r" % state) extensions.set_wait_callback(gevent_wait_callback) # End Gevent Monkey patching # Set JSON to Pyon default simplejson to get str instead of unicode in deserialization register_default_json(None, globally=True, loads=json.loads) # THREAD (GEVENT) LOCAL - Holds current transaction and per request stats db_context = threading.local() class DatabaseConnectionPool(object): """ Gevent compliant database connection pool """ def __init__(self, maxsize=100): if not isinstance(maxsize, (int, long)): raise TypeError('Expected integer, got %r' % (maxsize, )) self.maxsize = maxsize # Maximum connections (pool + checkout out) self.pool = Queue() # Open connection pool self.size = 0 # Number of open connections
import os, uuid, csv import config_reader from pathlib import Path from psycopg2.extras import execute_values, register_default_json, register_default_jsonb register_default_json(loads=lambda x: x) register_default_jsonb(loads=lambda x: x) def copy_rows(source, destination, query, destination_table, destination_schema): cursor = source.cursor() cursor_name = 'table_cursor_' + str(uuid.uuid4()).replace('-', '') q = 'DECLARE {} SCROLL CURSOR FOR {}'.format(cursor_name, query) cursor.execute(q) fetch_row_count = 10000 while True: cursor.execute('FETCH FORWARD {} FROM {}'.format( fetch_row_count, cursor_name)) if cursor.rowcount == 0: break destination_cursor = destination.cursor() execute_values( destination_cursor, 'INSERT INTO "{}"."{}" VALUES %s'.format(destination_schema, destination_table), cursor.fetchall())
from collections import OrderedDict from django.db import models, connection from django.db.models.fields.related import ForeignKey from psycopg2 import extras extras.register_default_json(loads=lambda x: x) class PostgreSQLManager(models.Manager): def get_queryset(self): return PostgreSQLQuerySet(self.model, using=self._db) def to_json(self): return self.get_queryset().to_json() class PostgreSQLQuerySet(models.QuerySet): def to_json(self): template = """ select array_to_json(array_agg(row_to_json(t))) FROM({}) t""" sql = self.query.sql_with_params() args = (template.format(sql[0]), sql[1]) with connection.cursor() as c: c.execute(*args)
import os, uuid, csv from . import config_reader from pathlib import Path from psycopg2.extras import execute_values, register_default_json, register_default_jsonb from subset_utils import columns_joined, columns_tupled, schema_name, table_name, fully_qualified_table, redact_relationships, quoter register_default_json(loads=lambda x: str(x)) register_default_jsonb(loads=lambda x: str(x)) def prep_temp_dbs(_, __): pass def unprep_temp_dbs(_, __): pass def turn_off_constraints(connection): # can't be done in postgres pass def copy_rows(source, destination, query, destination_table): datatypes = get_table_datatypes(table_name(destination_table), schema_name(destination_table), destination) def template_piece(dt): if dt == '_json': return '%s::json[]'