def on_connect(conn):
    # Run once per new DBAPI connection: attach the dialect's JSON
    # deserializer, but only for the types this psycopg2 build exposes
    # natively (older psycopg2 releases lack one or both hooks).
    hooks = (
        (self._has_native_json, extras.register_default_json),
        (self._has_native_jsonb, extras.register_default_jsonb),
    )
    for available, register in hooks:
        if available:
            register(conn, loads=self._json_deserializer)
def on_connect(conn):
    # Per-connection hook: route both json and jsonb column decoding
    # through the dialect's configured deserializer.
    for register in (extras.register_default_json,
                     extras.register_default_jsonb):
        register(conn, loads=self._json_deserializer)
def on_connect(conn):
    # Connection-time hook: have psycopg2 decode json/jsonb column
    # values with the dialect's configured deserializer instead of the
    # driver default. Registration is per-connection (not global) and
    # is skipped for whichever type this psycopg2 build does not
    # natively support.
    if self._has_native_json:
        extras.register_default_json(
            conn, loads=self._json_deserializer)
    if self._has_native_jsonb:
        extras.register_default_jsonb(
            conn, loads=self._json_deserializer)
def patch_ujson_into_psycopg2(): # Use fast json parser. try: import ujson except ImportError: return # patch decoding from psycopg2.extras import register_default_json, register_default_jsonb register_default_json(globally=True, loads=ujson.loads) register_default_jsonb(globally=True, loads=ujson.loads) # patch encoding from psycopg2 import _json _json.json = ujson
def __init__(self, name, settings):
    """Initialise the backend and open its psycopg2 connection.

    ``settings`` must contain 'host' and 'database'; 'port',
    'username' and 'password' are forwarded only when present.
    """
    super().__init__(name, settings)
    kwargs = {
        'host': settings['host'],
        'database': settings['database'],
    }
    # Optional settings are forwarded only when supplied. libpq (and
    # therefore psycopg2.connect) expects the connection keyword
    # 'user', not 'username', so that key is translated on the way
    # through -- passing 'username' verbatim makes connect() reject
    # the DSN.
    optional_keys = ['port', 'username', 'password']
    for key in optional_keys:
        if key in settings:
            kwargs['user' if key == 'username' else key] = settings[key]
    conn = psycopg2.connect(**kwargs)
    # Decode jsonb columns with rapidjson instead of the stdlib parser.
    register_default_jsonb(conn, loads=rapidjson.loads)
    self.conn = conn
def __init__(self, name, settings):
    """Initialise the backend and open its psycopg2 connection.

    ``settings`` must contain 'host' and 'database'; 'port',
    'username' and 'password' are forwarded only when present.
    """
    super().__init__(name, settings)
    kwargs = {
        'host': settings['host'],
        'database': settings['database'],
    }
    # Optional connection settings, copied through verbatim when given.
    # NOTE(review): psycopg2/libpq accept the keyword 'user', not
    # 'username' -- confirm callers never supply 'username' here, as
    # connect() would reject it as an invalid option.
    optional_keys = [
        'port',
        'username',
        'password'
    ]
    for key in optional_keys:
        if key in settings:
            kwargs[key] = settings[key]
    conn = psycopg2.connect(**kwargs)
    # Decode jsonb columns with rapidjson instead of the stdlib parser.
    register_default_jsonb(conn, loads=rapidjson.loads)
    self.conn = conn
'ELSE last_value + %s ' 'END ' 'FROM "%s"' % (self.flavor.param, name), (increment, )) else: cursor.execute('SELECT CASE WHEN NOT is_called THEN last_value ' 'ELSE last_value + increment_by ' 'END ' 'FROM "%s"' % name) return cursor.fetchone()[0] register_type(UNICODE) if PYDATE: register_type(PYDATE) if PYDATETIME: register_type(PYDATETIME) if PYTIME: register_type(PYTIME) if PYINTERVAL: register_type(PYINTERVAL) register_adapter(float, lambda value: AsIs(repr(value))) register_adapter(Decimal, lambda value: AsIs(str(value))) def convert_json(value): return json.loads(value, object_hook=JSONDecoder()) register_default_json(loads=convert_json) register_default_jsonb(loads=convert_json)
import os, uuid, csv
from . import config_reader
from pathlib import Path
from psycopg2.extras import execute_values, register_default_json, register_default_jsonb
from subset_utils import columns_joined, columns_tupled, schema_name, table_name, fully_qualified_table, redact_relationships, quoter

# Have psycopg2 hand json/jsonb column values back as plain strings
# instead of parsed Python objects (presumably so rows can be copied
# into the destination verbatim -- confirm against the insert path).
register_default_json(loads=lambda x: str(x))
register_default_jsonb(loads=lambda x: str(x))


# No-op lifecycle hooks: this backend needs no temp-database handling.
def prep_temp_dbs(_, __):
    pass


def unprep_temp_dbs(_, __):
    pass


def turn_off_constraints(connection):
    # can't be done in postgres
    pass


def copy_rows(source, destination, query, destination_table):
    # Destination column datatypes drive the per-column value templates.
    datatypes = get_table_datatypes(table_name(destination_table), schema_name(destination_table), destination)

    # NOTE(review): copy_rows appears truncated in this chunk --
    # template_piece stops after the '_json' case and the copy logic
    # that uses it is not visible here.
    def template_piece(dt):
        if dt == '_json':
            return '%s::json[]'
import os, uuid, csv
import config_reader
from pathlib import Path
from psycopg2.extras import execute_values, register_default_json, register_default_jsonb

# Identity loaders: json/jsonb values pass through as the raw text
# psycopg2 received, with no deserialization.
register_default_json(loads=lambda x: x)
register_default_jsonb(loads=lambda x: x)


def copy_rows(source, destination, query, destination_table, destination_schema):
    """Stream rows selected by ``query`` from ``source`` into
    ``destination_schema.destination_table`` on ``destination``,
    batching through a server-side scroll cursor."""
    cursor = source.cursor()
    # Unique server-side cursor name so concurrent copies don't clash.
    cursor_name = 'table_cursor_' + str(uuid.uuid4()).replace('-', '')
    q = 'DECLARE {} SCROLL CURSOR FOR {}'.format(cursor_name, query)
    cursor.execute(q)
    fetch_row_count = 10000
    while True:
        # Pull the next batch; rowcount == 0 signals exhaustion.
        cursor.execute('FETCH FORWARD {} FROM {}'.format(
            fetch_row_count, cursor_name))
        if cursor.rowcount == 0:
            break
        destination_cursor = destination.cursor()
        # NOTE(review): no commit/cursor-close is visible in this chunk;
        # the function body appears to continue beyond this view.
        execute_values(
            destination_cursor,
            'INSERT INTO "{}"."{}" VALUES %s'.format(destination_schema, destination_table),
            cursor.fetchall())
from __future__ import unicode_literals
import json
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.db.models.lookups import BuiltinLookup, Transform
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
from django.utils import six
from psycopg2.extras import register_default_jsonb

# Bypass psycopg2's built-in jsonb decoding and return the raw text,
# so the field can apply its own decoder later in from_db_value.
register_default_jsonb(loads=lambda x: x)
# Map Postgres OID 3802 (jsonb) to this field for introspection.
DatabaseIntrospection.data_types_reverse[3802] = "django_pgjsonb.JSONField"


class JSONField(models.Field):
    description = 'JSON Field'

    def __init__(self, *args, **kwargs):
        # Extra kwargs for json.loads when decoding values from the DB.
        self.decode_kwargs = kwargs.pop('decode_kwargs', {
            # 'parse_float': decimal.Decimal
        })
        # Extra kwargs for json.dumps when encoding values for the DB;
        # DjangoJSONEncoder handles dates/decimals/UUIDs.
        self.encode_kwargs = kwargs.pop('encode_kwargs', {
            'cls': DjangoJSONEncoder,
        })
        db_index = kwargs.get("db_index")
        db_index_options = kwargs.pop("db_index_options", {})
        # Normalize index options to a list of option dicts.
        # NOTE(review): __init__ appears truncated in this chunk (no
        # super().__init__ call is visible) -- verify against the full file.
        if db_index:
            self.db_index_options = db_index_options if isinstance(db_index_options, (list, tuple)) else [db_index_options]
# Abort quickly if no DNS if not self.dns_results: logger.error("Aborting test, no addresses found") return_value = 8 else: self.run_ping_tests() return_value = self.run_browser_tests() # Set all other "latest" flags to false Measurement.objects.filter(url=self.url, latest=True).update(latest=False) self.latest = True self.finished = timezone.now() self.save() return return_value # Proper representation with OrderedDict register_default_json( globally=True, loads=lambda s: json.loads(s, object_pairs_hook=OrderedDict)) register_default_jsonb( globally=True, loads=lambda s: json.loads(s, object_pairs_hook=OrderedDict)) yaml.add_representer( OrderedDict, lambda self, data: self.represent_mapping( 'tag:yaml.org,2002:map', data.items()))
# distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Custom field definitions for Peewee and Postgresql.""" import enum import ipaddress from datetime import timedelta # Need to register alternate JSON codecs early from devtest import json from psycopg2 import extras extras.register_default_json(globally=True, loads=json.loads) extras.register_default_jsonb(globally=True, loads=json.loads) del extras, json from peewee import Field from playhouse.postgres_ext import ( BinaryJSONField, TSVectorField, # noqa DateTimeTZField, ArrayField) from devtest.core import types JSONField = BinaryJSONField _PEEWEE_FIELDS = [ 'BareField', 'BigIntegerField', 'BlobField', 'BooleanField', 'CharField',
# -*- coding: utf-8 -*- import json import logging import os import time import psycopg2 import psycopg2.extras as pg_extras import psycopg2.extensions as pg_extensions import kafka import kafka.errors # Register jsonb extras to convert jsonb data to dict transparently pg_extras.register_default_jsonb(globally=True) pg_extensions.register_adapter(dict, psycopg2.extras.Json) DB_DSN = os.getenv('DB_DSN', 'postgresql://*****:*****@localhost/meetup') KAFKA_SERVER = os.getenv('KAFKA_SERVER', 'localhost:9092') TOPIC = os.getenv('KAFKA_TOPIC', 'my_topic') logging.basicConfig( level=logging.INFO, format='%(asctime)s %(levelname)s [%(name)s] %(message)s', ) logger = logging.getLogger(__name__) def get_consumer(): """Factory method to get KafkaConsumer instance with retries logic :return: KafkaConsumer instance """
# -*- coding: utf-8 -*- from itertools import chain from psycopg2 import connect, sql from psycopg2.extras import NamedTupleCursor, Json, register_default_jsonb from psycopg2.extensions import register_adapter from flask import current_app # adapt python dict to postgresql json type register_adapter(dict, Json) # register the jsonb type register_default_jsonb() class Database(): ''' Database object used as a global connection object to the db ''' db = None @classmethod def _query(cls, query, parameters=None, rowcount=None): ''' Performs a query and returns results as a named tuple ''' cur = cls.db.cursor() cur.execute(query, parameters) query_str = query.as_string(cur) if isinstance(query, sql.Composable) else query