Code example #1
File: pg.py  Project: drcloud/junkyard
 def cxn(self):
     if self._cxn is None:
         self._cxn = LoggingConnection(self.dsn)
         self._cxn.initialize(self._log)
         register_uuid()
         register_inet()
     return self._cxn
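
The snippet above calls register_uuid() with no arguments, which registers the UUID typecaster and the uuid.UUID adapter globally rather than for one connection or cursor. A minimal round-trip sketch, assuming a reachable database (the DSN below is a placeholder):

import uuid

import psycopg2
from psycopg2.extras import register_uuid

register_uuid()  # no conn_or_curs given, so registration is global

conn = psycopg2.connect("dbname=test")  # hypothetical DSN
with conn, conn.cursor() as cur:
    value = uuid.uuid4()
    cur.execute("SELECT %s::uuid", (value,))
    # adapted on the way in, cast back to uuid.UUID on the way out
    assert cur.fetchone()[0] == value
conn.close()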
Code example #2
File: test_psycopg.py  Project: tebriel/dd-trace-py
    def test_manual_wrap_extension_types(self):
        conn, _ = self._get_conn_and_tracer()
        # NOTE: this will crash if it doesn't work.
        #   _ext.register_type(_ext.UUID, conn_or_curs)
        #   TypeError: argument 2 must be a connection, cursor or None
        extras.register_uuid(conn_or_curs=conn)

        # NOTE: this will crash if it doesn't work.
        #   _ext.register_default_json(conn)
        #   TypeError: argument 2 must be a connection, cursor or None
        extras.register_default_json(conn)
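
Beyond the crash check above, register_default_json also accepts a custom loads callable. A hedged sketch, assuming conn is any open psycopg2 connection, that parses fractional JSON numbers as Decimal:

import json
from decimal import Decimal

from psycopg2 import extras

def decimal_loads(s):
    # parse JSON numbers with a fractional part as Decimal instead of float
    return json.loads(s, parse_float=Decimal)

# conn: an open psycopg2 connection (assumed to exist)
extras.register_default_json(conn, loads=decimal_loads)
extras.register_default_jsonb(conn, loads=decimal_loads)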
Code example #3
    def update_last_used_time(cls, token: str):
        """更新token最后使用时间"""
        with pg_conn_context() as conn, conn.cursor() as cursor:
            register_uuid(conn_or_curs=cursor)  # first positional parameter is oids, so pass the cursor by keyword

            insert_query = """
            UPDATE calendar_tokens SET last_used_time = %s WHERE token = %s;
            """
            cursor.execute(insert_query,
                           (datetime.datetime.now(), uuid.UUID(token)))
            conn.commit()
Code example #4
    def test_manual_wrap_extension_types(self):
        conn = self._get_conn()
        # NOTE: this will crash if it doesn't work.
        #   _ext.register_type(_ext.UUID, conn_or_curs)
        #   TypeError: argument 2 must be a connection, cursor or None
        extras.register_uuid(conn_or_curs=conn)

        # NOTE: this will crash if it doesn't work.
        #   _ext.register_default_json(conn)
        #   TypeError: argument 2 must be a connection, cursor or None
        extras.register_default_json(conn)
Code example #5
    def reset_tokens(cls,
                     student_id: str,
                     typ: Optional[str] = "student") -> None:
        """删除某用户所有的 token,默认为学生"""
        with pg_conn_context() as conn, conn.cursor() as cursor:
            register_uuid(conn_or_curs=cursor)

            insert_query = """
            DELETE FROM calendar_tokens WHERE identifier = %s AND type = %s;
            """
            cursor.execute(insert_query, (student_id, typ))
            conn.commit()
Code example #6
def get_connection(credentials: PostgresCredentials):
    connection = psycopg2.connect(
        dbname=credentials.PG_DATABASE,
        user=credentials.PG_USER,
        host=credentials.PG_HOST,
        password=credentials.PG_PASSWORD.get_secret_value(),
        port=credentials.PG_PORT)
    register_uuid(conn_or_curs=connection)

    try:
        yield connection
    finally:
        connection.close()
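
get_connection above is written as a generator (yield the connection, close it in finally), which is the shape dependency-injection frameworks expect. Outside such a framework the same function can be driven with contextlib; a usage sketch, assuming credentials is a populated PostgresCredentials instance:

from contextlib import contextmanager

pg_connection = contextmanager(get_connection)

with pg_connection(credentials) as conn:  # `credentials` assumed to exist
    with conn.cursor() as cur:
        cur.execute("SELECT 1")
        print(cur.fetchone())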
Code example #7
 def __init__(
         self, json: dict, s3_bucket: str, db_host: str, db_user: str, db_name: str, db_password: str, threads: int):
     self.client = Client(json)
     self.s3 = resource('s3', endpoint_url=None, aws_access_key_id=None,
                        aws_secret_access_key=None)
     self.bucket_name = s3_bucket
     self.bucket = self.s3.Bucket(self.bucket_name)
     self.host = db_host
     self.db_user = db_user
     self.db_name = db_name
     self.db_password = db_password
     self.threads = threads
     extras.register_uuid()
Code example #8
    def __init__(self, database, **kwargs):
        kwargs['database'] = database
        kwargs['host'] = kwargs.get('host', 'localhost')
        kwargs['port'] = kwargs.get('port', 5432)

        self.connection = psycopg2.connect(**kwargs)
        extensions.register_type(extensions.UNICODE, self.connection)
        extensions.register_type(extensions.UNICODEARRAY, self.connection)
        extras.register_uuid(conn_or_curs=self.connection)
        # psycopg2.extras.register_json(conn_or_curs=self.connection, name='jsonb')
        extensions.register_adapter(dict, BetterJson)
        self.connection.autocommit = True

        self.cursor = self.connection.cursor()
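
BetterJson is defined elsewhere in that project; a hedged guess at its shape, modeled on the standard psycopg2.extras.Json adapter, might look like:

import json

from psycopg2.extras import Json


class BetterJson(Json):
    """Json adapter that tolerates values json.dumps cannot serialize natively."""

    def dumps(self, obj):
        # fall back to str() for UUIDs, datetimes and similar types
        return json.dumps(obj, default=str)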
Code example #9
def insert_matches(recording_mapping, mb_recordings, msb_recordings, source):

    completed = {}
    with psycopg2.connect(config.DB_CONNECT_MB) as conn:
        with conn.cursor() as curs:
            register_uuid(curs)
            rows = []
            total = 0
            for k in recording_mapping.keys():
                a = recording_mapping[k]
                completed[a[0]] = 1
                rows.append((a[0], msb_recordings[a[1]]["artist_name"],
                             msb_recordings[a[1]]["artist_msid"],
                             msb_recordings[a[1]]["recording_name"],
                             msb_recordings[a[1]]["recording_msid"],
                             msb_recordings[a[1]]["release_name"],
                             msb_recordings[a[1]]["release_msid"],
                             mb_recordings[a[2]]["artist_name"],
                             mb_recordings[a[2]]["artist_credit_id"],
                             mb_recordings[a[2]]["recording_name"],
                             mb_recordings[a[2]]["recording_id"],
                             mb_recordings[a[2]]["release_name"],
                             mb_recordings[a[2]]["release_id"], source))
                total += 1

                if len(rows) == 2000:
                    insert_rows(curs, "mapping.msid_mbid_mapping", rows)
                    rows = []

                if total % 1000000 == 0:
                    print("  wrote %d of %d" % (total, len(recording_mapping)))

            insert_rows(curs, "mapping.msid_mbid_mapping", rows)
            conn.commit()

    msb_recording_index = []
    for i, msb_recording in enumerate(msb_recordings):
        if i in completed:
            continue

        msb_recording_index.append(i)

    msb_recording_index = sorted(msb_recording_index,
                                 key=lambda rec:
                                 (msb_recordings[rec]["artist_name"],
                                  msb_recordings[rec]["recording_name"]))

    return (total, msb_recording_index)
Code example #10
File: create.py  Project: lisatn/workload-automation
def _apply_database_schema(args, sql_commands, schema_major, schema_minor):
    conn = connect(dbname=args.dbname,
                   user=args.username,
                   password=args.password,
                   host=args.postgres_host,
                   port=args.postgres_port)
    cursor = conn.cursor()
    cursor.execute(sql_commands)

    extras.register_uuid()
    cursor.execute("INSERT INTO DatabaseMeta VALUES (%s, %s, %s)",
                   (uuid.uuid4(), schema_major, schema_minor))

    conn.commit()
    cursor.close()
    conn.close()
Code example #11
    def _init_connection(self):
        _postgres = True

        if _postgres:
            import psycopg2
            from psycopg2.extras import register_uuid
            from psycopg2.extras import LoggingConnection

            config = {
                'user': os.getenv('POSTGRES_USER', os.getenv('USER')),
                'database': os.getenv('POSTGRES_DB', 'backbone_service'),
                'password': os.getenv('POSTGRES_PASSWORD', None),
                'host': os.getenv('POSTGRES_HOST', 'localhost'),
                'port': os.getenv('POSTGRES_PORT', 5432),
            }

            psycopg2.extensions.register_type(register_uuid())
            psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
            psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
            conn = psycopg2.connect(connection_factory=LoggingConnection,
                                    **config)
            conn.initialize(self._logger)

    #        cur = conn.cursor()
    #        cur.execute("SET search_path TO " + 'backbone,public,contrib')
    #        cur.close()
        return conn
Code example #12
File: common.py  Project: dslaw/fatal-accidents
def connect(max_wait_time: int = 60 * 5) -> Connectable:
    if max_wait_time < 0:
        raise ValueError("`max_wait_time` must be non-negative")

    params = read_params()
    error_message = ""
    end_t = time() + max_wait_time
    while time() < end_t:
        try:
            conn: Connectable = psycopg2.connect(params["database_url"])
            register_uuid()
            return conn
        except psycopg2.OperationalError as e:
            error_message = str(e)
            sleep(1)

    raise psycopg2.OperationalError(error_message)
Code example #13
File: create.py  Project: lisatn/workload-automation
def _apply_database_schema(args, sql_commands, schema_major, schema_minor):
    conn = connect(dbname=args.dbname, user=args.username,
                   password=args.password, host=args.postgres_host, port=args.postgres_port)
    cursor = conn.cursor()
    cursor.execute(sql_commands)

    extras.register_uuid()
    cursor.execute("INSERT INTO DatabaseMeta VALUES (%s, %s, %s)",
                   (
                       uuid.uuid4(),
                       schema_major,
                       schema_minor
                   )
                   )

    conn.commit()
    cursor.close()
    conn.close()
Code example #14
    def init_db(self):
        if init_sa is None:
            self.logger.error("Database support requires cs.eyrie to be installed with the PostgreSQL extra: install_requires = ['cs.eyrie[PostgreSQL]']")
            self.terminate()
        else:
            register_uuid()
            # TODO: look into using Momoko for async
            #       processing using Tornado's IOLoop
            #       (LISTEN/NOTIFY currently not supported)
            #       https://github.com/FSX/momoko/issues/32
            self.db_session = init_sa(self.config, application_name=self.title)
            self.db_engine = self.db_session.get_bind()
            self.db_conn = self.db_engine.raw_connection()
            # Ensure we back out of any automatic transaction SQLAlchemy started
            self.db_conn.rollback()
            self.db_conn.set_session(autocommit=True)

            self.cursor = self.db_conn.cursor(cursor_factory=self.cursor_factory)
            self.cursor.arraysize = 1024
Code example #15
    def migrate(cls) -> None:
        """migrate data from mongodb"""
        mongo = get_mongodb()
        with pg_conn_context() as pg_conn, pg_conn.cursor() as cursor:
            register_uuid(conn_or_curs=cursor)
            results = mongo.get_collection("calendar_token").find()
            for each in results:
                insert_query = """
                INSERT INTO calendar_tokens (type, identifier, semester, token, create_time, last_used_time)
                    VALUES (%s,%s,%s,%s,%s,%s)
                """

                cursor.execute(
                    insert_query,
                    (each['type'], each['identifier'], each['semester'],
                     each['token'], each['create_time'],
                     each['last_used'] if 'last_used' in each else None))
            pg_conn.commit()
        print("Migration finished.")
Code example #16
    async def _connect(self):
        try:
            await self._poll(self._waiter, self._timeout)
        except Exception:
            self.close()
            raise
        if self._enable_json:
            extras.register_default_json(self._conn)
        if self._enable_uuid:
            extras.register_uuid(conn_or_curs=self._conn)
        if self._enable_hstore:
            oids = await self._get_oids()
            if oids is not None:
                oid, array_oid = oids
                extras.register_hstore(self._conn,
                                       oid=oid,
                                       array_oid=array_oid)

        return self
Code example #17
    def migrate(cls):
        """migrate data from mongodb"""
        mongo = get_mongodb()

        with pg_conn_context() as pg_conn, pg_conn.cursor() as cursor:
            register_uuid(conn_or_curs=cursor)
            register_hstore(conn_or_curs=cursor)
            results = mongo.get_collection("verification_requests").find()
            for each in results:
                insert_query = """
                INSERT INTO identity_verify_requests (request_id, identifier, method, status, create_time, extra)
                    VALUES (%s,%s,%s,%s,%s,%s)
                """

                cursor.execute(insert_query,
                               (each['request_id'], each['sid_orig'],
                                each['verification_method'], each['status'],
                                each['create_time'], {
                                    'password': each['password']
                                } if 'password' in each else None))
            pg_conn.commit()
        print("Migration finished.")
Code example #18
File: connection.py  Project: vir-mir/aiopg
def _connect(dsn=None, *, timeout=TIMEOUT, loop=None, enable_json=True,
             enable_hstore=True, enable_uuid=True, echo=False, **kwargs):
    if loop is None:
        loop = asyncio.get_event_loop()

    waiter = create_future(loop)
    conn = Connection(dsn, loop, timeout, waiter, bool(echo), **kwargs)
    try:
        yield from conn._poll(waiter, timeout)
    except Exception:
        conn.close()
        raise
    if enable_json:
        extras.register_default_json(conn._conn)
    if enable_uuid:
        extras.register_uuid(conn_or_curs=conn._conn)
    if enable_hstore:
        oids = yield from _enable_hstore(conn)
        if oids is not None:
            oid, array_oid = oids
            extras.register_hstore(conn._conn, oid=oid, array_oid=array_oid)
    return conn
Code example #19
def _connect(dsn=None, *, timeout=TIMEOUT, loop=None, enable_json=True,
             enable_hstore=True, enable_uuid=True, echo=False, **kwargs):
    if loop is None:
        loop = asyncio.get_event_loop()

    waiter = create_future(loop)
    conn = Connection(dsn, loop, timeout, waiter, bool(echo), **kwargs)
    try:
        yield from conn._poll(waiter, timeout)
    except Exception:
        conn.close()
        raise
    if enable_json:
        extras.register_default_json(conn._conn)
    if enable_uuid:
        extras.register_uuid(conn_or_curs=conn._conn)
    if enable_hstore:
        oids = yield from _enable_hstore(conn)
        if oids is not None:
            oid, array_oid = oids
            extras.register_hstore(conn._conn, oid=oid, array_oid=array_oid)
    return conn
Code example #20
    def insert_calendar_token(cls, resource_type: str, semester: str,
                              identifier: str) -> str:
        """
        Generate a calendar token, write it to the database, and return the token as a string.
        At this point last_used_time is NULL.

        :param resource_type: student/teacher
        :param semester: semester string
        :param identifier: student ID or staff ID
        :return: the token as a string
        """
        token = uuid.uuid4()

        with pg_conn_context() as conn, conn.cursor() as cursor:
            register_uuid(conn_or_curs=cursor)
            insert_query = """
            INSERT INTO calendar_tokens (type, identifier, semester, token, create_time)
                VALUES (%s,%s,%s,%s,%s);
            """
            cursor.execute(insert_query,
                           (resource_type, identifier, semester, token,
                            datetime.datetime.now()))
            conn.commit()
        return str(token)
Code example #21
    def _init_connection(self):

        database_name = os.getenv('DATABASE', 'example')
        config = {
            'user': os.getenv('DB_USER', os.getenv('USER')),
            'database': database_name,
            'password': os.getenv('DB_PASSWORD', None),
            'host': os.getenv('DB_HOST', 'localhost'),
        }

        psycopg2.extensions.register_type(register_uuid())
        psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
        psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)

        conn = None

        try:
            try:
                conn = psycopg2.connect(connection_factory=LoggingConnection,
                                        **config)
                conn.initialize(self._logger)
            except Exception as e:
                print("Database connection problem")
                self._logger.exception("Database connection problem")
                if os.getenv('CREATE_SCHEMA_IF_MISSING', "false") == "true":
                    #Unlikely to be the problem
                    config['database'] = 'postgres'
                    conn = psycopg2.connect(
                        connection_factory=LoggingConnection, **config)
                    conn.initialize(self._logger)
                    cur = conn.cursor()
                    cur.execute('CREATE DATABASE ' + database_name)
                    conn.commit()
                    conn.close()
                    self._tries = self._tries + 1
                    if self._tries < 2:
                        return self._init_connection()
                    else:
                        return None
        except Exception as e:
            self._logger.exception("Database connection problem")

        self._create_database(conn, database_name)

        return conn
Code example #22
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_READ_COMMITTED
from psycopg2.extras import NamedTupleCursor
from psycopg2.extras import register_uuid
from psycopg2.extensions import cursor as NormalCursor
from psycopg2 import OperationalError

from veil.model.collection import *
from veil.utility.json import *
from veil.backend.database.client import *

LOGGER = getLogger(__name__)

psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
psycopg2.extensions.register_type(register_uuid())


class CustomJsonAdapter(psycopg2.extras.Json):
    def __init__(self, adapted, dumps=None):
        super(CustomJsonAdapter, self).__init__(adapted, dumps)

    def dumps(self, obj):
        return to_readable_json(obj) if obj.get('readable', True) else to_json(obj)


psycopg2.extensions.register_adapter(dict, CustomJsonAdapter)
psycopg2.extras.register_default_json(globally=True, loads=lambda obj: objectify(from_json(obj)))
psycopg2.extras.register_default_jsonb(globally=True, loads=lambda obj: objectify(from_json(obj)))

Code example #23
File: db.py  Project: mjjs/tsoha-2021-cabin-browser
from psycopg2 import connect, extras
from flask import g
from config import DATABASE_URL
from user_repository import UserRepository
from cabin_repository import CabinRepository
from review_repository import ReviewRepository
from cabin_image_repository import CabinImageRepository
from reservation_repository import ReservationRepository
from municipality_repository import MunicipalityRepository
from keyword_repository import KeywordRepository

DATABASE_INIT_FILE = "create_tables.sql"

extras.register_uuid()


def get_db():
    db = getattr(g, "_database", None)

    if db is None:
        connection_pool = connect(DATABASE_URL)

        database = Database(connection_pool)

        db = g._database = database

    return db


class Database:
    def __init__(self, connection_pool):
Code example #24
File: app.py  Project: drytoastman/wwscc
def create_app(config=None):
    """ Setup the application for the WSGI server """

    def errorlog(exception):
        """ We want to log exception information to file for later investigation """
        traceback = get_current_traceback(ignore_system_exceptions=True, show_hidden_frames=True)
        log.error(traceback.plaintext)
        last = traceback.frames[-1]
        now = datetime.datetime.now().replace(microsecond=0)
        return render_template("error.html", now=now, name=os.path.basename(last.filename), line=last.lineno, exception=exception)

    def preprocessor(endpoint, values):
        """ Remove the requirement for blueprint functions to put series/eventid in their function definitions """
        if values is not None:
            g.series = values.pop('series', None)
            g.eventid = values.pop('eventid', None)

    def urldefaults(endpoint, values):
        """ Make sure series,eventid from the subapp URLs are available for url_for relative calls """
        for u in ('series', 'eventid'):
            if u not in values and getattr(g, u) and current_app.url_map.is_endpoint_expecting(endpoint, u):
                values[u] = getattr(g, u)

    def t3(val, sign=False):
        """ Wrapper to safely print floats as XXX.123 format """
        if val is None: return ""
        if type(val) is not float: return str(val)
        try:
            return (sign and "%+0.3f" or "%0.3f") % (val,)
        except:
            return str(val)

    def msort(val, *attr):
        """ Filter to sort on multiple attributes """
        ret = list(val)
        ret.sort(key=attrgetter(*attr))
        return ret

    # setup uuid for postgresql
    register_uuid()

    # Setup the application with default configuration
    theapp = FlaskWithPool("nwrsc")
    theapp.config.update({
        "PORT":                    int(os.environ.get('NWRSC_PORT',     80)),
        "DEBUG":                  bool(os.environ.get('NWRSC_DEBUG',    False)),
        "PROFILE":                bool(os.environ.get('NWRSC_PROFILE',  False)),
        "DBHOST":                      os.environ.get('NWRSC_DBHOST',   'db'),
        "DBPORT":                  int(os.environ.get('NWRSC_DBPORT',   5432)),
        "DBUSER":                      os.environ.get('NWRSC_DBUSER',   'localuser'),
        "SHOWLIVE":               bool(os.environ.get('NWRSC_SHOWLIVE', True)),
        "LOG_LEVEL":                   os.environ.get('NWRSC_LOGLEVEL', 'INFO'),
        "SECRET_KEY":                  os.environ.get('NWRSC_SECRET',   'secret stuff here'),
        "ASSETS_DEBUG":           False,
        "LOGGER_HANDLER_POLICY":  "None",
    })

    theapp.config['TEMPLATES_AUTO_RELOAD'] = theapp.config['DEBUG']
    theapp.config['LOG_STDERR']            = theapp.config['DEBUG']
    #"RUN_MERGER":

    # Setup basic top level URL handling followed by Blueprints for the various sections
    theapp.url_value_preprocessor(preprocessor)
    theapp.url_defaults(urldefaults)
    theapp.add_url_rule('/',             'toresults', redirect_to='/results')
    theapp.register_blueprint(Admin,     url_prefix="/admin/<series>")
    theapp.register_blueprint(Announcer, url_prefix="/announcer/<series>")
    theapp.register_blueprint(Json,      url_prefix="/json/<series>")
    theapp.register_blueprint(Register,  url_prefix="/register")
    theapp.register_blueprint(Results,   url_prefix="/results/<series>")
    theapp.register_blueprint(Timer,     url_prefix="/timer")
    theapp.register_blueprint(Xml,       url_prefix="/xml/<series>")

    # Some static things that need to show up at the root level
    @theapp.route('/favicon.ico')
    def favicon(): return send_from_directory('static/images', 'cone.png')
    @theapp.route('/robots.txt')
    def robots(): return send_from_directory('static', 'robots.txt')
    @theapp.route('/<subapp>/')
    def serieslist(subapp): return render_template('serieslist.html', subapp=subapp, serieslist=Series.list())
    @theapp.before_request
    def onrequest(): current_app.db_prepare()
    @theapp.teardown_request
    def teardown(exc=None): current_app.db_return()
    @theapp.after_request
    def logrequest(response):
        log.info("%s %s?%s %s %s (%s)" % (request.method, request.path, request.query_string, response.status_code, response.content_length, response.content_encoding))
        return response

    theapp._reset_pool()

    # extra Jinja bits
    theapp.jinja_env.filters['t3'] = t3
    theapp.jinja_env.filters['msort'] = msort
    theapp.jinja_env.filters['to_json'] = to_json

    # Configure our logging to use webserver.log with rotation and optionally stderr
    if not theapp.debug:
        theapp.register_error_handler(Exception, errorlog)

    level = getattr(logging, theapp.config['LOG_LEVEL'], logging.INFO)
    fmt  = logging.Formatter('%(asctime)s %(name)s %(levelname)s: %(message)s', '%m/%d/%Y %H:%M:%S')
    root = logging.getLogger()
    root.setLevel(level)
    root.handlers = []

    fhandler = RotatingFileHandler(os.path.expanduser('~/nwrscwebserver.log'), maxBytes=1000000, backupCount=10)
    fhandler.setFormatter(fmt)
    fhandler.setLevel(level)
    root.addHandler(fhandler)
    logging.getLogger('werkzeug').setLevel(logging.WARN)

    if theapp.config.get('LOG_STDERR', False):
        shandler = StreamHandler()
        shandler.setFormatter(fmt)
        shandler.setLevel(level)
        root.addHandler(shandler)

    # Setting up WebAssets, crypto stuff, compression and profiling
    Environment(theapp)
    Compress(theapp)
    if theapp.config.get('PROFILE', False):
        theapp.wsgi_app = ProfilerMiddleware(theapp.wsgi_app, restrictions=[30])
    theapp.hasher = Bcrypt(theapp)
    theapp.usts = URLSafeTimedSerializer(theapp.config["SECRET_KEY"])

    if theapp.config.get('RUN_MERGER', False):
        MergeProcess().start()

    log.info("Scorekeeper App created")
    return theapp
Code example #25
import uuid

from django.db import models
from django.utils import six
from django.core.exceptions import ImproperlyConfigured

import psycopg2
from psycopg2.extras import register_uuid, UUID_adapter

# TODO: Shouldn't call this unless the field is used
register_uuid()

class UUIDField(six.with_metaclass(models.SubfieldBase, models.Field)):
    description = """
    Python UUID field. If used as a primary key, will automatically
    generate v1 UUIDs.
    """
    
    def db_type(self, connection):
        return "uuid"
    
    def pre_save(self, model_instance, add):
        current_val = getattr(model_instance, self.attname, None)
        if self.primary_key and add:
            if current_val is None:
                current_val = uuid.uuid1()
        return current_val
    
    def get_prep_value(self, value):
        if value is None:
            return value
Code example #26
File: adapter.py  Project: Shasthojoy/veil
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_READ_COMMITTED
from psycopg2.extras import NamedTupleCursor
from psycopg2.extras import register_uuid
from psycopg2.extensions import cursor as NormalCursor
from psycopg2 import DatabaseError, OperationalError, InterfaceError, InternalError

from veil.model.collection import *
from veil.utility.json import *
from veil.backend.database.client import *

LOGGER = getLogger(__name__)

psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
psycopg2.extensions.register_type(register_uuid())


class CustomJsonAdapter(psycopg2.extras.Json):
    def __init__(self, adapted, dumps=None):
        super(CustomJsonAdapter, self).__init__(adapted, dumps)

    def dumps(self, obj):
        return to_readable_json(obj) if obj.get('readable', True) else to_json(obj)


psycopg2.extensions.register_adapter(dict, CustomJsonAdapter)
psycopg2.extras.register_default_json(globally=True, loads=lambda obj: objectify(from_json(obj)))
psycopg2.extras.register_default_jsonb(globally=True, loads=lambda obj: objectify(from_json(obj)))

Code example #27
    def initialize(self, context):

        if not psycopg2:
            raise ImportError(
                'The psycopg2 module is required for the ' +
                'Postgresql Output Processor: {}'.format(import_error_msg))
        # N.B. Typecasters are for postgres->python and adapters the opposite
        self.connect_to_database()
        self.cursor = self.conn.cursor()
        # Register the adapters and typecasters for enum types
        self.cursor.execute("SELECT NULL::status_enum")
        status_oid = self.cursor.description[0][1]
        self.cursor.execute("SELECT NULL::param_enum")
        param_oid = self.cursor.description[0][1]
        LEVEL = psycopg2.extensions.new_type(
            (status_oid,), "LEVEL", postgres_convert.cast_level)
        psycopg2.extensions.register_type(LEVEL)
        PARAM = psycopg2.extensions.new_type(
            (param_oid,), "PARAM", postgres_convert.cast_vanilla)
        psycopg2.extensions.register_type(PARAM)
        psycopg2.extensions.register_adapter(level, postgres_convert.return_as_is(postgres_convert.adapt_level))
        psycopg2.extensions.register_adapter(
            postgres_convert.ListOfLevel, postgres_convert.adapt_ListOfX(postgres_convert.adapt_level))
        psycopg2.extensions.register_adapter(KernelVersion, postgres_convert.adapt_vanilla)
        psycopg2.extensions.register_adapter(
            CpuInfo, postgres_convert.adapt_vanilla)
        psycopg2.extensions.register_adapter(
            collections.OrderedDict, extras.Json)
        psycopg2.extensions.register_adapter(dict, extras.Json)
        psycopg2.extensions.register_adapter(
            KernelConfig, postgres_convert.create_iterable_adapter(2, explicit_iterate=True))
        # Register ready-made UUID type adapter
        extras.register_uuid()
        # Insert a run_uuid which will be globally accessible during the run
        self.run_uuid = uuid.UUID(str(uuid.uuid4()))
        run_output = context.run_output
        retry_on_status = postgres_convert.ListOfLevel(run_output.run_config.retry_on_status)
        self.cursor.execute(
            self.sql_command['create_run'],
            (
                self.run_uuid,
                run_output.event_summary,
                run_output.basepath,
                run_output.status,
                run_output.state.timestamp,
                run_output.info.run_name,
                run_output.info.project,
                retry_on_status,
                run_output.run_config.max_retries,
                run_output.run_config.bail_on_init_failure,
                run_output.run_config.allow_phone_home,
                run_output.info.uuid,
                run_output.info.start_time,
                run_output.metadata))
        self.target_uuid = uuid.uuid4()
        target_info = context.target_info
        self.cursor.execute(
            self.sql_command['create_target'],
            (
                self.target_uuid,
                self.run_uuid,
                target_info.target,
                target_info.cpus,
                target_info.os,
                target_info.os_version,
                target_info.hostid,
                target_info.hostname,
                target_info.abi,
                target_info.is_rooted,
                # Important caveat: kernel_version is the name of the column in the Targets table
                # However, this refers to kernel_version.version, not to kernel_version as a whole
                target_info.kernel_version.version,
                target_info.kernel_version.release,
                target_info.kernel_version.sha1,
                target_info.kernel_config,
                target_info.sched_features))
        # Commit cursor commands
        self.conn.commit()
Code example #28
def sketch_package_file(package_file_key,
        blob_name,
        dataset_format,
        max_records,
        table_sample_size,
        minhash_size,
        minhash_seed,
        hyperloglog_p,
        column_sample_size,
        enable_word_vector_data):
    """Generate column sketches and table sample of the table in the
    package file.

    Args:
        package_file_key: the primary key of package_files table.
        blob_name: the relative path to the blob of the package file.
        dataset_format: one of csv, jsonl, and avro.
        max_records: the maximum number of records to sketch.
        table_sample_size: the number of records include in the table sample.
        minhash_size: the number of permutation (hash functions) to use for
            MinHash sketches.
        minhash_seed: the random seed for generating MinHash sketches'
            permutations.
        hyperloglog_p: the precision parameter used by HyperLogLog.
        column_sample_size: the number of non-random sampled values.
        enable_word_vector_data: whether to create word vectors for the
            data values -- this can be 10x more expensive.
    """
    # Get sketcher
    if dataset_format not in _sketchers:
        raise ValueError("{} is not supported".format(dataset_format))
    sketcher = _sketchers[dataset_format]

    # Sketch the file.
    try:
        with storage.get_file(blob_name) as input_file:
            table_sketch = sketcher(input_file,
                    record_sample_size=table_sample_size,
                    max_records=max_records,
                    minhash_size=minhash_size,
                    minhash_seed=minhash_seed,
                    hyperloglog_p=hyperloglog_p,
                    sample_size=column_sample_size,
                    enable_word_vector_data=enable_word_vector_data
                    )
    except Exception as e:
        logger.error("Sketching {} ({}) failed due to {}".format(
            blob_name, package_file_key, e))
        raise e

    try:
        # Save sketches to the database
        # Initialize Postgres connection.
        conn = psycopg2.connect(**db_configs)
        cur = conn.cursor(cursor_factory=RealDictCursor)
        register_uuid(conn_or_curs=cur)
        # Save column sketches
        column_sketch_ids = []
        for sketch in table_sketch.column_sketches:
            cur.execute(r"""INSERT INTO findopendata.column_sketches
                    (
                        package_file_key,
                        id,
                        column_name,
                        sample,
                        count,
                        empty_count,
                        out_of_vocabulary_count,
                        numeric_count,
                        distinct_count,
                        word_vector_column_name,
                        word_vector_data,
                        minhash,
                        seed,
                        hyperloglog
                    )
                    VALUES (%s, uuid_generate_v1mc(),
                        %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                    ON CONFLICT (package_file_key, column_name)
                    DO UPDATE
                    SET updated = current_timestamp,
                    sample = EXCLUDED.sample,
                    count = EXCLUDED.count,
                    empty_count = EXCLUDED.empty_count,
                    out_of_vocabulary_count = EXCLUDED.out_of_vocabulary_count,
                    numeric_count = EXCLUDED.numeric_count,
                    distinct_count = EXCLUDED.distinct_count,
                    word_vector_column_name = EXCLUDED.word_vector_column_name,
                    word_vector_data = EXCLUDED.word_vector_data,
                    minhash = EXCLUDED.minhash,
                    seed = EXCLUDED.seed,
                    hyperloglog = EXCLUDED.hyperloglog
                    RETURNING id::uuid
                    """, (
                        package_file_key,
                        sketch.column_name,
                        sketch.sample,
                        sketch.count,
                        sketch.empty_count,
                        sketch.out_of_vocabulary_count,
                        sketch.numeric_count,
                        sketch.distinct_count,
                        sketch.word_vector_column_name,
                        sketch.word_vector_data,
                        sketch.minhash,
                        sketch.seed,
                        sketch.hyperloglog,
                        ))
            column_sketch_ids.append(cur.fetchone()["id"])
        # Save table samples, column names and column sketch IDs.
        cur.execute(r"""UPDATE findopendata.package_files
                        SET column_names = %s,
                        column_sketch_ids = %s,
                        sample = %s
                        WHERE key = %s
                        """, (
                            table_sketch.column_names,
                            column_sketch_ids,
                            Json(table_sketch.record_sample),
                            package_file_key,
                            ))
        # Commit
        conn.commit()
        cur.close()
        conn.close()
    except Exception as e:
        logger.error("Error saving sketches of {} ({}) due to {}".format(
            blob_name, package_file_key, e))
        raise e

    # Finish
    logger.info("Sketching {} ({}) successful".format(blob_name,
        package_file_key))
Code example #29
import logging
import math
import os
import time

from collections import defaultdict, deque
from datetime import datetime
from psycopg2.extras import register_uuid
from urllib.parse import urlparse
from uuid import uuid4

from benchmarks.htap.lib.analytical import QUERY_IDS, is_ignored_query
from s64da_benchmark_toolkit.dbconn import DBConn

LOG = logging.getLogger()
register_uuid()

QUERY_TYPES = [
    'new_order', 'payment', 'order_status', 'delivery', 'stock_level'
]


class OLTPBucketStats:
    def __init__(self):
        self.ok_transactions = 0
        self.err_transactions = 0
        self.min_runtime = float('inf')
        self.max_runtime = 0
        self.acc_runtime = 0

    def add_sample(self, status, runtime):
Code example #30
File: sql2rwn_xml.py  Project: Zebradil/RuWordNet
    "-c",
    "--connection-string",
    type=str,
    help="Postgresql database connection string ({})".format(connection_string),
    default=connection_string,
)
parser.add_argument(
    "-o",
    "--output-directory",
    help="A directory where xml-files will be saved",
    default=os.path.join(PKG_ROOT, "out", "rwn"),
)

ARGS = parser.parse_args()

extras.register_uuid()


class Generator:
    def __init__(self, out_dir: str, connection):
        self.connection = connection
        self.out_dir = out_dir
        self.synset_counter = 0
        self.sense_counter = 0
        self.synsets = []
        self.senses = []
        self.synset_relations = []

    def run(self):
        print("Start")
Code example #31
File: taskui.py  Project: s3nd3r5/tasklog
    for t in tasks:
        tt = t.type
        key = "Total {}".format(tt.title())
        if stats.get(key):
            stats[key] += 1
        else:
            stats[key] = 1
    return stats

# utils
logging.basicConfig(format='%(asctime)s - [%(levelname)s]\t%(message)s',
        datefmt='%Y-%m-%dT%H:%M:%S',
        level=logging.INFO)
config = configparser.ConfigParser()
config.read_file(open('/secrets/tasklog.cfg'))
register_uuid() # allow DB to use UUIDs

# flask
app = Flask(__name__)
configureApp(app, config.items('taskui', raw=True))
conn = db_connect(config['db'])

@app.route('/')
@app.route('/open')
def open():
    cursor = None
    try:
        cursor = conn.cursor()
        cursor.execute(SQL_SELECT_OPEN)
        res = cursor.fetchall()
        tasks = map_to_tasks(res)
Code example #32
File: postgresql.py  Project: Binse-Park/lisa-1
    def initialize(self, context):

        if not psycopg2:
            raise ImportError(
                'The psycopg2 module is required for the ' +
                'Postgresql Output Processor: {}'.format(import_error_msg))
        # N.B. Typecasters are for postgres->python and adapters the opposite
        self.connect_to_database()
        self.cursor = self.conn.cursor()
        self.verify_schema_versions()

        # Register the adapters and typecasters for enum types
        self.cursor.execute("SELECT NULL::status_enum")
        status_oid = self.cursor.description[0][1]
        self.cursor.execute("SELECT NULL::param_enum")
        param_oid = self.cursor.description[0][1]
        LEVEL = psycopg2.extensions.new_type((status_oid, ), "LEVEL",
                                             cast_level)
        psycopg2.extensions.register_type(LEVEL)
        PARAM = psycopg2.extensions.new_type((param_oid, ), "PARAM",
                                             cast_vanilla)
        psycopg2.extensions.register_type(PARAM)
        psycopg2.extensions.register_adapter(level, return_as_is(adapt_level))
        psycopg2.extensions.register_adapter(ListOfLevel,
                                             adapt_ListOfX(adapt_level))
        psycopg2.extensions.register_adapter(KernelVersion, adapt_vanilla)
        psycopg2.extensions.register_adapter(CpuInfo, adapt_vanilla)
        psycopg2.extensions.register_adapter(collections.OrderedDict,
                                             extras.Json)
        psycopg2.extensions.register_adapter(dict, extras.Json)
        psycopg2.extensions.register_adapter(
            KernelConfig, create_iterable_adapter(2, explicit_iterate=True))
        # Register ready-made UUID type adapter
        extras.register_uuid()

        # Insert a run_uuid which will be globally accessible during the run
        self.run_uuid = uuid.UUID(str(uuid.uuid4()))
        run_output = context.run_output
        retry_on_status = ListOfLevel(run_output.run_config.retry_on_status)
        self.cursor.execute(
            self.sql_command['create_run'],
            (
                self.run_uuid,
                run_output.event_summary,
                run_output.basepath,
                run_output.status,
                run_output.state.timestamp,
                run_output.info.run_name,
                run_output.info.project,
                run_output.info.project_stage,
                retry_on_status,
                run_output.run_config.max_retries,
                run_output.run_config.bail_on_init_failure,
                run_output.run_config.allow_phone_home,
                run_output.info.uuid,
                run_output.info.start_time,
                run_output.metadata,
                json.dumps(run_output.state.to_pod()),
                run_output.result._pod_version,  # pylint: disable=protected-access
                run_output.result._pod_serialization_version,  # pylint: disable=protected-access
            ))
        self.target_uuid = uuid.uuid4()
        target_info = context.target_info
        target_pod = target_info.to_pod()
        self.cursor.execute(
            self.sql_command['create_target'],
            (
                self.target_uuid,
                self.run_uuid,
                target_pod['target'],
                target_pod['cpus'],
                target_pod['os'],
                target_pod['os_version'],
                target_pod['hostid'],
                target_pod['hostname'],
                target_pod['abi'],
                target_pod['is_rooted'],
                # Important caveat: kernel_version is the name of the column in the Targets table
                # However, this refers to kernel_version.version, not to kernel_version as a whole
                target_pod['kernel_version'],
                target_pod['kernel_release'],
                target_info.kernel_version.sha1,
                target_info.kernel_config,
                target_pod['sched_features'],
                target_pod['page_size_kb'],
                # Android Specific
                list(target_pod.get('screen_resolution', [])),
                target_pod.get('prop'),
                target_pod.get('android_id'),
                target_pod.get('pod_version'),
                target_pod.get('pod_serialization_version'),
            ))

        # Commit cursor commands
        self.conn.commit()
Code example #33
	def __init__(self, database):
		super(Database, self).__init__(database)
		register_type(psycopg2.extensions.UNICODE)
		register_uuid()
		dsn = Database.build_dsn(database)
		self._pool = ThreadedConnectionPool(config.get('database', 'min_database_connections'), config.get('database', 'max_database_connections'), dsn)
Code example #34
def register_types(conn):
    real_conn = conn._con._con
    # conn is a PooledDB (or PersistentDB) connection: its _con is a SteadyDB
    # connection, whose _con in turn is the raw psycopg2 connection object
    register_uuid(conn_or_curs=real_conn)
    register_hstore(conn_or_curs=real_conn)
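
A usage sketch under the assumption that the pool comes from the DBUtils package (the import path differs between DBUtils releases) and that the DSN is a placeholder:

import psycopg2
from dbutils.pooled_db import PooledDB  # older DBUtils: from DBUtils.PooledDB import PooledDB

pool = PooledDB(creator=psycopg2, maxconnections=5, dsn="dbname=test")  # hypothetical DSN
conn = pool.connection()
register_types(conn)  # reaches the raw psycopg2 connection via conn._con._con
conn.close()          # returns the connection to the pool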
Code example #35
    def initialize(self, context):

        if not psycopg2:
            raise ImportError(
                'The psycopg2 module is required for the ' +
                'Postgresql Output Processor: {}'.format(import_error_msg))
        # N.B. Typecasters are for postgres->python and adapters the opposite
        self.connect_to_database()
        self.cursor = self.conn.cursor()
        # Register the adapters and typecasters for enum types
        self.cursor.execute("SELECT NULL::status_enum")
        status_oid = self.cursor.description[0][1]
        self.cursor.execute("SELECT NULL::param_enum")
        param_oid = self.cursor.description[0][1]
        LEVEL = psycopg2.extensions.new_type(
            (status_oid,), "LEVEL", postgres_convert.cast_level)
        psycopg2.extensions.register_type(LEVEL)
        PARAM = psycopg2.extensions.new_type(
            (param_oid,), "PARAM", postgres_convert.cast_vanilla)
        psycopg2.extensions.register_type(PARAM)
        psycopg2.extensions.register_adapter(level, postgres_convert.return_as_is(postgres_convert.adapt_level))
        psycopg2.extensions.register_adapter(
            postgres_convert.ListOfLevel, postgres_convert.adapt_ListOfX(postgres_convert.adapt_level))
        psycopg2.extensions.register_adapter(KernelVersion, postgres_convert.adapt_vanilla)
        psycopg2.extensions.register_adapter(
            CpuInfo, postgres_convert.adapt_vanilla)
        psycopg2.extensions.register_adapter(
            collections.OrderedDict, extras.Json)
        psycopg2.extensions.register_adapter(dict, extras.Json)
        psycopg2.extensions.register_adapter(
            KernelConfig, postgres_convert.create_iterable_adapter(2, explicit_iterate=True))
        # Register ready-made UUID type adapter
        extras.register_uuid()
        # Insert a run_uuid which will be globally accessible during the run
        self.run_uuid = uuid.UUID(str(uuid.uuid4()))
        run_output = context.run_output
        retry_on_status = postgres_convert.ListOfLevel(run_output.run_config.retry_on_status)
        self.cursor.execute(
            self.sql_command['create_run'],
            (
                self.run_uuid,
                run_output.event_summary,
                run_output.basepath,
                run_output.status,
                run_output.state.timestamp,
                run_output.info.run_name,
                run_output.info.project,
                retry_on_status,
                run_output.run_config.max_retries,
                run_output.run_config.bail_on_init_failure,
                run_output.run_config.allow_phone_home,
                run_output.info.uuid,
                run_output.info.start_time,
                run_output.metadata))
        self.target_uuid = uuid.uuid4()
        target_info = context.target_info
        self.cursor.execute(
            self.sql_command['create_target'],
            (
                self.target_uuid,
                self.run_uuid,
                target_info.target,
                target_info.cpus,
                target_info.os,
                target_info.os_version,
                target_info.hostid,
                target_info.hostname,
                target_info.abi,
                target_info.is_rooted,
                # Important caveat: kernel_version is the name of the column in the Targets table
                # However, this refers to kernel_version.version, not to kernel_version as a whole
                target_info.kernel_version.version,
                target_info.kernel_version.release,
                target_info.kernel_version.sha1,
                target_info.kernel_config,
                target_info.sched_features))
        # Commit cursor commands
        self.conn.commit()
Code example #36
File: psycopg2.py  Project: cloudera/hue
 def on_connect(conn):
     extras.register_uuid(None, conn)
Code example #37
File: basic.py  Project: spuriousdata/django-pgfields
def init_uuid(sender, **kwargs):
    register_uuid()
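
init_uuid has the signature of a Django signal receiver; one plausible wiring (an assumption, not shown in the snippet) is the connection_created signal, so the typecaster is registered as soon as a database connection is opened:

from django.db.backends.signals import connection_created

connection_created.connect(init_uuid)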
Code example #38
File: psycopg2.py  Project: kkirsche/sqlalchemy
 def on_connect(dbapi_conn):
     extras.register_uuid(None, dbapi_conn)
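
Inside a SQLAlchemy dialect, on_connect returns a callable that is run for every new DBAPI connection. Outside the dialect the same effect can be achieved with an engine-level event listener; a sketch assuming a psycopg2-backed engine URL:

from psycopg2 import extras
from sqlalchemy import create_engine, event

engine = create_engine("postgresql+psycopg2://localhost/test")  # hypothetical URL

@event.listens_for(engine, "connect")
def receive_connect(dbapi_conn, connection_record):
    extras.register_uuid(None, dbapi_conn)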
Code example #39
 def __init__(self, connection_url: str, max_connections: int):
     self._connection_url = connection_url
     self._max_connections = max_connections
     self._pool: Optional[ThreadedConnectionPool] = None
     register_uuid()
Code example #40
 def on_connect(conn):
     extras.register_uuid(None, conn)
Code example #41
File: main.py  Project: ekimekim/wubloader
def main(dbconnect,
         sheets_creds_file,
         edit_url,
         bustime_start,
         sheet_id,
         worksheet_names,
         metrics_port=8005,
         backdoor_port=0,
         allocate_ids=False,
         playlist_worksheet=None):
    """
	Sheet sync constantly scans a Google Sheets sheet and a database, copying inputs from the sheet
	to the DB and outputs from the DB to the sheet.

	With the exception of id allocation, all operations are idempotent and multiple sheet syncs
	may be run for redundancy.
	"""
    common.PromLogCountsHandler.install()
    common.install_stacksampler()
    prom.start_http_server(metrics_port)

    register_uuid()

    if backdoor_port:
        gevent.backdoor.BackdoorServer(('127.0.0.1', backdoor_port),
                                       locals=locals()).start()

    stop = gevent.event.Event()
    gevent.signal_handler(signal.SIGTERM, stop.set)  # shut down on sigterm

    logging.info("Starting up")

    dbmanager = DBManager(dsn=dbconnect)
    while True:
        try:
            # Get a test connection so we know the database is up,
            # this produces a clearer error in cases where there's a connection problem.
            conn = dbmanager.get_conn()
        except Exception:
            delay = common.jitter(10)
            logging.info(
                'Cannot connect to database. Retrying in {:.0f} s'.format(
                    delay))
            stop.wait(delay)
        else:
            # put it back so it gets reused on next get_conn()
            dbmanager.put_conn(conn)
            break

    sheets_creds = json.load(open(sheets_creds_file))

    sheets = Sheets(
        client_id=sheets_creds['client_id'],
        client_secret=sheets_creds['client_secret'],
        refresh_token=sheets_creds['refresh_token'],
    )

    SheetSync(stop, dbmanager, sheets, sheet_id, worksheet_names, edit_url,
              bustime_start, allocate_ids, playlist_worksheet).run()

    logging.info("Gracefully stopped")