def init(self, database, **kwargs): self.min_connections = 1 self.max_connections = 1 super().init(database, **kwargs) self.init_async() @property def use_speedups(self): return False @use_speedups.setter def use_speedups(self, value): pass register_database(PostgresqlDatabase, 'postgres+async', 'postgresql+async') class PooledPostgresqlDatabase(AsyncPostgresqlMixin, peewee.PostgresqlDatabase): """PosgreSQL database driver providing **single drop-in sync** connection and **async connections pool** interface. :param max_connections: connections pool size Example:: database = PooledPostgresqlDatabase('test', max_connections=20) See also: http://peewee.readthedocs.io/en/latest/peewee/api.html#PostgresqlDatabase
# NOTE(review): this chunk relies on names imported earlier in the original
# file and not visible here (logging, MySQLDatabase, Model, connect,
# register_database) — confirm against the full module.
from playhouse.shortcuts import ReconnectMixin
from playhouse.sqliteq import SqliteQueueDatabase

from spider_admin_pro.config import SCHEDULE_HISTORY_DATABASE_URL
from spider_admin_pro.utils.sqlite_util import make_sqlite_dir

# Show peewee query logs (echo every SQL statement to the console).
logger = logging.getLogger('peewee')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())


class ReconnectSqliteDatabase(ReconnectMixin, SqliteQueueDatabase):
    """Queued SQLite database that transparently reconnects on errors."""
    pass


class ReconnectMySQLDatabase(ReconnectMixin, MySQLDatabase):
    """MySQL database that transparently reconnects on errors."""
    pass


# Route "sqlite://" and "mysql://" db URLs to the reconnecting drivers.
register_database(ReconnectSqliteDatabase, 'sqlite')
register_database(ReconnectMySQLDatabase, 'mysql')

# Ensure the directory for the SQLite file exists before connecting.
make_sqlite_dir(SCHEDULE_HISTORY_DATABASE_URL)

db = connect(url=SCHEDULE_HISTORY_DATABASE_URL)


class BaseModel(Model):
    """Base class binding all schedule-history models to ``db``."""

    class Meta:
        # All subclasses share the connection created above.
        database = db
'TABLE_SCHEMA = %s AND TABLE_TYPE = %s ORDER BY TABLE_NAME') cursor = self.execute_sql(query, ( schema, 'BASE TABLE', ), require_commit=False) else: query = ('SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE ' 'TABLE_TYPE = %s ORDER BY TABLE_NAME') cursor = self.execute_sql(query, ('BASE TABLE', ), require_commit=False) return [row[0] for row in cursor.fetchall()] def execute_sql(self, sql, params, *args, **kwargs): # convert params to tuple params = tuple(params) return super(MssqlDatabase, self).execute_sql(sql, params, *args, **kwargs) register_database(MssqlDatabase, 'mssql') if PooledDatabase: class PooledMssqlDatabase(PooledDatabase, MssqlDatabase): pass # TODO: implement _is_closed() register_database(PooledMssqlDatabase, 'mssql+pool')
# -*- coding: utf-8 -*-
from playhouse.db_url import schemes, register_database, connect as peewee_connect

from .database import TimerMySQLDatabase
from .model import DictModel
from .decorator import to_dict, to_data, timer

# Expose TimerMySQLDatabase through "mysql+timer://" database URLs.
register_database(TimerMySQLDatabase, 'mysql+timer')


def connect(db_url=None, **kwargs):
    """Open a peewee database connection.

    When *db_url* is given, delegate to ``playhouse.db_url.connect``.
    Otherwise a ``scheme`` keyword argument must name a registered
    driver; every remaining keyword argument is forwarded to it.

    Raises ``KeyError`` if ``scheme`` is absent or not registered.
    """
    if not db_url:
        driver_name = kwargs.pop('scheme')
        driver = schemes[driver_name]
        return driver(**kwargs)
    return peewee_connect(db_url, **kwargs)
def init(self, database, **kwargs): self.min_connections = 1 self.max_connections = 1 super().init(database, **kwargs) self.init_async() @property def use_speedups(self): return False @use_speedups.setter def use_speedups(self, value): pass register_database(PostgresqlDatabase, 'postgres+async', 'postgresql+async') class PooledPostgresqlDatabase(AsyncPostgresqlMixin, peewee.PostgresqlDatabase): """PosgreSQL database driver providing **single drop-in sync** connection and **async connections pool** interface. :param max_connections: connections pool size Example:: database = PooledPostgresqlDatabase('test', max_connections=20) See also: http://peewee.readthedocs.io/en/latest/peewee/api.html#PostgresqlDatabase
) app.url_map.strict_slashes = False oauth = OAuth2Provider(app) api = Api(app) limiter = Limiter( app, key_func=get_remote_address, headers_enabled=True, default_limits=[ "40 per second", # burst: 40/sec "1440 per minute", # allowed max: 24/sec ]) DATABASE_URL = app.config.get("DATABASE_URL") # TODO: implement connection factory db_url.register_database(PgDbWithFailover, "pg+failover", "postgres+failover") db_url.PostgresqlDatabase = ReconnectablePostgresqlDatabase if DATABASE_URL.startswith("sqlite"): db = db_url.connect(DATABASE_URL, autorollback=True) else: db = db_url.connect(DATABASE_URL, autorollback=True, connect_timeout=3) class JSONEncoder(_JSONEncoder): """date and datetime encoding into ISO format for JSON payload.""" def default(self, o): """Provide default endocing for date and datetime.""" if isinstance(o, datetime): return o.isoformat(timespec="seconds") elif isinstance(o, date): return o.isoformat()
import importlib
import inspect
import logging

import peewee
import peewee_async
from playhouse.db_url import connect, register_database

# Route both "postgres" and "postgresql" URL schemes (plain and pooled)
# to the async-capable peewee_async drivers.
register_database(peewee_async.PostgresqlDatabase, 'postgres')
register_database(peewee_async.PooledPostgresqlDatabase, 'postgres+pool')
register_database(peewee_async.PostgresqlDatabase, 'postgresql')
register_database(peewee_async.PooledPostgresqlDatabase, 'postgresql+pool')

logger = logging.getLogger(__name__)

# Late-bound database handle; initialized once a real URL is supplied.
database_proxy = peewee.Proxy()


class BaseModel(peewee.Model):
    """Base class binding all models to the (lazily initialized) proxy."""

    class Meta:
        database = database_proxy


class XanmelDB:
    """Holds the database connection and its async manager.

    :param db_url: a ``playhouse.db_url``-style connection URL; when
        falsy, no connection is made and ``db``/``mgr`` stay ``None``.
    """

    def __init__(self, db_url):
        if db_url:
            self.db = connect(db_url)
            self.mgr = peewee_async.Manager(database_proxy)
            database_proxy.initialize(self.db)
        else:
            self.db = None
            # Fix: previously ``mgr`` was never assigned on this branch,
            # so any later access raised AttributeError instead of being
            # testable with ``is None``.
            self.mgr = None
self.min_connections = 1 self.max_connections = 1 super().init(database, **kwargs) self.init_async(enable_json=True, enable_hstore=self._register_hstore) @property def use_speedups(self): return False @use_speedups.setter def use_speedups(self, value): pass register_database(PostgresqlExtDatabase, 'postgresext+async', 'postgresqlext+async') class PooledPostgresqlExtDatabase(AsyncPostgresqlMixin, ext.PostgresqlExtDatabase): """PosgreSQL database extended driver providing **single drop-in sync** connection and **async connections pool** interface. JSON fields support is always enabled, HStore supports is enabled by default, but can be disabled with ``register_hstore=False`` argument. :param max_connections: connections pool size Example:: database = PooledPostgresqlExtDatabase('test', register_hstore=False,
except OperationalError: if not self.is_closed(): self.close() try: cursor = self.get_cursor() cursor.execute(sql, params or ()) if require_commit and self.get_autocommit(): self.commit() except OperationalError: cursor = None sleep(5) continue return cursor register_database(MySQLRetryDatabase, 'mysql') # prepare connection db = connect(conf['database']['connection']) while True: try: log.info('Connecting database...') db.connect() if db: break except: log.error(traceback.format_exc()) # take a break for 5 secconds sleep(5)
def init(self, database, **kwargs): self.min_connections = 1 self.max_connections = 1 super().init(database, **kwargs) self.init_async(enable_json=True, enable_hstore=self._register_hstore) @property def use_speedups(self): return False @use_speedups.setter def use_speedups(self, value): pass register_database(PostgresqlExtDatabase, 'postgresext+async', 'postgresqlext+async') class PooledPostgresqlExtDatabase(AsyncPostgresqlMixin, ext.PostgresqlExtDatabase): """PosgreSQL database extended driver providing **single drop-in sync** connection and **async connections pool** interface. JSON fields support is always enabled, HStore supports is enabled by default, but can be disabled with ``register_hstore=False`` argument. :param max_connections: connections pool size Example:: database = PooledPostgresqlExtDatabase('test', register_hstore=False,