class DataBaseService(Service):
    """Manage all services"""

    def __init__(self, env, host, port, user, passwd, db):
        super(DataBaseService, self).__init__(env)
        self._db_proxy = Proxy()
        self._conn_info = dict(host=host, port=port,
                               user=user, passwd=passwd,
                               db=db)

    def on_active(self):
        super(DataBaseService, self).on_active()
        conn_info = self._conn_info.copy()
        db_name = conn_info.pop('db')
        database = PooledMySQLDatabaseWithReconnection(
            db_name,
            max_connections=DB_CONNECTION_MAX_NUM,
            stale_timeout=300,
            threadlocals=True,
            **conn_info
        )
        self._db_proxy.initialize(database)
        self._db_proxy.connect()

    def get_db(self):
        return self._db_proxy
class RepoConfig(object):
    def __init__(self):
        self._database_path = None
        self._database = None
        self._database_proxy = None

    def set_database_path(self, path):
        self._database_path = path
        self._database = SqliteDatabase(self._database_path)
        self._database_proxy = Proxy()
        self._database_proxy.initialize(self._database)

    @property
    def database_path(self):
        return self._database_path

    @property
    def database(self):
        return self._database

    @property
    def database_proxy(self):
        return self._database_proxy

    # singleton
    instance = None

    @classmethod
    def get(cls):
        if cls.instance is None:
            cls.instance = cls()
        return cls.instance
def __init__(self, app=None):
    """ Initialize the plugin. """
    self.app = app
    self.database = Proxy()
    if app is not None:
        self.init_app(app)
def Model(self):
    if self._app is None:
        database = getattr(self, 'database', None)
        if database is None:
            self.database = Proxy()

    if not hasattr(self, '_model_class'):
        self._model_class = self.get_model_class()
    return self._model_class
def test_binary_type_info(self):
    db_proxy = Proxy()

    class A(Model):
        blob_field = BlobField()

        class Meta:
            database = db_proxy

    self.assertTrue(A.blob_field._constructor is binary_construct)

    db = SqliteDatabase(':memory:')
    db_proxy.initialize(db)
    self.assertTrue(A.blob_field._constructor is sqlite3.Binary)
class PeeweePlugin(object):
    """ Integrate peewee to bottle. """

    name = 'peewee'
    api = 2
    default_connection = 'sqlite:///db.sqlite'

    def __init__(self, connection=None):
        self.database = None
        self.connection = connection or self.default_connection
        self.proxy = Proxy()
        self.serializer = Serializer()

    def setup(self, app):
        """ Initialize the application. """
        app.config.setdefault('PEEWEE_CONNECTION', self.connection)
        self.connection = app.config.get('PEEWEE_CONNECTION')
        self.database = connect(self.connection)
        self.proxy.initialize(self.database)

    def apply(self, callback, route):
        def wrapper(*args, **kwargs):
            if self.connection.startswith('sqlite'):
                return callback(*args, **kwargs)

            try:
                self.database.connect()
            except Exception as e:
                pass  # Error can now be leaked to Bottle, e.g. as `peewee.OperationalError`

            try:
                with self.database.transaction():
                    response = callback(*args, **kwargs)
            except PeeweeException:
                self.database.rollback()
                raise
            finally:
                self.database.commit()
                if not self.database.is_closed():
                    self.database.close()

            return response
        return wrapper

    def to_dict(self, obj, **kwargs):
        return self.serializer.serialize_object(obj, **kwargs)
def db(self) -> dict:
    """
    Sets up the database proxy and exposes the database variables
    in a `db` property.
    """
    # Create a database proxy (placeholder) to be filled at runtime
    # with the actual database object.
    self._config_dict['aryas']['db']['_db_proxy'] = Proxy()
    return self._config_dict['aryas']['db']
class PABase(Model):
    sqldb = Proxy()

    def __str__(self):
        return json.dumps(self._data, default=json_serial)

    def __repr__(self):
        return self.__str__()
class PeeweePlugin(object):
    """ Integrate peewee to bottle. """

    name = 'peewee'
    api = 2
    default_connection = 'sqlite:///db.sqlite'

    def __init__(self, connection=None):
        self.database = None
        self.connection = connection or self.default_connection
        self.proxy = Proxy()
        self.serializer = Serializer()

    def setup(self, app):
        """ Initialize the application. """
        app.config.setdefault('PEEWEE_CONNECTION', self.connection)
        self.connection = app.config.get('PEEWEE_CONNECTION')
        self.database = connect(self.connection)
        self.proxy.initialize(self.database)

    def apply(self, callback, route):
        def wrapper(*args, **kwargs):
            if self.connection.startswith('sqlite'):
                return callback(*args, **kwargs)

            self.database.connect()
            try:
                with self.database.transaction():
                    response = callback(*args, **kwargs)
            except PeeweeException:
                self.database.rollback()
                raise
            finally:
                self.database.commit()
                if not self.database.is_closed():
                    self.database.close()

            return response
        return wrapper

    def to_dict(self, obj, **kwargs):
        return self.serializer.serialize_object(obj, **kwargs)
class KyogreDB:
    _db = Proxy()
    _migrator = None

    @classmethod
    def start(cls, db_path):
        handle = APSWDatabase(db_path, pragmas={
            'journal_mode': 'wal',
            'cache_size': -1 * 64000,
            'foreign_keys': 1,
            'ignore_check_constraints': 0
        })
        cls._db.initialize(handle)
        # ensure db matches current schema
        cls._db.create_tables([
            LocationTable, TeamTable, GuildTable, TrainerTable,
            PokemonTable, SilphcardTable, RegionTable,
            LocationRegionRelation, PokestopTable, GymTable,
            TrainerReportRelation, QuestTable, ResearchTable,
            SightingTable, RaidBossRelation, RaidTable,
            SubscriptionTable, TradeTable, LocationNoteTable
        ])
        cls.init()
        cls._migrator = SqliteMigrator(cls._db)

    @classmethod
    def stop(cls):
        return cls._db.close()

    @classmethod
    def init(cls):
        # check team
        try:
            TeamTable.get()
        except:
            TeamTable.reload_default()
        # check pokemon
        try:
            PokemonTable.get()
        except:
            PokemonTable.reload_default()
        # check regions
        try:
            RegionTable.get()
        except:
            RegionTable.reload_default()
        # check locations
        try:
            LocationTable.get()
        except:
            LocationTable.reload_default()
        # check quests
        try:
            QuestTable.get()
        except:
            QuestTable.reload_default()
class DobbyDB:
    _db = Proxy()
    _migrator = None

    @classmethod
    def start(cls, db_path):
        handle = APSWDatabase(db_path, pragmas={
            'journal_mode': 'wal',
            'cache_size': -1 * 64000,
            'foreign_keys': 1,
            'ignore_check_constraints': 0
        })
        cls._db.initialize(handle)
        # ensure db matches current schema
        cls._db.create_tables([
            GuildTable, WizardTable, HouseTable, ProfessionTable,
            ProfileTable, TitleTable, LocationTable, RegionTable,
            LocationRegionRelation, LocationNoteTable, InnTable,
            GreenhouseTable, FortressTable, WizardReportRelation,
            EventTable, BadgeTable, BadgeAssignmentTable
        ])
        cls.init()
        cls._migrator = SqliteMigrator(cls._db)

    @classmethod
    def stop(cls):
        return cls._db.close()

    @classmethod
    def init(cls):
        # check house
        try:
            HouseTable.get()
        except:
            HouseTable.reload_default()
        try:
            ProfessionTable.get()
        except:
            ProfessionTable.reload_default()
        try:
            TitleTable.get()
        except:
            TitleTable.reload_default()
        # check regions
        try:
            RegionTable.get()
        except:
            RegionTable.reload_default()
        # check locations
        try:
            LocationTable.get()
        except:
            LocationTable.reload_default()
def test(self):
    db = self.get_mysql_db()

    # Re proxy to avoid previous test use
    SyncManager._meta.database = Proxy()

    # Init/Create in sync mode
    SyncManager.init_db(db)
    SyncManager.create_table()

    # Clear out from previous test run
    SyncManager.delete().execute()

    sync_manager = get_sync_manager(app="test-async",
                                    start=0,
                                    db=db,
                                    set_async=True)

    async def it(since=None, limit=None, offset=None):
        log.debug("Getting iterator since={} limit={} offset={}".format(
            since, limit, offset))

        def dummy():
            for x in range(since + 1, since + limit + 1):
                log.debug("yielded {}".format(x))
                yield {"x": x}

        return LastOffsetQueryIterator(dummy(),
                                       row_output_fun=lambda x: x,
                                       key_fun=lambda x: x['x'],
                                       is_unique_key=True)

    output = []

    async def process(it):
        nonlocal output
        for item in it:
            output.append(item)
            log.debug("process item: {}".format(item))

    processor = AsyncProcessor(sync_manager=sync_manager,
                               it_function=it,
                               process_function=process,
                               object=Manager(db, loop=None))

    async def consume():
        await processor.process(limit=10, i=3)

    asyncio.get_event_loop().run_until_complete(consume())

    self.assertEqual(len(output), 30)
def prepare_database_with_table(name: str, rows: list):
    from peewee import IntegerField, Proxy, CharField, Model
    from playhouse.sqlite_ext import CSqliteExtDatabase

    db = Proxy()
    db.initialize(CSqliteExtDatabase(':memory:', bloomfilter=True))
    NameModel = type(
        name, (Model, ), {
            'id_': IntegerField(primary_key=True, column_name='id'),
            'name': CharField(column_name='name')
        })
    table: Model = NameModel()
    table.bind(db)
    db.create_tables([NameModel])
    for row in rows:
        table.insert(row).execute()
    return db
def DBProxy():
    if not PWDatabase.__proxy:
        PWDatabase.__proxy = Proxy()
    return PWDatabase.__proxy
class FlaskDB(object):
    def __init__(self, app=None, database=None):
        self.database = None  # Reference to actual Peewee database instance.
        self._app = app
        self._db = database  # dict, url, Database, or None (default).
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        self._app = app

        if self._db is None:
            if 'DATABASE' in app.config:
                initial_db = app.config['DATABASE']
            elif 'DATABASE_URL' in app.config:
                initial_db = app.config['DATABASE_URL']
            else:
                raise ValueError('Missing required configuration data for '
                                 'database: DATABASE or DATABASE_URL.')
        else:
            initial_db = self._db

        self._load_database(app, initial_db)
        self._register_handlers(app)

    def _load_database(self, app, config_value):
        if isinstance(config_value, Database):
            database = config_value
        elif isinstance(config_value, dict):
            database = self._load_from_config_dict(dict(config_value))
        else:
            # Assume a database connection URL.
            database = db_url_connect(config_value)

        if isinstance(self.database, Proxy):
            self.database.initialize(database)
        else:
            self.database = database

    def _load_from_config_dict(self, config_dict):
        try:
            name = config_dict.pop('name')
            engine = config_dict.pop('engine')
        except KeyError:
            raise RuntimeError('DATABASE configuration must specify a '
                               '`name` and `engine`.')

        if '.' in engine:
            path, class_name = engine.rsplit('.', 1)
        else:
            path, class_name = 'peewee', engine

        try:
            __import__(path)
            module = sys.modules[path]
            database_class = getattr(module, class_name)
            assert issubclass(database_class, Database)
        except ImportError:
            raise RuntimeError('Unable to import %s' % engine)
        except AttributeError:
            raise RuntimeError('Database engine not found %s' % engine)
        except AssertionError:
            raise RuntimeError('Database engine not a subclass of '
                               'peewee.Database: %s' % engine)

        return database_class(name, **config_dict)

    def _register_handlers(self, app):
        app.before_request(self.connect_db)
        app.teardown_request(self.close_db)

    def get_model_class(self):
        if self.database is None:
            raise RuntimeError('Database must be initialized.')

        class BaseModel(Model):
            class Meta:
                database = self.database

        return BaseModel

    @property
    def Model(self):
        if self._app is None:
            database = getattr(self, 'database', None)
            if database is None:
                self.database = Proxy()

        if not hasattr(self, '_model_class'):
            self._model_class = self.get_model_class()
        return self._model_class

    def connect_db(self):
        self.database.connect()

    def close_db(self, exc):
        if not self.database.is_closed():
            self.database.close()
import re

from peewee import PrimaryKeyField, CharField, ForeignKeyField, IntegerField, \
    Model, Proxy, CompositeKey
from playhouse.fields import ManyToManyField

import users

# Create a proxy to DB that can
# be instantiated at runtime
db_proxy = Proxy()
GroupToCapabilityProxy = Proxy()
UserToGroupProxy = Proxy()


class ActionField(IntegerField):
    db_field = 'action'

    def db_value(self, value):
        return value

    def python_value(self, value):
        return Action(value)


class BaseModel(Model):
    class Meta:
        database = db_proxy  # Use proxy for our DB.

    def to_dict(self):
        return dict(id=self.id)
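The module above only declares the proxies; the concrete database is attached later. A minimal sketch of what that runtime wiring could look like, assuming an SQLite target and a hypothetical `initialize_db` helper (neither is part of the original module):

from peewee import SqliteDatabase


def initialize_db(path='app.db'):
    # Hypothetical helper: swap the placeholder for a concrete database
    # once configuration is known. Models declared against db_proxy start
    # using this database as soon as initialize() has been called.
    database = SqliteDatabase(path)
    db_proxy.initialize(database)
    database.connect()
    return database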
import peewee
from peewee import Proxy
from peewee_migrate import Router
from flask_restful import Resource
from flask_restful import request
from flask import g

BASE_PATH = "/api"

not_found_message = "Requested resource does not exist on this server."
unauthorized_message = "User could not be authorized with the given credentials."
invalid_call_message = "This endpoint does not implement this method."
no_permission_message = "You don't have permission to access this resource on this server."

DB = Proxy()


class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(
                Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]


# Base DB models
class BaseModel(peewee.Model):
    """ Peewee's Base model
import os

import psycogreen.gevent
psycogreen.gevent.patch_psycopg()

from peewee import Proxy, OP, Model
from peewee import Expression
from playhouse.postgres_ext import PostgresqlExtDatabase

REGISTERED_MODELS = []

# Create a database proxy we can setup post-init
database = Proxy()

OP['IRGX'] = 'irgx'


def pg_regex_i(lhs, rhs):
    return Expression(lhs, OP.IRGX, rhs)


class ModelBase(Model):
    class Meta:
        database = database

    @staticmethod
    def register(cls):
        REGISTERED_MODELS.append(cls)
        return cls


def init_db(env):
from sanic_aioorm import AioOrm
from sanic_aioorm import AioModel as Model
from aioorm import AioManyToManyField as ManyToManyField
from peewee import CharField, UUIDField, DateTimeField, IntegerField, ForeignKeyField
from peewee import Proxy
from playhouse.fields import PasswordField

db = Proxy()


class BaseModel(Model):
    class Meta:
        database = db


@AioOrm.regist
class Role(BaseModel):
    service_name = CharField(max_length=64)

    def __unicode__(self):
        return self.service_name


@AioOrm.regist
class User(BaseModel):
    _id = UUIDField(primary_key=True)
    username = CharField(max_length=80, unique=True)
    password = PasswordField()
    main_email = CharField(max_length=80, unique=True)
    ctime = DateTimeField(formats='%Y-%m-%d %H:%M:%S')
    roles = ManyToManyField(Role, related_name='users')
class FlaskDB(object):
    """
    Convenience wrapper for configuring a Peewee database for use with a
    Flask application. Provides a base `Model` class and registers handlers
    to manage the database connection during the request/response cycle.

    Usage::

        from flask import Flask
        from peewee import *
        from playhouse.flask_utils import FlaskDB


        # The database can be specified using a database URL, or you can pass a
        # Peewee database instance directly:
        DATABASE = 'postgresql:///my_app'
        DATABASE = PostgresqlDatabase('my_app')

        # If we do not want connection-management on any views, we can specify
        # the view names using FLASKDB_EXCLUDED_ROUTES. The db connection will
        # not be opened/closed automatically when these views are requested:
        FLASKDB_EXCLUDED_ROUTES = ('logout',)

        app = Flask(__name__)
        app.config.from_object(__name__)

        # Now we can configure our FlaskDB:
        flask_db = FlaskDB(app)

        # Or use the "deferred initialization" pattern:
        flask_db = FlaskDB()
        flask_db.init_app(app)

        # The `flask_db` provides a base Model-class for easily binding models
        # to the configured database:
        class User(flask_db.Model):
            email = CharField()
    """

    def __init__(self, app=None, database=None, model_class=Model,
                 excluded_routes=None):
        self.database = None  # Reference to actual Peewee database instance.
        self.base_model_class = model_class
        self._app = app
        self._db = database  # dict, url, Database, or None (default).
        self._excluded_routes = excluded_routes or ()
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        self._app = app

        if self._db is None:
            if 'DATABASE' in app.config:
                initial_db = app.config['DATABASE']
            elif 'DATABASE_URL' in app.config:
                initial_db = app.config['DATABASE_URL']
            else:
                raise ValueError('Missing required configuration data for '
                                 'database: DATABASE or DATABASE_URL.')
        else:
            initial_db = self._db

        if 'FLASKDB_EXCLUDED_ROUTES' in app.config:
            self._excluded_routes = app.config['FLASKDB_EXCLUDED_ROUTES']

        self._load_database(app, initial_db)
        self._register_handlers(app)

    def _load_database(self, app, config_value):
        if isinstance(config_value, Database):
            database = config_value
        elif isinstance(config_value, dict):
            database = self._load_from_config_dict(dict(config_value))
        else:
            # Assume a database connection URL.
            database = db_url_connect(config_value)

        if isinstance(self.database, Proxy):
            self.database.initialize(database)
        else:
            self.database = database

    def _load_from_config_dict(self, config_dict):
        try:
            name = config_dict.pop('name')
            engine = config_dict.pop('engine')
        except KeyError:
            raise RuntimeError('DATABASE configuration must specify a '
                               '`name` and `engine`.')

        if '.' in engine:
            path, class_name = engine.rsplit('.', 1)
        else:
            path, class_name = 'peewee', engine

        try:
            __import__(path)
            module = sys.modules[path]
            database_class = getattr(module, class_name)
            assert issubclass(database_class, Database)
        except ImportError:
            raise RuntimeError('Unable to import %s' % engine)
        except AttributeError:
            raise RuntimeError('Database engine not found %s' % engine)
        except AssertionError:
            raise RuntimeError('Database engine not a subclass of '
                               'peewee.Database: %s' % engine)

        return database_class(name, **config_dict)

    def _register_handlers(self, app):
        app.before_request(self.connect_db)
        app.teardown_request(self.close_db)

    def get_model_class(self):
        if self.database is None:
            raise RuntimeError('Database must be initialized.')

        class BaseModel(self.base_model_class):
            class Meta:
                database = self.database

        return BaseModel

    @property
    def Model(self):
        if self._app is None:
            database = getattr(self, 'database', None)
            if database is None:
                self.database = Proxy()

        if not hasattr(self, '_model_class'):
            self._model_class = self.get_model_class()
        return self._model_class

    def connect_db(self):
        if self._excluded_routes and request.endpoint in self._excluded_routes:
            return
        self.database.connect()

    def close_db(self, exc):
        if self._excluded_routes and request.endpoint in self._excluded_routes:
            return
        if not self.database.is_closed():
            self.database.close()
def __init__(self, connection=None):
    self.database = None
    self.connection = connection or self.default_connection
    self.proxy = Proxy()
    self.serializer = Serializer()
from peewee import Model, Proxy, CharField
from playhouse.kv import JSONField
from social_core.storage import UserMixin, AssociationMixin, NonceMixin, \
    CodeMixin, PartialMixin, BaseStorage


def get_query_by_dict_param(cls, params):
    query = True
    for field_name, value in params.items():
        query_item = cls._meta.fields[field_name] == value
        query = query & query_item
    return query


database_proxy = Proxy()


class BaseModel(Model):
    class Meta:
        database = database_proxy


class PeeweeUserMixin(UserMixin, BaseModel):
    provider = CharField()
    extra_data = JSONField(null=True)
    uid = CharField()
    user = None

    @classmethod
    def changed(cls, user):
"""pgpool-pyui model""" import logging from peewee import Model, Proxy, \ CharField, DateTimeField, SmallIntegerField, BooleanField, ForeignKeyField from playhouse.db_url import connect LOG = logging.getLogger(__name__) DATABASE = Proxy() class BaseModel(Model): """Base model for all entities""" class Meta: """Meta class needed for peewee""" database = DATABASE class Account(BaseModel): """Database model for account""" auth_service = CharField() username = CharField(primary_key=True) password = CharField() last_modified = DateTimeField() system_id = CharField() level = SmallIntegerField() banned = BooleanField() shadowbanned = BooleanField() lures = SmallIntegerField()
from peewee import SqliteDatabase, Model, Proxy, CharField  # type: ignore
from peewee import BooleanField, ForeignKeyField, CompositeKey  # type: ignore

proxy = Proxy()


class BaseModel(Model):
    class Meta(object):
        database = proxy


class WeaveEnvInstanceData(BaseModel):
    machine_id = CharField(primary_key=True)
    app_token = CharField()


class PluginData(BaseModel):
    app_url = CharField()
    name = CharField()
    description = CharField(default="")
    enabled = BooleanField(default=False)
    machine = ForeignKeyField(WeaveEnvInstanceData, backref='plugins')

    class Meta:
        primary_key = CompositeKey('app_url', 'machine')


class PluginsDatabase(object):
    def __init__(self, path):
        self.conn = SqliteDatabase(path)
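The `PluginsDatabase` excerpt stops right after creating the `SqliteDatabase`. A hedged sketch of the wiring that would typically follow, using only names declared above; the helper name and table list are assumptions:

def start_plugins_database(path):
    # Hypothetical helper (not part of the original module): create the
    # SQLite connection, point the module-level proxy at it, and make sure
    # the tables declared above exist.
    plugins_db = PluginsDatabase(path)
    proxy.initialize(plugins_db.conn)
    plugins_db.conn.connect()
    plugins_db.conn.create_tables([WeaveEnvInstanceData, PluginData])
    return plugins_db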
def test(self):
    db = self.get_sqlite_db()

    # Re proxy to avoid previous test use
    SyncManager._meta.database = Proxy()

    SyncManager.init_db(db)
    SyncManager.create_table()

    class TestModel(Model):
        value = IntegerField()

        @classmethod
        def get_value(cls, item):
            return item.value

        @classmethod
        def get_key(cls, item):
            return item.id

        @classmethod
        def select_since_id(cls, since, limit, offset):
            q = cls.select().where(cls.id > since)
            if limit:
                q = q.limit(limit)
            return q

        class Meta:
            database = db

    TestModel.create_table()

    sync_manager = get_sync_manager(app="test", start=0, test=None)

    output = []

    def row_output(model):
        data = {'id': model.id, 'value': model.value}
        output.append(data)
        return data

    for i in range(25):
        TestModel.create(id=i + 1, value=i + 1)

    self.assertEqual(25, TestModel.select().count())

    iteration = 0

    def process(it):
        nonlocal iteration
        iteration += 1
        for x in it:
            log.debug("process it={} id={}".format(iteration, x['id']))

    def it(since, limit, offset):
        log.debug("it since={} limit={} offset={}".format(
            since, limit, offset))
        q = TestModel.select_since_id(since, limit=limit, offset=offset)
        return LastOffsetQueryIterator(q.iterator(),
                                       row_output_fun=row_output,
                                       key_fun=TestModel.get_key,
                                       is_unique_key=True)

    processor = Processor(sync_manager=sync_manager,
                          it_function=it,
                          process_function=process,
                          sleep_duration=0)
    processor.process(limit=10, i=5)

    self.assertEqual(len(output), 25)
    self.assertEqual(output[0]['id'], 1)
    self.assertEqual(output[-1]['id'], 25)
def test_offset_processing(self):
    db = self.get_mysql_db()

    # Re proxy to avoid previous test use
    SyncManager._meta.database = Proxy()

    # Init/Create in sync mode
    SyncManager.init_db(db)
    SyncManager.create_table()

    # Clear out from previous test run
    SyncManager.delete().execute()

    sync_manager = get_sync_manager(app="test-async",
                                    start=0,
                                    db=db,
                                    set_async=True)

    # 15 regular, 25 @ 50 (ie the "hump"), 10 afterwards
    items = list(range(15)) + list([50 for _ in range(25)]) + list(
        range(55, 65))
    items = [{'id': i + 1, 'x': x} for i, x in enumerate(items)]

    async def it(since=0, limit=0, offset=0):
        log.debug("Getting iterator since={} limit={} offset={}".format(
            since, limit, offset))

        def dummy():
            nonlocal items
            nonlocal limit
            nonlocal offset
            for item in items:
                if item['x'] < since:
                    continue
                if offset > 0:
                    offset -= 1
                    continue
                limit -= 1
                if limit < 0:
                    break
                yield item

        return LastOffsetQueryIterator(dummy(),
                                       row_output_fun=lambda x: x,
                                       key_fun=lambda x: x['x'],
                                       is_unique_key=False)

    output = []

    async def process(it):
        nonlocal output
        for item in it:
            output.append(item)
            log.debug("process item: {}".format(item))

    processor = AsyncProcessor(sync_manager=sync_manager,
                               it_function=it,
                               process_function=process,
                               object=Manager(db, loop=None))

    async def consume():
        await processor.process(limit=10, i=8)

    asyncio.get_event_loop().run_until_complete(consume())

    # todo: cache to avoid dup values?
    self.assertTrue(len(output), 59)
    unique_values = list(set([x['x'] for x in output]))
    self.assertEquals(unique_values, [
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 50, 55, 56, 57,
        58, 59, 60, 61, 62, 63, 64
    ])
    ids = list(set([x['id'] for x in output]))
    self.assertEqual(len(ids), 50)
    self.assertEqual(ids[0], 1)
    self.assertEqual(ids[-1], 50)
#
# You should have received a copy of the GNU Lesser General Public License
# along with PyDefects. If not, see <https://www.gnu.org/licenses/>.
#

# pylint: disable=missing-docstring,too-few-public-methods
from peewee import (
    Model,
    TextField,
    ForeignKeyField,
    IntegerField,
    CompositeKey,
    DateTimeField,
    FloatField,
    Proxy,
)

DATABASE_PROXY = Proxy()


class BaseModel(Model):
    class Meta:
        database = DATABASE_PROXY


class Keyword(BaseModel):
    keyword = TextField()


class License(BaseModel):
    license = TextField()
def __init__(self, env, host, port, user, passwd, db):
    super(DataBaseService, self).__init__(env)
    self._db_proxy = Proxy()
    self._conn_info = dict(host=host, port=port,
                           user=user, passwd=passwd,
                           db=db)
import json
import logging
from typing import Dict, Optional

import boto3
from dataclasses import asdict
from peewee import Model, CharField, Proxy, DoesNotExist
from playhouse.postgres_ext import PostgresqlExtDatabase, JSONField

from blue.base import BlueprintInstructionExecutionStore, BlueprintExecution, \
    BlueprintInstructionState, InstructionStatus, EventBus, Event
from blue.blueprint import BlueprintManager
from blue.util import blue_json_dumps, superjson

database_proxy = Proxy()  # Create a proxy for our db.

log = logging.getLogger(__name__)


class BaseModel(Model):
    class Meta:
        database = database_proxy  # Use proxy for our DB.


class BlueprintExecutionModel(BaseModel):
    execution_id = CharField(unique=True)
    execution_context = JSONField()
    blueprint = JSONField(dumps=blue_json_dumps)


class BlueprintInstructionStateModel(BaseModel):
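The excerpt imports `PostgresqlExtDatabase` (required for the `JSONField` columns) but cuts off before the proxy is initialized. A minimal sketch of that step; the helper name and connection parameters are assumptions:

def init_store_database(db_name='blue', **conn_kwargs):
    # Hypothetical wiring: create the postgres_ext database and hand it to
    # the module-level proxy that BaseModel's Meta points at.
    database = PostgresqlExtDatabase(db_name, **conn_kwargs)
    database_proxy.initialize(database)
    return database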
class Meta:
    table_name = "sync_manager"
    database = Proxy()
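This Meta fragment gives the model its own placeholder database, which an `init_db`-style call then swaps for a real connection (the tests elsewhere in this section use the same pattern on `SyncManager`). A minimal, self-contained sketch of that pattern, assuming the model name and an in-memory SQLite target:

from peewee import Model, Proxy, SqliteDatabase


class SyncManager(Model):
    class Meta:
        table_name = "sync_manager"
        database = Proxy()


def init_db(db):
    # The proxy lives on the model's metadata, so initialize it there;
    # every query issued by SyncManager then goes to `db`.
    SyncManager._meta.database.initialize(db)


init_db(SqliteDatabase(':memory:'))
SyncManager.create_table()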
import asyncio
import os
from pathlib import Path

from decouple import config
from peewee import Proxy
from playhouse.db_url import connect
from telegram.ext import JobQueue

ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
ACCOUNTS_DIR = Path(ROOT_DIR) / "accounts"

DATABASE_PATH = config('DATABASE_URL')

_auto_typed_db = connect(DATABASE_PATH)
_auto_typed_db.autorollback = True

db = Proxy()
db.initialize(_auto_typed_db)

loop = asyncio.get_event_loop()

"""
Global singleton ptb job_queue as I'm too lazy to rewrite everything to say
`use_context=True` and propagating the `pass_job_queue` flag across all
handlers would be an even bigger nightmare.
At some point this is going to be replaced with `CallbackContext`, but for
now we're gonna live with a global.
"""
job_queue: JobQueue = None
class Meta:
    database = Proxy()
""" Database and storage related functions and classes """ import datetime from enum import IntEnum import functools import sys from flask import g from flask_redis import FlaskRedis from peewee import IntegerField, DateTimeField, BooleanField, Proxy, Model, Database from peewee import CharField, ForeignKeyField, TextField, PrimaryKeyField from werkzeug.local import LocalProxy from .storage import file_url from .config import config rconn = FlaskRedis() dbp = Proxy() def get_db(): if "db" not in g: if dbp.is_closed(): dbp.connect() g.db = dbp return g.db db = LocalProxy(get_db) def db_init_app(app): dbconnect = dict(app.config["THROAT_CONFIG"].database)
def test_offset_processing(self):
    db = self.get_sqlite_db()

    # Re proxy to avoid previous test use
    SyncManager._meta.database = Proxy()

    SyncManager.init_db(db)
    SyncManager.create_table()

    class TestModel(Model):
        value = IntegerField()

        @classmethod
        def get_value(cls, item):
            return item.value

        @classmethod
        def get_key(cls, item):
            return item.value

        @classmethod
        def select_since_value(cls, since, limit, offset):
            q = cls.select().where(cls.value > since)
            if limit:
                q = q.limit(limit)
            if offset:
                q = q.offset(offset)
            log.debug(q.sql())
            return q

        class Meta:
            database = db

    TestModel.create_table()

    sync_manager = get_sync_manager(app="test", start=-1, test=None)

    output = []

    def row_output(model):
        data = {'id': model.id, 'value': model.value}
        output.append(data)
        return data

    # Create 15 regular records
    for i in range(15):
        TestModel.create(value=i)

    # Now add 25 with same value (ie a "hump" that will require "offset" to get over)
    for i in range(25):
        TestModel.create(value=50)

    # And a final few
    for i in range(10):
        TestModel.create(value=51 + i)

    self.assertEqual(50, TestModel.select().count())

    iteration = 0

    def process(it):
        nonlocal iteration
        iteration += 1
        for x in it:
            log.debug("process it={} id={} value={}".format(
                iteration, x['id'], x['value']))

    # Note: is_unique_key=False (ie multiple same value may exist,
    # eg same "lastModified" due to a bulk update for example)
    def it(since, limit, offset):
        log.debug("it since={} limit={} offset={}".format(
            since, limit, offset))
        q = TestModel.select_since_value(since, limit=limit, offset=offset)
        return LastOffsetQueryIterator(q.iterator(),
                                       row_output_fun=row_output,
                                       key_fun=TestModel.get_key,
                                       is_unique_key=False)

    processor = Processor(sync_manager=sync_manager,
                          it_function=it,
                          process_function=process,
                          sleep_duration=0)
    processor.process(limit=10, i=10)

    # is_unique_key=False results in duplicate values when we hit the offset limit
    # todo: cache to avoid dup values?
    self.assertTrue(len(output), 56)
    value_ids = list(set([x['value'] for x in output]))
    # 0-14, 50, 51-60
    self.assertEquals(value_ids, [
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 50, 51, 52, 53,
        54, 55, 56, 57, 58, 59, 60
    ])
    ids = list(set([x['id'] for x in output]))
    self.assertEqual(len(ids), 50)
    self.assertEqual(ids[0], 1)
    self.assertEqual(ids[-1], 50)
# coding=utf-8
# A base class wrapping peewee and its fields
from peewee import Proxy, Model

data_proxy = Proxy()


class BaseModel(Model):
    class Meta:
        # peewee reads the database from Meta.database, so the proxy must be
        # assigned to `database` (not `db`) for subclasses to pick it up.
        database = data_proxy
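A short sketch of how this base class would typically be bound at runtime; the `Person` model and the in-memory SQLite database are illustrative assumptions only:

from peewee import CharField, SqliteDatabase


class Person(BaseModel):
    # Example model inheriting the proxied database from BaseModel.
    name = CharField()


# At startup, point the proxy at a real database and create the schema.
data_proxy.initialize(SqliteDatabase(':memory:'))
data_proxy.create_tables([Person])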
from peewee import Model
from peewee import IntegerField
from peewee import ForeignKeyField
from peewee import TextField
from peewee import PostgresqlDatabase, Proxy
from peewee import DateTimeField
from datetime import datetime

db_proxy = Proxy()


class DBModel(Model):
    class Meta:
        database = db_proxy


class Account(Model):
    login = TextField()
    password = TextField()

    class Meta:
        database = db_proxy


class Messenger(Model):
    name = TextField()
    cost = IntegerField()

    class Meta:
        database = db_proxy
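`PostgresqlDatabase` is imported above but never bound in the excerpt. A hedged sketch of the runtime step; the helper name, database name, and connection keyword arguments are placeholders, not part of the original module:

def init_db(name='botdb', **connect_kwargs):
    # Hypothetical setup helper: attach a concrete Postgres connection to
    # the proxy that Account and Messenger are declared against.
    database = PostgresqlDatabase(name, **connect_kwargs)
    db_proxy.initialize(database)
    database.connect()
    database.create_tables([Account, Messenger])
    return database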