Ejemplo n.º 1
0
 def db(self) -> dict:
     """
     Sets up the database proxy and exposes the database variables in a `db` property.

     Returns:
         dict: the ``aryas.db`` section of the config, with ``_db_proxy`` set
         to a fresh peewee ``Proxy``.
     """
     # Create a database proxy (placeholder) to be filled at runtime with the actual database object.
     self._config_dict['aryas']['db']['_db_proxy'] = Proxy()
     return self._config_dict['aryas']['db']
Ejemplo n.º 2
0
class PABase(Model):
    """Common model base: a proxied database handle plus JSON string output."""

    # Placeholder filled with the real database object at runtime.
    sqldb = Proxy()

    def __str__(self):
        # Serialize the row data to JSON; json_serial handles non-native types.
        return json.dumps(self._data, default=json_serial)

    def __repr__(self):
        # repr mirrors str so debugging output shows the same JSON payload.
        return str(self)
Ejemplo n.º 3
0
    def __init__(self, app=None):
        """
        Initialize the plugin.

        Args:
            app: optional application; when given, ``init_app`` is called
                 immediately, otherwise it must be called later.
        """
        self.app = app
        # Placeholder database; expected to be initialized once init_app runs.
        self.database = Proxy()

        if app is not None:
            self.init_app(app)
Ejemplo n.º 4
0
    def Model(self):
        """Return (and lazily build/cache) the model class for this wrapper.

        When no app is bound yet, ensure a ``database`` placeholder proxy
        exists so model declaration does not fail.
        """
        if self._app is None:
            database = getattr(self, 'database', None)
            if database is None:
                # No real database yet: install a Proxy placeholder.
                self.database = Proxy()

        if not hasattr(self, '_model_class'):
            # Build once and cache on the instance.
            self._model_class = self.get_model_class()
        return self._model_class
Ejemplo n.º 5
0
class KyogreDB:
    """Owns the shared peewee database proxy, schema creation and seed data."""

    # Proxy so models can be declared before the real database exists.
    _db = Proxy()
    _migrator = None

    # Tables whose default contents are restored when they are found empty,
    # in the same order the original code checked them.
    _DEFAULT_TABLES = (TeamTable, PokemonTable, RegionTable, LocationTable,
                       QuestTable)

    @classmethod
    def start(cls, db_path):
        """Open the SQLite database at *db_path*, create the schema, seed
        defaults, and prepare a migrator."""
        handle = APSWDatabase(db_path,
                              pragmas={
                                  'journal_mode': 'wal',
                                  'cache_size': -1 * 64000,
                                  'foreign_keys': 1,
                                  'ignore_check_constraints': 0
                              })
        cls._db.initialize(handle)
        # ensure db matches current schema
        cls._db.create_tables([
            LocationTable, TeamTable, GuildTable, TrainerTable, PokemonTable,
            SilphcardTable, RegionTable, LocationRegionRelation, PokestopTable,
            GymTable, TrainerReportRelation, QuestTable, ResearchTable,
            SightingTable, RaidBossRelation, RaidTable, SubscriptionTable,
            TradeTable, LocationNoteTable
        ])
        cls.init()
        cls._migrator = SqliteMigrator(cls._db)

    @classmethod
    def stop(cls):
        """Close the underlying database connection."""
        return cls._db.close()

    @classmethod
    def init(cls):
        """Reload default rows for any seed table that is empty.

        ``Model.get()`` raises when a table holds no rows. The original used
        bare ``except:`` clauses, which also swallow ``KeyboardInterrupt`` and
        ``SystemExit``; ``except Exception`` keeps the best-effort behavior
        without hiding interpreter shutdown.
        """
        for table in cls._DEFAULT_TABLES:
            try:
                table.get()
            except Exception:
                table.reload_default()
Ejemplo n.º 6
0
    def test_binary_type_info(self):
        """BlobField's constructor should switch from the generic
        binary_construct to sqlite3.Binary once the proxied database is
        initialized with a SQLite backend."""
        db_proxy = Proxy()
        class A(Model):
            blob_field = BlobField()
            class Meta:
                database = db_proxy

        # Unbound proxy: the field keeps the generic constructor.
        self.assertTrue(A.blob_field._constructor is binary_construct)

        db = SqliteDatabase(':memory:')
        db_proxy.initialize(db)
        # After initialization the field adapts to the sqlite3 driver type.
        self.assertTrue(A.blob_field._constructor is sqlite3.Binary)
Ejemplo n.º 7
0
    def test(self):
        """End-to-end AsyncProcessor run: 3 iterations of limit 10 must
        deliver exactly 30 items to the process callback."""

        db = self.get_mysql_db()

        # Re proxy to avoid previous test use
        SyncManager._meta.database = Proxy()

        # Init/Create in sync mode
        SyncManager.init_db(db)
        SyncManager.create_table()

        # Clear out from previous test run
        SyncManager.delete().execute()

        sync_manager = get_sync_manager(app="test-async",
                                        start=0,
                                        db=db,
                                        set_async=True)

        # Iterator factory: yields `limit` consecutive integers after `since`.
        async def it(since=None, limit=None, offset=None):

            log.debug("Getting iterator since={} limit={} offset={}".format(
                since, limit, offset))

            def dummy():
                for x in range(since + 1, since + limit + 1):
                    log.debug("yielded {}".format(x))
                    yield {"x": x}

            return LastOffsetQueryIterator(dummy(),
                                           row_output_fun=lambda x: x,
                                           key_fun=lambda x: x['x'],
                                           is_unique_key=True)

        output = []

        # Collects every item the processor hands us.
        async def process(it):
            nonlocal output
            for item in it:
                output.append(item)
                log.debug("process item: {}".format(item))

        processor = AsyncProcessor(sync_manager=sync_manager,
                                   it_function=it,
                                   process_function=process,
                                   object=Manager(db, loop=None))

        async def consume():
            await processor.process(limit=10, i=3)

        asyncio.get_event_loop().run_until_complete(consume())

        # 3 iterations * limit 10 = 30 items total.
        self.assertEqual(len(output), 30)
Ejemplo n.º 8
0
class DobbyDB:
    """Owns the shared peewee database proxy, schema creation and seed data."""

    # Proxy so models can be declared before the real database exists.
    _db = Proxy()
    _migrator = None

    # Tables whose default contents are restored when they are found empty,
    # in the same order the original code checked them.
    _DEFAULT_TABLES = (HouseTable, ProfessionTable, TitleTable, RegionTable,
                       LocationTable)

    @classmethod
    def start(cls, db_path):
        """Open the SQLite database at *db_path*, create the schema, seed
        defaults, and prepare a migrator."""
        handle = APSWDatabase(db_path,
                              pragmas={
                                  'journal_mode': 'wal',
                                  'cache_size': -1 * 64000,
                                  'foreign_keys': 1,
                                  'ignore_check_constraints': 0
                              })
        cls._db.initialize(handle)
        # ensure db matches current schema
        cls._db.create_tables([
            GuildTable, WizardTable, HouseTable, ProfessionTable, ProfileTable,
            TitleTable, LocationTable, RegionTable, LocationRegionRelation,
            LocationNoteTable, InnTable, GreenhouseTable, FortressTable,
            WizardReportRelation, EventTable, BadgeTable, BadgeAssignmentTable
        ])
        cls.init()
        cls._migrator = SqliteMigrator(cls._db)

    @classmethod
    def stop(cls):
        """Close the underlying database connection."""
        return cls._db.close()

    @classmethod
    def init(cls):
        """Reload default rows for any seed table that is empty.

        ``Model.get()`` raises when a table holds no rows. The original used
        bare ``except:`` clauses, which also swallow ``KeyboardInterrupt`` and
        ``SystemExit``; ``except Exception`` keeps the best-effort behavior
        without hiding interpreter shutdown.
        """
        for table in cls._DEFAULT_TABLES:
            try:
                table.get()
            except Exception:
                table.reload_default()
Ejemplo n.º 9
0
def prepare_database_with_table(name: str, rows: list):
    """Create an in-memory SQLite database containing one dynamically built table.

    Args:
        name: name of the model/table to create.
        rows: iterable of dicts mapping the columns ('id_'/'name') to values.

    Returns:
        The initialized peewee database proxy.
    """
    from peewee import IntegerField, Proxy, CharField, Model
    from playhouse.sqlite_ext import CSqliteExtDatabase

    db = Proxy()
    db.initialize(CSqliteExtDatabase(':memory:', bloomfilter=True))
    NameModel = type(
        name, (Model, ), {
            'id_': IntegerField(primary_key=True, column_name='id'),
            'name': CharField(column_name='name')
        })
    # bind() and insert() are class-level operations; the original routed them
    # through a throwaway NameModel() instance, which is misleading (and
    # deprecated usage in peewee 3). Call them on the model class directly.
    NameModel.bind(db)
    db.create_tables([NameModel])
    for row in rows:
        NameModel.insert(row).execute()
    return db
Ejemplo n.º 10
0
 def DBProxy():
     """Return the class-level peewee Proxy, creating it on first use."""
     # NOTE(review): the double-underscore attribute is name-mangled; this
     # presumably lives inside the PWDatabase class body — confirm upstream.
     if not PWDatabase.__proxy:
         PWDatabase.__proxy = Proxy()
     return PWDatabase.__proxy
Ejemplo n.º 11
0
 class Meta:
     # Placeholder proxy; initialize() is called with the real database later.
     database = Proxy()
Ejemplo n.º 12
0
import datetime
from peewee import (Proxy, Model, PrimaryKeyField, IntegerField, CharField,
                    ForeignKeyField, DateTimeField, BooleanField,
                    SqliteDatabase)
from tmlib.models.task import Status, Priority
from tmlib.models.notification import Status as NotificationStatus

# Peewee models need a database object at class-declaration time; a Proxy
# stands in until the real database is initialized at runtime.
database_proxy = Proxy(
)  # Peewee doesn't allow to add database without global database object


class BaseModel(Model):
    """Base class for classes that work with peewee library"""
    class Meta:
        # All subclasses share the module-level proxy; the real database is
        # injected at runtime via database_proxy.initialize().
        database = database_proxy


class Category(BaseModel):
    """A named category; user_id is nullable (presumably for shared/global
    categories — TODO confirm against callers)."""
    id = PrimaryKeyField(null=False)
    name = CharField()
    user_id = IntegerField(null=True)


class Task(BaseModel):
    """A task with optional scheduling window and optional assignee."""
    id = PrimaryKeyField(null=False)
    user_id = IntegerField(null=True)
    title = CharField()
    note = CharField(default="")
    # Nullable: tasks without a scheduled window are allowed.
    start_time = DateTimeField(null=True)
    end_time = DateTimeField(null=True)
    assigned_user_id = IntegerField(null=True)
Ejemplo n.º 13
0
from playhouse.kv import JSONField

from social_core.storage import UserMixin, AssociationMixin, NonceMixin, \
                                CodeMixin, PartialMixin, BaseStorage


def get_query_by_dict_param(cls, params):
    """Build a filter expression AND-ing ``field == value`` for every item of
    *params* against the fields of model *cls*.

    Bug fix: the original returned from inside the loop, so only the first
    parameter was ever applied (and ``None`` came back for an empty dict).
    Now all parameters are combined; an empty dict yields the neutral ``True``.
    """
    query = True

    for field_name, value in params.items():
        # Fold each field comparison into the running conjunction.
        query = query & (cls._meta.fields[field_name] == value)
    return query


# Shared placeholder database; initialized with the real backend at runtime.
database_proxy = Proxy()


class BaseModel(Model):
    """Binds every subclass to the runtime-initialized database proxy."""
    class Meta:
        database = database_proxy


class PeeweeUserMixin(UserMixin, BaseModel):
    provider = CharField()
    extra_data = JSONField(null=True)
    uid = CharField()
    user = None

    @classmethod
    def changed(cls, user):
import asyncio
import os
from pathlib import Path

from decouple import config
from peewee import Proxy
from playhouse.db_url import connect
from telegram.ext import JobQueue

ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
ACCOUNTS_DIR = Path(ROOT_DIR) / "accounts"

# Database URL is read from the environment via python-decouple.
DATABASE_PATH = config('DATABASE_URL')

_auto_typed_db = connect(DATABASE_PATH)
# Roll back automatically after failed queries so the connection stays usable.
_auto_typed_db.autorollback = True

# Expose the connection through a Proxy so other modules can import `db`
# without caring about initialization order.
db = Proxy()
db.initialize(_auto_typed_db)

loop = asyncio.get_event_loop()

""" Global singleton ptb job_queue as I'm too lazy to rewrite everything to say `use_context=True` and propagating
the `pass_job_queue` flag across all handlers would be an even bigger nightmare. 
At some point this is going to be replaced with `CallbackContext`, but for now we're gonna live with a global. """
# NOTE(review): starts as None and is presumably set during application
# startup elsewhere — the annotation should arguably be Optional[JobQueue].
job_queue: JobQueue = None
Ejemplo n.º 15
0
import json
import logging
from typing import Dict, Optional

import boto3
from dataclasses import asdict
from peewee import Model, CharField, Proxy, DoesNotExist
from playhouse.postgres_ext import PostgresqlExtDatabase, JSONField

from blue.base import BlueprintInstructionExecutionStore, BlueprintExecution, BlueprintInstructionState, InstructionStatus, EventBus, \
    Event
from blue.blueprint import BlueprintManager
from blue.util import blue_json_dumps, superjson

# Placeholder for the real PostgreSQL database, initialized at startup.
database_proxy = Proxy()  # Create a proxy for our db.

log = logging.getLogger(__name__)


class BaseModel(Model):
    """Common base: routes all model queries through the database proxy."""
    class Meta:
        database = database_proxy  # Use proxy for our DB.


class BlueprintExecutionModel(BaseModel):
    """Persisted blueprint execution: unique id plus JSON context/blueprint."""
    execution_id = CharField(unique=True)
    execution_context = JSONField()
    # Custom serializer handles the project's non-JSON-native types.
    blueprint = JSONField(dumps=blue_json_dumps)


class BlueprintInstructionStateModel(BaseModel):
Ejemplo n.º 16
0
"""pgpool-pyui model"""
import logging

from peewee import Model, Proxy, \
    CharField, DateTimeField, SmallIntegerField, BooleanField, ForeignKeyField
from playhouse.db_url import connect

LOG = logging.getLogger(__name__)

# Placeholder; initialized with the configured database at startup.
DATABASE = Proxy()


class BaseModel(Model):
    """Base model for all entities"""
    class Meta:
        """Meta class needed for peewee"""
        # All entities talk to the proxied database configured at runtime.
        database = DATABASE


class Account(BaseModel):
    """Database model for account"""
    auth_service = CharField()
    # The username doubles as the primary key.
    username = CharField(primary_key=True)
    password = CharField()
    last_modified = DateTimeField()
    system_id = CharField()
    level = SmallIntegerField()
    banned = BooleanField()
    shadowbanned = BooleanField()
    lures = SmallIntegerField()
Ejemplo n.º 17
0
from peewee import SqliteDatabase, Model, Proxy, CharField  # type: ignore
from peewee import BooleanField, ForeignKeyField, CompositeKey  # type: ignore

# Placeholder database; the real connection is supplied at runtime.
proxy = Proxy()


class BaseModel(Model):
    """Shared base binding all models to the proxied database."""
    class Meta(object):
        database = proxy


class WeaveEnvInstanceData(BaseModel):
    """One row per machine instance, keyed by machine id."""
    machine_id = CharField(primary_key=True)
    app_token = CharField()


class PluginData(BaseModel):
    """Installed plugin metadata, unique per (app_url, machine) pair."""
    app_url = CharField()
    name = CharField()
    description = CharField(default="")
    enabled = BooleanField(default=False)
    machine = ForeignKeyField(WeaveEnvInstanceData, backref='plugins')

    class Meta:
        # Composite key: the same plugin URL may exist on several machines.
        primary_key = CompositeKey('app_url', 'machine')


class PluginsDatabase(object):
    """Wraps the SQLite connection backing the plugin models."""
    def __init__(self, path):
        # Connection presumably handed to proxy.initialize() elsewhere —
        # confirm against the rest of the module.
        self.conn = SqliteDatabase(path)
Ejemplo n.º 18
0
    def test(self):
        """Basic sync Processor run over 25 rows with a unique key: every
        row must be emitted exactly once, in id order."""

        db = self.get_sqlite_db()

        # Re proxy to avoid previous test use
        SyncManager._meta.database = Proxy()

        SyncManager.init_db(db)

        SyncManager.create_table()

        class TestModel(Model):

            value = IntegerField()

            @classmethod
            def get_value(cls, item):
                return item.value

            @classmethod
            def get_key(cls, item):
                return item.id

            @classmethod
            def select_since_id(cls, since, limit, offset):
                # Offset is accepted but unused: with a unique key the
                # processor never needs to page past duplicates.
                q = cls.select().where(cls.id > since)

                if limit:
                    q = q.limit(limit)

                return q

            class Meta:
                database = db

        TestModel.create_table()

        sync_manager = get_sync_manager(app="test", start=0, test=None)

        output = []

        # Side effect: records every row the iterator converts.
        def row_output(model):
            data = {'id': model.id, 'value': model.value}
            output.append(data)
            return data

        for i in range(25):
            TestModel.create(id=i + 1, value=i + 1)

        self.assertEqual(25, TestModel.select().count())

        iteration = 0

        def process(it):
            nonlocal iteration
            iteration += 1
            for x in it:
                log.debug("process it={} id={}".format(iteration, x['id']))

        def it(since, limit, offset):
            log.debug("it since={} limit={} offset={}".format(
                since, limit, offset))
            q = TestModel.select_since_id(since, limit=limit, offset=offset)
            return LastOffsetQueryIterator(q.iterator(),
                                           row_output_fun=row_output,
                                           key_fun=TestModel.get_key,
                                           is_unique_key=True)

        processor = Processor(sync_manager=sync_manager,
                              it_function=it,
                              process_function=process,
                              sleep_duration=0)

        processor.process(limit=10, i=5)

        # 25 rows, each exactly once, ascending ids.
        self.assertEqual(len(output), 25)

        self.assertEqual(output[0]['id'], 1)
        self.assertEqual(output[-1]['id'], 25)
Ejemplo n.º 19
0
    def test_offset_processing(self):
        """AsyncProcessor must page past a 'hump' of 25 identical key values
        (x == 50) using the offset, with a non-unique key function."""

        db = self.get_mysql_db()

        # Re proxy to avoid previous test use
        SyncManager._meta.database = Proxy()

        # Init/Create in sync mode
        SyncManager.init_db(db)
        SyncManager.create_table()

        # Clear out from previous test run
        SyncManager.delete().execute()

        sync_manager = get_sync_manager(app="test-async",
                                        start=0,
                                        db=db,
                                        set_async=True)

        # 15 regular, 25 @ 50 (ie the "hump"), 10 afterwards
        items = list(range(15)) + list([50 for _ in range(25)]) + list(
            range(55, 65))
        items = [{'id': i + 1, 'x': x} for i, x in enumerate(items)]

        async def it(since=0, limit=0, offset=0):

            log.debug("Getting iterator since={} limit={} offset={}".format(
                since, limit, offset))

            # Emulates a since/limit/offset query over the in-memory items.
            def dummy():
                nonlocal items
                nonlocal limit
                nonlocal offset

                for item in items:
                    if item['x'] < since:
                        continue

                    if offset > 0:
                        offset -= 1
                        continue

                    limit -= 1
                    if limit < 0:
                        break

                    yield item

            return LastOffsetQueryIterator(dummy(),
                                           row_output_fun=lambda x: x,
                                           key_fun=lambda x: x['x'],
                                           is_unique_key=False)

        output = []

        async def process(it):
            nonlocal output
            for item in it:
                output.append(item)
                log.debug("process item: {}".format(item))

        processor = AsyncProcessor(sync_manager=sync_manager,
                                   it_function=it,
                                   process_function=process,
                                   object=Manager(db, loop=None))

        async def consume():
            await processor.process(limit=10, i=8)

        asyncio.get_event_loop().run_until_complete(consume())

        # todo: cache to avoid dup values?
        # Fixed: the original used assertTrue(len(output), 59), where 59 was
        # silently treated as the assertion *message*, so the count was never
        # actually checked.
        self.assertEqual(len(output), 59)

        unique_values = list(set([x['x'] for x in output]))

        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(unique_values, [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 50, 55, 56, 57,
            58, 59, 60, 61, 62, 63, 64
        ])

        ids = list(set([x['id'] for x in output]))

        self.assertEqual(len(ids), 50)
        self.assertEqual(ids[0], 1)
        self.assertEqual(ids[-1], 50)
Ejemplo n.º 20
0
    def test_offset_processing(self):
        """Sync Processor must page past a 'hump' of 25 rows sharing the
        value 50 by using the offset (non-unique key)."""

        db = self.get_sqlite_db()

        # Re proxy to avoid previous test use
        SyncManager._meta.database = Proxy()

        SyncManager.init_db(db)

        SyncManager.create_table()

        class TestModel(Model):

            value = IntegerField()

            @classmethod
            def get_value(cls, item):
                return item.value

            @classmethod
            def get_key(cls, item):
                return item.value

            @classmethod
            def select_since_value(cls, since, limit, offset):
                q = cls.select().where(cls.value > since)

                if limit:
                    q = q.limit(limit)

                if offset:
                    q = q.offset(offset)

                log.debug(q.sql())
                return q

            class Meta:
                database = db

        TestModel.create_table()

        sync_manager = get_sync_manager(app="test", start=-1, test=None)

        output = []

        def row_output(model):
            data = {'id': model.id, 'value': model.value}
            output.append(data)
            return data

        # Create 15 regular records
        for i in range(15):
            TestModel.create(value=i)

        # Now add 25 with same value (ie an "hump" that will require "offset" to get over)
        for i in range(25):
            TestModel.create(value=50)

        # And a final few
        for i in range(10):
            TestModel.create(value=51 + i)

        self.assertEqual(50, TestModel.select().count())

        iteration = 0

        def process(it):
            nonlocal iteration
            iteration += 1
            for x in it:
                log.debug("process it={} id={} value={}".format(
                    iteration, x['id'], x['value']))

        # Note: is_unique_key=False (ie multiple same value may exist (eg same "lastModified" due to bulk update for example)
        def it(since, limit, offset):
            log.debug("it since={} limit={} offset={}".format(
                since, limit, offset))
            q = TestModel.select_since_value(since, limit=limit, offset=offset)
            return LastOffsetQueryIterator(q.iterator(),
                                           row_output_fun=row_output,
                                           key_fun=TestModel.get_key,
                                           is_unique_key=False)

        processor = Processor(sync_manager=sync_manager,
                              it_function=it,
                              process_function=process,
                              sleep_duration=0)

        processor.process(limit=10, i=10)

        # is_unique_key=False reduces in duplicate values when we hit the offset limit
        # todo: cache to avoid dup values?
        # Fixed: the original used assertTrue(len(output), 56), where 56 was
        # silently treated as the assertion *message*, so the count was never
        # actually checked.
        self.assertEqual(len(output), 56)

        value_ids = list(set([x['value'] for x in output]))
        # 0-14, 50, 51-60
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(value_ids, [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 50, 51, 52, 53,
            54, 55, 56, 57, 58, 59, 60
        ])

        ids = list(set([x['id'] for x in output]))

        self.assertEqual(len(ids), 50)
        self.assertEqual(ids[0], 1)
        self.assertEqual(ids[-1], 50)
Ejemplo n.º 21
0
from math import floor
from peewee import Model
from peewee import Proxy
from peewee import Float
from peewee import Integer
from peewee import PrimaryKeyField
from peewee import TextField

from datetime import datetime
from playhouse.postgres_ext import ArrayField
from playhouse.postgres_ext import JSONField

# Placeholder database for the vulnerability models; initialized at runtime.
vulner_db_proxy = Proxy()


def unify_dt(dt):
    """Round-trip *dt* through its second-resolution string form, discarding
    sub-second precision (and any tzinfo)."""
    fmt = '%Y-%m-%d %H:%M:%S'
    return datetime.strptime(dt.strftime(fmt), fmt)


def dt2str(dt):
    """Format *dt* as 'YYYY-MM-DD HH:MM:SS'."""
    # format() delegates to datetime.__format__, i.e. strftime.
    return format(dt, '%Y-%m-%d %H:%M:%S')


def str2dt(dts):
    """Parse a 'YYYY-MM-DD HH:MM:SS' string into a naive datetime."""
    fmt = '%Y-%m-%d %H:%M:%S'
    return datetime.strptime(dts, fmt)


def onlydigits(s):
    """Return *s* with every non-digit character removed.

    Bug fix: the original pattern ``"\\D\\.\\?"`` matched a non-digit followed
    by a literal '.' and '?' — not "all non-digits" as the name implies — and
    ``re`` was not imported in this module's visible imports (NameError).
    """
    import re  # local import: `re` is not imported at module level here
    return re.sub(r"\D", "", s)
Ejemplo n.º 22
0
from peewee import Proxy
from peewee import SqliteDatabase

# Placeholder filled in by OpenSqliteDatabase() below.
DATABASE_PROXY = Proxy()  # Create a proxy for our db.


def OpenSqliteDatabase(database_file):
    """Open *database_file* as a SqliteDatabase, point the module proxy at
    it, connect, and return the database handle."""
    db = SqliteDatabase(database_file)
    # Configure our proxy to use the db we specified in config.
    DATABASE_PROXY.initialize(db)
    db.connect()
    return db
#
# You should have received a copy of the GNU Lesser General Public License
# along with PyDefects.  If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=missing-docstring,too-few-public-methods
from peewee import (
    Model,
    TextField,
    ForeignKeyField,
    IntegerField,
    CompositeKey,
    DateTimeField,
    FloatField,
    Proxy,
)

# Placeholder; the real database is supplied via DATABASE_PROXY.initialize().
DATABASE_PROXY = Proxy()


class BaseModel(Model):
    """Binds every subclass to the runtime-initialized database proxy."""
    class Meta:
        database = DATABASE_PROXY


class Keyword(BaseModel):
    """A single keyword string."""
    keyword = TextField()


class License(BaseModel):
    """A single license string."""
    license = TextField()

Ejemplo n.º 24
0
from peewee import Model
from peewee import IntegerField
from peewee import ForeignKeyField
from peewee import TextField
from peewee import TextField
from peewee import PostgresqlDatabase, Proxy
from peewee import DateTimeField
from datetime import datetime

# Placeholder database; initialized with a real connection at runtime.
db_proxy = Proxy()


class DBModel(Model):
    """Shared base intended for models using the proxied database."""
    class Meta:
        database = db_proxy


class Account(Model):
    # NOTE(review): subclasses peewee Model directly and repeats the Meta
    # block instead of inheriting DBModel above — consider unifying.
    login = TextField()
    password = TextField()

    class Meta:
        database = db_proxy


class Messenger(Model):
    # NOTE(review): like Account, bypasses DBModel and repeats the Meta block.
    name = TextField()
    cost = IntegerField()

    class Meta:
        database = db_proxy
Ejemplo n.º 25
0
import os
import psycogreen.gevent
psycogreen.gevent.patch_psycopg()

from peewee import Proxy, OP, Model
from peewee import Expression
from playhouse.postgres_ext import PostgresqlExtDatabase

# Models register themselves here (via ModelBase.register) so init code can
# create all tables in one pass.
REGISTERED_MODELS = []

# Create a database proxy we can setup post-init
database = Proxy()

# Register a custom operator token with peewee for case-insensitive regex.
OP['IRGX'] = 'irgx'


def pg_regex_i(lhs, rhs):
    """Build an expression using the custom case-insensitive regex operator."""
    return Expression(lhs, OP.IRGX, rhs)


class ModelBase(Model):
    """Base model bound to the proxied database; hosts the registry hook."""
    class Meta:
        database = database

    @staticmethod
    def register(cls):
        # Class decorator: record the model so init code can create its table.
        REGISTERED_MODELS.append(cls)
        return cls


def init_db(env):
Ejemplo n.º 26
0
""" Database and storage related functions and classes """
import datetime
from enum import IntEnum
import functools
import sys
from flask import g
from flask_redis import FlaskRedis
from peewee import IntegerField, DateTimeField, BooleanField, Proxy, Model, Database
from peewee import CharField, ForeignKeyField, TextField, PrimaryKeyField
from werkzeug.local import LocalProxy
from .storage import file_url
from .config import config

rconn = FlaskRedis()

# Placeholder database; initialized during app setup (db_init_app).
dbp = Proxy()


def get_db():
    """Return the request-scoped database handle, connecting the proxy lazily
    and caching it on flask's per-request `g`."""
    if "db" not in g:
        if dbp.is_closed():
            dbp.connect()
        g.db = dbp
    return g.db


# `db` resolves to the per-request database at access time.
db = LocalProxy(get_db)


def db_init_app(app):
    dbconnect = dict(app.config["THROAT_CONFIG"].database)
Ejemplo n.º 27
0
from peewee_migrate import Router

from flask_restful import Resource
from flask_restful import request

from flask import g


BASE_PATH = "/api"
# Canned error messages returned to API clients.
not_found_message = "Requested resource does not exist on this server."
unauthorized_message = "User could not be authorized with the given credentials."
# Grammar fix in the user-facing message: "does not implements" -> "does not implement".
invalid_call_message = "This endpoint does not implement this method."
no_permission_message = "You don't have permission to access this resource on this server."

# Placeholder database; NOTE(review): Proxy is not imported in the visible
# imports here — presumably imported earlier in the file; confirm.
DB = Proxy()


class Singleton(type):
    """Metaclass that makes every class using it a per-class singleton."""

    _instances = {}

    def __call__(cls, *args, **kwargs):
        # Construct the real instance only on the first call; afterwards the
        # cached object is returned regardless of the arguments passed.
        if cls not in cls._instances:
            instance = super(Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = instance
        return cls._instances[cls]


# Base DB models
class BaseModel(peewee.Model):
    """ Peewee's Base model
Ejemplo n.º 28
0
 class Meta:
     # peewee model options: explicit table name plus a proxied database
     # that is initialized with the real connection at runtime.
     table_name = "sync_manager"
     database = Proxy()
Ejemplo n.º 29
0
# coding=utf-8

# write a base class wrap peewee and fields
from peewee import Proxy, Model

# Placeholder; call data_proxy.initialize(real_db) at startup.
data_proxy = Proxy()


class BaseModel(Model):
    """Base model binding subclasses to the module-level database proxy."""
    class Meta:
        # Bug fix: peewee reads the ``database`` Meta option; the original
        # used ``db``, which peewee ignores, leaving the models unbound.
        database = data_proxy
Ejemplo n.º 30
0

def add_entry(latitude, longitude, altitude, provider, accuracy, time):
    """Store one GPS fix, creating the named provider row on first use.

    Values are scaled to integers before storage: lat/lon * 1e8,
    altitude and accuracy * 10.
    """
    # Fetch-or-create the provider row. Bug fix: the original assigned the
    # raw SelectQuery in the existing-provider branch; .get() is needed to
    # obtain the model instance used as the foreign-key value.
    query = GPS_Provider.select().where(GPS_Provider.name == provider)
    if query.count() == 0:
        provider = GPS_Provider.create(name=provider)
    else:
        provider = query.get()
    GPS_Entry.create(latitude=int(D(latitude) * 10**8),
                     longitude=int(D(longitude) * 10**8),
                     altitude=int(D(altitude) * 10),
                     provider=provider,
                     accuracy=int(D(accuracy) * 10),
                     time=time)


# Placeholder database; initialized elsewhere at runtime.
dbProxy = Proxy()


class GPS_Provider(Model):
    """A named source of GPS fixes (e.g. a phone's location provider)."""
    name = TextField()

    class Meta:
        database = dbProxy


class GPS_Entry(Model):
    '''Note: This will always store time according to UTC zone'''
    altitude = IntegerField()  # * 10
    time = DateTimeField(formats=['%Y-%m-%dT%H:%M:%SZ'])
    latitude = IntegerField()  # * 1e8
    longitude = IntegerField()  # * 1e8