def init_database(self):
    """Run pending schema migrations, then open the runtime queue database."""
    # Migrations need a plain extension database; the queued database is
    # only used for normal runtime access afterwards.
    migration_handle = SqliteExtDatabase(self.config.database.path)

    # Drop peewee_migrate's default logging handlers before running.
    del logging.getLogger('peewee_migrate').handlers[:]
    Router(migration_handle).run()
    migration_handle.close()

    # All runtime access goes through the background write queue.
    self.db = SqliteQueueDatabase(self.config.database.path)
    self.db.bind([Event])
def __open_database():
    """Create the data directory if needed and open the global queue database."""
    global _db
    data_dir = get_data_dir()
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
    _db = SqliteQueueDatabase(
        os.path.join(data_dir, "cozy.db"),
        queue_max_size=128,
        results_timeout=15.0,
        timeout=15.0,
        pragmas=[
            ('cache_size', -1024 * 32),  # 32 MB page cache (negative = KiB)
            ('journal_mode', 'wal'),     # readers don't block the writer
        ],
    )
def __init__(self, config):
    """Load configuration and open the queue database.

    Args:
        config: a ready-made configuration dict, or a path to a YAML file
            to load one from.

    Raises:
        TypeError: if *config* is neither a dict nor a str.
        KeyError: if the configuration has no 'name' entry.
    """
    if isinstance(config, dict):
        self.config = config
    elif isinstance(config, str):
        with open(config, 'r') as ymlfile:
            # safe_load: yaml.load without an explicit Loader is deprecated
            # and can execute arbitrary code on untrusted input.
            self.config = yaml.safe_load(ymlfile)
    else:
        # Previously fell through silently, leaving self.config unset and
        # failing on the next line with a confusing AttributeError.
        raise TypeError(
            f"config must be a dict or a path string, got {type(config).__name__}"
        )
    # Create database and connect to it
    self.db = SqliteQueueDatabase(self.config['name'])
def init_database(self):
    """Relocate a legacy database file if present, run schema migrations,
    and open the runtime queue database."""
    db_path = self.config.database.path

    # Migrate DB location: move the old clips-dir database into the
    # configured location, but never clobber an existing file.
    legacy_path = os.path.join(CLIPS_DIR, "frigate.db")
    if os.path.isfile(legacy_path) and not os.path.isfile(db_path):
        os.rename(legacy_path, db_path)

    # Migrate DB schema with a plain extension database; silence
    # peewee_migrate's default log handlers first.
    migration_handle = SqliteExtDatabase(db_path)
    del logging.getLogger("peewee_migrate").handlers[:]
    Router(migration_handle).run()
    migration_handle.close()

    # Runtime access goes through the background write queue.
    self.db = SqliteQueueDatabase(db_path)
    self.db.bind([Event, Recordings])
def create_model(file_path, file_name):
    """Open (or create) a per-file message database and return its Message model.

    The database file is ``file_path + file_name + '.db'``; the table is
    created on first use.
    """
    queue_db = SqliteQueueDatabase(file_path + file_name + '.db')

    class BaseModel(Model):
        # Binds every subclass to this connection.
        class Meta:
            database = queue_db

    class Message(BaseModel):
        # One row per tracked message; the file_* columns stay NULL for
        # messages without an attachment.
        message_id = IntegerField()
        message_text = TextField()
        modified_text = TextField(null=True)
        posted = BooleanField(default=False)
        message_type = TextField()
        file_name = CharField(null=True)
        file_size = IntegerField(null=True)

    queue_db.connect()
    queue_db.create_tables([Message])
    return Message
def prepare_db():
    """Build a throwaway cozy database for tests.

    Returns a ``(db_path, models, database)`` triple; the database is
    bound, connected, and has all tables created.
    """
    from playhouse.sqliteq import SqliteQueueDatabase

    from cozy.db.artwork_cache import ArtworkCache
    from cozy.db.book import Book
    from cozy.db.offline_cache import OfflineCache
    from cozy.db.settings import Settings
    from cozy.db.storage import Storage
    from cozy.db.storage_blacklist import StorageBlackList
    from cozy.db.track import Track

    models = [
        Track,
        Book,
        Settings,
        ArtworkCache,
        Storage,
        StorageBlackList,
        OfflineCache,
    ]

    print("Setup database...")
    db_path = '/tmp/cozy_test.db'
    database = SqliteQueueDatabase(db_path, pragmas=[('journal_mode', 'wal')])
    database.bind(models, bind_refs=False, bind_backrefs=False)
    database.connect()
    database.create_tables(models)

    # Restart the writer thread so the queue starts from a clean state.
    database.stop()
    database.start()
    return db_path, models, database
import cozy.filesystem_monitor

# Schema version of this database layout.
DB_VERSION = 6

# first we get the data home and find the database if it exists
data_dir = os.path.join(GLib.get_user_data_dir(), "cozy")
log.debug(data_dir)
if not os.path.exists(data_dir):
    os.makedirs(data_dir)

# update is True when a database file already exists (upgrade path),
# False on a fresh install.
update = None
if os.path.exists(os.path.join(data_dir, "cozy.db")):
    update = True
else:
    update = False

# WAL journal mode lets readers proceed while the queue worker writes.
db = SqliteQueueDatabase(os.path.join(data_dir, "cozy.db"), pragmas=[('journal_mode', 'wal')])


class ModelBase(Model):
    """
    The ModelBase is the base class for all db tables.
    """
    class Meta:
        """
        The Meta class encapsulates the db object
        """
        database = db


class Book(ModelBase):
    """
def database_connection(db_name):
    """Open a queued SQLite connection for the named database."""
    # database_dir() resolves the logical name to a filesystem path.
    return SqliteQueueDatabase(database_dir(db_name))
from peewee import *
from playhouse.sqliteq import SqliteQueueDatabase

# All writes are funnelled through a single background worker thread.
db = SqliteQueueDatabase("database/db.db")


class Global(Model):
    # Per-user account data, stored in the 'Users' table.
    id = PrimaryKeyField(null=False)
    user_id = IntegerField(null=False)
    balance = IntegerField(default=0)
    # NOTE(review): a DateTimeField with default='' is suspicious — confirm
    # whether empty string is intended to mean "no license".
    license = DateTimeField(default='')
    adm = BooleanField(default=False)

    class Meta:
        db_table = 'Users'
        database = db


class WhiteNumber(Model):
    # Whitelisted numbers, keyed back to a user via user_id.
    id = PrimaryKeyField(null=False)
    number = IntegerField(null=False)
    user_id = IntegerField(null=False)

    class Meta:
        db_table = 'Numbers'
        database = db


def con():
    try:
        db.connect()
import datetime
import hashlib
import json
import logging

import arrow
from builtins import *
from builtins import object
from dateutil.tz import tzutc
from playhouse.migrate import *
from playhouse.sqliteq import SqliteQueueDatabase

logger = logging.getLogger('root')

# Deferred database: opened with no filename and autostart=False, so the
# caller must supply the path and start the writer thread explicitly.
db = SqliteQueueDatabase(None, autostart=False, results_timeout=20.0)

# Schema version of this module's tables.
DATABASE_VERSION = 16


class JSONField(TextField):
    """Store JSON-serialisable values in a text column."""

    db_field = "text"

    def db_value(self, value):
        # Python object -> JSON string on the way into the database.
        return json.dumps(value)

    def python_value(self, value):
        # JSON string -> Python object on the way out.
        return json.loads(value)


class DateTimeUTCField(DateTimeField):
# -*- coding: utf-8 -*- """ Models to store scraped data in a database. """ import inspect from peewee import SqliteDatabase, Model from peewee import CompositeKey, FloatField, IntegerField, TextField from playhouse.sqliteq import SqliteQueueDatabase # db = SqliteDatabase("delancey.db") db = SqliteQueueDatabase("delancey.db", autostart=True) class BaseModel(Model): class Meta: database = db def __getitem__(self, key): return getattr(self, key) def __setitem__(self, key, value): return setattr(self, key, value) def __delitem__(self, key): return setattr(self, key, None) @classmethod
# This module contains models of your DB import datetime from peewee import * from playhouse.sqliteq import SqliteQueueDatabase from config import Config __sp = r"-\|/-\|/" # this thingie used as spinner # You can choose other types of DB, supported by peewee db_handle = SqliteQueueDatabase( Config.DB_FILENAME, use_gevent=False, autostart=True, queue_max_size=128, ) def stop_db() -> bool: try: db_handle.commit() db_handle.stop() x = 0 while not db_handle.is_stopped(): print("Closing database...", __sp[x % 8], end="\r") x += 1 continue print("Closing database... ok") except InternalError as e: print(e) return False
from peewee import *
from playhouse.sqliteq import SqliteQueueDatabase

db = SqliteQueueDatabase('db.sqlite3',
                         use_gevent=False,  # Use the standard library "threading" module.
                         autostart=False,  # The worker thread now must be started manually.
                         queue_max_size=64)

# NOTE(review): looks like a token/key baked into the source — verify whether
# this should live in configuration instead.
ALAMER = "df548f-61ac83-624ea4"


class Items(Model):
    # Item record: external item id and its cost.
    item_id = IntegerField()
    cost = IntegerField()

    class Meta:
        database = db


# autostart=False above, so start the writer thread and connect here,
# at import time.
db.start()
db.connect()

# db.drop_tables([Items])
# db.create_tables([Items])
# db.stop()
# (Seconds) how long a user must wait in between messaging the bot LAST_MSG_TIME = 1 # How many messages consider a user rain eligible LAST_MSG_RAIN_COUNT = 5 # (Seconds) How spaced out the messages must be LAST_MSG_RAIN_DELTA = 60 # How many words messages must contain LAST_MSG_RAIN_WORDS = 3 # (Seconds) how long user must wait between tiprandom TIP_RANDOM_WAIT=10 # (Seconds) how long user must wait between tipfavorites TIP_FAVORITES_WAIT=150 db = SqliteQueueDatabase('discord.db') logger = util.get_logger("db") ### User Stuff def get_user_by_id(user_id): try: user = User.get(user_id=str(user_id)) return user except User.DoesNotExist: # logger.debug('user %s does not exist !', user_id) return None def get_user_by_wallet_address(address): try: user = User.get(wallet_address=address)
# -*- coding: utf-8 -*- """ Models to store scraped data in a database. """ import inspect from peewee import SqliteDatabase, Model from peewee import CompositeKey, FloatField, IntegerField, TextField from playhouse.sqliteq import SqliteQueueDatabase db = SqliteQueueDatabase("leetcode.db", autostart=True) class BaseModel(Model): class Meta: database = db def __getitem__(self, key): return getattr(self, key) def __setitem__(self, key, value): return setattr(self, key, value) def __delitem__(self, key): return setattr(self, key, None) @classmethod def primary_keys(cls):
from datetime import datetime

import peewee
from peewee import DoesNotExist
from playhouse.sqliteq import SqliteQueueDatabase

from config import config
from utils import get_message_content

# The file path comes from configuration; thread_safe allows use from
# multiple threads.
db = SqliteQueueDatabase(database=config["db"]["messages"].get(str), thread_safe=True)


class BaseModel(peewee.Model):
    class Meta:
        database = db


class User(BaseModel):
    # Sender identity fields; only uid is required.
    uid = peewee.IntegerField()
    username = peewee.CharField(null=True)
    first_name = peewee.CharField(null=True)
    last_name = peewee.CharField(null=True)

    @classmethod
    def from_message(cls, message):
        """Create and persist a User row from a message's sender."""
        return cls.create(
            uid=message.from_user.id,
            username=message.from_user.username,
            first_name=message.from_user.first_name,
            last_name=message.from_user.last_name,
        )
# Ensure the backup directory exists, then archive the database directory
# into a zip named after today's date.
backup_path.mkdir(parents=True, exist_ok=True)

zip_name = DT.datetime.today().strftime(date_fmt)
zip_name = backup_path / zip_name
shutil.make_archive(zip_name, 'zip', DB_DIR_NAME)

# This works with multithreading.
# SOURCE: http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#sqliteq
db = SqliteQueueDatabase(
    DB_FILE_NAME,
    pragmas={
        'foreign_keys': 1,
        'journal_mode': 'wal',    # WAL-mode
        'cache_size': -1024 * 64  # 64MB page-cache
    },
    use_gevent=False,    # Use the standard library "threading" module.
    autostart=True,
    queue_max_size=64,   # Max. # of pending writes that can accumulate.
    results_timeout=5.0  # Max. time to wait for query to be executed.
)


class BaseModel(Model):
    class Meta:
        database = db

    def __str__(self):
        # Collect "field=value" pairs for a readable representation.
        fields = []
        for k, field in self._meta.fields.items():
            v = getattr(self, k)
import datetime

import util
from peewee import *
from playhouse.sqliteq import SqliteQueueDatabase

db = SqliteQueueDatabase('bananodiscord.db')

logger = util.get_logger("db")

### User Stuff

def add_new_request(user):
    # Shift the user's four-slot faucet-request history down one and record
    # the current time; creates the row and retries once for unknown users.
    try:
        # NOTE(review): bare `user_id` here is not FaucetUser.user_id — verify
        # this name is defined at module level, otherwise this raises NameError
        # instead of building a peewee filter expression.
        fuser = FaucetUser.get(user_id == user.user_id)
        fuser.last_request_1 = fuser.last_request_2
        fuser.last_request_2 = fuser.last_request_3
        fuser.last_request_3 = fuser.last_request_4
        fuser.last_request_4 = datetime.datetime.now()
        fuser.request_count = fuser.request_count + 1
        fuser.save()
    except FaucetUser.DoesNotExist:
        create_fuser(user.user_id, user.user_name)
        add_new_request(user)
    return None

def get_first_request(user):
    # Oldest recorded request timestamp for the user.
    try:
        fuser = FaucetUser.get(user_id == user.user_id)
        return fuser.last_request_1
    except FaucetUser.DoesNotExist:
Initialize the file storage class for a given path :param root_path: the root path on the local file system for where to store files """ self._root_path = root_path def _validate_setup(self): if self._root_path is None: raise ValueError("root_path is not set, call init first") database = SqliteQueueDatabase( None, use_gevent=False, autostart=False, queue_max_size=128, results_timeout=30, ) storage = FileStorage() class BaseModel(Model): """ Base peewee model all DB models must extend from """ class Meta(object): database = database storage = storage
from datetime import datetime

import peewee
from peewee import DoesNotExist
from playhouse.sqliteq import SqliteQueueDatabase

from config import config
from utils import get_message_content

# Queue database with WAL journalling, a 64 MB page cache (negative
# cache_size means KiB), and foreign-key enforcement enabled; the file
# path comes from configuration.
db = SqliteQueueDatabase(database=config["db"]["messages"].get(str),
                         thread_safe=True,
                         pragmas={
                             'journal_mode': 'wal',
                             'cache_size': -1024 * 64,
                             'foreign_keys': 1
                         })


class BaseModel(peewee.Model):
    class Meta:
        database = db


class User(BaseModel):
    # Sender identity fields; only uid is required.
    uid = peewee.IntegerField()
    username = peewee.CharField(null=True)
    first_name = peewee.CharField(null=True)
    last_name = peewee.CharField(null=True)

    @classmethod
    def from_message(cls, message):
from peewee import Model, IntegerField, CharField, TextField
from playhouse.sqliteq import SqliteQueueDatabase

db = SqliteQueueDatabase("bot.db")


class BaseModel(Model):
    class Meta:
        database = db


class Quote(BaseModel):
    """Represents a Quote Message for Discord.

    Fields:
        guildId: int
        keyword: char
        result: text
        authorId: int"""
    guildId = IntegerField()
    keyword = CharField()
    result = TextField(null=False)
    authorId = IntegerField(null=False)


# Composite index over (guildId, keyword).
Quote.add_index(Quote.guildId, Quote.keyword)
import time

from peewee import Model, IntegerField, TextField, TimestampField
from playhouse.sqlite_ext import JSONField
from playhouse.sqliteq import SqliteQueueDatabase

db = SqliteQueueDatabase('bot-db.sqlite3', pragmas={
    'journal_mode': 'wal',
    'foreign_keys': 1
})


class Thing(Model):
    # This table is not meant to represent a complete relationship of
    # submissions/comments on reddit. Its behaviour is more of a log to
    # track submissions and comments that have had replies attempted and
    # prevent replying twice. It also acts as a job queue of sorts, for
    # the model text generator daemon.

    # timestamp representation of when this record was entered into the database
    created_utc = TimestampField(default=time.time, utc=True)

    # the praw *name* of the original comment or submission,
    # where t3_ prefix = submission, t1_ prefix = comment
    source_name = TextField()

    # json object of the model parameters, passed into the generator daemon function
    text_generation_parameters = JSONField(null=True)