def db_connect(path, type):
    """Open a connection to the database at `path`.

    :param path: filesystem path of the SQLite database file
    :param type: 'peewee' for a playhouse APSWDatabase wrapper,
                 'raw' for a bare apsw.Connection
    :return: the connected database object
    :raises ValueError: if `type` is not recognized
    """
    # NOTE: `type` shadows the builtin, but renaming it would break
    # keyword callers, so the parameter name is preserved.
    if type == 'peewee':
        connection = APSWDatabase(
            path,
            autorollback=True,
            journal_mode='WAL',
            timeout=BUSY_TIMEOUT,
        )
    elif type == 'raw':
        connection = apsw.Connection(
            path,
            flags=(apsw.SQLITE_OPEN_READWRITE
                   | apsw.SQLITE_OPEN_CREATE
                   | apsw.SQLITE_OPEN_WAL),
        )
        connection.setbusytimeout(BUSY_TIMEOUT)
    else:
        raise ValueError('Unknown database type: %r' % type)

    log.debug('Connected to database at %r', path)
    return connection
class TestORMTempDB(unittest.TestCase):
    """
    Uses a temporary in-memory DB, so changes won't touch actual DB
    """
    # Class-level in-memory database; bound to the models per-test via
    # the test_database() context manager.
    test_db = APSWDatabase(':memory:')

    def setUp(self):
        logging.basicConfig(level=logging.DEBUG)

    def create_test_data(self):
        # Only call this within a with test_database block
        # Uses bogus values for required fields - would fail trip checks at Center.
        for i in range(10):
            Trips.create(trip=i, partial_trip='X', program=1,
                         trip_status='X', user=1, vessel=1)
        # Hauls 100-109; each fishing_activity_num is one greater than its id.
        for i in range(100, 110):
            FishingActivities.create(fishing_activity=i,
                                     fishing_activity_num=i + 1,
                                     data_quality='X', trip=1)

    def test_fakedb_select(self):
        # Select one known haul and verify the num > id invariant
        # established by create_test_data().
        with test_database(self.test_db, [FishingActivities, Trips, Settings]):
            self.create_test_data()
            query = FishingActivities.select().where(
                FishingActivities.fishing_activity == 105)
            self.assertGreater(len(query), 0)
            for q in query:
                logging.debug(
                    str(q.fishing_activity) + ': ' + str(q.fishing_activity_num))
                self.assertGreater(int(q.fishing_activity_num),
                                   int(q.fishing_activity))

    def test_settings_get(self):
        with test_database(self.test_db, [Settings]):
            Settings.create(parameter='first_run', value='FALSE')
            fr = Settings.get(Settings.parameter == 'first_run')
            logging.info(fr)
            self.assertEqual(fr.value.lower(), 'false')

    def test_settings_set(self):
        # Update a setting, then re-read it to confirm the change persisted.
        with test_database(self.test_db, [Settings]):
            Settings.create(parameter='first_run', value='FALSE')
            fr = Settings.get(Settings.parameter == 'first_run')
            fr.value = 'TRUE'
            fr.save()
            fr2 = Settings.get(Settings.parameter == 'first_run')
            self.assertNotEqual(fr2.value.lower(), 'false')
def set_sensors_proxy(self, db_file):
    """
    Method to set the value of self._sensors_proxy
    :param db_file: path to the sensors SQLite DB, or None/"" to clear the proxy
    :return: None
    """
    # An empty or missing path resets the proxy and notifies listeners.
    if db_file is None or db_file == "":
        self._sensors_proxy.initialize("")
        self.sensorsDbChanged.emit()
        return
    # Silently ignore non-string inputs (no signal emitted in this case).
    if not isinstance(db_file, str):
        return
    try:
        # database = SqliteDatabase(db_file, **{})
        database = APSWDatabase(db_file, **{})
        # database = APSWDatabase(db_file, timeout=5000)   # Sets the setbusytimeout keyword
        self._sensors_proxy.initialize(database)
        self.sensorsDbChanged.emit()
    except Exception as ex:
        # Best-effort: log the failure and fall back to an uninitialized proxy,
        # still emitting the change signal so observers refresh.
        logging.info('Error setting the sensor db proxy: {0} > {1}'.format(
            db_file, ex))
        self._sensors_proxy.initialize("")
        self.sensorsDbChanged.emit()
def init_db(db_path):
    """Initialize the given database

    Args:
        db_path (str): path to the database
    """
    # Bind the module-level `db` proxy to a WAL-mode APSW database.
    wal_pragmas = (("journal_mode", "wal"),)
    database = APSWDatabase(db_path, pragmas=wal_pragmas)
    db.initialize(database)
class TestObserverTrip(unittest.TestCase):
    """
    Note: any write/update interaction should be done with test_database...
    http://stackoverflow.com/questions/15982801/custom-sqlite-database-for-unit-tests-for-code-using-peewee-orm
    """
    test_db = APSWDatabase(':memory:')
    # Base fixture ids; a per-record offset (+t) is added below.
    vessel_id_test = 12850
    user_id_test = 1241
    prog_id_test = 1

    def setUp(self):
        logging.basicConfig(level=logging.DEBUG)

    def _create_test_trips(self):
        """ Note: intended to run with test_db, running alone will write to real DB
        Example of updating a trip in DB and in model
        """
        # Requires self.test (ObserverTrip) to have been set by the caller.
        for t in range(3):
            newtrip = self.test.create_trip(observer_id=self.user_id_test + t,
                                            vessel_id=self.vessel_id_test + t,
                                            program_id=self.prog_id_test)
            newtrip.save()

    def _create_test_data(self):
        """ Intended to run with test_db, before trips created """
        # Three vessels and users, plus one shared program.
        for t in range(3):
            Vessels.create(vessel=self.vessel_id_test + t, port=0,
                           vessel_name='Test Vessel {}'.format(t))
            Users.create(user=self.user_id_test + t,
                         first_name='User {}'.format(t),
                         last_name='Last', password='******', status=1)
        Programs.create(program=self.prog_id_test, program_name='Test Program')
        vess = Vessels.select()
        for p in vess:
            print('Created {}'.format(p.vessel_name))
        users = Users.select()
        for u in users:
            print('Created {}'.format(u.first_name))
        p = Programs.get(Programs.program == 1)
        print('Created {}'.format(p.program_name))

    def test_create(self):
        with test_database(self.test_db, [Trips, Vessels, Users, Programs]):
            self.test = ObserverTrip()
            self._create_test_data()
            self._create_test_trips()
            q = Trips.select()
            self.assertEqual(q.count(), 3)
def __init__(self):
    """Open the backing database appropriate for the current work mode."""
    config = DATABASE.copy()
    name = config.pop("name")
    if WORK_MODE == WorkMode.STANDALONE:
        # Standalone mode keeps everything in a local SQLite file.
        connection = APSWDatabase('fate_flow_sqlite.db')
    elif WORK_MODE == WorkMode.CLUSTER:
        # Cluster mode shares a pooled MySQL backend.
        connection = PooledMySQLDatabase(name, **config)
    else:
        raise Exception('can not init database')
    self.database_connection = connection
def __init__(self):
    """Pick SQLite (standalone) or pooled MySQL (cluster) as the backend."""
    config = DATABASE.copy()
    name = config.pop("name")
    if is_standalone:
        # Local SQLite file at the project base directory.
        from playhouse.apsw_ext import APSWDatabase
        sqlite_path = file_utils.get_project_base_directory("fate_sqlite.db")
        self.database_connection = APSWDatabase(sqlite_path)
    else:
        from playhouse.pool import PooledMySQLDatabase
        self.database_connection = PooledMySQLDatabase(name, **config)
def setUp(self):
    logging.basicConfig(level=logging.INFO)
    self.soap = ObserverSoap()
    self.user = '******'
    self.unhashed_pw = ''  # DO NOT COMMIT PW
    # Derive the hashed credential the sync controller expects.
    self.hashed_pw = self.soap.hash_pw(self.user, self.unhashed_pw)
    # NOTE(review): this points at the on-disk observer DB, not an
    # in-memory test DB - these tests touch real data; confirm intended.
    self.test_db = APSWDatabase('../data/observer.db')
    self.dbsync = ObserverDBSyncController()
    self.test_trip_id = 1
    self.test_user_id = ObserverUsers.get_user_id(self.user)
def __init__(self):
    """Open the fate_flow database for the configured work mode."""
    config = DATABASE.copy()
    name = config.pop("name")
    if WORK_MODE == WorkMode.STANDALONE:
        # Standalone: SQLite file inside the fate_flow package directory.
        sqlite_file = os.path.join(file_utils.get_python_base_directory(),
                                   'fate_flow', 'fate_flow_sqlite.db')
        self.database_connection = APSWDatabase(sqlite_file)
    elif WORK_MODE == WorkMode.CLUSTER:
        # Cluster: pooled MySQL shared across nodes.
        self.database_connection = PooledMySQLDatabase(name, **config)
    else:
        raise Exception('can not init database')
def __init__(self):
    """Open the database and record whether a local DB is in use."""
    config = DATABASE.copy()
    name = config.pop("name")
    if WORK_MODE == WorkMode.STANDALONE:
        self.database_connection = APSWDatabase('fate_flow_sqlite.db')
        # Flag set before logging in this branch (order preserved).
        RuntimeConfig.init_config(USE_LOCAL_DATABASE=True)
        stat_logger.info('init sqlite database on standalone mode successfully')
    elif WORK_MODE == WorkMode.CLUSTER:
        self.database_connection = PooledMySQLDatabase(name, **config)
        stat_logger.info('init mysql database on cluster mode successfully')
        RuntimeConfig.init_config(USE_LOCAL_DATABASE=False)
    else:
        raise Exception('can not init database')
def db_config():
    """Initialize the module-level db_connection from DATABASE settings."""
    global db_connection  #pylint: disable=W0603
    settings = DATABASE.copy()
    db_type = settings.pop("db_type")
    db_name = settings.pop("db_name")
    if db_type == DBTYPE.SQLITE:
        # SQLite path is optional in the settings; default to a local file.
        sqlite_path = settings.get('db_path', 'fedlearner.db')
        db_connection = APSWDatabase(sqlite_path)
        logging.debug('init sqlite database on standalone mode successfully')
    elif db_type == DBTYPE.MYSQL:
        db_connection = PooledMySQLDatabase(db_name, **settings)
        logging.debug('init mysql database on standalone mode successfully')
    else:
        raise Exception('can not init database')
def setUp(self):
    # TODO: Either phase out ObserverDB, or make it testable, as in ObserverDB(':memory:')
    # Tools available now are a Peewee test database context manager using an in-memory APSW database
    self.test_db = APSWDatabase(':memory:')
    # Tables bound to test_db for each test in this class.
    self.test_tables = (
        Vessels,
        Users,
        Programs,
        Trips,
        FishingActivities,
        CatchCategories,
        FishingLocations,
    )
    self.test_vessel_id = 1
    self.test_user_id = 1
    self.test_program_id = 1
    self.test_category_id = 1
    self.test_activity_num = 1  # aka Haul
    # Only one test dataset of locations used in this test class.
    # Here are the expected position number assignments by primary key id:
    self.expected_position_assignments = {
        20: -1,  # Last-entered has earliest date
        19: 1,
        18: 2,
        17: 3,
        16: 4,
        15: 5,
        14: 6,
        13: 7,
        12: 8,
        11: 9,
        10: 10,
        9: 11,
        8: 12,
        7: 13,
        6: 14,
        5: 15,
        4: 16,
        3: 17,
        2: 18,
        1: 0,  # Location first entered has latest date
    }
    logging.basicConfig(level=logging.DEBUG)
    # Turn on peewee's SQL logging, if desired, by commenting out next two statements.
    pwlogger = logging.getLogger('peewee')
    pwlogger.setLevel(logging.WARNING)
def __init__(self):
    """Choose SQLite (standalone) or pooled MySQL as the flow DB backend."""
    config = DATABASE.copy()
    name = config.pop("name")
    if IS_STANDALONE:
        # Local SQLite file at the project base directory.
        from playhouse.apsw_ext import APSWDatabase
        sqlite_path = file_utils.get_project_base_directory("fate_sqlite.db")
        self.database_connection = APSWDatabase(sqlite_path)
        RuntimeConfig.init_config(USE_LOCAL_DATABASE=True)
        stat_logger.info(
            'init sqlite database on standalone mode successfully')
    else:
        self.database_connection = PooledMySQLDatabase(name, **config)
        stat_logger.info(
            'init mysql database on cluster mode successfully')
def test_checksum_peewee_model_exception_not_possible_with_peewee(self):
    # Use a fresh DB (not the shared one) so NoPrimaryKeyTable exists in isolation.
    clean_test_db = APSWDatabase(':memory:')
    with test_database(clean_test_db, [NoPrimaryKeyTable]):
        self.assertTrue(NoPrimaryKeyTable.table_exists())
        # Should complete without raising, even though no PK was declared.
        ObserverDBUtil.checksum_peewee_model(NoPrimaryKeyTable, logging)
        """ No exception. That's because:
        "Because we have not specified a primary key, peewee will automatically add
        an auto-incrementing integer primary key field named id."
        (http://docs.peewee-orm.com/en/latest/peewee/models.html)
        """
        peewee_default_primary_key_field_name = 'id'
        primary_key_field = NoPrimaryKeyTable._meta.primary_key
        self.assertIsNotNone(primary_key_field)
        self.assertEqual(peewee_default_primary_key_field_name,
                         primary_key_field.name)
def test_schema_specifications():
    """Apply each filesystem migration in order; each must match its spec."""
    # Fresh in-memory database so every run starts from an empty schema.
    db = APSWDatabase(':memory:', autorollback=True, journal_mode='WAL',
                      timeout=3000)

    # Migration router + migrator bound to the in-memory database.
    router = Router(migrations_path, DATABASE=db)
    migrator = Migrator(db)

    for name in router.fs_migrations:
        # Execute migration, then validate the resulting schema against it.
        router.run_one(name, migrator)
        assert router.match() == name
def db_connect(name):
    """
    Performs database connection using database settings from settings.py.

    Configures WAL journaling and a 64 MiB page cache (negative cache_size
    is in KiB per SQLite convention), then connects.

    Args:
        name (str): path to the SQLite database file.

    Returns:
        APSWDatabase: the connected peewee database instance.
        (The previous docstring incorrectly said "sqlalchemy engine instance".)
    """
    # Deferred initialization: the path is supplied via init() below.
    database = APSWDatabase(None)
    # NOTE(review): APSW's busy timeout is expressed in milliseconds;
    # timeout=60 looks very short - confirm whether 60000 was intended.
    database.init(name,
                  timeout=60,
                  pragmas=(('journal_mode', 'wal'),
                           ('cache_size', -1024 * 64)))
    database.connect()
    #database.create_tables([AuthorFlair, Author, Url, Domain, Subreddit, Submission, SubmissionCommentIDs, Comment, SubmissionLinks, CommentLinks])
    return database
def prepare_db():
    """Create a fresh on-disk test database and bind the cozy models to it.

    Returns a (db_path, models, test_db) tuple for the caller to use
    and eventually tear down.
    """
    from playhouse.apsw_ext import APSWDatabase
    from cozy.db.artwork_cache import ArtworkCache
    from cozy.db.book import Book
    from cozy.db.offline_cache import OfflineCache
    from cozy.db.settings import Settings
    from cozy.db.storage import Storage
    from cozy.db.storage_blacklist import StorageBlackList
    from cozy.db.track import Track

    models = [Track, Book, Settings, ArtworkCache, Storage, StorageBlackList, OfflineCache]

    print("Setup database...")
    db_path = '/tmp/cozy_test.db'
    test_db = APSWDatabase(db_path, pragmas=[('journal_mode', 'wal')])
    # Bind only these models (no related/backref models pulled in).
    test_db.bind(models, bind_refs=False, bind_backrefs=False)
    test_db.connect()
    test_db.create_tables(models)

    return db_path, models, test_db
def __init__(self,
             api_key: str,
             lang: Mapping[str, str],
             theme_image_ids: tuple[str],
             keyboards: Mapping[str, InlineKeyboardMarkup],
             guesslang_syntaxes: Mapping[str, str],
             *args: Any,
             admin_chat_id: Optional[str] = None,
             db_path: str = str(local.path(__file__).up() / 'user_themes.sqlite'),
             **kwargs: Any):
    """Set up the theme bot: localization, keyboards, and per-user theme storage.

    :param api_key: Telegram bot API token (forwarded to TeleBot).
    :param lang: localized message strings keyed by message name.
    :param theme_image_ids: image ids for the available themes.
    :param keyboards: inline keyboard markups keyed by name.
    :param guesslang_syntaxes: syntax names used by the language guesser.
    :param admin_chat_id: optional chat to receive admin messages.
    :param db_path: SQLite file for per-user theme choices; defaults to
        'user_themes.sqlite' next to this module (path computed at import time).
    Extra *args/**kwargs are passed through to TeleBot.
    """
    self.lang = lang
    self.theme_image_ids = theme_image_ids
    self.kb = keyboards
    self.guesslang_syntaxes = guesslang_syntaxes
    self.admin_chat_id = admin_chat_id
    self.db_path = db_path
    # Persistent user_id -> theme-name mapping backed by APSW/SQLite.
    self.user_themes = KeyValue(key_field=IntegerField(primary_key=True),
                                value_field=CharField(),
                                database=APSWDatabase(db_path))
    self.log = mk_logger()
    self.bot = TeleBot(api_key, *args, **kwargs)
    self.register_handlers()
    self.guesser = Guess()
def KeyValueDatabase(db_name, **kwargs):
    """Create an APSW-backed database for key-value storage.

    Generalized (backward-compatibly) to forward extra options to
    APSWDatabase, matching the sibling variant of this factory.

    Args:
        db_name: path to the SQLite database file.
        **kwargs: optional APSWDatabase options (e.g. timeout, pragmas).
            Omitting them preserves the original behavior exactly.

    Returns:
        APSWDatabase: an unopened peewee database instance.
    """
    return APSWDatabase(db_name, **kwargs)
def __init__(self, output_dir, **kwargs):
    """Bind the module-level `db` proxy to the database under `output_dir`."""
    self.output_dir = output_dir
    self.db_path = get_db_path(output_dir)
    database = APSWDatabase(self.db_path)
    db.initialize(database)
from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase
from playhouse.apsw_ext import APSWDatabase

# NOTE: `database` is rebound three times below; only the final APSW
# binding is in effect for the models that follow.
# Standard SQLite:
database = SqliteDatabase('data/test.sqlite3')
# Playhouse extensions:
database = SqliteExtDatabase('data/test.sqlite3',
                             # journal_mode='WAL'
                             )
# APSW SQLite Driver, which is faster and provides:
# "Connections can be shared across threads without any additional locking"
database = APSWDatabase('data/test.sqlite3', timeout=1000)


class UnknownField(object):
    # Placeholder emitted by pwiz for column types it cannot map.
    def __init__(self, *_, **__):
        pass


class BaseModel(Model):
    class Meta:
        # All models share the module-level APSW database.
        database = database


class TestTable(BaseModel):
    last_written_date = TextField(db_column='LAST_WRITTEN_DATE', null=True)
    test_table = PrimaryKeyField(db_column='TEST_TABLE_ID')
    text_value = TextField(db_column='TEXT_VALUE', null=True)

    class Meta:
        db_table = 'TEST_TABLE'
def KeyValueDatabase(db_name, **kwargs):
    """Return an APSWDatabase for `db_name`, forwarding any extra options."""
    database_options = dict(kwargs)
    return APSWDatabase(db_name, **database_options)
def peewee_database():
    """Generator: build and seed a cozy test DB, yield it, then tear down.

    Yields the connected APSWDatabase; on resume, drops tables, closes the
    connection, and removes the file.
    """
    from playhouse.apsw_ext import APSWDatabase
    from cozy.db.artwork_cache import ArtworkCache
    from cozy.db.book import Book
    from cozy.db.offline_cache import OfflineCache
    from cozy.db.settings import Settings
    from cozy.db.storage import Storage
    from cozy.db.storage_blacklist import StorageBlackList
    from cozy.db.track import Track

    models = [Track, Book, Settings, ArtworkCache, Storage, StorageBlackList, OfflineCache]

    print("Setup database...")
    db_path = '/tmp/cozy_test.db'
    test_db = APSWDatabase(db_path, pragmas=[('journal_mode', 'wal')])
    # Bind only these models (no related/backref models pulled in).
    test_db.bind(models, bind_refs=False, bind_backrefs=False)
    test_db.connect()
    test_db.create_tables(models)

    # Seed from JSON fixtures located next to this file.
    path_of_test_folder = os.path.dirname(os.path.realpath(__file__)) + '/'

    with open(path_of_test_folder + 'books.json') as json_file:
        book_data = json.load(json_file)

    with open(path_of_test_folder + 'tracks.json') as json_file:
        track_data = json.load(json_file)

    Book.insert_many(book_data).execute()
    # Insert tracks in chunks of 25 rows per statement.
    for chunk in chunks(track_data, 25):
        Track.insert_many(chunk).execute()

    print("Provide database...")
    yield test_db

    print("Teardown database...")
    test_db.drop_tables(models)
    test_db.close()
    os.remove(db_path)
from playhouse.apsw_ext import APSWDatabase

import apsw
import logging
import os

log = logging.getLogger(__name__)

# Locate "com.plexapp.plugins.library.db"
if os.environ.get('LIBRARY_DB'):
    db_path = os.path.abspath(os.environ['LIBRARY_DB'])
else:
    # Fixed: Logger.warn is a deprecated alias (removed in Python 3.13);
    # Logger.warning is the supported spelling.
    log.warning('Unable to locate plex database')

    db_path = None

# Connect to database (read-only, WAL journal mode)
if db_path:
    log.debug('Connecting to %r', db_path)

    db = APSWDatabase(db_path, flags=apsw.SQLITE_OPEN_READONLY, journal_mode='WAL')
else:
    db = None
# -*- coding: utf-8 -*- import datetime from peewee import Model, CharField, ForeignKeyField from playhouse.apsw_ext import APSWDatabase, DateTimeField db = APSWDatabase(None) class BaseModel(Model): class Meta: database = db class Feed(BaseModel): url = CharField() save_path = CharField() title = CharField(default='No name') create_time = DateTimeField(default=datetime.datetime.now) keywords = CharField(default='[]') last_check = DateTimeField(default=datetime.datetime.now) last_add = DateTimeField(default=datetime.datetime.now) class Item(BaseModel): title = CharField() magnet_link = CharField() feed = ForeignKeyField(Feed) seen_time = DateTimeField(default=datetime.datetime.now) publish_time = DateTimeField()
class TestObserverDBUtil(unittest.TestCase):
    """Tests for ObserverDBUtil settings, checksum, and coercion helpers.

    Note: any write/update interaction should be done with test_database...
    http://stackoverflow.com/questions/15982801/custom-sqlite-database-for-unit-tests-for-code-using-peewee-orm

    Fixed: assertNotEquals (deprecated alias, removed in Python 3.12)
    replaced with assertNotEqual.
    """
    # Shared in-memory DB; bound to models per-test via test_database().
    test_db = APSWDatabase(':memory:')

    def setUp(self):
        logging.basicConfig(level=logging.DEBUG)
        self._logger = logging.getLogger(__name__)
        # Shut up peewee debug and info messages. Comment out setLevel below to get them
        peewee_logger = logging.getLogger('peewee')
        peewee_logger.setLevel(logging.WARNING)

    def test_save_get_setting(self):
        # Saving twice under the same name should overwrite, not duplicate.
        with test_database(self.test_db, [Settings]):
            self.assertTrue(ObserverDBUtil.db_save_setting('TestSettingParam', '1234'))
            self.assertTrue(ObserverDBUtil.db_save_setting('TestSettingParam2', '12345'))
            self.assertTrue(ObserverDBUtil.db_save_setting('TestSettingParam', '4321'))
            retval = ObserverDBUtil.db_load_setting('TestSettingParam')
            self.assertEqual(retval, '4321')
            retval2 = ObserverDBUtil.db_load_setting('TestSettingParam2')
            self.assertEqual(retval2, '12345')

    def test_failcase(self):
        # Loading a setting that was never saved returns None.
        with test_database(self.test_db, [Settings]):
            retval = ObserverDBUtil.db_load_setting('TestSettingParam')
            self.assertIsNone(retval)

    def test_load_list_setting(self):
        with test_database(self.test_db, [Settings]):
            self.assertTrue(ObserverDBUtil.db_save_setting_as_json('TestSettingParam', ['1234', '4567']))
            self.assertTrue(ObserverDBUtil.db_save_setting_as_json('TestSettingParam2', [12345, 67891]))
            self.assertTrue(ObserverDBUtil.db_save_setting_as_json('TestSettingParam', ['4321', '7654']))
            retval = ObserverDBUtil.db_load_setting_as_json('TestSettingParam')
            self.assertEqual(retval, ['4321', '7654'])
            retval2 = ObserverDBUtil.db_load_setting_as_json('TestSettingParam2')
            self.assertEqual(retval2, [12345, 67891])
            # Load/Save: missing parameter should be created with the default.
            test_parameter = 'TestSettingParam3'
            default_value = 'This value is not in Settings'.split(" ")
            self.assertIsNone(ObserverDBUtil.db_load_setting(test_parameter))
            actual_value = ObserverDBUtil.db_load_save_setting_as_json(test_parameter, default_value)
            self.assertEqual(default_value, actual_value)

    def test_load_dict_setting(self):
        """ Use 'list' load/save for a dictionary.
        Drawbacks:
        - non-string keys are returned as strings (immutable type required for dict key)
        """
        with test_database(self.test_db, [Settings]):
            test_dict = {1: '2017-09-16', 2: 2.0, 3.0: 3.0}
            setting_name = "trips_with_last_TER_error_free"
            self.assertTrue(ObserverDBUtil.db_save_setting_as_json(setting_name, test_dict))
            retval = ObserverDBUtil.db_load_setting_as_json(setting_name)
            # JSON round-trip stringifies the keys.
            expected_dict = {str(k): v for k, v in test_dict.items()}
            self.assertEqual(retval, expected_dict)

    def test_load_tuple_setting_as_json(self):
        """ Use json load/save for a tuple.
        Drawbacks:
        - Returned as List, not Tuple
        """
        with test_database(self.test_db, [Settings]):
            test_tuple = ('2017-09-16', '2017-09-17', 3, 4.0)
            setting_name = "tuple_of_strings"
            self.assertTrue(ObserverDBUtil.db_save_setting_as_json(setting_name, test_tuple))
            retval = ObserverDBUtil.db_load_setting_as_json(setting_name)
            self.assertEqual(retval, list(test_tuple))

    def test_datefuncs(self):
        # Create two datetime objects, convert back and forth to string, compare
        nowdate = arrow.now()
        nowdatestr = ObserverDBUtil.get_arrow_datestr()
        nowdatestr_test = ObserverDBUtil.datetime_to_str(nowdate)
        date_from_str = ObserverDBUtil.str_to_datetime(nowdatestr)
        date2_from_str = ObserverDBUtil.str_to_datetime(nowdatestr_test)
        deltat = date_from_str - date2_from_str
        # check within 1 second of each other
        self.assertLess(abs(deltat.seconds), 1)
        nowdate = ObserverDBUtil.get_arrow_datestr(date_format='DD-MMM-YYYY')
        self.assertEqual(len('00-MMM-0000'), len(nowdate))

    def test_escapelf(self):
        # escape_linefeeds should replace newlines with <br> and strip quotes.
        test_str = 'this\nline has "things" and various\nfeeds'
        new_str = ObserverDBUtil.escape_linefeeds(test_str)
        self.assertTrue('\n' not in new_str)
        self.assertTrue('<br>' in new_str)
        self.assertTrue('"' not in new_str)

    def test_checksum_peewee_model(self):
        expected_sha1 = "689327755da6658627c0f015c25796a5cdc98c0c"
        with test_database(self.test_db, [Settings]):
            # Use Settings as test data table. Seed with two rows.
            self.assertTrue(ObserverDBUtil.db_save_setting('TestSettingParam1', '1234'))
            self.assertTrue(ObserverDBUtil.db_save_setting('TestSettingParam2', '4321'))
            actual_sha1 = ObserverDBUtil.checksum_peewee_model(Settings, logging)
            self.assertEqual(expected_sha1, actual_sha1)
            # Change a digit: SHA1 should change.
            self.assertTrue(ObserverDBUtil.db_save_setting('TestSettingParam2', '4221'))
            actual_sha1 = ObserverDBUtil.checksum_peewee_model(Settings, logging)
            # Was assertNotEquals - deprecated alias removed in Python 3.12.
            self.assertNotEqual(expected_sha1, actual_sha1)
            # Restore the original digit: SHA1's should match.
            self.assertTrue(ObserverDBUtil.db_save_setting('TestSettingParam2', '4321'))
            actual_sha1 = ObserverDBUtil.checksum_peewee_model(Settings, logging)
            self.assertEqual(expected_sha1, actual_sha1)

    def test_checksum_peewee_model_exception_not_possible_with_peewee(self):
        # Fresh DB so NoPrimaryKeyTable exists in isolation.
        clean_test_db = APSWDatabase(':memory:')
        with test_database(clean_test_db, [NoPrimaryKeyTable]):
            self.assertTrue(NoPrimaryKeyTable.table_exists())
            # Should complete without raising, even without a declared PK.
            ObserverDBUtil.checksum_peewee_model(NoPrimaryKeyTable, logging)
            """ No exception. That's because:
            "Because we have not specified a primary key, peewee will automatically add
            an auto-incrementing integer primary key field named id."
            (http://docs.peewee-orm.com/en/latest/peewee/models.html)
            """
            peewee_default_primary_key_field_name = 'id'
            primary_key_field = NoPrimaryKeyTable._meta.primary_key
            self.assertIsNotNone(primary_key_field)
            self.assertEqual(peewee_default_primary_key_field_name,
                             primary_key_field.name)

    def test_empty_string_coerce(self):
        with test_database(self.test_db, [TripChecks]):
            # Demonstrate peewee's problem with empty string in integer field, on a save of new record.
            expected_exception_msg = "invalid literal for int() with base 10: ''"
            try:
                test_record_1 = TripChecks(
                    allow_ack="N",
                    check_code="",  # Integer field!
                    check_message="A msg",
                    check_sql="Insert something",
                    check_type="E",
                    created_by=100,
                    created_date="12/05/2017",
                    status=0,
                    trip_check_group=456)
                test_record_1.save()
                self.fail("Should have objected to invalid literal")
            except ValueError as ve:
                self.assertEqual(expected_exception_msg, ve.args[0])

            # Demonstrate peewee's problem with empty string in integer field, on a read of a record.
            expected_exception_msg = "invalid literal for int() with base 10: ''"
            try:
                test_record_2 = TripChecks(
                    allow_ack="N",
                    check_code=0,
                    check_message="A msg",
                    check_sql="Insert something",
                    check_type="E",
                    created_by=101,
                    created_date="12/05/2017",
                    status=0,
                    trip_check_group=456)
                test_record_2.save()
                self._logger.debug(f"TRIP_CHECK_ID={test_record_2.trip_check}.")
                # Introduce an empty string in an integer string - outside of peewee model.
                ret_val = self.test_db.execute_sql(
                    f"update TRIP_CHECKS set CREATED_BY = '' where CREATED_BY = {test_record_2.created_by}")
                # Now try to access record with empty string in integer field.
                trip_check_record = TripChecks.get(TripChecks.trip_check == test_record_2.trip_check)
                self.fail("Should have objected to invalid literal")
            except ValueError as ve:
                self.assertEqual(expected_exception_msg, ve.args[0])

            # Run utility to clear empty strings from numeric fields.
            empty_field_count_dict = ObserverDBUtil.db_coerce_empty_strings_in_number_fields(
                TripChecks, self._logger)
            self.assertEqual(1, empty_field_count_dict["CREATED_BY"])

            # Now try to access record with formerly empty string in integer field - now should be zero.
            trip_check_record = TripChecks.get(TripChecks.trip_check == test_record_2.trip_check)
            self.assertEqual(0, trip_check_record.created_by,
                             "Empty string in non-nullable integer field should be 0.")
import os
import peewee
import datetime

from libs import config_bot as config
from playhouse.apsw_ext import (APSWDatabase, DateTimeField)

# Cleaner bot's SQLite database, stored in the configured bot data directory.
db_logger = APSWDatabase(os.path.join(config.BOT_DATA_DIR, 'db_cleaner.db'))


class BaseModel(peewee.Model):
    class Meta:
        database = db_logger


class Chat(BaseModel):
    # Chat id (presumably the Telegram chat id - confirm) doubles as the PK.
    id = peewee.IntegerField(primary_key=True)
    type = peewee.CharField(max_length=16)
    title = peewee.CharField(null=True)
    clean = peewee.BooleanField(default=False)
    created = DateTimeField(default=datetime.datetime.now)
    updated = DateTimeField(default=datetime.datetime.now)

    def __str__(self):
        return '<Chat #{id} {title}>'.format(
            id=self.id,
            title=self.title,
        )

    def save(self, *args, **kwargs):
        # Touch `updated` on every save so it tracks the last modification.
        self.updated = datetime.datetime.now()
        return super(Chat, self).save(*args, **kwargs)
def init_db(db_path):
    """Point the module-level `db` proxy at a WAL-mode APSW database."""
    pragmas = (('journal_mode', 'wal'),)
    db.initialize(APSWDatabase(db_path, pragmas=pragmas))
BooleanField, IntegerField, ForeignKeyField, TextField, )
from playhouse.apsw_ext import APSWDatabase

from gutenbergtozim import logger

# db = SqliteDatabase('gutenberg.db')
# Busy timeout: 1000 * 60 * 5 * 16 = 4,800,000 (80 min if APSW takes
# milliseconds, per its setbusytimeout - confirm).
timeout = 1000 * 60 * 5 * 16
db = APSWDatabase(
    "gutenberg.db",
    pragmas=(
        ("journal_mode", "WAL"),
        ("cache_size", 10000),
        ("mmap_size", 1024 * 1024 * 32),
    ),
    timeout=timeout,
)
db.connect()
# NOTE(review): journal_mode is already set via pragmas above; this
# explicit PRAGMA looks redundant - confirm before removing.
db.execute_sql("PRAGMA journal_mode=WAL;")


class BaseModel(Model):
    @classmethod
    def get_or_none(cls, *query, **kwargs):
        # Return the matching row, or None instead of raising DoesNotExist.
        try:
            return cls.get(*query, **kwargs)
        except cls.DoesNotExist:
            return None
from __future__ import (unicode_literals, absolute_import,
                        division, print_function)

from peewee import (Model,  # SqliteDatabase,
                    CharField, BooleanField, IntegerField,
                    ForeignKeyField, TextField)
from playhouse.apsw_ext import APSWDatabase

from gutenbergtozim import logger

# db = SqliteDatabase('gutenberg.db')
# Busy timeout: 1000 * 60 * 5 * 16 = 4,800,000 (80 min if APSW takes
# milliseconds, per its setbusytimeout - confirm).
timeout = 1000 * 60 * 5 * 16
db = APSWDatabase('gutenberg.db',
                  pragmas=(
                      ('journal_mode', 'WAL'),
                      ('cache_size', 10000),
                      ('mmap_size', 1024 * 1024 * 32)),
                  timeout=timeout)
db.connect()
# NOTE(review): journal_mode is already set via pragmas above; this
# explicit PRAGMA looks redundant - confirm before removing.
db.execute_sql("PRAGMA journal_mode=WAL;")


class BaseModel(Model):
    @classmethod
    def get_or_none(cls, *query, **kwargs):
        # Return the matching row, or None instead of raising DoesNotExist.
        try:
            return cls.get(*query, **kwargs)
        except cls.DoesNotExist:
            return None
import os

from libs import config_bot as config
from peewee import *
from playhouse.apsw_ext import APSWDatabase

import logging

# if be.DEBUG_MODE:
#     cleaner = logging.getLogger('peewee')
#     cleaner.addHandler(logging.StreamHandler())
#     cleaner.setLevel(logging.DEBUG)

# Bot state database, stored in the configured bot data directory.
db_bot = APSWDatabase(os.path.join(config.BOT_DATA_DIR, 'bot.db'))


class BotModel(Model):
    class Meta:
        database = db_bot


class Chat(BotModel):
    # Chat id (presumably the Telegram chat id - confirm) doubles as the PK.
    id = IntegerField(primary_key=True)
    type = CharField(max_length=16)
    title = CharField(null=True)
    # Permission flags stored per chat (mirrors Telegram-style chat
    # permissions, judging by the field names - confirm).
    can_send_messages = BooleanField()
    can_send_media_messages = BooleanField()
    can_send_polls = BooleanField()
    can_send_other_messages = BooleanField()
    can_add_web_page_previews = BooleanField()
    can_change_info = BooleanField()
    can_invite_users = BooleanField()