def __init__(self, db_file=None): self.cleanup = True if db_file is None: self.db_file = ':memory:' self.cleanup = False else: self.db_file = db_file try: os.remove(self.db_file) except FileNotFoundError: pass sqlite3.enable_shared_cache(True)
def __init__(self, filename=None, initialize=False): sqlite3.enable_shared_cache(True) if filename is None: filename = db_filename self.filename = filename self.conn = sqlite3.connect(self.filename, isolation_level='DEFERRED') self.conn.row_factory = sqlite3.Row self.conn.text_factory = str self.free_places_update_time = None if initialize: self.initialize()
def open_database():
    """Open the module-level database connection.

    Sets the globals `connection` and `cursor`.

    Historical note (kept from the original): this was changed due to
    "database is locked" errors — a timeout was added, shared cache was
    enabled, isolation_level was set to None (autocommit) and the
    journal mode switched to WAL. See
    https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.isolation_level
    and http://charlesleifer.com/blog/going-fast-with-sqlite-and-python/
    """
    global cursor
    global connection
    # enable_shared_cache() is deprecated since Python 3.10 and removed
    # in 3.12; guard the call so opening the database still works there.
    if hasattr(sqlite3, 'enable_shared_cache'):
        try:
            sqlite3.enable_shared_cache(True)
        except sqlite3.OperationalError:
            pass
    # Open database in autocommit mode by setting isolation_level to None.
    connection = sqlite3.connect(pi_ager_paths.sqlite3_file, isolation_level=None, timeout=10)
    # Set journal mode to WAL (Write-Ahead Log)
    connection.execute('PRAGMA journal_mode = wal')
    connection.execute('PRAGMA synchronous = OFF')
    connection.execute('PRAGMA read_uncommitted = True')
    connection.row_factory = sqlite3.Row
    cursor = connection.cursor()
from bb.compat import total_ordering
from collections import Mapping

try:
    import sqlite3
except ImportError:
    from pysqlite2 import dbapi2 as sqlite3

# Require sqlite 3.3.0+ (CREATE TABLE IF NOT EXISTS support).
sqlversion = sqlite3.sqlite_version_info
if sqlversion[:2] < (3, 3):
    raise Exception("sqlite3 version 3.3.0 or later is required.")

logger = logging.getLogger("BitBake.PersistData")

# Shared-cache mode may be unavailable or refused by the library.
if hasattr(sqlite3, 'enable_shared_cache'):
    try:
        sqlite3.enable_shared_cache(True)
    except sqlite3.OperationalError:
        pass


@total_ordering
class SQLTable(collections.MutableMapping):
    """A single table/domain of the persistent database, exposed as a mapping."""

    def __init__(self, cachefile, table):
        self.cachefile, self.table = cachefile, table
        self.cursor = connect(self.cachefile)
        self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % table)
import os.path
import sys
import warnings

from bb.compat import total_ordering
from collections import Mapping

import sqlite3

# Require sqlite 3.3.0+ (CREATE TABLE IF NOT EXISTS support).
sqlversion = sqlite3.sqlite_version_info
if sqlversion[:2] < (3, 3):
    raise Exception("sqlite3 version 3.3.0 or later is required.")

logger = logging.getLogger("BitBake.PersistData")

# Shared-cache mode may be unavailable or refused by the library.
if hasattr(sqlite3, 'enable_shared_cache'):
    try:
        sqlite3.enable_shared_cache(True)
    except sqlite3.OperationalError:
        pass


@total_ordering
class SQLTable(collections.MutableMapping):
    """A single table/domain of the persistent database, exposed as a mapping."""

    def __init__(self, cachefile, table):
        self.cachefile, self.table = cachefile, table
        self.cursor = connect(self.cachefile)
        self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % table)
def CheckSharedCacheDeprecated(self):
    """enable_shared_cache() must warn DeprecationWarning for both arguments,
    and the warning must be attributed to the sqlite3 dbapi module."""
    for flag in (True, False):
        with self.assertWarns(DeprecationWarning) as cm:
            sqlite.enable_shared_cache(flag)
        self.assertIn("dbapi.py", cm.filename)
import sqlite3

import synapse.lib.db as s_db

'''
Integration utilities for sqlite db pools.
'''

# Turn on db cache sharing.  enable_shared_cache() is deprecated since
# Python 3.10 and removed in 3.12; guard the call so importing this
# module does not crash on modern interpreters.
if hasattr(sqlite3, 'enable_shared_cache'):
    try:
        sqlite3.enable_shared_cache(1)
    except sqlite3.OperationalError:
        pass

def pool(size, path, **kwargs):
    '''
    Create an sqlite connection pool.

    Args:
        size (int): Number of connections in the pool.
        path (str): Path to the sqlite file.
        **kwargs: Accepted for interface compatibility; currently unused.

    Returns:
        s_db.Pool: A DB Pool for sqlite connections.
    '''
    def ctor():
        # check_same_thread=False lets pooled connections migrate threads.
        db = sqlite3.connect(path, check_same_thread=False)
        db.cursor().execute('PRAGMA read_uncommitted=1').close()
        return db
    return s_db.Pool(size, ctor=ctor)
def startup_low(self):
    """Early (pre-UI) startup: parse flags, resolve the instance/appdata
    paths, and start the background/event-reporting machinery."""
    # Parse CLI flags and install trace handlers before anything else so
    # the earliest startup activity is already traced.
    flags, debug_output = self._parse_command_line_flags()
    install_global_trace_handlers(flags=flags, args=self.argv)
    for item in debug_output:
        TRACE(item)
    TRACE('Dropbox called with options: %r %r', flags, self.argv)
    # NOTE(review): result discarded — presumably warms strptime's lazy
    # locale/format caches on the main thread to avoid a known
    # thread-safety issue; confirm before removing.
    time.strptime('07/25/2013', '%m/%d/%Y')
    if feature_enabled('multiaccount'):
        self.instance_config = InstanceConfig(self)
        instance_id = flags.get('--client', None)
        if instance_id:
            # '--client <id>' means we were spawned as a secondary
            # (non-master) client instance.
            instance_row = self.instance_config.instance_db.get_row(int(instance_id))
            TRACE('Running as secondary client')
            self.mbox.is_secondary = True
            self.mbox.initialize_handlers()
            # Best effort: the primary's pipe address may be absent/bad.
            try:
                self.mbox.primary_address = flags['--server-address']
                TRACE('Primary client pipe address is %r', self.mbox.primary_address)
            except Exception:
                unhandled_exc_handler()
        else:
            instance_row = self.instance_config.instance_db.get_or_create_master()
        TRACE('Multibox configuration info: %r', instance_row)
        # Paths/names for this instance come from the instance database.
        self.instance_id = instance_row.id
        self.appdata_path = instance_row.appdata_path
        self.default_dropbox_path = instance_row.default_dropbox_path
        self.default_dropbox_folder_name = instance_row.default_dropbox_folder_name
        TRACE('MULTIACCOUNT: Setting default_path to %r and default_folder_name to %r' % (self.default_dropbox_path, self.default_dropbox_folder_name))
        if not hasattr(build_number, 'frozen'):
            # Non-frozen (development) build: flags reach mbox directly.
            self.mbox.parse_commandline_flags(flags)
    else:
        # Single-account mode: use the platform's fixed defaults.
        self.appdata_path = arch.constants.appdata_path
        self.default_dropbox_path = arch.constants.default_dropbox_path
        self.default_dropbox_folder_name = arch.constants.default_dropbox_folder_name
    set_debugging(not build_number.stable_build() or not build_number.is_frozen())
    set_tracing_sql(not build_number.is_frozen() and os.getenv('DBSQLTRACE'))
    self.dropbox_url_info = dropbox_url_info
    self.background_worker = BackgroundWorkerThread()
    self.background_worker.start()
    self.desktop_login = DesktopLogin(self)
    self.client_shmodel = ClientShmodel(self)
    arch.startup.install_early_in_boot(self)
    arch.startup.pre_appdata_use_startup(self.appdata_path)
    # 448 decimal == 0o700 — presumably owner-only directory permissions;
    # exact semantics depend on safe_makedirs (TODO confirm).
    for path in (self.appdata_path,):
        self.safe_makedirs(path, 448)
    # NOTE(review): enable_shared_cache() is deprecated in Python 3.10
    # and removed in 3.12; this unguarded call will break on modern
    # interpreters (this codebase targets an older Python).
    sqlite3.enable_shared_cache(True)
    try:
        start_trace_thread(self)
    except Exception:
        unhandled_exc_handler(False)
    TRACE('Initializing the event reporter.')
    self.event = db_thread(EventReporterThread)(get_build_number(), self)
    self.event.start()

    def flush(*n, **kw):
        # Force-flush pending events when the app quits.
        self.event.flush_events(True)

    self.add_quit_handler(flush)
def createDatabase(self):
    """Create the database at `self.db_filename` from the SQL script in
    `self.schema_filename`.

    Fixes over the original:
    - `enable_shared_cache()` only affects connections opened AFTER the
      call, so it is now invoked before `connect()` (the original called
      it last, where it had no effect on this connection). It is also
      guarded, since the API was removed in Python 3.12.
    - `with sqlite3.connect(...)` only manages the transaction, not the
      connection's lifetime, so the connection is now closed explicitly.
    """
    if hasattr(sqlite3, 'enable_shared_cache'):
        try:
            sqlite3.enable_shared_cache(True)
        except sqlite3.OperationalError:
            pass
    with open(self.schema_filename, 'rt') as f:
        schema = f.read()
    conn = sqlite3.connect(self.db_filename)
    try:
        # The connection context manager commits on success, rolls back
        # on error — it does not close the connection.
        with conn:
            conn.executescript(schema)
    finally:
        conn.close()